Merge pull request #3200 from zpatrick/master
Optional "content" argument for "s3_bucket_object"
commit a888cd6f62
resource_aws_s3_bucket_object.go

@@ -1,7 +1,9 @@
 package aws
 
 import (
+	"bytes"
 	"fmt"
+	"io"
 	"log"
 	"os"
 
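The two new imports are what let the resource accept either a file on disk or an inline string: `os.Open` yields an `*os.File` and `bytes.NewReader` yields a `*bytes.Reader`, and both satisfy the `io.ReadSeeker` interface the S3 SDK expects for a request body. A minimal standalone sketch of that point (not part of the diff; the file path is a placeholder):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"os"
)

func main() {
	// Both concrete types can be assigned to the same io.ReadSeeker variable,
	// which is why the resource can hand either one to PutObject unchanged.
	var body io.ReadSeeker

	body = bytes.NewReader([]byte("inline object content")) // the "content" case
	fmt.Printf("%T implements io.ReadSeeker\n", body)

	if f, err := os.Open("/tmp/example.txt"); err == nil { // the "source" case
		defer f.Close()
		body = f
		fmt.Printf("%T implements io.ReadSeeker\n", body)
	}
}
```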
@@ -32,9 +34,17 @@ func resourceAwsS3BucketObject() *schema.Resource {
 			},
 
 			"source": &schema.Schema{
 				Type:     schema.TypeString,
-				Required: true,
+				Optional: true,
 				ForceNew: true,
+				ConflictsWith: []string{"content"},
+			},
+
+			"content": &schema.Schema{
+				Type:          schema.TypeString,
+				Optional:      true,
+				ForceNew:      true,
+				ConflictsWith: []string{"source"},
 			},
 
 			"etag": &schema.Schema{
@@ -50,19 +60,28 @@ func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error {
 
 	bucket := d.Get("bucket").(string)
 	key := d.Get("key").(string)
-	source := d.Get("source").(string)
+	var body io.ReadSeeker
 
-	file, err := os.Open(source)
-
-	if err != nil {
-		return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err)
+	if v, ok := d.GetOk("source"); ok {
+		source := v.(string)
+		file, err := os.Open(source)
+		if err != nil {
+			return fmt.Errorf("Error opening S3 bucket object source (%s): %s", source, err)
+		}
+
+		body = file
+	} else if v, ok := d.GetOk("content"); ok {
+		content := v.(string)
+		body = bytes.NewReader([]byte(content))
+	} else {
+		return fmt.Errorf("Must specify \"source\" or \"content\" field")
 	}
 
 	resp, err := s3conn.PutObject(
 		&s3.PutObjectInput{
 			Bucket: aws.String(bucket),
 			Key:    aws.String(key),
-			Body:   file,
+			Body:   body,
 		})
 
 	if err != nil {
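For context, this is roughly what the call boils down to when `content` is set. The sketch below uses the present-day aws-sdk-go v1 session helpers, which may differ from the SDK vendored on this branch, and the bucket and key names are made up:

```go
package main

import (
	"bytes"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Hypothetical bucket and key, for illustration only.
	sess := session.Must(session.NewSession())
	s3conn := s3.New(sess)

	// An inline string becomes an io.ReadSeeker via bytes.NewReader,
	// just as the resource does for the "content" argument.
	_, err := s3conn.PutObject(&s3.PutObjectInput{
		Bucket: aws.String("tf-object-test-bucket"),
		Key:    aws.String("test-key"),
		Body:   bytes.NewReader([]byte("some_bucket_content")),
	})
	if err != nil {
		log.Fatalf("Error putting object in S3 bucket: %s", err)
	}
}
```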
@@ -118,3 +137,4 @@ func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error {
 	}
 	return nil
 }
+
resource_aws_s3_bucket_object_test.go

@@ -15,7 +15,7 @@ import (
 
 var tf, err = ioutil.TempFile("", "tf")
 
-func TestAccAWSS3BucketObject_basic(t *testing.T) {
+func TestAccAWSS3BucketObject_source(t *testing.T) {
 	// first write some data to the tempfile just so it's not 0 bytes.
 	ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644)
 	resource.Test(t, resource.TestCase{
@@ -29,7 +29,26 @@ func TestAccAWSS3BucketObject_basic(t *testing.T) {
 		CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
 		Steps: []resource.TestStep{
 			resource.TestStep{
-				Config: testAccAWSS3BucketObjectConfig,
+				Config: testAccAWSS3BucketObjectConfigSource,
+				Check:  testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
+			},
+		},
+	})
+}
+
+func TestAccAWSS3BucketObject_content(t *testing.T) {
+	resource.Test(t, resource.TestCase{
+		PreCheck: func() {
+			if err != nil {
+				panic(err)
+			}
+			testAccPreCheck(t)
+		},
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+		Steps: []resource.TestStep{
+			resource.TestStep{
+				Config: testAccAWSS3BucketObjectConfigContent,
 				Check:  testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
 			},
 		},
@@ -86,14 +105,25 @@ func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc {
 }
 
 var randomBucket = randInt
-var testAccAWSS3BucketObjectConfig = fmt.Sprintf(`
+var testAccAWSS3BucketObjectConfigSource = fmt.Sprintf(`
 resource "aws_s3_bucket" "object_bucket" {
 	bucket = "tf-object-test-bucket-%d"
 }
 
 resource "aws_s3_bucket_object" "object" {
 	bucket = "${aws_s3_bucket.object_bucket.bucket}"
 	key = "test-key"
 	source = "%s"
 }
 `, randomBucket, tf.Name())
+
+var testAccAWSS3BucketObjectConfigContent = fmt.Sprintf(`
+resource "aws_s3_bucket" "object_bucket" {
+	bucket = "tf-object-test-bucket-%d"
+}
+resource "aws_s3_bucket_object" "object" {
+	bucket = "${aws_s3_bucket.object_bucket.bucket}"
+	key = "test-key"
+	content = "some_bucket_content"
+}
+`, randomBucket)
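The new acceptance test only asserts that the object exists. A natural follow-up check, not part of this PR and sketched here under the same aws-sdk-go v1 assumptions as above, would fetch the object back and compare its body to the expected string:

```go
package main

import (
	"io/ioutil"
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Hypothetical bucket and key mirroring the test fixture above.
	sess := session.Must(session.NewSession())
	s3conn := s3.New(sess)

	out, err := s3conn.GetObject(&s3.GetObjectInput{
		Bucket: aws.String("tf-object-test-bucket"),
		Key:    aws.String("test-key"),
	})
	if err != nil {
		log.Fatalf("GetObject failed: %s", err)
	}
	defer out.Body.Close()

	got, err := ioutil.ReadAll(out.Body)
	if err != nil {
		log.Fatalf("reading object body: %s", err)
	}
	if string(got) != "some_bucket_content" {
		log.Fatalf("unexpected object body: %q", got)
	}
}
```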
website documentation for aws_s3_bucket_object

@@ -28,7 +28,11 @@ The following arguments are supported:
 
 * `bucket` - (Required) The name of the bucket to put the file in.
 * `key` - (Required) The name of the object once it is in the bucket.
-* `source` - (Required) The path to the source file being uploaded to the bucket.
+* `source` - (Required unless `content` given) The path to the source file being uploaded to the bucket.
+* `content` - (Required unless `source` given) The literal content being uploaded to the bucket.
+
+Either `source` or `content` must be provided to specify the bucket content.
+These two arguments are mutually-exclusive.
 
 ## Attributes Reference
 
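The documentation now describes two mutually exclusive ways to provide the object body. A sketch of what a user-facing configuration could look like, written in the same Go-string style as the test fixtures above; the resource labels, bucket, keys, and file path are placeholders, not taken from the PR:

```go
package main

import "fmt"

func main() {
	// Hypothetical configuration showing the mutually exclusive
	// "source" and "content" forms of aws_s3_bucket_object.
	config := `
resource "aws_s3_bucket_object" "from_file" {
	bucket = "my-example-bucket"
	key    = "index.html"
	source = "./index.html"
}

resource "aws_s3_bucket_object" "from_string" {
	bucket  = "my-example-bucket"
	key     = "greeting.txt"
	content = "hello world"
}
`
	fmt.Println(config)
}
```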