diff --git a/builtin/providers/aws/provider.go b/builtin/providers/aws/provider.go
--- a/builtin/providers/aws/provider.go
+++ b/builtin/providers/aws/provider.go
@@ -120,6 +120,7 @@ func Provider() terraform.ResourceProvider {
 			"aws_route_table_association": resourceAwsRouteTableAssociation(),
 			"aws_route_table":             resourceAwsRouteTable(),
 			"aws_s3_bucket":               resourceAwsS3Bucket(),
+			"aws_s3_bucket_object":        resourceAwsS3BucketObject(),
 			"aws_security_group":          resourceAwsSecurityGroup(),
 			"aws_security_group_rule":     resourceAwsSecurityGroupRule(),
 			"aws_sqs_queue":               resourceAwsSqsQueue(),
diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object.go b/builtin/providers/aws/resource_aws_s3_bucket_object.go
new file mode 100644
--- /dev/null
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object.go
@@ -0,0 +1,123 @@
+package aws
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"github.com/hashicorp/terraform/helper/schema"
+
+	"github.com/awslabs/aws-sdk-go/aws"
+	"github.com/awslabs/aws-sdk-go/aws/awsutil"
+	"github.com/awslabs/aws-sdk-go/service/s3"
+)
+
+// resourceAwsS3BucketObject defines the aws_s3_bucket_object resource.
+// bucket, key and source are all required and ForceNew: changing any of
+// them recreates the object.
+func resourceAwsS3BucketObject() *schema.Resource {
+	return &schema.Resource{
+		Create: resourceAwsS3BucketObjectPut,
+		Read:   resourceAwsS3BucketObjectRead,
+		Update: resourceAwsS3BucketObjectPut,
+		Delete: resourceAwsS3BucketObjectDelete,
+
+		Schema: map[string]*schema.Schema{
+			"bucket": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+
+			"key": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+
+			"source": &schema.Schema{
+				Type:     schema.TypeString,
+				Required: true,
+				ForceNew: true,
+			},
+		},
+	}
+}
+
+// resourceAwsS3BucketObjectPut uploads the local file named by "source" to
+// bucket/key. The returned ETag becomes the resource ID.
+func resourceAwsS3BucketObjectPut(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+	source := d.Get("source").(string)
+
+	file, err := os.Open(source)
+	if err != nil {
+		d.SetId("")
+		return fmt.Errorf("Error opening S3 bucket object source(%s): %s", source, err)
+	}
+	// Close the source file once the upload finishes; os.Open leaks the
+	// descriptor otherwise.
+	defer file.Close()
+
+	resp, err := s3conn.PutObject(
+		&s3.PutObjectInput{
+			Bucket: aws.String(bucket),
+			Key:    aws.String(key),
+			Body:   file,
+		})
+	if err != nil {
+		d.SetId("")
+		return fmt.Errorf("Error putting object in S3 bucket (%s): %s", bucket, err)
+	}
+
+	d.SetId(*resp.ETag)
+	return nil
+}
+
+// resourceAwsS3BucketObjectRead checks that the object still exists and
+// still matches the stored ETag (IfMatch). A failed HEAD clears the ID so
+// Terraform treats the object as gone.
+func resourceAwsS3BucketObjectRead(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+
+	resp, err := s3conn.HeadObject(
+		&s3.HeadObjectInput{
+			Bucket:  aws.String(bucket),
+			Key:     aws.String(key),
+			IfMatch: aws.String(d.Id()),
+		})
+	if err != nil {
+		// If there is an error reading the object we assume it's not there.
+		d.SetId("")
+		log.Printf("Error Reading Object (%s): %s", key, err)
+		// resp is nil on error; return now instead of dereferencing it below.
+		return nil
+	}
+
+	log.Printf("%s", awsutil.StringValue(resp))
+	return nil
+}
+
+// resourceAwsS3BucketObjectDelete removes the object from the bucket.
+func resourceAwsS3BucketObjectDelete(d *schema.ResourceData, meta interface{}) error {
+	s3conn := meta.(*AWSClient).s3conn
+
+	bucket := d.Get("bucket").(string)
+	key := d.Get("key").(string)
+
+	_, err := s3conn.DeleteObject(
+		&s3.DeleteObjectInput{
+			Bucket: aws.String(bucket),
+			Key:    aws.String(key),
+		})
+	if err != nil {
+		return fmt.Errorf("Error deleting S3 bucket object: %s", err)
+	}
+	return nil
+}
diff --git a/builtin/providers/aws/resource_aws_s3_bucket_object_test.go b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
new file mode 100644
--- /dev/null
+++ b/builtin/providers/aws/resource_aws_s3_bucket_object_test.go
@@ -0,0 +1,100 @@
+package aws
+
+import (
+	"fmt"
+	"io/ioutil"
+	"os"
+	"testing"
+
+	"github.com/hashicorp/terraform/helper/resource"
+	"github.com/hashicorp/terraform/terraform"
+
+	"github.com/awslabs/aws-sdk-go/aws"
+	"github.com/awslabs/aws-sdk-go/service/s3"
+)
+
+var tf, err = ioutil.TempFile("", "tf")
+
+func TestAccAWSS3BucketObject_basic(t *testing.T) {
+	// First write some data to the tempfile just so it's not 0 bytes.
+	if err := ioutil.WriteFile(tf.Name(), []byte("{anything will do }"), 0644); err != nil {
+		t.Fatalf("err: %s", err)
+	}
+	resource.Test(t, resource.TestCase{
+		PreCheck: func() {
+			if err != nil {
+				panic(err)
+			}
+			testAccPreCheck(t)
+		},
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckAWSS3BucketObjectDestroy,
+		Steps: []resource.TestStep{
+			resource.TestStep{
+				Config: testAccAWSS3BucketObjectConfig,
+				Check:  testAccCheckAWSS3BucketObjectExists("aws_s3_bucket_object.object"),
+			},
+		},
+	})
+}
+
+func testAccCheckAWSS3BucketObjectDestroy(s *terraform.State) error {
+	s3conn := testAccProvider.Meta().(*AWSClient).s3conn
+
+	for _, rs := range s.RootModule().Resources {
+		if rs.Type != "aws_s3_bucket_object" {
+			continue
+		}
+
+		_, err := s3conn.HeadObject(
+			&s3.HeadObjectInput{
+				Bucket:  aws.String(rs.Primary.Attributes["bucket"]),
+				Key:     aws.String(rs.Primary.Attributes["key"]),
+				IfMatch: aws.String(rs.Primary.ID),
+			})
+		if err == nil {
+			return fmt.Errorf("AWS S3 Object still exists: %s", rs.Primary.ID)
+		}
+	}
+	return nil
+}
+
+func testAccCheckAWSS3BucketObjectExists(n string) resource.TestCheckFunc {
+	return func(s *terraform.State) error {
+		defer os.Remove(tf.Name())
+
+		rs, ok := s.RootModule().Resources[n]
+		if !ok {
+			return fmt.Errorf("Not Found: %s", n)
+		}
+
+		if rs.Primary.ID == "" {
+			return fmt.Errorf("No S3 Bucket Object ID is set")
+		}
+
+		s3conn := testAccProvider.Meta().(*AWSClient).s3conn
+		_, err := s3conn.GetObject(
+			&s3.GetObjectInput{
+				Bucket:  aws.String(rs.Primary.Attributes["bucket"]),
+				Key:     aws.String(rs.Primary.Attributes["key"]),
+				IfMatch: aws.String(rs.Primary.ID),
+			})
+		if err != nil {
+			return fmt.Errorf("S3Bucket Object error: %s", err)
+		}
+		return nil
+	}
+}
+
+var randomBucket = randInt
+var testAccAWSS3BucketObjectConfig = fmt.Sprintf(`
+resource "aws_s3_bucket" "object_bucket" {
+	bucket = "tf-object-test-bucket-%d"
+}
+resource "aws_s3_bucket_object" "object" {
+	depends_on = ["aws_s3_bucket.object_bucket"]
+	bucket = "tf-object-test-bucket-%d"
+	key = "test-key"
+	source = "%s"
+}
+`, randomBucket, randomBucket, tf.Name())