provider/aws: Added s3 bucket region attribute management (#10482)
commit 1d090eb1ab
parent f304036d6d
resource_aws_s3_bucket.go

@@ -28,57 +28,57 @@ func resourceAwsS3Bucket() *schema.Resource {
     },

     Schema: map[string]*schema.Schema{
-      "bucket": &schema.Schema{
+      "bucket": {
         Type: schema.TypeString,
         Required: true,
         ForceNew: true,
       },

-      "arn": &schema.Schema{
+      "arn": {
         Type: schema.TypeString,
         Optional: true,
         Computed: true,
       },

-      "acl": &schema.Schema{
+      "acl": {
         Type: schema.TypeString,
         Default: "private",
         Optional: true,
       },

-      "policy": &schema.Schema{
+      "policy": {
         Type: schema.TypeString,
         Optional: true,
         ValidateFunc: validateJsonString,
         DiffSuppressFunc: suppressEquivalentAwsPolicyDiffs,
       },

-      "cors_rule": &schema.Schema{
+      "cors_rule": {
         Type: schema.TypeList,
         Optional: true,
         Elem: &schema.Resource{
           Schema: map[string]*schema.Schema{
-            "allowed_headers": &schema.Schema{
+            "allowed_headers": {
               Type: schema.TypeList,
               Optional: true,
               Elem: &schema.Schema{Type: schema.TypeString},
             },
-            "allowed_methods": &schema.Schema{
+            "allowed_methods": {
               Type: schema.TypeList,
               Required: true,
               Elem: &schema.Schema{Type: schema.TypeString},
             },
-            "allowed_origins": &schema.Schema{
+            "allowed_origins": {
               Type: schema.TypeList,
               Required: true,
               Elem: &schema.Schema{Type: schema.TypeString},
             },
-            "expose_headers": &schema.Schema{
+            "expose_headers": {
               Type: schema.TypeList,
               Optional: true,
               Elem: &schema.Schema{Type: schema.TypeString},
             },
-            "max_age_seconds": &schema.Schema{
+            "max_age_seconds": {
               Type: schema.TypeInt,
               Optional: true,
             },
@@ -86,22 +86,22 @@ func resourceAwsS3Bucket() *schema.Resource {
         },
       },

-      "website": &schema.Schema{
+      "website": {
         Type: schema.TypeList,
         Optional: true,
         Elem: &schema.Resource{
           Schema: map[string]*schema.Schema{
-            "index_document": &schema.Schema{
+            "index_document": {
               Type: schema.TypeString,
               Optional: true,
             },

-            "error_document": &schema.Schema{
+            "error_document": {
               Type: schema.TypeString,
               Optional: true,
             },

-            "redirect_all_requests_to": &schema.Schema{
+            "redirect_all_requests_to": {
               Type: schema.TypeString,
               ConflictsWith: []string{
                 "website.0.index_document",
@@ -111,7 +111,7 @@ func resourceAwsS3Bucket() *schema.Resource {
               Optional: true,
             },

-            "routing_rules": &schema.Schema{
+            "routing_rules": {
               Type: schema.TypeString,
               Optional: true,
               ValidateFunc: validateJsonString,
@@ -124,34 +124,34 @@ func resourceAwsS3Bucket() *schema.Resource {
         },
       },

-      "hosted_zone_id": &schema.Schema{
+      "hosted_zone_id": {
         Type: schema.TypeString,
         Optional: true,
         Computed: true,
       },

-      "region": &schema.Schema{
+      "region": {
         Type: schema.TypeString,
         Optional: true,
         Computed: true,
       },
-      "website_endpoint": &schema.Schema{
+      "website_endpoint": {
         Type: schema.TypeString,
         Optional: true,
         Computed: true,
       },
-      "website_domain": &schema.Schema{
+      "website_domain": {
         Type: schema.TypeString,
         Optional: true,
         Computed: true,
       },

-      "versioning": &schema.Schema{
+      "versioning": {
         Type: schema.TypeSet,
         Optional: true,
         Elem: &schema.Resource{
           Schema: map[string]*schema.Schema{
-            "enabled": &schema.Schema{
+            "enabled": {
               Type: schema.TypeBool,
               Optional: true,
               Default: false,
@@ -167,16 +167,16 @@ func resourceAwsS3Bucket() *schema.Resource {
         },
       },

-      "logging": &schema.Schema{
+      "logging": {
         Type: schema.TypeSet,
         Optional: true,
         Elem: &schema.Resource{
           Schema: map[string]*schema.Schema{
-            "target_bucket": &schema.Schema{
+            "target_bucket": {
               Type: schema.TypeString,
               Required: true,
             },
-            "target_prefix": &schema.Schema{
+            "target_prefix": {
               Type: schema.TypeString,
               Optional: true,
             },
@@ -191,80 +191,80 @@ func resourceAwsS3Bucket() *schema.Resource {
         },
       },

-      "lifecycle_rule": &schema.Schema{
+      "lifecycle_rule": {
         Type: schema.TypeList,
         Optional: true,
         Elem: &schema.Resource{
           Schema: map[string]*schema.Schema{
-            "id": &schema.Schema{
+            "id": {
              Type: schema.TypeString,
              Optional: true,
              Computed: true,
              ValidateFunc: validateS3BucketLifecycleRuleId,
            },
-            "prefix": &schema.Schema{
+            "prefix": {
              Type: schema.TypeString,
              Required: true,
            },
-            "enabled": &schema.Schema{
+            "enabled": {
              Type: schema.TypeBool,
              Required: true,
            },
-            "abort_incomplete_multipart_upload_days": &schema.Schema{
+            "abort_incomplete_multipart_upload_days": {
              Type: schema.TypeInt,
              Optional: true,
            },
-            "expiration": &schema.Schema{
+            "expiration": {
              Type: schema.TypeSet,
              Optional: true,
              Set: expirationHash,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
-                  "date": &schema.Schema{
+                  "date": {
                    Type: schema.TypeString,
                    Optional: true,
                    ValidateFunc: validateS3BucketLifecycleTimestamp,
                  },
-                  "days": &schema.Schema{
+                  "days": {
                    Type: schema.TypeInt,
                    Optional: true,
                  },
-                  "expired_object_delete_marker": &schema.Schema{
+                  "expired_object_delete_marker": {
                    Type: schema.TypeBool,
                    Optional: true,
                  },
                },
              },
            },
-            "noncurrent_version_expiration": &schema.Schema{
+            "noncurrent_version_expiration": {
              Type: schema.TypeSet,
              Optional: true,
              Set: expirationHash,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
-                  "days": &schema.Schema{
+                  "days": {
                    Type: schema.TypeInt,
                    Optional: true,
                  },
                },
              },
            },
-            "transition": &schema.Schema{
+            "transition": {
              Type: schema.TypeSet,
              Optional: true,
              Set: transitionHash,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
-                  "date": &schema.Schema{
+                  "date": {
                    Type: schema.TypeString,
                    Optional: true,
                    ValidateFunc: validateS3BucketLifecycleTimestamp,
                  },
-                  "days": &schema.Schema{
+                  "days": {
                    Type: schema.TypeInt,
                    Optional: true,
                  },
-                  "storage_class": &schema.Schema{
+                  "storage_class": {
                    Type: schema.TypeString,
                    Required: true,
                    ValidateFunc: validateS3BucketLifecycleStorageClass,
@@ -272,17 +272,17 @@ func resourceAwsS3Bucket() *schema.Resource {
                },
              },
            },
-            "noncurrent_version_transition": &schema.Schema{
+            "noncurrent_version_transition": {
              Type: schema.TypeSet,
              Optional: true,
              Set: transitionHash,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
-                  "days": &schema.Schema{
+                  "days": {
                    Type: schema.TypeInt,
                    Optional: true,
                  },
-                  "storage_class": &schema.Schema{
+                  "storage_class": {
                    Type: schema.TypeString,
                    Required: true,
                    ValidateFunc: validateS3BucketLifecycleStorageClass,
@@ -294,20 +294,20 @@ func resourceAwsS3Bucket() *schema.Resource {
         },
       },

-      "force_destroy": &schema.Schema{
+      "force_destroy": {
         Type: schema.TypeBool,
         Optional: true,
         Default: false,
       },

-      "acceleration_status": &schema.Schema{
+      "acceleration_status": {
         Type: schema.TypeString,
         Optional: true,
         Computed: true,
         ValidateFunc: validateS3BucketAccelerationStatus,
       },

-      "request_payer": &schema.Schema{
+      "request_payer": {
         Type: schema.TypeString,
         Optional: true,
         Computed: true,
@@ -321,7 +321,6 @@ func resourceAwsS3Bucket() *schema.Resource {

 func resourceAwsS3BucketCreate(d *schema.ResourceData, meta interface{}) error {
   s3conn := meta.(*AWSClient).s3conn
-  awsRegion := meta.(*AWSClient).region

   // Get the bucket and acl
   bucket := d.Get("bucket").(string)
@@ -334,6 +333,14 @@ func resourceAwsS3BucketCreate(d *schema.ResourceData, meta interface{}) error {
     ACL: aws.String(acl),
   }

+  var awsRegion string
+  if region, ok := d.GetOk("region"); ok {
+    awsRegion = region.(string)
+  } else {
+    awsRegion = meta.(*AWSClient).region
+  }
+  log.Printf("[DEBUG] S3 bucket create: %s, using region: %s", bucket, awsRegion)
+
   // Special case us-east-1 region and do not set the LocationConstraint.
   // See "Request Elements: http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketPUT.html
   if awsRegion != "us-east-1" {
@@ -589,7 +596,7 @@ func resourceAwsS3BucketRead(d *schema.ResourceData, meta interface{}) error {
     }
   }

-  //read the acceleration status
+  // Read the acceleration status
   accelerate, err := s3conn.GetBucketAccelerateConfiguration(&s3.GetBucketAccelerateConfigurationInput{
     Bucket: aws.String(d.Id()),
   })
@@ -600,7 +607,15 @@ func resourceAwsS3BucketRead(d *schema.ResourceData, meta interface{}) error {
     if awsErr, ok := err.(awserr.Error); ok && awsErr.Code() != "UnsupportedArgument" {
       return err
     }
-    log.Printf("[WARN] S3 bucket: %s, the S3 Transfer Acceleration is not supported in the region: %s", d.Id(), meta.(*AWSClient).region)
+
+    var awsRegion string
+    if region, ok := d.GetOk("region"); ok {
+      awsRegion = region.(string)
+    } else {
+      awsRegion = meta.(*AWSClient).region
+    }
+
+    log.Printf("[WARN] S3 bucket: %s, the S3 Transfer Acceleration is not supported in the region: %s", d.Id(), awsRegion)
   } else {
     log.Printf("[DEBUG] S3 bucket: %s, read Acceleration: %v", d.Id(), accelerate)
     d.Set("acceleration_status", accelerate.Status)
resource_aws_s3_bucket_test.go

@@ -31,7 +31,7 @@ func TestAccAWSS3Bucket_basic(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -49,6 +49,25 @@ func TestAccAWSS3Bucket_basic(t *testing.T) {
   })
 }

+func TestAccAWSS3Bucket_region(t *testing.T) {
+  rInt := acctest.RandInt()
+
+  resource.Test(t, resource.TestCase{
+    PreCheck: func() { testAccPreCheck(t) },
+    Providers: testAccProviders,
+    CheckDestroy: testAccCheckAWSS3BucketDestroy,
+    Steps: []resource.TestStep{
+      {
+        Config: testAccAWSS3BucketConfigWithRegion(rInt),
+        Check: resource.ComposeTestCheckFunc(
+          testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
+          resource.TestCheckResourceAttr("aws_s3_bucket.bucket", "region", "eu-west-1"),
+        ),
+      },
+    },
+  })
+}
+
 func TestAccAWSS3Bucket_acceleration(t *testing.T) {
   rInt := acctest.RandInt()

@@ -57,7 +76,7 @@ func TestAccAWSS3Bucket_acceleration(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithAcceleration(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -65,7 +84,7 @@ func TestAccAWSS3Bucket_acceleration(t *testing.T) {
             "aws_s3_bucket.bucket", "acceleration_status", "Enabled"),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithoutAcceleration(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -85,7 +104,7 @@ func TestAccAWSS3Bucket_RequestPayer(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigRequestPayerBucketOwner(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -98,7 +117,7 @@ func TestAccAWSS3Bucket_RequestPayer(t *testing.T) {
             "BucketOwner"),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigRequestPayerRequester(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -151,7 +170,7 @@ func TestAccAWSS3Bucket_Policy(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithPolicy(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -159,7 +178,7 @@ func TestAccAWSS3Bucket_Policy(t *testing.T) {
             "aws_s3_bucket.bucket", testAccAWSS3BucketPolicy(rInt)),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -167,7 +186,7 @@ func TestAccAWSS3Bucket_Policy(t *testing.T) {
             "aws_s3_bucket.bucket", ""),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithEmptyPolicy(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -189,7 +208,7 @@ func TestAccAWSS3Bucket_UpdateAcl(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: preConfig,
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -197,7 +216,7 @@ func TestAccAWSS3Bucket_UpdateAcl(t *testing.T) {
             "aws_s3_bucket.bucket", "acl", "public-read"),
         ),
       },
-      resource.TestStep{
+      {
         Config: postConfig,
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -216,7 +235,7 @@ func TestAccAWSS3Bucket_Website_Simple(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketWebsiteConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -226,7 +245,7 @@ func TestAccAWSS3Bucket_Website_Simple(t *testing.T) {
             "aws_s3_bucket.bucket", "website_endpoint", testAccWebsiteEndpoint(rInt)),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketWebsiteConfigWithError(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -236,7 +255,7 @@ func TestAccAWSS3Bucket_Website_Simple(t *testing.T) {
             "aws_s3_bucket.bucket", "website_endpoint", testAccWebsiteEndpoint(rInt)),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -257,7 +276,7 @@ func TestAccAWSS3Bucket_WebsiteRedirect(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketWebsiteConfigWithRedirect(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -267,7 +286,7 @@ func TestAccAWSS3Bucket_WebsiteRedirect(t *testing.T) {
             "aws_s3_bucket.bucket", "website_endpoint", testAccWebsiteEndpoint(rInt)),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketWebsiteConfigWithHttpsRedirect(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -277,7 +296,7 @@ func TestAccAWSS3Bucket_WebsiteRedirect(t *testing.T) {
             "aws_s3_bucket.bucket", "website_endpoint", testAccWebsiteEndpoint(rInt)),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -298,7 +317,7 @@ func TestAccAWSS3Bucket_WebsiteRoutingRules(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketWebsiteConfigWithRoutingRules(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -307,7 +326,7 @@ func TestAccAWSS3Bucket_WebsiteRoutingRules(t *testing.T) {
           testAccCheckAWSS3BucketWebsiteRoutingRules(
             "aws_s3_bucket.bucket",
             []*s3.RoutingRule{
-              &s3.RoutingRule{
+              {
                 Condition: &s3.Condition{
                   KeyPrefixEquals: aws.String("docs/"),
                 },
@@ -321,7 +340,7 @@ func TestAccAWSS3Bucket_WebsiteRoutingRules(t *testing.T) {
             "aws_s3_bucket.bucket", "website_endpoint", testAccWebsiteEndpoint(rInt)),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -346,7 +365,7 @@ func TestAccAWSS3Bucket_shouldFailNotFound(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketDestroyedConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -365,7 +384,7 @@ func TestAccAWSS3Bucket_Versioning(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -373,7 +392,7 @@ func TestAccAWSS3Bucket_Versioning(t *testing.T) {
             "aws_s3_bucket.bucket", ""),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithVersioning(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -381,7 +400,7 @@ func TestAccAWSS3Bucket_Versioning(t *testing.T) {
             "aws_s3_bucket.bucket", s3.BucketVersioningStatusEnabled),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithDisableVersioning(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -408,7 +427,7 @@ func TestAccAWSS3Bucket_Cors(t *testing.T) {
         Bucket: aws.String(rs.Primary.ID),
         CORSConfiguration: &s3.CORSConfiguration{
           CORSRules: []*s3.CORSRule{
-            &s3.CORSRule{
+            {
               AllowedHeaders: []*string{aws.String("*")},
               AllowedMethods: []*string{aws.String("GET")},
               AllowedOrigins: []*string{aws.String("https://www.example.com")},
@@ -430,14 +449,14 @@ func TestAccAWSS3Bucket_Cors(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithCORS(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
           testAccCheckAWSS3BucketCors(
             "aws_s3_bucket.bucket",
             []*s3.CORSRule{
-              &s3.CORSRule{
+              {
                 AllowedHeaders: []*string{aws.String("*")},
                 AllowedMethods: []*string{aws.String("PUT"), aws.String("POST")},
                 AllowedOrigins: []*string{aws.String("https://www.example.com")},
@@ -450,14 +469,14 @@ func TestAccAWSS3Bucket_Cors(t *testing.T) {
         ),
         ExpectNonEmptyPlan: true,
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithCORS(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
           testAccCheckAWSS3BucketCors(
             "aws_s3_bucket.bucket",
             []*s3.CORSRule{
-              &s3.CORSRule{
+              {
                 AllowedHeaders: []*string{aws.String("*")},
                 AllowedMethods: []*string{aws.String("PUT"), aws.String("POST")},
                 AllowedOrigins: []*string{aws.String("https://www.example.com")},
@@ -479,7 +498,7 @@ func TestAccAWSS3Bucket_Logging(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithLogging(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -498,7 +517,7 @@ func TestAccAWSS3Bucket_Lifecycle(t *testing.T) {
     Providers: testAccProviders,
     CheckDestroy: testAccCheckAWSS3BucketDestroy,
     Steps: []resource.TestStep{
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithLifecycle(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -536,7 +555,7 @@ func TestAccAWSS3Bucket_Lifecycle(t *testing.T) {
             "aws_s3_bucket.bucket", "lifecycle_rule.1.expiration.2855832418.expired_object_delete_marker", "false"),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfigWithVersioningLifecycle(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -566,7 +585,7 @@ func TestAccAWSS3Bucket_Lifecycle(t *testing.T) {
             "aws_s3_bucket.bucket", "lifecycle_rule.1.noncurrent_version_expiration.80908210.days", "365"),
         ),
       },
-      resource.TestStep{
+      {
         Config: testAccAWSS3BucketConfig(rInt),
         Check: resource.ComposeTestCheckFunc(
           testAccCheckAWSS3BucketExists("aws_s3_bucket.bucket"),
@@ -897,6 +916,21 @@ resource "aws_s3_bucket" "bucket" {
 `, randInt)
 }

+func testAccAWSS3BucketConfigWithRegion(randInt int) string {
+  return fmt.Sprintf(`
+provider "aws" {
+  alias = "west"
+  region = "eu-west-1"
+}
+
+resource "aws_s3_bucket" "bucket" {
+  provider = "aws.west"
+  bucket = "tf-test-bucket-%d"
+  region = "eu-west-1"
+}
+`, randInt)
+}
+
 func testAccAWSS3BucketWebsiteConfig(randInt int) string {
   return fmt.Sprintf(`
 resource "aws_s3_bucket" "bucket" {
@@ -976,8 +1010,15 @@ EOF

 func testAccAWSS3BucketConfigWithAcceleration(randInt int) string {
   return fmt.Sprintf(`
+provider "aws" {
+  alias = "west"
+  region = "eu-west-1"
+}
+
 resource "aws_s3_bucket" "bucket" {
+  provider = "aws.west"
   bucket = "tf-test-bucket-%d"
+  region = "eu-west-1"
   acl = "public-read"
   acceleration_status = "Enabled"
 }
@@ -986,8 +1027,15 @@ resource "aws_s3_bucket" "bucket" {

 func testAccAWSS3BucketConfigWithoutAcceleration(randInt int) string {
   return fmt.Sprintf(`
+provider "aws" {
+  alias = "west"
+  region = "eu-west-1"
+}
+
 resource "aws_s3_bucket" "bucket" {
+  provider = "aws.west"
   bucket = "tf-test-bucket-%d"
+  region = "eu-west-1"
   acl = "public-read"
   acceleration_status = "Suspended"
 }
s3_bucket.html.markdown

@@ -174,6 +174,7 @@ The following arguments are supported:
 * `logging` - (Optional) A settings of [bucket logging](https://docs.aws.amazon.com/AmazonS3/latest/UG/ManagingBucketLogging.html) (documented below).
 * `lifecycle_rule` - (Optional) A configuration of [object lifecycle management](http://docs.aws.amazon.com/AmazonS3/latest/dev/object-lifecycle-mgmt.html) (documented below).
 * `acceleration_status` - (Optional) Sets the accelerate configuration of an existing bucket. Can be `Enabled` or `Suspended`.
+* `region` - (Optional) If specified, the AWS region this bucket should reside in. Otherwise, the region used by the callee.
 * `request_payer` - (Optional) Specifies who should bear the cost of Amazon S3 data transfer.
 Can be either `BucketOwner` or `Requester`. By default, the owner of the S3 bucket would incur
 the costs of any data transfer. See [Requester Pays Buckets](http://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html)
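For reference, a minimal configuration that exercises the new `region` argument, modeled on the `testAccAWSS3BucketConfigWithRegion` fixture added in this commit (the provider alias and bucket name below are illustrative, not part of the change):

provider "aws" {
  alias  = "west"
  region = "eu-west-1"
}

resource "aws_s3_bucket" "bucket" {
  # Create the bucket through the aliased eu-west-1 provider and record
  # the region in state; on read, the attribute is refreshed from S3.
  provider = "aws.west"
  bucket   = "tf-test-bucket-example"
  region   = "eu-west-1"
}

When `region` is omitted, the bucket is created in the region the provider itself is configured for, and the computed `region` attribute is populated on read.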