package google

import (
	"errors"
	"fmt"
	"log"
	"time"

	"github.com/hashicorp/terraform/helper/resource"
	"github.com/hashicorp/terraform/helper/schema"

	"google.golang.org/api/googleapi"
	"google.golang.org/api/storage/v1"
)
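
// resourceStorageBucket returns the schema and the CRUD/import handlers for
// the Google Cloud Storage bucket resource.
//
// Illustrative configuration only (this assumes the provider registers the
// resource as "google_storage_bucket"; the names and values below are
// placeholders, not taken from this file):
//
//	resource "google_storage_bucket" "example" {
//	  name          = "example-bucket-name"
//	  location      = "US"
//	  storage_class = "STANDARD"
//	  force_destroy = true
//
//	  website {
//	    main_page_suffix = "index.html"
//	    not_found_page   = "404.html"
//	  }
//
//	  cors {
//	    origin          = ["*"]
//	    method          = ["GET", "HEAD"]
//	    response_header = ["Content-Type"]
//	    max_age_seconds = 3600
//	  }
//	}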
func resourceStorageBucket() *schema.Resource {
	return &schema.Resource{
		Create: resourceStorageBucketCreate,
		Read:   resourceStorageBucketRead,
		Update: resourceStorageBucketUpdate,
		Delete: resourceStorageBucketDelete,
		Importer: &schema.ResourceImporter{
			State: resourceStorageBucketStateImporter,
		},

		Schema: map[string]*schema.Schema{
			"name": &schema.Schema{
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
			},

			"force_destroy": &schema.Schema{
				Type:     schema.TypeBool,
				Optional: true,
				Default:  false,
			},

			"location": &schema.Schema{
				Type:     schema.TypeString,
				Default:  "US",
				Optional: true,
				ForceNew: true,
			},

			"predefined_acl": &schema.Schema{
				Type:       schema.TypeString,
				Deprecated: "Please use resource \"storage_bucket_acl.predefined_acl\" instead.",
				Optional:   true,
				ForceNew:   true,
			},

			"project": &schema.Schema{
				Type:     schema.TypeString,
				Optional: true,
				ForceNew: true,
			},

			"self_link": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},

			"url": &schema.Schema{
				Type:     schema.TypeString,
				Computed: true,
			},

			"storage_class": &schema.Schema{
				Type:     schema.TypeString,
				Optional: true,
				Default:  "STANDARD",
				ForceNew: true,
			},

			"website": &schema.Schema{
				Type:     schema.TypeList,
				Optional: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"main_page_suffix": &schema.Schema{
							Type:     schema.TypeString,
							Optional: true,
						},
						"not_found_page": &schema.Schema{
							Type:     schema.TypeString,
							Optional: true,
						},
					},
				},
			},

			"cors": &schema.Schema{
				Type:     schema.TypeList,
				Optional: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"origin": &schema.Schema{
							Type:     schema.TypeList,
							Optional: true,
							Elem: &schema.Schema{
								Type: schema.TypeString,
							},
						},
						"method": &schema.Schema{
							Type:     schema.TypeList,
							Optional: true,
							Elem: &schema.Schema{
								Type: schema.TypeString,
							},
						},
						"response_header": &schema.Schema{
							Type:     schema.TypeList,
							Optional: true,
							Elem: &schema.Schema{
								Type: schema.TypeString,
							},
						},
						"max_age_seconds": &schema.Schema{
							Type:     schema.TypeInt,
							Optional: true,
						},
					},
				},
			},
		},
	}
}
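
// resourceStorageBucketCreate creates the bucket with the configured name,
// location, storage class, website, and CORS settings, applies the optional
// predefined ACL, and retries on HTTP 429 rate-limit errors.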
func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	project, err := getProject(d, config)
	if err != nil {
		return err
	}

	// Get the bucket name and location
	bucket := d.Get("name").(string)
	location := d.Get("location").(string)

	// Create a bucket, setting the location and name.
	sb := &storage.Bucket{Name: bucket, Location: location}

	if v, ok := d.GetOk("storage_class"); ok {
		sb.StorageClass = v.(string)
	}

	if v, ok := d.GetOk("website"); ok {
		websites := v.([]interface{})

		if len(websites) > 1 {
			return fmt.Errorf("At most one website block is allowed")
		}

		sb.Website = &storage.BucketWebsite{}

		website := websites[0].(map[string]interface{})

		if v, ok := website["not_found_page"]; ok {
			sb.Website.NotFoundPage = v.(string)
		}

		if v, ok := website["main_page_suffix"]; ok {
			sb.Website.MainPageSuffix = v.(string)
		}
	}

	if v, ok := d.GetOk("cors"); ok {
		sb.Cors = expandCors(v.([]interface{}))
	}

	var res *storage.Bucket

	err = resource.Retry(1*time.Minute, func() *resource.RetryError {
		call := config.clientStorage.Buckets.Insert(project, sb)
		if v, ok := d.GetOk("predefined_acl"); ok {
			call = call.PredefinedAcl(v.(string))
		}

		res, err = call.Do()
		if err == nil {
			return nil
		}
		if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 429 {
			return resource.RetryableError(gerr)
		}
		return resource.NonRetryableError(err)
	})

	if err != nil {
		log.Printf("[ERROR] Error creating bucket %s: %v", bucket, err)
		return err
	}

	log.Printf("[DEBUG] Created bucket %v at location %v\n\n", res.Name, res.SelfLink)

	d.SetId(res.Id)
	return resourceStorageBucketRead(d, meta)
}
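
// resourceStorageBucketUpdate patches the bucket's website and CORS
// configuration in place; changes to the other API-visible arguments force
// recreation of the bucket via ForceNew in the schema.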
func resourceStorageBucketUpdate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	sb := &storage.Bucket{}

	if d.HasChange("website") {
		if v, ok := d.GetOk("website"); ok {
			websites := v.([]interface{})

			if len(websites) > 1 {
				return fmt.Errorf("At most one website block is allowed")
			}

			// Set fields to "" to be explicit that the PATCH call will
			// delete these fields.
			sb.Website = &storage.BucketWebsite{}
			if len(websites) == 0 {
				sb.Website.NotFoundPage = ""
				sb.Website.MainPageSuffix = ""
			} else {
				website := websites[0].(map[string]interface{})

				if v, ok := website["not_found_page"]; ok {
					sb.Website.NotFoundPage = v.(string)
				} else {
					sb.Website.NotFoundPage = ""
				}

				if v, ok := website["main_page_suffix"]; ok {
					sb.Website.MainPageSuffix = v.(string)
				} else {
					sb.Website.MainPageSuffix = ""
				}
			}
		}
	}

	if v, ok := d.GetOk("cors"); ok {
		sb.Cors = expandCors(v.([]interface{}))
	}

	res, err := config.clientStorage.Buckets.Patch(d.Get("name").(string), sb).Do()

	if err != nil {
		return err
	}

	log.Printf("[DEBUG] Patched bucket %v at location %v\n\n", res.Name, res.SelfLink)

	// Sync the self link and assign the bucket ID as the resource ID
	d.Set("self_link", res.SelfLink)
	d.SetId(res.Id)

	return nil
}
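
// resourceStorageBucketRead refreshes the bucket's computed attributes from
// the Storage API, delegating "not found" handling to handleNotFoundError.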
func resourceStorageBucketRead(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	// Look up the bucket by name
	bucket := d.Get("name").(string)
	res, err := config.clientStorage.Buckets.Get(bucket).Do()

	if err != nil {
		return handleNotFoundError(err, d, fmt.Sprintf("Storage Bucket %q", d.Get("name").(string)))
	}

	log.Printf("[DEBUG] Read bucket %v at location %v\n\n", res.Name, res.SelfLink)

	// Sync the computed attributes and the resource ID from the API response
	d.Set("self_link", res.SelfLink)
	d.Set("url", fmt.Sprintf("gs://%s", bucket))
	d.Set("storage_class", res.StorageClass)
	d.Set("location", res.Location)
	d.Set("cors", flattenCors(res.Cors))
	d.SetId(res.Id)
	return nil
}
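
// resourceStorageBucketDelete deletes the bucket, first purging any remaining
// objects when force_destroy is set, and retries the final delete on HTTP 429
// rate-limit errors.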
func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*Config)

	// Get the bucket
	bucket := d.Get("name").(string)

	for {
		res, err := config.clientStorage.Objects.List(bucket).Do()
		if err != nil {
			log.Printf("[ERROR] Objects.List failed: %v", err)
			return err
		}

		if len(res.Items) != 0 {
			if d.Get("force_destroy").(bool) {
				// purge the bucket...
				log.Printf("[DEBUG] GCS Bucket attempting to forceDestroy\n\n")

				for _, object := range res.Items {
					log.Printf("[DEBUG] Found %s", object.Name)
					if err := config.clientStorage.Objects.Delete(bucket, object.Name).Do(); err != nil {
						log.Printf("[ERROR] Error trying to delete object %s: %s\n\n", object.Name, err)
						return err
					}
					log.Printf("[DEBUG] Object deleted: %s\n\n", object.Name)
				}
			} else {
				deleteErr := errors.New("Error trying to delete a bucket containing objects without `force_destroy` set to true")
				log.Printf("[ERROR] %s: %s\n\n", bucket, deleteErr)
				return deleteErr
			}
		} else {
			break // 0 items, bucket empty
		}
	}

	// remove empty bucket
	err := resource.Retry(1*time.Minute, func() *resource.RetryError {
		err := config.clientStorage.Buckets.Delete(bucket).Do()
		if err == nil {
			return nil
		}
		if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 429 {
			return resource.RetryableError(gerr)
		}
		return resource.NonRetryableError(err)
	})
	if err != nil {
		log.Printf("[ERROR] Error deleting bucket %s: %v\n\n", bucket, err)
		return err
	}
	log.Printf("[DEBUG] Deleted bucket %v\n\n", bucket)

	return nil
}
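
// resourceStorageBucketStateImporter seeds an imported resource's state with
// the bucket name taken from the import ID; the remaining attributes are
// populated by the subsequent Read.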
func resourceStorageBucketStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
	d.Set("name", d.Id())
	return []*schema.ResourceData{d}, nil
}
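
// expandCors converts the "cors" blocks from the Terraform configuration into
// the API's []*storage.BucketCors representation.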
func expandCors(configured []interface{}) []*storage.BucketCors {
	corsRules := make([]*storage.BucketCors, 0, len(configured))
	for _, raw := range configured {
		data := raw.(map[string]interface{})
		corsRule := storage.BucketCors{
			Origin:         convertSchemaArrayToStringArray(data["origin"].([]interface{})),
			Method:         convertSchemaArrayToStringArray(data["method"].([]interface{})),
			ResponseHeader: convertSchemaArrayToStringArray(data["response_header"].([]interface{})),
			MaxAgeSeconds:  int64(data["max_age_seconds"].(int)),
		}

		corsRules = append(corsRules, &corsRule)
	}
	return corsRules
}
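
// convertSchemaArrayToStringArray converts a schema TypeList value
// ([]interface{}) into a plain []string.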
func convertSchemaArrayToStringArray(input []interface{}) []string {
	output := make([]string, 0, len(input))
	for _, val := range input {
		output = append(output, val.(string))
	}

	return output
}
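
// flattenCors converts the API's CORS rules back into the
// []map[string]interface{} shape expected by the "cors" schema attribute.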
func flattenCors(corsRules []*storage.BucketCors) []map[string]interface{} {
	corsRulesSchema := make([]map[string]interface{}, 0, len(corsRules))
	for _, corsRule := range corsRules {
		data := map[string]interface{}{
			"origin":          corsRule.Origin,
			"method":          corsRule.Method,
			"response_header": corsRule.ResponseHeader,
			"max_age_seconds": corsRule.MaxAgeSeconds,
		}

		corsRulesSchema = append(corsRulesSchema, data)
	}
	return corsRulesSchema
}