Add ability to import Google Compute persistent disks (#14573)

* Add ability to import Google Compute persistent disks

* Fix additional URL names
Sam Bashton 2017-05-30 14:16:12 +01:00 committed by Radek Simko
parent 893b2f74a5
commit 56fbe027c3
2 changed files with 76 additions and 4 deletions

View File

@@ -0,0 +1,31 @@
package google

import (
	"fmt"
	"testing"

	"github.com/hashicorp/terraform/helper/acctest"
	"github.com/hashicorp/terraform/helper/resource"
)

func TestAccComputeDisk_importBasic(t *testing.T) {
	resourceName := "google_compute_disk.foobar"
	diskName := fmt.Sprintf("disk-test-%s", acctest.RandString(10))

	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckComputeDiskDestroy,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: testAccComputeDisk_basic(diskName),
			},
			resource.TestStep{
				ResourceName:      resourceName,
				ImportState:       true,
				ImportStateVerify: true,
			},
		},
	})
}
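The first test step creates a disk with testAccComputeDisk_basic, a config generator that already exists in the resource's acceptance tests and is not part of this commit; the second step imports that disk by name and verifies the imported state matches. A minimal sketch of what such a helper returns, with illustrative values for image, size, type and zone (assumptions, not the provider's actual fixture):

func testAccComputeDisk_basic(diskName string) string {
	// Renders a single google_compute_disk resource; every attribute value
	// below is an assumption chosen only to make the sketch concrete.
	return fmt.Sprintf(`
resource "google_compute_disk" "foobar" {
	name  = "%s"
	image = "debian-8-jessie-v20160803"
	size  = 50
	type  = "pd-ssd"
	zone  = "us-central1-a"
}`, diskName)
}

Like the other acceptance tests, this only runs against real GCP credentials, e.g. via the repository's make testacc target with TESTARGS='-run=TestAccComputeDisk_importBasic'.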

View File

@@ -4,6 +4,7 @@ import (
 	"fmt"
 	"log"
 	"regexp"
+	"strings"
 
 	"github.com/hashicorp/terraform/helper/schema"
 	"google.golang.org/api/compute/v1"
@@ -23,6 +24,9 @@ func resourceComputeDisk() *schema.Resource {
 		Create: resourceComputeDiskCreate,
 		Read:   resourceComputeDiskRead,
 		Delete: resourceComputeDiskDelete,
+		Importer: &schema.ResourceImporter{
+			State: schema.ImportStatePassthrough,
+		},
 
 		Schema: map[string]*schema.Schema{
 			"name": &schema.Schema{
@@ -189,17 +193,54 @@ func resourceComputeDiskRead(d *schema.ResourceData, meta interface{}) error {
 		return err
 	}
 
-	disk, err := config.clientCompute.Disks.Get(
-		project, d.Get("zone").(string), d.Id()).Do()
-	if err != nil {
-		return handleNotFoundError(err, d, fmt.Sprintf("Disk %q", d.Get("name").(string)))
-	}
+	region, err := getRegion(d, config)
+	if err != nil {
+		return err
+	}
+
+	getDisk := func(zone string) (interface{}, error) {
+		return config.clientCompute.Disks.Get(project, zone, d.Id()).Do()
+	}
+
+	var disk *compute.Disk
+	if zone, ok := d.GetOk("zone"); ok {
+		disk, err = config.clientCompute.Disks.Get(
+			project, zone.(string), d.Id()).Do()
+		if err != nil {
+			return handleNotFoundError(err, d, fmt.Sprintf("Disk %q", d.Get("name").(string)))
+		}
+	} else {
+		// If the resource was imported, the only info we have is the ID. Try to find the resource
+		// by searching in the region of the project.
+		var resource interface{}
+		resource, err = getZonalResourceFromRegion(getDisk, region, config.clientCompute, project)
+		if err != nil {
+			return err
+		}
+		disk = resource.(*compute.Disk)
+	}
+
+	zoneUrlParts := strings.Split(disk.Zone, "/")
+	typeUrlParts := strings.Split(disk.Type, "/")
+	d.Set("name", disk.Name)
 	d.Set("self_link", disk.SelfLink)
+	d.Set("type", typeUrlParts[len(typeUrlParts)-1])
+	d.Set("zone", zoneUrlParts[len(zoneUrlParts)-1])
+	d.Set("size", disk.SizeGb)
+	d.Set("users", disk.Users)
 	if disk.DiskEncryptionKey != nil && disk.DiskEncryptionKey.Sha256 != "" {
 		d.Set("disk_encryption_key_sha256", disk.DiskEncryptionKey.Sha256)
 	}
-	d.Set("users", disk.Users)
+	if disk.SourceImage != "" {
+		imageUrlParts := strings.Split(disk.SourceImage, "/")
+		d.Set("image", imageUrlParts[len(imageUrlParts)-1])
+	}
+	if disk.SourceSnapshot != "" {
+		snapshotUrlParts := strings.Split(disk.SourceSnapshot, "/")
+		d.Set("snapshot", snapshotUrlParts[len(snapshotUrlParts)-1])
+	}
 
 	return nil
 }
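getZonalResourceFromRegion is an existing helper in the provider and is not shown in this diff; it takes the lookup closure, a region, the Compute client and the project, and probes the region's zones until the lookup succeeds. A rough sketch of that shape, assuming the zone listing, filter syntax and 404 handling only approximate the real helper:

// Hypothetical sketch, not the committed implementation.
// Assumed imports: fmt, google.golang.org/api/compute/v1, google.golang.org/api/googleapi.
func getZonalResourceFromRegion(getResource func(string) (interface{}, error), region string,
	svc *compute.Service, project string) (interface{}, error) {
	// List the zones that belong to the requested region.
	zones, err := svc.Zones.List(project).Filter(fmt.Sprintf("region eq .*%s", region)).Do()
	if err != nil {
		return nil, err
	}
	// Try the lookup zone by zone; a 404 just means "not here", anything else is fatal.
	for _, zone := range zones.Items {
		res, err := getResource(zone.Name)
		if err != nil {
			if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 {
				continue
			}
			return nil, err
		}
		return res, nil
	}
	// Not found in any zone; the real helper's not-found behaviour may differ.
	return nil, fmt.Errorf("resource not found in region %q", region)
}

With the importer and the region-wide search in place, an existing disk can be brought under Terraform with terraform import google_compute_disk.foobar <disk-name>; the zone is recovered from the API response rather than from configuration, which is why Read now also back-fills name, zone, type, size, image and snapshot.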