Add support for Amazon Elastic Transcoder

- Add resources for Elastic Transcoder pipelines and presets
James Bardin 2016-05-26 18:29:42 -04:00
parent a1021f32e1
commit 5c1b625c84
7 changed files with 1863 additions and 37 deletions
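
In a Terraform configuration, the two new resources can be wired up roughly as follows. This is a minimal sketch based on the schemas and acceptance-test configurations in this commit; the bucket, IAM role, and resource names are placeholders, not part of the change.

resource "aws_elastictranscoder_pipeline" "example" {
  input_bucket  = "${aws_s3_bucket.input.bucket}"   # placeholder bucket
  output_bucket = "${aws_s3_bucket.output.bucket}"  # placeholder bucket
  role          = "${aws_iam_role.transcoder.arn}"  # placeholder IAM role

  # name is optional; when omitted, the provider generates one with the
  # "tf-et-" prefix (see resourceAwsElasticTranscoderPipelineCreate)
}

resource "aws_elastictranscoder_preset" "example" {
  container   = "mp4"
  description = "example preset"

  audio = {
    audio_packing_mode = "SingleTrack"
    bit_rate           = 128
    channels           = 2
    codec              = "AAC"
    sample_rate        = 44100
  }
}

As the pipeline schema notes, either output_bucket must be set, or both content_config.bucket and thumbnail_config.bucket must be set.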


@@ -36,6 +36,7 @@ import (
"github.com/aws/aws-sdk-go/service/elasticache"
"github.com/aws/aws-sdk-go/service/elasticbeanstalk"
elasticsearch "github.com/aws/aws-sdk-go/service/elasticsearchservice"
"github.com/aws/aws-sdk-go/service/elastictranscoder"
"github.com/aws/aws-sdk-go/service/elb" "github.com/aws/aws-sdk-go/service/elb"
"github.com/aws/aws-sdk-go/service/emr" "github.com/aws/aws-sdk-go/service/emr"
"github.com/aws/aws-sdk-go/service/firehose" "github.com/aws/aws-sdk-go/service/firehose"
@ -107,6 +108,7 @@ type AWSClient struct {
firehoseconn *firehose.Firehose firehoseconn *firehose.Firehose
elasticacheconn *elasticache.ElastiCache elasticacheconn *elasticache.ElastiCache
elasticbeanstalkconn *elasticbeanstalk.ElasticBeanstalk elasticbeanstalkconn *elasticbeanstalk.ElasticBeanstalk
elastictranscoderconn *elastictranscoder.ElasticTranscoder
lambdaconn *lambda.Lambda
opsworksconn *opsworks.OpsWorks
glacierconn *glacier.Glacier
@@ -228,6 +230,9 @@ func (c *Config) Client() (interface{}, error) {
log.Println("[INFO] Initializing Elastic Beanstalk Connection")
client.elasticbeanstalkconn = elasticbeanstalk.New(sess)
log.Println("[INFO] Initializing Elastic Transcoder Connection")
client.elastictranscoderconn = elastictranscoder.New(sess)
authErr := c.ValidateAccountId(client.accountid)
if authErr != nil {
errs = append(errs, authErr)


@@ -178,6 +178,8 @@ func Provider() terraform.ResourceProvider {
"aws_elastic_beanstalk_configuration_template": resourceAwsElasticBeanstalkConfigurationTemplate(),
"aws_elastic_beanstalk_environment": resourceAwsElasticBeanstalkEnvironment(),
"aws_elasticsearch_domain": resourceAwsElasticSearchDomain(),
"aws_elastictranscoder_pipeline": resourceAwsElasticTranscoderPipeline(),
"aws_elastictranscoder_preset": resourceAwsElasticTranscoderPreset(),
"aws_elb": resourceAwsElb(), "aws_elb": resourceAwsElb(),
"aws_flow_log": resourceAwsFlowLog(), "aws_flow_log": resourceAwsFlowLog(),
"aws_glacier_vault": resourceAwsGlacierVault(), "aws_glacier_vault": resourceAwsGlacierVault(),


@@ -0,0 +1,475 @@
package aws
import (
"fmt"
"log"
"regexp"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/elastictranscoder"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/helper/schema"
)
func resourceAwsElasticTranscoderPipeline() *schema.Resource {
return &schema.Resource{
Create: resourceAwsElasticTranscoderPipelineCreate,
Read: resourceAwsElasticTranscoderPipelineRead,
Update: resourceAwsElasticTranscoderPipelineUpdate,
Delete: resourceAwsElasticTranscoderPipelineDelete,
Schema: map[string]*schema.Schema{
"arn": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"aws_kms_key_arn": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
// ContentConfig also requires ThumbnailConfig
"content_config": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
Computed: true,
MaxItems: 1,
Elem: &schema.Resource{
// elastictranscoder.PipelineOutputConfig
Schema: map[string]*schema.Schema{
"bucket": &schema.Schema{
Type: schema.TypeString,
Optional: true,
// AWS may populate this with the bucket name from output_bucket
Computed: true,
},
"storage_class": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
},
},
},
"content_config_permissions": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"access": &schema.Schema{
Type: schema.TypeList,
Optional: true,
Elem: &schema.Schema{Type: schema.TypeString},
},
"grantee": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
"grantee_type": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
},
},
},
"input_bucket": &schema.Schema{
Type: schema.TypeString,
Required: true,
},
"name": &schema.Schema{
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
ValidateFunc: func(v interface{}, k string) (ws []string, errors []error) {
value := v.(string)
if !regexp.MustCompile(`^[.0-9A-Za-z-_]+$`).MatchString(value) {
errors = append(errors, fmt.Errorf(
"only alphanumeric characters, hyphens, underscores, and periods allowed in %q", k))
}
if len(value) > 40 {
errors = append(errors, fmt.Errorf("%q cannot be longer than 40 characters", k))
}
return
},
},
"notifications": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"completed": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
"error": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
"progressing": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
"warning": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
},
},
},
// Either output_bucket must be set, or both content_config.bucket
// and thumbnail_config.bucket must be set.
// This is marked Computed because the API may or may not return
// it, depending on the other two configurations.
"output_bucket": &schema.Schema{
Type: schema.TypeString,
Optional: true,
Computed: true,
},
"role": &schema.Schema{
Type: schema.TypeString,
Required: true,
},
"thumbnail_config": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
Computed: true,
MaxItems: 1,
Elem: &schema.Resource{
// elastictranscoder.PipelineOutputConfig
Schema: map[string]*schema.Schema{
"bucket": &schema.Schema{
Type: schema.TypeString,
Optional: true,
// AWS may populate this with the bucket name from output_bucket
Computed: true,
},
"storage_class": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
},
},
},
"thumbnail_config_permissions": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"access": &schema.Schema{
Type: schema.TypeList,
Optional: true,
Elem: &schema.Schema{Type: schema.TypeString},
},
"grantee": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
"grantee_type": &schema.Schema{
Type: schema.TypeString,
Optional: true,
},
},
},
},
},
}
}
func resourceAwsElasticTranscoderPipelineCreate(d *schema.ResourceData, meta interface{}) error {
elastictranscoderconn := meta.(*AWSClient).elastictranscoderconn
req := &elastictranscoder.CreatePipelineInput{
AwsKmsKeyArn: getStringPtr(d, "aws_kms_key_arn"),
ContentConfig: expandETPiplineOutputConfig(d, "content_config"),
InputBucket: aws.String(d.Get("input_bucket").(string)),
Notifications: expandETNotifications(d),
OutputBucket: getStringPtr(d, "output_bucket"),
Role: getStringPtr(d, "role"),
ThumbnailConfig: expandETPiplineOutputConfig(d, "thumbnail_config"),
}
if name, ok := d.GetOk("name"); ok {
req.Name = aws.String(name.(string))
} else {
name := resource.PrefixedUniqueId("tf-et-")
d.Set("name", name)
req.Name = aws.String(name)
}
if (req.OutputBucket == nil && (req.ContentConfig == nil || req.ContentConfig.Bucket == nil)) ||
(req.OutputBucket != nil && req.ContentConfig != nil && req.ContentConfig.Bucket != nil) {
return fmt.Errorf("[ERROR] you must specify only one of output_bucket or content_config.bucket")
}
log.Printf("[DEBUG] Elastic Transcoder Pipeline create opts: %s", req)
resp, err := elastictranscoderconn.CreatePipeline(req)
if err != nil {
return fmt.Errorf("Error creating Elastic Transcoder Pipeline: %s", err)
}
d.SetId(*resp.Pipeline.Id)
for _, w := range resp.Warnings {
log.Printf("[WARN] Elastic Transcoder Pipeline %s: %s", w.Code, w.Message)
}
return resourceAwsElasticTranscoderPipelineRead(d, meta)
}
func expandETNotifications(d *schema.ResourceData) *elastictranscoder.Notifications {
set, ok := d.GetOk("notifications")
if !ok {
return nil
}
s := set.(*schema.Set)
if s == nil || s.Len() == 0 {
return nil
}
m := s.List()[0].(map[string]interface{})
return &elastictranscoder.Notifications{
Completed: getStringPtr(m, "completed"),
Error: getStringPtr(m, "error"),
Progressing: getStringPtr(m, "progressing"),
Warning: getStringPtr(m, "warning"),
}
}
func flattenETNotifications(n *elastictranscoder.Notifications) []map[string]interface{} {
if n == nil {
return nil
}
allEmpty := func(s ...*string) bool {
for _, s := range s {
if s != nil && *s != "" {
return false
}
}
return true
}
// the API always returns a Notifications value, even when all fields are nil
if allEmpty(n.Completed, n.Error, n.Progressing, n.Warning) {
return nil
}
m := setMap(make(map[string]interface{}))
m.SetString("completed", n.Completed)
m.SetString("error", n.Error)
m.SetString("progressing", n.Progressing)
m.SetString("warning", n.Warning)
return m.MapList()
}
func expandETPiplineOutputConfig(d *schema.ResourceData, key string) *elastictranscoder.PipelineOutputConfig {
set, ok := d.GetOk(key)
if !ok {
return nil
}
s := set.(*schema.Set)
if s == nil || s.Len() == 0 {
return nil
}
cc := s.List()[0].(map[string]interface{})
cfg := &elastictranscoder.PipelineOutputConfig{
Bucket: getStringPtr(cc, "bucket"),
StorageClass: getStringPtr(cc, "storage_class"),
}
switch key {
case "content_config":
cfg.Permissions = expandETPermList(d.Get("content_config_permissions").(*schema.Set))
case "thumbnail_config":
cfg.Permissions = expandETPermList(d.Get("thumbnail_config_permissions").(*schema.Set))
}
return cfg
}
func flattenETPipelineOutputConfig(cfg *elastictranscoder.PipelineOutputConfig) []map[string]interface{} {
m := setMap(make(map[string]interface{}))
m.SetString("bucket", cfg.Bucket)
m.SetString("storage_class", cfg.StorageClass)
return m.MapList()
}
func expandETPermList(permissions *schema.Set) []*elastictranscoder.Permission {
var perms []*elastictranscoder.Permission
for _, p := range permissions.List() {
perm := &elastictranscoder.Permission{
Access: getStringPtrList(p.(map[string]interface{}), "access"),
Grantee: getStringPtr(p, "grantee"),
GranteeType: getStringPtr(p, "grantee_type"),
}
perms = append(perms, perm)
}
return perms
}
func flattenETPermList(perms []*elastictranscoder.Permission) []map[string]interface{} {
var set []map[string]interface{}
for _, p := range perms {
m := setMap(make(map[string]interface{}))
m.Set("access", flattenStringList(p.Access))
m.SetString("grantee", p.Grantee)
m.SetString("grantee_type", p.GranteeType)
set = append(set, m)
}
return set
}
func resourceAwsElasticTranscoderPipelineUpdate(d *schema.ResourceData, meta interface{}) error {
elastictranscoderconn := meta.(*AWSClient).elastictranscoderconn
req := &elastictranscoder.UpdatePipelineInput{
Id: aws.String(d.Id()),
}
if d.HasChange("aws_kms_key_arn") {
req.AwsKmsKeyArn = getStringPtr(d, "aws_kms_key_arn")
}
if d.HasChange("content_config") {
req.ContentConfig = expandETPiplineOutputConfig(d, "content_config")
}
if d.HasChange("input_bucket") {
req.InputBucket = getStringPtr(d, "input_bucket")
}
if d.HasChange("name") {
req.Name = getStringPtr(d, "name")
}
if d.HasChange("notifications") {
req.Notifications = expandETNotifications(d)
}
if d.HasChange("role") {
req.Role = getStringPtr(d, "role")
}
if d.HasChange("thumbnail_config") {
req.ThumbnailConfig = expandETPiplineOutputConfig(d, "thumbnail_config")
}
log.Printf("[DEBUG] Updating Elastic Transcoder Pipeline: %#v", req)
output, err := elastictranscoderconn.UpdatePipeline(req)
if err != nil {
return fmt.Errorf("Error updating Elastic Transcoder pipeline: %s", err)
}
for _, w := range output.Warnings {
log.Printf("[WARN] Elastic Transcoder Pipeline %s: %s", w.Code, w.Message)
}
return resourceAwsElasticTranscoderPipelineRead(d, meta)
}
func resourceAwsElasticTranscoderPipelineRead(d *schema.ResourceData, meta interface{}) error {
elastictranscoderconn := meta.(*AWSClient).elastictranscoderconn
resp, err := elastictranscoderconn.ReadPipeline(&elastictranscoder.ReadPipelineInput{
Id: aws.String(d.Id()),
})
if err != nil {
if err, ok := err.(awserr.Error); ok && err.Code() == "ResourceNotFoundException" {
d.SetId("")
return nil
}
return err
}
log.Printf("[DEBUG] Elastic Transcoder Pipeline Read response: %#v", resp)
pipeline := resp.Pipeline
d.Set("arn", *pipeline.Arn)
if arn := pipeline.AwsKmsKeyArn; arn != nil {
d.Set("aws_kms_key_arn", *arn)
}
if pipeline.ContentConfig != nil {
err := d.Set("content_config", flattenETPipelineOutputConfig(pipeline.ContentConfig))
if err != nil {
return fmt.Errorf("error setting content_config: %s", err)
}
if pipeline.ContentConfig.Permissions != nil {
err := d.Set("content_config_permissions", flattenETPermList(pipeline.ContentConfig.Permissions))
if err != nil {
return fmt.Errorf("error setting content_config_permissions: %s", err)
}
}
}
d.Set("input_bucket", *pipeline.InputBucket)
d.Set("name", *pipeline.Name)
notifications := flattenETNotifications(pipeline.Notifications)
if notifications != nil {
if err := d.Set("notifications", notifications); err != nil {
return fmt.Errorf("error setting notifications: %s", err)
}
}
d.Set("role", *pipeline.Role)
if pipeline.ThumbnailConfig != nil {
err := d.Set("thumbnail_config", flattenETPipelineOutputConfig(pipeline.ThumbnailConfig))
if err != nil {
return fmt.Errorf("error setting thumbnail_config: %s", err)
}
if pipeline.ThumbnailConfig.Permissions != nil {
err := d.Set("thumbnail_config_permissions", flattenETPermList(pipeline.ThumbnailConfig.Permissions))
if err != nil {
return fmt.Errorf("error setting thumbnail_config_permissions: %s", err)
}
}
}
if pipeline.OutputBucket != nil {
d.Set("output_bucket", *pipeline.OutputBucket)
}
return nil
}
func resourceAwsElasticTranscoderPipelineDelete(d *schema.ResourceData, meta interface{}) error {
elastictranscoderconn := meta.(*AWSClient).elastictranscoderconn
log.Printf("[DEBUG] Elastic Transcoder Delete Pipeline: %s", d.Id())
_, err := elastictranscoderconn.DeletePipeline(&elastictranscoder.DeletePipelineInput{
Id: aws.String(d.Id()),
})
if err != nil {
return fmt.Errorf("error deleting Elastic Transcoder Pipeline: %s", err)
}
return nil
}


@@ -0,0 +1,328 @@
package aws
import (
"fmt"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/elastictranscoder"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
func TestAccAWSElasticTranscoderPipeline_basic(t *testing.T) {
pipeline := &elastictranscoder.Pipeline{}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "aws_elastictranscoder_pipeline.bar",
Providers: testAccProviders,
CheckDestroy: testAccCheckElasticTranscoderPipelineDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: awsElasticTranscoderPipelineConfigBasic,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSElasticTranscoderPipelineExists("aws_elastictranscoder_pipeline.bar", pipeline),
),
},
},
})
}
func TestAccAWSElasticTranscoderPipeline_withContentConfig(t *testing.T) {
pipeline := &elastictranscoder.Pipeline{}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "aws_elastictranscoder_pipeline.bar",
Providers: testAccProviders,
CheckDestroy: testAccCheckElasticTranscoderPipelineDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: awsElasticTranscoderPipelineWithContentConfig,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSElasticTranscoderPipelineExists("aws_elastictranscoder_pipeline.bar", pipeline),
),
},
resource.TestStep{
Config: awsElasticTranscoderPipelineWithContentConfigUpdate,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSElasticTranscoderPipelineExists("aws_elastictranscoder_pipeline.bar", pipeline),
),
},
},
})
}
func TestAccAWSElasticTranscoderPipeline_withPermissions(t *testing.T) {
pipeline := &elastictranscoder.Pipeline{}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
IDRefreshName: "aws_elastictranscoder_pipeline.baz",
Providers: testAccProviders,
CheckDestroy: testAccCheckElasticTranscoderPipelineDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: awsElasticTranscoderPipelineWithPerms,
Check: resource.ComposeTestCheckFunc(
testAccCheckAWSElasticTranscoderPipelineExists("aws_elastictranscoder_pipeline.baz", pipeline),
),
},
},
})
}
func testAccCheckAWSElasticTranscoderPipelineExists(n string, res *elastictranscoder.Pipeline) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("Not found: %s", n)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No Pipeline ID is set")
}
conn := testAccProvider.Meta().(*AWSClient).elastictranscoderconn
out, err := conn.ReadPipeline(&elastictranscoder.ReadPipelineInput{
Id: aws.String(rs.Primary.ID),
})
if err != nil {
return err
}
*res = *out.Pipeline
return nil
}
}
func testAccCheckElasticTranscoderPipelineDestroy(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).elastictranscoderconn
for _, rs := range s.RootModule().Resources {
if rs.Type != "aws_elastictranscoder_pipline" {
continue
}
out, err := conn.ReadPipeline(&elastictranscoder.ReadPipelineInput{
Id: aws.String(rs.Primary.ID),
})
if err == nil {
if out.Pipeline != nil && *out.Pipeline.Id == rs.Primary.ID {
return fmt.Errorf("Elastic Transcoder Pipeline still exists")
}
}
awsErr, ok := err.(awserr.Error)
if !ok {
return err
}
if awsErr.Code() != "ResourceNotFoundException" {
return fmt.Errorf("unexpected error: %s", awsErr)
}
}
return nil
}
const awsElasticTranscoderPipelineConfigBasic = `
resource "aws_elastictranscoder_pipeline" "bar" {
input_bucket = "${aws_s3_bucket.test_bucket.bucket}"
output_bucket = "${aws_s3_bucket.test_bucket.bucket}"
name = "aws_elastictranscoder_pipeline_tf_test_"
role = "${aws_iam_role.test_role.arn}"
}
resource "aws_iam_role" "test_role" {
name = "aws_elastictranscoder_pipeline_tf_test_role_"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Action": "sts:AssumeRole",
"Principal": {
"Service": "ec2.amazonaws.com"
},
"Effect": "Allow",
"Sid": ""
}
]
}
EOF
}
resource "aws_s3_bucket" "test_bucket" {
bucket = "aws-elasticencoder-pipeline-tf-test-bucket"
acl = "private"
}
`
const awsElasticTranscoderPipelineWithContentConfig = `
resource "aws_elastictranscoder_pipeline" "bar" {
input_bucket = "${aws_s3_bucket.content_bucket.bucket}"
name = "aws_elastictranscoder_pipeline_tf_test_"
role = "${aws_iam_role.test_role.arn}"
content_config = {
bucket = "${aws_s3_bucket.content_bucket.bucket}"
storage_class = "Standard"
}
thumbnail_config = {
bucket = "${aws_s3_bucket.content_bucket.bucket}"
storage_class = "Standard"
}
}
resource "aws_iam_role" "test_role" {
name = "aws_elastictranscoder_pipeline_tf_test_role_"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Action": "sts:AssumeRole",
"Principal": {
"Service": "ec2.amazonaws.com"
},
"Effect": "Allow",
"Sid": ""
}
]
}
EOF
}
resource "aws_s3_bucket" "content_bucket" {
bucket = "aws-elasticencoder-pipeline-tf-content-bucket"
acl = "private"
}
resource "aws_s3_bucket" "input_bucket" {
bucket = "aws-elasticencoder-pipeline-tf-input-bucket"
acl = "private"
}
resource "aws_s3_bucket" "thumb_bucket" {
bucket = "aws-elasticencoder-pipeline-tf-thumb-bucket"
acl = "private"
}
`
const awsElasticTranscoderPipelineWithContentConfigUpdate = `
resource "aws_elastictranscoder_pipeline" "bar" {
input_bucket = "${aws_s3_bucket.input_bucket.bucket}"
name = "aws_elastictranscoder_pipeline_tf_test_"
role = "${aws_iam_role.test_role.arn}"
content_config = {
bucket = "${aws_s3_bucket.content_bucket.bucket}"
storage_class = "Standard"
}
thumbnail_config = {
bucket = "${aws_s3_bucket.thumb_bucket.bucket}"
storage_class = "Standard"
}
}
resource "aws_iam_role" "test_role" {
name = "aws_elastictranscoder_pipeline_tf_test_role_"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Action": "sts:AssumeRole",
"Principal": {
"Service": "ec2.amazonaws.com"
},
"Effect": "Allow",
"Sid": ""
}
]
}
EOF
}
resource "aws_s3_bucket" "content_bucket" {
bucket = "aws-elasticencoder-pipeline-tf-content-bucket"
acl = "private"
}
resource "aws_s3_bucket" "input_bucket" {
bucket = "aws-elasticencoder-pipeline-tf-input-bucket"
acl = "private"
}
resource "aws_s3_bucket" "thumb_bucket" {
bucket = "aws-elasticencoder-pipeline-tf-thumb-bucket"
acl = "private"
}
`
const awsElasticTranscoderPipelineWithPerms = `
resource "aws_elastictranscoder_pipeline" "baz" {
input_bucket = "${aws_s3_bucket.content_bucket.bucket}"
name = "aws_elastictranscoder_pipeline_tf_test_"
role = "${aws_iam_role.test_role.arn}"
content_config = {
bucket = "${aws_s3_bucket.content_bucket.bucket}"
storage_class = "Standard"
}
content_config_permissions = {
grantee_type = "Group"
grantee = "AuthenticatedUsers"
access = ["FullControl"]
}
thumbnail_config = {
bucket = "${aws_s3_bucket.content_bucket.bucket}"
storage_class = "Standard"
}
thumbnail_config_permissions = {
grantee_type = "Group"
grantee = "AuthenticatedUsers"
access = ["FullControl"]
}
}
resource "aws_iam_role" "test_role" {
name = "aws_elastictranscoder_pipeline_tf_test_role_"
assume_role_policy = <<EOF
{
"Version": "2012-10-17",
"Statement": [
{
"Action": "sts:AssumeRole",
"Principal": {
"Service": "ec2.amazonaws.com"
},
"Effect": "Allow",
"Sid": ""
}
]
}
EOF
}
resource "aws_s3_bucket" "content_bucket" {
bucket = "aws-elasticencoder-pipeline-tf-content-bucket"
acl = "private"
}
`


@@ -0,0 +1,639 @@
package aws
import (
"fmt"
"log"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/elastictranscoder"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/helper/schema"
)
func resourceAwsElasticTranscoderPreset() *schema.Resource {
return &schema.Resource{
Create: resourceAwsElasticTranscoderPresetCreate,
Read: resourceAwsElasticTranscoderPresetRead,
Delete: resourceAwsElasticTranscoderPresetDelete,
Schema: map[string]*schema.Schema{
"arn": &schema.Schema{
Type: schema.TypeString,
Computed: true,
},
"audio": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
ForceNew: true,
MaxItems: 1,
Elem: &schema.Resource{
// elastictranscoder.AudioParameters
Schema: map[string]*schema.Schema{
"audio_packing_mode": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"bit_rate": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"channels": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"codec": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"sample_rate": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
},
},
},
"audio_codec_options": &schema.Schema{
Type: schema.TypeSet,
MaxItems: 1,
Optional: true,
ForceNew: true,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"bit_depth": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"bit_order": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"profile": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"signed": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
},
},
},
"container": &schema.Schema{
Type: schema.TypeString,
Required: true,
ForceNew: true,
},
"description": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"name": &schema.Schema{
Type: schema.TypeString,
Optional: true,
Computed: true,
ForceNew: true,
},
"thumbnails": &schema.Schema{
Type: schema.TypeSet,
MaxItems: 1,
Optional: true,
ForceNew: true,
Elem: &schema.Resource{
// elastictranscoder.Thumbnails
Schema: map[string]*schema.Schema{
"aspect_ratio": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"format": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"interval": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"max_height": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"max_width": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"padding_policy": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"resolution:": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"sizing_policy": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
},
},
},
"type": &schema.Schema{
Type: schema.TypeString,
Optional: true,
Computed: true,
},
"video": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
ForceNew: true,
MaxItems: 1,
Elem: &schema.Resource{
// elastictranscoder.VideoParameters
Schema: map[string]*schema.Schema{
"aspect_ratio": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"bit_rate": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"codec": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"display_aspect_ratio": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"fixed_gop": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"frame_rate": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"keyframes_max_dist": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"max_frame_rate": &schema.Schema{
Type: schema.TypeString,
Default: "30",
Optional: true,
ForceNew: true,
},
"max_height": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"max_width": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"padding_policy": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"resolution": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"sizing_policy": &schema.Schema{
Type: schema.TypeString,
Default: "Fit",
Optional: true,
ForceNew: true,
},
},
},
},
"video_watermarks": &schema.Schema{
Type: schema.TypeSet,
Optional: true,
ForceNew: true,
Elem: &schema.Resource{
// elastictranscoder.PresetWatermark
Schema: map[string]*schema.Schema{
"horizontal_align": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"horizontal_offset": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"id": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"max_height": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"max_width": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"opacity": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"sizing_policy": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"target": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"vertical_align": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
"vertical_offset": &schema.Schema{
Type: schema.TypeString,
Optional: true,
ForceNew: true,
},
},
},
},
"video_codec_options": &schema.Schema{
Type: schema.TypeMap,
Optional: true,
ForceNew: true,
},
},
}
}
func resourceAwsElasticTranscoderPresetCreate(d *schema.ResourceData, meta interface{}) error {
elastictranscoderconn := meta.(*AWSClient).elastictranscoderconn
req := &elastictranscoder.CreatePresetInput{
Audio: expandETAudioParams(d),
Container: aws.String(d.Get("container").(string)),
Description: getStringPtr(d, "description"),
Thumbnails: expandETThumbnails(d),
Video: exapandETVideoParams(d),
}
if name, ok := d.GetOk("name"); ok {
req.Name = aws.String(name.(string))
} else {
name := resource.PrefixedUniqueId("tf-et-preset-")
d.Set("name", name)
req.Name = aws.String(name)
}
log.Printf("[DEBUG] Elastic Transcoder Preset create opts: %s", req)
resp, err := elastictranscoderconn.CreatePreset(req)
if err != nil {
return fmt.Errorf("Error creating Elastic Transcoder Preset: %s", err)
}
if resp.Warning != nil && *resp.Warning != "" {
log.Printf("[WARN] Elastic Transcoder Preset: %s", *resp.Warning)
}
d.SetId(*resp.Preset.Id)
d.Set("arn", *resp.Preset.Arn)
return nil
}
func expandETThumbnails(d *schema.ResourceData) *elastictranscoder.Thumbnails {
set, ok := d.GetOk("thumbnails")
if !ok {
return nil
}
s := set.(*schema.Set)
if s == nil || s.Len() == 0 {
return nil
}
t := s.List()[0].(map[string]interface{})
return &elastictranscoder.Thumbnails{
AspectRatio: getStringPtr(t, "aspect_ratio"),
Format: getStringPtr(t, "format"),
Interval: getStringPtr(t, "interval"),
MaxHeight: getStringPtr(t, "max_height"),
MaxWidth: getStringPtr(t, "max_width"),
PaddingPolicy: getStringPtr(t, "padding_policy"),
Resolution: getStringPtr(t, "resolution"),
SizingPolicy: getStringPtr(t, "sizing_policy"),
}
}
func expandETAudioParams(d *schema.ResourceData) *elastictranscoder.AudioParameters {
set, ok := d.GetOk("audio")
if !ok {
return nil
}
s := set.(*schema.Set)
if s == nil || s.Len() == 0 {
return nil
}
audio := s.List()[0].(map[string]interface{})
return &elastictranscoder.AudioParameters{
AudioPackingMode: getStringPtr(audio, "audio_packing_mode"),
BitRate: getStringPtr(audio, "bit_rate"),
Channels: getStringPtr(audio, "channels"),
Codec: getStringPtr(audio, "codec"),
CodecOptions: expandETAudioCodecOptions(d),
SampleRate: getStringPtr(audio, "sample_rate"),
}
}
func expandETAudioCodecOptions(d *schema.ResourceData) *elastictranscoder.AudioCodecOptions {
s := d.Get("audio_codec_options").(*schema.Set)
if s == nil || s.Len() == 0 {
return nil
}
codec := s.List()[0].(map[string]interface{})
codecOpts := &elastictranscoder.AudioCodecOptions{
BitDepth: getStringPtr(codec, "bit_depth"),
BitOrder: getStringPtr(codec, "bit_order"),
Profile: getStringPtr(codec, "profile"),
Signed: getStringPtr(codec, "signed"),
}
return codecOpts
}
func exapandETVideoParams(d *schema.ResourceData) *elastictranscoder.VideoParameters {
s := d.Get("video").(*schema.Set)
if s == nil || s.Len() == 0 {
return nil
}
p := s.List()[0].(map[string]interface{})
return &elastictranscoder.VideoParameters{
AspectRatio: getStringPtr(p, "aspect_ratio"),
BitRate: getStringPtr(p, "bit_rate"),
Codec: getStringPtr(p, "codec"),
CodecOptions: stringMapToPointers(d.Get("video_codec_options").(map[string]interface{})),
DisplayAspectRatio: getStringPtr(p, "display_aspect_ratio"),
FixedGOP: getStringPtr(p, "fixed_gop"),
FrameRate: getStringPtr(p, "frame_rate"),
KeyframesMaxDist: getStringPtr(p, "keyframes_max_dist"),
MaxFrameRate: getStringPtr(p, "max_frame_rate"),
MaxHeight: getStringPtr(p, "max_height"),
MaxWidth: getStringPtr(p, "max_width"),
PaddingPolicy: getStringPtr(p, "padding_policy"),
Resolution: getStringPtr(p, "resolution"),
SizingPolicy: getStringPtr(p, "sizing_policy"),
Watermarks: expandETVideoWatermarks(d),
}
}
func expandETVideoWatermarks(d *schema.ResourceData) []*elastictranscoder.PresetWatermark {
s := d.Get("video_watermarks").(*schema.Set)
if s == nil || s.Len() == 0 {
return nil
}
var watermarks []*elastictranscoder.PresetWatermark
for _, w := range s.List() {
watermark := &elastictranscoder.PresetWatermark{
HorizontalAlign: getStringPtr(w, "horizontal_align"),
HorizontalOffset: getStringPtr(w, "horizontal_offset"),
Id: getStringPtr(w, "id"),
MaxHeight: getStringPtr(w, "max_height"),
MaxWidth: getStringPtr(w, "max_width"),
Opacity: getStringPtr(w, "opacity"),
SizingPolicy: getStringPtr(w, "sizing_policy"),
Target: getStringPtr(w, "target"),
VerticalAlign: getStringPtr(w, "vertical_align"),
VerticalOffset: getStringPtr(w, "vertical_offset"),
}
watermarks = append(watermarks, watermark)
}
return watermarks
}
func resourceAwsElasticTranscoderPresetRead(d *schema.ResourceData, meta interface{}) error {
elastictranscoderconn := meta.(*AWSClient).elastictranscoderconn
resp, err := elastictranscoderconn.ReadPreset(&elastictranscoder.ReadPresetInput{
Id: aws.String(d.Id()),
})
if err != nil {
if err, ok := err.(awserr.Error); ok && err.Code() == "ResourceNotFoundException" {
d.SetId("")
return nil
}
return err
}
log.Printf("[DEBUG] Elastic Transcoder Preset Read response: %#v", resp)
preset := resp.Preset
d.Set("arn", *preset.Arn)
if preset.Audio != nil {
err := d.Set("audio", flattenETAudioParameters(preset.Audio))
if err != nil {
return err
}
if preset.Audio.CodecOptions != nil {
d.Set("audio_codec_options", flattenETAudioCodecOptions(preset.Audio.CodecOptions))
}
}
d.Set("container", *preset.Container)
d.Set("name", *preset.Name)
if preset.Thumbnails != nil {
err := d.Set("thumbnails", flattenETThumbnails(preset.Thumbnails))
if err != nil {
return err
}
}
d.Set("type", *preset.Type)
if preset.Video != nil {
err := d.Set("video", flattenETVideoParams(preset.Video))
if err != nil {
return err
}
if preset.Video.CodecOptions != nil {
d.Set("video_codec_options", flattenETVideoCodecOptions(preset.Video.CodecOptions))
}
if preset.Video.Watermarks != nil {
d.Set("video_watermarks", flattenETWatermarks(preset.Video.Watermarks))
}
}
return nil
}
func flattenETAudioParameters(audio *elastictranscoder.AudioParameters) []map[string]interface{} {
m := setMap(make(map[string]interface{}))
m.SetString("audio_packing_mode", audio.AudioPackingMode)
m.SetString("bit_rate", audio.BitRate)
m.SetString("channels", audio.Channels)
m.SetString("codec", audio.Codec)
m.SetString("sample_rate", audio.SampleRate)
return m.MapList()
}
func flattenETAudioCodecOptions(opts *elastictranscoder.AudioCodecOptions) []map[string]interface{} {
if opts == nil {
return nil
}
m := setMap(make(map[string]interface{}))
m.SetString("bit_depth", opts.BitDepth)
m.SetString("bit_order", opts.BitOrder)
m.SetString("profile", opts.Profile)
m.SetString("signed", opts.Signed)
return m.MapList()
}
func flattenETThumbnails(thumbs *elastictranscoder.Thumbnails) []map[string]interface{} {
m := setMap(make(map[string]interface{}))
m.SetString("aspect_ratio", thumbs.AspectRatio)
m.SetString("format", thumbs.Format)
m.SetString("interval", thumbs.Interval)
m.SetString("max_height", thumbs.MaxHeight)
m.SetString("max_width", thumbs.MaxWidth)
m.SetString("padding_policy", thumbs.PaddingPolicy)
m.SetString("resolution", thumbs.Resolution)
m.SetString("sizing_policy", thumbs.SizingPolicy)
return m.MapList()
}
func flattenETVideoParams(video *elastictranscoder.VideoParameters) []map[string]interface{} {
m := setMap(make(map[string]interface{}))
m.SetString("aspect_ratio", video.AspectRatio)
m.SetString("bit_rate", video.BitRate)
m.SetString("codec", video.Codec)
m.SetString("display_aspect_ratio", video.DisplayAspectRatio)
m.SetString("fixed_gop", video.FixedGOP)
m.SetString("frame_rate", video.FrameRate)
m.SetString("keyframes_max_dist", video.KeyframesMaxDist)
m.SetString("max_frame_rate", video.MaxFrameRate)
m.SetString("max_height", video.MaxHeight)
m.SetString("max_width", video.MaxWidth)
m.SetString("padding_policy", video.PaddingPolicy)
m.SetString("resolution", video.Resolution)
m.SetString("sizing_policy", video.SizingPolicy)
return m.MapList()
}
func flattenETVideoCodecOptions(opts map[string]*string) []map[string]interface{} {
codecOpts := setMap(make(map[string]interface{}))
for k, v := range opts {
codecOpts.SetString(k, v)
}
return codecOpts.MapList()
}
func flattenETWatermarks(watermarks []*elastictranscoder.PresetWatermark) []map[string]interface{} {
var watermarkSet []map[string]interface{}
for _, w := range watermarks {
watermark := setMap(make(map[string]interface{}))
watermark.SetString("horizontal_align", w.HorizontalAlign)
watermark.SetString("horizontal_offset", w.HorizontalOffset)
watermark.SetString("id", w.Id)
watermark.SetString("max_height", w.MaxHeight)
watermark.SetString("max_width", w.MaxWidth)
watermark.SetString("opacity", w.Opacity)
watermark.SetString("sizing_policy", w.SizingPolicy)
watermark.SetString("target", w.Target)
watermark.SetString("vertical_align", w.VerticalAlign)
watermark.SetString("vertical_offset", w.VerticalOffset)
watermarkSet = append(watermarkSet, watermark.Map())
}
return watermarkSet
}
func resourceAwsElasticTranscoderPresetDelete(d *schema.ResourceData, meta interface{}) error {
elastictranscoderconn := meta.(*AWSClient).elastictranscoderconn
log.Printf("[DEBUG] Elastic Transcoder Delete Preset: %s", d.Id())
_, err := elastictranscoderconn.DeletePreset(&elastictranscoder.DeletePresetInput{
Id: aws.String(d.Id()),
})
if err != nil {
return fmt.Errorf("error deleting Elastic Transcoder Preset: %s", err)
}
return nil
}


@@ -0,0 +1,240 @@
package aws
import (
"fmt"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/elastictranscoder"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
func TestAccAWSElasticTranscoderPreset_basic(t *testing.T) {
preset := &elastictranscoder.Preset{}
name := "aws_elastictranscoder_preset.bar"
// make sure the old preset was destroyed on each intermediate step
// these are very easy to leak
checkExists := func(replaced bool) resource.TestCheckFunc {
return func(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).elastictranscoderconn
if replaced {
_, err := conn.ReadPreset(&elastictranscoder.ReadPresetInput{Id: preset.Id})
if err != nil {
return nil
}
return fmt.Errorf("Preset Id %s should not exist", preset.Id)
}
rs, ok := s.RootModule().Resources[name]
if !ok {
return fmt.Errorf("Not found: %s", name)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No Preset ID is set")
}
out, err := conn.ReadPreset(&elastictranscoder.ReadPresetInput{
Id: aws.String(rs.Primary.ID),
})
if err != nil {
return err
}
preset = out.Preset
return nil
}
}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckElasticTranscoderPresetDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: awsElasticTranscoderPresetConfig,
Check: resource.ComposeTestCheckFunc(
checkExists(false),
),
},
resource.TestStep{
Config: awsElasticTranscoderPresetConfig2,
Check: resource.ComposeTestCheckFunc(
checkExists(true),
),
},
resource.TestStep{
Config: awsElasticTranscoderPresetConfig3,
Check: resource.ComposeTestCheckFunc(
checkExists(true),
),
},
},
})
}
func testAccCheckElasticTranscoderPresetDestroy(s *terraform.State) error {
conn := testAccProvider.Meta().(*AWSClient).elastictranscoderconn
for _, rs := range s.RootModule().Resources {
if rs.Type != "aws_elastictranscoder_preset" {
continue
}
out, err := conn.ReadPreset(&elastictranscoder.ReadPresetInput{
Id: aws.String(rs.Primary.ID),
})
if err == nil {
if out.Preset != nil && *out.Preset.Id == rs.Primary.ID {
return fmt.Errorf("Elastic Transcoder Preset still exists")
}
}
awsErr, ok := err.(awserr.Error)
if !ok {
return err
}
if awsErr.Code() != "ResourceNotFoundException" {
return fmt.Errorf("unexpected error: %s", awsErr)
}
}
return nil
}
const awsElasticTranscoderPresetConfig = `
resource "aws_elastictranscoder_preset" "bar" {
container = "mp4"
description = "elastic transcoder preset test 1"
name = "aws_elastictranscoder_preset_tf_test_"
audio = {
audio_packing_mode = "SingleTrack"
bit_rate = 320
channels = 2
codec = "mp3"
sample_rate = 44100
}
}
`
const awsElasticTranscoderPresetConfig2 = `
resource "aws_elastictranscoder_preset" "bar" {
container = "mp4"
description = "elastic transcoder preset test 2"
name = "aws_elastictranscoder_preset_tf_test_"
audio = {
audio_packing_mode = "SingleTrack"
bit_rate = 128
channels = 2
codec = "AAC"
sample_rate = 48000
}
audio_codec_options = {
profile = "auto"
}
video = {
bit_rate = "auto"
codec = "H.264"
display_aspect_ratio = "16:9"
fixed_gop = "true"
frame_rate = "auto"
keyframes_max_dist = 90
max_height = 1080
max_width = 1920
padding_policy = "Pad"
sizing_policy = "Fit"
}
video_codec_options = {
Profile = "main"
Level = "4.1"
MaxReferenceFrames = 4
}
thumbnails = {
format = "jpg"
interval = 5
max_width = 960
max_height = 540
padding_policy = "Pad"
sizing_policy = "Fit"
}
}
`
const awsElasticTranscoderPresetConfig3 = `
resource "aws_elastictranscoder_preset" "bar" {
container = "mp4"
description = "elastic transcoder preset test 3"
name = "aws_elastictranscoder_preset_tf_test_"
audio = {
audio_packing_mode = "SingleTrack"
bit_rate = 96
channels = 2
codec = "AAC"
sample_rate = 44100
}
audio_codec_options = {
profile = "AAC-LC"
}
video = {
bit_rate = "1600"
codec = "H.264"
display_aspect_ratio = "16:9"
fixed_gop = "false"
frame_rate = "auto"
max_frame_rate = "60"
keyframes_max_dist = 240
max_height = "auto"
max_width = "auto"
padding_policy = "Pad"
sizing_policy = "Fit"
}
video_codec_options = {
Profile = "main"
Level = "2.2"
MaxReferenceFrames = 3
InterlaceMode = "Progressive"
ColorSpaceConversionMode = "None"
}
video_watermarks = {
id = "Terraform Test"
max_width = "20%"
max_height = "20%"
sizing_policy = "ShrinkToFit"
horizontal_align = "Right"
horizontal_offset = "10px"
vertical_align = "Bottom"
vertical_offset = "10px"
opacity = "55.5"
target = "Content"
}
thumbnails = {
format = "png"
interval = 120
max_width = "auto"
max_height = "auto"
padding_policy = "Pad"
sizing_policy = "Fit"
}
}
`


@@ -4,6 +4,7 @@ import (
"bytes"
"encoding/json"
"fmt"
"reflect"
"sort" "sort"
"strconv" "strconv"
"strings" "strings"
@ -1276,3 +1277,139 @@ func flattenApiGatewayThrottleSettings(settings *apigateway.ThrottleSettings) []
return result return result
} }
// TODO: refactor some of these helper functions and types in the terraform/helper packages
// getStringPtr returns a *string version of the value taken from m, where m
// can be a map[string]interface{} or a *schema.ResourceData. If the key isn't
// present or is empty, getStringPtr returns nil.
func getStringPtr(m interface{}, key string) *string {
switch m := m.(type) {
case map[string]interface{}:
v := m[key]
if v == nil {
return nil
}
s := v.(string)
if s == "" {
return nil
}
return &s
case *schema.ResourceData:
if v, ok := m.GetOk(key); ok {
if v == nil || v.(string) == "" {
return nil
}
s := v.(string)
return &s
}
default:
panic("unknown type in getStringPtr")
}
return nil
}
// getStringPtrList returns a []*string version of the map value. If the key
// isn't present, getStringPtrList returns nil.
func getStringPtrList(m map[string]interface{}, key string) []*string {
if v, ok := m[key]; ok {
var stringList []*string
for _, i := range v.([]interface{}) {
s := i.(string)
stringList = append(stringList, &s)
}
return stringList
}
return nil
}
// a convenience wrapper type for the schema.Set map[string]interface{}
// Set operations only alter the underlying map if the value is not nil
type setMap map[string]interface{}
// SetString sets s[key] = *value only if `value != nil`
func (s setMap) SetString(key string, value *string) {
if value == nil {
return
}
s[key] = *value
}
// SetStringMap sets key to value as a map[string]interface{}, stripping any nil
// values. The value parameter can be a map[string]interface{}, a
// map[string]*string, or a map[string]string.
func (s setMap) SetStringMap(key string, value interface{}) {
// because these methods are meant to be chained without intermediate
// checks for nil, we are likely to get interfaces with dynamic types but
// a nil value.
if reflect.ValueOf(value).IsNil() {
return
}
m := make(map[string]interface{})
switch value := value.(type) {
case map[string]string:
for k, v := range value {
m[k] = v
}
case map[string]*string:
for k, v := range value {
if v == nil {
continue
}
m[k] = *v
}
case map[string]interface{}:
for k, v := range value {
if v == nil {
continue
}
switch v := v.(type) {
case string:
m[k] = v
case *string:
if v != nil {
m[k] = *v
}
default:
panic(fmt.Sprintf("unknown type for SetString: %T", v))
}
}
}
// catch the case where the interface wasn't nil, but we had no non-nil values
if len(m) > 0 {
s[key] = m
}
}
// Set assigns value to s[key] if value isn't nil
func (s setMap) Set(key string, value interface{}) {
if reflect.ValueOf(value).IsNil() {
return
}
s[key] = value
}
// Map returns the raw map type for a shorter type conversion
func (s setMap) Map() map[string]interface{} {
return map[string]interface{}(s)
}
// MapList returns the map[string]interface{} as a single element in a slice to
// match the schema.Set data type used for structs.
func (s setMap) MapList() []map[string]interface{} {
return []map[string]interface{}{s.Map()}
}