Merge pull request #2093 from sdboyer/master
nit: Condense switch fallthroughs into expression lists
commit 1128c64ade
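The change is purely mechanical: Go switch cases do not fall through by default, so a chain of otherwise-empty cases joined by fallthrough behaves exactly like a single case with a comma-separated expression list. A minimal standalone sketch of the two forms (not taken from this diff; the classify helpers below are hypothetical, for illustration only):

package main

import "fmt"

// classifyVerbose mirrors the old style removed by this commit:
// empty cases chained together with fallthrough.
func classifyVerbose(name string) string {
	switch name {
	case "scripts":
		fallthrough
	case "script":
		fallthrough
	case "inline":
		return "known"
	default:
		return "unknown"
	}
}

// classifyCondensed mirrors the new style: one case with an expression list.
func classifyCondensed(name string) string {
	switch name {
	case "scripts", "script", "inline":
		return "known"
	default:
		return "unknown"
	}
}

func main() {
	// Both forms agree for every input.
	for _, n := range []string{"scripts", "script", "inline", "bogus"} {
		fmt.Println(n, classifyVerbose(n), classifyCondensed(n))
	}
}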
@@ -51,11 +51,7 @@ func (p *ResourceProvisioner) Validate(c *terraform.ResourceConfig) (ws []string
     num := 0
     for name := range c.Raw {
         switch name {
-        case "scripts":
-            fallthrough
-        case "script":
-            fallthrough
-        case "inline":
+        case "scripts", "script", "inline":
             num++
         default:
             es = append(es, fmt.Errorf("Unknown configuration '%s'", name))
@@ -33,9 +33,7 @@ type fileLoaderFunc func(path string) (configurable, []string, error)
 func loadTree(root string) (*importTree, error) {
     var f fileLoaderFunc
     switch ext(root) {
-    case ".tf":
-        fallthrough
-    case ".tf.json":
+    case ".tf", ".tf.json":
         f = loadFileHcl
     default:
     }
@@ -299,9 +299,7 @@ func (x *parserLex) lexString(yylval *parserSymType, quoted bool) (int, bool) {
     // Let's check to see if we're escaping anything.
     if c == '\\' {
         switch n := x.next(); n {
-        case '\\':
-            fallthrough
-        case '"':
+        case '\\', '"':
             c = n
         case 'n':
             c = '\n'
@@ -78,19 +78,11 @@ func addrToSchema(addr []string, schemaMap map[string]*Schema) []*Schema {
         }

         switch t := current.Type; t {
-        case TypeBool:
-            fallthrough
-        case TypeInt:
-            fallthrough
-        case TypeFloat:
-            fallthrough
-        case TypeString:
+        case TypeBool, TypeInt, TypeFloat, TypeString:
             if len(addr) > 0 {
                 return nil
             }
-        case TypeList:
-            fallthrough
-        case TypeSet:
+        case TypeList, TypeSet:
             switch v := current.Elem.(type) {
             case *Resource:
                 current = &Schema{
@@ -80,13 +80,7 @@ func (r *ConfigFieldReader) readField(
     k := strings.Join(address, ".")
     schema := schemaList[len(schemaList)-1]
     switch schema.Type {
-    case TypeBool:
-        fallthrough
-    case TypeFloat:
-        fallthrough
-    case TypeInt:
-        fallthrough
-    case TypeString:
+    case TypeBool, TypeFloat, TypeInt, TypeString:
         return r.readPrimitive(k, schema)
     case TypeList:
         return readListField(&nestedConfigFieldReader{r}, address, schema)
@@ -39,13 +39,7 @@ func (r *DiffFieldReader) ReadField(address []string) (FieldReadResult, error) {

     schema := schemaList[len(schemaList)-1]
     switch schema.Type {
-    case TypeBool:
-        fallthrough
-    case TypeInt:
-        fallthrough
-    case TypeFloat:
-        fallthrough
-    case TypeString:
+    case TypeBool, TypeInt, TypeFloat, TypeString:
         return r.readPrimitive(address, schema)
     case TypeList:
         return readListField(r, address, schema)
@@ -21,13 +21,7 @@ func (r *MapFieldReader) ReadField(address []string) (FieldReadResult, error) {

     schema := schemaList[len(schemaList)-1]
     switch schema.Type {
-    case TypeBool:
-        fallthrough
-    case TypeInt:
-        fallthrough
-    case TypeFloat:
-        fallthrough
-    case TypeString:
+    case TypeBool, TypeInt, TypeFloat, TypeString:
         return r.readPrimitive(address, schema)
     case TypeList:
         return readListField(r, address, schema)
@@ -74,13 +74,7 @@ func (w *MapFieldWriter) set(addr []string, value interface{}) error {

     schema := schemaList[len(schemaList)-1]
     switch schema.Type {
-    case TypeBool:
-        fallthrough
-    case TypeInt:
-        fallthrough
-    case TypeFloat:
-        fallthrough
-    case TypeString:
+    case TypeBool, TypeInt, TypeFloat, TypeString:
         return w.setPrimitive(addr, value, schema)
     case TypeList:
         return w.setList(addr, value, schema)
@@ -378,13 +378,7 @@ func (m schemaMap) Input(

         var value interface{}
         switch v.Type {
-        case TypeBool:
-            fallthrough
-        case TypeInt:
-            fallthrough
-        case TypeFloat:
-            fallthrough
-        case TypeSet:
+        case TypeBool, TypeInt, TypeFloat, TypeSet:
             continue
         case TypeString:
             value, err = m.inputString(input, k, v)
@@ -522,13 +516,7 @@ func (m schemaMap) diff(
     all bool) error {
     var err error
     switch schema.Type {
-    case TypeBool:
-        fallthrough
-    case TypeInt:
-        fallthrough
-    case TypeFloat:
-        fallthrough
-    case TypeString:
+    case TypeBool, TypeInt, TypeFloat, TypeString:
         err = m.diffString(k, schema, diff, d, all)
     case TypeList:
         err = m.diffList(k, schema, diff, d, all)
@@ -1170,9 +1158,7 @@ func (m schemaMap) validateType(
     var ws []string
     var es []error
     switch schema.Type {
-    case TypeSet:
-        fallthrough
-    case TypeList:
+    case TypeSet, TypeList:
         ws, es = m.validateList(k, raw, schema, c)
     case TypeMap:
         ws, es = m.validateMap(k, raw, schema, c)
@@ -149,15 +149,11 @@ func (d *ModuleDiff) ChangeType() DiffChangeType {
     for _, r := range d.Resources {
         change := r.ChangeType()
         switch change {
-        case DiffCreate:
-            fallthrough
-        case DiffDestroy:
+        case DiffCreate, DiffDestroy:
             if result == DiffNone {
                 result = change
             }
-        case DiffDestroyCreate:
-            fallthrough
-        case DiffUpdate:
+        case DiffDestroyCreate, DiffUpdate:
             result = DiffUpdate
         }
     }
@@ -148,9 +148,7 @@ func (n *GraphNodeConfigResource) DynamicExpand(ctx EvalContext) (*Graph, error)
     // Primary and non-destroy modes are responsible for creating/destroying
     // all the nodes, expanding counts.
     switch n.DestroyMode {
-    case DestroyNone:
-        fallthrough
-    case DestroyPrimary:
+    case DestroyNone, DestroyPrimary:
         steps = append(steps, &ResourceCountTransformer{
             Resource: n.Resource,
             Destroy:  n.DestroyMode != DestroyNone,