Upgrade mapstructure vendoring to latest version
Apparently we've let this get several years out of date.
parent 1d500f57a6
commit 46994483e1
@@ -72,7 +72,10 @@ func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc {
             }
 
             // Modify the from kind to be correct with the new data
-            f = reflect.ValueOf(data).Type()
+            f = nil
+            if val := reflect.ValueOf(data); val.IsValid() {
+                f = val.Type()
+            }
         }
 
         return data, nil
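For reference, a hook chain built with ComposeDecodeHookFunc is wired in through DecoderConfig.DecodeHook; the guard added above is what keeps such a chain from panicking when a hook returns nil data. A minimal sketch of typical usage, assuming the StringToTimeDurationHookFunc and StringToSliceHookFunc helpers that ship alongside it (the Config type and keys below are illustrative, not part of this commit):

```go
package main

import (
    "fmt"
    "time"

    "github.com/mitchellh/mapstructure"
)

type Config struct {
    Timeout time.Duration
    Tags    []string
}

func main() {
    input := map[string]interface{}{
        "timeout": "5s",
        "tags":    "a,b,c",
    }

    var out Config
    decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
        // Hooks run in order; ComposeDecodeHookFunc threads the data
        // (and now a possibly-nil type) from one hook to the next.
        DecodeHook: mapstructure.ComposeDecodeHookFunc(
            mapstructure.StringToTimeDurationHookFunc(),
            mapstructure.StringToSliceHookFunc(","),
        ),
        Result: &out,
    })
    if err != nil {
        panic(err)
    }
    if err := decoder.Decode(input); err != nil {
        panic(err)
    }
    fmt.Println(out.Timeout, out.Tags) // 5s [a b c]
}
```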
@@ -1,5 +1,5 @@
 // The mapstructure package exposes functionality to convert an
-// abitrary map[string]interface{} into a native Go structure.
+// arbitrary map[string]interface{} into a native Go structure.
 //
 // The Go structure can be arbitrarily complex, containing slices,
 // other structs, etc. and the decoder will properly decode nested
@@ -8,6 +8,7 @@
 package mapstructure
 
 import (
+    "encoding/json"
     "errors"
     "fmt"
     "reflect"
@@ -67,6 +68,10 @@ type DecoderConfig struct {
     //     FALSE, false, False. Anything else is an error)
     //   - empty array = empty map and vice versa
     //   - negative numbers to overflowed uint values (base 10)
+    //   - slice of maps to a merged map
+    //   - single values are converted to slices if required. Each
+    //     element is weakly decoded. For example: "4" can become []int{4}
+    //     if the target type is an int slice.
     //
     WeaklyTypedInput bool
 
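The new bullets above translate into behavior along these lines: with WeaklyTypedInput set, a numeric string is parsed into an int and a single value is lifted into a one-element slice. A sketch (the Service type and keys are illustrative):

```go
package main

import (
    "fmt"

    "github.com/mitchellh/mapstructure"
)

type Service struct {
    Port  int
    Hosts []string
}

func main() {
    // "80" is weakly decoded into an int, and the single string
    // "localhost" is lifted into a one-element string slice,
    // as described by the doc bullets above.
    input := map[string]interface{}{
        "port":  "80",
        "hosts": "localhost",
    }

    var svc Service
    decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
        WeaklyTypedInput: true,
        Result:           &svc,
    })
    if err != nil {
        panic(err)
    }
    if err := decoder.Decode(input); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", svc) // {Port:80 Hosts:[localhost]}
}
```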
@@ -200,7 +205,7 @@ func (d *Decoder) decode(name string, data interface{}, val reflect.Value) error
             d.config.DecodeHook,
             dataVal.Type(), val.Type(), data)
         if err != nil {
-            return err
+            return fmt.Errorf("error decoding '%s': %s", name, err)
         }
     }
 
@@ -227,6 +232,8 @@ func (d *Decoder) decode(name string, data interface{}, val reflect.Value) error
         err = d.decodePtr(name, data, val)
     case reflect.Slice:
         err = d.decodeSlice(name, data, val)
+    case reflect.Func:
+        err = d.decodeFunc(name, data, val)
     default:
         // If we reached this point then we weren't able to decode it
         return fmt.Errorf("%s: unsupported type: %s", name, dataKind)
@@ -245,6 +252,10 @@ func (d *Decoder) decode(name string, data interface{}, val reflect.Value) error
 // value to "data" of that type.
 func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error {
     dataVal := reflect.ValueOf(data)
+    if !dataVal.IsValid() {
+        dataVal = reflect.Zero(val.Type())
+    }
+
     dataValType := dataVal.Type()
     if !dataValType.AssignableTo(val.Type()) {
         return fmt.Errorf(
@@ -301,6 +312,7 @@ func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value)
 func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error {
     dataVal := reflect.ValueOf(data)
     dataKind := getKind(dataVal)
+    dataType := dataVal.Type()
 
     switch {
     case dataKind == reflect.Int:
@@ -322,6 +334,14 @@ func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error
         } else {
             return fmt.Errorf("cannot parse '%s' as int: %s", name, err)
         }
+    case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
+        jn := data.(json.Number)
+        i, err := jn.Int64()
+        if err != nil {
+            return fmt.Errorf(
+                "error decoding json.Number into %s: %s", name, err)
+        }
+        val.SetInt(i)
     default:
         return fmt.Errorf(
             "'%s' expected type '%s', got unconvertible type '%s'",
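The json.Number branch matters when the input map was produced by encoding/json with UseNumber enabled, which keeps large integers exact instead of coercing them to float64. A sketch of that round trip (the payload and Account type are illustrative):

```go
package main

import (
    "encoding/json"
    "fmt"
    "strings"

    "github.com/mitchellh/mapstructure"
)

type Account struct {
    ID      int64
    Balance float64
}

func main() {
    // With UseNumber, numeric JSON values arrive as json.Number rather
    // than float64, which the upgraded decodeInt/decodeFloat now handle.
    raw := `{"id": 9007199254740993, "balance": 12.5}`
    dec := json.NewDecoder(strings.NewReader(raw))
    dec.UseNumber()

    var m map[string]interface{}
    if err := dec.Decode(&m); err != nil {
        panic(err)
    }

    var acct Account
    if err := mapstructure.Decode(m, &acct); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", acct) // {ID:9007199254740993 Balance:12.5}
}
```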
@@ -408,6 +428,7 @@ func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error
 func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error {
     dataVal := reflect.ValueOf(data)
     dataKind := getKind(dataVal)
+    dataType := dataVal.Type()
 
     switch {
     case dataKind == reflect.Int:
@@ -429,6 +450,14 @@ func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value)
         } else {
             return fmt.Errorf("cannot parse '%s' as float: %s", name, err)
         }
+    case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number":
+        jn := data.(json.Number)
+        i, err := jn.Float64()
+        if err != nil {
+            return fmt.Errorf(
+                "error decoding json.Number into %s: %s", name, err)
+        }
+        val.SetFloat(i)
     default:
         return fmt.Errorf(
             "'%s' expected type '%s', got unconvertible type '%s'",
@@ -456,15 +485,30 @@ func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error
     // Check input type
     dataVal := reflect.Indirect(reflect.ValueOf(data))
     if dataVal.Kind() != reflect.Map {
-        // Accept empty array/slice instead of an empty map in weakly typed mode
-        if d.config.WeaklyTypedInput &&
-            (dataVal.Kind() == reflect.Slice || dataVal.Kind() == reflect.Array) &&
-            dataVal.Len() == 0 {
-            val.Set(valMap)
-            return nil
-        } else {
-            return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind())
-        }
+        // In weak mode, we accept a slice of maps as an input...
+        if d.config.WeaklyTypedInput {
+            switch dataVal.Kind() {
+            case reflect.Array, reflect.Slice:
+                // Special case for BC reasons (covered by tests)
+                if dataVal.Len() == 0 {
+                    val.Set(valMap)
+                    return nil
+                }
+
+                for i := 0; i < dataVal.Len(); i++ {
+                    err := d.decode(
+                        fmt.Sprintf("%s[%d]", name, i),
+                        dataVal.Index(i).Interface(), val)
+                    if err != nil {
+                        return err
+                    }
+                }
+
+                return nil
+            }
+        }
+
+        return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind())
     }
 
     // Accumulate errors
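With WeaklyTypedInput, the branch above decodes each element of a slice of maps into the same target, which is the "slice of maps to a merged map" behavior documented earlier in this diff. A hedged sketch of what that should look like from the caller's side (keys are illustrative):

```go
package main

import (
    "fmt"

    "github.com/mitchellh/mapstructure"
)

func main() {
    // Several map fragments, e.g. read from separate config files.
    fragments := []map[string]interface{}{
        {"name": "web"},
        {"replicas": 3},
    }

    var merged map[string]interface{}
    decoder, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
        WeaklyTypedInput: true,
        Result:           &merged,
    })
    if err != nil {
        panic(err)
    }
    if err := decoder.Decode(fragments); err != nil {
        panic(err)
    }
    // Per the new doc bullet, both fragments should end up in one map:
    // map[name:web replicas:3]
    fmt.Println(merged)
}
```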
@@ -507,7 +551,12 @@ func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) error
     // into that. Then set the value of the pointer to this type.
     valType := val.Type()
     valElemType := valType.Elem()
-    realVal := reflect.New(valElemType)
+
+    realVal := val
+    if realVal.IsNil() || d.config.ZeroFields {
+        realVal = reflect.New(valElemType)
+    }
+
     if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil {
         return err
     }
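Because decodePtr now reuses a non-nil pointer unless ZeroFields is set, decoding into an already-populated value should merge new fields over the old ones rather than start from a fresh allocation. A sketch of that assumption (the Settings type and values are illustrative):

```go
package main

import (
    "fmt"

    "github.com/mitchellh/mapstructure"
)

type Settings struct {
    Host string
    Port int
}

func main() {
    // A pre-populated value behind a pointer.
    existing := &Settings{Host: "localhost", Port: 8080}

    // Only Port appears in the input. With the pointer now reused,
    // Host should survive the decode; setting ZeroFields: true in a
    // DecoderConfig would allocate a fresh Settings instead.
    input := map[string]interface{}{"port": 9090}
    if err := mapstructure.Decode(input, &existing); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", *existing) // {Host:localhost Port:9090}
}
```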
@@ -516,6 +565,19 @@ func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) error
     return nil
 }
 
+func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error {
+    // Create an element of the concrete (non pointer) type and decode
+    // into that. Then set the value of the pointer to this type.
+    dataVal := reflect.Indirect(reflect.ValueOf(data))
+    if val.Type() != dataVal.Type() {
+        return fmt.Errorf(
+            "'%s' expected type '%s', got unconvertible type '%s'",
+            name, val.Type(), dataVal.Type())
+    }
+    val.Set(dataVal)
+    return nil
+}
+
 func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error {
     dataVal := reflect.Indirect(reflect.ValueOf(data))
     dataValKind := dataVal.Kind()
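The new decodeFunc simply copies a function value whose type matches the destination field exactly; it does no conversion. A small sketch of what that enables (the Handlers type is illustrative):

```go
package main

import (
    "fmt"

    "github.com/mitchellh/mapstructure"
)

type Handlers struct {
    OnEvent func(string)
}

func main() {
    input := map[string]interface{}{
        // The function type must match the target field exactly;
        // otherwise decodeFunc reports an unconvertible-type error.
        "onevent": func(msg string) { fmt.Println("got:", msg) },
    }

    var h Handlers
    if err := mapstructure.Decode(input, &h); err != nil {
        panic(err)
    }
    h.OnEvent("hello") // got: hello
}
```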
@@ -523,26 +585,44 @@ func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value)
     valElemType := valType.Elem()
     sliceType := reflect.SliceOf(valElemType)
 
-    // Check input type
-    if dataValKind != reflect.Array && dataValKind != reflect.Slice {
-        // Accept empty map instead of array/slice in weakly typed mode
-        if d.config.WeaklyTypedInput && dataVal.Kind() == reflect.Map && dataVal.Len() == 0 {
-            val.Set(reflect.MakeSlice(sliceType, 0, 0))
-            return nil
-        } else {
-            return fmt.Errorf(
-                "'%s': source data must be an array or slice, got %s", name, dataValKind)
-        }
-    }
-
-    // Make a new slice to hold our result, same size as the original data.
-    valSlice := reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len())
+    valSlice := val
+    if valSlice.IsNil() || d.config.ZeroFields {
+        // Check input type
+        if dataValKind != reflect.Array && dataValKind != reflect.Slice {
+            if d.config.WeaklyTypedInput {
+                switch {
+                // Empty maps turn into empty slices
+                case dataValKind == reflect.Map:
+                    if dataVal.Len() == 0 {
+                        val.Set(reflect.MakeSlice(sliceType, 0, 0))
+                        return nil
+                    }
+
+                // All other types we try to convert to the slice type
+                // and "lift" it into it. i.e. a string becomes a string slice.
+                default:
+                    // Just re-try this function with data as a slice.
+                    return d.decodeSlice(name, []interface{}{data}, val)
+                }
+            }
+
+            return fmt.Errorf(
+                "'%s': source data must be an array or slice, got %s", name, dataValKind)
+        }
+
+        // Make a new slice to hold our result, same size as the original data.
+        valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len())
+    }
 
     // Accumulate any errors
     errors := make([]string, 0)
 
     for i := 0; i < dataVal.Len(); i++ {
         currentData := dataVal.Index(i).Interface()
+        for valSlice.Len() <= i {
+            valSlice = reflect.Append(valSlice, reflect.Zero(valElemType))
+        }
         currentField := valSlice.Index(i)
 
         fieldName := fmt.Sprintf("%s[%d]", name, i)
@@ -607,17 +687,10 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
         structs = structs[1:]
 
         structType := structVal.Type()
+
         for i := 0; i < structType.NumField(); i++ {
             fieldType := structType.Field(i)
-
-            if fieldType.Anonymous {
-                fieldKind := fieldType.Type.Kind()
-                if fieldKind != reflect.Struct {
-                    errors = appendErrors(errors,
-                        fmt.Errorf("%s: unsupported type: %s", fieldType.Name, fieldKind))
-                    continue
-                }
-            }
+            fieldKind := fieldType.Type.Kind()
 
             // If "squash" is specified in the tag, we squash the field down.
             squash := false
@@ -630,7 +703,12 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
             }
 
             if squash {
-                structs = append(structs, val.FieldByName(fieldType.Name))
+                if fieldKind != reflect.Struct {
+                    errors = appendErrors(errors,
+                        fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldKind))
+                } else {
+                    structs = append(structs, val.FieldByName(fieldType.Name))
+                }
                 continue
             }
 
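The branch above validates the ",squash" struct-tag option, which flattens an embedded struct's keys into the parent map; using it on a non-struct field is now reported as an error. A sketch of the tag in use (types and keys are illustrative):

```go
package main

import (
    "fmt"

    "github.com/mitchellh/mapstructure"
)

type Base struct {
    ID string
}

type Server struct {
    // ",squash" lifts Base's fields to the top level of the input map.
    Base `mapstructure:",squash"`
    Addr string
}

func main() {
    input := map[string]interface{}{
        "id":   "srv-1",
        "addr": "10.0.0.1:8080",
    }

    var s Server
    if err := mapstructure.Decode(input, &s); err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", s) // {Base:{ID:srv-1} Addr:10.0.0.1:8080}
}
```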
@@ -653,7 +731,7 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
         if !rawMapVal.IsValid() {
             // Do a slower search by iterating over each key and
             // doing case-insensitive search.
-            for dataValKey, _ := range dataValKeys {
+            for dataValKey := range dataValKeys {
                 mK, ok := dataValKey.Interface().(string)
                 if !ok {
                     // Not a string key
@@ -701,7 +779,7 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
 
     if d.config.ErrorUnused && len(dataValKeysUnused) > 0 {
         keys := make([]string, 0, len(dataValKeysUnused))
-        for rawKey, _ := range dataValKeysUnused {
+        for rawKey := range dataValKeysUnused {
             keys = append(keys, rawKey.(string))
         }
         sort.Strings(keys)
@@ -716,7 +794,7 @@ func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value)
 
     // Add the unused keys to the list of unused keys if we're tracking metadata
     if d.config.Metadata != nil {
-        for rawKey, _ := range dataValKeysUnused {
+        for rawKey := range dataValKeysUnused {
             key := rawKey.(string)
             if name != "" {
                 key = fmt.Sprintf("%s.%s", name, key)
@@ -2564,9 +2564,10 @@
             "revision": "6b17d669fac5e2f71c16658d781ec3fdd3802b69"
         },
         {
-            "checksumSHA1": "4Js6Jlu93Wa0o6Kjt393L9Z7diE=",
+            "checksumSHA1": "MlX15lJuV8DYARX5RJY8rqrSEWQ=",
             "path": "github.com/mitchellh/mapstructure",
-            "revision": "281073eb9eb092240d33ef253c404f1cca550309"
+            "revision": "53818660ed4955e899c0bcafa97299a388bd7c8e",
+            "revisionTime": "2017-03-07T20:11:23Z"
         },
         {
             "checksumSHA1": "e/MV3GL8ZOpqyNSKVPtMeqTRR/w=",