diff --git a/plans/internal/planproto/doc.go b/plans/internal/planproto/doc.go new file mode 100644 index 000000000..d6ea0f781 --- /dev/null +++ b/plans/internal/planproto/doc.go @@ -0,0 +1,7 @@ +// Package planproto is home to the Go stubs generated from the tfplan protobuf +// schema. +// +// This is an internal package to be used only by Terraform's planfile package. +// From elsewhere in Terraform, use the API exported by the planfile package +// itself. +package planproto diff --git a/plans/internal/planproto/generate.go b/plans/internal/planproto/generate.go new file mode 100644 index 000000000..005343a3a --- /dev/null +++ b/plans/internal/planproto/generate.go @@ -0,0 +1,3 @@ +package planproto + +//go:generate protoc --go_out=paths=source_relative:. planfile.proto diff --git a/plans/internal/planproto/planfile.pb.go b/plans/internal/planproto/planfile.pb.go new file mode 100644 index 000000000..a5fd8f8f7 --- /dev/null +++ b/plans/internal/planproto/planfile.pb.go @@ -0,0 +1,868 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: planfile.proto + +package planproto // import "github.com/hashicorp/terraform/plans/internal/planproto" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +// Action describes the type of action planned for an object. +// Not all action values are valid for all object types. +type Action int32 + +const ( + Action_NOOP Action = 0 + Action_CREATE Action = 1 + Action_READ Action = 2 + Action_UPDATE Action = 3 + Action_REPLACE Action = 4 + Action_DELETE Action = 5 +) + +var Action_name = map[int32]string{ + 0: "NOOP", + 1: "CREATE", + 2: "READ", + 3: "UPDATE", + 4: "REPLACE", + 5: "DELETE", +} +var Action_value = map[string]int32{ + "NOOP": 0, + "CREATE": 1, + "READ": 2, + "UPDATE": 3, + "REPLACE": 4, + "DELETE": 5, +} + +func (x Action) String() string { + return proto.EnumName(Action_name, int32(x)) +} +func (Action) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{0} +} + +type ResourceInstanceChange_ResourceMode int32 + +const ( + ResourceInstanceChange_managed ResourceInstanceChange_ResourceMode = 0 + ResourceInstanceChange_data ResourceInstanceChange_ResourceMode = 1 +) + +var ResourceInstanceChange_ResourceMode_name = map[int32]string{ + 0: "managed", + 1: "data", +} +var ResourceInstanceChange_ResourceMode_value = map[string]int32{ + "managed": 0, + "data": 1, +} + +func (x ResourceInstanceChange_ResourceMode) String() string { + return proto.EnumName(ResourceInstanceChange_ResourceMode_name, int32(x)) +} +func (ResourceInstanceChange_ResourceMode) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{2, 0} +} + +// Plan is the root message type for the tfplan file +type Plan struct { + // Version is incremented whenever there is a breaking change to + // the serialization format. Programs reading serialized plans should + // verify that version is set to the expected value and abort processing + // if not. 
A breaking change is any change that may cause an older + // consumer to interpret the structure incorrectly. This number will + // not be incremented if an existing consumer can either safely ignore + // changes to the format or if an existing consumer would fail to process + // the file for another message- or field-specific reason. + Version uint64 `protobuf:"varint,1,opt,name=version,proto3" json:"version,omitempty"` + // The variables that were set when creating the plan. Each value is + // a msgpack serialization of an HCL value. + Variables map[string]*DynamicValue `protobuf:"bytes,2,rep,name=variables,proto3" json:"variables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + // An unordered set of proposed changes to resources throughout the + // configuration, including any nested modules. Use the address of + // each resource to determine which module it belongs to. + ResourceChanges []*ResourceInstanceChange `protobuf:"bytes,3,rep,name=resource_changes,json=resourceChanges,proto3" json:"resource_changes,omitempty"` + // An unordered set of proposed changes to outputs in the root module + // of the configuration. This set also includes "no action" changes for + // outputs that are not changing, as context for detecting inconsistencies + // at apply time. + OutputChanges []*OutputChange `protobuf:"bytes,4,rep,name=output_changes,json=outputChanges,proto3" json:"output_changes,omitempty"` + // The version string for the Terraform binary that created this plan. + TerraformVersion string `protobuf:"bytes,14,opt,name=terraform_version,json=terraformVersion,proto3" json:"terraform_version,omitempty"` + // SHA256 digests of all of the provider plugin binaries that were used + // in the creation of this plan. 
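+ // As an illustrative sketch only (not part of the schema), a consumer
+ // holding a provider plugin binary might verify it against this map; the
+ // plan variable, the "aws" key, and pluginBytes are all hypothetical names:
+ //
+ //     sum := sha256.Sum256(pluginBytes)
+ //     ok := bytes.Equal(plan.ProviderHashes["aws"].Sha256, sum[:])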
+ ProviderHashes map[string]*Hash `protobuf:"bytes,15,rep,name=provider_hashes,json=providerHashes,proto3" json:"provider_hashes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Plan) Reset() { *m = Plan{} } +func (m *Plan) String() string { return proto.CompactTextString(m) } +func (*Plan) ProtoMessage() {} +func (*Plan) Descriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{0} +} +func (m *Plan) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Plan.Unmarshal(m, b) +} +func (m *Plan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Plan.Marshal(b, m, deterministic) +} +func (dst *Plan) XXX_Merge(src proto.Message) { + xxx_messageInfo_Plan.Merge(dst, src) +} +func (m *Plan) XXX_Size() int { + return xxx_messageInfo_Plan.Size(m) +} +func (m *Plan) XXX_DiscardUnknown() { + xxx_messageInfo_Plan.DiscardUnknown(m) +} + +var xxx_messageInfo_Plan proto.InternalMessageInfo + +func (m *Plan) GetVersion() uint64 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *Plan) GetVariables() map[string]*DynamicValue { + if m != nil { + return m.Variables + } + return nil +} + +func (m *Plan) GetResourceChanges() []*ResourceInstanceChange { + if m != nil { + return m.ResourceChanges + } + return nil +} + +func (m *Plan) GetOutputChanges() []*OutputChange { + if m != nil { + return m.OutputChanges + } + return nil +} + +func (m *Plan) GetTerraformVersion() string { + if m != nil { + return m.TerraformVersion + } + return "" +} + +func (m *Plan) GetProviderHashes() map[string]*Hash { + if m != nil { + return m.ProviderHashes + } + return nil +} + +// Change represents a change made to some object, transforming it from an old +// state to a new state. +type Change struct { + // Not all action values are valid for all object types. Consult + // the documentation for any message that embeds Change. + Action Action `protobuf:"varint,1,opt,name=action,proto3,enum=tfplan.Action" json:"action,omitempty"` + // msgpack-encoded HCL values involved in the change. + // - For update and replace, two values are provided that give the old and new values, + // respectively. + // - For create, one value is provided that gives the new value to be created + // - For delete, one value is provided that describes the value being deleted + // - For read, two values are provided that give the prior value for this object + // (or null, if no prior value exists) and the value that was or will be read, + // respectively. + // - For no-op, one value is provided that is left unmodified by this non-change. 
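+ // A hypothetical sketch of reading the values for an update action,
+ // following the ordering described above (variable names are illustrative;
+ // decoding the msgpack payloads is left to the consumer):
+ //
+ //     if c.Action == Action_UPDATE {
+ //         oldRaw := c.Values[0].Msgpack // prior value
+ //         newRaw := c.Values[1].Msgpack // planned new value
+ //         // ... decode oldRaw and newRaw with a msgpack library ...
+ //     }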
+ Values []*DynamicValue `protobuf:"bytes,2,rep,name=values,proto3" json:"values,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Change) Reset() { *m = Change{} } +func (m *Change) String() string { return proto.CompactTextString(m) } +func (*Change) ProtoMessage() {} +func (*Change) Descriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{1} +} +func (m *Change) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Change.Unmarshal(m, b) +} +func (m *Change) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Change.Marshal(b, m, deterministic) +} +func (dst *Change) XXX_Merge(src proto.Message) { + xxx_messageInfo_Change.Merge(dst, src) +} +func (m *Change) XXX_Size() int { + return xxx_messageInfo_Change.Size(m) +} +func (m *Change) XXX_DiscardUnknown() { + xxx_messageInfo_Change.DiscardUnknown(m) +} + +var xxx_messageInfo_Change proto.InternalMessageInfo + +func (m *Change) GetAction() Action { + if m != nil { + return m.Action + } + return Action_NOOP +} + +func (m *Change) GetValues() []*DynamicValue { + if m != nil { + return m.Values + } + return nil +} + +type ResourceInstanceChange struct { + // module_path is an address to the module that defined this resource. + // module_path is omitted for resources in the root module. For descendent modules + // it is a string like module.foo.module.bar as would be seen at the beginning of a + // resource address. The format of this string is not yet frozen and so external + // callers should treat it as an opaque key for filtering purposes. + ModulePath string `protobuf:"bytes,1,opt,name=module_path,json=modulePath,proto3" json:"module_path,omitempty"` + // mode is the resource mode. + Mode ResourceInstanceChange_ResourceMode `protobuf:"varint,2,opt,name=mode,proto3,enum=tfplan.ResourceInstanceChange_ResourceMode" json:"mode,omitempty"` + // type is the resource type name, like "aws_instance". + Type string `protobuf:"bytes,3,opt,name=type,proto3" json:"type,omitempty"` + // name is the logical name of the resource as defined in configuration. + // For example, in aws_instance.foo this would be "foo". + Name string `protobuf:"bytes,4,opt,name=name,proto3" json:"name,omitempty"` + // instance_key is either an integer index or a string key, depending on which iteration + // attributes ("count" or "for_each") are being used for this resource. If none + // are in use, this field is omitted. + // + // Types that are valid to be assigned to InstanceKey: + // *ResourceInstanceChange_Str + // *ResourceInstanceChange_Int + InstanceKey isResourceInstanceChange_InstanceKey `protobuf_oneof:"instance_key"` + // deposed_key, if set, indicates that this change applies to a deposed + // object for the indicated instance with the given deposed key. If not + // set, the change applies to the instance's current object. + DeposedKey string `protobuf:"bytes,7,opt,name=deposed_key,json=deposedKey,proto3" json:"deposed_key,omitempty"` + // Description of the proposed change. May use "create", "read", "update", + // "replace" and "delete" actions. "no-op" changes are not currently used here + // but consumers must accept and discard them to allow for future expansion. + Change *Change `protobuf:"bytes,8,opt,name=change,proto3" json:"change,omitempty"` + // msgpack representation of an arbitrary object value provided by + // the resource provider as additional context for the change. 
Must + // be considered an opaque value for any consumer other than the + // provider that generated it. + Private *DynamicValue `protobuf:"bytes,9,opt,name=private,proto3" json:"private,omitempty"` + // An unordered set of paths that prompted the change action to be + // "replace" rather than "update". Empty for any action other than + // "replace". + RequiredReplace []*Path `protobuf:"bytes,10,rep,name=required_replace,json=requiredReplace,proto3" json:"required_replace,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResourceInstanceChange) Reset() { *m = ResourceInstanceChange{} } +func (m *ResourceInstanceChange) String() string { return proto.CompactTextString(m) } +func (*ResourceInstanceChange) ProtoMessage() {} +func (*ResourceInstanceChange) Descriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{2} +} +func (m *ResourceInstanceChange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResourceInstanceChange.Unmarshal(m, b) +} +func (m *ResourceInstanceChange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResourceInstanceChange.Marshal(b, m, deterministic) +} +func (dst *ResourceInstanceChange) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResourceInstanceChange.Merge(dst, src) +} +func (m *ResourceInstanceChange) XXX_Size() int { + return xxx_messageInfo_ResourceInstanceChange.Size(m) +} +func (m *ResourceInstanceChange) XXX_DiscardUnknown() { + xxx_messageInfo_ResourceInstanceChange.DiscardUnknown(m) +} + +var xxx_messageInfo_ResourceInstanceChange proto.InternalMessageInfo + +type isResourceInstanceChange_InstanceKey interface { + isResourceInstanceChange_InstanceKey() +} + +type ResourceInstanceChange_Str struct { + Str string `protobuf:"bytes,5,opt,name=str,proto3,oneof"` +} +type ResourceInstanceChange_Int struct { + Int int64 `protobuf:"varint,6,opt,name=int,proto3,oneof"` +} + +func (*ResourceInstanceChange_Str) isResourceInstanceChange_InstanceKey() {} +func (*ResourceInstanceChange_Int) isResourceInstanceChange_InstanceKey() {} + +func (m *ResourceInstanceChange) GetInstanceKey() isResourceInstanceChange_InstanceKey { + if m != nil { + return m.InstanceKey + } + return nil +} + +func (m *ResourceInstanceChange) GetModulePath() string { + if m != nil { + return m.ModulePath + } + return "" +} + +func (m *ResourceInstanceChange) GetMode() ResourceInstanceChange_ResourceMode { + if m != nil { + return m.Mode + } + return ResourceInstanceChange_managed +} + +func (m *ResourceInstanceChange) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *ResourceInstanceChange) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *ResourceInstanceChange) GetStr() string { + if x, ok := m.GetInstanceKey().(*ResourceInstanceChange_Str); ok { + return x.Str + } + return "" +} + +func (m *ResourceInstanceChange) GetInt() int64 { + if x, ok := m.GetInstanceKey().(*ResourceInstanceChange_Int); ok { + return x.Int + } + return 0 +} + +func (m *ResourceInstanceChange) GetDeposedKey() string { + if m != nil { + return m.DeposedKey + } + return "" +} + +func (m *ResourceInstanceChange) GetChange() *Change { + if m != nil { + return m.Change + } + return nil +} + +func (m *ResourceInstanceChange) GetPrivate() *DynamicValue { + if m != nil { + return m.Private + } + return nil +} + +func (m *ResourceInstanceChange) GetRequiredReplace() []*Path { + if m != nil { + return 
m.RequiredReplace + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*ResourceInstanceChange) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _ResourceInstanceChange_OneofMarshaler, _ResourceInstanceChange_OneofUnmarshaler, _ResourceInstanceChange_OneofSizer, []interface{}{ + (*ResourceInstanceChange_Str)(nil), + (*ResourceInstanceChange_Int)(nil), + } +} + +func _ResourceInstanceChange_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*ResourceInstanceChange) + // instance_key + switch x := m.InstanceKey.(type) { + case *ResourceInstanceChange_Str: + b.EncodeVarint(5<<3 | proto.WireBytes) + b.EncodeStringBytes(x.Str) + case *ResourceInstanceChange_Int: + b.EncodeVarint(6<<3 | proto.WireVarint) + b.EncodeVarint(uint64(x.Int)) + case nil: + default: + return fmt.Errorf("ResourceInstanceChange.InstanceKey has unexpected type %T", x) + } + return nil +} + +func _ResourceInstanceChange_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*ResourceInstanceChange) + switch tag { + case 5: // instance_key.str + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.InstanceKey = &ResourceInstanceChange_Str{x} + return true, err + case 6: // instance_key.int + if wire != proto.WireVarint { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeVarint() + m.InstanceKey = &ResourceInstanceChange_Int{int64(x)} + return true, err + default: + return false, nil + } +} + +func _ResourceInstanceChange_OneofSizer(msg proto.Message) (n int) { + m := msg.(*ResourceInstanceChange) + // instance_key + switch x := m.InstanceKey.(type) { + case *ResourceInstanceChange_Str: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.Str))) + n += len(x.Str) + case *ResourceInstanceChange_Int: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(x.Int)) + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +type OutputChange struct { + // Name of the output as defined in the root module. + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + // Description of the proposed change. May use "no-op", "create", + // "update" and "delete" actions. + Change *Change `protobuf:"bytes,2,opt,name=change,proto3" json:"change,omitempty"` + // Sensitive, if true, indicates that one or more of the values given + // in "change" is sensitive and should not be shown directly in any + // rendered plan. 
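+ // A hypothetical rendering guard a consumer might apply (oc is an
+ // *OutputChange; the redacted placeholder text is illustrative):
+ //
+ //     if oc.Sensitive {
+ //         fmt.Printf("%s = (sensitive value)\n", oc.Name)
+ //     }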
+ Sensitive bool `protobuf:"varint,3,opt,name=sensitive,proto3" json:"sensitive,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *OutputChange) Reset() { *m = OutputChange{} } +func (m *OutputChange) String() string { return proto.CompactTextString(m) } +func (*OutputChange) ProtoMessage() {} +func (*OutputChange) Descriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{3} +} +func (m *OutputChange) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_OutputChange.Unmarshal(m, b) +} +func (m *OutputChange) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_OutputChange.Marshal(b, m, deterministic) +} +func (dst *OutputChange) XXX_Merge(src proto.Message) { + xxx_messageInfo_OutputChange.Merge(dst, src) +} +func (m *OutputChange) XXX_Size() int { + return xxx_messageInfo_OutputChange.Size(m) +} +func (m *OutputChange) XXX_DiscardUnknown() { + xxx_messageInfo_OutputChange.DiscardUnknown(m) +} + +var xxx_messageInfo_OutputChange proto.InternalMessageInfo + +func (m *OutputChange) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *OutputChange) GetChange() *Change { + if m != nil { + return m.Change + } + return nil +} + +func (m *OutputChange) GetSensitive() bool { + if m != nil { + return m.Sensitive + } + return false +} + +// DynamicValue represents a value whose type is not decided until runtime, +// often based on schema information obtained from a plugin. +// +// At present dynamic values are always encoded as msgpack, with extension +// id 0 used to represent the special "unknown" value indicating results +// that won't be known until after apply. +// +// In future other serialization formats may be used, possibly with a +// transitional period of including both as separate attributes of this type. +// Consumers must ignore attributes they don't support and fail if no supported +// attribute is present. The top-level format version will not be incremented +// for changes to the set of dynamic serialization formats. +type DynamicValue struct { + Msgpack []byte `protobuf:"bytes,1,opt,name=msgpack,proto3" json:"msgpack,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DynamicValue) Reset() { *m = DynamicValue{} } +func (m *DynamicValue) String() string { return proto.CompactTextString(m) } +func (*DynamicValue) ProtoMessage() {} +func (*DynamicValue) Descriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{4} +} +func (m *DynamicValue) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DynamicValue.Unmarshal(m, b) +} +func (m *DynamicValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DynamicValue.Marshal(b, m, deterministic) +} +func (dst *DynamicValue) XXX_Merge(src proto.Message) { + xxx_messageInfo_DynamicValue.Merge(dst, src) +} +func (m *DynamicValue) XXX_Size() int { + return xxx_messageInfo_DynamicValue.Size(m) +} +func (m *DynamicValue) XXX_DiscardUnknown() { + xxx_messageInfo_DynamicValue.DiscardUnknown(m) +} + +var xxx_messageInfo_DynamicValue proto.InternalMessageInfo + +func (m *DynamicValue) GetMsgpack() []byte { + if m != nil { + return m.Msgpack + } + return nil +} + +// Hash represents a hash value. +// +// At present hashes always use the SHA256 algorithm. 
In future other hash +// algorithms may be used, possibly with a transitional period of including +// both as separate attributes of this type. Consumers must ignore attributes +// they don't support and fail if no supported attribute is present. The +// top-level format version will not be incremented for changes to the set of +// hash algorithms. +type Hash struct { + Sha256 []byte `protobuf:"bytes,1,opt,name=sha256,proto3" json:"sha256,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Hash) Reset() { *m = Hash{} } +func (m *Hash) String() string { return proto.CompactTextString(m) } +func (*Hash) ProtoMessage() {} +func (*Hash) Descriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{5} +} +func (m *Hash) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Hash.Unmarshal(m, b) +} +func (m *Hash) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Hash.Marshal(b, m, deterministic) +} +func (dst *Hash) XXX_Merge(src proto.Message) { + xxx_messageInfo_Hash.Merge(dst, src) +} +func (m *Hash) XXX_Size() int { + return xxx_messageInfo_Hash.Size(m) +} +func (m *Hash) XXX_DiscardUnknown() { + xxx_messageInfo_Hash.DiscardUnknown(m) +} + +var xxx_messageInfo_Hash proto.InternalMessageInfo + +func (m *Hash) GetSha256() []byte { + if m != nil { + return m.Sha256 + } + return nil +} + +// Path represents a set of steps to traverse into a data structure. It is +// used to refer to a sub-structure within a dynamic data structure presented +// separately. +type Path struct { + Steps []*Path_Step `protobuf:"bytes,1,rep,name=steps,proto3" json:"steps,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Path) Reset() { *m = Path{} } +func (m *Path) String() string { return proto.CompactTextString(m) } +func (*Path) ProtoMessage() {} +func (*Path) Descriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{6} +} +func (m *Path) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Path.Unmarshal(m, b) +} +func (m *Path) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Path.Marshal(b, m, deterministic) +} +func (dst *Path) XXX_Merge(src proto.Message) { + xxx_messageInfo_Path.Merge(dst, src) +} +func (m *Path) XXX_Size() int { + return xxx_messageInfo_Path.Size(m) +} +func (m *Path) XXX_DiscardUnknown() { + xxx_messageInfo_Path.DiscardUnknown(m) +} + +var xxx_messageInfo_Path proto.InternalMessageInfo + +func (m *Path) GetSteps() []*Path_Step { + if m != nil { + return m.Steps + } + return nil +} + +type Path_Step struct { + // Types that are valid to be assigned to Selector: + // *Path_Step_AttributeName + // *Path_Step_ElementKey + Selector isPath_Step_Selector `protobuf_oneof:"selector"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Path_Step) Reset() { *m = Path_Step{} } +func (m *Path_Step) String() string { return proto.CompactTextString(m) } +func (*Path_Step) ProtoMessage() {} +func (*Path_Step) Descriptor() ([]byte, []int) { + return fileDescriptor_planfile_78f69d66ae9e7fdf, []int{6, 0} +} +func (m *Path_Step) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Path_Step.Unmarshal(m, b) +} +func (m *Path_Step) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_Path_Step.Marshal(b, m, deterministic) +} +func (dst *Path_Step) XXX_Merge(src proto.Message) { + xxx_messageInfo_Path_Step.Merge(dst, src) +} +func (m *Path_Step) XXX_Size() int { + return xxx_messageInfo_Path_Step.Size(m) +} +func (m *Path_Step) XXX_DiscardUnknown() { + xxx_messageInfo_Path_Step.DiscardUnknown(m) +} + +var xxx_messageInfo_Path_Step proto.InternalMessageInfo + +type isPath_Step_Selector interface { + isPath_Step_Selector() +} + +type Path_Step_AttributeName struct { + AttributeName string `protobuf:"bytes,1,opt,name=attribute_name,json=attributeName,proto3,oneof"` +} +type Path_Step_ElementKey struct { + ElementKey *DynamicValue `protobuf:"bytes,2,opt,name=element_key,json=elementKey,proto3,oneof"` +} + +func (*Path_Step_AttributeName) isPath_Step_Selector() {} +func (*Path_Step_ElementKey) isPath_Step_Selector() {} + +func (m *Path_Step) GetSelector() isPath_Step_Selector { + if m != nil { + return m.Selector + } + return nil +} + +func (m *Path_Step) GetAttributeName() string { + if x, ok := m.GetSelector().(*Path_Step_AttributeName); ok { + return x.AttributeName + } + return "" +} + +func (m *Path_Step) GetElementKey() *DynamicValue { + if x, ok := m.GetSelector().(*Path_Step_ElementKey); ok { + return x.ElementKey + } + return nil +} + +// XXX_OneofFuncs is for the internal use of the proto package. +func (*Path_Step) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) { + return _Path_Step_OneofMarshaler, _Path_Step_OneofUnmarshaler, _Path_Step_OneofSizer, []interface{}{ + (*Path_Step_AttributeName)(nil), + (*Path_Step_ElementKey)(nil), + } +} + +func _Path_Step_OneofMarshaler(msg proto.Message, b *proto.Buffer) error { + m := msg.(*Path_Step) + // selector + switch x := m.Selector.(type) { + case *Path_Step_AttributeName: + b.EncodeVarint(1<<3 | proto.WireBytes) + b.EncodeStringBytes(x.AttributeName) + case *Path_Step_ElementKey: + b.EncodeVarint(2<<3 | proto.WireBytes) + if err := b.EncodeMessage(x.ElementKey); err != nil { + return err + } + case nil: + default: + return fmt.Errorf("Path_Step.Selector has unexpected type %T", x) + } + return nil +} + +func _Path_Step_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) { + m := msg.(*Path_Step) + switch tag { + case 1: // selector.attribute_name + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + x, err := b.DecodeStringBytes() + m.Selector = &Path_Step_AttributeName{x} + return true, err + case 2: // selector.element_key + if wire != proto.WireBytes { + return true, proto.ErrInternalBadWireType + } + msg := new(DynamicValue) + err := b.DecodeMessage(msg) + m.Selector = &Path_Step_ElementKey{msg} + return true, err + default: + return false, nil + } +} + +func _Path_Step_OneofSizer(msg proto.Message) (n int) { + m := msg.(*Path_Step) + // selector + switch x := m.Selector.(type) { + case *Path_Step_AttributeName: + n += 1 // tag and wire + n += proto.SizeVarint(uint64(len(x.AttributeName))) + n += len(x.AttributeName) + case *Path_Step_ElementKey: + s := proto.Size(x.ElementKey) + n += 1 // tag and wire + n += proto.SizeVarint(uint64(s)) + n += s + case nil: + default: + panic(fmt.Sprintf("proto: unexpected type %T in oneof", x)) + } + return n +} + +func init() { + proto.RegisterType((*Plan)(nil), "tfplan.Plan") + proto.RegisterMapType((map[string]*Hash)(nil), "tfplan.Plan.ProviderHashesEntry") + 
proto.RegisterMapType((map[string]*DynamicValue)(nil), "tfplan.Plan.VariablesEntry") + proto.RegisterType((*Change)(nil), "tfplan.Change") + proto.RegisterType((*ResourceInstanceChange)(nil), "tfplan.ResourceInstanceChange") + proto.RegisterType((*OutputChange)(nil), "tfplan.OutputChange") + proto.RegisterType((*DynamicValue)(nil), "tfplan.DynamicValue") + proto.RegisterType((*Hash)(nil), "tfplan.Hash") + proto.RegisterType((*Path)(nil), "tfplan.Path") + proto.RegisterType((*Path_Step)(nil), "tfplan.Path.Step") + proto.RegisterEnum("tfplan.Action", Action_name, Action_value) + proto.RegisterEnum("tfplan.ResourceInstanceChange_ResourceMode", ResourceInstanceChange_ResourceMode_name, ResourceInstanceChange_ResourceMode_value) +} + +func init() { proto.RegisterFile("planfile.proto", fileDescriptor_planfile_78f69d66ae9e7fdf) } + +var fileDescriptor_planfile_78f69d66ae9e7fdf = []byte{ + // 794 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x55, 0xdb, 0x6e, 0xe3, 0x36, + 0x10, 0xb5, 0x6c, 0x45, 0x89, 0x27, 0x5e, 0x45, 0xcb, 0x16, 0x0b, 0x21, 0x2d, 0xb6, 0x86, 0x80, + 0x76, 0x8d, 0xdd, 0xc2, 0x01, 0x52, 0xb4, 0xe9, 0xb6, 0x0f, 0x45, 0x2e, 0x02, 0x12, 0xec, 0x25, + 0x06, 0xbb, 0xcd, 0x43, 0x1f, 0x6a, 0x30, 0xd2, 0x24, 0x26, 0x56, 0xa2, 0x54, 0x92, 0x32, 0xe0, + 0xb7, 0xfe, 0x4c, 0xbf, 0xa0, 0x3f, 0x58, 0x90, 0xba, 0x44, 0x01, 0x02, 0x3f, 0x59, 0x73, 0xe6, + 0xcc, 0x88, 0xe7, 0xcc, 0x50, 0x06, 0xbf, 0xcc, 0x98, 0xb8, 0xe3, 0x19, 0xce, 0x4b, 0x59, 0xe8, + 0x82, 0x78, 0xfa, 0xce, 0x20, 0xd1, 0x3f, 0x2e, 0xb8, 0x8b, 0x8c, 0x09, 0x12, 0xc2, 0xee, 0x1a, + 0xa5, 0xe2, 0x85, 0x08, 0x9d, 0xa9, 0x33, 0x73, 0x69, 0x1b, 0x92, 0xb7, 0x30, 0x5e, 0x33, 0xc9, + 0xd9, 0x6d, 0x86, 0x2a, 0x1c, 0x4e, 0x47, 0xb3, 0xfd, 0xe3, 0xaf, 0xe6, 0x75, 0xf9, 0xdc, 0x94, + 0xce, 0x6f, 0xda, 0x6c, 0x2c, 0xb4, 0xdc, 0xd0, 0x07, 0x36, 0xb9, 0x82, 0x40, 0xa2, 0x2a, 0x2a, + 0x99, 0xe0, 0x32, 0x59, 0x31, 0x71, 0x8f, 0x2a, 0x1c, 0xd9, 0x0e, 0x2f, 0xdb, 0x0e, 0xb4, 0xc9, + 0x5f, 0x09, 0xa5, 0x99, 0x48, 0xf0, 0xdc, 0xd2, 0xe8, 0x41, 0x5b, 0x57, 0xc7, 0x8a, 0xfc, 0x0a, + 0x7e, 0x51, 0xe9, 0xb2, 0xd2, 0x5d, 0x23, 0xd7, 0x36, 0xfa, 0xb2, 0x6d, 0x74, 0x6d, 0xb3, 0x4d, + 0xf9, 0xb3, 0xa2, 0x17, 0x29, 0xf2, 0x06, 0x9e, 0x6b, 0x94, 0x92, 0xdd, 0x15, 0x32, 0x5f, 0xb6, + 0x32, 0xfd, 0xa9, 0x33, 0x1b, 0xd3, 0xa0, 0x4b, 0xdc, 0x34, 0x7a, 0xaf, 0xe0, 0xa0, 0x94, 0xc5, + 0x9a, 0xa7, 0x28, 0x97, 0x2b, 0xa6, 0x56, 0xa8, 0xc2, 0x03, 0xfb, 0xaa, 0xe9, 0x23, 0xd5, 0x8b, + 0x86, 0x73, 0x69, 0x29, 0xb5, 0x74, 0xbf, 0x7c, 0x04, 0x1e, 0x52, 0xf0, 0x1f, 0x9b, 0x43, 0x02, + 0x18, 0x7d, 0xc6, 0x8d, 0xb5, 0x78, 0x4c, 0xcd, 0x23, 0x79, 0x0d, 0x3b, 0x6b, 0x96, 0x55, 0x18, + 0x0e, 0xa7, 0x4e, 0x5f, 0xcf, 0xc5, 0x46, 0xb0, 0x9c, 0x27, 0x37, 0x26, 0x47, 0x6b, 0xca, 0x2f, + 0xc3, 0x9f, 0x9d, 0xc3, 0x6b, 0xf8, 0xe2, 0x89, 0x57, 0x3f, 0xd1, 0x38, 0x7a, 0xdc, 0x78, 0xd2, + 0x36, 0x36, 0x55, 0xbd, 0x86, 0xd1, 0x5f, 0xe0, 0xd5, 0x3e, 0x91, 0xef, 0xc0, 0x63, 0x89, 0x6e, + 0x57, 0xc0, 0x3f, 0xf6, 0xdb, 0x92, 0x53, 0x8b, 0xd2, 0x26, 0x4b, 0xbe, 0x07, 0xcf, 0x96, 0xb7, + 0xeb, 0xf0, 0xf4, 0x99, 0x1b, 0x4e, 0xf4, 0xdf, 0x08, 0x5e, 0x3c, 0x3d, 0x65, 0xf2, 0x0d, 0xec, + 0xe7, 0x45, 0x5a, 0x65, 0xb8, 0x2c, 0x99, 0x5e, 0x35, 0x87, 0x87, 0x1a, 0x5a, 0x30, 0xbd, 0x22, + 0xbf, 0x81, 0x9b, 0x17, 0x69, 0x2d, 0xc1, 0x3f, 0x7e, 0xb3, 0x7d, 0x69, 0x3a, 0xf8, 0x43, 0x91, + 0x22, 0xb5, 0x85, 0x84, 0x80, 0xab, 0x37, 0x25, 0x86, 0x23, 0xdb, 0xda, 0x3e, 0x1b, 0x4c, 0xb0, + 0x1c, 0x43, 0xb7, 0xc6, 0xcc, 0x33, 0x21, 0x30, 0x52, 0x5a, 
0x86, 0x3b, 0x06, 0xba, 0x1c, 0x50, + 0x13, 0x18, 0x8c, 0x0b, 0x1d, 0x7a, 0x53, 0x67, 0x36, 0x32, 0x18, 0x17, 0xda, 0x9c, 0x38, 0xc5, + 0xb2, 0x50, 0x98, 0x2e, 0x8d, 0xdd, 0xbb, 0xf5, 0x89, 0x1b, 0xe8, 0x1d, 0x6e, 0x8c, 0x87, 0xf5, + 0x82, 0x86, 0x7b, 0xd6, 0xf6, 0xce, 0xc3, 0x66, 0x33, 0x9b, 0x2c, 0x99, 0xc3, 0x6e, 0x29, 0xf9, + 0x9a, 0x69, 0x0c, 0xc7, 0x5b, 0x06, 0xdf, 0x92, 0xc8, 0x89, 0xb9, 0x4a, 0x7f, 0x57, 0x5c, 0x62, + 0xba, 0x94, 0x58, 0x66, 0x2c, 0xc1, 0x10, 0xac, 0xfb, 0xdd, 0x60, 0x8d, 0x63, 0xe6, 0xe2, 0xd4, + 0x2c, 0x5a, 0x93, 0xa2, 0x6f, 0x61, 0xd2, 0xf7, 0x85, 0xec, 0xc3, 0x6e, 0xce, 0x04, 0xbb, 0xc7, + 0x34, 0x18, 0x90, 0x3d, 0x70, 0x53, 0xa6, 0x59, 0xe0, 0x9c, 0xf9, 0x30, 0xe1, 0x8d, 0x9b, 0x46, + 0x59, 0xb4, 0x82, 0x49, 0xff, 0x46, 0x75, 0xa6, 0x39, 0x3d, 0xd3, 0x1e, 0xb4, 0x0e, 0xb7, 0x6a, + 0xfd, 0x1a, 0xc6, 0x0a, 0x85, 0xe2, 0x9a, 0xaf, 0xeb, 0x49, 0xec, 0xd1, 0x07, 0x20, 0x9a, 0xc1, + 0xa4, 0x2f, 0xd9, 0x7c, 0x89, 0x72, 0x75, 0x5f, 0xb2, 0xe4, 0xb3, 0x7d, 0xd9, 0x84, 0xb6, 0x61, + 0xf4, 0x12, 0x5c, 0xb3, 0xbc, 0xe4, 0x05, 0x78, 0x6a, 0xc5, 0x8e, 0x7f, 0xfc, 0xa9, 0x21, 0x34, + 0x51, 0xf4, 0xaf, 0x03, 0xae, 0x5d, 0x9b, 0x57, 0xb0, 0xa3, 0x34, 0x96, 0x2a, 0x74, 0xac, 0x43, + 0xcf, 0xfb, 0x0e, 0xcd, 0x7f, 0xd7, 0x58, 0xd2, 0x3a, 0x7f, 0xa8, 0xc1, 0x35, 0x21, 0x79, 0x05, + 0x3e, 0xd3, 0x5a, 0xf2, 0xdb, 0x4a, 0xe3, 0xf2, 0x41, 0xe7, 0xe5, 0x80, 0x3e, 0xeb, 0xf0, 0x8f, + 0x46, 0xf2, 0x09, 0xec, 0x63, 0x86, 0x39, 0x0a, 0x6d, 0xe7, 0xbf, 0xe5, 0xce, 0x5e, 0x0e, 0x28, + 0x34, 0xd4, 0x77, 0xb8, 0x39, 0x03, 0xd8, 0x53, 0x98, 0x61, 0xa2, 0x0b, 0xf9, 0xfa, 0x03, 0x78, + 0xf5, 0x8d, 0x32, 0xfe, 0x7f, 0xbc, 0xbe, 0x5e, 0x04, 0x03, 0x02, 0xe0, 0x9d, 0xd3, 0xf8, 0xf4, + 0x53, 0x1c, 0x38, 0x06, 0xa5, 0xf1, 0xe9, 0x45, 0x30, 0x34, 0xe8, 0x1f, 0x8b, 0x0b, 0x83, 0x8e, + 0xcc, 0xe0, 0x68, 0xbc, 0x78, 0x7f, 0x7a, 0x1e, 0x07, 0xae, 0x49, 0x5c, 0xc4, 0xef, 0xe3, 0x4f, + 0x71, 0xb0, 0x73, 0xf6, 0xf6, 0xcf, 0x93, 0x7b, 0xae, 0x57, 0xd5, 0xed, 0x3c, 0x29, 0xf2, 0x23, + 0xf3, 0xc9, 0xe2, 0x49, 0x21, 0xcb, 0xa3, 0xee, 0xcb, 0x76, 0x64, 0x0e, 0xa7, 0x8e, 0xb8, 0xd0, + 0x28, 0x05, 0xcb, 0x6c, 0x68, 0xff, 0x06, 0x6e, 0x3d, 0xfb, 0xf3, 0xc3, 0xff, 0x01, 0x00, 0x00, + 0xff, 0xff, 0x19, 0x9b, 0x98, 0x0d, 0x1f, 0x06, 0x00, 0x00, +} diff --git a/plans/internal/planproto/planfile.proto b/plans/internal/planproto/planfile.proto new file mode 100644 index 000000000..557b97916 --- /dev/null +++ b/plans/internal/planproto/planfile.proto @@ -0,0 +1,183 @@ +syntax = "proto3"; +package tfplan; + +// For Terraform's own parsing, the proto stub types go into an internal Go +// package. The public API is in github.com/hashicorp/terraform/plans/planfile . +option go_package = "github.com/hashicorp/terraform/plans/internal/planproto"; + +// Plan is the root message type for the tfplan file +message Plan { + // Version is incremented whenever there is a breaking change to + // the serialization format. Programs reading serialized plans should + // verify that version is set to the expected value and abort processing + // if not. A breaking change is any change that may cause an older + // consumer to interpret the structure incorrectly. This number will + // not be incremented if an existing consumer can either safely ignore + // changes to the format or if an existing consumer would fail to process + // the file for another message- or field-specific reason. + uint64 version = 1; + + // The variables that were set when creating the plan. Each value is + // a msgpack serialization of an HCL value. 
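+  // For example (illustrative only), a variable region = "us-west-2" would
+  // appear here under the key "region", with a msgpack payload of
+  // "\xa9us-west-2" (a fixstr of length 9).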
+    map<string, DynamicValue> variables = 2;
+
+    // An unordered set of proposed changes to resources throughout the
+    // configuration, including any nested modules. Use the address of
+    // each resource to determine which module it belongs to.
+    repeated ResourceInstanceChange resource_changes = 3;
+
+    // An unordered set of proposed changes to outputs in the root module
+    // of the configuration. This set also includes "no action" changes for
+    // outputs that are not changing, as context for detecting inconsistencies
+    // at apply time.
+    repeated OutputChange output_changes = 4;
+
+    // The version string for the Terraform binary that created this plan.
+    string terraform_version = 14;
+
+    // SHA256 digests of all of the provider plugin binaries that were used
+    // in the creation of this plan.
+    map<string, Hash> provider_hashes = 15;
+}
+
+// Action describes the type of action planned for an object.
+// Not all action values are valid for all object types.
+enum Action {
+    NOOP = 0;
+    CREATE = 1;
+    READ = 2;
+    UPDATE = 3;
+    REPLACE = 4;
+    DELETE = 5;
+}
+
+// Change represents a change made to some object, transforming it from an old
+// state to a new state.
+message Change {
+    // Not all action values are valid for all object types. Consult
+    // the documentation for any message that embeds Change.
+    Action action = 1;
+
+    // msgpack-encoded HCL values involved in the change.
+    // - For update and replace, two values are provided that give the old and new values,
+    //   respectively.
+    // - For create, one value is provided that gives the new value to be created
+    // - For delete, one value is provided that describes the value being deleted
+    // - For read, two values are provided that give the prior value for this object
+    //   (or null, if no prior value exists) and the value that was or will be read,
+    //   respectively.
+    // - For no-op, one value is provided that is left unmodified by this non-change.
+    repeated DynamicValue values = 2;
+}
+
+message ResourceInstanceChange {
+    // module_path is an address to the module that defined this resource.
+    // module_path is omitted for resources in the root module. For descendent modules
+    // it is a string like module.foo.module.bar as would be seen at the beginning of a
+    // resource address. The format of this string is not yet frozen and so external
+    // callers should treat it as an opaque key for filtering purposes.
+    string module_path = 1;
+
+    // mode is the resource mode.
+    ResourceMode mode = 2;
+    enum ResourceMode {
+        managed = 0; // for "resource" blocks in configuration
+        data = 1; // for "data" blocks in configuration
+    }
+
+    // type is the resource type name, like "aws_instance".
+    string type = 3;
+
+    // name is the logical name of the resource as defined in configuration.
+    // For example, in aws_instance.foo this would be "foo".
+    string name = 4;
+
+    // instance_key is either an integer index or a string key, depending on which iteration
+    // attributes ("count" or "for_each") are being used for this resource. If none
+    // are in use, this field is omitted.
+    oneof instance_key {
+        string str = 5;
+        int64 int = 6;
+    };
+
+    // deposed_key, if set, indicates that this change applies to a deposed
+    // object for the indicated instance with the given deposed key. If not
+    // set, the change applies to the instance's current object.
+    string deposed_key = 7;
+
+    // Description of the proposed change. May use "create", "read", "update",
+    // "replace" and "delete" actions.
"no-op" changes are not currently used here + // but consumers must accept and discard them to allow for future expansion. + Change change = 8; + + // msgpack representation of an arbitrary object value provided by + // the resource provider as additional context for the change. Must + // be considered an opaque value for any consumer other than the + // provider that generated it. + DynamicValue private = 9; + + // An unordered set of paths that prompted the change action to be + // "replace" rather than "update". Empty for any action other than + // "replace". + repeated Path required_replace = 10; +} + +message OutputChange { + // Name of the output as defined in the root module. + string name = 1; + + // Description of the proposed change. May use "no-op", "create", + // "update" and "delete" actions. + Change change = 2; + + // Sensitive, if true, indicates that one or more of the values given + // in "change" is sensitive and should not be shown directly in any + // rendered plan. + bool sensitive = 3; +} + +// DynamicValue represents a value whose type is not decided until runtime, +// often based on schema information obtained from a plugin. +// +// At present dynamic values are always encoded as msgpack, with extension +// id 0 used to represent the special "unknown" value indicating results +// that won't be known until after apply. +// +// In future other serialization formats may be used, possibly with a +// transitional period of including both as separate attributes of this type. +// Consumers must ignore attributes they don't support and fail if no supported +// attribute is present. The top-level format version will not be incremented +// for changes to the set of dynamic serialization formats. +message DynamicValue { + bytes msgpack = 1; +} + +// Hash represents a hash value. +// +// At present hashes always use the SHA256 algorithm. In future other hash +// algorithms may be used, possibly with a transitional period of including +// both as separate attributes of this type. Consumers must ignore attributes +// they don't support and fail if no supported attribute is present. The +// top-level format version will not be incremented for changes to the set of +// hash algorithms. +message Hash { + bytes sha256 = 1; +} + +// Path represents a set of steps to traverse into a data structure. It is +// used to refer to a sub-structure within a dynamic data structure presented +// separately. +message Path { + message Step { + oneof selector { + // Set "attribute_name" to represent looking up an attribute + // in the current object value. + string attribute_name = 1; + + // Set "element_key" to represent looking up an element in + // an indexable collection type. 
+ DynamicValue element_key = 2; + } + } + repeated Step steps = 1; +} diff --git a/plans/planfile/config_snapshot.go b/plans/planfile/config_snapshot.go new file mode 100644 index 000000000..a78a99b31 --- /dev/null +++ b/plans/planfile/config_snapshot.go @@ -0,0 +1,218 @@ +package planfile + +import ( + "archive/zip" + "encoding/json" + "fmt" + "io/ioutil" + "path" + "sort" + "strings" + "time" + + version "github.com/hashicorp/go-version" + "github.com/hashicorp/terraform/configs/configload" +) + +const configSnapshotPrefix = "tfconfig/" +const configSnapshotManifestFile = configSnapshotPrefix + "modules.json" +const configSnapshotModulePrefix = configSnapshotPrefix + "m-" + +type configSnapshotModuleRecord struct { + Key string `json:"Key"` + SourceAddr string `json:"Source,omitempty"` + VersionStr string `json:"Version,omitempty"` + Dir string `json:"Dir"` +} +type configSnapshotModuleManifest []configSnapshotModuleRecord + +func readConfigSnapshot(z *zip.Reader) (*configload.Snapshot, error) { + // Errors from this function are expected to be reported with some + // additional prefix context about them being in a config snapshot, + // so they should not themselves refer to the config snapshot. + // They are also generally indicative of an invalid file, and so since + // plan files should not be hand-constructed we don't need to worry + // about making the messages user-actionable. + + snap := &configload.Snapshot{ + Modules: map[string]*configload.SnapshotModule{}, + } + var manifestSrc []byte + + // For processing our source files, we'll just sweep over all the files + // and react to the one-by-one to start, and then clean up afterwards + // when we'll presumably have found the manifest file. + for _, file := range z.File { + switch { + + case file.Name == configSnapshotManifestFile: + // It's the manifest file, so we'll just read it raw into + // manifestSrc for now and process it below. + r, err := file.Open() + if err != nil { + return nil, fmt.Errorf("failed to open module manifest: %s", r) + } + manifestSrc, err = ioutil.ReadAll(r) + if err != nil { + return nil, fmt.Errorf("failed to read module manifest: %s", r) + } + + case strings.HasPrefix(file.Name, configSnapshotModulePrefix): + relName := file.Name[len(configSnapshotModulePrefix):] + moduleKey, fileName := path.Split(relName) + + // moduleKey should currently have a trailing slash on it, which we + // can use to recognize the difference between the root module + // (just a trailing slash) and no module path at all (empty string). + if moduleKey == "" { + // ignore invalid config entry + continue + } + moduleKey = moduleKey[:len(moduleKey)-1] // trim trailing slash + + r, err := file.Open() + if err != nil { + return nil, fmt.Errorf("failed to open snapshot of %s from module %q: %s", fileName, moduleKey, err) + } + fileSrc, err := ioutil.ReadAll(r) + if err != nil { + return nil, fmt.Errorf("failed to read snapshot of %s from module %q: %s", fileName, moduleKey, err) + } + + if _, exists := snap.Modules[moduleKey]; !exists { + snap.Modules[moduleKey] = &configload.SnapshotModule{ + Files: map[string][]byte{}, + // Will fill in everything else afterwards, when we + // process the manifest. 
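+ // A single manifest entry, for illustration only (the values shown are
+ // hypothetical), looks like:
+ //
+ //     {"Key": "child_a", "Source": "./child_a", "Dir": "mods/child_a"}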
+ } + } + snap.Modules[moduleKey].Files[fileName] = fileSrc + } + } + + if manifestSrc == nil { + return nil, fmt.Errorf("config snapshot does not have manifest file") + } + + var manifest configSnapshotModuleManifest + err := json.Unmarshal(manifestSrc, &manifest) + if err != nil { + return nil, fmt.Errorf("invalid module manifest: %s", err) + } + + for _, record := range manifest { + modSnap, exists := snap.Modules[record.Key] + if !exists { + // We'll allow this, assuming that it's a module with no files. + // This is still weird, since we generally reject modules with + // no files, but we'll allow it because downstream errors will + // catch it in that case. + modSnap = &configload.SnapshotModule{ + Files: map[string][]byte{}, + } + snap.Modules[record.Key] = modSnap + } + modSnap.SourceAddr = record.SourceAddr + modSnap.Dir = record.Dir + if record.VersionStr != "" { + v, err := version.NewVersion(record.VersionStr) + if err != nil { + return nil, fmt.Errorf("manifest has invalid version string %q for module %q", record.VersionStr, record.Key) + } + modSnap.Version = v + } + } + + // Finally, we'll make sure we don't have any errant files for modules that + // aren't in the manifest. + for k := range snap.Modules { + found := false + for _, record := range manifest { + if record.Key == k { + found = true + break + } + } + if !found { + return nil, fmt.Errorf("found files for module %q that isn't recorded in the manifest", k) + } + } + + return snap, nil +} + +// writeConfigSnapshot adds to the given zip.Writer one or more files +// representing the given snapshot. +// +// This file creates new files in the writer, so any already-open writer +// for the file will be invalidated by this call. The writer remains open +// when this function returns. +func writeConfigSnapshot(snap *configload.Snapshot, z *zip.Writer) error { + // Errors from this function are expected to be reported with some + // additional prefix context about them being in a config snapshot, + // so they should not themselves refer to the config snapshot. + // They are also indicative of a bug in the caller, so they do not + // need to be user-actionable. + + var manifest configSnapshotModuleManifest + keys := make([]string, 0, len(snap.Modules)) + for k := range snap.Modules { + keys = append(keys, k) + } + sort.Strings(keys) + + // We'll re-use this fileheader for each Create we do below. 
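+ // The entries written below give the archive a layout like the following
+ // (an illustrative example; the module keys and file names are
+ // hypothetical, and the empty key "" is the root module):
+ //
+ //     tfconfig/m-/root.tf
+ //     tfconfig/m-child_a/child_a.tf
+ //     tfconfig/modules.json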
+ + for _, k := range keys { + snapMod := snap.Modules[k] + record := configSnapshotModuleRecord{ + Dir: snapMod.Dir, + Key: k, + SourceAddr: snapMod.SourceAddr, + } + if snapMod.Version != nil { + record.VersionStr = snapMod.Version.String() + } + manifest = append(manifest, record) + + pathPrefix := fmt.Sprintf("%s%s/", configSnapshotModulePrefix, k) + for filename, src := range snapMod.Files { + zh := &zip.FileHeader{ + Name: pathPrefix + filename, + Method: zip.Deflate, + Modified: time.Now(), + } + w, err := z.CreateHeader(zh) + if err != nil { + return fmt.Errorf("failed to create snapshot of %s from module %q: %s", zh.Name, k, err) + } + _, err = w.Write(src) + if err != nil { + return fmt.Errorf("failed to write snapshot of %s from module %q: %s", zh.Name, k, err) + } + } + } + + // Now we'll write our manifest + { + zh := &zip.FileHeader{ + Name: configSnapshotManifestFile, + Method: zip.Deflate, + Modified: time.Now(), + } + src, err := json.MarshalIndent(manifest, "", " ") + if err != nil { + return fmt.Errorf("failed to serialize module manifest: %s", err) + } + w, err := z.CreateHeader(zh) + if err != nil { + return fmt.Errorf("failed to create module manifest: %s", err) + } + _, err = w.Write(src) + if err != nil { + return fmt.Errorf("failed to write module manifest: %s", err) + } + } + + return nil +} diff --git a/plans/planfile/config_snapshot_test.go b/plans/planfile/config_snapshot_test.go new file mode 100644 index 000000000..2efd0031f --- /dev/null +++ b/plans/planfile/config_snapshot_test.go @@ -0,0 +1,52 @@ +package planfile + +import ( + "archive/zip" + "bytes" + "path/filepath" + "reflect" + "testing" + + "github.com/davecgh/go-spew/spew" + + "github.com/hashicorp/terraform/configs/configload" +) + +func TestConfigSnapshotRoundtrip(t *testing.T) { + fixtureDir := filepath.Join("testdata", "test-config") + loader, err := configload.NewLoader(&configload.Config{ + ModulesDir: filepath.Join(fixtureDir, ".terraform", "modules"), + }) + if err != nil { + t.Fatal(err) + } + + _, snapIn, diags := loader.LoadConfigWithSnapshot(fixtureDir) + if diags.HasErrors() { + t.Fatal(diags.Error()) + } + + var buf bytes.Buffer + zw := zip.NewWriter(&buf) + err = writeConfigSnapshot(snapIn, zw) + if err != nil { + t.Fatalf("failed to write snapshot: %s", err) + } + zw.Close() + + raw := buf.Bytes() + r := bytes.NewReader(raw) + zr, err := zip.NewReader(r, int64(len(raw))) + if err != nil { + t.Fatal(err) + } + + snapOut, err := readConfigSnapshot(zr) + if err != nil { + t.Fatalf("failed to read snapshot: %s", err) + } + + if !reflect.DeepEqual(snapIn, snapOut) { + t.Errorf("result does not match input\nresult: %sinput: %s", spew.Sdump(snapOut), spew.Sdump(snapIn)) + } +} diff --git a/plans/planfile/doc.go b/plans/planfile/doc.go new file mode 100644 index 000000000..edd16af2b --- /dev/null +++ b/plans/planfile/doc.go @@ -0,0 +1,6 @@ +// Package planfile deals with the file format used to serialize plans to disk +// and then deserialize them back into memory later. +// +// A plan file contains the planned changes along with the configuration and +// state snapshot that they are based on. 
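+//
+// A hypothetical usage sketch for reading a plan file (error handling
+// abbreviated; the file name "tfplan" is illustrative):
+//
+//     r, err := planfile.Open("tfplan")
+//     if err != nil {
+//         // handle the error
+//     }
+//     defer r.Close()
+//     plan, err := r.ReadPlan()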
+package planfile diff --git a/plans/planfile/planfile_test.go b/plans/planfile/planfile_test.go new file mode 100644 index 000000000..71619ef0a --- /dev/null +++ b/plans/planfile/planfile_test.go @@ -0,0 +1,112 @@ +package planfile + +import ( + "io/ioutil" + "path/filepath" + "reflect" + "testing" + + "github.com/davecgh/go-spew/spew" + + "github.com/hashicorp/terraform/plans" + + version "github.com/hashicorp/go-version" + + "github.com/hashicorp/terraform/configs/configload" + "github.com/hashicorp/terraform/states" + "github.com/hashicorp/terraform/states/statefile" +) + +func TestRoundtrip(t *testing.T) { + fixtureDir := filepath.Join("testdata", "test-config") + loader, err := configload.NewLoader(&configload.Config{ + ModulesDir: filepath.Join(fixtureDir, ".terraform", "modules"), + }) + if err != nil { + t.Fatal(err) + } + + _, snapIn, diags := loader.LoadConfigWithSnapshot(fixtureDir) + if diags.HasErrors() { + t.Fatal(diags.Error()) + } + + // Just a minimal state file so we can test that it comes out again at all. + // We don't need to test the entire thing because the state file + // serialization is already tested in its own package. + stateFileIn := &statefile.File{ + TerraformVersion: version.Must(version.NewVersion("1.0.0")), + Serial: 1, + Lineage: "abc123", + State: states.NewState(), + } + + // Minimal plan too, since the serialization of the tfplan portion of the + // file is tested more fully in tfplan_test.go . + planIn := &plans.Plan{ + Changes: &plans.Changes{ + Resources: []*plans.ResourceInstanceChange{}, + RootOutputs: map[string]*plans.OutputChange{}, + }, + ProviderSHA256s: map[string][]byte{}, + VariableValues: map[string]plans.DynamicValue{ + "foo": plans.DynamicValue([]byte("foo placeholder")), + }, + } + + workDir, err := ioutil.TempDir("", "tf-planfile") + if err != nil { + t.Fatal(err) + } + planFn := filepath.Join(workDir, "tfplan") + + err = Create(planFn, snapIn, stateFileIn, planIn) + if err != nil { + t.Fatalf("failed to create plan file: %s", err) + } + + pr, err := Open(planFn) + if err != nil { + t.Fatalf("failed to open plan file for reading: %s", err) + } + + t.Run("ReadPlan", func(t *testing.T) { + planOut, err := pr.ReadPlan() + if err != nil { + t.Fatalf("failed to read plan: %s", err) + } + if !reflect.DeepEqual(planIn, planOut) { + t.Errorf("plan did not survive round-trip\nresult: %sinput: %s", spew.Sdump(planOut), spew.Sdump(planIn)) + } + }) + + t.Run("ReadStateFile", func(t *testing.T) { + stateFileOut, err := pr.ReadStateFile() + if err != nil { + t.Fatalf("failed to read state: %s", err) + } + if !reflect.DeepEqual(stateFileIn, stateFileOut) { + t.Errorf("state file did not survive round-trip\nresult: %sinput: %s", spew.Sdump(stateFileOut), spew.Sdump(stateFileIn)) + } + }) + + t.Run("ReadConfigSnapshot", func(t *testing.T) { + snapOut, err := pr.ReadConfigSnapshot() + if err != nil { + t.Fatalf("failed to read config snapshot: %s", err) + } + if !reflect.DeepEqual(snapIn, snapOut) { + t.Errorf("config snapshot did not survive round-trip\nresult: %sinput: %s", spew.Sdump(snapOut), spew.Sdump(snapIn)) + } + }) + + t.Run("ReadConfig", func(t *testing.T) { + // Reading from snapshots is tested in the configload package, so + // here we'll just test that we can successfully do it, to see if the + // glue code in _this_ package is correct. 
+ _, diags := pr.ReadConfig() + if diags.HasErrors() { + t.Errorf("when reading config: %s", diags.Err()) + } + }) +} diff --git a/plans/planfile/reader.go b/plans/planfile/reader.go new file mode 100644 index 000000000..579e28599 --- /dev/null +++ b/plans/planfile/reader.go @@ -0,0 +1,148 @@ +package planfile + +import ( + "archive/zip" + "bytes" + "fmt" + "io/ioutil" + + "github.com/hashicorp/terraform/configs" + "github.com/hashicorp/terraform/configs/configload" + "github.com/hashicorp/terraform/plans" + "github.com/hashicorp/terraform/states/statefile" + "github.com/hashicorp/terraform/tfdiags" +) + +const tfstateFilename = "tfstate" + +// Reader is the main type used to read plan files. Create a Reader by calling +// Open. +// +// A plan file is a random-access file format, so methods of Reader must +// be used to access the individual portions of the file for further +// processing. +type Reader struct { + zip *zip.ReadCloser +} + +// Open creates a Reader for the file at the given filename, or returns an +// error if the file doesn't seem to be a planfile. +func Open(filename string) (*Reader, error) { + r, err := zip.OpenReader(filename) + if err != nil { + // To give a better error message, we'll sniff to see if this looks + // like our old plan format from versions prior to 0.12. + if b, sErr := ioutil.ReadFile(filename); sErr == nil { + if bytes.HasPrefix(b, []byte("tfplan")) { + return nil, fmt.Errorf("the given plan file was created by an earlier version of Terraform; plan files cannot be shared between different Terraform versions") + } + } + return nil, err + } + + // Sniff to make sure this looks like a plan file, as opposed to any other + // random zip file the user might have around. + var planFile *zip.File + for _, file := range r.File { + if file.Name == tfplanFilename { + planFile = file + break + } + } + if planFile == nil { + return nil, fmt.Errorf("the given file is not a valid plan file") + } + + // For now, we'll just accept the presence of the tfplan file as enough, + // and wait to validate the version when the caller requests the plan + // itself. + + return &Reader{ + zip: r, + }, nil +} + +// ReadPlan reads the plan embedded in the plan file. +// +// Errors can be returned for various reasons, including if the plan file +// is not of an appropriate format version, if it was created by a different +// version of Terraform, if it is invalid, etc. +func (r *Reader) ReadPlan() (*plans.Plan, error) { + var planFile *zip.File + for _, file := range r.zip.File { + if file.Name == tfplanFilename { + planFile = file + break + } + } + if planFile == nil { + // This should never happen because we checked for this file during + // Open, but we'll check anyway to be safe. + return nil, fmt.Errorf("the plan file is invalid") + } + + pr, err := planFile.Open() + if err != nil { + return nil, fmt.Errorf("failed to retrieve plan from plan file: %s", err) + } + defer pr.Close() + + return readTfplan(pr) +} + +// ReadStateFile reads the state file embedded in the plan file. +// +// If the plan file contains no embedded state file, the returned error is +// statefile.ErrNoState. 
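+//
+// A hypothetical caller distinguishing the "no prior state" case from other
+// failures might look like this:
+//
+//     sf, err := r.ReadStateFile()
+//     if err == statefile.ErrNoState {
+//         // the plan was created against an empty state
+//     }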
+func (r *Reader) ReadStateFile() (*statefile.File, error) { + for _, file := range r.zip.File { + if file.Name == tfstateFilename { + r, err := file.Open() + if err != nil { + return nil, fmt.Errorf("failed to extract state from plan file: %s", err) + } + return statefile.Read(r) + } + } + return nil, statefile.ErrNoState +} + +// ReadConfigSnapshot reads the configuration snapshot embedded in the plan +// file. +// +// This is a lower-level alternative to ReadConfig that just extracts the +// source files, without attempting to parse them. +func (r *Reader) ReadConfigSnapshot() (*configload.Snapshot, error) { + return readConfigSnapshot(&r.zip.Reader) +} + +// ReadConfig reads the configuration embedded in the plan file. +// +// Internally this function delegates to the configs/configload package to +// parse the embedded configuration and so it returns diagnostics (rather than +// a native Go error as with other methods on Reader). +func (r *Reader) ReadConfig() (*configs.Config, tfdiags.Diagnostics) { + var diags tfdiags.Diagnostics + + snap, err := r.ReadConfigSnapshot() + if err != nil { + diags = diags.Append(tfdiags.Sourceless( + tfdiags.Error, + "Failed to read configuration from plan file", + fmt.Sprintf("The configuration file snapshot in the plan file could not be read: %s.", err), + )) + return nil, diags + } + + loader := configload.NewLoaderFromSnapshot(snap) + rootDir := snap.Modules[""].Dir // Root module base directory + config, configDiags := loader.LoadConfig(rootDir) + diags = diags.Append(configDiags) + + return config, diags +} + +// Close closes the file, after which no other operations may be performed. +func (r *Reader) Close() error { + return r.zip.Close() +} diff --git a/plans/planfile/testdata/test-config/.terraform/modules/child_a/child_a.tf b/plans/planfile/testdata/test-config/.terraform/modules/child_a/child_a.tf new file mode 100644 index 000000000..2f4d0f1a0 --- /dev/null +++ b/plans/planfile/testdata/test-config/.terraform/modules/child_a/child_a.tf @@ -0,0 +1,4 @@ + +module "child_c" { + source = "./child_c" +} diff --git a/plans/planfile/testdata/test-config/.terraform/modules/child_a/child_c/child_c.tf b/plans/planfile/testdata/test-config/.terraform/modules/child_a/child_c/child_c.tf new file mode 100644 index 000000000..785d98d98 --- /dev/null +++ b/plans/planfile/testdata/test-config/.terraform/modules/child_a/child_c/child_c.tf @@ -0,0 +1,4 @@ + +output "hello" { + value = "Hello from child_c" +} diff --git a/plans/planfile/testdata/test-config/.terraform/modules/child_b.child_d/child_d.tf b/plans/planfile/testdata/test-config/.terraform/modules/child_b.child_d/child_d.tf new file mode 100644 index 000000000..145576a36 --- /dev/null +++ b/plans/planfile/testdata/test-config/.terraform/modules/child_b.child_d/child_d.tf @@ -0,0 +1,4 @@ + +output "hello" { + value = "Hello from child_d" +} diff --git a/plans/planfile/testdata/test-config/.terraform/modules/child_b/child_b.tf b/plans/planfile/testdata/test-config/.terraform/modules/child_b/child_b.tf new file mode 100644 index 000000000..4a1b247d3 --- /dev/null +++ b/plans/planfile/testdata/test-config/.terraform/modules/child_b/child_b.tf @@ -0,0 +1,5 @@ + +module "child_d" { + source = "example.com/foo/bar_d/baz" + # Intentionally no version here +} diff --git a/plans/planfile/testdata/test-config/.terraform/modules/modules.json b/plans/planfile/testdata/test-config/.terraform/modules/modules.json new file mode 100644 index 000000000..ba691877f --- /dev/null +++ 
b/plans/planfile/testdata/test-config/.terraform/modules/modules.json @@ -0,0 +1,32 @@ +{ + "Modules": [ + { + "Key": "", + "Source": "", + "Dir": "testdata/test-config" + }, + { + "Key": "child_a", + "Source": "example.com/foo/bar_a/baz", + "Version": "1.0.1", + "Dir": "testdata/test-config/.terraform/modules/child_a" + }, + { + "Key": "child_b", + "Source": "example.com/foo/bar_b/baz", + "Version": "1.0.0", + "Dir": "testdata/test-config/.terraform/modules/child_b" + }, + { + "Key": "child_a.child_c", + "Source": "./child_c", + "Dir": "testdata/test-config/.terraform/modules/child_a/child_c" + }, + { + "Key": "child_b.child_d", + "Source": "example.com/foo/bar_d/baz", + "Version": "1.2.0", + "Dir": "testdata/test-config/.terraform/modules/child_b.child_d" + } + ] +} diff --git a/plans/planfile/testdata/test-config/root.tf b/plans/planfile/testdata/test-config/root.tf new file mode 100644 index 000000000..8a4473942 --- /dev/null +++ b/plans/planfile/testdata/test-config/root.tf @@ -0,0 +1,10 @@ + +module "child_a" { + source = "example.com/foo/bar_a/baz" + version = ">= 1.0.0" +} + +module "child_b" { + source = "example.com/foo/bar_b/baz" + version = ">= 1.0.0" +} diff --git a/plans/planfile/tfplan.go b/plans/planfile/tfplan.go new file mode 100644 index 000000000..db93491d0 --- /dev/null +++ b/plans/planfile/tfplan.go @@ -0,0 +1,390 @@ +package planfile + +import ( + "fmt" + "io" + "io/ioutil" + + "github.com/golang/protobuf/proto" + + "github.com/hashicorp/terraform/addrs" + "github.com/hashicorp/terraform/plans" + "github.com/hashicorp/terraform/plans/internal/planproto" + "github.com/hashicorp/terraform/states" + "github.com/hashicorp/terraform/tfdiags" + "github.com/hashicorp/terraform/version" +) + +const tfplanFormatVersion = 3 +const tfplanFilename = "tfplan" + +// --------------------------------------------------------------------------- +// This file deals with the internal structure of the "tfplan" sub-file within +// the plan file format. It's all private API, wrapped by methods defined +// elsewhere. This is the only file that should import the +// ../internal/planproto package, which contains the ugly stubs generated +// by the protobuf compiler. +// --------------------------------------------------------------------------- + +// readTfplan reads a protobuf-encoded description from the plan portion of +// a plan file, which is stored in a special file in the archive called +// "tfplan". 
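+//
+// For illustration: Reader.ReadPlan is the caller of this function. It
+// locates the "tfplan" entry in the zip archive, opens it, and passes the
+// resulting reader here (variable names below are hypothetical):
+//
+//     rc, _ := planEntry.Open() // planEntry is the *zip.File named "tfplan"
+//     defer rc.Close()
+//     plan, err := readTfplan(rc)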
+func readTfplan(r io.Reader) (*plans.Plan, error) { + src, err := ioutil.ReadAll(r) + if err != nil { + return nil, err + } + + var rawPlan planproto.Plan + err = proto.Unmarshal(src, &rawPlan) + if err != nil { + return nil, fmt.Errorf("parse error: %s", err) + } + + if rawPlan.Version != tfplanFormatVersion { + return nil, fmt.Errorf("unsupported plan file format version %d; only version %d is supported", rawPlan.Version, tfplanFormatVersion) + } + + if rawPlan.TerraformVersion != version.String() { + return nil, fmt.Errorf("plan file was created by Terraform %s, but this is %s; plan files cannot be transferred between different Terraform versions", rawPlan.TerraformVersion, version.String()) + } + + plan := &plans.Plan{ + VariableValues: map[string]plans.DynamicValue{}, + Changes: &plans.Changes{ + RootOutputs: map[string]*plans.OutputChange{}, + Resources: []*plans.ResourceInstanceChange{}, + }, + + ProviderSHA256s: map[string][]byte{}, + } + + for _, rawOC := range rawPlan.OutputChanges { + name := rawOC.Name + change, err := changeFromTfplan(rawOC.Change) + if err != nil { + return nil, fmt.Errorf("invalid plan for output %q: %s", name, err) + } + + plan.Changes.RootOutputs[name] = &plans.OutputChange{ + Change: *change, + Sensitive: rawOC.Sensitive, + } + } + + for _, rawRC := range rawPlan.ResourceChanges { + change, err := resourceChangeFromTfplan(rawRC) + if err != nil { + // errors from resourceChangeFromTfplan already include context + return nil, err + } + + plan.Changes.Resources = append(plan.Changes.Resources, change) + } + + for name, rawHashObj := range rawPlan.ProviderHashes { + if len(rawHashObj.Sha256) == 0 { + return nil, fmt.Errorf("no SHA256 hash for provider %q plugin", name) + } + + plan.ProviderSHA256s[name] = rawHashObj.Sha256 + } + + for name, rawVal := range rawPlan.Variables { + val, err := valueFromTfplan(rawVal) + if err != nil { + return nil, fmt.Errorf("invalid value for input variable %q: %s", name, err) + } + plan.VariableValues[name] = val + } + + return plan, nil +} + +func resourceChangeFromTfplan(rawChange *planproto.ResourceInstanceChange) (*plans.ResourceInstanceChange, error) { + if rawChange == nil { + // Should never happen in practice, since protobuf can't represent + // a nil value in a list. 
+ return nil, fmt.Errorf("resource change object is absent") + } + + ret := &plans.ResourceInstanceChange{} + + moduleAddr := addrs.RootModuleInstance + if rawChange.ModulePath != "" { + var diags tfdiags.Diagnostics + moduleAddr, diags = addrs.ParseModuleInstanceStr(rawChange.ModulePath) + if diags.HasErrors() { + return nil, diags.Err() + } + } + + var mode addrs.ResourceMode + switch rawChange.Mode { + case planproto.ResourceInstanceChange_managed: + mode = addrs.ManagedResourceMode + case planproto.ResourceInstanceChange_data: + mode = addrs.DataResourceMode + default: + return nil, fmt.Errorf("resource has invalid mode %s", rawChange.Mode) + } + + typeName := rawChange.Type + name := rawChange.Name + + resAddr := addrs.Resource{ + Mode: mode, + Type: typeName, + Name: name, + } + + var instKey addrs.InstanceKey + switch rawTk := rawChange.InstanceKey.(type) { + case *planproto.ResourceInstanceChange_Int: + instKey = addrs.IntKey(rawTk.Int) + case *planproto.ResourceInstanceChange_Str: + instKey = addrs.StringKey(rawTk.Str) + default: + return nil, fmt.Errorf("instance of %s has invalid key type %T", resAddr.Absolute(moduleAddr), rawChange.InstanceKey) + } + + ret.Addr = resAddr.Instance(instKey).Absolute(moduleAddr) + + if rawChange.DeposedKey != "" { + if len(rawChange.DeposedKey) != 8 { + return nil, fmt.Errorf("deposed object for %s has invalid deposed key %q", ret.Addr, rawChange.DeposedKey) + } + ret.DeposedKey = states.DeposedKey(rawChange.DeposedKey) + } + + change, err := changeFromTfplan(rawChange.Change) + if err != nil { + return nil, fmt.Errorf("invalid plan for resource %s: %s", ret.Addr, err) + } + + ret.Change = *change + + return ret, nil +} + +func changeFromTfplan(rawChange *planproto.Change) (*plans.Change, error) { + if rawChange == nil { + return nil, fmt.Errorf("change object is absent") + } + + ret := &plans.Change{} + + // -1 indicates that there is no index. We'll customize these below + // depending on the change action, and then decode. 
+ beforeIdx, afterIdx := -1, -1 + + switch rawChange.Action { + case planproto.Action_NOOP: + ret.Action = plans.NoOp + beforeIdx = 0 + afterIdx = 0 + case planproto.Action_CREATE: + ret.Action = plans.Create + afterIdx = 0 + case planproto.Action_READ: + ret.Action = plans.Read + beforeIdx = 0 + afterIdx = 1 + case planproto.Action_UPDATE: + ret.Action = plans.Update + beforeIdx = 0 + afterIdx = 1 + case planproto.Action_REPLACE: + ret.Action = plans.Replace + beforeIdx = 0 + afterIdx = 1 + case planproto.Action_DELETE: + ret.Action = plans.Delete + beforeIdx = 0 + default: + return nil, fmt.Errorf("invalid change action %s", rawChange.Action) + } + + if beforeIdx != -1 { + if l := len(rawChange.Values); l <= beforeIdx { + return nil, fmt.Errorf("incorrect number of values (%d) for %s change", l, rawChange.Action) + } + var err error + ret.Before, err = valueFromTfplan(rawChange.Values[beforeIdx]) + if err != nil { + return nil, fmt.Errorf("invalid \"before\" value: %s", err) + } + if ret.Before == nil { + return nil, fmt.Errorf("missing \"before\" value: %s", err) + } + } + if afterIdx != -1 { + if l := len(rawChange.Values); l <= afterIdx { + return nil, fmt.Errorf("incorrect number of values (%d) for %s change", l, rawChange.Action) + } + var err error + ret.After, err = valueFromTfplan(rawChange.Values[afterIdx]) + if err != nil { + return nil, fmt.Errorf("invalid \"after\" value: %s", err) + } + if ret.After == nil { + return nil, fmt.Errorf("missing \"after\" value: %s", err) + } + } + + return ret, nil +} + +func valueFromTfplan(rawV *planproto.DynamicValue) (plans.DynamicValue, error) { + if len(rawV.Msgpack) == 0 { // len(0) because that's the default value for a "bytes" in protobuf + return nil, fmt.Errorf("dynamic value does not have msgpack serialization") + } + + return plans.DynamicValue(rawV.Msgpack), nil +} + +// writeTfplan serializes the given plan into the protobuf-based format used +// for the "tfplan" portion of a plan file. +func writeTfplan(plan *plans.Plan, w io.Writer) error { + rawPlan := &planproto.Plan{ + Version: tfplanFormatVersion, + TerraformVersion: version.String(), + ProviderHashes: map[string]*planproto.Hash{}, + + Variables: map[string]*planproto.DynamicValue{}, + OutputChanges: []*planproto.OutputChange{}, + ResourceChanges: []*planproto.ResourceInstanceChange{}, + } + + for name, oc := range plan.Changes.RootOutputs { + // Writing outputs as cty.DynamicPseudoType forces the stored values + // to also contain dynamic type information, so we can recover the + // original type when we read the values back in readTFPlan. 
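+		// (Illustrative only: a value stored that way would typically be
+		// produced upstream with something like
+		//     dv, _ := plans.NewDynamicValue(val, cty.DynamicPseudoType)
+		// so that the msgpack serialization records the value's type
+		// alongside the value itself.)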
+ protoChange, err := changeToTfplan(&oc.Change) + if err != nil { + return fmt.Errorf("cannot write output value %q: %s", name, err) + } + + rawPlan.OutputChanges = append(rawPlan.OutputChanges, &planproto.OutputChange{ + Name: name, + Change: protoChange, + Sensitive: oc.Sensitive, + }) + } + + for _, rc := range plan.Changes.Resources { + rawRC, err := resourceChangeToTfplan(rc) + if err != nil { + return err + } + rawPlan.ResourceChanges = append(rawPlan.ResourceChanges, rawRC) + } + + for name, hash := range plan.ProviderSHA256s { + rawPlan.ProviderHashes[name] = &planproto.Hash{ + Sha256: hash, + } + } + + for name, val := range plan.VariableValues { + rawPlan.Variables[name] = valueToTfplan(val) + } + + src, err := proto.Marshal(rawPlan) + if err != nil { + return fmt.Errorf("serialization error: %s", err) + } + + _, err = w.Write(src) + if err != nil { + return fmt.Errorf("failed to write plan to plan file: %s", err) + } + + return nil +} + +func resourceChangeToTfplan(change *plans.ResourceInstanceChange) (*planproto.ResourceInstanceChange, error) { + ret := &planproto.ResourceInstanceChange{} + + ret.ModulePath = change.Addr.Module.String() + + relAddr := change.Addr.Resource + + switch relAddr.Resource.Mode { + case addrs.ManagedResourceMode: + ret.Mode = planproto.ResourceInstanceChange_managed + case addrs.DataResourceMode: + ret.Mode = planproto.ResourceInstanceChange_data + default: + return nil, fmt.Errorf("resource %s has unsupported mode %s", relAddr, relAddr.Resource.Mode) + } + + ret.Type = relAddr.Resource.Type + ret.Name = relAddr.Resource.Name + + switch tk := relAddr.Key.(type) { + case addrs.IntKey: + ret.InstanceKey = &planproto.ResourceInstanceChange_Int{ + Int: int64(tk), + } + case addrs.StringKey: + ret.InstanceKey = &planproto.ResourceInstanceChange_Str{ + Str: string(tk), + } + default: + return nil, fmt.Errorf("resource %s has unsupported instance key type %T", relAddr, relAddr.Key) + } + + ret.DeposedKey = string(change.DeposedKey) + + valChange, err := changeToTfplan(&change.Change) + if err != nil { + return nil, fmt.Errorf("failed to serialize resource %s change: %s", relAddr, err) + } + ret.Change = valChange + + return ret, nil +} + +func changeToTfplan(change *plans.Change) (*planproto.Change, error) { + ret := &planproto.Change{} + + before := valueToTfplan(change.Before) + after := valueToTfplan(change.After) + + switch change.Action { + case plans.NoOp: + ret.Action = planproto.Action_NOOP + ret.Values = []*planproto.DynamicValue{before} // before and after should be identical + case plans.Create: + ret.Action = planproto.Action_CREATE + ret.Values = []*planproto.DynamicValue{after} + case plans.Read: + ret.Action = planproto.Action_READ + ret.Values = []*planproto.DynamicValue{before, after} + case plans.Update: + ret.Action = planproto.Action_UPDATE + ret.Values = []*planproto.DynamicValue{before, after} + case plans.Replace: + ret.Action = planproto.Action_REPLACE + ret.Values = []*planproto.DynamicValue{before, after} + case plans.Delete: + ret.Action = planproto.Action_DELETE + ret.Values = []*planproto.DynamicValue{before} + default: + return nil, fmt.Errorf("invalid change action %s", change.Action) + } + + return ret, nil +} + +func valueToTfplan(val plans.DynamicValue) *planproto.DynamicValue { + if val == nil { + // protobuf can't represent nil, so we'll represent it as a + // DynamicValue that has no serializations at all. 
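+		// (The reader side mirrors this: valueFromTfplan reports an error
+		// for a DynamicValue whose Msgpack field is zero-length.)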
+ return &planproto.DynamicValue{} + } + return &planproto.DynamicValue{ + Msgpack: []byte(val), + } +} diff --git a/plans/planfile/tfplan_test.go b/plans/planfile/tfplan_test.go new file mode 100644 index 000000000..0489ff42f --- /dev/null +++ b/plans/planfile/tfplan_test.go @@ -0,0 +1,133 @@ +package planfile + +import ( + "bytes" + "testing" + + "github.com/go-test/deep" + "github.com/zclconf/go-cty/cty" + + "github.com/hashicorp/terraform/addrs" + "github.com/hashicorp/terraform/plans" +) + +func TestTFPlanRoundTrip(t *testing.T) { + objTy := cty.Object(map[string]cty.Type{ + "id": cty.String, + }) + + plan := &plans.Plan{ + VariableValues: map[string]plans.DynamicValue{ + "foo": mustNewDynamicValueStr("foo value"), + }, + Changes: &plans.Changes{ + RootOutputs: map[string]*plans.OutputChange{ + "bar": { + Change: plans.Change{ + Action: plans.Create, + After: mustNewDynamicValueStr("bar value"), + }, + Sensitive: false, + }, + "baz": { + Change: plans.Change{ + Action: plans.NoOp, + Before: mustNewDynamicValueStr("baz value"), + After: mustNewDynamicValueStr("baz value"), + }, + Sensitive: false, + }, + "secret": { + Change: plans.Change{ + Action: plans.Update, + Before: mustNewDynamicValueStr("old secret value"), + After: mustNewDynamicValueStr("new secret value"), + }, + Sensitive: true, + }, + }, + Resources: []*plans.ResourceInstanceChange{ + { + Addr: addrs.Resource{ + Mode: addrs.ManagedResourceMode, + Type: "test_thing", + Name: "woot", + }.Instance(addrs.IntKey(0)).Absolute(addrs.RootModuleInstance), + Change: plans.Change{ + Action: plans.Replace, + Before: mustNewDynamicValue(cty.ObjectVal(map[string]cty.Value{ + "id": cty.StringVal("foo-bar-baz"), + }), objTy), + After: mustNewDynamicValue(cty.ObjectVal(map[string]cty.Value{ + "id": cty.UnknownVal(cty.String), + }), objTy), + }, + }, + { + Addr: addrs.Resource{ + Mode: addrs.ManagedResourceMode, + Type: "test_thing", + Name: "woot", + }.Instance(addrs.IntKey(0)).Absolute(addrs.RootModuleInstance), + DeposedKey: "foodface", + Change: plans.Change{ + Action: plans.Delete, + Before: mustNewDynamicValue(cty.ObjectVal(map[string]cty.Value{ + "id": cty.StringVal("bar-baz-foo"), + }), objTy), + }, + }, + }, + }, + ProviderSHA256s: map[string][]byte{ + "test": []byte{ + 0xba, 0x5e, 0x1e, 0x55, 0xb0, 0x1d, 0xfa, 0xce, + 0xef, 0xfe, 0xc7, 0xed, 0x1a, 0xbe, 0x11, 0xed, + 0x5c, 0xa1, 0xab, 0x1e, 0xda, 0x7a, 0xba, 0x5e, + 0x70, 0x7a, 0x11, 0xed, 0xb0, 0x07, 0xab, 0x1e, + }, + }, + } + + var buf bytes.Buffer + err := writeTfplan(plan, &buf) + if err != nil { + t.Fatal(err) + } + + newPlan, err := readTfplan(&buf) + if err != nil { + t.Fatal(err) + } + + { + oldDepth := deep.MaxDepth + oldCompare := deep.CompareUnexportedFields + deep.MaxDepth = 20 + deep.CompareUnexportedFields = true + defer func() { + deep.MaxDepth = oldDepth + deep.CompareUnexportedFields = oldCompare + }() + } + for _, problem := range deep.Equal(newPlan, plan) { + t.Error(problem) + } +} + +func mustNewDynamicValue(val cty.Value, ty cty.Type) plans.DynamicValue { + ret, err := plans.NewDynamicValue(val, ty) + if err != nil { + panic(err) + } + return ret +} + +func mustNewDynamicValueStr(val string) plans.DynamicValue { + realVal := cty.StringVal(val) + ret, err := plans.NewDynamicValue(realVal, cty.String) + if err != nil { + panic(err) + } + return ret +} diff --git a/plans/planfile/writer.go b/plans/planfile/writer.go new file mode 100644 index 000000000..7759463b3 --- /dev/null +++ b/plans/planfile/writer.go @@ -0,0 +1,72 @@ +package planfile + +import ( + 
"archive/zip" + "fmt" + "os" + "time" + + "github.com/hashicorp/terraform/configs/configload" + "github.com/hashicorp/terraform/plans" + "github.com/hashicorp/terraform/states/statefile" +) + +// Create creates a new plan file with the given filename, overwriting any +// file that might already exist there. +// +// A plan file contains both a snapshot of the configuration and of the latest +// state file in addition to the plan itself, so that Terraform can detect +// if the world has changed since the plan was created and thus refuse to +// apply it. +func Create(filename string, configSnap *configload.Snapshot, stateFile *statefile.File, plan *plans.Plan) error { + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + zw := zip.NewWriter(f) + defer zw.Close() + + // tfplan file + { + w, err := zw.CreateHeader(&zip.FileHeader{ + Name: tfplanFilename, + Method: zip.Deflate, + Modified: time.Now(), + }) + if err != nil { + return fmt.Errorf("failed to create tfplan file: %s", err) + } + err = writeTfplan(plan, w) + if err != nil { + return fmt.Errorf("failed to write plan: %s", err) + } + } + + // tfstate file + { + w, err := zw.CreateHeader(&zip.FileHeader{ + Name: tfstateFilename, + Method: zip.Deflate, + Modified: time.Now(), + }) + if err != nil { + return fmt.Errorf("failed to create embedded tfstate file: %s", err) + } + err = statefile.Write(stateFile, w) + if err != nil { + return fmt.Errorf("failed to write state snapshot: %s", err) + } + } + + // tfconfig directory + { + err := writeConfigSnapshot(configSnap, zw) + if err != nil { + return fmt.Errorf("failed to write config snapshot: %s", err) + } + } + + return nil +}