use recorded changes for outputs

We record output changes in the plan, but currently use them only for
display. If a wholly known output value is stored in the plan, we should
prefer it during apply in order to ensure consistency with the planned
values. This also avoids cases where evaluation during apply cannot
happen correctly, such as when all resources are being removed or we are
executing a destroy.
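
In rough terms, the apply step should consume the planned value whenever it
can. Below is a minimal sketch of that decision in plain Go; the helper name
resolveOutputValue and the values in main are illustrative only, not
Terraform APIs:

    package main

    import (
        "fmt"

        "github.com/zclconf/go-cty/cty"
    )

    // resolveOutputValue prefers a recorded plan value when it is wholly known,
    // and only falls back to re-evaluating the output expression otherwise.
    func resolveOutputValue(planned *cty.Value, evaluate func() cty.Value) cty.Value {
        if planned != nil && planned.IsWhollyKnown() {
            return *planned
        }
        return evaluate()
    }

    func main() {
        planned := cty.StringVal("ami-1234")
        reEvaluate := func() cty.Value { return cty.UnknownVal(cty.String) }

        // The wholly known planned value wins; evaluation is skipped.
        fmt.Printf("%#v\n", resolveOutputValue(&planned, reEvaluate))
    }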

We also need to record output Delete changes when the plan is for a
destroy operation. Otherwise, without a recorded change, the apply step
will attempt to evaluate the outputs, causing errors or leaving them in
the state with stale values.
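
For illustration, the recorded change is just a placeholder Delete with null
before/after values. Here is a rough, self-contained sketch of what gets
encoded; the output name and the main wrapper are made up, and the real
recording happens in NodeDestroyableOutput.Execute in the diff below, which
also stores the encoded change in the plan's changeset:

    package main

    import (
        "fmt"

        "github.com/hashicorp/terraform/addrs"
        "github.com/hashicorp/terraform/plans"
        "github.com/zclconf/go-cty/cty"
    )

    func main() {
        // A hypothetical root output address, e.g. output "instance_ip".
        addr := addrs.OutputValue{Name: "instance_ip"}.Absolute(addrs.RootModuleInstance)

        // Placeholder Delete change: there is no decoded prior value to record,
        // so both sides are null.
        change := &plans.OutputChange{
            Addr: addr,
            Change: plans.Change{
                Action: plans.Delete,
                Before: cty.NullVal(cty.DynamicPseudoType),
                After:  cty.NullVal(cty.DynamicPseudoType),
            },
        }

        // Encode produces the serialized change that a destroy plan would store;
        // its presence means the apply step can remove the output from state
        // without trying to re-evaluate it.
        if _, err := change.Encode(); err != nil {
            panic(err)
        }

        fmt.Printf("planned %s for %s\n", change.Change.Action, change.Addr)
    }
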
James Bardin 2020-10-09 11:09:36 -04:00
parent eb2a027684
commit d8e6d66362
5 changed files with 145 additions and 32 deletions

terraform/context_apply_test.go

@@ -9142,10 +9142,13 @@ func TestContext2Apply_plannedDestroyInterpolatedCount(t *testing.T) {
     }

     // Applying the plan should now succeed
-    _, diags = ctx.Apply()
+    state, diags = ctx.Apply()
     if diags.HasErrors() {
         t.Fatalf("apply failed: %s", diags.Err())
     }
+    if !state.Empty() {
+        t.Fatalf("state not empty: %s\n", state)
+    }
 }

 func TestContext2Apply_scaleInMultivarRef(t *testing.T) {

terraform/graph_builder_apply.go

@@ -91,7 +91,7 @@ func (b *ApplyGraphBuilder) Steps() []GraphTransformer {
         &RootVariableTransformer{Config: b.Config},
         &ModuleVariableTransformer{Config: b.Config},
         &LocalTransformer{Config: b.Config},
-        &OutputTransformer{Config: b.Config},
+        &OutputTransformer{Config: b.Config, Changes: b.Changes},

         // Creates all the resource instances represented in the diff, along
         // with dependency edges against the whole-resource nodes added by

terraform/graph_builder_destroy_plan.go

@@ -72,6 +72,11 @@ func (b *DestroyPlanGraphBuilder) Steps() []GraphTransformer {
             State: b.State,
         },

+        &OutputTransformer{
+            Config: b.Config,
+            Destroy: true,
+        },
+
         // Attach the state
         &AttachStateTransformer{State: b.State},

terraform/node_output.go

@@ -11,15 +11,18 @@ import (
     "github.com/hashicorp/terraform/lang"
     "github.com/hashicorp/terraform/plans"
     "github.com/hashicorp/terraform/states"
+    "github.com/hashicorp/terraform/tfdiags"
     "github.com/zclconf/go-cty/cty"
 )

 // nodeExpandOutput is the placeholder for an output that has not yet had
 // its module path expanded.
 type nodeExpandOutput struct {
     Addr addrs.OutputValue
     Module addrs.Module
     Config *configs.Output
+    Changes []*plans.OutputChangeSrc
+    Destroy bool
 }

 var (
@@ -39,16 +42,62 @@ func (n *nodeExpandOutput) temporaryValue() bool {
 }

 func (n *nodeExpandOutput) DynamicExpand(ctx EvalContext) (*Graph, error) {
+    if n.Destroy {
+        return n.planDestroyOutputs(ctx)
+    }
+
     var g Graph
     expander := ctx.InstanceExpander()
     for _, module := range expander.ExpandModule(n.Module) {
+        absAddr := n.Addr.Absolute(module)
+
+        // Find any recorded change for this output
+        var change *plans.OutputChangeSrc
+        for _, c := range n.Changes {
+            if c.Addr.String() == absAddr.String() {
+                change = c
+                break
+            }
+        }
+
         o := &NodeApplyableOutput{
-            Addr: n.Addr.Absolute(module),
+            Addr: absAddr,
+            Config: n.Config,
+            Change: change,
+        }
+        log.Printf("[TRACE] Expanding output: adding %s as %T", o.Addr.String(), o)
+        g.Add(o)
+    }
+    return &g, nil
+}
+
+// if we're planning a destroy operation, add destroy nodes for all root outputs
+// in the state.
+func (n *nodeExpandOutput) planDestroyOutputs(ctx EvalContext) (*Graph, error) {
+    // we only need to plan destroying root outputs.
+    // Other module outputs may be used during destruction by providers that
+    // need to interpolate values.
+    if !n.Module.IsRoot() {
+        return nil, nil
+    }
+
+    state := ctx.State()
+    if state == nil {
+        return nil, nil
+    }
+
+    ms := state.Module(addrs.RootModuleInstance)
+
+    var g Graph
+    for _, output := range ms.OutputValues {
+        o := &NodeDestroyableOutput{
+            Addr: output.Addr,
             Config: n.Config,
         }
         log.Printf("[TRACE] Expanding output: adding %s as %T", o.Addr.String(), o)
         g.Add(o)
     }
     return &g, nil
 }
@@ -108,6 +157,8 @@ func (n *nodeExpandOutput) References() []*addrs.Reference {
 type NodeApplyableOutput struct {
     Addr addrs.AbsOutputValue
     Config *configs.Output // Config is the output in the config
+    // If this is being evaluated during apply, we may have a change recorded already
+    Change *plans.OutputChangeSrc
 }

 var (
@@ -199,27 +250,7 @@ func (n *NodeApplyableOutput) References() []*addrs.Reference {
 // GraphNodeExecutable
 func (n *NodeApplyableOutput) Execute(ctx EvalContext, op walkOperation) error {
-    // This has to run before we have a state lock, since evaluation also
-    // reads the state
-    val, diags := ctx.EvaluateExpr(n.Config.Expr, cty.DynamicPseudoType, nil)
-    // We'll handle errors below, after we have loaded the module.
-
-    // Outputs don't have a separate mode for validation, so validate
-    // depends_on expressions here too
-    diags = diags.Append(validateDependsOn(ctx, n.Config.DependsOn))
-
-    // Ensure that non-sensitive outputs don't include sensitive values
-    _, marks := val.UnmarkDeep()
-    _, hasSensitive := marks["sensitive"]
-    if !n.Config.Sensitive && hasSensitive {
-        diags = diags.Append(&hcl.Diagnostic{
-            Severity: hcl.DiagError,
-            Summary: "Output refers to sensitive values",
-            Detail: "Expressions used in outputs can only refer to sensitive values if the sensitive attribute is true.",
-            Subject: n.Config.DeclRange.Ptr(),
-        })
-    }
-
+    var diags tfdiags.Diagnostics
+
     state := ctx.State()
     if state == nil {
         return nil
@@ -227,6 +258,40 @@ func (n *NodeApplyableOutput) Execute(ctx EvalContext, op walkOperation) error {
     changes := ctx.Changes() // may be nil, if we're not working on a changeset

+    val := cty.UnknownVal(cty.DynamicPseudoType)
+    changeRecorded := n.Change != nil
+    // if we have a change recorded, we don't need to re-evaluate if the value
+    // was known
+    if changeRecorded {
+        var err error
+        val, err = n.Change.After.Decode(cty.DynamicPseudoType)
+        diags = diags.Append(err)
+    }
+
+    // If there was no change recorded, or the recorded change was not wholly
+    // known, then we need to re-evaluate the output
+    if !changeRecorded || !val.IsWhollyKnown() {
+        // This has to run before we have a state lock, since evaluation also
+        // reads the state
+        val, diags = ctx.EvaluateExpr(n.Config.Expr, cty.DynamicPseudoType, nil)
+        // We'll handle errors below, after we have loaded the module.
+
+        // Outputs don't have a separate mode for validation, so validate
+        // depends_on expressions here too
+        diags = diags.Append(validateDependsOn(ctx, n.Config.DependsOn))
+
+        // Ensure that non-sensitive outputs don't include sensitive values
+        _, marks := val.UnmarkDeep()
+        _, hasSensitive := marks["sensitive"]
+        if !n.Config.Sensitive && hasSensitive {
+            diags = diags.Append(&hcl.Diagnostic{
+                Severity: hcl.DiagError,
+                Summary: "Output refers to sensitive values",
+                Detail: "Expressions used in outputs can only refer to sensitive values if the sensitive attribute is true.",
+                Subject: n.Config.DeclRange.Ptr(),
+            })
+        }
+    }
+
     // handling the interpolation error
     if diags.HasErrors() {
         if flagWarnOutputErrors {
@@ -261,7 +326,7 @@ func (n *NodeApplyableOutput) DotNode(name string, opts *dag.DotOpts) *dag.DotNode {
     }
 }

-// NodeDestroyableOutput represents an output that is "destroybale":
+// NodeDestroyableOutput represents an output that is "destroyable":
 // its application will remove the output from the state.
 type NodeDestroyableOutput struct {
     Addr addrs.AbsOutputValue
@@ -293,6 +358,32 @@ func (n *NodeDestroyableOutput) Execute(ctx EvalContext, op walkOperation) error {
     if state == nil {
         return nil
     }
+
+    changes := ctx.Changes()
+    if changes != nil {
+        change := &plans.OutputChange{
+            Addr: n.Addr,
+            Change: plans.Change{
+                // This is just a weird placeholder delete action since
+                // we don't have an actual prior value to indicate.
+                // FIXME: Generate real planned changes for output values
+                // that include the old values.
+                Action: plans.Delete,
+                Before: cty.NullVal(cty.DynamicPseudoType),
+                After: cty.NullVal(cty.DynamicPseudoType),
+            },
+        }
+
+        cs, err := change.Encode()
+        if err != nil {
+            // Should never happen, since we just constructed this right above
+            panic(fmt.Sprintf("planned change for %s could not be encoded: %s", n.Addr, err))
+        }
+        log.Printf("[TRACE] planDestroyOutput: Saving %s change for %s in changeset", change.Action, n.Addr)
+        changes.RemoveOutputChange(n.Addr) // remove any existing planned change, if present
+        changes.AppendOutputChange(cs)     // add the new planned change
+    }
+
     state.RemoveOutputValue(n.Addr)
     return nil
 }

terraform/transform_output.go

@@ -6,6 +6,7 @@ import (
     "github.com/hashicorp/terraform/addrs"
     "github.com/hashicorp/terraform/configs"
     "github.com/hashicorp/terraform/dag"
+    "github.com/hashicorp/terraform/plans"
 )

 // OutputTransformer is a GraphTransformer that adds all the outputs
@@ -15,7 +16,12 @@ import (
 // aren't changing since there is no downside: the state will be available
 // even if the dependent items aren't changing.
 type OutputTransformer struct {
     Config *configs.Config
+    Changes *plans.Changes
+
+    // if this is a planned destroy, root outputs are still in the configuration
+    // so we need to record that we wish to remove them
+    Destroy bool
 }

 func (t *OutputTransformer) Transform(g *Graph) error {
@@ -40,11 +46,19 @@ func (t *OutputTransformer) transform(g *Graph, c *configs.Config) error {
     // Add plannable outputs to the graph, which will be dynamically expanded
     // into NodeApplyableOutputs to reflect possible expansion
     // through the presence of "count" or "for_each" on the modules.
+    var changes []*plans.OutputChangeSrc
+    if t.Changes != nil {
+        changes = t.Changes.Outputs
+    }
+
     for _, o := range c.Module.Outputs {
         node := &nodeExpandOutput{
             Addr: addrs.OutputValue{Name: o.Name},
             Module: c.Path,
             Config: o,
+            Changes: changes,
+            Destroy: t.Destroy,
         }
         log.Printf("[TRACE] OutputTransformer: adding %s as %T", o.Name, node)
         g.Add(node)