Merge pull request #26321 from hashicorp/jbardin/simplify-data-lifecycle

simplify data lifecycle
James Bardin 2020-09-22 10:27:16 -04:00 committed by GitHub
commit 915d4e4b45
7 changed files with 13 additions and 560 deletions
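In short: data sources whose configuration is wholly known are now read during plan, and the result is stored directly in the planned state; no separate Read (or NoOp) change is recorded for them, and refresh no longer leaves placeholder data resource entries in state. Accordingly, the apply-time shortcut for wholly-known planned values and the change-detection helpers dataObjectHasChanges / createEmptyBlocks (with their tests) are removed.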


@@ -73,13 +73,12 @@ func TestPlanApplyInAutomation(t *testing.T) {
// stateResources := plan.Changes.Resources
diffResources := plan.Changes.Resources
if len(diffResources) != 2 {
if len(diffResources) != 1 {
t.Errorf("incorrect number of resources in plan")
}
expected := map[string]plans.Action{
"data.template_file.test": plans.Read,
"null_resource.test": plans.Create,
"null_resource.test": plans.Create,
}
for _, r := range diffResources {


@@ -72,13 +72,12 @@ func TestPrimarySeparatePlan(t *testing.T) {
}
diffResources := plan.Changes.Resources
if len(diffResources) != 2 {
if len(diffResources) != 1 {
t.Errorf("incorrect number of resources in plan")
}
expected := map[string]plans.Action{
"data.template_file.test": plans.Read,
"null_resource.test": plans.Create,
"null_resource.test": plans.Create,
}
for _, r := range diffResources {
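Both test hunks above make the same adjustment. As a rough sketch (the loop body sits outside the hunk and is assumed here), the assertions after the change read roughly as follows: the data read is resolved during plan and no longer appears as a planned change, so only the managed resource remains.

    diffResources := plan.Changes.Resources
    if len(diffResources) != 1 {
        t.Errorf("incorrect number of resources in plan")
    }

    expected := map[string]plans.Action{
        "null_resource.test": plans.Create,
    }
    for _, r := range diffResources {
        // assumed shape of the check: each change must match the expected action
        want, ok := expected[r.Addr.String()]
        if !ok || r.Action != want {
            t.Errorf("unexpected change %s for %s", r.Action, r.Addr)
        }
    }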


@@ -1879,22 +1879,11 @@ func TestContext2Plan_computedInFunction(t *testing.T) {
diags := ctx.Validate()
assertNoErrors(t, diags)
state, diags := ctx.Refresh() // data resource is read in this step
_, diags = ctx.Plan()
assertNoErrors(t, diags)
if !p.ReadDataSourceCalled {
t.Fatalf("ReadDataSource was not called on provider during refresh; should've been called")
}
p.ReadDataSourceCalled = false // reset for next call
t.Logf("state after refresh:\n%s", state)
_, diags = ctx.Plan() // should do nothing with data resource in this step, since it was already read
assertNoErrors(t, diags)
if p.ReadDataSourceCalled {
// there was no config change to read during plan
t.Fatalf("ReadDataSource should not have been called")
t.Fatalf("ReadDataSource was not called on provider during plan; should've been called")
}
}
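Reconstructed from the hunk above (a sketch, not necessarily verbatim), the test body now exercises the data source read during plan rather than during a prior refresh:

    diags := ctx.Validate()
    assertNoErrors(t, diags)

    // the data source read now happens as part of planning
    _, diags = ctx.Plan()
    assertNoErrors(t, diags)

    if !p.ReadDataSourceCalled {
        t.Fatalf("ReadDataSource was not called on provider during plan; should've been called")
    }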
@@ -5007,11 +4996,6 @@ func TestContext2Plan_createBeforeDestroy_depends_datasource(t *testing.T) {
},
})
// We're skipping ctx.Refresh here, which simulates what happens when
// running "terraform plan -refresh=false". As a result, we don't get our
// usual opportunity to read the data source during the refresh step and
// thus the plan call below is forced to produce a deferred read action.
plan, diags := ctx.Plan()
if diags.HasErrors() {
t.Fatalf("unexpected errors: %s", diags.Err())
@@ -5052,22 +5036,6 @@ func TestContext2Plan_createBeforeDestroy_depends_datasource(t *testing.T) {
"num": cty.StringVal("2"),
"computed": cty.StringVal("data_id"),
}), ric.After)
case "data.aws_vpc.bar[0]":
if res.Action != plans.Read {
t.Fatalf("resource %s should be read, got %s", ric.Addr, ric.Action)
}
checkVals(t, objectVal(t, schema, map[string]cty.Value{
"id": cty.StringVal("data_id"),
"foo": cty.StringVal("0"),
}), ric.After)
case "data.aws_vpc.bar[1]":
if res.Action != plans.Read {
t.Fatalf("resource %s should be read, got %s", ric.Addr, ric.Action)
}
checkVals(t, objectVal(t, schema, map[string]cty.Value{
"id": cty.StringVal("data_id"),
"foo": cty.StringVal("1"),
}), ric.After)
default:
t.Fatal("unknown instance:", i)
}
@@ -5077,8 +5045,6 @@ func TestContext2Plan_createBeforeDestroy_depends_datasource(t *testing.T) {
wantAddrs := map[string]struct{}{
"aws_instance.foo[0]": struct{}{},
"aws_instance.foo[1]": struct{}{},
"data.aws_vpc.bar[0]": struct{}{},
"data.aws_vpc.bar[1]": struct{}{},
}
if !cmp.Equal(seenAddrs, wantAddrs) {
t.Errorf("incorrect addresses in changeset:\n%s", cmp.Diff(wantAddrs, seenAddrs))
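With the deferred data reads gone from the plan, the expected changeset addresses reduce to the managed instances only; a sketch of the resulting check:

    wantAddrs := map[string]struct{}{
        "aws_instance.foo[0]": struct{}{},
        "aws_instance.foo[1]": struct{}{},
    }
    if !cmp.Equal(seenAddrs, wantAddrs) {
        t.Errorf("incorrect addresses in changeset:\n%s", cmp.Diff(wantAddrs, seenAddrs))
    }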


@@ -907,9 +907,7 @@ func TestContext2Refresh_dataCount(t *testing.T) {
t.Fatalf("refresh errors: %s", diags.Err())
}
checkStateString(t, s, `data.test.foo.0:
ID =
provider = provider["registry.terraform.io/hashicorp/test"]`)
checkStateString(t, s, `<no state>`)
}
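A sketch of the updated expectation (the Refresh call itself sits above the hunk and is assumed here): refresh no longer writes placeholder entries for data resources, so the resulting state is empty.

    s, diags := ctx.Refresh()
    if diags.HasErrors() {
        t.Fatalf("refresh errors: %s", diags.Err())
    }
    checkStateString(t, s, `<no state>`)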
func TestContext2Refresh_dataState(t *testing.T) {


@@ -44,22 +44,6 @@ func (n *evalReadDataApply) Eval(ctx EvalContext) (interface{}, error) {
return nil, err
}
// We have a change and it is complete, which means we read the data
// source during plan and only need to store it in state.
if planned.After.IsWhollyKnown() {
if err := ctx.Hook(func(h Hook) (HookAction, error) {
return h.PostApply(absAddr, states.CurrentGen, planned.After, nil)
}); err != nil {
diags = diags.Append(err)
}
*n.State = &states.ResourceInstanceObject{
Value: planned.After,
Status: states.ObjectReady,
}
return nil, diags.ErrWithWarnings()
}
config := *n.Config
providerSchema := *n.ProviderSchema
schema, _ := providerSchema.SchemaForResourceAddr(n.Addr.ContainingResource())
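The removed shortcut keyed off cty's wholly-known check. For context only (this snippet is not part of the change), a standalone illustration of that predicate:

    package main

    import (
        "fmt"

        "github.com/zclconf/go-cty/cty"
    )

    func main() {
        val := cty.ObjectVal(map[string]cty.Value{
            "id":  cty.UnknownVal(cty.String), // not known until apply
            "ami": cty.StringVal("ami-123"),
        })
        fmt.Println(val.IsKnown())       // true: the object itself is known
        fmt.Println(val.IsWhollyKnown()) // false: a nested value is unknown
    }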


@@ -7,7 +7,6 @@ import (
"github.com/zclconf/go-cty/cty"
"github.com/hashicorp/terraform/configs/configschema"
"github.com/hashicorp/terraform/plans"
"github.com/hashicorp/terraform/plans/objchange"
"github.com/hashicorp/terraform/states"
@@ -102,18 +101,8 @@ func (n *evalReadDataPlan) Eval(ctx EvalContext) (interface{}, error) {
return nil, diags.ErrWithWarnings()
}
// If we have a stored state we may not need to re-read the data source.
// Check the config against the state to see if there are any differences.
proposedVal, hasChanges := dataObjectHasChanges(schema, priorVal, configVal)
if !hasChanges {
log.Printf("[TRACE] evalReadDataPlan: %s no change detected, using existing state", absAddr)
// state looks up to date, and must have been read during refresh
return nil, diags.ErrWithWarnings()
}
log.Printf("[TRACE] evalReadDataPlan: %s configuration changed, planning data source", absAddr)
// We have a complete configuration with no dependencies to wait on, so we
// can read the data source into the state.
newVal, readDiags := n.readDataSource(ctx, configVal)
diags = diags.Append(readDiags)
if diags.HasErrors() {
@@ -122,6 +111,10 @@ func (n *evalReadDataPlan) Eval(ctx EvalContext) (interface{}, error) {
// if we have a prior value, we can check for any irregularities in the response
if !priorVal.IsNull() {
// While we don't propose planned changes for data sources, we can
// generate a proposed value for comparison to ensure the data source
// is returning a result following the rules of the provider contract.
proposedVal := objchange.ProposedNewObject(schema, priorVal, configVal)
if errs := objchange.AssertObjectCompatible(schema, proposedVal, newVal); len(errs) > 0 {
// Resources have the LegacyTypeSystem field to signal when they are
// using an SDK which may not produce precise values. While data
@@ -138,27 +131,6 @@ func (n *evalReadDataPlan) Eval(ctx EvalContext) (interface{}, error) {
}
}
// We still default to read here, to indicate any changes in the plan, even
// though this will already be written in the refreshed state.
action := plans.Read
if priorVal.Equals(newVal).True() {
action = plans.NoOp
}
// The returned value from ReadDataSource must be non-nil and known,
// which we store in the change. Apply will use the fact that the After
// value is wholly known to save the state directly, rather than reading the
// data source again.
*n.OutputChange = &plans.ResourceInstanceChange{
Addr: absAddr,
ProviderAddr: n.ProviderAddr,
Change: plans.Change{
Action: action,
Before: priorVal,
After: newVal,
},
}
*n.State = &states.ResourceInstanceObject{
Value: newVal,
Status: states.ObjectReady,
@@ -190,97 +162,3 @@ func (n *evalReadDataPlan) forcePlanRead(ctx EvalContext) bool {
}
return false
}
// dataObjectHasChanges determines if the newly evaluated config would cause
// any changes in the stored value, indicating that we need to re-read this
// data source. The proposed value is returned for validation against the
// ReadDataSource response.
func dataObjectHasChanges(schema *configschema.Block, priorVal, configVal cty.Value) (proposedVal cty.Value, hasChanges bool) {
if priorVal.IsNull() {
return priorVal, true
}
// Applying the configuration to the stored state will allow us to detect any changes.
proposedVal = objchange.ProposedNewObject(schema, priorVal, configVal)
if !configVal.IsWhollyKnown() {
// Config should have been known here, but handle it the same as ProposedNewObject
return proposedVal, true
}
// Normalize the prior value so we can correctly compare the two even if
// the prior value came through the legacy SDK.
priorVal = createEmptyBlocks(schema, priorVal)
return proposedVal, proposedVal.Equals(priorVal).False()
}
// createEmptyBlocks will fill in null TypeList or TypeSet blocks with Empty
// values. Our decoder will always decode blocks as empty containers, but the
// legacy SDK may replace those with null values. Normalizing these values
// allows us to correctly compare the ProposedNewObject value in
// dataObjectHasChanges.
func createEmptyBlocks(schema *configschema.Block, val cty.Value) cty.Value {
if val.IsNull() || !val.IsKnown() {
return val
}
if !val.Type().IsObjectType() {
panic(fmt.Sprintf("unexpected type %#v\n", val.Type()))
}
// if there are no blocks, don't bother recreating the cty.Value
if len(schema.BlockTypes) == 0 {
return val
}
objMap := val.AsValueMap()
for name, blockType := range schema.BlockTypes {
block, ok := objMap[name]
if !ok {
continue
}
// helper to build the recursive block values
nextBlocks := func() []cty.Value {
// this is only called once we know this is a non-null List or Set
// with a length > 0
newVals := make([]cty.Value, 0, block.LengthInt())
for it := block.ElementIterator(); it.Next(); {
_, val := it.Element()
newVals = append(newVals, createEmptyBlocks(&blockType.Block, val))
}
return newVals
}
// Blocks are always decoded as empty containers, but the legacy
// SDK may return null when they are empty.
switch blockType.Nesting {
// We are only concerned with block types that can come from the legacy
// sdk, which means TypeList or TypeSet.
case configschema.NestingList:
ety := block.Type().ElementType()
switch {
case block.IsNull():
objMap[name] = cty.ListValEmpty(ety)
case block.LengthInt() == 0:
continue
default:
objMap[name] = cty.ListVal(nextBlocks())
}
case configschema.NestingSet:
ety := block.Type().ElementType()
switch {
case block.IsNull():
objMap[name] = cty.SetValEmpty(ety)
case block.LengthInt() == 0:
continue
default:
objMap[name] = cty.SetVal(nextBlocks())
}
}
}
return cty.ObjectVal(objMap)
}
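Putting the evalReadDataPlan hunks together, a rough sketch of the simplified plan-time flow (assuming the surrounding evalReadDataPlan context; not verbatim): with a complete configuration the data source is always read during plan, the prior state is only used to validate the provider contract, and the result is stored directly without recording a Read or NoOp change.

    newVal, readDiags := n.readDataSource(ctx, configVal)
    diags = diags.Append(readDiags)
    if diags.HasErrors() {
        return nil, diags.ErrWithWarnings()
    }

    if !priorVal.IsNull() {
        // Compare against a proposed value only to check the provider contract;
        // no change is planned for the data source itself.
        proposedVal := objchange.ProposedNewObject(schema, priorVal, configVal)
        if errs := objchange.AssertObjectCompatible(schema, proposedVal, newVal); len(errs) > 0 {
            // legacy-SDK providers are tolerated here with warnings rather than errors
        }
    }

    *n.State = &states.ResourceInstanceObject{
        Value:  newVal,
        Status: states.ObjectReady,
    }
    return nil, diags.ErrWithWarnings()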


@@ -1,371 +0,0 @@
package terraform
import (
"testing"
"github.com/hashicorp/terraform/configs/configschema"
"github.com/zclconf/go-cty/cty"
)
func TestReadDataCreateEmptyBlocks(t *testing.T) {
setSchema := &configschema.Block{
BlockTypes: map[string]*configschema.NestedBlock{
"set": {
Nesting: configschema.NestingSet,
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Optional: true,
},
},
},
},
},
}
nestedSetSchema := &configschema.Block{
BlockTypes: map[string]*configschema.NestedBlock{
"set": {
Nesting: configschema.NestingSet,
Block: configschema.Block{
BlockTypes: map[string]*configschema.NestedBlock{
"nested-set": {
Nesting: configschema.NestingSet,
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Optional: true,
},
},
},
},
},
},
},
},
}
listSchema := &configschema.Block{
BlockTypes: map[string]*configschema.NestedBlock{
"list": {
Nesting: configschema.NestingList,
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Optional: true,
},
},
},
},
},
}
nestedListSchema := &configschema.Block{
BlockTypes: map[string]*configschema.NestedBlock{
"list": {
Nesting: configschema.NestingList,
Block: configschema.Block{
BlockTypes: map[string]*configschema.NestedBlock{
"nested-list": {
Nesting: configschema.NestingList,
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Optional: true,
},
},
},
},
},
},
},
},
}
singleSchema := &configschema.Block{
BlockTypes: map[string]*configschema.NestedBlock{
"single": {
Nesting: configschema.NestingSingle,
Block: configschema.Block{
Attributes: map[string]*configschema.Attribute{
"attr": {
Type: cty.String,
Optional: true,
},
},
},
},
},
}
for _, tc := range []struct {
name string
schema *configschema.Block
val cty.Value
expect cty.Value
}{
{
"set-block",
setSchema,
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("ok"),
}),
}),
}),
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("ok"),
}),
}),
}),
},
{
"set-block-empty",
setSchema,
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
},
{
"set-block-null",
setSchema,
cty.ObjectVal(map[string]cty.Value{
"set": cty.NullVal(cty.Set(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
)),
}),
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
},
{
"list-block",
listSchema,
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("ok"),
}),
}),
}),
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("ok"),
}),
}),
}),
},
{
"list-block-empty",
listSchema,
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
},
{
"list-block-null",
listSchema,
cty.ObjectVal(map[string]cty.Value{
"list": cty.NullVal(cty.List(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
)),
}),
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
},
{
"nested-set-block",
nestedSetSchema,
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("ok"),
}),
}),
}),
}),
}),
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"attr": cty.StringVal("ok"),
}),
}),
}),
}),
}),
},
{
"nested-set-block-empty",
nestedSetSchema,
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-set": cty.SetValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
}),
}),
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-set": cty.SetValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
}),
}),
},
{
"nested-set-block-null",
nestedSetSchema,
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-set": cty.NullVal(cty.Set(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
)),
}),
}),
}),
cty.ObjectVal(map[string]cty.Value{
"set": cty.SetVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-set": cty.SetValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
}),
}),
},
{
"nested-list-block-empty",
nestedListSchema,
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-list": cty.ListValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
}),
}),
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-list": cty.ListValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
}),
}),
},
{
"nested-list-block-null",
nestedListSchema,
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-list": cty.NullVal(cty.List(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
)),
}),
}),
}),
cty.ObjectVal(map[string]cty.Value{
"list": cty.ListVal([]cty.Value{
cty.ObjectVal(map[string]cty.Value{
"nested-list": cty.ListValEmpty(
cty.Object(map[string]cty.Type{
"attr": cty.String,
}),
),
}),
}),
}),
},
{
"single-block-null",
singleSchema,
cty.ObjectVal(map[string]cty.Value{
"single": cty.NullVal(cty.Object(map[string]cty.Type{
"attr": cty.String,
})),
}),
cty.ObjectVal(map[string]cty.Value{
"single": cty.NullVal(cty.Object(map[string]cty.Type{
"attr": cty.String,
})),
}),
},
} {
t.Run(tc.name, func(t *testing.T) {
val := createEmptyBlocks(tc.schema, tc.val)
if !tc.expect.Equals(val).True() {
t.Fatalf("\nexpected: %#v\ngot : %#v\n", tc.expect, val)
}
})
}
}