configs: Decode preconditions and postconditions

This allows precondition and postcondition checks to be declared for
resources and output values as long as the preconditions_postconditions
experiment is enabled.

Terraform Core doesn't currently know anything about these features, so
as of this commit declaring them does nothing at all.
Martin Atkins 2020-11-20 14:56:21 -08:00 committed by Alisdair McDiarmid
parent 82c518209d
commit 9076400436
11 changed files with 279 additions and 20 deletions
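
For context while reading the hunks below, the check rule being decoded has roughly the following shape. This is a sketch inferred from the decode logic in this commit; only Condition and DeclRange are visible in the diff, so the other field name is an assumption.

package configs

import "github.com/hashicorp/hcl/v2"

// CheckRule is a sketch of a decoded precondition or postcondition block.
type CheckRule struct {
    Condition    hcl.Expression // the "condition" argument
    ErrorMessage hcl.Expression // the "error_message" argument (field name assumed)
    DeclRange    hcl.Range      // source range of the block, used in diagnostics
}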

@@ -64,6 +64,17 @@ func decodeCheckRuleBlock(block *hcl.Block, override bool) (*CheckRule, hcl.Diag
if attr, exists := content.Attributes["condition"]; exists {
cr.Condition = attr.Expr
if len(cr.Condition.Variables()) == 0 {
// A condition expression that doesn't refer to any variable is
// pointless, because its result would always be a constant.
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: fmt.Sprintf("Invalid %s expression", block.Type),
Detail: "The condition expression must refer to at least one object from elsewhere in the configuration, or else its result would not be checking anything.",
Subject: cr.Condition.Range().Ptr(),
})
}
}
if attr, exists := content.Attributes["error_message"]; exists {

@@ -209,6 +209,55 @@ func checkModuleExperiments(m *Module) hcl.Diagnostics {
}
}
if !m.ActiveExperiments.Has(experiments.PreconditionsPostconditions) {
for _, r := range m.ManagedResources {
for _, c := range r.Preconditions {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Preconditions are experimental",
Detail: "The resource preconditions feature is currently an opt-in experiment, subject to change in future releases based on feedback.\n\nActivate the feature for this module by adding preconditions_postconditions to the list of active experiments.",
Subject: c.DeclRange.Ptr(),
})
}
for _, c := range r.Postconditions {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Postconditions are experimental",
Detail: "The resource preconditions feature is currently an opt-in experiment, subject to change in future releases based on feedback.\n\nActivate the feature for this module by adding preconditions_postconditions to the list of active experiments.",
Subject: c.DeclRange.Ptr(),
})
}
}
for _, r := range m.DataResources {
for _, c := range r.Preconditions {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Preconditions are experimental",
Detail: "The resource preconditions feature is currently an opt-in experiment, subject to change in future releases based on feedback.\n\nActivate the feature for this module by adding preconditions_postconditions to the list of active experiments.",
Subject: c.DeclRange.Ptr(),
})
}
for _, c := range r.Postconditions {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Postconditions are experimental",
Detail: "The resource preconditions feature is currently an opt-in experiment, subject to change in future releases based on feedback.\n\nActivate the feature for this module by adding preconditions_postconditions to the list of active experiments.",
Subject: c.DeclRange.Ptr(),
})
}
}
for _, o := range m.Outputs {
for _, c := range o.Preconditions {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Preconditions are experimental",
Detail: "The output value preconditions feature is currently an opt-in experiment, subject to change in future releases based on feedback.\n\nActivate the feature for this module by adding preconditions_postconditions to the list of active experiments.",
Subject: c.DeclRange.Ptr(),
})
}
}
}
return diags
}
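
The experiment diagnostic above is repeated for managed resources, data resources, and output values. One way the repetition could be factored out, shown only as an illustration; the helper name is hypothetical and not part of this commit:

package configs

import (
    "fmt"

    "github.com/hashicorp/hcl/v2"
)

// experimentRequiredDiag builds the "feature is experimental" diagnostic used
// above; summary is e.g. "Preconditions are experimental" and feature is e.g.
// "resource preconditions".
func experimentRequiredDiag(summary, feature string, declRange hcl.Range) *hcl.Diagnostic {
    return &hcl.Diagnostic{
        Severity: hcl.DiagError,
        Summary:  summary,
        Detail: fmt.Sprintf("The %s feature is currently an opt-in experiment, subject to change in future releases based on feedback.\n\nActivate the feature for this module by adding preconditions_postconditions to the list of active experiments.", feature),
        Subject: declRange.Ptr(),
    }
}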

@@ -356,6 +356,8 @@ type Output struct {
DependsOn []hcl.Traversal
Sensitive bool
Preconditions []*CheckRule
DescriptionSet bool
SensitiveSet bool
@@ -409,6 +411,26 @@ func decodeOutputBlock(block *hcl.Block, override bool) (*Output, hcl.Diagnostic
o.DependsOn = append(o.DependsOn, deps...)
}
for _, block := range content.Blocks {
switch block.Type {
case "precondition":
cr, moreDiags := decodeCheckRuleBlock(block, override)
diags = append(diags, moreDiags...)
o.Preconditions = append(o.Preconditions, cr)
case "postcondition":
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Postconditions are not allowed",
Detail: "Output values can only have preconditions, not postconditions.",
Subject: block.TypeRange.Ptr(),
})
default:
// The cases above should be exhaustive for all block types
// defined in the block type schema, so this shouldn't happen.
panic(fmt.Sprintf("unexpected lifecycle sub-block type %q", block.Type))
}
}
return o, diags
}
@@ -497,4 +519,8 @@ var outputBlockSchema = &hcl.BodySchema{
Name: "sensitive",
},
},
Blocks: []hcl.BlockHeaderSchema{
{Type: "precondition"},
{Type: "postcondition"},
},
}

@@ -142,14 +142,14 @@ func (p *Parser) loadConfigFile(path string, override bool) (*File, hcl.Diagnost
}
case "resource":
- cfg, cfgDiags := decodeResourceBlock(block)
+ cfg, cfgDiags := decodeResourceBlock(block, override)
diags = append(diags, cfgDiags...)
if cfg != nil {
file.ManagedResources = append(file.ManagedResources, cfg)
}
case "data":
- cfg, cfgDiags := decodeDataBlock(block)
+ cfg, cfgDiags := decodeDataBlock(block, override)
diags = append(diags, cfgDiags...)
if cfg != nil {
file.DataResources = append(file.DataResources, cfg)

@@ -97,7 +97,7 @@ func TestParserLoadConfigFileFailureMessages(t *testing.T) {
{
"invalid-files/data-resource-lifecycle.tf",
hcl.DiagError,
- "Unsupported lifecycle block",
+ "Invalid data resource lifecycle argument",
},
{
"invalid-files/variable-type-unknown.tf",

@@ -22,6 +22,9 @@ type Resource struct {
ProviderConfigRef *ProviderConfigRef
Provider addrs.Provider
Preconditions []*CheckRule
Postconditions []*CheckRule
DependsOn []hcl.Traversal
// Managed is populated only for Mode = addrs.ManagedResourceMode,
@@ -81,7 +84,7 @@ func (r *Resource) ProviderConfigAddr() addrs.LocalProviderConfig {
}
}
- func decodeResourceBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) {
+ func decodeResourceBlock(block *hcl.Block, override bool) (*Resource, hcl.Diagnostics) {
var diags hcl.Diagnostics
r := &Resource{
Mode: addrs.ManagedResourceMode,
@@ -237,6 +240,24 @@ func decodeResourceBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) {
}
for _, block := range lcContent.Blocks {
switch block.Type {
case "precondition", "postcondition":
cr, moreDiags := decodeCheckRuleBlock(block, override)
diags = append(diags, moreDiags...)
switch block.Type {
case "precondition":
r.Preconditions = append(r.Preconditions, cr)
case "postcondition":
r.Postconditions = append(r.Postconditions, cr)
}
default:
// The cases above should be exhaustive for all block types
// defined in the lifecycle schema, so this shouldn't happen.
panic(fmt.Sprintf("unexpected lifecycle sub-block type %q", block.Type))
}
}
case "connection": case "connection":
if seenConnection != nil { if seenConnection != nil {
diags = append(diags, &hcl.Diagnostic{ diags = append(diags, &hcl.Diagnostic{
@ -307,7 +328,7 @@ func decodeResourceBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) {
return r, diags return r, diags
} }
func decodeDataBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) { func decodeDataBlock(block *hcl.Block, override bool) (*Resource, hcl.Diagnostics) {
var diags hcl.Diagnostics var diags hcl.Diagnostics
r := &Resource{ r := &Resource{
Mode: addrs.DataResourceMode, Mode: addrs.DataResourceMode,
@ -368,6 +389,7 @@ func decodeDataBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) {
} }
var seenEscapeBlock *hcl.Block var seenEscapeBlock *hcl.Block
var seenLifecycle *hcl.Block
for _, block := range content.Blocks {
switch block.Type {
@@ -391,21 +413,59 @@ func decodeDataBlock(block *hcl.Block) (*Resource, hcl.Diagnostics) {
// will see a blend of both.
r.Config = hcl.MergeBodies([]hcl.Body{r.Config, block.Body})
- // The rest of these are just here to reserve block type names for future use.
case "lifecycle":
if seenLifecycle != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
- Summary: "Unsupported lifecycle block",
+ Summary: "Duplicate lifecycle block",
- Detail: "Data resources do not have lifecycle settings, so a lifecycle block is not allowed.",
+ Detail: fmt.Sprintf("This resource already has a lifecycle block at %s.", seenLifecycle.DefRange),
- Subject: &block.DefRange,
+ Subject: block.DefRange.Ptr(),
})
continue
}
seenLifecycle = block
lcContent, lcDiags := block.Body.Content(resourceLifecycleBlockSchema)
diags = append(diags, lcDiags...)
// All of the attributes defined for resource lifecycle are for
// managed resources only, so we can emit a common error message
// for any given attributes that HCL accepted.
for name, attr := range lcContent.Attributes {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid data resource lifecycle argument",
Detail: fmt.Sprintf("The lifecycle argument %q is defined only for managed resources (\"resource\" blocks), and is not valid for data resources.", name),
Subject: attr.NameRange.Ptr(),
})
}
for _, block := range lcContent.Blocks {
switch block.Type {
case "precondition", "postcondition":
cr, moreDiags := decodeCheckRuleBlock(block, override)
diags = append(diags, moreDiags...)
switch block.Type {
case "precondition":
r.Preconditions = append(r.Preconditions, cr)
case "postcondition":
r.Postconditions = append(r.Postconditions, cr)
}
default:
// The cases above should be exhaustive for all block types
// defined in the lifecycle schema, so this shouldn't happen.
panic(fmt.Sprintf("unexpected lifecycle sub-block type %q", block.Type))
}
}
default:
// Any other block types are ones we're reserving for future use,
// but don't have any defined meaning today.
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Reserved block type name in data block",
Detail: fmt.Sprintf("The block type name %q is reserved for use by Terraform in a future version.", block.Type),
- Subject: &block.TypeRange,
+ Subject: block.TypeRange.Ptr(),
})
}
}
@@ -551,13 +611,17 @@ var resourceBlockSchema = &hcl.BodySchema{
var dataBlockSchema = &hcl.BodySchema{
Attributes: commonResourceAttributes,
Blocks: []hcl.BlockHeaderSchema{
- {Type: "lifecycle"}, // reserved for future use
+ {Type: "lifecycle"},
{Type: "locals"}, // reserved for future use
{Type: "_"}, // meta-argument escaping block
},
}
var resourceLifecycleBlockSchema = &hcl.BodySchema{
// We tell HCL that these elements are all valid for both "resource"
// and "data" lifecycle blocks, but the rules are actually more restrictive
// than that. We deal with that after decoding so that we can return
// more specific error messages than HCL would typically return itself.
Attributes: []hcl.AttributeSchema{
{
Name: "create_before_destroy",
@@ -569,4 +633,8 @@ var resourceLifecycleBlockSchema = &hcl.BodySchema{
Name: "ignore_changes",
},
},
Blocks: []hcl.BlockHeaderSchema{
{Type: "precondition"},
{Type: "postcondition"},
},
}
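
The comment above describes the pattern used throughout this commit: tell HCL that a superset of arguments and blocks is valid, then reject the invalid ones after decoding so the error messages can be context-specific. A standalone sketch of that pattern follows; the schema contents and source text are illustrative, not the commit's own code.

package main

import (
    "fmt"

    "github.com/hashicorp/hcl/v2"
    "github.com/hashicorp/hcl/v2/hclsyntax"
)

// A deliberately permissive schema: ignore_changes is accepted here even
// though the caller will reject it for data resources.
var lifecycleSchema = &hcl.BodySchema{
    Attributes: []hcl.AttributeSchema{{Name: "ignore_changes"}},
    Blocks:     []hcl.BlockHeaderSchema{{Type: "precondition"}, {Type: "postcondition"}},
}

func main() {
    src := []byte(`
ignore_changes = []
precondition {
  condition     = true
  error_message = "Must be true."
}
`)
    f, diags := hclsyntax.ParseConfig(src, "lifecycle.tf", hcl.InitialPos)
    if diags.HasErrors() {
        panic(diags.Error())
    }
    content, _ := f.Body.Content(lifecycleSchema)

    // HCL accepted ignore_changes because the schema allows it; the caller can
    // now emit a message tailored to the data-resource context instead of a
    // generic "unsupported argument" error from HCL.
    for name := range content.Attributes {
        fmt.Printf("lifecycle argument %q is not valid for data resources\n", name)
    }
    for _, b := range content.Blocks {
        fmt.Printf("decoded %s block\n", b.Type)
    }
}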

@@ -0,0 +1,34 @@
resource "test" "test" {
lifecycle {
precondition {
condition = true # ERROR: Invalid precondition expression
error_message = "Must be true."
}
postcondition {
condition = true # ERROR: Invalid postcondition expression
error_message = "Must be true."
}
}
}
data "test" "test" {
lifecycle {
precondition {
condition = true # ERROR: Invalid precondition expression
error_message = "Must be true."
}
postcondition {
condition = true # ERROR: Invalid postcondition expression
error_message = "Must be true."
}
}
}
output "test" {
value = ""
precondition {
condition = true # ERROR: Invalid precondition expression
error_message = "Must be true."
}
}

@@ -1,3 +0,0 @@
data "test" "foo" {
lifecycle {}
}

@@ -1,5 +1,7 @@
data "example" "example" {
lifecycle {
- # This block intentionally left blank
+ # The lifecycle arguments are not valid for data resources:
# only the precondition and postcondition blocks are allowed.
ignore_changes = []
}
}

@@ -0,0 +1,34 @@
resource "test" "test" {
lifecycle {
precondition { # ERROR: Preconditions are experimental
condition = path.module != ""
error_message = "Must be true."
}
postcondition { # ERROR: Postconditions are experimental
condition = path.module != ""
error_message = "Must be true."
}
}
}
data "test" "test" {
lifecycle {
precondition { # ERROR: Preconditions are experimental
condition = path.module != ""
error_message = "Must be true."
}
postcondition { # ERROR: Postconditions are experimental
condition = path.module != ""
error_message = "Must be true."
}
}
}
output "test" {
value = ""
precondition { # ERROR: Preconditions are experimental
condition = path.module != ""
error_message = "Must be true."
}
}

@@ -0,0 +1,38 @@
terraform {
experiments = [preconditions_postconditions] # WARNING: Experimental feature "preconditions_postconditions" is active
}
resource "test" "test" {
lifecycle {
precondition {
condition = path.module != ""
error_message = "Must be true."
}
postcondition {
condition = path.module != ""
error_message = "Must be true."
}
}
}
data "test" "test" {
lifecycle {
precondition {
condition = path.module != ""
error_message = "Must be true."
}
postcondition {
condition = path.module != ""
error_message = "Must be true."
}
}
}
output "test" {
value = ""
precondition {
condition = path.module != ""
error_message = "Must be true."
}
}