ElaborateFromConfigBody handles deeply nested paths more precisely

Daniel Schmidt 2026-01-19 17:38:01 +01:00
parent 6d6b75dada
commit 3f705da69c
13 changed files with 157 additions and 114 deletions

View file

@@ -8,6 +8,7 @@ import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/configs/configschema"
"github.com/hashicorp/terraform/internal/lang/marks"
"github.com/hashicorp/terraform/internal/tfdiags"
"github.com/zclconf/go-cty/cty"
@@ -85,7 +86,7 @@ func (d *Deprecations) deprecationMarksToDiagnostics(deprecationMarks []marks.De
// ValidateAsConfig checks the given value for deprecation marks and returns diagnostics
// for each deprecation found, unless deprecation warnings are suppressed for the given module.
// It checks for deeply nested deprecation marks as well.
func (d *Deprecations) ValidateAsConfig(value cty.Value, module addrs.Module) tfdiags.Diagnostics {
func (d *Deprecations) ValidateAsConfig(value cty.Value, schema *configschema.Block, module addrs.Module) tfdiags.Diagnostics {
var diags tfdiags.Diagnostics
_, pvms := value.UnmarkDeepWithPaths()

View file

@@ -71,7 +71,7 @@ func (n *NodeActionDeclarationInstance) Execute(ctx EvalContext, _ walkOperation
valDiags := validateResourceForbiddenEphemeralValues(ctx, configVal, n.Schema.ConfigSchema)
diags = diags.Append(valDiags.InConfigBody(n.Config.Config, n.Addr.String()))
deprecationDiags := ctx.Deprecations().ValidateAsConfig(configVal, n.ModulePath())
deprecationDiags := ctx.Deprecations().ValidateAsConfig(configVal, n.Schema.ConfigSchema, n.ModulePath())
diags = diags.Append(deprecationDiags.InConfigBody(n.Config.Config, n.Addr.String()))
if diags.HasErrors() {

View file

@@ -69,7 +69,7 @@ func (n *NodeActionDeclarationPartialExpanded) Execute(ctx EvalContext, op walkO
return diags
}
deprecationDiags := ctx.Deprecations().ValidateAsConfig(configVal, n.ActionAddr().Module)
deprecationDiags := ctx.Deprecations().ValidateAsConfig(configVal, n.Schema.ConfigSchema, n.ActionAddr().Module)
diags = diags.Append(deprecationDiags)
if diags.HasErrors() {
return diags

View file

@@ -102,7 +102,7 @@ func (n *NodeValidatableAction) Execute(ctx EvalContext, _ walkOperation) tfdiag
}
}
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, schema.ConfigSchema, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()))
valDiags = validateResourceForbiddenEphemeralValues(ctx, configVal, schema.ConfigSchema)
diags = diags.Append(valDiags.InConfigBody(config, n.Addr.String()))

View file

@@ -83,7 +83,7 @@ func (n *NodeApplyableProvider) ValidateProvider(ctx EvalContext, provider provi
return diags
}
deprecationDiags := ctx.Deprecations().ValidateAsConfig(configVal, n.Addr.Module)
deprecationDiags := ctx.Deprecations().ValidateAsConfig(configVal, configSchema, n.Addr.Module)
diags = diags.Append(deprecationDiags.InConfigBody(configBody, n.Addr.String()))
if diags.HasErrors() {
return diags

View file

@@ -866,7 +866,7 @@ func (n *NodeAbstractResourceInstance) plan(
diags = diags.Append(
validateResourceForbiddenEphemeralValues(ctx, origConfigVal, schema.Body).InConfigBody(n.Config.Config, n.Addr.String()),
)
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(origConfigVal, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(origConfigVal, schema.Body, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()))
if diags.HasErrors() {
return nil, nil, deferred, keyData, diags
}
@@ -1773,7 +1773,7 @@ func (n *NodeAbstractResourceInstance) providerMetas(ctx EvalContext) (cty.Value
metaConfigVal, _, configDiags = ctx.EvaluateBlock(m.Config, providerSchema.ProviderMeta.Body, nil, EvalDataForNoInstanceKey)
diags = diags.Append(configDiags)
diags = diags.Append(
ctx.Deprecations().ValidateAsConfig(metaConfigVal, ctx.Path().Module()).InConfigBody(m.Config, n.Addr.String()),
ctx.Deprecations().ValidateAsConfig(metaConfigVal, providerSchema.ProviderMeta.Body, ctx.Path().Module()).InConfigBody(m.Config, n.Addr.String()),
)
metaConfigVal = marks.RemoveDeprecationMarks(metaConfigVal)
}
@@ -1853,7 +1853,7 @@ func (n *NodeAbstractResourceInstance) planDataSource(ctx EvalContext, checkRule
validateResourceForbiddenEphemeralValues(ctx, configVal, schema.Body).InConfigBody(n.Config.Config, n.Addr.String()),
)
diags = diags.Append(
ctx.Deprecations().ValidateAsConfig(configVal, ctx.Path().Module()).InConfigBody(n.Config.Config, n.Addr.String()),
ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, ctx.Path().Module()).InConfigBody(n.Config.Config, n.Addr.String()),
)
configVal = marks.RemoveDeprecationMarks(configVal)
if diags.HasErrors() {
@@ -2194,7 +2194,7 @@ func (n *NodeAbstractResourceInstance) applyDataSource(ctx EvalContext, planned
}
diags = diags.Append(
ctx.Deprecations().ValidateAsConfig(configVal, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()),
ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()),
)
if diags.HasErrors() {
return nil, keyData, diags
@@ -2512,7 +2512,7 @@ func (n *NodeAbstractResourceInstance) evalProvisionerConfig(ctx EvalContext, bo
config, _, configDiags := ctx.EvaluateBlock(body, schema, n.ResourceInstanceAddr().Resource, keyData)
diags = diags.Append(configDiags)
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(config, n.ModulePath()).InConfigBody(body, n.Addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(config, schema, n.ModulePath()).InConfigBody(body, n.Addr.String()))
config = marks.RemoveDeprecationMarks(config)
return config, diags
@@ -2531,7 +2531,7 @@ func (n *NodeAbstractResourceInstance) evalDestroyProvisionerConfig(ctx EvalCont
evalScope := ctx.EvaluationScope(n.ResourceInstanceAddr().Resource, nil, keyData)
config, evalDiags := evalScope.EvalSelfBlock(body, self, schema, keyData)
diags = diags.Append(evalDiags)
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(config, n.ModulePath()).InConfigBody(body, n.Addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(config, schema, n.ModulePath()).InConfigBody(body, n.Addr.String()))
config = marks.RemoveDeprecationMarks(config)
return config, diags
}

View file

@@ -76,7 +76,7 @@ func ephemeralResourceOpen(ctx EvalContext, inp ephemeralResourceInput) (*provid
if diags.HasErrors() {
return nil, diags
}
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, ctx.Path().Module()).InConfigBody(config.Config, inp.addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, ctx.Path().Module()).InConfigBody(config.Config, inp.addr.String()))
if diags.HasErrors() {
return nil, diags
}

View file

@@ -638,7 +638,7 @@ func (n *NodePlannableResourceInstance) importState(ctx EvalContext, addr addrs.
diags = diags.Append(configDiags)
return nil, deferred, diags
}
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, n.ModulePath()).InConfigBody(n.Config.Config, absAddr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, n.ModulePath()).InConfigBody(n.Config.Config, absAddr.String()))
if diags.HasErrors() {
return nil, deferred, diags
}

View file

@@ -51,7 +51,7 @@ func (n *NodePlannableResourceInstance) listResourceExecute(ctx EvalContext) (di
return diags
}
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(blockVal, n.ModulePath()).InConfigBody(config.Config, n.Addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(blockVal, schema.FullSchema, n.ModulePath()).InConfigBody(config.Config, n.Addr.String()))
if diags.HasErrors() {
return diags
}

View file

@@ -200,7 +200,7 @@ func (n *nodePlannablePartialExpandedResource) managedResourceExecute(ctx EvalCo
return &change, diags
}
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, n.ResourceAddr().Module).InConfigBody(n.config.Config, n.addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, n.ResourceAddr().Module).InConfigBody(n.config.Config, n.addr.String()))
if diags.HasErrors() {
return &change, diags
}
@@ -359,7 +359,7 @@ func (n *nodePlannablePartialExpandedResource) dataResourceExecute(ctx EvalConte
return &change, diags
}
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, n.ResourceAddr().Module).InConfigBody(n.config.Config, n.addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, n.ResourceAddr().Module).InConfigBody(n.config.Config, n.addr.String()))
if diags.HasErrors() {
return &change, diags
}

View file

@@ -144,7 +144,7 @@ func (n *NodeValidatableResource) evaluateBlock(ctx EvalContext, body hcl.Body,
keyData, selfAddr := n.stubRepetitionData(n.Config.Count != nil, n.Config.ForEach != nil)
val, hclBody, diags := ctx.EvaluateBlock(body, schema, selfAddr, keyData)
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(val, n.Addr.Module).InConfigBody(body, n.Addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(val, schema, n.Addr.Module).InConfigBody(body, n.Addr.String()))
return marks.RemoveDeprecationMarks(val), hclBody, diags
}
@@ -360,7 +360,7 @@ func (n *NodeValidatableResource) validateResource(ctx EvalContext) tfdiags.Diag
diags = diags.Append(
validateResourceForbiddenEphemeralValues(ctx, configVal, schema.Body).InConfigBody(n.Config.Config, n.Addr.String()),
)
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()))
if n.Config.Managed != nil { // can be nil only in tests with poorly-configured mocks
for _, traversal := range n.Config.Managed.IgnoreChanges {
@@ -440,7 +440,7 @@ func (n *NodeValidatableResource) validateResource(ctx EvalContext) tfdiags.Diag
diags = diags.Append(
validateResourceForbiddenEphemeralValues(ctx, configVal, schema.Body).InConfigBody(n.Config.Config, n.Addr.String()),
)
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, n.ModulePath()))
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, n.ModulePath()))
// Use unmarked value for validate request
unmarkedConfigVal, _ := configVal.UnmarkDeep()
@@ -469,7 +469,7 @@ func (n *NodeValidatableResource) validateResource(ctx EvalContext) tfdiags.Diag
return diags
}
diags = diags.Append(
ctx.Deprecations().ValidateAsConfig(configVal, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()),
ctx.Deprecations().ValidateAsConfig(configVal, schema.Body, n.ModulePath()).InConfigBody(n.Config.Config, n.Addr.String()),
)
// Use unmarked value for validate request
unmarkedConfigVal, _ := configVal.UnmarkDeep()
@@ -479,7 +479,6 @@ func (n *NodeValidatableResource) validateResource(ctx EvalContext) tfdiags.Diag
}
resp := provider.ValidateEphemeralResourceConfig(req)
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(configVal, n.ModulePath()))
diags = diags.Append(resp.Diagnostics.InConfigBody(n.Config.Config, n.Addr.String()))
case addrs.ListResourceMode:
schema := providerSchema.SchemaForListResourceType(n.Config.Type)
@@ -503,7 +502,8 @@ func (n *NodeValidatableResource) validateResource(ctx EvalContext) tfdiags.Diag
if valDiags.HasErrors() {
return diags
}
diags = diags.Append(ctx.Deprecations().ValidateAsConfig(blockVal, n.ModulePath()))
deprecationDiags := ctx.Deprecations().ValidateAsConfig(blockVal, schema.FullSchema, n.ModulePath())
diags = diags.Append(deprecationDiags.InConfigBody(n.Config.Config, n.Addr.String()))
}
if n.Config.List.Limit != nil {

View file

@@ -156,7 +156,6 @@ func (d *attributeDiagnostic) ElaborateFromConfigBody(body hcl.Body, addr string
// presence of errors where performance isn't a concern.
traverse := d.attrPath[:]
final := d.attrPath[len(d.attrPath)-1]
// Index should never be the first step
// as indexing of top blocks (such as resources & data sources)
@@ -167,54 +166,19 @@
return &ret
}
// Process index separately
idxStep, hasIdx := final.(cty.IndexStep)
if hasIdx {
final = d.attrPath[len(d.attrPath)-2]
traverse = d.attrPath[:len(d.attrPath)-1]
}
// If we have more than one step after removing index
// then we'll first try to traverse to a child body
// corresponding to the requested path.
remaining := traverse
if len(traverse) > 1 {
body = traversePathSteps(traverse, body)
body, remaining = getDeepestBodyFromPath(body, traverse)
}
// Default is to indicate a missing item in the deepest body we reached
// while traversing.
subject := SourceRangeFromHCL(body.MissingItemRange())
subject := SourceRangeFromHCL(rangeOfDeepestAttributeValueFromPath(body, remaining))
ret.subject = &subject
// Once we get here, "final" should be a GetAttr step that maps to an
// attribute in our current body.
finalStep, isAttr := final.(cty.GetAttrStep)
if !isAttr {
return &ret
}
content, _, contentDiags := body.PartialContent(&hcl.BodySchema{
Attributes: []hcl.AttributeSchema{
{
Name: finalStep.Name,
Required: true,
},
},
})
if contentDiags.HasErrors() {
return &ret
}
if attr, ok := content.Attributes[finalStep.Name]; ok {
hclRange := attr.Expr.Range()
if hasIdx {
// Try to be more precise by finding index range
hclRange = hclRangeFromIndexStepAndAttribute(idxStep, attr)
}
subject = SourceRangeFromHCL(hclRange)
ret.subject = &subject
}
return &ret
}
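
The reworked ElaborateFromConfigBody therefore splits a deep attribute path into two phases: getDeepestBodyFromPath walks as many block steps as it can and hands back the unconsumed steps, and rangeOfDeepestAttributeValueFromPath resolves those remaining steps inside the attribute's expression. A hedged trace of the two calls above, using the path parent.nested_map["third_key"][1].value[1] that the updated tests exercise:

// Illustrative trace only; both helpers are defined further down in this file.
// getDeepestBodyFromPath consumes the leading "parent" block step and returns
// that block's body together with the steps it could not consume.
body, remaining := getDeepestBodyFromPath(body, traverse)
// remaining is now nested_map["third_key"][1].value[1]

// rangeOfDeepestAttributeValueFromPath looks up nested_map in that body and
// then descends through the expression with hcl.ExprList / hcl.ExprMap,
// returning the range of the deepest expression it can still resolve.
subject := SourceRangeFromHCL(rangeOfDeepestAttributeValueFromPath(body, remaining))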
@@ -243,13 +207,15 @@ func (d *attributeDiagnostic) Equals(otherDiag ComparableDiagnostic) bool {
return sourceRangeEquals(d.subject, od.subject)
}
func traversePathSteps(traverse []cty.PathStep, body hcl.Body) hcl.Body {
func getDeepestBodyFromPath(body hcl.Body, traverse []cty.PathStep) (hcl.Body, []cty.PathStep) {
lastProcessedIndex := -1
LOOP:
for i := 0; i < len(traverse); i++ {
step := traverse[i]
switch tStep := step.(type) {
case cty.GetAttrStep:
var next cty.PathStep
if i < (len(traverse) - 1) {
next = traverse[i+1]
@@ -281,7 +247,7 @@ func traversePathSteps(traverse []cty.PathStep, body hcl.Body) hcl.Body {
},
})
if contentDiags.HasErrors() {
return body
break LOOP
}
filtered := make([]*hcl.Block, 0, len(content.Blocks))
for _, block := range content.Blocks {
@@ -291,22 +257,24 @@ func traversePathSteps(traverse []cty.PathStep, body hcl.Body) hcl.Body {
}
if len(filtered) == 0 {
// Step doesn't refer to a block
continue
break LOOP
}
switch indexType {
case cty.NilType: // no index at all
if len(filtered) != 1 {
return body
break LOOP
}
body = filtered[0].Body
lastProcessedIndex = i
case cty.Number:
var idx int
err := gocty.FromCtyValue(indexVal, &idx)
if err != nil || idx >= len(filtered) {
return body
break LOOP
}
body = filtered[idx].Body
lastProcessedIndex = i
case cty.String:
key := indexVal.AsString()
var block *hcl.Block
@@ -319,56 +287,109 @@ func traversePathSteps(traverse []cty.PathStep, body hcl.Body) hcl.Body {
if block == nil {
// No block with this key, so we'll just indicate a
// missing item in the containing block.
return body
break LOOP
}
body = block.Body
lastProcessedIndex = i
default:
// Should never happen, because only string and numeric indices
// are supported by cty collections.
return body
break LOOP
}
default:
// For any other kind of step, we'll just return our current body
// as the subject and accept that this is a little inaccurate.
return body
break LOOP
}
}
return body
return body, traverse[lastProcessedIndex+1:]
}
func hclRangeFromIndexStepAndAttribute(idxStep cty.IndexStep, attr *hcl.Attribute) hcl.Range {
switch idxStep.Key.Type() {
case cty.Number:
var idx int
err := gocty.FromCtyValue(idxStep.Key, &idx)
items, diags := hcl.ExprList(attr.Expr)
func rangeOfDeepestAttributeValueFromPath(body hcl.Body, traverse cty.Path) hcl.Range {
if len(traverse) == 0 {
return body.MissingItemRange()
}
// First we use the first traversal step to look up the matching attribute
// expression in the body.
current, rest := traverse[0], traverse[1:]
currentGetAttr, ok := current.(cty.GetAttrStep)
if !ok {
// If the first remaining step is not an attribute access, something went wrong.
// We can't do anything better than returning the body's missing item range.
return body.MissingItemRange()
}
content, _, contentDiags := body.PartialContent(&hcl.BodySchema{
Attributes: []hcl.AttributeSchema{
{
Name: currentGetAttr.Name,
Required: true,
},
},
})
if contentDiags.HasErrors() {
return body.MissingItemRange()
}
attr, ok := content.Attributes[currentGetAttr.Name]
if !ok {
// We could not find the attribute; this should have emitted a diagnostic above, but fall back just in case.
return body.MissingItemRange()
}
// Now we need to loop through the rest of the path and progressively introspect
// the HCL expression.
currentExpr := attr.Expr
STEP_ITERATION:
for _, step := range rest {
// We treat cty.IndexStep[type=String] and cty.GetAttrStep the same, so we just
// need to deal with list indexes first
if idxStep, ok := step.(cty.IndexStep); ok && idxStep.Key.Type() == cty.Number {
var idx int
err := gocty.FromCtyValue(idxStep.Key, &idx)
items, diags := hcl.ExprList(currentExpr)
if diags.HasErrors() {
return currentExpr.Range()
}
if err != nil || idx >= len(items) {
return attr.NameRange
}
currentExpr = items[idx]
continue STEP_ITERATION
}
var stepKey string
switch s := step.(type) {
case cty.GetAttrStep:
stepKey = s.Name
case cty.IndexStep:
stepKey = s.Key.AsString()
default: // should not happen
return currentExpr.Range()
}
pairs, diags := hcl.ExprMap(currentExpr)
if diags.HasErrors() {
return attr.Expr.Range()
return currentExpr.Range()
}
if err != nil || idx >= len(items) {
return attr.NameRange
}
return items[idx].Range()
case cty.String:
pairs, diags := hcl.ExprMap(attr.Expr)
if diags.HasErrors() {
return attr.Expr.Range()
}
stepKey := idxStep.Key.AsString()
for _, kvPair := range pairs {
key, diags := kvPair.Key.Value(nil)
if diags.HasErrors() {
return attr.Expr.Range()
return currentExpr.Range()
}
if key.AsString() == stepKey {
startRng := kvPair.Value.StartRange()
return startRng
currentExpr = kvPair.Value
continue STEP_ITERATION
}
}
// If we could not find the item, return early.
return attr.NameRange
}
return attr.Expr.Range()
return currentExpr.Range()
}
func (d *attributeDiagnostic) Source() Source {

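For readers less familiar with the hcl helpers used above, the following standalone sketch (not part of this commit; package layout and variable names are illustrative) shows the same descent technique applied to the third_key value from the test fixture, using hcl.ExprList for a numeric index step and hcl.ExprMap for a keyed step:

package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/hclsyntax"
)

func main() {
	src := `third_key = [{ num = 1 }, { num = 2, value = { another_level = ["no", "yes"] } }]`
	f, diags := hclsyntax.ParseConfig([]byte(src), "test.tf", hcl.InitialPos)
	if diags.HasErrors() {
		panic(diags.Error())
	}

	attrs, _ := f.Body.JustAttributes()
	expr := attrs["third_key"].Expr

	// Step [1]: a numeric index descends into the tuple constructor.
	items, listDiags := hcl.ExprList(expr)
	if !listDiags.HasErrors() {
		expr = items[1]
	}

	// Step .value: attribute and string-index steps are both resolved by
	// treating the object constructor as a map of key/value expressions.
	pairs, mapDiags := hcl.ExprMap(expr)
	if !mapDiags.HasErrors() {
		for _, kv := range pairs {
			if key, d := kv.Key.Value(nil); !d.HasErrors() && key.AsString() == "value" {
				expr = kv.Value
			}
		}
	}

	// The range of the deepest expression we reached is what ends up in the
	// diagnostic's subject.
	fmt.Println(expr.Range())
}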
View file

@@ -38,6 +38,7 @@ parent {
nested_map = {
first_key = "first_value"
second_key = "2nd value"
third_key = [{ num = 1}, {num = 2, value = { another_level = ["no", "yes"]}}]
}
}
tuple_of_one = ["one"]
@@ -323,8 +324,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 22, Column: 19, Byte: 266},
End: SourcePos{Line: 22, Column: 30, Byte: 277},
Start: SourcePos{Line: 22, Column: 18, Byte: 265},
End: SourcePos{Line: 22, Column: 31, Byte: 278},
},
},
{
@@ -340,8 +341,28 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 23, Column: 19, Byte: 297},
End: SourcePos{Line: 23, Column: 28, Byte: 306},
Start: SourcePos{Line: 23, Column: 18, Byte: 296},
End: SourcePos{Line: 23, Column: 29, Byte: 307},
},
},
{
AttributeValue(
Error,
"parent.nested_map.third_key[1].value[1]",
"detail",
cty.Path{
cty.GetAttrStep{Name: "parent"},
cty.GetAttrStep{Name: "nested_map"},
cty.IndexStep{Key: cty.StringVal("third_key")},
cty.IndexStep{Key: cty.NumberIntVal(1)},
cty.GetAttrStep{Name: "value"},
cty.IndexStep{Key: cty.NumberIntVal(1)},
},
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 24, Column: 48, Byte: 355},
End: SourcePos{Line: 24, Column: 80, Byte: 387},
},
},
{
@@ -375,8 +396,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 26, Column: 17, Byte: 330},
End: SourcePos{Line: 26, Column: 22, Byte: 335},
Start: SourcePos{Line: 27, Column: 17, Byte: 412},
End: SourcePos{Line: 27, Column: 22, Byte: 417},
},
},
{
@@ -391,8 +412,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 27, Column: 17, Byte: 353},
End: SourcePos{Line: 27, Column: 24, Byte: 360},
Start: SourcePos{Line: 28, Column: 17, Byte: 435},
End: SourcePos{Line: 28, Column: 24, Byte: 442},
},
},
{
@@ -407,8 +428,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 27, Column: 26, Byte: 362},
End: SourcePos{Line: 27, Column: 33, Byte: 369},
Start: SourcePos{Line: 28, Column: 26, Byte: 444},
End: SourcePos{Line: 28, Column: 33, Byte: 451},
},
},
{
@@ -423,8 +444,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 26, Column: 1, Byte: 314},
End: SourcePos{Line: 26, Column: 13, Byte: 326},
Start: SourcePos{Line: 27, Column: 1, Byte: 396},
End: SourcePos{Line: 27, Column: 13, Byte: 408},
},
},
{
@@ -440,8 +461,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 27, Column: 1, Byte: 337},
End: SourcePos{Line: 27, Column: 13, Byte: 349},
Start: SourcePos{Line: 28, Column: 1, Byte: 419},
End: SourcePos{Line: 28, Column: 13, Byte: 431},
},
},
{
@@ -456,8 +477,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 29, Column: 13, Byte: 396},
End: SourcePos{Line: 29, Column: 16, Byte: 399},
Start: SourcePos{Line: 30, Column: 12, Byte: 477},
End: SourcePos{Line: 30, Column: 17, Byte: 482},
},
},
{
@@ -472,8 +493,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 30, Column: 13, Byte: 413},
End: SourcePos{Line: 30, Column: 16, Byte: 416},
Start: SourcePos{Line: 31, Column: 12, Byte: 494},
End: SourcePos{Line: 31, Column: 17, Byte: 499},
},
},
{
@@ -488,8 +509,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 28, Column: 1, Byte: 371},
End: SourcePos{Line: 28, Column: 9, Byte: 379},
Start: SourcePos{Line: 29, Column: 1, Byte: 453},
End: SourcePos{Line: 29, Column: 9, Byte: 461},
},
},
{
@@ -503,8 +524,8 @@ simple_attr = "val"
),
&SourceRange{
Filename: "test.tf",
Start: SourcePos{Line: 32, Column: 15, Byte: 434},
End: SourcePos{Line: 32, Column: 20, Byte: 439},
Start: SourcePos{Line: 33, Column: 15, Byte: 516},
End: SourcePos{Line: 33, Column: 20, Byte: 521},
},
},
{