1package hclsyntax
2
3import (
4	"bytes"
5	"fmt"
6	"strconv"
7	"unicode/utf8"
8
9	"github.com/apparentlymart/go-textseg/textseg"
10	"github.com/hashicorp/hcl2/hcl"
11	"github.com/zclconf/go-cty/cty"
12)
13
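// parser wraps a peeker over the token stream and tracks whether error
// recovery has been attempted, so that subsequent "bad token" diagnostics
// can be suppressed once the peeker may no longer be in a sensible position.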
14type parser struct {
15	*peeker
16
	// recovery is set to true if any error recovery is attempted. The parser
	// can use this to reduce error noise by suppressing "bad token" errors
	// while in recovery mode, on the assumption that the recovery heuristics
	// have failed and left the peeker in the wrong place.
21	recovery bool
22}
23
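// ParseBody parses a sequence of attribute and block definitions until the
// given end token type is found, consuming the end token itself. Duplicate
// attribute names produce error diagnostics, and unexpected tokens trigger
// recovery to just after the end of the body.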
24func (p *parser) ParseBody(end TokenType) (*Body, hcl.Diagnostics) {
25	attrs := Attributes{}
26	blocks := Blocks{}
27	var diags hcl.Diagnostics
28
29	startRange := p.PrevRange()
30	var endRange hcl.Range
31
32Token:
33	for {
34		next := p.Peek()
35		if next.Type == end {
36			endRange = p.NextRange()
37			p.Read()
38			break Token
39		}
40
41		switch next.Type {
42		case TokenNewline:
43			p.Read()
44			continue
45		case TokenIdent:
46			item, itemDiags := p.ParseBodyItem()
47			diags = append(diags, itemDiags...)
48			switch titem := item.(type) {
49			case *Block:
50				blocks = append(blocks, titem)
51			case *Attribute:
52				if existing, exists := attrs[titem.Name]; exists {
53					diags = append(diags, &hcl.Diagnostic{
54						Severity: hcl.DiagError,
55						Summary:  "Attribute redefined",
56						Detail: fmt.Sprintf(
57							"The argument %q was already set at %s. Each argument may be set only once.",
58							titem.Name, existing.NameRange.String(),
59						),
60						Subject: &titem.NameRange,
61					})
62				} else {
63					attrs[titem.Name] = titem
64				}
65			default:
66				// This should never happen for valid input, but may if a
67				// syntax error was detected in ParseBodyItem that prevented
68				// it from even producing a partially-broken item. In that
69				// case, it would've left at least one error in the diagnostics
70				// slice we already dealt with above.
71				//
72				// We'll assume ParseBodyItem attempted recovery to leave
73				// us in a reasonable position to try parsing the next item.
74				continue
75			}
76		default:
77			bad := p.Read()
78			if !p.recovery {
79				if bad.Type == TokenOQuote {
80					diags = append(diags, &hcl.Diagnostic{
81						Severity: hcl.DiagError,
82						Summary:  "Invalid argument name",
83						Detail:   "Argument names must not be quoted.",
84						Subject:  &bad.Range,
85					})
86				} else {
87					diags = append(diags, &hcl.Diagnostic{
88						Severity: hcl.DiagError,
89						Summary:  "Argument or block definition required",
90						Detail:   "An argument or block definition is required here.",
91						Subject:  &bad.Range,
92					})
93				}
94			}
95			endRange = p.PrevRange() // arbitrary, but somewhere inside the body means better diagnostics
96
97			p.recover(end) // attempt to recover to the token after the end of this body
98			break Token
99		}
100	}
101
102	return &Body{
103		Attributes: attrs,
104		Blocks:     blocks,
105
106		SrcRange: hcl.RangeBetween(startRange, endRange),
107		EndRange: hcl.Range{
108			Filename: endRange.Filename,
109			Start:    endRange.End,
110			End:      endRange.End,
111		},
112	}, diags
113}
114
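// ParseBodyItem parses a single attribute or block definition, returning
// either an *Attribute or a *Block node. If the input is too malformed to
// produce even a partial item, it attempts to recover to the start of the
// next item and returns a nil node with error diagnostics.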
115func (p *parser) ParseBodyItem() (Node, hcl.Diagnostics) {
116	ident := p.Read()
117	if ident.Type != TokenIdent {
118		p.recoverAfterBodyItem()
119		return nil, hcl.Diagnostics{
120			{
121				Severity: hcl.DiagError,
122				Summary:  "Argument or block definition required",
123				Detail:   "An argument or block definition is required here.",
124				Subject:  &ident.Range,
125			},
126		}
127	}
128
129	next := p.Peek()
130
131	switch next.Type {
132	case TokenEqual:
133		return p.finishParsingBodyAttribute(ident, false)
134	case TokenOQuote, TokenOBrace, TokenIdent:
135		return p.finishParsingBodyBlock(ident)
136	default:
137		p.recoverAfterBodyItem()
138		return nil, hcl.Diagnostics{
139			{
140				Severity: hcl.DiagError,
141				Summary:  "Argument or block definition required",
142				Detail:   "An argument or block definition is required here. To set an argument, use the equals sign \"=\" to introduce the argument value.",
143				Subject:  &ident.Range,
144			},
145		}
146	}
149}
150
// parseSingleAttrBody is a weird variant of ParseBody that deals with the
// body of a nested block containing only one attribute value all on a single
// line, like "foo { bar = baz }". It expects to find a single attribute item
// immediately followed by the end token type with no intervening newlines.
155func (p *parser) parseSingleAttrBody(end TokenType) (*Body, hcl.Diagnostics) {
156	ident := p.Read()
157	if ident.Type != TokenIdent {
158		p.recoverAfterBodyItem()
159		return nil, hcl.Diagnostics{
160			{
161				Severity: hcl.DiagError,
162				Summary:  "Argument or block definition required",
163				Detail:   "An argument or block definition is required here.",
164				Subject:  &ident.Range,
165			},
166		}
167	}
168
169	var attr *Attribute
170	var diags hcl.Diagnostics
171
172	next := p.Peek()
173
174	switch next.Type {
175	case TokenEqual:
176		node, attrDiags := p.finishParsingBodyAttribute(ident, true)
177		diags = append(diags, attrDiags...)
178		attr = node.(*Attribute)
179	case TokenOQuote, TokenOBrace, TokenIdent:
180		p.recoverAfterBodyItem()
181		return nil, hcl.Diagnostics{
182			{
183				Severity: hcl.DiagError,
184				Summary:  "Argument definition required",
185				Detail:   fmt.Sprintf("A single-line block definition can contain only a single argument. If you meant to define argument %q, use an equals sign to assign it a value. To define a nested block, place it on a line of its own within its parent block.", ident.Bytes),
186				Subject:  hcl.RangeBetween(ident.Range, next.Range).Ptr(),
187			},
188		}
189	default:
190		p.recoverAfterBodyItem()
191		return nil, hcl.Diagnostics{
192			{
193				Severity: hcl.DiagError,
194				Summary:  "Argument or block definition required",
195				Detail:   "An argument or block definition is required here. To set an argument, use the equals sign \"=\" to introduce the argument value.",
196				Subject:  &ident.Range,
197			},
198		}
199	}
200
201	return &Body{
202		Attributes: Attributes{
203			string(ident.Bytes): attr,
204		},
205
206		SrcRange: attr.SrcRange,
207		EndRange: hcl.Range{
208			Filename: attr.SrcRange.Filename,
209			Start:    attr.SrcRange.End,
210			End:      attr.SrcRange.End,
211		},
212	}, diags
213
214}
215
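// finishParsingBodyAttribute parses the remainder of an attribute definition
// assuming the name identifier has already been read and the peeker is
// pointing at the equals sign. When singleLine is true the trailing newline
// requirement is skipped, for use by parseSingleAttrBody.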
216func (p *parser) finishParsingBodyAttribute(ident Token, singleLine bool) (Node, hcl.Diagnostics) {
217	eqTok := p.Read() // eat equals token
218	if eqTok.Type != TokenEqual {
219		// should never happen if caller behaves
220		panic("finishParsingBodyAttribute called with next not equals")
221	}
222
223	var endRange hcl.Range
224
225	expr, diags := p.ParseExpression()
226	if p.recovery && diags.HasErrors() {
227		// recovery within expressions tends to be tricky, so we've probably
228		// landed somewhere weird. We'll try to reset to the start of a body
229		// item so parsing can continue.
230		endRange = p.PrevRange()
231		p.recoverAfterBodyItem()
232	} else {
233		endRange = p.PrevRange()
234		if !singleLine {
235			end := p.Peek()
236			if end.Type != TokenNewline && end.Type != TokenEOF {
237				if !p.recovery {
238					summary := "Missing newline after argument"
239					detail := "An argument definition must end with a newline."
240
241					if end.Type == TokenComma {
242						summary = "Unexpected comma after argument"
243						detail = "Argument definitions must be separated by newlines, not commas. " + detail
244					}
245
246					diags = append(diags, &hcl.Diagnostic{
247						Severity: hcl.DiagError,
248						Summary:  summary,
249						Detail:   detail,
250						Subject:  &end.Range,
251						Context:  hcl.RangeBetween(ident.Range, end.Range).Ptr(),
252					})
253				}
254				endRange = p.PrevRange()
255				p.recoverAfterBodyItem()
256			} else {
257				endRange = p.PrevRange()
258				p.Read() // eat newline
259			}
260		}
261	}
262
263	return &Attribute{
264		Name: string(ident.Bytes),
265		Expr: expr,
266
267		SrcRange:    hcl.RangeBetween(ident.Range, endRange),
268		NameRange:   ident.Range,
269		EqualsRange: eqTok.Range,
270	}, diags
271}
272
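// finishParsingBodyBlock parses the remainder of a block definition assuming
// the block type identifier has already been read: any quoted or bare
// labels, the brace-delimited body (or its single-line, single-attribute
// variant), and the trailing newline.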
273func (p *parser) finishParsingBodyBlock(ident Token) (Node, hcl.Diagnostics) {
274	var blockType = string(ident.Bytes)
275	var diags hcl.Diagnostics
276	var labels []string
277	var labelRanges []hcl.Range
278
279	var oBrace Token
280
281Token:
282	for {
283		tok := p.Peek()
284
285		switch tok.Type {
286
287		case TokenOBrace:
288			oBrace = p.Read()
289			break Token
290
291		case TokenOQuote:
292			label, labelRange, labelDiags := p.parseQuotedStringLiteral()
293			diags = append(diags, labelDiags...)
294			labels = append(labels, label)
295			labelRanges = append(labelRanges, labelRange)
			// parseQuotedStringLiteral recovers up to the closing quote
			// if it encounters problems, so we can continue looking for
			// more labels and eventually the block body.
299
300		case TokenIdent:
301			tok = p.Read() // eat token
302			label, labelRange := string(tok.Bytes), tok.Range
303			labels = append(labels, label)
304			labelRanges = append(labelRanges, labelRange)
305
306		default:
307			switch tok.Type {
308			case TokenEqual:
309				diags = append(diags, &hcl.Diagnostic{
310					Severity: hcl.DiagError,
311					Summary:  "Invalid block definition",
312					Detail:   "The equals sign \"=\" indicates an argument definition, and must not be used when defining a block.",
313					Subject:  &tok.Range,
314					Context:  hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
315				})
316			case TokenNewline:
317				diags = append(diags, &hcl.Diagnostic{
318					Severity: hcl.DiagError,
319					Summary:  "Invalid block definition",
320					Detail:   "A block definition must have block content delimited by \"{\" and \"}\", starting on the same line as the block header.",
321					Subject:  &tok.Range,
322					Context:  hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
323				})
324			default:
325				if !p.recovery {
326					diags = append(diags, &hcl.Diagnostic{
327						Severity: hcl.DiagError,
328						Summary:  "Invalid block definition",
329						Detail:   "Either a quoted string block label or an opening brace (\"{\") is expected here.",
330						Subject:  &tok.Range,
331						Context:  hcl.RangeBetween(ident.Range, tok.Range).Ptr(),
332					})
333				}
334			}
335
336			p.recoverAfterBodyItem()
337
338			return &Block{
339				Type:   blockType,
340				Labels: labels,
341				Body: &Body{
342					SrcRange: ident.Range,
343					EndRange: ident.Range,
344				},
345
346				TypeRange:       ident.Range,
347				LabelRanges:     labelRanges,
348				OpenBraceRange:  ident.Range, // placeholder
349				CloseBraceRange: ident.Range, // placeholder
350			}, diags
351		}
352	}
353
354	// Once we fall out here, the peeker is pointed just after our opening
355	// brace, so we can begin our nested body parsing.
356	var body *Body
357	var bodyDiags hcl.Diagnostics
358	switch p.Peek().Type {
359	case TokenNewline, TokenEOF, TokenCBrace:
360		body, bodyDiags = p.ParseBody(TokenCBrace)
361	default:
362		// Special one-line, single-attribute block parsing mode.
363		body, bodyDiags = p.parseSingleAttrBody(TokenCBrace)
364		switch p.Peek().Type {
365		case TokenCBrace:
366			p.Read() // the happy path - just consume the closing brace
367		case TokenComma:
368			// User seems to be trying to use the object-constructor
369			// comma-separated style, which isn't permitted for blocks.
370			diags = append(diags, &hcl.Diagnostic{
371				Severity: hcl.DiagError,
372				Summary:  "Invalid single-argument block definition",
373				Detail:   "Single-line block syntax can include only one argument definition. To define multiple arguments, use the multi-line block syntax with one argument definition per line.",
374				Subject:  p.Peek().Range.Ptr(),
375			})
376			p.recover(TokenCBrace)
377		case TokenNewline:
378			// We don't allow weird mixtures of single and multi-line syntax.
379			diags = append(diags, &hcl.Diagnostic{
380				Severity: hcl.DiagError,
381				Summary:  "Invalid single-argument block definition",
382				Detail:   "An argument definition on the same line as its containing block creates a single-line block definition, which must also be closed on the same line. Place the block's closing brace immediately after the argument definition.",
383				Subject:  p.Peek().Range.Ptr(),
384			})
385			p.recover(TokenCBrace)
386		default:
387			// Some other weird thing is going on. Since we can't guess a likely
388			// user intent for this one, we'll skip it if we're already in
389			// recovery mode.
390			if !p.recovery {
391				diags = append(diags, &hcl.Diagnostic{
392					Severity: hcl.DiagError,
393					Summary:  "Invalid single-argument block definition",
394					Detail:   "A single-line block definition must end with a closing brace immediately after its single argument definition.",
395					Subject:  p.Peek().Range.Ptr(),
396				})
397			}
398			p.recover(TokenCBrace)
399		}
400	}
401	diags = append(diags, bodyDiags...)
402	cBraceRange := p.PrevRange()
403
404	eol := p.Peek()
405	if eol.Type == TokenNewline || eol.Type == TokenEOF {
406		p.Read() // eat newline
407	} else {
408		if !p.recovery {
409			diags = append(diags, &hcl.Diagnostic{
410				Severity: hcl.DiagError,
411				Summary:  "Missing newline after block definition",
412				Detail:   "A block definition must end with a newline.",
413				Subject:  &eol.Range,
414				Context:  hcl.RangeBetween(ident.Range, eol.Range).Ptr(),
415			})
416		}
417		p.recoverAfterBodyItem()
418	}
419
420	// We must never produce a nil body, since the caller may attempt to
421	// do analysis of a partial result when there's an error, so we'll
422	// insert a placeholder if we otherwise failed to produce a valid
423	// body due to one of the syntax error paths above.
424	if body == nil && diags.HasErrors() {
425		body = &Body{
426			SrcRange: hcl.RangeBetween(oBrace.Range, cBraceRange),
427			EndRange: cBraceRange,
428		}
429	}
430
431	return &Block{
432		Type:   blockType,
433		Labels: labels,
434		Body:   body,
435
436		TypeRange:       ident.Range,
437		LabelRanges:     labelRanges,
438		OpenBraceRange:  oBrace.Range,
439		CloseBraceRange: cBraceRange,
440	}, diags
441}
442
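// ParseExpression parses a single expression starting at the current peeker
// position, beginning with the lowest-precedence construct: the conditional
// (ternary) operator.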
443func (p *parser) ParseExpression() (Expression, hcl.Diagnostics) {
444	return p.parseTernaryConditional()
445}
446
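// parseTernaryConditional parses an expression that may be a conditional of
// the form "cond ? trueResult : falseResult", falling back to returning just
// the binary-operator expression when no question mark follows it.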
447func (p *parser) parseTernaryConditional() (Expression, hcl.Diagnostics) {
448	// The ternary conditional operator (.. ? .. : ..) behaves somewhat
449	// like a binary operator except that the "symbol" is itself
450	// an expression enclosed in two punctuation characters.
451	// The middle expression is parsed as if the ? and : symbols
452	// were parentheses. The "rhs" (the "false expression") is then
453	// treated right-associatively so it behaves similarly to the
454	// middle in terms of precedence.
455
456	startRange := p.NextRange()
457	var condExpr, trueExpr, falseExpr Expression
458	var diags hcl.Diagnostics
459
460	condExpr, condDiags := p.parseBinaryOps(binaryOps)
461	diags = append(diags, condDiags...)
462	if p.recovery && condDiags.HasErrors() {
463		return condExpr, diags
464	}
465
466	questionMark := p.Peek()
467	if questionMark.Type != TokenQuestion {
468		return condExpr, diags
469	}
470
471	p.Read() // eat question mark
472
473	trueExpr, trueDiags := p.ParseExpression()
474	diags = append(diags, trueDiags...)
475	if p.recovery && trueDiags.HasErrors() {
476		return condExpr, diags
477	}
478
479	colon := p.Peek()
480	if colon.Type != TokenColon {
481		diags = append(diags, &hcl.Diagnostic{
482			Severity: hcl.DiagError,
483			Summary:  "Missing false expression in conditional",
484			Detail:   "The conditional operator (...?...:...) requires a false expression, delimited by a colon.",
485			Subject:  &colon.Range,
486			Context:  hcl.RangeBetween(startRange, colon.Range).Ptr(),
487		})
488		return condExpr, diags
489	}
490
491	p.Read() // eat colon
492
493	falseExpr, falseDiags := p.ParseExpression()
494	diags = append(diags, falseDiags...)
495	if p.recovery && falseDiags.HasErrors() {
496		return condExpr, diags
497	}
498
499	return &ConditionalExpr{
500		Condition:   condExpr,
501		TrueResult:  trueExpr,
502		FalseResult: falseExpr,
503
504		SrcRange: hcl.RangeBetween(startRange, falseExpr.Range()),
505	}, diags
506}
507
// parseBinaryOps calls itself recursively to work through all of the
// operator precedence groups, and then eventually calls
// parseExpressionWithTraversals for each operand.
511func (p *parser) parseBinaryOps(ops []map[TokenType]*Operation) (Expression, hcl.Diagnostics) {
512	if len(ops) == 0 {
513		// We've run out of operators, so now we'll just try to parse a term.
514		return p.parseExpressionWithTraversals()
515	}
516
517	thisLevel := ops[0]
518	remaining := ops[1:]
519
520	var lhs, rhs Expression
521	var operation *Operation
522	var diags hcl.Diagnostics
523
524	// Parse a term that might be the first operand of a binary
525	// operation or it might just be a standalone term.
526	// We won't know until we've parsed it and can look ahead
527	// to see if there's an operator token for this level.
528	lhs, lhsDiags := p.parseBinaryOps(remaining)
529	diags = append(diags, lhsDiags...)
530	if p.recovery && lhsDiags.HasErrors() {
531		return lhs, diags
532	}
533
534	// We'll keep eating up operators until we run out, so that operators
535	// with the same precedence will combine in a left-associative manner:
536	// a+b+c => (a+b)+c, not a+(b+c)
537	//
538	// Should we later want to have right-associative operators, a way
539	// to achieve that would be to call back up to ParseExpression here
540	// instead of iteratively parsing only the remaining operators.
541	for {
542		next := p.Peek()
543		var newOp *Operation
544		var ok bool
545		if newOp, ok = thisLevel[next.Type]; !ok {
546			break
547		}
548
549		// Are we extending an expression started on the previous iteration?
550		if operation != nil {
551			lhs = &BinaryOpExpr{
552				LHS: lhs,
553				Op:  operation,
554				RHS: rhs,
555
556				SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()),
557			}
558		}
559
560		operation = newOp
561		p.Read() // eat operator token
562		var rhsDiags hcl.Diagnostics
563		rhs, rhsDiags = p.parseBinaryOps(remaining)
564		diags = append(diags, rhsDiags...)
565		if p.recovery && rhsDiags.HasErrors() {
566			return lhs, diags
567		}
568	}
569
570	if operation == nil {
571		return lhs, diags
572	}
573
574	return &BinaryOpExpr{
575		LHS: lhs,
576		Op:  operation,
577		RHS: rhs,
578
579		SrcRange: hcl.RangeBetween(lhs.Range(), rhs.Range()),
580	}, diags
581}
582
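// parseExpressionWithTraversals parses a single expression term followed by
// any traversal operations (attribute access, indexing, splats) applied
// to it.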
583func (p *parser) parseExpressionWithTraversals() (Expression, hcl.Diagnostics) {
584	term, diags := p.parseExpressionTerm()
585	ret, moreDiags := p.parseExpressionTraversals(term)
586	diags = append(diags, moreDiags...)
587	return ret, diags
588}
589
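// parseExpressionTraversals parses any traversal operations that follow the
// given expression, such as .attr attribute access, legacy .0 indexing,
// .* and [*] splats, and [key] index operations, returning the combined
// expression.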
590func (p *parser) parseExpressionTraversals(from Expression) (Expression, hcl.Diagnostics) {
591	var diags hcl.Diagnostics
592	ret := from
593
594Traversal:
595	for {
596		next := p.Peek()
597
598		switch next.Type {
599		case TokenDot:
600			// Attribute access or splat
601			dot := p.Read()
602			attrTok := p.Peek()
603
604			switch attrTok.Type {
605			case TokenIdent:
606				attrTok = p.Read() // eat token
607				name := string(attrTok.Bytes)
608				rng := hcl.RangeBetween(dot.Range, attrTok.Range)
609				step := hcl.TraverseAttr{
610					Name:     name,
611					SrcRange: rng,
612				}
613
614				ret = makeRelativeTraversal(ret, step, rng)
615
616			case TokenNumberLit:
617				// This is a weird form we inherited from HIL, allowing numbers
618				// to be used as attributes as a weird way of writing [n].
619				// This was never actually a first-class thing in HIL, but
620				// HIL tolerated sequences like .0. in its variable names and
621				// calling applications like Terraform exploited that to
622				// introduce indexing syntax where none existed.
623				numTok := p.Read() // eat token
624				attrTok = numTok
625
626				// This syntax is ambiguous if multiple indices are used in
627				// succession, like foo.0.1.baz: that actually parses as
628				// a fractional number 0.1. Since we're only supporting this
629				// syntax for compatibility with legacy Terraform
630				// configurations, and Terraform does not tend to have lists
631				// of lists, we'll choose to reject that here with a helpful
632				// error message, rather than failing later because the index
633				// isn't a whole number.
634				if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
635					first := numTok.Bytes[:dotIdx]
636					second := numTok.Bytes[dotIdx+1:]
637					diags = append(diags, &hcl.Diagnostic{
638						Severity: hcl.DiagError,
639						Summary:  "Invalid legacy index syntax",
640						Detail:   fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax instead, like [%s][%s].", first, second),
641						Subject:  &attrTok.Range,
642					})
643					rng := hcl.RangeBetween(dot.Range, numTok.Range)
644					step := hcl.TraverseIndex{
645						Key:      cty.DynamicVal,
646						SrcRange: rng,
647					}
648					ret = makeRelativeTraversal(ret, step, rng)
649					break
650				}
651
652				numVal, numDiags := p.numberLitValue(numTok)
653				diags = append(diags, numDiags...)
654
655				rng := hcl.RangeBetween(dot.Range, numTok.Range)
656				step := hcl.TraverseIndex{
657					Key:      numVal,
658					SrcRange: rng,
659				}
660
661				ret = makeRelativeTraversal(ret, step, rng)
662
663			case TokenStar:
				// "Attribute-only" splat expression.
				// (This is a kinda weird construct inherited from HIL, which
				// behaves a bit like a [*] splat except that it is only able
				// to do attribute traversals into each of its elements,
				// whereas foo[*] can support _any_ traversal.)
669				marker := p.Read() // eat star
670				trav := make(hcl.Traversal, 0, 1)
671				var firstRange, lastRange hcl.Range
672				firstRange = p.NextRange()
673				for p.Peek().Type == TokenDot {
674					dot := p.Read()
675
676					if p.Peek().Type == TokenNumberLit {
677						// Continuing the "weird stuff inherited from HIL"
678						// theme, we also allow numbers as attribute names
679						// inside splats and interpret them as indexing
680						// into a list, for expressions like:
681						// foo.bar.*.baz.0.foo
682						numTok := p.Read()
683
684						// Weird special case if the user writes something
685						// like foo.bar.*.baz.0.0.foo, where 0.0 parses
686						// as a number.
687						if dotIdx := bytes.IndexByte(numTok.Bytes, '.'); dotIdx >= 0 {
688							first := numTok.Bytes[:dotIdx]
689							second := numTok.Bytes[dotIdx+1:]
690							diags = append(diags, &hcl.Diagnostic{
691								Severity: hcl.DiagError,
692								Summary:  "Invalid legacy index syntax",
693								Detail:   fmt.Sprintf("When using the legacy index syntax, chaining two indexes together is not permitted. Use the proper index syntax with a full splat expression [*] instead, like [%s][%s].", first, second),
694								Subject:  &attrTok.Range,
695							})
696							trav = append(trav, hcl.TraverseIndex{
697								Key:      cty.DynamicVal,
698								SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
699							})
700							lastRange = numTok.Range
701							continue
702						}
703
704						numVal, numDiags := p.numberLitValue(numTok)
705						diags = append(diags, numDiags...)
706						trav = append(trav, hcl.TraverseIndex{
707							Key:      numVal,
708							SrcRange: hcl.RangeBetween(dot.Range, numTok.Range),
709						})
710						lastRange = numTok.Range
711						continue
712					}
713
714					if p.Peek().Type != TokenIdent {
715						if !p.recovery {
716							if p.Peek().Type == TokenStar {
717								diags = append(diags, &hcl.Diagnostic{
718									Severity: hcl.DiagError,
719									Summary:  "Nested splat expression not allowed",
720									Detail:   "A splat expression (*) cannot be used inside another attribute-only splat expression.",
721									Subject:  p.Peek().Range.Ptr(),
722								})
723							} else {
724								diags = append(diags, &hcl.Diagnostic{
725									Severity: hcl.DiagError,
726									Summary:  "Invalid attribute name",
727									Detail:   "An attribute name is required after a dot.",
728									Subject:  &attrTok.Range,
729								})
730							}
731						}
732						p.setRecovery()
733						continue Traversal
734					}
735
736					attrTok := p.Read()
737					trav = append(trav, hcl.TraverseAttr{
738						Name:     string(attrTok.Bytes),
739						SrcRange: hcl.RangeBetween(dot.Range, attrTok.Range),
740					})
741					lastRange = attrTok.Range
742				}
743
744				itemExpr := &AnonSymbolExpr{
745					SrcRange: hcl.RangeBetween(dot.Range, marker.Range),
746				}
747				var travExpr Expression
748				if len(trav) == 0 {
749					travExpr = itemExpr
750				} else {
751					travExpr = &RelativeTraversalExpr{
752						Source:    itemExpr,
753						Traversal: trav,
754						SrcRange:  hcl.RangeBetween(firstRange, lastRange),
755					}
756				}
757
758				ret = &SplatExpr{
759					Source: ret,
760					Each:   travExpr,
761					Item:   itemExpr,
762
763					SrcRange:    hcl.RangeBetween(dot.Range, lastRange),
764					MarkerRange: hcl.RangeBetween(dot.Range, marker.Range),
765				}
766
767			default:
768				diags = append(diags, &hcl.Diagnostic{
769					Severity: hcl.DiagError,
770					Summary:  "Invalid attribute name",
771					Detail:   "An attribute name is required after a dot.",
772					Subject:  &attrTok.Range,
773				})
774				// This leaves the peeker in a bad place, so following items
775				// will probably be misparsed until we hit something that
776				// allows us to re-sync.
777				//
778				// We will probably need to do something better here eventually
779				// in order to support autocomplete triggered by typing a
780				// period.
781				p.setRecovery()
782			}
783
784		case TokenOBrack:
785			// Indexing of a collection.
786			// This may or may not be a hcl.Traverser, depending on whether
787			// the key value is something constant.
788
789			open := p.Read()
790			switch p.Peek().Type {
791			case TokenStar:
792				// This is a full splat expression, like foo[*], which consumes
793				// the rest of the traversal steps after it using a recursive
794				// call to this function.
795				p.Read() // consume star
796				close := p.Read()
797				if close.Type != TokenCBrack && !p.recovery {
798					diags = append(diags, &hcl.Diagnostic{
799						Severity: hcl.DiagError,
800						Summary:  "Missing close bracket on splat index",
801						Detail:   "The star for a full splat operator must be immediately followed by a closing bracket (\"]\").",
802						Subject:  &close.Range,
803					})
804					close = p.recover(TokenCBrack)
805				}
				// Splat expressions use a special "anonymous symbol" as a
				// placeholder in an expression to be evaluated once for each
				// item in the source expression.
809				itemExpr := &AnonSymbolExpr{
810					SrcRange: hcl.RangeBetween(open.Range, close.Range),
811				}
812				// Now we'll recursively call this same function to eat any
813				// remaining traversal steps against the anonymous symbol.
814				travExpr, nestedDiags := p.parseExpressionTraversals(itemExpr)
815				diags = append(diags, nestedDiags...)
816
817				ret = &SplatExpr{
818					Source: ret,
819					Each:   travExpr,
820					Item:   itemExpr,
821
822					SrcRange:    hcl.RangeBetween(open.Range, travExpr.Range()),
823					MarkerRange: hcl.RangeBetween(open.Range, close.Range),
824				}
825
826			default:
827
828				var close Token
829				p.PushIncludeNewlines(false) // arbitrary newlines allowed in brackets
830				keyExpr, keyDiags := p.ParseExpression()
831				diags = append(diags, keyDiags...)
832				if p.recovery && keyDiags.HasErrors() {
833					close = p.recover(TokenCBrack)
834				} else {
835					close = p.Read()
836					if close.Type != TokenCBrack && !p.recovery {
837						diags = append(diags, &hcl.Diagnostic{
838							Severity: hcl.DiagError,
839							Summary:  "Missing close bracket on index",
840							Detail:   "The index operator must end with a closing bracket (\"]\").",
841							Subject:  &close.Range,
842						})
843						close = p.recover(TokenCBrack)
844					}
845				}
846				p.PopIncludeNewlines()
847
848				if lit, isLit := keyExpr.(*LiteralValueExpr); isLit {
849					litKey, _ := lit.Value(nil)
850					rng := hcl.RangeBetween(open.Range, close.Range)
851					step := hcl.TraverseIndex{
852						Key:      litKey,
853						SrcRange: rng,
854					}
855					ret = makeRelativeTraversal(ret, step, rng)
856				} else if tmpl, isTmpl := keyExpr.(*TemplateExpr); isTmpl && tmpl.IsStringLiteral() {
857					litKey, _ := tmpl.Value(nil)
858					rng := hcl.RangeBetween(open.Range, close.Range)
859					step := hcl.TraverseIndex{
860						Key:      litKey,
861						SrcRange: rng,
862					}
863					ret = makeRelativeTraversal(ret, step, rng)
864				} else {
865					rng := hcl.RangeBetween(open.Range, close.Range)
866					ret = &IndexExpr{
867						Collection: ret,
868						Key:        keyExpr,
869
870						SrcRange:  rng,
871						OpenRange: open.Range,
872					}
873				}
874			}
875
876		default:
877			break Traversal
878		}
879	}
880
881	return ret, diags
882}
883
884// makeRelativeTraversal takes an expression and a traverser and returns
885// a traversal expression that combines the two. If the given expression
886// is already a traversal, it is extended in place (mutating it) and
887// returned. If it isn't, a new RelativeTraversalExpr is created and returned.
888func makeRelativeTraversal(expr Expression, next hcl.Traverser, rng hcl.Range) Expression {
889	switch texpr := expr.(type) {
890	case *ScopeTraversalExpr:
891		texpr.Traversal = append(texpr.Traversal, next)
892		texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng)
893		return texpr
894	case *RelativeTraversalExpr:
895		texpr.Traversal = append(texpr.Traversal, next)
896		texpr.SrcRange = hcl.RangeBetween(texpr.SrcRange, rng)
897		return texpr
898	default:
899		return &RelativeTraversalExpr{
900			Source:    expr,
901			Traversal: hcl.Traversal{next},
902			SrcRange:  rng,
903		}
904	}
905}
906
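// parseExpressionTerm parses an atomic expression: a parenthesized
// expression, a literal, a variable reference or function call, a quoted or
// heredoc template, a unary operation, or a tuple or object constructor.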
907func (p *parser) parseExpressionTerm() (Expression, hcl.Diagnostics) {
908	start := p.Peek()
909
910	switch start.Type {
911	case TokenOParen:
912		p.Read() // eat open paren
913
914		p.PushIncludeNewlines(false)
915
916		expr, diags := p.ParseExpression()
917		if diags.HasErrors() {
918			// attempt to place the peeker after our closing paren
919			// before we return, so that the next parser has some
920			// chance of finding a valid expression.
921			p.recover(TokenCParen)
922			p.PopIncludeNewlines()
923			return expr, diags
924		}
925
926		close := p.Peek()
927		if close.Type != TokenCParen {
928			diags = append(diags, &hcl.Diagnostic{
929				Severity: hcl.DiagError,
930				Summary:  "Unbalanced parentheses",
931				Detail:   "Expected a closing parenthesis to terminate the expression.",
932				Subject:  &close.Range,
933				Context:  hcl.RangeBetween(start.Range, close.Range).Ptr(),
934			})
935			p.setRecovery()
936		}
937
938		p.Read() // eat closing paren
939		p.PopIncludeNewlines()
940
941		return expr, diags
942
943	case TokenNumberLit:
944		tok := p.Read() // eat number token
945
946		numVal, diags := p.numberLitValue(tok)
947		return &LiteralValueExpr{
948			Val:      numVal,
949			SrcRange: tok.Range,
950		}, diags
951
952	case TokenIdent:
953		tok := p.Read() // eat identifier token
954
955		if p.Peek().Type == TokenOParen {
956			return p.finishParsingFunctionCall(tok)
957		}
958
959		name := string(tok.Bytes)
960		switch name {
961		case "true":
962			return &LiteralValueExpr{
963				Val:      cty.True,
964				SrcRange: tok.Range,
965			}, nil
966		case "false":
967			return &LiteralValueExpr{
968				Val:      cty.False,
969				SrcRange: tok.Range,
970			}, nil
971		case "null":
972			return &LiteralValueExpr{
973				Val:      cty.NullVal(cty.DynamicPseudoType),
974				SrcRange: tok.Range,
975			}, nil
976		default:
977			return &ScopeTraversalExpr{
978				Traversal: hcl.Traversal{
979					hcl.TraverseRoot{
980						Name:     name,
981						SrcRange: tok.Range,
982					},
983				},
984				SrcRange: tok.Range,
985			}, nil
986		}
987
988	case TokenOQuote, TokenOHeredoc:
989		open := p.Read() // eat opening marker
990		closer := p.oppositeBracket(open.Type)
991		exprs, passthru, _, diags := p.parseTemplateInner(closer, tokenOpensFlushHeredoc(open))
992
993		closeRange := p.PrevRange()
994
995		if passthru {
996			if len(exprs) != 1 {
997				panic("passthru set with len(exprs) != 1")
998			}
999			return &TemplateWrapExpr{
1000				Wrapped:  exprs[0],
1001				SrcRange: hcl.RangeBetween(open.Range, closeRange),
1002			}, diags
1003		}
1004
1005		return &TemplateExpr{
1006			Parts:    exprs,
1007			SrcRange: hcl.RangeBetween(open.Range, closeRange),
1008		}, diags
1009
1010	case TokenMinus:
1011		tok := p.Read() // eat minus token
1012
1013		// Important to use parseExpressionWithTraversals rather than parseExpression
1014		// here, otherwise we can capture a following binary expression into
1015		// our negation.
1016		// e.g. -46+5 should parse as (-46)+5, not -(46+5)
1017		operand, diags := p.parseExpressionWithTraversals()
1018		return &UnaryOpExpr{
1019			Op:  OpNegate,
1020			Val: operand,
1021
1022			SrcRange:    hcl.RangeBetween(tok.Range, operand.Range()),
1023			SymbolRange: tok.Range,
1024		}, diags
1025
1026	case TokenBang:
1027		tok := p.Read() // eat bang token
1028
1029		// Important to use parseExpressionWithTraversals rather than parseExpression
1030		// here, otherwise we can capture a following binary expression into
1031		// our negation.
1032		operand, diags := p.parseExpressionWithTraversals()
1033		return &UnaryOpExpr{
1034			Op:  OpLogicalNot,
1035			Val: operand,
1036
1037			SrcRange:    hcl.RangeBetween(tok.Range, operand.Range()),
1038			SymbolRange: tok.Range,
1039		}, diags
1040
1041	case TokenOBrack:
1042		return p.parseTupleCons()
1043
1044	case TokenOBrace:
1045		return p.parseObjectCons()
1046
1047	default:
1048		var diags hcl.Diagnostics
1049		if !p.recovery {
1050			diags = append(diags, &hcl.Diagnostic{
1051				Severity: hcl.DiagError,
1052				Summary:  "Invalid expression",
1053				Detail:   "Expected the start of an expression, but found an invalid expression token.",
1054				Subject:  &start.Range,
1055			})
1056		}
1057		p.setRecovery()
1058
1059		// Return a placeholder so that the AST is still structurally sound
1060		// even in the presence of parse errors.
1061		return &LiteralValueExpr{
1062			Val:      cty.DynamicVal,
1063			SrcRange: start.Range,
1064		}, diags
1065	}
1066}
1067
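// numberLitValue converts the raw bytes of a number literal token into a
// cty number value, returning an unknown number along with an error
// diagnostic if the literal cannot be parsed.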
1068func (p *parser) numberLitValue(tok Token) (cty.Value, hcl.Diagnostics) {
	// cty.ParseNumberVal behaves the same as converting a string to a
	// number, ensuring we always interpret decimal number literals in the
	// same way.
1072	numVal, err := cty.ParseNumberVal(string(tok.Bytes))
1073	if err != nil {
1074		ret := cty.UnknownVal(cty.Number)
1075		return ret, hcl.Diagnostics{
1076			{
1077				Severity: hcl.DiagError,
1078				Summary:  "Invalid number literal",
1079				// FIXME: not a very good error message, but convert only
1080				// gives us "a number is required", so not much help either.
1081				Detail:  "Failed to recognize the value of this number literal.",
1082				Subject: &tok.Range,
1083			},
1084		}
1085	}
1086	return numVal, nil
1087}
1088
1089// finishParsingFunctionCall parses a function call assuming that the function
1090// name was already read, and so the peeker should be pointing at the opening
1091// parenthesis after the name.
1092func (p *parser) finishParsingFunctionCall(name Token) (Expression, hcl.Diagnostics) {
1093	openTok := p.Read()
1094	if openTok.Type != TokenOParen {
1095		// should never happen if callers behave
1096		panic("finishParsingFunctionCall called with non-parenthesis as next token")
1097	}
1098
1099	var args []Expression
1100	var diags hcl.Diagnostics
1101	var expandFinal bool
1102	var closeTok Token
1103
1104	// Arbitrary newlines are allowed inside the function call parentheses.
1105	p.PushIncludeNewlines(false)
1106
1107Token:
1108	for {
1109		tok := p.Peek()
1110
1111		if tok.Type == TokenCParen {
1112			closeTok = p.Read() // eat closing paren
1113			break Token
1114		}
1115
1116		arg, argDiags := p.ParseExpression()
1117		args = append(args, arg)
1118		diags = append(diags, argDiags...)
1119		if p.recovery && argDiags.HasErrors() {
1120			// if there was a parse error in the argument then we've
1121			// probably been left in a weird place in the token stream,
1122			// so we'll bail out with a partial argument list.
1123			p.recover(TokenCParen)
1124			break Token
1125		}
1126
1127		sep := p.Read()
1128		if sep.Type == TokenCParen {
1129			closeTok = sep
1130			break Token
1131		}
1132
1133		if sep.Type == TokenEllipsis {
1134			expandFinal = true
1135
1136			if p.Peek().Type != TokenCParen {
1137				if !p.recovery {
1138					diags = append(diags, &hcl.Diagnostic{
1139						Severity: hcl.DiagError,
1140						Summary:  "Missing closing parenthesis",
1141						Detail:   "An expanded function argument (with ...) must be immediately followed by closing parentheses.",
1142						Subject:  &sep.Range,
1143						Context:  hcl.RangeBetween(name.Range, sep.Range).Ptr(),
1144					})
1145				}
1146				closeTok = p.recover(TokenCParen)
1147			} else {
1148				closeTok = p.Read() // eat closing paren
1149			}
1150			break Token
1151		}
1152
1153		if sep.Type != TokenComma {
1154			diags = append(diags, &hcl.Diagnostic{
1155				Severity: hcl.DiagError,
1156				Summary:  "Missing argument separator",
1157				Detail:   "A comma is required to separate each function argument from the next.",
1158				Subject:  &sep.Range,
1159				Context:  hcl.RangeBetween(name.Range, sep.Range).Ptr(),
1160			})
1161			closeTok = p.recover(TokenCParen)
1162			break Token
1163		}
1164
1165		if p.Peek().Type == TokenCParen {
1166			// A trailing comma after the last argument gets us in here.
1167			closeTok = p.Read() // eat closing paren
1168			break Token
1169		}
1170
1171	}
1172
1173	p.PopIncludeNewlines()
1174
1175	return &FunctionCallExpr{
1176		Name: string(name.Bytes),
1177		Args: args,
1178
1179		ExpandFinal: expandFinal,
1180
1181		NameRange:       name.Range,
1182		OpenParenRange:  openTok.Range,
1183		CloseParenRange: closeTok.Range,
1184	}, diags
1185}
1186
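// parseTupleCons parses a tuple constructor expression like [a, b, c],
// assuming the peeker is pointing at the opening bracket. If the first token
// inside the brackets is the "for" keyword, parsing is delegated to
// finishParsingForExpr instead.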
1187func (p *parser) parseTupleCons() (Expression, hcl.Diagnostics) {
1188	open := p.Read()
1189	if open.Type != TokenOBrack {
1190		// Should never happen if callers are behaving
1191		panic("parseTupleCons called without peeker pointing to open bracket")
1192	}
1193
1194	p.PushIncludeNewlines(false)
1195	defer p.PopIncludeNewlines()
1196
1197	if forKeyword.TokenMatches(p.Peek()) {
1198		return p.finishParsingForExpr(open)
1199	}
1200
1201	var close Token
1202
1203	var diags hcl.Diagnostics
1204	var exprs []Expression
1205
1206	for {
1207		next := p.Peek()
1208		if next.Type == TokenCBrack {
1209			close = p.Read() // eat closer
1210			break
1211		}
1212
1213		expr, exprDiags := p.ParseExpression()
1214		exprs = append(exprs, expr)
1215		diags = append(diags, exprDiags...)
1216
1217		if p.recovery && exprDiags.HasErrors() {
1218			// If expression parsing failed then we are probably in a strange
1219			// place in the token stream, so we'll bail out and try to reset
1220			// to after our closing bracket to allow parsing to continue.
1221			close = p.recover(TokenCBrack)
1222			break
1223		}
1224
1225		next = p.Peek()
1226		if next.Type == TokenCBrack {
1227			close = p.Read() // eat closer
1228			break
1229		}
1230
1231		if next.Type != TokenComma {
1232			if !p.recovery {
1233				diags = append(diags, &hcl.Diagnostic{
1234					Severity: hcl.DiagError,
1235					Summary:  "Missing item separator",
1236					Detail:   "Expected a comma to mark the beginning of the next item.",
1237					Subject:  &next.Range,
1238					Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
1239				})
1240			}
1241			close = p.recover(TokenCBrack)
1242			break
1243		}
1244
1245		p.Read() // eat comma
1246
1247	}
1248
1249	return &TupleConsExpr{
1250		Exprs: exprs,
1251
1252		SrcRange:  hcl.RangeBetween(open.Range, close.Range),
1253		OpenRange: open.Range,
1254	}, diags
1255}
1256
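// parseObjectCons parses an object constructor expression like
// {a = 1, b = 2}, assuming the peeker is pointing at the opening brace. If
// the first token inside the braces is the "for" keyword, parsing is
// delegated to finishParsingForExpr instead.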
1257func (p *parser) parseObjectCons() (Expression, hcl.Diagnostics) {
1258	open := p.Read()
1259	if open.Type != TokenOBrace {
1260		// Should never happen if callers are behaving
1261		panic("parseObjectCons called without peeker pointing to open brace")
1262	}
1263
1264	// We must temporarily stop looking at newlines here while we check for
1265	// a "for" keyword, since for expressions are _not_ newline-sensitive,
1266	// even though object constructors are.
1267	p.PushIncludeNewlines(false)
1268	isFor := forKeyword.TokenMatches(p.Peek())
1269	p.PopIncludeNewlines()
1270	if isFor {
1271		return p.finishParsingForExpr(open)
1272	}
1273
1274	p.PushIncludeNewlines(true)
1275	defer p.PopIncludeNewlines()
1276
1277	var close Token
1278
1279	var diags hcl.Diagnostics
1280	var items []ObjectConsItem
1281
1282	for {
1283		next := p.Peek()
1284		if next.Type == TokenNewline {
1285			p.Read() // eat newline
1286			continue
1287		}
1288
1289		if next.Type == TokenCBrace {
1290			close = p.Read() // eat closer
1291			break
1292		}
1293
1294		var key Expression
1295		var keyDiags hcl.Diagnostics
1296		key, keyDiags = p.ParseExpression()
1297		diags = append(diags, keyDiags...)
1298
1299		if p.recovery && keyDiags.HasErrors() {
1300			// If expression parsing failed then we are probably in a strange
1301			// place in the token stream, so we'll bail out and try to reset
1302			// to after our closing brace to allow parsing to continue.
1303			close = p.recover(TokenCBrace)
1304			break
1305		}
1306
1307		// We wrap up the key expression in a special wrapper that deals
1308		// with our special case that naked identifiers as object keys
1309		// are interpreted as literal strings.
1310		key = &ObjectConsKeyExpr{Wrapped: key}
1311
1312		next = p.Peek()
1313		if next.Type != TokenEqual && next.Type != TokenColon {
1314			if !p.recovery {
1315				switch next.Type {
1316				case TokenNewline, TokenComma:
1317					diags = append(diags, &hcl.Diagnostic{
1318						Severity: hcl.DiagError,
1319						Summary:  "Missing attribute value",
1320						Detail:   "Expected an attribute value, introduced by an equals sign (\"=\").",
1321						Subject:  &next.Range,
1322						Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
1323					})
1324				case TokenIdent:
1325					// Although this might just be a plain old missing equals
1326					// sign before a reference, one way to get here is to try
1327					// to write an attribute name containing a period followed
1328					// by a digit, which was valid in HCL1, like this:
1329					//     foo1.2_bar = "baz"
1330					// We can't know exactly what the user intended here, but
1331					// we'll augment our message with an extra hint in this case
1332					// in case it is helpful.
1333					diags = append(diags, &hcl.Diagnostic{
1334						Severity: hcl.DiagError,
1335						Summary:  "Missing key/value separator",
						Detail:   "Expected an equals sign (\"=\") to mark the beginning of the attribute value. If you intended to give an attribute name containing periods or spaces, write the name in quotes to create a string literal.",
1337						Subject:  &next.Range,
1338						Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
1339					})
1340				default:
1341					diags = append(diags, &hcl.Diagnostic{
1342						Severity: hcl.DiagError,
1343						Summary:  "Missing key/value separator",
1344						Detail:   "Expected an equals sign (\"=\") to mark the beginning of the attribute value.",
1345						Subject:  &next.Range,
1346						Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
1347					})
1348				}
1349			}
1350			close = p.recover(TokenCBrace)
1351			break
1352		}
1353
1354		p.Read() // eat equals sign or colon
1355
1356		value, valueDiags := p.ParseExpression()
1357		diags = append(diags, valueDiags...)
1358
1359		if p.recovery && valueDiags.HasErrors() {
1360			// If expression parsing failed then we are probably in a strange
1361			// place in the token stream, so we'll bail out and try to reset
1362			// to after our closing brace to allow parsing to continue.
1363			close = p.recover(TokenCBrace)
1364			break
1365		}
1366
1367		items = append(items, ObjectConsItem{
1368			KeyExpr:   key,
1369			ValueExpr: value,
1370		})
1371
1372		next = p.Peek()
1373		if next.Type == TokenCBrace {
1374			close = p.Read() // eat closer
1375			break
1376		}
1377
1378		if next.Type != TokenComma && next.Type != TokenNewline {
1379			if !p.recovery {
1380				diags = append(diags, &hcl.Diagnostic{
1381					Severity: hcl.DiagError,
1382					Summary:  "Missing attribute separator",
1383					Detail:   "Expected a newline or comma to mark the beginning of the next attribute.",
1384					Subject:  &next.Range,
1385					Context:  hcl.RangeBetween(open.Range, next.Range).Ptr(),
1386				})
1387			}
1388			close = p.recover(TokenCBrace)
1389			break
1390		}
1391
1392		p.Read() // eat comma or newline
1393
1394	}
1395
1396	return &ObjectConsExpr{
1397		Items: items,
1398
1399		SrcRange:  hcl.RangeBetween(open.Range, close.Range),
1400		OpenRange: open.Range,
1401	}, diags
1402}
1403
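// finishParsingForExpr parses a "for" expression such as
// [for v in coll : f(v)] or {for k, v in coll : k => v if cond}, assuming
// the opening bracket or brace has already been read and the peeker is
// pointing at the "for" keyword.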
1404func (p *parser) finishParsingForExpr(open Token) (Expression, hcl.Diagnostics) {
1405	p.PushIncludeNewlines(false)
1406	defer p.PopIncludeNewlines()
1407	introducer := p.Read()
1408	if !forKeyword.TokenMatches(introducer) {
1409		// Should never happen if callers are behaving
1410		panic("finishParsingForExpr called without peeker pointing to 'for' identifier")
1411	}
1412
1413	var makeObj bool
1414	var closeType TokenType
1415	switch open.Type {
1416	case TokenOBrace:
1417		makeObj = true
1418		closeType = TokenCBrace
1419	case TokenOBrack:
1420		makeObj = false // making a tuple
1421		closeType = TokenCBrack
1422	default:
1423		// Should never happen if callers are behaving
1424		panic("finishParsingForExpr called with invalid open token")
1425	}
1426
1427	var diags hcl.Diagnostics
1428	var keyName, valName string
1429
1430	if p.Peek().Type != TokenIdent {
1431		if !p.recovery {
1432			diags = append(diags, &hcl.Diagnostic{
1433				Severity: hcl.DiagError,
1434				Summary:  "Invalid 'for' expression",
				Detail:   "For expression requires a variable name after 'for'.",
1436				Subject:  p.Peek().Range.Ptr(),
1437				Context:  hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1438			})
1439		}
1440		close := p.recover(closeType)
1441		return &LiteralValueExpr{
1442			Val:      cty.DynamicVal,
1443			SrcRange: hcl.RangeBetween(open.Range, close.Range),
1444		}, diags
1445	}
1446
1447	valName = string(p.Read().Bytes)
1448
1449	if p.Peek().Type == TokenComma {
1450		// What we just read was actually the key, then.
1451		keyName = valName
1452		p.Read() // eat comma
1453
1454		if p.Peek().Type != TokenIdent {
1455			if !p.recovery {
1456				diags = append(diags, &hcl.Diagnostic{
1457					Severity: hcl.DiagError,
1458					Summary:  "Invalid 'for' expression",
					Detail:   "For expression requires a value variable name after the comma.",
1460					Subject:  p.Peek().Range.Ptr(),
1461					Context:  hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1462				})
1463			}
1464			close := p.recover(closeType)
1465			return &LiteralValueExpr{
1466				Val:      cty.DynamicVal,
1467				SrcRange: hcl.RangeBetween(open.Range, close.Range),
1468			}, diags
1469		}
1470
1471		valName = string(p.Read().Bytes)
1472	}
1473
1474	if !inKeyword.TokenMatches(p.Peek()) {
1475		if !p.recovery {
1476			diags = append(diags, &hcl.Diagnostic{
1477				Severity: hcl.DiagError,
1478				Summary:  "Invalid 'for' expression",
1479				Detail:   "For expression requires the 'in' keyword after its name declarations.",
1480				Subject:  p.Peek().Range.Ptr(),
1481				Context:  hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1482			})
1483		}
1484		close := p.recover(closeType)
1485		return &LiteralValueExpr{
1486			Val:      cty.DynamicVal,
1487			SrcRange: hcl.RangeBetween(open.Range, close.Range),
1488		}, diags
1489	}
1490	p.Read() // eat 'in' keyword
1491
1492	collExpr, collDiags := p.ParseExpression()
1493	diags = append(diags, collDiags...)
1494	if p.recovery && collDiags.HasErrors() {
1495		close := p.recover(closeType)
1496		return &LiteralValueExpr{
1497			Val:      cty.DynamicVal,
1498			SrcRange: hcl.RangeBetween(open.Range, close.Range),
1499		}, diags
1500	}
1501
1502	if p.Peek().Type != TokenColon {
1503		if !p.recovery {
1504			diags = append(diags, &hcl.Diagnostic{
1505				Severity: hcl.DiagError,
1506				Summary:  "Invalid 'for' expression",
1507				Detail:   "For expression requires a colon after the collection expression.",
1508				Subject:  p.Peek().Range.Ptr(),
1509				Context:  hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1510			})
1511		}
1512		close := p.recover(closeType)
1513		return &LiteralValueExpr{
1514			Val:      cty.DynamicVal,
1515			SrcRange: hcl.RangeBetween(open.Range, close.Range),
1516		}, diags
1517	}
1518	p.Read() // eat colon
1519
1520	var keyExpr, valExpr Expression
1521	var keyDiags, valDiags hcl.Diagnostics
1522	valExpr, valDiags = p.ParseExpression()
1523	if p.Peek().Type == TokenFatArrow {
1524		// What we just parsed was actually keyExpr
1525		p.Read() // eat the fat arrow
1526		keyExpr, keyDiags = valExpr, valDiags
1527
1528		valExpr, valDiags = p.ParseExpression()
1529	}
1530	diags = append(diags, keyDiags...)
1531	diags = append(diags, valDiags...)
1532	if p.recovery && (keyDiags.HasErrors() || valDiags.HasErrors()) {
1533		close := p.recover(closeType)
1534		return &LiteralValueExpr{
1535			Val:      cty.DynamicVal,
1536			SrcRange: hcl.RangeBetween(open.Range, close.Range),
1537		}, diags
1538	}
1539
1540	group := false
1541	var ellipsis Token
1542	if p.Peek().Type == TokenEllipsis {
1543		ellipsis = p.Read()
1544		group = true
1545	}
1546
1547	var condExpr Expression
1548	var condDiags hcl.Diagnostics
1549	if ifKeyword.TokenMatches(p.Peek()) {
1550		p.Read() // eat "if"
1551		condExpr, condDiags = p.ParseExpression()
1552		diags = append(diags, condDiags...)
1553		if p.recovery && condDiags.HasErrors() {
1554			close := p.recover(p.oppositeBracket(open.Type))
1555			return &LiteralValueExpr{
1556				Val:      cty.DynamicVal,
1557				SrcRange: hcl.RangeBetween(open.Range, close.Range),
1558			}, diags
1559		}
1560	}
1561
1562	var close Token
1563	if p.Peek().Type == closeType {
1564		close = p.Read()
1565	} else {
1566		if !p.recovery {
1567			diags = append(diags, &hcl.Diagnostic{
1568				Severity: hcl.DiagError,
1569				Summary:  "Invalid 'for' expression",
1570				Detail:   "Extra characters after the end of the 'for' expression.",
1571				Subject:  p.Peek().Range.Ptr(),
1572				Context:  hcl.RangeBetween(open.Range, p.Peek().Range).Ptr(),
1573			})
1574		}
1575		close = p.recover(closeType)
1576	}
1577
1578	if !makeObj {
1579		if keyExpr != nil {
1580			diags = append(diags, &hcl.Diagnostic{
1581				Severity: hcl.DiagError,
1582				Summary:  "Invalid 'for' expression",
1583				Detail:   "Key expression is not valid when building a tuple.",
1584				Subject:  keyExpr.Range().Ptr(),
1585				Context:  hcl.RangeBetween(open.Range, close.Range).Ptr(),
1586			})
1587		}
1588
1589		if group {
1590			diags = append(diags, &hcl.Diagnostic{
1591				Severity: hcl.DiagError,
1592				Summary:  "Invalid 'for' expression",
1593				Detail:   "Grouping ellipsis (...) cannot be used when building a tuple.",
1594				Subject:  &ellipsis.Range,
1595				Context:  hcl.RangeBetween(open.Range, close.Range).Ptr(),
1596			})
1597		}
1598	} else {
1599		if keyExpr == nil {
1600			diags = append(diags, &hcl.Diagnostic{
1601				Severity: hcl.DiagError,
1602				Summary:  "Invalid 'for' expression",
1603				Detail:   "Key expression is required when building an object.",
1604				Subject:  valExpr.Range().Ptr(),
1605				Context:  hcl.RangeBetween(open.Range, close.Range).Ptr(),
1606			})
1607		}
1608	}
1609
1610	return &ForExpr{
1611		KeyVar:   keyName,
1612		ValVar:   valName,
1613		CollExpr: collExpr,
1614		KeyExpr:  keyExpr,
1615		ValExpr:  valExpr,
1616		CondExpr: condExpr,
1617		Group:    group,
1618
1619		SrcRange:   hcl.RangeBetween(open.Range, close.Range),
1620		OpenRange:  open.Range,
1621		CloseRange: close.Range,
1622	}, diags
1623}
1624
1625// parseQuotedStringLiteral is a helper for parsing quoted strings that
1626// aren't allowed to contain any interpolations, such as block labels.
1627func (p *parser) parseQuotedStringLiteral() (string, hcl.Range, hcl.Diagnostics) {
1628	oQuote := p.Read()
1629	if oQuote.Type != TokenOQuote {
1630		return "", oQuote.Range, hcl.Diagnostics{
1631			{
1632				Severity: hcl.DiagError,
1633				Summary:  "Invalid string literal",
1634				Detail:   "A quoted string is required here.",
1635				Subject:  &oQuote.Range,
1636			},
1637		}
1638	}
1639
1640	var diags hcl.Diagnostics
1641	ret := &bytes.Buffer{}
1642	var cQuote Token
1643
1644Token:
1645	for {
1646		tok := p.Read()
1647		switch tok.Type {
1648
1649		case TokenCQuote:
1650			cQuote = tok
1651			break Token
1652
1653		case TokenQuotedLit:
1654			s, sDiags := p.decodeStringLit(tok)
1655			diags = append(diags, sDiags...)
1656			ret.WriteString(s)
1657
1658		case TokenTemplateControl, TokenTemplateInterp:
1659			which := "$"
1660			if tok.Type == TokenTemplateControl {
1661				which = "%"
1662			}
1663
1664			diags = append(diags, &hcl.Diagnostic{
1665				Severity: hcl.DiagError,
1666				Summary:  "Invalid string literal",
1667				Detail: fmt.Sprintf(
1668					"Template sequences are not allowed in this string. To include a literal %q, double it (as \"%s%s\") to escape it.",
1669					which, which, which,
1670				),
1671				Subject: &tok.Range,
1672				Context: hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1673			})
1674
			// Now that we're returning an error, callers won't attempt to use
			// the result for any real operations, but they might try to use
			// the partial AST for other analyses, so we'll leave a marker
			// to indicate that there was something invalid in the string, to
			// help avoid misinterpretation of the partial result.
1680			ret.WriteString(which)
1681			ret.WriteString("{ ... }")
1682
1683			p.recover(TokenTemplateSeqEnd) // we'll try to keep parsing after the sequence ends
1684
1685		case TokenEOF:
1686			diags = append(diags, &hcl.Diagnostic{
1687				Severity: hcl.DiagError,
1688				Summary:  "Unterminated string literal",
1689				Detail:   "Unable to find the closing quote mark before the end of the file.",
1690				Subject:  &tok.Range,
1691				Context:  hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1692			})
1693			break Token
1694
1695		default:
1696			// Should never happen, as long as the scanner is behaving itself
1697			diags = append(diags, &hcl.Diagnostic{
1698				Severity: hcl.DiagError,
1699				Summary:  "Invalid string literal",
1700				Detail:   "This item is not valid in a string literal.",
1701				Subject:  &tok.Range,
1702				Context:  hcl.RangeBetween(oQuote.Range, tok.Range).Ptr(),
1703			})
1704			p.recover(TokenCQuote)
1705			break Token
1706
1707		}
1708
1709	}
1710
1711	return ret.String(), hcl.RangeBetween(oQuote.Range, cQuote.Range), diags
1712}
1713
1714// decodeStringLit processes the given token, which must be either a
1715// TokenQuotedLit or a TokenStringLit, returning the string resulting from
1716// resolving any escape sequences.
1717//
1718// If any error diagnostics are returned, the returned string may be incomplete
1719// or otherwise invalid.
1720func (p *parser) decodeStringLit(tok Token) (string, hcl.Diagnostics) {
1721	var quoted bool
1722	switch tok.Type {
1723	case TokenQuotedLit:
1724		quoted = true
1725	case TokenStringLit:
1726		quoted = false
1727	default:
		panic("decodeStringLit can only be used with TokenStringLit and TokenQuotedLit tokens")
1729	}
1730	var diags hcl.Diagnostics
1731
1732	ret := make([]byte, 0, len(tok.Bytes))
1733	slices := scanStringLit(tok.Bytes, quoted)
1734
1735	// We will mutate rng constantly as we walk through our token slices below.
1736	// Any diagnostics must take a copy of this rng rather than simply pointing
1737	// to it, e.g. by using rng.Ptr() rather than &rng.
1738	rng := tok.Range
1739	rng.End = rng.Start
1740
1741Slices:
1742	for _, slice := range slices {
1743		if len(slice) == 0 {
1744			continue
1745		}
1746
		// Advance the start of our range to where the previous slice ended.
		rng.Start = rng.End

		// Advance the end of our range to just after this slice.
1751		b := slice
1752		for len(b) > 0 {
1753			adv, ch, _ := textseg.ScanGraphemeClusters(b, true)
1754			rng.End.Byte += adv
1755			switch ch[0] {
1756			case '\r', '\n':
1757				rng.End.Line++
1758				rng.End.Column = 1
1759			default:
1760				rng.End.Column++
1761			}
1762			b = b[adv:]
1763		}
1764
1765	TokenType:
1766		switch slice[0] {
1767		case '\\':
1768			if !quoted {
1769				// If we're not in quoted mode then just treat this token as
1770				// normal. (Slices can still start with backslash even if we're
1771				// not specifically looking for backslash sequences.)
1772				break TokenType
1773			}
1774			if len(slice) < 2 {
1775				diags = append(diags, &hcl.Diagnostic{
1776					Severity: hcl.DiagError,
1777					Summary:  "Invalid escape sequence",
1778					Detail:   "Backslash must be followed by an escape sequence selector character.",
1779					Subject:  rng.Ptr(),
1780				})
1781				break TokenType
1782			}
1783
1784			switch slice[1] {
1785
1786			case 'n':
1787				ret = append(ret, '\n')
1788				continue Slices
1789			case 'r':
1790				ret = append(ret, '\r')
1791				continue Slices
1792			case 't':
1793				ret = append(ret, '\t')
1794				continue Slices
1795			case '"':
1796				ret = append(ret, '"')
1797				continue Slices
1798			case '\\':
1799				ret = append(ret, '\\')
1800				continue Slices
1801			case 'u', 'U':
1802				if slice[1] == 'u' && len(slice) != 6 {
1803					diags = append(diags, &hcl.Diagnostic{
1804						Severity: hcl.DiagError,
1805						Summary:  "Invalid escape sequence",
1806						Detail:   "The \\u escape sequence must be followed by four hexadecimal digits.",
1807						Subject:  rng.Ptr(),
1808					})
1809					break TokenType
1810				} else if slice[1] == 'U' && len(slice) != 10 {
1811					diags = append(diags, &hcl.Diagnostic{
1812						Severity: hcl.DiagError,
1813						Summary:  "Invalid escape sequence",
1814						Detail:   "The \\U escape sequence must be followed by eight hexadecimal digits.",
1815						Subject:  rng.Ptr(),
1816					})
1817					break TokenType
1818				}
1819
1820				numHex := string(slice[2:])
1821				num, err := strconv.ParseUint(numHex, 16, 32)
1822				if err != nil {
1823					// Should never happen because the scanner won't match
1824					// a sequence of digits that isn't valid.
1825					panic(err)
1826				}
1827
1828				r := rune(num)
1829				l := utf8.RuneLen(r)
1830				if l == -1 {
1831					diags = append(diags, &hcl.Diagnostic{
1832						Severity: hcl.DiagError,
1833						Summary:  "Invalid escape sequence",
1834						Detail:   fmt.Sprintf("Cannot encode character U+%04x in UTF-8.", num),
1835						Subject:  rng.Ptr(),
1836					})
1837					break TokenType
1838				}
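				// Grow ret by the rune's encoded length and write the UTF-8
				// bytes directly into the new tail. For example, \u00e9
				// appends the two bytes 0xc3 0xa9.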
				for i := 0; i < l; i++ {
					ret = append(ret, 0)
				}
				rb := ret[len(ret)-l:]
				utf8.EncodeRune(rb, r)

				continue Slices

			default:
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Invalid escape sequence",
					Detail:   fmt.Sprintf("The symbol %q is not a valid escape sequence selector.", slice[1:]),
					Subject:  rng.Ptr(),
				})
				ret = append(ret, slice[1:]...)
				continue Slices
			}

		case '$', '%':
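			// A "$${" or "%%{" sequence collapses to a literal "${" or "%{"
			// respectively, allowing those template introducers to appear
			// in a string without starting a template sequence.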
			if len(slice) != 3 {
				// Not long enough to be our escape sequence, so it's literal.
				break TokenType
			}

			if slice[1] == slice[0] && slice[2] == '{' {
				ret = append(ret, slice[0])
				ret = append(ret, '{')
				continue Slices
			}

			break TokenType
		}

		// If we reach here, either by falling out of the switch above or by
		// breaking out of it, then this slice is just a literal.
		ret = append(ret, slice...)
	}

	return string(ret), diags
}

// setRecovery turns on recovery mode without actually doing any recovery.
// This can be used when a parser knowingly leaves the peeker in a useless
// place and wants to suppress errors that might result from that decision.
func (p *parser) setRecovery() {
	p.recovery = true
}

// recover seeks forward in the token stream until it finds TokenType "end",
// then returns with the peeker pointed at the following token.
//
// If the given token type is a bracketer, this function will additionally
// count nested instances of the brackets to try to leave the peeker at
// the end of the _current_ instance of that bracketer, skipping over any
// nested instances. This is a best-effort operation and may have
// unpredictable results on input with bad bracketer nesting.
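//
// For example, recover(TokenCBrace) will skip over any nested brace pairs
// and return the TokenCBrace that closes the current block, or the EOF
// token if no such closer is found.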
func (p *parser) recover(end TokenType) Token {
	start := p.oppositeBracket(end)
	p.recovery = true

	nest := 0
	for {
		tok := p.Read()
		ty := tok.Type
		if end == TokenTemplateSeqEnd && ty == TokenTemplateControl {
			// normalize so that our matching behavior can work, since
			// TokenTemplateControl/TokenTemplateInterp are asymmetrical
			// with TokenTemplateSeqEnd and thus we need to count both
			// openers if that's the closer we're looking for.
			ty = TokenTemplateInterp
		}

		switch ty {
		case start:
			nest++
		case end:
			if nest < 1 {
				return tok
			}

			nest--
		case TokenEOF:
			return tok
		}
	}
}

// recoverOver seeks forward in the token stream until it finds a block
// starting with TokenType "start", then finds the corresponding end token,
// leaving the peeker pointed at the token after that end token.
//
// The given token type _must_ be a bracketer. For example, if the given
// start token is TokenOBrace then the parser will be left at the _end_ of
// the next brace-delimited block encountered, or at EOF if no such block
// is found or it is unclosed.
func (p *parser) recoverOver(start TokenType) {
	end := p.oppositeBracket(start)

	// find the opening bracket first
Token:
	for {
		tok := p.Read()
		switch tok.Type {
		case start, TokenEOF:
			break Token
		}
	}

	// Now use our existing recover function to locate the _end_ of the
	// container we've found.
	p.recover(end)
}

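// recoverAfterBodyItem seeks forward in the token stream until it finds a
// newline (or EOF) at the top level of nesting, leaving the peeker pointed
// at the token after it. Any brackets, quotes, heredocs, or template
// sequences opened along the way are tracked so that newlines inside nested
// constructs don't end the recovery early.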
func (p *parser) recoverAfterBodyItem() {
	p.recovery = true
	var open []TokenType

Token:
	for {
		tok := p.Read()

		switch tok.Type {

		case TokenNewline:
			if len(open) == 0 {
				break Token
			}

		case TokenEOF:
			break Token

		case TokenOBrace, TokenOBrack, TokenOParen, TokenOQuote, TokenOHeredoc, TokenTemplateInterp, TokenTemplateControl:
			open = append(open, tok.Type)

		case TokenCBrace, TokenCBrack, TokenCParen, TokenCQuote, TokenCHeredoc:
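			// Pop our stack back to the matching opener, discarding any
			// unmatched openers we pass over on the way.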
			opener := p.oppositeBracket(tok.Type)
			for len(open) > 0 && open[len(open)-1] != opener {
				open = open[:len(open)-1]
			}
			if len(open) > 0 {
				open = open[:len(open)-1]
			}

		case TokenTemplateSeqEnd:
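			// A template sequence end closes either an interpolation or a
			// control sequence, so pop back to whichever of those openers
			// is nearest.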
			for len(open) > 0 && open[len(open)-1] != TokenTemplateInterp && open[len(open)-1] != TokenTemplateControl {
				open = open[:len(open)-1]
			}
			if len(open) > 0 {
				open = open[:len(open)-1]
			}

		}
	}
}

// oppositeBracket finds the bracket that opposes the given bracketer, or
// TokenNil if the given token isn't a bracketer.
//
// "Bracketer", for the sake of this function, is one end of a matching
// open/close set of tokens that establish a bracketing context.
func (p *parser) oppositeBracket(ty TokenType) TokenType {
	switch ty {

	case TokenOBrace:
		return TokenCBrace
	case TokenOBrack:
		return TokenCBrack
	case TokenOParen:
		return TokenCParen
	case TokenOQuote:
		return TokenCQuote
	case TokenOHeredoc:
		return TokenCHeredoc

	case TokenCBrace:
		return TokenOBrace
	case TokenCBrack:
		return TokenOBrack
	case TokenCParen:
		return TokenOParen
	case TokenCQuote:
		return TokenOQuote
	case TokenCHeredoc:
		return TokenOHeredoc

	case TokenTemplateControl:
		return TokenTemplateSeqEnd
	case TokenTemplateInterp:
		return TokenTemplateSeqEnd
	case TokenTemplateSeqEnd:
		// This is ambiguous, but we return Interp here because that's
		// what's assumed by the "recover" method.
		return TokenTemplateInterp

	default:
		return TokenNil
	}
}

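// errPlaceholderExpr returns an expression that can stand in for one that
// failed to parse, evaluating to cty.DynamicVal over the given source range
// so that analyses of the partial AST can still proceed.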
func errPlaceholderExpr(rng hcl.Range) Expression {
	return &LiteralValueExpr{
		Val:      cty.DynamicVal,
		SrcRange: rng,
	}
}