package hclsyntax

import (
	"github.com/hashicorp/hcl2/hcl"
)

// ParseConfig parses the given buffer as a whole HCL config file, returning
// a *hcl.File representing its contents. If HasErrors called on the returned
// diagnostics returns true, the returned body is likely to be incomplete
// and should therefore be used with care.
//
// The body in the returned file has dynamic type *hclsyntax.Body, so callers
// may freely type-assert this to get access to the full hclsyntax API in
// situations where detailed access is required. However, most common use-cases
// should be served using the hcl.Body interface to ensure compatibility with
// other configuration syntaxes, such as JSON.
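//
// The following is a minimal usage sketch; the source buffer, filename, and
// variable names are illustrative assumptions rather than part of this API:
//
//	// src is the raw configuration, e.g. read from disk by the caller.
//	f, diags := hclsyntax.ParseConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
//	if diags.HasErrors() {
//		// Report diags; the returned body may be incomplete.
//	}
//	body := f.Body.(*hclsyntax.Body) // optional: syntax-specific access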
func ParseConfig(src []byte, filename string, start hcl.Pos) (*hcl.File, hcl.Diagnostics) {
	tokens, diags := LexConfig(src, filename, start)
	peeker := newPeeker(tokens, false)
	parser := &parser{peeker: peeker}
	body, parseDiags := parser.ParseBody(TokenEOF)
	diags = append(diags, parseDiags...)

	// Panic if the parser uses incorrect stack discipline with the peeker's
	// newlines stack, since otherwise it will produce confusing downstream
	// errors.
	peeker.AssertEmptyIncludeNewlinesStack()

	return &hcl.File{
		Body:  body,
		Bytes: src,

		Nav: navigation{
			root: body,
		},
	}, diags
}

// ParseExpression parses the given buffer as a standalone HCL expression,
// returning it as an instance of Expression.
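//
// The following is a minimal usage sketch; the expression source and the use
// of a nil evaluation context are illustrative assumptions (a nil context is
// sufficient only for expressions that use no variables or functions):
//
//	expr, diags := hclsyntax.ParseExpression([]byte(`1 + 2`), "expr.hcl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
//	if !diags.HasErrors() {
//		val, valDiags := expr.Value(nil)
//		diags = append(diags, valDiags...)
//		_ = val
//	}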
func ParseExpression(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) {
	tokens, diags := LexExpression(src, filename, start)
	peeker := newPeeker(tokens, false)
	parser := &parser{peeker: peeker}

	// Bare expressions are always parsed in "ignore newlines" mode, as if
	// they were wrapped in parentheses.
	parser.PushIncludeNewlines(false)

	expr, parseDiags := parser.ParseExpression()
	diags = append(diags, parseDiags...)

	next := parser.Peek()
	if next.Type != TokenEOF && !parser.recovery {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Extra characters after expression",
			Detail:   "An expression was successfully parsed, but extra characters were found after it.",
			Subject:  &next.Range,
		})
	}

	parser.PopIncludeNewlines()

	// Panic if the parser uses incorrect stack discipline with the peeker's
	// newlines stack, since otherwise it will produce confusing downstream
	// errors.
	peeker.AssertEmptyIncludeNewlinesStack()

	return expr, diags
}

// ParseTemplate parses the given buffer as a standalone HCL template,
// returning it as an instance of Expression.
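//
// The following is a minimal usage sketch; the template source and variable
// names are illustrative assumptions (a nil evaluation context suffices here
// only because the template uses no variables or functions):
//
//	tmpl, diags := hclsyntax.ParseTemplate([]byte("Hello, HCL!"), "greeting.tmpl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
//	if !diags.HasErrors() {
//		val, valDiags := tmpl.Value(nil) // cty.StringVal("Hello, HCL!")
//		diags = append(diags, valDiags...)
//		_ = val
//	}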
func ParseTemplate(src []byte, filename string, start hcl.Pos) (Expression, hcl.Diagnostics) {
	tokens, diags := LexTemplate(src, filename, start)
	peeker := newPeeker(tokens, false)
	parser := &parser{peeker: peeker}
	expr, parseDiags := parser.ParseTemplate()
	diags = append(diags, parseDiags...)

	// Panic if the parser uses incorrect stack discipline with the peeker's
	// newlines stack, since otherwise it will produce confusing downstream
	// errors.
	peeker.AssertEmptyIncludeNewlinesStack()

	return expr, diags
}

// ParseTraversalAbs parses the given buffer as a standalone absolute traversal.
//
// Parsing as a traversal is more limited than parsing as an expression since
// it allows only attribute and indexing operations on variables. Traversals
// are useful as a syntax for referring to objects without necessarily
// evaluating them.
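//
// The following is a minimal usage sketch; the traversal source and filename
// are illustrative assumptions:
//
//	traversal, diags := hclsyntax.ParseTraversalAbs([]byte("foo.bar[0]"), "ref.hcl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
//	if !diags.HasErrors() {
//		root := traversal.RootName() // "foo"
//		_ = root
//	}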
func ParseTraversalAbs(src []byte, filename string, start hcl.Pos) (hcl.Traversal, hcl.Diagnostics) {
	tokens, diags := LexExpression(src, filename, start)
	peeker := newPeeker(tokens, false)
	parser := &parser{peeker: peeker}

	// Bare traversals are always parsed in "ignore newlines" mode, as if
	// they were wrapped in parentheses.
	parser.PushIncludeNewlines(false)

	expr, parseDiags := parser.ParseTraversalAbs()
	diags = append(diags, parseDiags...)

	parser.PopIncludeNewlines()

	// Panic if the parser uses incorrect stack discipline with the peeker's
	// newlines stack, since otherwise it will produce confusing downstream
	// errors.
	peeker.AssertEmptyIncludeNewlinesStack()

	return expr, diags
}

// LexConfig performs lexical analysis on the given buffer, treating it as a
// whole HCL config file, and returns the resulting tokens.
//
// Only minimal validation is done during lexical analysis, so the returned
// diagnostics may include errors about lexical issues such as bad character
// encodings or unrecognized characters, but full parsing is required to
// detect _all_ syntax errors.
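//
// The following is a minimal usage sketch; the source buffer and filename
// are illustrative assumptions:
//
//	tokens, diags := hclsyntax.LexConfig(src, "example.hcl", hcl.Pos{Line: 1, Column: 1, Byte: 0})
//	for _, tok := range tokens {
//		// tok.Type and tok.Range describe each token; the slice ends with TokenEOF.
//		_ = tok
//	}
//	_ = diags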
func LexConfig(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
	tokens := scanTokens(src, filename, start, scanNormal)
	diags := checkInvalidTokens(tokens)
	return tokens, diags
}

// LexExpression performs lexical analysis on the given buffer, treating it as
// a standalone HCL expression, and returns the resulting tokens.
//
// Only minimal validation is done during lexical analysis, so the returned
// diagnostics may include errors about lexical issues such as bad character
// encodings or unrecognized characters, but full parsing is required to
// detect _all_ syntax errors.
func LexExpression(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
	// This is actually just the same thing as LexConfig, since configs
	// and expressions lex in the same way.
	tokens := scanTokens(src, filename, start, scanNormal)
	diags := checkInvalidTokens(tokens)
	return tokens, diags
}

// LexTemplate performs lexical analysis on the given buffer, treating it as a
// standalone HCL template, and returns the resulting tokens.
//
// Only minimal validation is done during lexical analysis, so the returned
// diagnostics may include errors about lexical issues such as bad character
// encodings or unrecognized characters, but full parsing is required to
// detect _all_ syntax errors.
func LexTemplate(src []byte, filename string, start hcl.Pos) (Tokens, hcl.Diagnostics) {
	tokens := scanTokens(src, filename, start, scanTemplate)
	diags := checkInvalidTokens(tokens)
	return tokens, diags
}

// ValidIdentifier tests if the given string could be a valid identifier in
// a native syntax expression.
//
// This is useful when accepting names from the user that will be used as
// variable or attribute names in the scope, to ensure that any name chosen
// will be traversable using the variable or attribute traversal syntax.
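//
// For example, a caller accepting user-supplied names might check them as
// follows (a sketch; the names shown are illustrative):
//
//	hclsyntax.ValidIdentifier("instance_type") // true
//	hclsyntax.ValidIdentifier("not valid!")    // false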
func ValidIdentifier(s string) bool {
	// This is a kinda-expensive way to do something pretty simple, but it
	// is easiest to do with our existing scanner-related infrastructure here
	// and nobody should be validating identifiers in a tight loop.
	tokens := scanTokens([]byte(s), "", hcl.Pos{}, scanIdentOnly)
	return len(tokens) == 2 && tokens[0].Type == TokenIdent && tokens[1].Type == TokenEOF
}