package parser

import (
	"errors"
	"fmt"
	"regexp"
	"strings"

	"github.com/gorilla/css/scanner"

	"github.com/aymerick/douceur/css"
)

const (
	importantSuffixRegexp = `(?i)\s*!important\s*$`
)

var (
	importantRegexp *regexp.Regexp
)

// Parser represents a CSS parser
type Parser struct {
	scan *scanner.Scanner // Tokenizer

	// Tokens parsed but not consumed yet
	tokens []*scanner.Token

	// Rule embedding level
	embedLevel int
}

func init() {
	importantRegexp = regexp.MustCompile(importantSuffixRegexp)
}
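
// importantRegexp (compiled above) strips a trailing "!important"
// (case-insensitive, with optional surrounding whitespace) from a declaration
// value. A rough sketch of the intended behaviour:
//
//	importantRegexp.MatchString("red !important")   // true
//	importantRegexp.MatchString("red !IMPORTANT  ") // true
//	importantRegexp.MatchString("red")              // false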

// NewParser instantiates a new parser
func NewParser(txt string) *Parser {
	return &Parser{
		scan: scanner.New(txt),
	}
}

// Parse parses a whole stylesheet
func Parse(text string) (*css.Stylesheet, error) {
	result, err := NewParser(text).ParseStylesheet()
	if err != nil {
		return nil, err
	}

	return result, nil
}
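
// Illustrative usage of the package-level Parse helper (a sketch, not part of
// the API surface; the CSS snippet is an arbitrary example):
//
//	stylesheet, err := Parse("p, span { color: red; }")
//	if err != nil {
//		// handle the parse error
//	}
//	for _, rule := range stylesheet.Rules {
//		fmt.Println(rule.Prelude) // selector prelude, e.g. "p, span"
//	}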

// ParseDeclarations parses CSS declarations
func ParseDeclarations(text string) ([]*css.Declaration, error) {
	result, err := NewParser(text).ParseDeclarations()
	if err != nil {
		return nil, err
	}

	return result, nil
}
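
// Illustrative usage of ParseDeclarations, e.g. for an inline "style"
// attribute value (a sketch; the input string is an arbitrary example, and
// each declaration must be terminated by ";" or "}" to get a value):
//
//	declarations, err := ParseDeclarations("color: blue; margin: 0 auto;")
//	if err != nil {
//		// handle the parse error
//	}
//	for _, decl := range declarations {
//		fmt.Println(decl.Property, decl.Value) // "color blue", then "margin 0 auto"
//	}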

// ParseStylesheet parses a stylesheet
func (parser *Parser) ParseStylesheet() (*css.Stylesheet, error) {
	result := css.NewStylesheet()

	// Parse BOM
	if _, err := parser.parseBOM(); err != nil {
		return result, err
	}

	// Parse list of rules
	rules, err := parser.ParseRules()
	if err != nil {
		return result, err
	}

	result.Rules = rules

	return result, nil
}

// ParseRules parses a list of rules
func (parser *Parser) ParseRules() ([]*css.Rule, error) {
	result := []*css.Rule{}

	inBlock := false
	if parser.tokenChar("{") {
		// parsing a block of rules
		inBlock = true
		parser.embedLevel++

		parser.shiftToken()
	}

	for parser.tokenParsable() {
		if parser.tokenIgnorable() {
			parser.shiftToken()
		} else if parser.tokenChar("}") {
			if !inBlock {
				errMsg := fmt.Sprintf("Unexpected } character: %s", parser.nextToken().String())
				return result, errors.New(errMsg)
			}

			parser.shiftToken()
			parser.embedLevel--

			// finished
			break
		} else {
			rule, err := parser.ParseRule()
			if err != nil {
				return result, err
			}

			rule.EmbedLevel = parser.embedLevel
			result = append(result, rule)
		}
	}

	return result, parser.err()
}
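
// Note on embedding: ParseRules is also reached from parseAtRule when an
// at-rule embeds a rules block; the opening "{" then bumps embedLevel so
// nested rules record their depth. A rough sketch of the expected shape
// (values assumed from the code above):
//
//	stylesheet, _ := Parse("@media screen { p { color: red; } }")
//	media := stylesheet.Rules[0]           // the "@media" at-rule, EmbedLevel 0
//	fmt.Println(media.Rules[0].EmbedLevel) // nested "p" rule, prints 1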

// ParseRule parses a rule
func (parser *Parser) ParseRule() (*css.Rule, error) {
	if parser.tokenAtKeyword() {
		return parser.parseAtRule()
	}

	return parser.parseQualifiedRule()
}

// ParseDeclarations parses a list of declarations
func (parser *Parser) ParseDeclarations() ([]*css.Declaration, error) {
	result := []*css.Declaration{}

	if parser.tokenChar("{") {
		parser.shiftToken()
	}

	for parser.tokenParsable() {
		if parser.tokenIgnorable() {
			parser.shiftToken()
		} else if parser.tokenChar("}") {
			// end of block
			parser.shiftToken()
			break
		} else {
			declaration, err := parser.ParseDeclaration()
			if err != nil {
				return result, err
			}

			result = append(result, declaration)
		}
	}

	return result, parser.err()
}

// ParseDeclaration parses a declaration
func (parser *Parser) ParseDeclaration() (*css.Declaration, error) {
	result := css.NewDeclaration()
	curValue := ""

	for parser.tokenParsable() {
		if parser.tokenChar(":") {
			result.Property = strings.TrimSpace(curValue)
			curValue = ""

			parser.shiftToken()
		} else if parser.tokenChar(";") || parser.tokenChar("}") {
			if result.Property == "" {
				errMsg := fmt.Sprintf("Unexpected ; character: %s", parser.nextToken().String())
				return result, errors.New(errMsg)
			}

			if importantRegexp.MatchString(curValue) {
				result.Important = true
				curValue = importantRegexp.ReplaceAllString(curValue, "")
			}

			result.Value = strings.TrimSpace(curValue)

			if parser.tokenChar(";") {
				parser.shiftToken()
			}

			// finished
			break
		} else {
			token := parser.shiftToken()
			curValue += token.Value
		}
	}

	// log.Printf("[parsed] Declaration: %s", result.String())

	return result, parser.err()
}
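
// Illustrative behaviour of the "!important" handling above (a sketch; the
// input is an arbitrary example):
//
//	decl, _ := NewParser("color: red !important;").ParseDeclaration()
//	fmt.Println(decl.Property, decl.Value, decl.Important) // "color red true"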

// Parse an At Rule
func (parser *Parser) parseAtRule() (*css.Rule, error) {
	// parse rule name (e.g. "@import")
	token := parser.shiftToken()

	result := css.NewRule(css.AtRule)
	result.Name = token.Value

	for parser.tokenParsable() {
		if parser.tokenChar(";") {
			parser.shiftToken()

			// finished
			break
		} else if parser.tokenChar("{") {
			if result.EmbedsRules() {
				// parse rules block
				rules, err := parser.ParseRules()
				if err != nil {
					return result, err
				}

				result.Rules = rules
			} else {
				// parse declarations block
				declarations, err := parser.ParseDeclarations()
				if err != nil {
					return result, err
				}

				result.Declarations = declarations
			}

			// finished
			break
		} else {
			// parse prelude
			prelude, err := parser.parsePrelude()
			if err != nil {
				return result, err
			}

			result.Prelude = prelude
		}
	}

	// log.Printf("[parsed] Rule: %s", result.String())

	return result, parser.err()
}
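
// The branch above depends on css.Rule.EmbedsRules(): at-rules such as
// "@media" carry a nested rules block, while others such as "@font-face"
// carry a plain declarations block. A rough sketch of both shapes (values
// assumed for illustration):
//
//	r1, _ := NewParser("@media screen { p { margin: 0; } }").ParseRule()
//	fmt.Println(r1.Name, r1.Prelude, len(r1.Rules)) // "@media screen 1"
//
//	r2, _ := NewParser("@font-face { src: url(font.woff); }").ParseRule()
//	fmt.Println(r2.Name, len(r2.Declarations)) // "@font-face 1"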

// Parse a Qualified Rule
func (parser *Parser) parseQualifiedRule() (*css.Rule, error) {
	result := css.NewRule(css.QualifiedRule)

	for parser.tokenParsable() {
		if parser.tokenChar("{") {
			if result.Prelude == "" {
				errMsg := fmt.Sprintf("Unexpected { character: %s", parser.nextToken().String())
				return result, errors.New(errMsg)
			}

			// parse declarations block
			declarations, err := parser.ParseDeclarations()
			if err != nil {
				return result, err
			}

			result.Declarations = declarations

			// finished
			break
		} else {
			// parse prelude
			prelude, err := parser.parsePrelude()
			if err != nil {
				return result, err
			}

			result.Prelude = prelude
		}
	}

	result.Selectors = strings.Split(result.Prelude, ",")
	for i, sel := range result.Selectors {
		result.Selectors[i] = strings.TrimSpace(sel)
	}

	// log.Printf("[parsed] Rule: %s", result.String())

	return result, parser.err()
}
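
// The prelude of a qualified rule is split on "," to populate Selectors, with
// each selector trimmed. A rough sketch (values assumed for illustration):
//
//	rule, _ := NewParser("h1, .title { font-weight: bold; }").ParseRule()
//	fmt.Println(rule.Prelude)   // "h1, .title"
//	fmt.Println(rule.Selectors) // "[h1 .title]"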

// Parse Rule prelude
func (parser *Parser) parsePrelude() (string, error) {
	result := ""

	for parser.tokenParsable() && !parser.tokenEndOfPrelude() {
		token := parser.shiftToken()
		result += token.Value
	}

	result = strings.TrimSpace(result)

	// log.Printf("[parsed] prelude: %s", result)

	return result, parser.err()
}

// Parse BOM
func (parser *Parser) parseBOM() (bool, error) {
	if parser.nextToken().Type == scanner.TokenBOM {
		parser.shiftToken()
		return true, nil
	}

	return false, parser.err()
}

// Returns the next token without removing it from the tokens buffer
func (parser *Parser) nextToken() *scanner.Token {
	if len(parser.tokens) == 0 {
		// fetch next token
		nextToken := parser.scan.Next()

		// log.Printf("[token] %s => %v", nextToken.Type.String(), nextToken.Value)

		// queue it
		parser.tokens = append(parser.tokens, nextToken)
	}

	return parser.tokens[0]
}
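
// nextToken and shiftToken implement a one-token lookahead buffer: nextToken
// peeks (pulling at most one token from the scanner into the buffer) and
// shiftToken consumes it. Callers therefore peek before consuming, e.g. as in
// ParseRules above:
//
//	if parser.tokenChar("{") { // peeks via nextToken
//		parser.shiftToken()    // consumes the buffered "{"
//	}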

// Returns the next token and removes it from the tokens buffer. Callers are
// expected to peek first (via nextToken or one of the token* helpers), which
// guarantees the buffer is non-empty.
func (parser *Parser) shiftToken() *scanner.Token {
	var result *scanner.Token

	result, parser.tokens = parser.tokens[0], parser.tokens[1:]
	return result
}

// Returns tokenizer error, or nil if no error
func (parser *Parser) err() error {
	if parser.tokenError() {
		token := parser.nextToken()
		return fmt.Errorf("Tokenizer error: %s", token.String())
	}

	return nil
}

// Returns true if next token is Error
func (parser *Parser) tokenError() bool {
	return parser.nextToken().Type == scanner.TokenError
}

// Returns true if next token is EOF
func (parser *Parser) tokenEOF() bool {
	return parser.nextToken().Type == scanner.TokenEOF
}

// Returns true if next token is a whitespace
func (parser *Parser) tokenWS() bool {
	return parser.nextToken().Type == scanner.TokenS
}

// Returns true if next token is a comment
func (parser *Parser) tokenComment() bool {
	return parser.nextToken().Type == scanner.TokenComment
}

// Returns true if next token is a CDO or a CDC
func (parser *Parser) tokenCDOorCDC() bool {
	switch parser.nextToken().Type {
	case scanner.TokenCDO, scanner.TokenCDC:
		return true
	default:
		return false
	}
}

// Returns true if next token is ignorable
func (parser *Parser) tokenIgnorable() bool {
	return parser.tokenWS() || parser.tokenComment() || parser.tokenCDOorCDC()
}

// Returns true if next token is parsable
func (parser *Parser) tokenParsable() bool {
	return !parser.tokenEOF() && !parser.tokenError()
}

// Returns true if next token is an At Rule keyword
func (parser *Parser) tokenAtKeyword() bool {
	return parser.nextToken().Type == scanner.TokenAtKeyword
}

// Returns true if next token is given character
func (parser *Parser) tokenChar(value string) bool {
	token := parser.nextToken()
	return (token.Type == scanner.TokenChar) && (token.Value == value)
}

// Returns true if next token marks the end of a prelude
func (parser *Parser) tokenEndOfPrelude() bool {
	return parser.tokenChar(";") || parser.tokenChar("{")
}