import yaml
import pprint

# Tokens mnemonic:
# directive:            %
# document_start:       ---
# document_end:         ...
# alias:                *
# anchor:               &
# tag:                  !
# scalar:               _
# block_sequence_start: [[
# block_mapping_start:  {{
# block_end:            ]}
# flow_sequence_start:  [
# flow_sequence_end:    ]
# flow_mapping_start:   {
# flow_mapping_end:     }
# entry:                ,
# key:                  ?
# value:                :

_replaces = {
    yaml.DirectiveToken: '%',
    yaml.DocumentStartToken: '---',
    yaml.DocumentEndToken: '...',
    yaml.AliasToken: '*',
    yaml.AnchorToken: '&',
    yaml.TagToken: '!',
    yaml.ScalarToken: '_',
    yaml.BlockSequenceStartToken: '[[',
    yaml.BlockMappingStartToken: '{{',
    yaml.BlockEndToken: ']}',
    yaml.FlowSequenceStartToken: '[',
    yaml.FlowSequenceEndToken: ']',
    yaml.FlowMappingStartToken: '{',
    yaml.FlowMappingEndToken: '}',
    yaml.BlockEntryToken: ',',
    yaml.FlowEntryToken: ',',
    yaml.KeyToken: '?',
    yaml.ValueToken: ':',
}
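
# Illustration only: a small helper (not part of the original test suite, and
# without a `unittest` attribute, so test_appliance should skip it) that encodes
# a document's token stream with the mnemonics above, the same way test_tokens
# does below. For example, _mnemonics("- foo\n- bar") should return
# '[[ , _ , _ ]}'.
def _mnemonics(document):
    return ' '.join(_replaces[token.__class__]
                    for token in yaml.scan(document)
                    if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)))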

def test_tokens(data_filename, tokens_filename, verbose=False):
    # Scan the data file, encode its tokens with the mnemonics above, and
    # compare the result with the expected stream in the .tokens file.
    tokens1 = []
    tokens2 = open(tokens_filename, 'r').read().split()
    try:
        for token in yaml.scan(open(data_filename, 'rb')):
            if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)):
                tokens1.append(_replaces[token.__class__])
    finally:
        if verbose:
            print("TOKENS1:", ' '.join(tokens1))
            print("TOKENS2:", ' '.join(tokens2))
    assert len(tokens1) == len(tokens2), (tokens1, tokens2)
    for token1, token2 in zip(tokens1, tokens2):
        assert token1 == token2, (token1, token2)

test_tokens.unittest = ['.data', '.tokens']
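
# A minimal usage sketch (the fixture names here are hypothetical; in the test
# suite, test_appliance pairs up matching .data/.tokens files automatically):
#
#     test_tokens('example.data', 'example.tokens', verbose=True)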

def test_scanner(data_filename, canonical_filename, verbose=False):
    # Smoke test: both the data file and its canonical form should scan
    # without raising; only the token class names are collected.
    for filename in [data_filename, canonical_filename]:
        tokens = []
        try:
            for token in yaml.scan(open(filename, 'rb')):
                tokens.append(token.__class__.__name__)
        finally:
            if verbose:
                pprint.pprint(tokens)

test_scanner.unittest = ['.data', '.canonical']

if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())