#
# This file is part of snmpclitools software.
#
# Copyright (c) 2005-2018, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/snmpclitools/license.html
#
import sys

from snmpclitools.cli import spark

# AST


class ConfigToken:
    """Abstract grammar token -- a (type, attr) pair emitted by the scanners.

    All comparison operators compare the token *type* only (typically
    against a plain string), so tokens can be matched directly against
    grammar-symbol names.
    """

    def __init__(self, typ, attr=None):
        self.type = typ
        self.attr = attr

    def __eq__(self, other):
        return self.type == other

    def __ne__(self, other):
        return self.type != other

    def __lt__(self, other):
        return self.type < other

    def __le__(self, other):
        return self.type <= other

    def __gt__(self, other):
        return self.type > other

    def __ge__(self, other):
        return self.type >= other

    def __hash__(self):
        # __eq__ is defined above, which would otherwise leave instances
        # unhashable on Python 3 (and with an id-based hash inconsistent
        # with equality on Python 2); hash consistently with __eq__.
        return hash(self.type)

    def __repr__(self):
        return self.attr or self.type

    def __str__(self):
        if self.attr is None:
            return '%s' % self.type
        else:
            return '%s(%s)' % (self.type, self.attr)


class ConfigNode:
    """AST node class -- N-ary tree.

    Nodes compare by node *type* (same convention as ConfigToken) and
    expose their children through the sequence protocol.
    """

    def __init__(self, typ, attr=None):
        self.type, self.attr = typ, attr
        self._kids = []  # ordered child nodes

    def __getitem__(self, i):
        return self._kids[i]

    def __len__(self):
        return len(self._kids)

    # The AST builder replaces children wholesale via slice assignment;
    # Python 2 routes that through a dedicated __setslice__ dunder.
    if sys.version_info[0] < 3:
        def __setslice__(self, low, high, seq):
            self._kids[low:high] = seq
    else:
        def __setitem__(self, idx, seq):
            self._kids[idx] = seq

    def __eq__(self, other):
        return self.type == other

    def __ne__(self, other):
        return self.type != other

    def __lt__(self, other):
        return self.type < other

    def __le__(self, other):
        return self.type <= other

    def __gt__(self, other):
        return self.type > other

    def __ge__(self, other):
        return self.type >= other

    def __hash__(self):
        # Keep instances hashable on Python 3 despite the custom __eq__,
        # and consistent with type-based equality.
        return hash(self.type)

    def __str__(self):
        if self.attr is None:
            return self.type
        else:
            return '%s(%s)' % (self.type, self.attr)


# Scanner

class __ScannerTemplate(spark.GenericScanner):
    """Base scanner: collects ConfigToken objects into self.rv."""

    def tokenize(self, data):
        self.rv = []
        spark.GenericScanner.tokenize(self, data)
        return self.rv


class __FirstLevelScanner(__ScannerTemplate):
    def t_string(self, s):
        r' [!#\$%&\'\(\)\*\+,\.//0-9<=>\?@A-Z\\\^_`a-z\{\|\}~][!#\$%&\'\(\)\*\+,\-\.//0-9<=>\?@A-Z\\\^_`a-z\{\|\}~]* '
        self.rv.append(ConfigToken('string', s))


class __SecondLevelScanner(__FirstLevelScanner):
    # NOTE(review): despite the name, this token matches a colon (':');
    # the token type string is kept for compatibility with existing grammars.
    def t_semicolon(self, s):
        r' : '
        self.rv.append(ConfigToken('semicolon'))

    def t_lparen(self, s):
        r' \[ '
        self.rv.append(ConfigToken('lparen'))

    def t_rparen(self, s):
        r' \] '
        self.rv.append(ConfigToken('rparen'))

    def t_quote(self, s):
        r' \" '
        self.rv.append(ConfigToken('quote'))

    def t_whitespace(self, s):
        r' \s+ '
        self.rv.append(ConfigToken('whitespace'))


ScannerTemplate = __SecondLevelScanner


# Parser

class ParserTemplate(spark.GenericASTBuilder):
    """AST-building parser template; subclasses supply the grammar."""

    initialSymbol = None  # default start symbol, set by subclasses

    def __init__(self, startSymbol=None):
        if startSymbol is None:
            startSymbol = self.initialSymbol
        spark.GenericASTBuilder.__init__(self, ConfigNode, startSymbol)

    def terminal(self, token):
        # Reduce to homogeneous AST.
        return ConfigNode(token.type, token.attr)


# Generator

class GeneratorTemplate(spark.GenericASTTraversal):
    """AST walker dispatching to n_<type> / n_<type>_exit handler methods."""

    def __init__(self):  # Skip superclass constructor
        pass

    def typestring(self, node):
        return node.type

    def preorder(self, client, node):
        """Visit *node* and its subtree, threading *client* through handlers.

        A handler may raise GenericASTTraversalPruningException to skip
        the node's children (and its exit hook).
        """
        name = 'n_' + self.typestring(node)

        try:
            func = getattr(self, name, None)
            if func is not None:
                func(client, node)
            else:
                self.default(client, node)
        except spark.GenericASTTraversalPruningException:
            return client

        for kid in node:
            self.preorder(client, kid)

        func = getattr(self, name + '_exit', None)
        if func is not None:
            func(client, node)

        return client

    def default(self, client, node):
        # Fallback for node types without a dedicated n_<type> handler.
        pass