#!/usr/bin/env python3
# -*- coding: utf-8 -*-


import string

from mathics_scanner import (
    InvalidSyntaxError,
    Tokeniser,
    TranslateError,
    is_symbol_name,
)

from mathics.core.parser.ast import Node, Number, Symbol, String, Filename
from mathics.core.parser.operators import (
    prefix_ops,
    postfix_ops,
    left_binary_ops,
    right_binary_ops,
    nonassoc_binary_ops,
    flat_binary_ops,
    ternary_ops,
    binary_ops,
    all_ops,
    inequality_ops,
    misc_ops,
)


special_symbols = {
    "\u03C0": "Pi",  # Pi
    "\uF74D": "E",  # ExponentialE
    "\uF74E": "I",  # ImaginaryI
    "\uF74F": "I",  # ImaginaryJ
    "\u221E": "Infinity",  # Infinity
    "\u00B0": "Degree",  # Degree
}


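# Digit values used by p_Number when validating non-decimal literals such as
# 16^^ff: each character maps to its numeric value, and "." maps to 0 so a
# radix point always passes the per-digit base check in p_Number.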
permitted_digits = {c: i for i, c in enumerate(string.digits + string.ascii_lowercase)}
permitted_digits["."] = 0


class Parser:
    def __init__(self):
        # no implicit times on these tokens
        self.halt_tags = {
            "END",
            "RawRightAssociation",
            "RawRightParenthesis",
            "RawComma",
            "RawRightBrace",
            "RawRightBracket",
            "RawColon",
            "DifferentialD",
        }

    def parse(self, feeder):
        self.feeder = feeder
        self.tokeniser = Tokeniser(feeder)
        self.current_token = None
        self.bracket_depth = 0
        self.box_depth = 0
        return self.parse_e()
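
    # Illustrative usage (a sketch, not part of the parser API; it assumes a
    # feeder object such as mathics.core.parser.feed.SingleLineFeeder -- any
    # LineFeeder-like object accepted by Tokeniser should work):
    #
    #     feeder = SingleLineFeeder("a + b c")
    #     tree = Parser().parse(feeder)
    #     # roughly Node("Plus", Symbol("a"), Node("Times", Symbol("b"), Symbol("c")))
    #
    # parse() resets the tokeniser and the bracket/box counters, so a single
    # Parser instance can be reused for successive inputs.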

    def next(self):
        if self.current_token is None:
            self.current_token = self.tokeniser.next()
        return self.current_token

    def next_noend(self):
        "Return the next token that is not END."
        while True:
            token = self.next()
            if token.tag != "END":
                return token
            self.incomplete(token.pos)

    def consume(self):
        self.current_token = None

    def incomplete(self, pos):
        self.tokeniser.incomplete()
        self.backtrack(pos)

    def expect(self, expected_tag):
        token = self.next_noend()
        if token.tag == expected_tag:
            self.consume()
        else:
            self.tokeniser.sntx_message(token.pos)
            raise InvalidSyntaxError()

    def backtrack(self, pos):
        self.tokeniser.pos = pos
        self.current_token = None

    def parse_e(self):
        result = []
        while self.next().tag != "END":
            result.append(self.parse_exp(0))
        if len(result) > 1:
            return Node("Times", *result)
        elif len(result) == 1:
            return result[0]
        else:
            return None

    def parse_exp(self, p):
        result = self.parse_p()
        while True:
            if self.bracket_depth > 0:
                token = self.next_noend()
            else:
                token = self.next()
            tag = token.tag
            method = getattr(self, "e_" + tag, None)
            if method is not None:
                new_result = method(result, token, p)
            elif tag in inequality_ops:
                new_result = self.parse_inequality(result, token, p)
            elif tag in binary_ops:
                new_result = self.parse_binary(result, token, p)
            elif tag in ternary_ops:
                new_result = self.parse_ternary(result, token, p)
            elif tag in postfix_ops:
                new_result = self.parse_postfix(result, token, p)
            elif tag not in self.halt_tags and flat_binary_ops["Times"] >= p:
                # implicit times
                q = flat_binary_ops["Times"]
                child = self.parse_exp(q + 1)
                new_result = Node("Times", result, child).flatten()
            else:
                new_result = None
            if new_result is None:
                break
            else:
                result = new_result
        return result
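
    # parse_exp implements precedence climbing: parse_p reads one operand and
    # the loop keeps extending it while the next operator binds at least as
    # tightly as `p`.  A token that is neither an operator nor a halt tag is
    # treated as implicit multiplication, e.g. "2 x" becomes
    # Node("Times", Number("2"), Symbol("x")).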

    def parse_p(self):
        token = self.next_noend()
        tag = token.tag
        method = getattr(self, "p_" + tag, None)
        if method is not None:
            return method(token)
        elif tag in prefix_ops:
            self.consume()
            q = prefix_ops[tag]
            child = self.parse_exp(q)
            return Node(tag, child)
        else:
            self.tokeniser.sntx_message(token.pos)
            raise InvalidSyntaxError()

    def parse_box(self, p):
        result = None
        while True:
            token = self.next()
            tag = token.tag
            method = getattr(self, "b_" + tag, None)
            if method is not None:
                new_result = method(result, token, p)
            elif tag in ("OtherscriptBox", "RightRowBox"):
                break
            elif tag == "END":
                # ask the feeder for more input, then re-read the next token
                self.incomplete(token.pos)
                continue
            elif result is None:
                self.consume()
                new_result = String(token.text)
                if new_result.value == r"\(":
                    new_result = self.p_LeftRowBox(token)
            else:
                new_result = None
            if new_result is None:
                break
            else:
                result = new_result
        if result is None:
            result = String("")
        return result

    def parse_seq(self):
        result = []
        while True:
            token = self.next_noend()
            tag = token.tag
            if tag == "RawComma":
                self.tokeniser.feeder.message("Syntax", "com")
                result.append(Symbol("Null"))
                self.consume()
            elif tag in ("RawRightAssociation", "RawRightBrace", "RawRightBracket"):
                if result:
                    self.tokeniser.feeder.message("Syntax", "com")
                    result.append(Symbol("Null"))
                break
            else:
                result.append(self.parse_exp(0))
                token = self.next_noend()
                tag = token.tag
                if tag == "RawComma":
                    self.consume()
                    continue
                elif tag in ("RawRightAssociation", "RawRightBrace", "RawRightBracket"):
                    break
        return result

    def parse_inequality(self, expr1, token, p):
        tag = token.tag
        q = flat_binary_ops[tag]
        if q < p:
            return None
        self.consume()
        head = expr1.get_head_name()
        expr2 = self.parse_exp(q + 1)
        if head == "Inequality" and not expr1.parenthesised:
            expr1.children.append(Symbol(tag))
            expr1.children.append(expr2)
        elif head in inequality_ops and head != tag and not expr1.parenthesised:
            children = []
            first = True
            for child in expr1.children:
                if not first:
                    children.append(Symbol(head))
                children.append(child)
                first = False
            children.append(Symbol(tag))
            children.append(expr2)
            expr1 = Node("Inequality", *children)
        else:
            expr1 = Node(tag, expr1, expr2).flatten()
        return expr1
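
    # Chained comparisons with a single relation stay flat; mixed relations
    # fold into an Inequality node (illustrative):
    #   a < b < c   ->  Node("Less", a, b, c)
    #   a < b <= c  ->  Node("Inequality", a, Symbol("Less"), b,
    #                        Symbol("LessEqual"), c)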

    def parse_binary(self, expr1, token, p):
        tag = token.tag
        q = binary_ops[tag]
        if q < p:
            return None
        self.consume()
        if tag not in right_binary_ops:
            q += 1
        expr2 = self.parse_exp(q)
        # flatten or associate
        if (
            tag in nonassoc_binary_ops
            and expr1.get_head_name() == tag
            and not expr1.parenthesised
        ):
            self.tokeniser.sntx_message(token.pos)
            raise InvalidSyntaxError()
        result = Node(tag, expr1, expr2)
        if tag in flat_binary_ops:
            result.flatten()
        return result

    def parse_postfix(self, expr1, token, p):
        tag = token.tag
        q = postfix_ops[tag]
        if q < p:
            return None
        self.consume()
        return Node(tag, expr1)
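
    # The table-driven operators end up here (illustrative):
    #   a -> b -> c  ->  Node("Rule", a, Node("Rule", b, c))  (right associative)
    #   a + b + c    ->  Node("Plus", a, b, c)                (flat, via flatten())
    #   x!           ->  Node("Factorial", x)                 (postfix)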

    # P methods
    #
    # p_xxx methods are called from parse_p.
    # Each is called with a single Token and returns a Node.
    # Used for prefix operators and brackets.

    def p_Factorial(self, token):
        self.consume()
        q = prefix_ops["Not"]
        child = self.parse_exp(q)
        return Node("Not", child)

    def p_Factorial2(self, token):
        self.consume()
        q = prefix_ops["Not"]
        child = self.parse_exp(q)
        return Node("Not", Node("Not", child))

    def p_RawLeftParenthesis(self, token):
        self.consume()
        self.bracket_depth += 1
        result = self.parse_exp(0)
        self.expect("RawRightParenthesis")
        self.bracket_depth -= 1
        result.parenthesised = True
        return result

    def p_RawLeftBrace(self, token):
        self.consume()
        self.bracket_depth += 1
        seq = self.parse_seq()
        self.expect("RawRightBrace")
        self.bracket_depth -= 1
        return Node("List", *seq)

    def p_RawLeftAssociation(self, token):
        self.consume()
        self.bracket_depth += 1
        seq = self.parse_seq()
        self.expect("RawRightAssociation")
        self.bracket_depth -= 1
        return Node("Association", *seq)
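
    # Illustrative shapes for the bracketed forms above:
    #   (a + b)     ->  the inner node, with .parenthesised set to True
    #   {a, b}      ->  Node("List", a, b)
    #   <|a -> 1|>  ->  Node("Association", Node("Rule", a, Number("1")))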

    def p_LeftRowBox(self, token):
        self.consume()
        children = []
        self.box_depth += 1
        self.bracket_depth += 1
        token = self.next()
        while token.tag not in ("RightRowBox", "OtherscriptBox"):
            newnode = self.parse_box(0)
            children.append(newnode)
            token = self.next()
        if len(children) == 0:
            result = String("")
        elif len(children) == 1:
            result = children[0]
        else:
            result = Node("RowBox", Node("List", *children))
        self.expect("RightRowBox")
        self.box_depth -= 1
        self.bracket_depth -= 1
        result.parenthesised = True
        return result

    def p_Number(self, token):
        s = token.text

        # sign
        if s[0] == "-":
            s = s[1:]
            sign = -1
        else:
            sign = 1

        # base
        s = s.split("^^")
        if len(s) == 1:
            base, s = 10, s[0]
        else:
            assert len(s) == 2
            base, s = int(s[0]), s[1]
            if not 2 <= base <= 36:
                self.tokeniser.feeder.message("General", "base", base, token.text, 36)
                self.tokeniser.sntx_message(token.pos)
                raise InvalidSyntaxError()

        # *^ exponent (splitting it off leaves the mantissa)
        s = s.split("*^")
        if len(s) == 1:
            exp, s = 0, s[0]
        else:
            # TODO modify regex and provide error if `exp` is not an int
            exp, s = int(s[1]), s[0]

        # precision/accuracy
        s = s.split("`", 1)
        if len(s) == 1:
            s, suffix = s[0], None
        else:
            s, suffix = s[0], s[1]

        for i, c in enumerate(s.lower()):
            if permitted_digits[c] >= base:
                self.tokeniser.feeder.message("General", "digit", i + 1, s, base)
                self.tokeniser.sntx_message(token.pos)
                raise InvalidSyntaxError()

        result = Number(s, sign=sign, base=base, suffix=suffix, exp=exp)
        self.consume()
        return result
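
    # Number literal forms handled above (illustrative):
    #   16^^ff    base 16, digits "ff"
    #   1.5*^10   mantissa "1.5" with *^ exponent 10
    #   3.14`20   mantissa "3.14" with precision suffix "20"
    # A leading "-" is folded into `sign`, and the digit loop rejects any digit
    # that is out of range for the given base.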

    def p_String(self, token):
        result = String(token.text[1:-1])
        self.consume()
        return result

    def p_Symbol(self, token):
        symbol_name = special_symbols.get(token.text, token.text)
        result = Symbol(symbol_name, context=None)
        self.consume()
        return result

    def p_Filename(self, token):
        result = Filename(token.text)
        self.consume()
        return result

    def p_Span(self, token):
        return self.e_Span(Number("1"), token, 0)

    def p_Integral(self, token):
        self.consume()
        inner_prec, outer_prec = all_ops["Sum"] + 1, all_ops["Power"] - 1
        expr1 = self.parse_exp(inner_prec)
        self.expect("DifferentialD")
        expr2 = self.parse_exp(outer_prec)
        return Node("Integrate", expr1, expr2)

    def p_Pattern(self, token):
        self.consume()
        text = token.text
        if "." in text:
            name = text[:-2]
            if name:
                return Node(
                    "Optional",
                    Node("Pattern", Symbol(name, context=None), Node("Blank")),
                )
            else:
                return Node("Optional", Node("Blank"))
        pieces = text.split("_")
        count = len(pieces) - 1
        if count == 1:
            name = "Blank"
        elif count == 2:
            name = "BlankSequence"
        elif count == 3:
            name = "BlankNullSequence"
        if pieces[-1]:
            blank = Node(name, Symbol(pieces[-1], context=None))
        else:
            blank = Node(name)
        if pieces[0]:
            return Node("Pattern", Symbol(pieces[0], context=None), blank)
        else:
            return blank
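
    # Pattern shorthands handled above (illustrative):
    #   _          ->  Node("Blank")
    #   x_         ->  Node("Pattern", x, Node("Blank"))
    #   x_Integer  ->  Node("Pattern", x, Node("Blank", Symbol("Integer")))
    #   x__        ->  Node("Pattern", x, Node("BlankSequence"))
    #   ___        ->  Node("BlankNullSequence")
    #   x_.        ->  Node("Optional", Node("Pattern", x, Node("Blank")))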

    def p_Minus(self, token):
        self.consume()
        q = prefix_ops["Minus"]
        expr = self.parse_exp(q)
        if isinstance(expr, Number) and not expr.value.startswith("-"):
            expr.value = "-" + expr.value
            return expr
        else:
            return Node("Times", Number("1", sign=-1), expr).flatten()

    def p_Plus(self, token):
        self.consume()
        q = prefix_ops["Minus"]
        # note: flattening here also flattens e.g. `+ a + b`
        return Node("Plus", self.parse_exp(q)).flatten()

    def p_PlusMinus(self, token):
        self.consume()
        q = prefix_ops["Minus"]
        return Node("PlusMinus", self.parse_exp(q))

    def p_MinusPlus(self, token):
        self.consume()
        q = prefix_ops["Minus"]
        return Node("MinusPlus", self.parse_exp(q))

    def p_Out(self, token):
        self.consume()
        text = token.text
        if text == "%":
            return Node("Out")
        if text.endswith("%"):
            n = str(-len(text))
        else:
            n = text[1:]
        return Node("Out", Number(n))

    def p_Slot(self, token):
        self.consume()
        text = token.text
        if len(text) == 1:
            n = Number("1")
        else:
            n = text[1:]
            if n.isdigit():
                n = Number(n)
            else:
                n = String(n)
        return Node("Slot", n)

    def p_SlotSequence(self, token):
        self.consume()
        text = token.text
        if len(text) == 2:
            n = "1"
        else:
            n = text[2:]
        return Node("SlotSequence", Number(n))
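
    # Illustrative shapes for the %, # and ## handlers above:
    #   %      ->  Node("Out")
    #   %%%    ->  Node("Out", Number("-3"))
    #   %5     ->  Node("Out", Number("5"))
    #   #      ->  Node("Slot", Number("1"))
    #   #2     ->  Node("Slot", Number("2"))
    #   #name  ->  Node("Slot", String("name"))
    #   ##     ->  Node("SlotSequence", Number("1"))
    #   ##3    ->  Node("SlotSequence", Number("3"))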

    def p_Increment(self, token):
        self.consume()
        q = prefix_ops["PreIncrement"]
        return Node("PreIncrement", self.parse_exp(q))

    def p_Decrement(self, token):
        self.consume()
        q = prefix_ops["PreDecrement"]
        return Node("PreDecrement", self.parse_exp(q))

    def p_PatternTest(self, token):
        # in prefix position, "?expr" asks for information about expr
        self.consume()
        q = prefix_ops["Definition"]
        child = self.parse_exp(q)
        return Node(
            "Information", child, Node("Rule", Symbol("LongForm"), Symbol("False"))
        )

    def p_Information(self, token):
        # "??symbol" asks for the long form of the information
        self.consume()
        q = prefix_ops["Information"]
        child = self.parse_exp(q)
        if child.__class__ is not Symbol:
            raise InvalidSyntaxError()
        return Node(
            "Information", child, Node("Rule", Symbol("LongForm"), Symbol("True"))
        )

    # E methods
    #
    # e_xxx methods are called from parse_e.
    # They take (Node, Token, precedence) args and return a Node,
    # or None if the operator's precedence is too low.
    # Used for binary and ternary operators.

    def e_Span(self, expr1, token, p):
        q = ternary_ops["Span"]
        if q < p:
            return None

        if expr1.get_head_name() == "Span" and not expr1.parenthesised:
            return None
        self.consume()
        # Span[expr1, expr2]
        token = self.next()
        if token.tag == "Span":
            expr2 = Symbol("All")
        elif token.tag == "END" and self.bracket_depth == 0:
            # So that e.g. 'x = 1 ;;' doesn't wait for newline in the frontend
            expr2 = Symbol("All")
            return Node("Span", expr1, expr2)
        else:
            messages = list(self.feeder.messages)
            try:
                expr2 = self.parse_exp(q + 1)
            except TranslateError:
                expr2 = Symbol("All")
                self.backtrack(token.pos)
                self.feeder.messages = messages
        token = self.next()
        if token.tag == "Span":
            self.consume()
            messages = list(self.feeder.messages)
            try:
                expr3 = self.parse_exp(q + 1)
                return Node("Span", expr1, expr2, expr3)
            except TranslateError:
                self.backtrack(token.pos)
                self.feeder.messages = messages
        return Node("Span", expr1, expr2)
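
    # Span shapes (illustrative):
    #   a ;; b       ->  Node("Span", a, b)
    #   a ;;         ->  Node("Span", a, Symbol("All"))
    #   a ;; b ;; c  ->  Node("Span", a, b, c)
    #   ;; b         ->  Node("Span", Number("1"), b)   (via p_Span)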

    def e_RawLeftBracket(self, expr, token, p):
        q = all_ops["Part"]
        if q < p:
            return None
        self.consume()
        self.bracket_depth += 1
        token = self.next_noend()
        if token.tag == "RawLeftBracket":
            self.consume()
            seq = self.parse_seq()
            self.expect("RawRightBracket")
            self.expect("RawRightBracket")
            self.bracket_depth -= 1
            return Node("Part", expr, *seq)
        else:
            seq = self.parse_seq()
            self.expect("RawRightBracket")
            self.bracket_depth -= 1
            result = Node(expr, *seq)
            result.parenthesised = True
            return result
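
    # Square brackets are either a function call or Part (illustrative):
    #   f[x, y]    ->  Node(f, x, y)
    #   e[[1, 2]]  ->  Node("Part", e, Number("1"), Number("2"))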

    def e_Infix(self, expr1, token, p):
        q = ternary_ops["Infix"]
        if q < p:
            return None
        self.consume()
        expr2 = self.parse_exp(q + 1)
        self.expect("Infix")
        expr3 = self.parse_exp(q + 1)
        return Node(expr2, expr1, expr3)

    def e_Postfix(self, expr1, token, p):
        q = left_binary_ops["Postfix"]
        if q < p:
            return None
        self.consume()
        # postfix has the lowest precedence and is left associative
        expr2 = self.parse_exp(q + 1)
        return Node(expr2, expr1)

    def e_Prefix(self, expr1, token, p):
        q = 640
        if q < p:
            return None
        self.consume()
        expr2 = self.parse_exp(q)
        return Node(expr1, expr2)
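
    # The three application operators above (illustrative):
    #   a ~f~ b    ->  Node(f, a, b)   (Infix)
    #   expr // f  ->  Node(f, expr)   (Postfix)
    #   f @ expr   ->  Node(f, expr)   (Prefix, hard-coded here at precedence 640)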

    def e_ApplyList(self, expr1, token, p):
        q = right_binary_ops["Apply"]
        if q < p:
            return None
        self.consume()
        expr2 = self.parse_exp(q)
        expr3 = Node("List", Number("1"))
        return Node("Apply", expr1, expr2, expr3)

    def e_Function(self, expr1, token, p):
        q = postfix_ops["Function"]
        if q < p:
            return None
        # postfix or right-binary, determined by the operator symbol
        self.consume()
        if token.text == "&":
            return Node("Function", expr1)
        else:
            expr2 = self.parse_exp(q)
            return Node("Function", expr1, expr2)
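
    # Illustrative shapes:
    #   f @@@ expr  ->  Node("Apply", f, expr, Node("List", Number("1")))
    #   body &      ->  Node("Function", body)
    # A Function token other than "&" (e.g. \[Function]) takes a right-hand
    # side as well: params \[Function] body  ->  Node("Function", params, body).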

    def e_RawColon(self, expr1, token, p):
        head_name = expr1.get_head_name()
        if head_name == "Symbol":
            head = "Pattern"
        elif head_name in (
            "Blank",
            "BlankSequence",
            "BlankNullSequence",
            "Pattern",
            "Optional",
        ):
            head = "Optional"
        else:
            return None
        q = all_ops[head]
        if q < p:
            return None
        self.consume()
        expr2 = self.parse_exp(q + 1)
        return Node(head, expr1, expr2)

    def e_Semicolon(self, expr1, token, p):
        q = flat_binary_ops["CompoundExpression"]
        if q < p:
            return None
        self.consume()

        # XXX this has to come before call to self.next()
        pos = self.tokeniser.pos
        messages = list(self.feeder.messages)

        # So that e.g. 'x = 1;' doesn't wait for newline in the frontend
        tag = self.next().tag
        if tag == "END" and self.bracket_depth == 0:
            expr2 = Symbol("Null")
            return Node("CompoundExpression", expr1, expr2).flatten()

        # XXX look for next expr otherwise backtrack
        try:
            expr2 = self.parse_exp(q + 1)
        except TranslateError:
            self.backtrack(pos)
            self.feeder.messages = messages
            expr2 = Symbol("Null")
        return Node("CompoundExpression", expr1, expr2).flatten()

    def e_Minus(self, expr1, token, p):
        q = left_binary_ops["Subtract"]
        if q < p:
            return None
        self.consume()
        expr2 = self.parse_exp(q + 1)
        if isinstance(expr2, Number) and not expr2.value.startswith("-"):
            expr2.value = "-" + expr2.value
        else:
            expr2 = Node("Times", Number("1", sign=-1), expr2).flatten()
        return Node("Plus", expr1, expr2).flatten()
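
    # Illustrative shapes for the two handlers above:
    #   a; b   ->  Node("CompoundExpression", a, b)
    #   a;     ->  Node("CompoundExpression", a, Symbol("Null"))
    #   a - b  ->  Plus[a, Times[-1, b]], or Plus[a, -b] when b is a literal
    #              Number (the -1 is built as Number("1", sign=-1))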

    def e_TagSet(self, expr1, token, p):
        q = all_ops["Set"]
        if q < p:
            return None
        self.consume()
        expr2 = self.parse_exp(q + 1)
        # examine next token
        token = self.next_noend()
        tag = token.tag
        if tag == "Set":
            head = "TagSet"
        elif tag == "SetDelayed":
            head = "TagSetDelayed"
        elif tag == "Unset":
            head = "TagUnset"
        else:
            self.tokeniser.sntx_message(token.pos)
            raise InvalidSyntaxError()
        self.consume()
        if head == "TagUnset":
            return Node(head, expr1, expr2)
        expr3 = self.parse_exp(q + 1)
        return Node(head, expr1, expr2, expr3)
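
    # TagSet family (illustrative):
    #   f /: lhs = rhs   ->  Node("TagSet", f, lhs, rhs)
    #   f /: lhs := rhs  ->  Node("TagSetDelayed", f, lhs, rhs)
    #   f /: lhs =.      ->  Node("TagUnset", f, lhs)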

    def e_Unset(self, expr1, token, p):
        q = all_ops["Set"]
        if q < p:
            return None
        self.consume()
        return Node("Unset", expr1)

    def e_Derivative(self, expr1, token, p):
        q = postfix_ops["Derivative"]
        if q < p:
            return None
        n = 0
        while self.next().tag == "Derivative":
            self.consume()
            n += 1
        head = Node("Derivative", Number(str(n)))
        return Node(head, expr1)
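
    # Repeated derivative marks accumulate, e.g. f'' parses to
    # Node(Node("Derivative", Number("2")), f), i.e. Derivative[2][f].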

    def e_Divide(self, expr1, token, p):
        q = left_binary_ops["Divide"]
        if q < p:
            return None
        self.consume()
        expr2 = self.parse_exp(q + 1)
        return Node(
            "Times", expr1, Node("Power", expr2, Number("1", sign=-1))
        ).flatten()
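
    # Division desugars to a product with a negative power, e.g.
    # a / b  ->  Node("Times", a, Node("Power", b, Number("1", sign=-1))).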

    def e_Alternatives(self, expr1, token, p):
        q = flat_binary_ops["Alternatives"]
        if q < p:
            return None
        self.consume()
        expr2 = self.parse_exp(q + 1)
        return Node("Alternatives", expr1, expr2).flatten()

    def e_MessageName(self, expr1, token, p):
        leaves = [expr1]
        while self.next().tag == "MessageName":
            self.consume()
            token = self.next()
            if token.tag == "Symbol":
                # silently convert Symbol to String
                self.consume()
                leaf = String(token.text)
            elif token.tag == "String":
                leaf = self.p_String(token)
            else:
                self.tokeniser.sntx_message(token.pos)
                raise InvalidSyntaxError()
            leaves.append(leaf)
        return Node("MessageName", *leaves)
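
    # Message names chain, and a bare symbol after "::" becomes a string:
    #   f::usage        ->  Node("MessageName", f, String("usage"))
    #   f::usage::lang  ->  Node("MessageName", f, String("usage"), String("lang"))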

    # B methods
    #
    # b_xxx methods are called from parse_box.
    # They take (Node, Token, precedence) args and return a Node or None.
    # The first argument may be None if the LHS is absent.
    # Used for boxes.

    def b_SqrtBox(self, box0, token, p):
        if box0 is not None:
            return None
        self.consume()
        q = misc_ops["SqrtBox"]
        box1 = self.parse_box(q)
        if self.next().tag == "OtherscriptBox":
            self.consume()
            box2 = self.parse_box(q)
            return Node("RadicalBox", box1, box2)
        else:
            return Node("SqrtBox", box1)
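
    # Box-input sketch (illustrative, assuming the usual escape spellings in
    # which the SqrtBox token is written "\@" and OtherscriptBox "\%"):
    #   \( \@ x \)       ->  Node("SqrtBox", x)
    #   \( \@ x \% n \)  ->  Node("RadicalBox", x, n)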

    def b_SuperscriptBox(self, box1, token, p):
        q = misc_ops["SuperscriptBox"]
        if q < p:
            return None
        if box1 is None:
            box1 = String("")
        self.consume()
        box2 = self.parse_box(q)
        if self.next().tag == "OtherscriptBox":
            self.consume()
            box3 = self.parse_box(misc_ops["SubsuperscriptBox"])
            return Node("SubsuperscriptBox", box1, box3, box2)
        else:
            return Node("SuperscriptBox", box1, box2)

    def b_SubscriptBox(self, box1, token, p):
        q = misc_ops["SubscriptBox"]
        if q < p:
            return None
        if box1 is None:
            box1 = String("")
        self.consume()
        box2 = self.parse_box(q)
        if self.next().tag == "OtherscriptBox":
            self.consume()
            box3 = self.parse_box(misc_ops["SubsuperscriptBox"])
            return Node("SubsuperscriptBox", box1, box2, box3)
        else:
            return Node("SubscriptBox", box1, box2)
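
    # Note the argument order when both scripts are present: a superscript
    # followed by an OtherscriptBox yields SubsuperscriptBox[base, sub, super],
    # so the "other" script fills the subscript slot (and vice versa for
    # b_SubscriptBox above).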

    def b_UnderscriptBox(self, box1, token, p):
        q = misc_ops["UnderscriptBox"]
        if q < p:
            return None
        if box1 is None:
            box1 = String("")
        self.consume()
        box2 = self.parse_box(q)
        if self.next().tag == "OtherscriptBox":
            self.consume()
            box3 = self.parse_box(misc_ops["UnderoverscriptBox"])
            return Node("UnderoverscriptBox", box1, box2, box3)
        else:
            return Node("UnderscriptBox", box1, box2)

    def b_FractionBox(self, box1, token, p):
        q = misc_ops["FractionBox"]
        if q < p:
            return None
        if box1 is None:
            box1 = String("")
        self.consume()
        box2 = self.parse_box(q + 1)
        return Node("FractionBox", box1, box2)

    def b_FormBox(self, box1, token, p):
        q = misc_ops["FormBox"]
        if q < p:
            return None
        if box1 is None:
            box1 = Symbol("StandardForm")  # RawForm
        elif is_symbol_name(box1.value):
            box1 = Symbol(box1.value, context=None)
        else:
            box1 = Node("Removed", String("$$Failure"))
        self.consume()
        box2 = self.parse_box(q)
        return Node("FormBox", box2, box1)

    def b_OverscriptBox(self, box1, token, p):
        q = misc_ops["OverscriptBox"]
        if q < p:
            return None
        if box1 is None:
            box1 = String("")
        self.consume()
        box2 = self.parse_box(q)
        if self.next().tag == "OtherscriptBox":
            self.consume()
            box3 = self.parse_box(misc_ops["UnderoverscriptBox"])
            return Node("UnderoverscriptBox", box1, box3, box2)
        else:
            return Node("OverscriptBox", box1, box2)