1from typing import Callable, Dict, List, Optional, Union, TypeVar, cast
2from functools import partial
3
4from .ast import (
5    ArgumentNode,
6    BooleanValueNode,
7    DefinitionNode,
8    DirectiveDefinitionNode,
9    DirectiveNode,
10    DocumentNode,
11    EnumTypeDefinitionNode,
12    EnumTypeExtensionNode,
13    EnumValueDefinitionNode,
14    EnumValueNode,
15    FieldDefinitionNode,
16    FieldNode,
17    FloatValueNode,
18    FragmentDefinitionNode,
19    FragmentSpreadNode,
20    InlineFragmentNode,
21    InputObjectTypeDefinitionNode,
22    InputObjectTypeExtensionNode,
23    InputValueDefinitionNode,
24    IntValueNode,
25    InterfaceTypeDefinitionNode,
26    InterfaceTypeExtensionNode,
27    ListTypeNode,
28    ListValueNode,
29    Location,
30    NameNode,
31    NamedTypeNode,
32    NonNullTypeNode,
33    NullValueNode,
34    ObjectFieldNode,
35    ObjectTypeDefinitionNode,
36    ObjectTypeExtensionNode,
37    ObjectValueNode,
38    OperationDefinitionNode,
39    OperationType,
40    OperationTypeDefinitionNode,
41    ScalarTypeDefinitionNode,
42    ScalarTypeExtensionNode,
43    SchemaDefinitionNode,
44    SchemaExtensionNode,
45    SelectionNode,
46    SelectionSetNode,
47    StringValueNode,
48    TypeNode,
49    TypeSystemDefinitionNode,
50    TypeSystemExtensionNode,
51    UnionTypeDefinitionNode,
52    UnionTypeExtensionNode,
53    ValueNode,
54    VariableDefinitionNode,
55    VariableNode,
56)
57from .directive_locations import DirectiveLocation
58from .ast import Token
59from .lexer import Lexer, is_punctuator_token_kind
60from .source import Source, is_source
61from .token_kind import TokenKind
62from ..error import GraphQLError, GraphQLSyntaxError
63
64__all__ = ["parse", "parse_type", "parse_value"]
65
66T = TypeVar("T")
67
68SourceType = Union[Source, str]
69
70
def parse(
    source: SourceType,
    no_location: bool = False,
    experimental_fragment_variables: bool = False,
) -> DocumentNode:
    """Parse a GraphQL source string (or Source) into a Document AST.

    Raises GraphQLError when the source contains a syntax error.

    By default every AST node records its location in the source; pass
    ``no_location=True`` to skip that (useful for performance or testing).

    Experimental features:

    With ``experimental_fragment_variables=True`` the parser also accepts
    variable definitions on fragment definitions, stored in the
    :attr:`~graphql.language.FragmentDefinitionNode.variable_definitions`
    field of the resulting :class:`~graphql.language.FragmentDefinitionNode`.

    The syntax is identical to normal, query-defined variables. For example::

        fragment A($var: Boolean = false) on T  {
          ...
        }
    """
    return Parser(
        source,
        no_location=no_location,
        experimental_fragment_variables=experimental_fragment_variables,
    ).parse_document()
104
105
def parse_value(
    source: SourceType,
    no_location: bool = False,
    experimental_fragment_variables: bool = False,
) -> ValueNode:
    """Parse the AST for a string containing a single GraphQL value.

    Raises GraphQLError when the source contains a syntax error.

    Useful for tools that work with GraphQL values in isolation from a
    complete GraphQL document.

    Consider providing the results to the utility function:
    :func:`~graphql.utilities.value_from_ast`.
    """
    parser = Parser(
        source,
        no_location=no_location,
        experimental_fragment_variables=experimental_fragment_variables,
    )
    # The value must span the whole source: consume SOF, the value, then EOF.
    parser.expect_token(TokenKind.SOF)
    value_node = parser.parse_value_literal(False)
    parser.expect_token(TokenKind.EOF)
    return value_node
130
131
def parse_type(
    source: SourceType,
    no_location: bool = False,
    experimental_fragment_variables: bool = False,
) -> TypeNode:
    """Parse the AST for a given string containing a GraphQL Type.

    Throws GraphQLError if a syntax error is encountered.

    This is useful within tools that operate upon GraphQL Types directly and
    in isolation of complete GraphQL documents.

    Consider providing the results to the utility function:
    :func:`~graphql.utilities.type_from_ast`.
    """
    parser = Parser(
        source,
        no_location=no_location,
        experimental_fragment_variables=experimental_fragment_variables,
    )
    parser.expect_token(TokenKind.SOF)
    type_ = parser.parse_type_reference()
    parser.expect_token(TokenKind.EOF)
    return type_
156
157
158class Parser:
159    """GraphQL AST parser.
160
161    This class is exported only to assist people in implementing their own parsers
162    without duplicating too much code and should be used only as last resort for cases
163    such as experimental syntax or if certain features couldn't be contributed upstream.
164
165    It's still part of the internal API and is versioned, so any changes to it are never
166    considered breaking changes. If you still need to support multiple versions of the
167    library, please use the `__version_info__` variable for version detection.
168    """
169
170    _lexer: Lexer
171    _no_Location: bool
172    _experimental_fragment_variables: bool
173
174    def __init__(
175        self,
176        source: SourceType,
177        no_location: bool = False,
178        experimental_fragment_variables: bool = False,
179    ):
180        source = (
181            cast(Source, source) if is_source(source) else Source(cast(str, source))
182        )
183
184        self._lexer = Lexer(source)
185        self._no_location = no_location
186        self._experimental_fragment_variables = experimental_fragment_variables
187
188    def parse_name(self) -> NameNode:
189        """Convert a name lex token into a name parse node."""
190        token = self.expect_token(TokenKind.NAME)
191        return NameNode(value=token.value, loc=self.loc(token))
192
193    # Implement the parsing rules in the Document section.
194
195    def parse_document(self) -> DocumentNode:
196        """Document: Definition+"""
197        start = self._lexer.token
198        return DocumentNode(
199            definitions=self.many(TokenKind.SOF, self.parse_definition, TokenKind.EOF),
200            loc=self.loc(start),
201        )
202
    # Maps the leading keyword of a definition to the suffix of the
    # ``parse_*`` method that handles it (see ``parse_definition``).
    _parse_definition_method_names: Dict[str, str] = {
        **dict.fromkeys(("query", "mutation", "subscription"), "operation_definition"),
        "fragment": "fragment_definition",
        **dict.fromkeys(
            (
                "schema",
                "scalar",
                "type",
                "interface",
                "union",
                "enum",
                "input",
                "directive",
            ),
            "type_system_definition",
        ),
        "extend": "type_system_extension",
    }
221
222    def parse_definition(self) -> DefinitionNode:
223        """Definition: ExecutableDefinition or TypeSystemDefinition/Extension
224
225        ExecutableDefinition: OperationDefinition or FragmentDefinition
226        """
227        if self.peek(TokenKind.NAME):
228            method_name = self._parse_definition_method_names.get(
229                cast(str, self._lexer.token.value)
230            )
231            if method_name:
232                return getattr(self, f"parse_{method_name}")()
233        elif self.peek(TokenKind.BRACE_L):
234            return self.parse_operation_definition()
235        elif self.peek_description():
236            return self.parse_type_system_definition()
237        raise self.unexpected()
238
    # Maps executable-definition keywords to ``parse_*`` method suffixes.
    # NOTE(review): no reference to this table is visible in this chunk —
    # confirm against the rest of the file whether it is still used.
    _parse_executable_definition_method_names: Dict[str, str] = {
        **dict.fromkeys(("query", "mutation", "subscription"), "operation_definition"),
        **dict.fromkeys(("fragment",), "fragment_definition"),
    }
243
244    # Implement the parsing rules in the Operations section.
245
246    def parse_operation_definition(self) -> OperationDefinitionNode:
247        """OperationDefinition"""
248        start = self._lexer.token
249        if self.peek(TokenKind.BRACE_L):
250            return OperationDefinitionNode(
251                operation=OperationType.QUERY,
252                name=None,
253                variable_definitions=[],
254                directives=[],
255                selection_set=self.parse_selection_set(),
256                loc=self.loc(start),
257            )
258        operation = self.parse_operation_type()
259        name = self.parse_name() if self.peek(TokenKind.NAME) else None
260        return OperationDefinitionNode(
261            operation=operation,
262            name=name,
263            variable_definitions=self.parse_variable_definitions(),
264            directives=self.parse_directives(False),
265            selection_set=self.parse_selection_set(),
266            loc=self.loc(start),
267        )
268
269    def parse_operation_type(self) -> OperationType:
270        """OperationType: one of query mutation subscription"""
271        operation_token = self.expect_token(TokenKind.NAME)
272        try:
273            return OperationType(operation_token.value)
274        except ValueError:
275            raise self.unexpected(operation_token)
276
277    def parse_variable_definitions(self) -> List[VariableDefinitionNode]:
278        """VariableDefinitions: (VariableDefinition+)"""
279        return self.optional_many(
280            TokenKind.PAREN_L, self.parse_variable_definition, TokenKind.PAREN_R
281        )
282
283    def parse_variable_definition(self) -> VariableDefinitionNode:
284        """VariableDefinition: Variable: Type DefaultValue? Directives[Const]?"""
285        start = self._lexer.token
286        return VariableDefinitionNode(
287            variable=self.parse_variable(),
288            type=self.expect_token(TokenKind.COLON) and self.parse_type_reference(),
289            default_value=self.parse_value_literal(True)
290            if self.expect_optional_token(TokenKind.EQUALS)
291            else None,
292            directives=self.parse_directives(True),
293            loc=self.loc(start),
294        )
295
296    def parse_variable(self) -> VariableNode:
297        """Variable: $Name"""
298        start = self._lexer.token
299        self.expect_token(TokenKind.DOLLAR)
300        return VariableNode(name=self.parse_name(), loc=self.loc(start))
301
302    def parse_selection_set(self) -> SelectionSetNode:
303        """SelectionSet: {Selection+}"""
304        start = self._lexer.token
305        return SelectionSetNode(
306            selections=self.many(
307                TokenKind.BRACE_L, self.parse_selection, TokenKind.BRACE_R
308            ),
309            loc=self.loc(start),
310        )
311
312    def parse_selection(self) -> SelectionNode:
313        """Selection: Field or FragmentSpread or InlineFragment"""
314        return (
315            self.parse_fragment if self.peek(TokenKind.SPREAD) else self.parse_field
316        )()
317
318    def parse_field(self) -> FieldNode:
319        """Field: Alias? Name Arguments? Directives? SelectionSet?"""
320        start = self._lexer.token
321        name_or_alias = self.parse_name()
322        if self.expect_optional_token(TokenKind.COLON):
323            alias: Optional[NameNode] = name_or_alias
324            name = self.parse_name()
325        else:
326            alias = None
327            name = name_or_alias
328        return FieldNode(
329            alias=alias,
330            name=name,
331            arguments=self.parse_arguments(False),
332            directives=self.parse_directives(False),
333            selection_set=self.parse_selection_set()
334            if self.peek(TokenKind.BRACE_L)
335            else None,
336            loc=self.loc(start),
337        )
338
339    def parse_arguments(self, is_const: bool) -> List[ArgumentNode]:
340        """Arguments[Const]: (Argument[?Const]+)"""
341        item = self.parse_const_argument if is_const else self.parse_argument
342        return self.optional_many(TokenKind.PAREN_L, item, TokenKind.PAREN_R)
343
344    def parse_argument(self) -> ArgumentNode:
345        """Argument: Name : Value"""
346        start = self._lexer.token
347        name = self.parse_name()
348
349        self.expect_token(TokenKind.COLON)
350        return ArgumentNode(
351            name=name, value=self.parse_value_literal(False), loc=self.loc(start)
352        )
353
354    def parse_const_argument(self) -> ArgumentNode:
355        """Argument[Const]: Name : Value[?Const]"""
356        start = self._lexer.token
357        return ArgumentNode(
358            name=self.parse_name(),
359            value=self.expect_token(TokenKind.COLON) and self.parse_value_literal(True),
360            loc=self.loc(start),
361        )
362
363    # Implement the parsing rules in the Fragments section.
364
365    def parse_fragment(self) -> Union[FragmentSpreadNode, InlineFragmentNode]:
366        """Corresponds to both FragmentSpread and InlineFragment in the spec.
367
368        FragmentSpread: ... FragmentName Directives?
369        InlineFragment: ... TypeCondition? Directives? SelectionSet
370        """
371        start = self._lexer.token
372        self.expect_token(TokenKind.SPREAD)
373
374        has_type_condition = self.expect_optional_keyword("on")
375        if not has_type_condition and self.peek(TokenKind.NAME):
376            return FragmentSpreadNode(
377                name=self.parse_fragment_name(),
378                directives=self.parse_directives(False),
379                loc=self.loc(start),
380            )
381        return InlineFragmentNode(
382            type_condition=self.parse_named_type() if has_type_condition else None,
383            directives=self.parse_directives(False),
384            selection_set=self.parse_selection_set(),
385            loc=self.loc(start),
386        )
387
388    def parse_fragment_definition(self) -> FragmentDefinitionNode:
389        """FragmentDefinition"""
390        start = self._lexer.token
391        self.expect_keyword("fragment")
392        # Experimental support for defining variables within fragments changes
393        # the grammar of FragmentDefinition
394        if self._experimental_fragment_variables:
395            return FragmentDefinitionNode(
396                name=self.parse_fragment_name(),
397                variable_definitions=self.parse_variable_definitions(),
398                type_condition=self.parse_type_condition(),
399                directives=self.parse_directives(False),
400                selection_set=self.parse_selection_set(),
401                loc=self.loc(start),
402            )
403        return FragmentDefinitionNode(
404            name=self.parse_fragment_name(),
405            type_condition=self.parse_type_condition(),
406            directives=self.parse_directives(False),
407            selection_set=self.parse_selection_set(),
408            loc=self.loc(start),
409        )
410
411    def parse_fragment_name(self) -> NameNode:
412        """FragmentName: Name but not ``on``"""
413        if self._lexer.token.value == "on":
414            raise self.unexpected()
415        return self.parse_name()
416
417    def parse_type_condition(self) -> NamedTypeNode:
418        """TypeCondition: NamedType"""
419        self.expect_keyword("on")
420        return self.parse_named_type()
421
422    # Implement the parsing rules in the Values section.
423
    # Maps a value token kind to the suffix of the ``parse_*`` method that
    # handles it (see ``parse_value_literal``).
    _parse_value_literal_method_names: Dict[TokenKind, str] = {
        TokenKind.BRACKET_L: "list",
        TokenKind.BRACE_L: "object",
        TokenKind.INT: "int",
        TokenKind.FLOAT: "float",
        TokenKind.STRING: "string_literal",
        TokenKind.BLOCK_STRING: "string_literal",
        TokenKind.NAME: "named_values",
        TokenKind.DOLLAR: "variable_value",
    }
434
435    def parse_value_literal(self, is_const: bool) -> ValueNode:
436        method_name = self._parse_value_literal_method_names.get(self._lexer.token.kind)
437        if method_name:  # pragma: no cover
438            return getattr(self, f"parse_{method_name}")(is_const)
439        raise self.unexpected()  # pragma: no cover
440
441    def parse_string_literal(self, _is_const: bool = False) -> StringValueNode:
442        token = self._lexer.token
443        self._lexer.advance()
444        return StringValueNode(
445            value=token.value,
446            block=token.kind == TokenKind.BLOCK_STRING,
447            loc=self.loc(token),
448        )
449
450    def parse_list(self, is_const: bool) -> ListValueNode:
451        """ListValue[Const]"""
452        start = self._lexer.token
453        item = partial(self.parse_value_literal, is_const)
454        # noinspection PyTypeChecker
455        return ListValueNode(
456            values=self.any(TokenKind.BRACKET_L, item, TokenKind.BRACKET_R),
457            loc=self.loc(start),
458        )
459
460    def parse_object_field(self, is_const: bool) -> ObjectFieldNode:
461        start = self._lexer.token
462        name = self.parse_name()
463        self.expect_token(TokenKind.COLON)
464
465        return ObjectFieldNode(
466            name=name, value=self.parse_value_literal(is_const), loc=self.loc(start)
467        )
468
469    def parse_object(self, is_const: bool) -> ObjectValueNode:
470        """ObjectValue[Const]"""
471        start = self._lexer.token
472        item = partial(self.parse_object_field, is_const)
473        return ObjectValueNode(
474            fields=self.any(TokenKind.BRACE_L, item, TokenKind.BRACE_R),
475            loc=self.loc(start),
476        )
477
478    def parse_int(self, _is_const: bool = False) -> IntValueNode:
479        token = self._lexer.token
480        self._lexer.advance()
481        return IntValueNode(value=token.value, loc=self.loc(token))
482
483    def parse_float(self, _is_const: bool = False) -> FloatValueNode:
484        token = self._lexer.token
485        self._lexer.advance()
486        return FloatValueNode(value=token.value, loc=self.loc(token))
487
488    def parse_named_values(self, _is_const: bool = False) -> ValueNode:
489        token = self._lexer.token
490        value = token.value
491        self._lexer.advance()
492        if value == "true":
493            return BooleanValueNode(value=True, loc=self.loc(token))
494        if value == "false":
495            return BooleanValueNode(value=False, loc=self.loc(token))
496        if value == "null":
497            return NullValueNode(loc=self.loc(token))
498        return EnumValueNode(value=value, loc=self.loc(token))
499
500    def parse_variable_value(self, is_const: bool) -> VariableNode:
501        if not is_const:
502            return self.parse_variable()
503        raise self.unexpected()
504
505    # Implement the parsing rules in the Directives section.
506
507    def parse_directives(self, is_const: bool) -> List[DirectiveNode]:
508        """Directives[Const]: Directive[?Const]+"""
509        directives: List[DirectiveNode] = []
510        append = directives.append
511        while self.peek(TokenKind.AT):
512            append(self.parse_directive(is_const))
513        return directives
514
515    def parse_directive(self, is_const: bool) -> DirectiveNode:
516        """Directive[Const]: @ Name Arguments[?Const]?"""
517        start = self._lexer.token
518        self.expect_token(TokenKind.AT)
519        return DirectiveNode(
520            name=self.parse_name(),
521            arguments=self.parse_arguments(is_const),
522            loc=self.loc(start),
523        )
524
525    # Implement the parsing rules in the Types section.
526
527    def parse_type_reference(self) -> TypeNode:
528        """Type: NamedType or ListType or NonNullType"""
529        start = self._lexer.token
530        if self.expect_optional_token(TokenKind.BRACKET_L):
531            type_ = self.parse_type_reference()
532            self.expect_token(TokenKind.BRACKET_R)
533            type_ = ListTypeNode(type=type_, loc=self.loc(start))
534        else:
535            type_ = self.parse_named_type()
536        if self.expect_optional_token(TokenKind.BANG):
537            return NonNullTypeNode(type=type_, loc=self.loc(start))
538        return type_
539
540    def parse_named_type(self) -> NamedTypeNode:
541        """NamedType: Name"""
542        start = self._lexer.token
543        return NamedTypeNode(name=self.parse_name(), loc=self.loc(start))
544
545    # Implement the parsing rules in the Type Definition section.
546
    # Maps a type-system-definition keyword to the suffix of the ``parse_*``
    # method that handles it (see ``parse_type_system_definition``).
    _parse_type_system_definition_method_names: Dict[str, str] = {
        "schema": "schema_definition",
        "scalar": "scalar_type_definition",
        "type": "object_type_definition",
        "interface": "interface_type_definition",
        "union": "union_type_definition",
        "enum": "enum_type_definition",
        "input": "input_object_type_definition",
        "directive": "directive_definition",
    }
557
558    def parse_type_system_definition(self) -> TypeSystemDefinitionNode:
559        """TypeSystemDefinition"""
560        # Many definitions begin with a description and require a lookahead.
561        keyword_token = (
562            self._lexer.lookahead() if self.peek_description() else self._lexer.token
563        )
564        method_name = self._parse_type_system_definition_method_names.get(
565            cast(str, keyword_token.value)
566        )
567        if method_name:
568            return getattr(self, f"parse_{method_name}")()
569        raise self.unexpected(keyword_token)
570
    # Maps the keyword following ``extend`` to the suffix of the ``parse_*``
    # method that handles that extension (see ``parse_type_system_extension``).
    _parse_type_extension_method_names: Dict[str, str] = {
        "schema": "schema_extension",
        "scalar": "scalar_type_extension",
        "type": "object_type_extension",
        "interface": "interface_type_extension",
        "union": "union_type_extension",
        "enum": "enum_type_extension",
        "input": "input_object_type_extension",
    }
580
581    def parse_type_system_extension(self) -> TypeSystemExtensionNode:
582        """TypeSystemExtension"""
583        keyword_token = self._lexer.lookahead()
584        if keyword_token.kind == TokenKind.NAME:
585            method_name = self._parse_type_extension_method_names.get(
586                cast(str, keyword_token.value)
587            )
588            if method_name:  # pragma: no cover
589                return getattr(self, f"parse_{method_name}")()
590        raise self.unexpected(keyword_token)
591
592    def peek_description(self) -> bool:
593        return self.peek(TokenKind.STRING) or self.peek(TokenKind.BLOCK_STRING)
594
595    def parse_description(self) -> Optional[StringValueNode]:
596        """Description: StringValue"""
597        if self.peek_description():
598            return self.parse_string_literal()
599        return None
600
601    def parse_schema_definition(self) -> SchemaDefinitionNode:
602        """SchemaDefinition"""
603        start = self._lexer.token
604        description = self.parse_description()
605        self.expect_keyword("schema")
606        directives = self.parse_directives(True)
607        operation_types = self.many(
608            TokenKind.BRACE_L, self.parse_operation_type_definition, TokenKind.BRACE_R
609        )
610        return SchemaDefinitionNode(
611            description=description,
612            directives=directives,
613            operation_types=operation_types,
614            loc=self.loc(start),
615        )
616
617    def parse_operation_type_definition(self) -> OperationTypeDefinitionNode:
618        """OperationTypeDefinition: OperationType : NamedType"""
619        start = self._lexer.token
620        operation = self.parse_operation_type()
621        self.expect_token(TokenKind.COLON)
622        type_ = self.parse_named_type()
623        return OperationTypeDefinitionNode(
624            operation=operation, type=type_, loc=self.loc(start)
625        )
626
627    def parse_scalar_type_definition(self) -> ScalarTypeDefinitionNode:
628        """ScalarTypeDefinition: Description? scalar Name Directives[Const]?"""
629        start = self._lexer.token
630        description = self.parse_description()
631        self.expect_keyword("scalar")
632        name = self.parse_name()
633        directives = self.parse_directives(True)
634        return ScalarTypeDefinitionNode(
635            description=description,
636            name=name,
637            directives=directives,
638            loc=self.loc(start),
639        )
640
641    def parse_object_type_definition(self) -> ObjectTypeDefinitionNode:
642        """ObjectTypeDefinition"""
643        start = self._lexer.token
644        description = self.parse_description()
645        self.expect_keyword("type")
646        name = self.parse_name()
647        interfaces = self.parse_implements_interfaces()
648        directives = self.parse_directives(True)
649        fields = self.parse_fields_definition()
650        return ObjectTypeDefinitionNode(
651            description=description,
652            name=name,
653            interfaces=interfaces,
654            directives=directives,
655            fields=fields,
656            loc=self.loc(start),
657        )
658
659    def parse_implements_interfaces(self) -> List[NamedTypeNode]:
660        """ImplementsInterfaces"""
661        if not self.expect_optional_keyword("implements"):
662            return []
663
664        return self.delimited_many(TokenKind.AMP, self.parse_named_type)
665
666    def parse_fields_definition(self) -> List[FieldDefinitionNode]:
667        """FieldsDefinition: {FieldDefinition+}"""
668        return self.optional_many(
669            TokenKind.BRACE_L, self.parse_field_definition, TokenKind.BRACE_R
670        )
671
672    def parse_field_definition(self) -> FieldDefinitionNode:
673        """FieldDefinition"""
674        start = self._lexer.token
675        description = self.parse_description()
676        name = self.parse_name()
677        args = self.parse_argument_defs()
678        self.expect_token(TokenKind.COLON)
679        type_ = self.parse_type_reference()
680        directives = self.parse_directives(True)
681        return FieldDefinitionNode(
682            description=description,
683            name=name,
684            arguments=args,
685            type=type_,
686            directives=directives,
687            loc=self.loc(start),
688        )
689
690    def parse_argument_defs(self) -> List[InputValueDefinitionNode]:
691        """ArgumentsDefinition: (InputValueDefinition+)"""
692        return self.optional_many(
693            TokenKind.PAREN_L, self.parse_input_value_def, TokenKind.PAREN_R
694        )
695
696    def parse_input_value_def(self) -> InputValueDefinitionNode:
697        """InputValueDefinition"""
698        start = self._lexer.token
699        description = self.parse_description()
700        name = self.parse_name()
701        self.expect_token(TokenKind.COLON)
702        type_ = self.parse_type_reference()
703        default_value = (
704            self.parse_value_literal(True)
705            if self.expect_optional_token(TokenKind.EQUALS)
706            else None
707        )
708        directives = self.parse_directives(True)
709        return InputValueDefinitionNode(
710            description=description,
711            name=name,
712            type=type_,
713            default_value=default_value,
714            directives=directives,
715            loc=self.loc(start),
716        )
717
718    def parse_interface_type_definition(self) -> InterfaceTypeDefinitionNode:
719        """InterfaceTypeDefinition"""
720        start = self._lexer.token
721        description = self.parse_description()
722        self.expect_keyword("interface")
723        name = self.parse_name()
724        interfaces = self.parse_implements_interfaces()
725        directives = self.parse_directives(True)
726        fields = self.parse_fields_definition()
727        return InterfaceTypeDefinitionNode(
728            description=description,
729            name=name,
730            interfaces=interfaces,
731            directives=directives,
732            fields=fields,
733            loc=self.loc(start),
734        )
735
736    def parse_union_type_definition(self) -> UnionTypeDefinitionNode:
737        """UnionTypeDefinition"""
738        start = self._lexer.token
739        description = self.parse_description()
740        self.expect_keyword("union")
741        name = self.parse_name()
742        directives = self.parse_directives(True)
743        types = self.parse_union_member_types()
744        return UnionTypeDefinitionNode(
745            description=description,
746            name=name,
747            directives=directives,
748            types=types,
749            loc=self.loc(start),
750        )
751
752    def parse_union_member_types(self) -> List[NamedTypeNode]:
753        """UnionMemberTypes"""
754        return (
755            self.delimited_many(TokenKind.PIPE, self.parse_named_type)
756            if self.expect_optional_token(TokenKind.EQUALS)
757            else []
758        )
759
    def parse_enum_type_definition(self) -> EnumTypeDefinitionNode:
        """EnumTypeDefinition"""
        start = self._lexer.token
        description = self.parse_description()
        self.expect_keyword("enum")
        name = self.parse_name()
        directives = self.parse_directives(True)
        values = self.parse_enum_values_definition()
        return EnumTypeDefinitionNode(
            description=description,
            name=name,
            directives=directives,
            values=values,
            loc=self.loc(start),
        )
775
776    def parse_enum_values_definition(self) -> List[EnumValueDefinitionNode]:
777        """EnumValuesDefinition: {EnumValueDefinition+}"""
778        return self.optional_many(
779            TokenKind.BRACE_L, self.parse_enum_value_definition, TokenKind.BRACE_R
780        )
781
782    def parse_enum_value_definition(self) -> EnumValueDefinitionNode:
783        """EnumValueDefinition: Description? EnumValue Directives[Const]?"""
784        start = self._lexer.token
785        description = self.parse_description()
786        name = self.parse_name()
787        directives = self.parse_directives(True)
788        return EnumValueDefinitionNode(
789            description=description,
790            name=name,
791            directives=directives,
792            loc=self.loc(start),
793        )
794
795    def parse_input_object_type_definition(self) -> InputObjectTypeDefinitionNode:
796        """InputObjectTypeDefinition"""
797        start = self._lexer.token
798        description = self.parse_description()
799        self.expect_keyword("input")
800        name = self.parse_name()
801        directives = self.parse_directives(True)
802        fields = self.parse_input_fields_definition()
803        return InputObjectTypeDefinitionNode(
804            description=description,
805            name=name,
806            directives=directives,
807            fields=fields,
808            loc=self.loc(start),
809        )
810
811    def parse_input_fields_definition(self) -> List[InputValueDefinitionNode]:
812        """InputFieldsDefinition: {InputValueDefinition+}"""
813        return self.optional_many(
814            TokenKind.BRACE_L, self.parse_input_value_def, TokenKind.BRACE_R
815        )
816
817    def parse_schema_extension(self) -> SchemaExtensionNode:
818        """SchemaExtension"""
819        start = self._lexer.token
820        self.expect_keyword("extend")
821        self.expect_keyword("schema")
822        directives = self.parse_directives(True)
823        operation_types = self.optional_many(
824            TokenKind.BRACE_L, self.parse_operation_type_definition, TokenKind.BRACE_R
825        )
826        if not directives and not operation_types:
827            raise self.unexpected()
828        return SchemaExtensionNode(
829            directives=directives, operation_types=operation_types, loc=self.loc(start)
830        )
831
832    def parse_scalar_type_extension(self) -> ScalarTypeExtensionNode:
833        """ScalarTypeExtension"""
834        start = self._lexer.token
835        self.expect_keyword("extend")
836        self.expect_keyword("scalar")
837        name = self.parse_name()
838        directives = self.parse_directives(True)
839        if not directives:
840            raise self.unexpected()
841        return ScalarTypeExtensionNode(
842            name=name, directives=directives, loc=self.loc(start)
843        )
844
845    def parse_object_type_extension(self) -> ObjectTypeExtensionNode:
846        """ObjectTypeExtension"""
847        start = self._lexer.token
848        self.expect_keyword("extend")
849        self.expect_keyword("type")
850        name = self.parse_name()
851        interfaces = self.parse_implements_interfaces()
852        directives = self.parse_directives(True)
853        fields = self.parse_fields_definition()
854        if not (interfaces or directives or fields):
855            raise self.unexpected()
856        return ObjectTypeExtensionNode(
857            name=name,
858            interfaces=interfaces,
859            directives=directives,
860            fields=fields,
861            loc=self.loc(start),
862        )
863
864    def parse_interface_type_extension(self) -> InterfaceTypeExtensionNode:
865        """InterfaceTypeExtension"""
866        start = self._lexer.token
867        self.expect_keyword("extend")
868        self.expect_keyword("interface")
869        name = self.parse_name()
870        interfaces = self.parse_implements_interfaces()
871        directives = self.parse_directives(True)
872        fields = self.parse_fields_definition()
873        if not (interfaces or directives or fields):
874            raise self.unexpected()
875        return InterfaceTypeExtensionNode(
876            name=name,
877            interfaces=interfaces,
878            directives=directives,
879            fields=fields,
880            loc=self.loc(start),
881        )
882
883    def parse_union_type_extension(self) -> UnionTypeExtensionNode:
884        """UnionTypeExtension"""
885        start = self._lexer.token
886        self.expect_keyword("extend")
887        self.expect_keyword("union")
888        name = self.parse_name()
889        directives = self.parse_directives(True)
890        types = self.parse_union_member_types()
891        if not (directives or types):
892            raise self.unexpected()
893        return UnionTypeExtensionNode(
894            name=name, directives=directives, types=types, loc=self.loc(start)
895        )
896
897    def parse_enum_type_extension(self) -> EnumTypeExtensionNode:
898        """EnumTypeExtension"""
899        start = self._lexer.token
900        self.expect_keyword("extend")
901        self.expect_keyword("enum")
902        name = self.parse_name()
903        directives = self.parse_directives(True)
904        values = self.parse_enum_values_definition()
905        if not (directives or values):
906            raise self.unexpected()
907        return EnumTypeExtensionNode(
908            name=name, directives=directives, values=values, loc=self.loc(start)
909        )
910
911    def parse_input_object_type_extension(self) -> InputObjectTypeExtensionNode:
912        """InputObjectTypeExtension"""
913        start = self._lexer.token
914        self.expect_keyword("extend")
915        self.expect_keyword("input")
916        name = self.parse_name()
917        directives = self.parse_directives(True)
918        fields = self.parse_input_fields_definition()
919        if not (directives or fields):
920            raise self.unexpected()
921        return InputObjectTypeExtensionNode(
922            name=name, directives=directives, fields=fields, loc=self.loc(start)
923        )
924
925    def parse_directive_definition(self) -> DirectiveDefinitionNode:
926        """DirectiveDefinition"""
927        start = self._lexer.token
928        description = self.parse_description()
929        self.expect_keyword("directive")
930        self.expect_token(TokenKind.AT)
931        name = self.parse_name()
932        args = self.parse_argument_defs()
933        repeatable = self.expect_optional_keyword("repeatable")
934        self.expect_keyword("on")
935        locations = self.parse_directive_locations()
936        return DirectiveDefinitionNode(
937            description=description,
938            name=name,
939            arguments=args,
940            repeatable=repeatable,
941            locations=locations,
942            loc=self.loc(start),
943        )
944
945    def parse_directive_locations(self) -> List[NameNode]:
946        """DirectiveLocations"""
947        return self.delimited_many(TokenKind.PIPE, self.parse_directive_location)
948
949    def parse_directive_location(self) -> NameNode:
950        """DirectiveLocation"""
951        start = self._lexer.token
952        name = self.parse_name()
953        if name.value in DirectiveLocation.__members__:
954            return name
955        raise self.unexpected(start)
956
957    # Core parsing utility functions
958
959    def loc(self, start_token: Token) -> Optional[Location]:
960        """Return a location object.
961
962        Used to identify the place in the source that created a given parsed object.
963        """
964        if not self._no_location:
965            end_token = self._lexer.last_token
966            source = self._lexer.source
967            return Location(start_token, end_token, source)
968        return None
969
970    def peek(self, kind: TokenKind) -> bool:
971        """Determine if the next token is of a given kind"""
972        return self._lexer.token.kind == kind
973
974    def expect_token(self, kind: TokenKind) -> Token:
975        """Expect the next token to be of the given kind.
976
977        If the next token is of the given kind, return that token after advancing the
978        lexer. Otherwise, do not change the parser state and throw an error.
979        """
980        token = self._lexer.token
981        if token.kind == kind:
982            self._lexer.advance()
983            return token
984
985        raise GraphQLSyntaxError(
986            self._lexer.source,
987            token.start,
988            f"Expected {get_token_kind_desc(kind)}, found {get_token_desc(token)}.",
989        )
990
991    def expect_optional_token(self, kind: TokenKind) -> Optional[Token]:
992        """Expect the next token optionally to be of the given kind.
993
994        If the next token is of the given kind, return that token after advancing the
995        lexer. Otherwise, do not change the parser state and return None.
996        """
997        token = self._lexer.token
998        if token.kind == kind:
999            self._lexer.advance()
1000            return token
1001
1002        return None
1003
1004    def expect_keyword(self, value: str) -> None:
1005        """Expect the next token to be a given keyword.
1006
1007        If the next token is a given keyword, advance the lexer.
1008        Otherwise, do not change the parser state and throw an error.
1009        """
1010        token = self._lexer.token
1011        if token.kind == TokenKind.NAME and token.value == value:
1012            self._lexer.advance()
1013        else:
1014            raise GraphQLSyntaxError(
1015                self._lexer.source,
1016                token.start,
1017                f"Expected '{value}', found {get_token_desc(token)}.",
1018            )
1019
1020    def expect_optional_keyword(self, value: str) -> bool:
1021        """Expect the next token optionally to be a given keyword.
1022
1023        If the next token is a given keyword, return True after advancing the lexer.
1024        Otherwise, do not change the parser state and return False.
1025        """
1026        token = self._lexer.token
1027        if token.kind == TokenKind.NAME and token.value == value:
1028            self._lexer.advance()
1029            return True
1030
1031        return False
1032
1033    def unexpected(self, at_token: Optional[Token] = None) -> GraphQLError:
1034        """Create an error when an unexpected lexed token is encountered."""
1035        token = at_token or self._lexer.token
1036        return GraphQLSyntaxError(
1037            self._lexer.source, token.start, f"Unexpected {get_token_desc(token)}."
1038        )
1039
1040    def any(
1041        self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind
1042    ) -> List[T]:
1043        """Fetch any matching nodes, possibly none.
1044
1045        Returns a possibly empty list of parse nodes, determined by the ``parse_fn``.
1046        This list begins with a lex token of ``open_kind`` and ends with a lex token of
1047        ``close_kind``. Advances the parser to the next lex token after the closing
1048        token.
1049        """
1050        self.expect_token(open_kind)
1051        nodes: List[T] = []
1052        append = nodes.append
1053        expect_optional_token = partial(self.expect_optional_token, close_kind)
1054        while not expect_optional_token():
1055            append(parse_fn())
1056        return nodes
1057
1058    def optional_many(
1059        self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind
1060    ) -> List[T]:
1061        """Fetch matching nodes, maybe none.
1062
1063        Returns a list of parse nodes, determined by the ``parse_fn``. It can be empty
1064        only if the open token is missing, otherwise it will always return a non-empty
1065        list that begins with a lex token of ``open_kind`` and ends with a lex token of
1066        ``close_kind``. Advances the parser to the next lex token after the closing
1067        token.
1068        """
1069        if self.expect_optional_token(open_kind):
1070            nodes = [parse_fn()]
1071            append = nodes.append
1072            expect_optional_token = partial(self.expect_optional_token, close_kind)
1073            while not expect_optional_token():
1074                append(parse_fn())
1075            return nodes
1076        return []
1077
1078    def many(
1079        self, open_kind: TokenKind, parse_fn: Callable[[], T], close_kind: TokenKind
1080    ) -> List[T]:
1081        """Fetch matching nodes, at least one.
1082
1083        Returns a non-empty list of parse nodes, determined by the ``parse_fn``. This
1084        list begins with a lex token of ``open_kind`` and ends with a lex token of
1085        ``close_kind``. Advances the parser to the next lex token after the closing
1086        token.
1087        """
1088        self.expect_token(open_kind)
1089        nodes = [parse_fn()]
1090        append = nodes.append
1091        expect_optional_token = partial(self.expect_optional_token, close_kind)
1092        while not expect_optional_token():
1093            append(parse_fn())
1094        return nodes
1095
1096    def delimited_many(
1097        self, delimiter_kind: TokenKind, parse_fn: Callable[[], T]
1098    ) -> List[T]:
1099        """Fetch many delimited nodes.
1100
1101        Returns a non-empty list of parse nodes, determined by the ``parse_fn``. This
1102        list may begin with a lex token of ``delimiter_kind`` followed by items
1103        separated by lex tokens of ``delimiter_kind``. Advances the parser to the next
1104        lex token after the last item in the list.
1105        """
1106        expect_optional_token = partial(self.expect_optional_token, delimiter_kind)
1107        expect_optional_token()
1108        nodes: List[T] = []
1109        append = nodes.append
1110        while True:
1111            append(parse_fn())
1112            if not expect_optional_token():
1113                break
1114        return nodes
1115
1116
def get_token_desc(token: Token) -> str:
    """Describe a token as a string for debugging."""
    kind_desc = get_token_kind_desc(token.kind)
    # Punctuator-like tokens carry no value; include it only when present.
    if token.value is None:
        return kind_desc
    return f"{kind_desc} '{token.value}'"
1123
1124
def get_token_kind_desc(kind: TokenKind) -> str:
    """Describe a token kind as a string for debugging."""
    # Punctuators are quoted; other kinds (Name, Int, EOF, ...) are not.
    if is_punctuator_token_kind(kind):
        return f"'{kind.value}'"
    return kind.value
1128