#!/usr/bin/env python3.8
# @generated by pegen from /home/docs/checkouts/readthedocs.org/user_builds/scenic-lang/envs/latest/lib/python3.10/site-packages/scenic/syntax/scenic.gram
import ast
import sys
import tokenize
from typing import Any, Optional
from pegen.parser import memoize, memoize_left_rec, logger, Parser
import enum
import io
import itertools
import os
import token
from typing import (
Any, Callable, Iterator, List, Literal, NoReturn, Sequence, Tuple, TypeVar, Union
)
from pegen.tokenizer import Tokenizer
import scenic.syntax.ast as s
from scenic.core.errors import ScenicParseError
# Singleton ast nodes, created once for efficiency
Load = ast.Load()
Store = ast.Store()
Del = ast.Del()
Node = TypeVar("Node")
FC = TypeVar("FC", ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)
EXPR_NAME_MAPPING = {
ast.Attribute: "attribute",
ast.Subscript: "subscript",
ast.Starred: "starred",
ast.Name: "name",
ast.List: "list",
ast.Tuple: "tuple",
ast.Lambda: "lambda",
ast.Call: "function call",
ast.BoolOp: "expression",
ast.BinOp: "expression",
ast.UnaryOp: "expression",
ast.GeneratorExp: "generator expression",
ast.Yield: "yield expression",
ast.YieldFrom: "yield expression",
ast.Await: "await expression",
ast.ListComp: "list comprehension",
ast.SetComp: "set comprehension",
ast.DictComp: "dict comprehension",
ast.Dict: "dict literal",
ast.Set: "set display",
ast.JoinedStr: "f-string expression",
ast.FormattedValue: "f-string expression",
ast.Compare: "comparison",
ast.IfExp: "conditional expression",
ast.NamedExpr: "named expression",
}
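# Example (illustrative, not part of the generated parser): the mapping turns
# an AST node type into the noun used in error messages, e.g.
#
#     >>> EXPR_NAME_MAPPING[type(ast.parse("f(x)", mode="eval").body)]
#     'function call'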
def parse_file(
path: str,
py_version: Optional[tuple] = None,
token_stream_factory: Optional[
Callable[[Callable[[], str]], Iterator[tokenize.TokenInfo]]
] = None,
verbose: bool = False,
) -> ast.Module:
"""Parse a file."""
with open(path) as f:
tok_stream = (
token_stream_factory(f.readline)
if token_stream_factory else
tokenize.generate_tokens(f.readline)
)
tokenizer = Tokenizer(tok_stream, verbose=verbose, path=path)
parser = ScenicParser(
tokenizer,
verbose=verbose,
filename=os.path.basename(path),
py_version=py_version
)
return parser.parse("file")
def parse_string(
source: str,
mode: Union[Literal["eval"], Literal["exec"]],
py_version: Optional[tuple] = None,
token_stream_factory: Optional[
Callable[[Callable[[], str]], Iterator[tokenize.TokenInfo]]
] = None,
verbose: bool = False,
filename: str = "<unknown>",
) -> Any:
"""Parse a string."""
tok_stream = (
token_stream_factory(io.StringIO(source).readline)
if token_stream_factory else
tokenize.generate_tokens(io.StringIO(source).readline)
)
tokenizer = Tokenizer(tok_stream, verbose=verbose)
parser = ScenicParser(tokenizer, verbose=verbose, py_version=py_version, filename=filename)
return parser.parse(mode if mode == "eval" else "file")
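# Example usage (a hedged sketch; the source text and "<demo>" filename are
# made up for illustration):
#
#     >>> mod = parse_string("x = 1\n", mode="exec", filename="<demo>")
#     >>> type(mod).__name__
#     'Module'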
class Target(enum.Enum):
FOR_TARGETS = enum.auto()
STAR_TARGETS = enum.auto()
DEL_TARGETS = enum.auto()
class Parser(Parser):
#: Name of the source file, used in error reports
filename: str
def __init__(self,
tokenizer: Tokenizer, *,
verbose: bool = False,
filename: str = "<unknown>",
py_version: Optional[tuple] = None,
) -> None:
super().__init__(tokenizer, verbose=verbose)
self.filename = filename
self.py_version = min(py_version, sys.version_info) if py_version else sys.version_info
def parse(self, rule: str, call_invalid_rules: bool = False) -> Optional[ast.AST]:
self.call_invalid_rules = call_invalid_rules
res = getattr(self, rule)()
if res is None:
# Grab the last token that was parsed in the first run to avoid
# polluting generic error reports with progress made by invalid rules.
last_token = self._tokenizer.diagnose()
if not call_invalid_rules:
self.call_invalid_rules = True
# Reset the parser cache to be able to restart parsing from the
# beginning.
self._reset(0) # type: ignore
self._cache.clear()
res = getattr(self, rule)()
self.raise_raw_syntax_error("invalid syntax", last_token.start, last_token.end)
return res
def check_version(self, min_version: Tuple[int, ...], error_msg: str, node: Node) -> Node:
"""Check that the python version is high enough for a rule to apply.
"""
if self.py_version >= min_version:
return node
else:
raise ScenicParseError(SyntaxError(
f"{error_msg} is only supported in Python {min_version} and above."
))
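# Example (illustrative): a grammar action can gate a node on the runtime
# version, e.g. self.check_version((3, 8), "The ':=' operator is", node),
# which raises "The ':=' operator is only supported in Python (3, 8) and
# above." when py_version is older. The verb lives in error_msg, which is
# why callers pass strings ending in "is" or "are".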
def raise_indentation_error(self, msg: str) -> None:
"""Raise an indentation error."""
last_token = self._tokenizer.diagnose()
args = (self.filename, last_token.start[0], last_token.start[1] + 1, last_token.line)
if sys.version_info >= (3, 10):
args += (last_token.end[0], last_token.end[1] + 1)
raise ScenicParseError(IndentationError(msg, args))
def get_expr_name(self, node) -> str:
"""Get a descriptive name for an expression."""
# See https://github.com/python/cpython/blob/master/Parser/pegen.c#L161
assert node is not None
node_t = type(node)
if node_t is ast.Constant:
v = node.value
if v is Ellipsis:
return "ellipsis"
elif v is None:
return str(v)
# Avoid treating 1 as True through == comparison
elif v is True:
return str(v)
elif v is False:
return str(v)
else:
return "literal"
try:
return EXPR_NAME_MAPPING[node_t]
except KeyError:
raise ValueError(
f"unexpected expression in assignment {type(node).__name__} "
f"(line {node.lineno})."
)
def get_invalid_target(self, target: Target, node: Optional[ast.AST]) -> Optional[ast.AST]:
"""Get the meaningful invalid target for different assignment type."""
if node is None:
return None
# We only need to visit List and Tuple nodes recursively as those
# are the only ones that can contain valid names in targets when
# they are parsed as expressions. Any other kind of expression
# that is a container (like Sets or Dicts) is directly invalid and
# we do not need to visit it recursively.
if isinstance(node, (ast.List, ast.Tuple)):
for e in node.elts:
if (inv := self.get_invalid_target(target, e)) is not None:
return inv
elif isinstance(node, ast.Starred):
if target is Target.DEL_TARGETS:
return node
return self.get_invalid_target(target, node.value)
elif isinstance(node, ast.Compare):
# This is needed, because the `a in b` in `for a in b` gets parsed
# as a comparison, and so we need to search the left side of the comparison
# for invalid targets.
if target is Target.FOR_TARGETS:
if isinstance(node.ops[0], ast.In):
return self.get_invalid_target(target, node.left)
return None
return node
elif isinstance(node, (ast.Name, ast.Subscript, ast.Attribute)):
return None
else:
return node
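# Example (illustrative): for the invalid assignment "(a, f()) = 1" the
# search recurses into the parsed tuple and returns the ast.Call node for
# f(), which raise_syntax_error_invalid_target below reports as
# "cannot assign to function call".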
def set_expr_context(self, node, context):
"""Set the context (Load, Store, Del) of an ast node."""
node.ctx = context
return node
def ensure_real(self, number: ast.Constant) -> float:
value = ast.literal_eval(number.string)
if type(value) is complex:
self.raise_syntax_error_known_location("real number required in complex literal", number)
return value
def ensure_imaginary(self, number: ast.Constant) -> complex:
value = ast.literal_eval(number.string)
if type(value) is not complex:
self.raise_syntax_error_known_location("imaginary number required in complex literal", number)
return value
def check_fstring_conversion(self, mark: tokenize.TokenInfo, name: tokenize.TokenInfo) -> tokenize.TokenInfo:
if mark.lineno != name.lineno or mark.col_offset != name.col_offset:
self.raise_syntax_error_known_range(
"f-string: conversion type must come right after the exclamanation mark",
mark,
name
)
s = name.string
if len(s) > 1 or s not in ("s", "r", "a"):
self.raise_syntax_error_known_location(
f"f-string: invalid conversion character '{s}': expected 's', 'r', or 'a'",
name,
)
return name
def _concat_strings_in_constant(self, parts) -> ast.Constant:
s = ast.literal_eval(parts[0].string)
for ss in parts[1:]:
s += ast.literal_eval(ss.string)
args = dict(
value=s,
lineno=parts[0].start[0],
col_offset=parts[0].start[1],
end_lineno=parts[-1].end[0],
end_col_offset=parts[-1].end[1],
)
if parts[0].string.startswith("u"):
args["kind"] = "u"
return ast.Constant(**args)
def concatenate_strings(self, parts):
"""Concatenate multiple tokens and ast.JoinedStr"""
# Get proper start and stop
start = end = None
if isinstance(parts[0], ast.JoinedStr):
start = parts[0].lineno, parts[0].col_offset
if isinstance(parts[-1], ast.JoinedStr):
end = parts[-1].end_lineno, parts[-1].end_col_offset
# Combine the different parts
seen_joined = False
values = []
ss = []
for p in parts:
if isinstance(p, ast.JoinedStr):
seen_joined = True
if ss:
values.append(self._concat_strings_in_constant(ss))
ss.clear()
values.extend(p.values)
else:
ss.append(p)
if ss:
values.append(self._concat_strings_in_constant(ss))
consolidated = []
for p in values:
if consolidated and isinstance(consolidated[-1], ast.Constant) and isinstance(p, ast.Constant):
consolidated[-1].value += p.value
consolidated[-1].end_lineno = p.end_lineno
consolidated[-1].end_col_offset = p.end_col_offset
else:
consolidated.append(p)
if not seen_joined and len(values) == 1 and isinstance(values[0], ast.Constant):
return values[0]
else:
return ast.JoinedStr(
values=consolidated,
lineno=start[0] if start else values[0].lineno,
col_offset=start[1] if start else values[0].col_offset,
end_lineno=end[0] if end else values[-1].end_lineno,
end_col_offset=end[1] if end else values[-1].end_col_offset,
)
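# Example (illustrative): adjacent plain string literals collapse into a
# single ast.Constant, while any f-string part forces an ast.JoinedStr:
#
#     >>> isinstance(parse_string('"a" "b"\n', mode="exec").body[0].value, ast.Constant)
#     True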
def generate_ast_for_string(self, tokens):
"""Generate AST nodes for strings."""
err_args = None
line_offset = tokens[0].start[0]
line = line_offset
col_offset = 0
source = "(\n"
for t in tokens:
n_line = t.start[0] - line
if n_line:
col_offset = 0
source += """\n""" * n_line + ' ' * (t.start[1] - col_offset) + t.string
line, col_offset = t.end
source += "\n)"
try:
m = ast.parse(source)
except SyntaxError as err:
args = (err.filename, err.lineno + line_offset - 2, err.offset, err.text)
if sys.version_info >= (3, 10):
args += (err.end_lineno + line_offset - 2, err.end_offset)
err_args = (err.msg, args)
# Ensure we do not keep the frame alive longer than necessary
# by explicitly deleting the error once we have extracted what we
# need from it
del err
# Avoid getting a triple nesting in the error report that does not
# bring anything relevant to the traceback.
if err_args is not None:
raise ScenicParseError(SyntaxError(*err_args))
node = m.body[0].value
# Since we asked Python to parse an altered source whose content
# starts at line 2, we shift the lineno of the returned AST to
# recover the right line. If the string starts at line 1, the AST
# says 2, so we need to decrement by 1, hence the -2.
ast.increment_lineno(node, line_offset - 2)
return node
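# Example (illustrative): a string token that starts on source line 5 lands
# on line 2 of the wrapped "(\n...\n)" source, so increment_lineno(node, 5 - 2)
# restores lineno == 5 on the resulting node.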
def extract_import_level(self, tokens: List[tokenize.TokenInfo]) -> int:
"""Extract the relative import level from the tokens preceding the module name.
'.' count for one and '...' for 3.
"""
level = 0
for t in tokens:
if t.string == ".":
level += 1
else:
level += 3
return level
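# Example (a hedged sketch over plain strings; the real method receives
# tokenize.TokenInfo objects): in "from ....pkg import x" the dots tokenize
# as one '...' plus one '.', giving level 3 + 1 == 4:
#
#     >>> sum(1 if t == "." else 3 for t in ("...", "."))
#     4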
def set_decorators(self,
target: FC,
decorators: list
) -> FC:
"""Set the decorators on a function or class definition."""
target.decorator_list = decorators
return target
def get_comparison_ops(self, pairs):
return [op for op, _ in pairs]
def get_comparators(self, pairs):
return [comp for _, comp in pairs]
def set_arg_type_comment(self, arg, type_comment):
if type_comment or sys.version_info < (3, 9):
arg.type_comment = type_comment
return arg
def make_arguments(self,
pos_only: Optional[List[Tuple[ast.arg, None]]],
pos_only_with_default: List[Tuple[ast.arg, Any]],
param_no_default: Optional[List[Tuple[ast.arg, None]]],
param_default: Optional[List[Tuple[ast.arg, Any]]],
after_star: Optional[Tuple[Optional[ast.arg], List[Tuple[ast.arg, Any]], Optional[ast.arg]]]
) -> ast.arguments:
"""Build a function definition arguments."""
defaults = (
[d for _, d in pos_only_with_default if d is not None]
if pos_only_with_default else
[]
)
defaults += (
[d for _, d in param_default if d is not None]
if param_default else
[]
)
pos_only = pos_only or pos_only_with_default
# Because we need to combine positional-only parameters with and
# without defaults, even the variant with no default is a list of
# (arg, None) tuples.
pos_only = [p for p, _ in pos_only]
params = (param_no_default or []) + ([p for p, _ in param_default] if param_default else [])
# If after_star is None, make a default tuple
after_star = after_star or (None, [], None)
return ast.arguments(
posonlyargs=pos_only,
args=params,
defaults=defaults,
vararg=after_star[0],
kwonlyargs=[p for p, _ in after_star[1]],
kw_defaults=[d for _, d in after_star[1]],
kwarg=after_star[2]
)
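# Example (illustrative): the ast.arguments built here mirrors what CPython
# produces for an equivalent def; compare with
#
#     >>> a = ast.parse("def f(a, /, b=1, *c, d=2, **e): pass").body[0].args
#     >>> [x.arg for x in a.posonlyargs], [x.arg for x in a.kwonlyargs]
#     (['a'], ['d'])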
def _build_syntax_error(
self,
message: str,
start: Optional[Tuple[int, int]] = None,
end: Optional[Tuple[int, int]] = None
) -> ScenicParseError:
line_from_token = start is None and end is None
if start is None or end is None:
tok = self._tokenizer.diagnose()
start = start or tok.start
end = end or tok.end
if line_from_token:
line = tok.line
else:
# End is used only to get the proper text
line = "\n".join(
self._tokenizer.get_lines(list(range(start[0], end[0] + 1)))
)
# tokenize.py indexes column offsets from 0, while CPython indexes
# them from 1 when reporting a SyntaxError, so we need to increment
# the column offset when reporting the error.
args = (self.filename, start[0], start[1] + 1, line)
if sys.version_info >= (3, 10):
args += (end[0], end[1] + 1)
return ScenicParseError(SyntaxError(message, args))
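# Example (illustrative): on Python 3.10+ the SyntaxError args tuple also
# carries the end position, e.g. ("demo.scenic", 1, 5, "x = (", 1, 7);
# older versions only receive (filename, lineno, offset, text).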
def raise_raw_syntax_error(
self,
message: str,
start: Optional[Tuple[int, int]] = None,
end: Optional[Tuple[int, int]] = None
) -> NoReturn:
raise self._build_syntax_error(message, start, end)
def make_syntax_error(self, message: str) -> ScenicParseError:
return self._build_syntax_error(message)
def expect_forced(self, res: Any, expectation: str) -> Optional[tokenize.TokenInfo]:
if res is None:
last_token = self._tokenizer.diagnose()
end = last_token.start
if sys.version_info >= (3, 12) or (sys.version_info >= (3, 11) and last_token.type != 4): # i.e. not a NEWLINE token
end = last_token.end
self.raise_raw_syntax_error(
f"expected {expectation}", last_token.start, end
)
return res
def raise_syntax_error(self, message: str) -> NoReturn:
"""Raise a syntax error."""
tok = self._tokenizer.diagnose()
raise self._build_syntax_error(message, tok.start, tok.end if sys.version_info >= (3, 12) or tok.type != 4 else tok.start)
def raise_syntax_error_known_location(
self, message: str, node: Union[ast.AST, tokenize.TokenInfo]
) -> NoReturn:
"""Raise a syntax error that occured at a given AST node."""
if isinstance(node, tokenize.TokenInfo):
start = node.start
end = node.end
else:
start = node.lineno, node.col_offset
end = node.end_lineno, node.end_col_offset
raise self._build_syntax_error(message, start, end)
def raise_syntax_error_known_range(
self,
message: str,
start_node: Union[ast.AST, tokenize.TokenInfo],
end_node: Union[ast.AST, tokenize.TokenInfo]
) -> NoReturn:
if isinstance(start_node, tokenize.TokenInfo):
start = start_node.start
else:
start = start_node.lineno, start_node.col_offset
if isinstance(end_node, tokenize.TokenInfo):
end = end_node.end
else:
end = end_node.end_lineno, end_node.end_col_offset
raise self._build_syntax_error(message, start, end)
def raise_syntax_error_starting_from(
self,
message: str,
start_node: Union[ast.AST, tokenize.TokenInfo]
) -> NoReturn:
if isinstance(start_node, tokenize.TokenInfo):
start = start_node.start
else:
start = start_node.lineno, start_node.col_offset
last_token = self._tokenizer.diagnose()
raise self._build_syntax_error(message, start, last_token.start)
def raise_syntax_error_invalid_target(
self, target: Target, node: Optional[ast.AST]
) -> NoReturn:
invalid_target = self.get_invalid_target(target, node)
if invalid_target is None:
return None
if target in (Target.STAR_TARGETS, Target.FOR_TARGETS):
msg = f"cannot assign to {self.get_expr_name(invalid_target)}"
else:
msg = f"cannot delete {self.get_expr_name(invalid_target)}"
self.raise_syntax_error_known_location(msg, invalid_target)
def raise_syntax_error_on_next_token(self, message: str) -> NoReturn:
next_token = self._tokenizer.peek()
raise self._build_syntax_error(message, next_token.start, next_token.end)
# scenic helpers
def extend_new_specifiers(self, node: s.New, specifiers: List[ast.AST]) -> s.New:
node.specifiers.extend(specifiers)
return node
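# Example (illustrative; the object and property names are made up): for a
# Scenic block such as
#
#     ego = new Object at (1, 2),
#         with foo 3
#
# the parser builds the New node from the header line and then extends its
# specifiers with the indented "with foo 3" specifier via this helper.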
# Keywords and soft keywords are listed at the end of the parser definition.
class ScenicParser(Parser):
@memoize
def start(self) -> Optional[Any]:
# start: file
mark = self._mark()
if (
(file := self.file())
):
return file;
self._reset(mark)
return None;
@memoize
def file(self) -> Optional[ast . Module]:
# file: statements? $
mark = self._mark()
if (
(a := self.statements(),)
and
(self.expect('ENDMARKER'))
):
return ast . Module ( body = a or [] , type_ignores = [] );
self._reset(mark)
return None;
@memoize
def interactive(self) -> Optional[ast . Interactive]:
# interactive: statement_newline
mark = self._mark()
if (
(a := self.statement_newline())
):
return ast . Interactive ( body = a );
self._reset(mark)
return None;
@memoize
def eval(self) -> Optional[ast . Expression]:
# eval: expressions NEWLINE* $
mark = self._mark()
if (
(a := self.expressions())
and
(self._loop0_1(),)
and
(self.expect('ENDMARKER'))
):
return ast . Expression ( body = a );
self._reset(mark)
return None;
@memoize
def func_type(self) -> Optional[ast . FunctionType]:
# func_type: '(' type_expressions? ')' '->' expression NEWLINE* $
mark = self._mark()
if (
(self.expect('('))
and
(a := self.type_expressions(),)
and
(self.expect(')'))
and
(self.expect('->'))
and
(b := self.expression())
and
(self._loop0_2(),)
and
(self.expect('ENDMARKER'))
):
return ast . FunctionType ( argtypes = a , returns = b );
self._reset(mark)
return None;
@memoize
def fstring(self) -> Optional[Any]:
# fstring: FSTRING_START fstring_mid* FSTRING_END
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.fstring_start())
and
(b := self._loop0_3(),)
and
(self.fstring_end())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . JoinedStr ( values = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def statements(self) -> Optional[list]:
# statements: statement+
mark = self._mark()
if (
(a := self._loop1_4())
):
return list ( itertools . chain . from_iterable ( a ) );
self._reset(mark)
return None;
@memoize
def statement(self) -> Optional[list]:
# statement: scenic_compound_stmt | compound_stmt | scenic_stmts | simple_stmts
mark = self._mark()
if (
(a := self.scenic_compound_stmt())
):
return [a];
self._reset(mark)
if (
(a := self.compound_stmt())
):
return [a];
self._reset(mark)
if (
(a := self.scenic_stmts())
):
return a;
self._reset(mark)
if (
(a := self.simple_stmts())
):
return a;
self._reset(mark)
return None;
@memoize
def statement_newline(self) -> Optional[list]:
# statement_newline: compound_stmt NEWLINE | simple_stmts | NEWLINE | $
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.compound_stmt())
and
(self.expect('NEWLINE'))
):
return [a];
self._reset(mark)
if (
(simple_stmts := self.simple_stmts())
):
return simple_stmts;
self._reset(mark)
if (
(self.expect('NEWLINE'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return [ast . Pass ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )];
self._reset(mark)
if (
(self.expect('ENDMARKER'))
):
return None;
self._reset(mark)
return None;
@memoize
def simple_stmts(self) -> Optional[list]:
# simple_stmts: simple_stmt !';' NEWLINE | ';'.simple_stmt+ ';'? NEWLINE
mark = self._mark()
if (
(a := self.simple_stmt())
and
(self.negative_lookahead(self.expect, ';'))
and
(self.expect('NEWLINE'))
):
return [a];
self._reset(mark)
if (
(a := self._gather_5())
and
(self.expect(';'),)
and
(self.expect('NEWLINE'))
):
return a;
self._reset(mark)
return None;
@memoize
def scenic_stmts(self) -> Optional[list]:
# scenic_stmts: scenic_stmt !';' NEWLINE | ';'.scenic_stmt+ ';'? NEWLINE
mark = self._mark()
if (
(a := self.scenic_stmt())
and
(self.negative_lookahead(self.expect, ';'))
and
(self.expect('NEWLINE'))
):
return [a];
self._reset(mark)
if (
(a := self._gather_7())
and
(self.expect(';'),)
and
(self.expect('NEWLINE'))
):
return a;
self._reset(mark)
return None;
@memoize
def simple_stmt(self) -> Optional[Any]:
# simple_stmt: assignment | &"type" type_alias | star_expressions | &'return' return_stmt | &('import' | 'from') import_stmt | &'raise' raise_stmt | 'pass' | &'del' del_stmt | &'yield' yield_stmt | &'assert' assert_stmt | 'break' | 'continue' | &'global' global_stmt | &'nonlocal' nonlocal_stmt
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(assignment := self.assignment())
):
return assignment;
self._reset(mark)
if (
(self.positive_lookahead(self.expect, "type"))
and
(type_alias := self.type_alias())
):
return type_alias;
self._reset(mark)
if (
(e := self.star_expressions())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Expr ( value = e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'return'))
and
(return_stmt := self.return_stmt())
):
return return_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self._tmp_9, ))
and
(import_stmt := self.import_stmt())
):
return import_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'raise'))
and
(raise_stmt := self.raise_stmt())
):
return raise_stmt;
self._reset(mark)
if (
(self.expect('pass'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Pass ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'del'))
and
(del_stmt := self.del_stmt())
):
return del_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'yield'))
and
(yield_stmt := self.yield_stmt())
):
return yield_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'assert'))
and
(assert_stmt := self.assert_stmt())
):
return assert_stmt;
self._reset(mark)
if (
(self.expect('break'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Break ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('continue'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Continue ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'global'))
and
(global_stmt := self.global_stmt())
):
return global_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'nonlocal'))
and
(nonlocal_stmt := self.nonlocal_stmt())
):
return nonlocal_stmt;
self._reset(mark)
return None;
@memoize
def compound_stmt(self) -> Optional[Any]:
# compound_stmt: &('def' | '@' | 'async') function_def | &'if' if_stmt | &('class' | '@') class_def | &('with' | 'async') with_stmt | &('for' | 'async') for_stmt | &'try' try_stmt | &'while' while_stmt | match_stmt
mark = self._mark()
if (
(self.positive_lookahead(self._tmp_10, ))
and
(function_def := self.function_def())
):
return function_def;
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'if'))
and
(if_stmt := self.if_stmt())
):
return if_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self._tmp_11, ))
and
(class_def := self.class_def())
):
return class_def;
self._reset(mark)
if (
(self.positive_lookahead(self._tmp_12, ))
and
(with_stmt := self.with_stmt())
):
return with_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self._tmp_13, ))
and
(for_stmt := self.for_stmt())
):
return for_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'try'))
and
(try_stmt := self.try_stmt())
):
return try_stmt;
self._reset(mark)
if (
(self.positive_lookahead(self.expect, 'while'))
and
(while_stmt := self.while_stmt())
):
return while_stmt;
self._reset(mark)
if (
(match_stmt := self.match_stmt())
):
return match_stmt;
self._reset(mark)
return None;
@memoize
def scenic_stmt(self) -> Optional[Any]:
# scenic_stmt: scenic_model_stmt | scenic_tracked_assignment | scenic_param_stmt | scenic_require_stmt | scenic_record_initial_stmt | scenic_record_final_stmt | scenic_record_stmt | scenic_mutate_stmt | scenic_terminate_simulation_when_stmt | scenic_terminate_when_stmt | scenic_terminate_after_stmt | scenic_take_stmt | scenic_wait_stmt | scenic_terminate_simulation_stmt | scenic_terminate_stmt | scenic_do_choose_stmt | scenic_do_shuffle_stmt | scenic_do_for_stmt | scenic_do_until_stmt | scenic_do_stmt | scenic_abort_stmt | scenic_simulator_stmt
mark = self._mark()
if (
(scenic_model_stmt := self.scenic_model_stmt())
):
return scenic_model_stmt;
self._reset(mark)
if (
(scenic_tracked_assignment := self.scenic_tracked_assignment())
):
return scenic_tracked_assignment;
self._reset(mark)
if (
(scenic_param_stmt := self.scenic_param_stmt())
):
return scenic_param_stmt;
self._reset(mark)
if (
(scenic_require_stmt := self.scenic_require_stmt())
):
return scenic_require_stmt;
self._reset(mark)
if (
(scenic_record_initial_stmt := self.scenic_record_initial_stmt())
):
return scenic_record_initial_stmt;
self._reset(mark)
if (
(scenic_record_final_stmt := self.scenic_record_final_stmt())
):
return scenic_record_final_stmt;
self._reset(mark)
if (
(scenic_record_stmt := self.scenic_record_stmt())
):
return scenic_record_stmt;
self._reset(mark)
if (
(scenic_mutate_stmt := self.scenic_mutate_stmt())
):
return scenic_mutate_stmt;
self._reset(mark)
if (
(scenic_terminate_simulation_when_stmt := self.scenic_terminate_simulation_when_stmt())
):
return scenic_terminate_simulation_when_stmt;
self._reset(mark)
if (
(scenic_terminate_when_stmt := self.scenic_terminate_when_stmt())
):
return scenic_terminate_when_stmt;
self._reset(mark)
if (
(scenic_terminate_after_stmt := self.scenic_terminate_after_stmt())
):
return scenic_terminate_after_stmt;
self._reset(mark)
if (
(scenic_take_stmt := self.scenic_take_stmt())
):
return scenic_take_stmt;
self._reset(mark)
if (
(scenic_wait_stmt := self.scenic_wait_stmt())
):
return scenic_wait_stmt;
self._reset(mark)
if (
(scenic_terminate_simulation_stmt := self.scenic_terminate_simulation_stmt())
):
return scenic_terminate_simulation_stmt;
self._reset(mark)
if (
(scenic_terminate_stmt := self.scenic_terminate_stmt())
):
return scenic_terminate_stmt;
self._reset(mark)
if (
(scenic_do_choose_stmt := self.scenic_do_choose_stmt())
):
return scenic_do_choose_stmt;
self._reset(mark)
if (
(scenic_do_shuffle_stmt := self.scenic_do_shuffle_stmt())
):
return scenic_do_shuffle_stmt;
self._reset(mark)
if (
(scenic_do_for_stmt := self.scenic_do_for_stmt())
):
return scenic_do_for_stmt;
self._reset(mark)
if (
(scenic_do_until_stmt := self.scenic_do_until_stmt())
):
return scenic_do_until_stmt;
self._reset(mark)
if (
(scenic_do_stmt := self.scenic_do_stmt())
):
return scenic_do_stmt;
self._reset(mark)
if (
(scenic_abort_stmt := self.scenic_abort_stmt())
):
return scenic_abort_stmt;
self._reset(mark)
if (
(scenic_simulator_stmt := self.scenic_simulator_stmt())
):
return scenic_simulator_stmt;
self._reset(mark)
return None;
@memoize
def scenic_compound_stmt(self) -> Optional[Any]:
# scenic_compound_stmt: scenic_tracked_assign_new_stmt | scenic_assign_new_stmt | scenic_expr_new_stmt | scenic_behavior_def | scenic_monitor_def | scenic_scenario_def | scenic_try_interrupt_stmt | scenic_override_stmt
mark = self._mark()
if (
(scenic_tracked_assign_new_stmt := self.scenic_tracked_assign_new_stmt())
):
return scenic_tracked_assign_new_stmt;
self._reset(mark)
if (
(scenic_assign_new_stmt := self.scenic_assign_new_stmt())
):
return scenic_assign_new_stmt;
self._reset(mark)
if (
(scenic_expr_new_stmt := self.scenic_expr_new_stmt())
):
return scenic_expr_new_stmt;
self._reset(mark)
if (
(scenic_behavior_def := self.scenic_behavior_def())
):
return scenic_behavior_def;
self._reset(mark)
if (
(scenic_monitor_def := self.scenic_monitor_def())
):
return scenic_monitor_def;
self._reset(mark)
if (
(scenic_scenario_def := self.scenic_scenario_def())
):
return scenic_scenario_def;
self._reset(mark)
if (
(scenic_try_interrupt_stmt := self.scenic_try_interrupt_stmt())
):
return scenic_try_interrupt_stmt;
self._reset(mark)
if (
(scenic_override_stmt := self.scenic_override_stmt())
):
return scenic_override_stmt;
self._reset(mark)
return None;
@memoize
def assignment(self) -> Optional[Any]:
# assignment: NAME ':' expression ['=' annotated_rhs] | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] | ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? | single_target augassign ~ (yield_expr | star_expressions) | invalid_assignment
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.name())
and
(self.expect(':'))
and
(b := self.expression())
and
(c := self._tmp_14(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 6 ) , "Variable annotation syntax is" , ast . AnnAssign ( target = ast . Name ( id = a . string , ctx = Store , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] , ) , annotation = b , value = c , simple = 1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
self._reset(mark)
if (
(a := self._tmp_15())
and
(self.expect(':'))
and
(b := self.expression())
and
(c := self._tmp_16(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 6 ) , "Variable annotation syntax is" , ast . AnnAssign ( target = a , annotation = b , value = c , simple = 0 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
self._reset(mark)
if (
(a := self._loop1_17())
and
(b := self._tmp_18())
and
(self.negative_lookahead(self.expect, '='))
and
(tc := self.type_comment(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Assign ( targets = a , value = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
cut = False
if (
(a := self.single_target())
and
(b := self.augassign())
and
(cut := True)
and
(c := self._tmp_19())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . AugAssign ( target = a , op = b , value = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if cut:
return None;
if (
self.call_invalid_rules
and
(self.invalid_assignment())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def annotated_rhs(self) -> Optional[Any]:
# annotated_rhs: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def augassign(self) -> Optional[Any]:
# augassign: '+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | '<<=' | '>>=' | '**=' | '//='
mark = self._mark()
if (
(self.expect('+='))
):
return ast . Add ( );
self._reset(mark)
if (
(self.expect('-='))
):
return ast . Sub ( );
self._reset(mark)
if (
(self.expect('*='))
):
return ast . Mult ( );
self._reset(mark)
if (
(self.expect('@='))
):
return self . check_version ( ( 3 , 5 ) , "The '@' operator is" , ast . MatMult ( ) );
self._reset(mark)
if (
(self.expect('/='))
):
return ast . Div ( );
self._reset(mark)
if (
(self.expect('%='))
):
return ast . Mod ( );
self._reset(mark)
if (
(self.expect('&='))
):
return ast . BitAnd ( );
self._reset(mark)
if (
(self.expect('|='))
):
return ast . BitOr ( );
self._reset(mark)
if (
(self.expect('^='))
):
return ast . BitXor ( );
self._reset(mark)
if (
(self.expect('<<='))
):
return ast . LShift ( );
self._reset(mark)
if (
(self.expect('>>='))
):
return ast . RShift ( );
self._reset(mark)
if (
(self.expect('**='))
):
return ast . Pow ( );
self._reset(mark)
if (
(self.expect('//='))
):
return ast . FloorDiv ( );
self._reset(mark)
return None;
@memoize
def return_stmt(self) -> Optional[ast . Return]:
# return_stmt: 'return' star_expressions?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('return'))
and
(a := self.star_expressions(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Return ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def raise_stmt(self) -> Optional[ast . Raise]:
# raise_stmt: 'raise' expression ['from' expression] | 'raise'
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('raise'))
and
(a := self.expression())
and
(b := self._tmp_20(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Raise ( exc = a , cause = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('raise'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Raise ( exc = None , cause = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def global_stmt(self) -> Optional[ast . Global]:
# global_stmt: 'global' ','.NAME+
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('global'))
and
(a := self._gather_21())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Global ( names = [n . string for n in a] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def nonlocal_stmt(self) -> Optional[ast . Nonlocal]:
# nonlocal_stmt: 'nonlocal' ','.NAME+
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('nonlocal'))
and
(a := self._gather_23())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Nonlocal ( names = [n . string for n in a] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def del_stmt(self) -> Optional[ast . Delete]:
# del_stmt: 'del' del_targets &(';' | NEWLINE) | invalid_del_stmt
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('del'))
and
(a := self.del_targets())
and
(self.positive_lookahead(self._tmp_25, ))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Delete ( targets = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_del_stmt())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def yield_stmt(self) -> Optional[ast . Expr]:
# yield_stmt: yield_expr
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(y := self.yield_expr())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Expr ( value = y , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def assert_stmt(self) -> Optional[ast . Assert]:
# assert_stmt: 'assert' expression [',' expression]
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('assert'))
and
(a := self.expression())
and
(b := self._tmp_26(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Assert ( test = a , msg = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def import_stmt(self) -> Optional[ast . Import]:
# import_stmt: invalid_import | import_name | import_from
mark = self._mark()
if (
self.call_invalid_rules
and
(self.invalid_import())
):
return None # pragma: no cover;
self._reset(mark)
if (
(import_name := self.import_name())
):
return import_name;
self._reset(mark)
if (
(import_from := self.import_from())
):
return import_from;
self._reset(mark)
return None;
@memoize
def import_name(self) -> Optional[ast . Import]:
# import_name: 'import' dotted_as_names
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('import'))
and
(a := self.dotted_as_names())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Import ( names = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def import_from(self) -> Optional[ast . ImportFrom]:
# import_from: 'from' (('.' | '...'))* dotted_name 'import' import_from_targets | 'from' (('.' | '...'))+ 'import' import_from_targets
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('from'))
and
(a := self._loop0_27(),)
and
(b := self.dotted_name())
and
(self.expect('import'))
and
(c := self.import_from_targets())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . ImportFrom ( module = b , names = c , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('from'))
and
(a := self._loop1_28())
and
(self.expect('import'))
and
(b := self.import_from_targets())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . ImportFrom ( names = b , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . ImportFrom ( module = None , names = b , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def import_from_targets(self) -> Optional[List [ast . alias]]:
# import_from_targets: '(' import_from_as_names ','? ')' | import_from_as_names !',' | '*' | invalid_import_from_targets
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('('))
and
(a := self.import_from_as_names())
and
(self.expect(','),)
and
(self.expect(')'))
):
return a;
self._reset(mark)
if (
(import_from_as_names := self.import_from_as_names())
and
(self.negative_lookahead(self.expect, ','))
):
return import_from_as_names;
self._reset(mark)
if (
(self.expect('*'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return [ast . alias ( name = "*" , asname = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )];
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_import_from_targets())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def import_from_as_names(self) -> Optional[List [ast . alias]]:
# import_from_as_names: ','.import_from_as_name+
mark = self._mark()
if (
(a := self._gather_29())
):
return a;
self._reset(mark)
return None;
@memoize
def import_from_as_name(self) -> Optional[ast . alias]:
# import_from_as_name: NAME ['as' NAME]
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.name())
and
(b := self._tmp_31(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . alias ( name = a . string , asname = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def dotted_as_names(self) -> Optional[List [ast . alias]]:
# dotted_as_names: ','.dotted_as_name+
mark = self._mark()
if (
(a := self._gather_32())
):
return a;
self._reset(mark)
return None;
@memoize
def dotted_as_name(self) -> Optional[ast . alias]:
# dotted_as_name: dotted_name ['as' NAME]
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.dotted_name())
and
(b := self._tmp_34(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . alias ( name = a , asname = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize_left_rec
def dotted_name(self) -> Optional[str]:
# dotted_name: dotted_name '.' NAME | NAME
mark = self._mark()
if (
(a := self.dotted_name())
and
(self.expect('.'))
and
(b := self.name())
):
return a + "." + b . string;
self._reset(mark)
if (
(a := self.name())
):
return a . string;
self._reset(mark)
return None;
@memoize
def block(self) -> Optional[list]:
# block: NEWLINE INDENT statements DEDENT | simple_stmts | invalid_block
mark = self._mark()
if (
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(a := self.statements())
and
(self.expect('DEDENT'))
):
return a;
self._reset(mark)
if (
(simple_stmts := self.simple_stmts())
):
return simple_stmts;
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_block())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def decorators(self) -> Optional[Any]:
# decorators: decorator+
mark = self._mark()
if (
(_loop1_35 := self._loop1_35())
):
return _loop1_35;
self._reset(mark)
return None;
@memoize
def decorator(self) -> Optional[Any]:
# decorator: ('@' dec_maybe_call NEWLINE) | ('@' named_expression NEWLINE)
mark = self._mark()
if (
(a := self._tmp_36())
):
return a;
self._reset(mark)
if (
(a := self._tmp_37())
):
return self . check_version ( ( 3 , 9 ) , "Generic decorators are" , a );
self._reset(mark)
return None;
@memoize
def dec_maybe_call(self) -> Optional[Any]:
# dec_maybe_call: dec_primary '(' arguments? ')' | dec_primary
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(dn := self.dec_primary())
and
(self.expect('('))
and
(z := self.arguments(),)
and
(self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Call ( func = dn , args = z [0] if z else [] , keywords = z [1] if z else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(dec_primary := self.dec_primary())
):
return dec_primary;
self._reset(mark)
return None;
@memoize_left_rec
def dec_primary(self) -> Optional[Any]:
# dec_primary: dec_primary '.' NAME | NAME
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.dec_primary())
and
(self.expect('.'))
and
(b := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(a := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Name ( id = a . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def class_def(self) -> Optional[ast . ClassDef]:
# class_def: decorators class_def_raw | class_def_raw
mark = self._mark()
if (
(a := self.decorators())
and
(b := self.class_def_raw())
):
return self . set_decorators ( b , a );
self._reset(mark)
if (
(class_def_raw := self.class_def_raw())
):
return class_def_raw;
self._reset(mark)
return None;
@memoize
def class_def_raw(self) -> Optional[ast . ClassDef]:
# class_def_raw: invalid_class_def_raw | 'class' NAME type_params? ['(' arguments? ')'] &&':' scenic_class_def_block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_class_def_raw())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('class'))
and
(a := self.name())
and
(t := self.type_params(),)
and
(b := self._tmp_38(),)
and
(self.expect_forced(self.expect(':'), "':'"))
and
(c := self.scenic_class_def_block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ( ast . ClassDef ( a . string , bases = b [0] if b else [] , keywords = b [1] if b else [] , body = c , decorator_list = [] , type_params = t or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) if sys . version_info >= ( 3 , 12 ) else ast . ClassDef ( a . string , bases = b [0] if b else [] , keywords = b [1] if b else [] , body = c , decorator_list = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
self._reset(mark)
return None;
@memoize
def scenic_class_def_block(self) -> Optional[Any]:
# scenic_class_def_block: NEWLINE INDENT scenic_class_statements DEDENT | simple_stmts | invalid_block
mark = self._mark()
if (
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(a := self.scenic_class_statements())
and
(self.expect('DEDENT'))
):
return a;
self._reset(mark)
if (
(simple_stmts := self.simple_stmts())
):
return simple_stmts;
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_block())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def scenic_class_statements(self) -> Optional[list]:
# scenic_class_statements: scenic_class_statement+
mark = self._mark()
if (
(a := self._loop1_39())
):
return list ( itertools . chain . from_iterable ( a ) );
self._reset(mark)
return None;
@memoize
def scenic_class_statement(self) -> Optional[list]:
# scenic_class_statement: scenic_class_property_stmt | compound_stmt | scenic_stmts | simple_stmts
mark = self._mark()
if (
(a := self.scenic_class_property_stmt())
):
return [a];
self._reset(mark)
if (
(a := self.compound_stmt())
):
return [a];
self._reset(mark)
if (
(a := self.scenic_stmts())
):
return a;
self._reset(mark)
if (
(a := self.simple_stmts())
):
return a;
self._reset(mark)
return None;
@memoize
def scenic_class_property_stmt(self) -> Optional[Any]:
# scenic_class_property_stmt: NAME ['[' ','.scenic_class_property_attribute+ ']'] ':' expression NEWLINE
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.name())
and
(b := self._tmp_40(),)
and
(self.expect(':'))
and
(c := self.expression())
and
(self.expect('NEWLINE'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . PropertyDef ( property = a . string , attributes = b if b is not None else [] , value = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
return None;
@memoize
def scenic_class_property_attribute(self) -> Optional[Any]:
# scenic_class_property_attribute: &&("additive" | "dynamic" | "final")
# nullable=True
mark = self._mark()
if (
(forced := self.expect_forced(self._tmp_41(), '''("additive" | "dynamic" | "final")'''))
):
return forced;
self._reset(mark)
return None;
@memoize
def scenic_assign_new_stmt(self) -> Optional[Any]:
# scenic_assign_new_stmt: ((star_targets '='))+ (scenic_new_block) !'=' TYPE_COMMENT?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self._loop1_42())
and
(b := self.scenic_new_block())
and
(self.negative_lookahead(self.expect, '='))
and
(tc := self.type_comment(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Assign ( targets = a , value = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def scenic_tracked_assign_new_stmt(self) -> Optional[Any]:
# scenic_tracked_assign_new_stmt: scenic_tracked_name '=' scenic_new_block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.scenic_tracked_name())
and
(self.expect('='))
and
(b := self.scenic_new_block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . TrackedAssign ( target = a , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def scenic_expr_new_stmt(self) -> Optional[Any]:
# scenic_expr_new_stmt: scenic_new_block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.scenic_new_block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Expr ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def scenic_new_block(self) -> Optional[Any]:
# scenic_new_block: scenic_new_expr ',' NEWLINE INDENT scenic_new_block_body DEDENT
mark = self._mark()
if (
(a := self.scenic_new_expr())
and
(self.expect(','))
and
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(b := self.scenic_new_block_body())
and
(self.expect('DEDENT'))
):
return self . extend_new_specifiers ( a , b );
self._reset(mark)
return None;
@memoize
def scenic_new_block_body(self) -> Optional[Any]:
# scenic_new_block_body: ((scenic_specifiers ',' NEWLINE))* scenic_specifiers NEWLINE | ((scenic_specifiers ',' NEWLINE))+
mark = self._mark()
if (
(b := self._loop0_43(),)
and
(c := self.scenic_specifiers())
and
(self.expect('NEWLINE'))
):
return list ( itertools . chain . from_iterable ( b ) ) + c;
self._reset(mark)
if (
(b := self._loop1_44())
):
return list ( itertools . chain . from_iterable ( b ) );
self._reset(mark)
return None;
@memoize
def scenic_behavior_def(self) -> Optional[Any]:
# scenic_behavior_def: "behavior" NAME '(' params? ')' &&':' scenic_behavior_def_block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("behavior"))
and
(a := self.name())
and
(self.expect('('))
and
(b := self.params(),)
and
(self.expect(')'))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(c := self.scenic_behavior_def_block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . BehaviorDef ( a . string , args = b or self . make_arguments ( None , [] , None , [] , None ) , docstring = c [0] , header = c [1] , body = c [2] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
return None;
@memoize
def scenic_behavior_def_block(self) -> Optional[Any]:
# scenic_behavior_def_block: NEWLINE INDENT [STRING NEWLINE] scenic_behavior_header? scenic_behavior_statements DEDENT | invalid_block
mark = self._mark()
if (
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(a := self._tmp_45(),)
and
(b := self.scenic_behavior_header(),)
and
(c := self.scenic_behavior_statements())
and
(self.expect('DEDENT'))
):
return ( a , b or [] , c );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_block())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def scenic_behavior_statements(self) -> Optional[list]:
# scenic_behavior_statements: scenic_behavior_statement+
mark = self._mark()
if (
(a := self._loop1_46())
):
return list ( itertools . chain . from_iterable ( a ) );
self._reset(mark)
return None;
@memoize
def scenic_behavior_statement(self) -> Optional[list]:
# scenic_behavior_statement: scenic_invalid_behavior_statement | statement
mark = self._mark()
if (
(scenic_invalid_behavior_statement := self.scenic_invalid_behavior_statement())
):
return scenic_invalid_behavior_statement;
self._reset(mark)
if (
(a := self.statement())
):
return a;
self._reset(mark)
return None;
@memoize
def scenic_invalid_behavior_statement(self) -> Optional[Any]:
# scenic_invalid_behavior_statement: "invariant" ':' expression | "precondition" ':' expression
mark = self._mark()
if (
(a := self.expect("invariant"))
and
(self.expect(':'))
and
(a_1 := self.expression())
):
return self . raise_syntax_error_known_location ( "invariant can only be set at the beginning of behavior definitions" , a );
self._reset(mark)
if (
(a := self.expect("precondition"))
and
(self.expect(':'))
and
(a_1 := self.expression())
):
return self . raise_syntax_error_known_location ( "precondition can only be set at the beginning of behavior definitions" , a );
self._reset(mark)
return None;
@memoize
def scenic_behavior_header(self) -> Optional[Any]:
# scenic_behavior_header: (((scenic_precondition_stmt | scenic_invariant_stmt) NEWLINE))+
mark = self._mark()
if (
(a := self._loop1_47())
):
return a;
self._reset(mark)
return None;
@memoize
def scenic_precondition_stmt(self) -> Optional[Any]:
# scenic_precondition_stmt: "precondition" ':' expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("precondition"))
and
(self.expect(':'))
and
(a := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . Precondition ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def scenic_invariant_stmt(self) -> Optional[Any]:
# scenic_invariant_stmt: "invariant" ':' expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("invariant"))
and
(self.expect(':'))
and
(a := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . Invariant ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def scenic_monitor_def(self) -> Optional[Any]:
# scenic_monitor_def: invalid_monitor | "monitor" NAME '(' params? ')' &&':' scenic_monitor_def_block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_monitor())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect("monitor"))
and
(a := self.name())
and
(self.expect('('))
and
(b := self.params(),)
and
(self.expect(')'))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(c := self.scenic_monitor_def_block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . MonitorDef ( a . string , args = b or self . make_arguments ( None , [] , None , [] , None ) , docstring = c [0] , body = c [1] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def invalid_monitor(self) -> Optional[NoReturn]:
# invalid_monitor: "monitor" NAME ':'
mark = self._mark()
if (
(self.expect("monitor"))
and
(self.name())
and
(a := self.expect(':'))
):
return self . raise_syntax_error_known_location ( "2.0-style monitor must be converted to use parentheses and explicit require" , a );
self._reset(mark)
return None;
@memoize
def scenic_monitor_def_block(self) -> Optional[Any]:
# scenic_monitor_def_block: NEWLINE INDENT [STRING NEWLINE] scenic_monitor_statements DEDENT
mark = self._mark()
if (
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(a := self._tmp_48(),)
and
(b := self.scenic_monitor_statements())
and
(self.expect('DEDENT'))
):
return ( a , b );
self._reset(mark)
return None;
@memoize
def scenic_monitor_statements(self) -> Optional[list]:
# scenic_monitor_statements: statement+
mark = self._mark()
if (
(a := self._loop1_49())
):
return list ( itertools . chain . from_iterable ( a ) );
self._reset(mark)
return None;
@memoize
def scenic_scenario_def(self) -> Optional[Any]:
# scenic_scenario_def: "scenario" NAME ['(' params? ')'] &&':' scenic_scenario_def_block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("scenario"))
and
(a := self.name())
and
(b := self._tmp_50(),)
and
(self.expect_forced(self.expect(':'), "':'"))
and
(c := self.scenic_scenario_def_block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . ScenarioDef ( a . string , args = b or self . make_arguments ( None , [] , None , [] , None ) , docstring = c [0] , header = c [1] , setup = c [2] , compose = c [3] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
return None;
@memoize
def scenic_scenario_def_block(self) -> Optional[Any]:
# scenic_scenario_def_block: NEWLINE INDENT [STRING NEWLINE] scenic_behavior_header? scenic_scenario_setup_block? scenic_scenario_compose_block? DEDENT | NEWLINE INDENT [STRING NEWLINE] statements DEDENT
mark = self._mark()
if (
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(a := self._tmp_51(),)
and
(b := self.scenic_behavior_header(),)
and
(c := self.scenic_scenario_setup_block(),)
and
(d := self.scenic_scenario_compose_block(),)
and
(self.expect('DEDENT'))
):
return ( a , b or [] , c or [] , d or [] );
self._reset(mark)
if (
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(a := self._tmp_52(),)
and
(b := self.statements())
and
(self.expect('DEDENT'))
):
return ( a , [] , b , [] );
self._reset(mark)
return None;
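# Example (illustrative) modular scenario accepted by scenic_scenario_def; the
# docstring, behavior header, `setup`, and `compose` blocks are each optional,
# and the second alternative above instead allows a plain statement body. The
# body contents are assumptions for illustration only:
#
#     scenario Main():
#         """A car approaching an intersection."""
#         setup:
#             ...
#         compose:
#             ...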
@memoize
def scenic_scenario_setup_block(self) -> Optional[Any]:
# scenic_scenario_setup_block: "setup" &&':' block
mark = self._mark()
if (
(self.expect("setup"))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
):
return b;
self._reset(mark)
return None;
@memoize
def scenic_scenario_compose_block(self) -> Optional[Any]:
# scenic_scenario_compose_block: "compose" &&':' block
mark = self._mark()
if (
(self.expect("compose"))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
):
return b;
self._reset(mark)
return None;
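# Note on the `&&':'` markers in the grammar comments above: they compile to
# expect_forced(self.expect(':'), "':'"), which raises a syntax error
# immediately when the ':' is missing instead of backtracking to another
# alternative, so the user gets "expected ':'" at the right location.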
@memoize
def scenic_override_stmt(self) -> Optional[Any]:
# scenic_override_stmt: "override" primary scenic_specifiers NEWLINE | "override" primary scenic_specifiers ',' NEWLINE INDENT scenic_new_block_body DEDENT
mark = self._mark()
if (
(self.expect("override"))
and
(e := self.primary())
and
(ss := self.scenic_specifiers())
and
(self.expect('NEWLINE'))
):
return s . Override ( target = e , specifiers = ss );
self._reset(mark)
if (
(self.expect("override"))
and
(e := self.primary())
and
(ss := self.scenic_specifiers())
and
(self.expect(','))
and
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(t := self.scenic_new_block_body())
and
(self.expect('DEDENT'))
):
return s . Override ( target = e , specifiers = ss + t );
self._reset(mark)
return None;
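# Example (illustrative) override statement; `override` re-applies specifiers
# to an existing object within a scenario. The specifier shown is an
# assumption based on Scenic's specifier syntax, not fixed by this rule:
#
#     override ego with behavior PullOverBehavior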
@memoize
def function_def(self) -> Optional[Union [ast . FunctionDef , ast . AsyncFunctionDef]]:
# function_def: decorators function_def_raw | function_def_raw
mark = self._mark()
if (
(d := self.decorators())
and
(f := self.function_def_raw())
):
return self . set_decorators ( f , d );
self._reset(mark)
if (
(f := self.function_def_raw())
):
return self . set_decorators ( f , [] );
self._reset(mark)
return None;
@memoize
def function_def_raw(self) -> Optional[Union [ast . FunctionDef , ast . AsyncFunctionDef]]:
# function_def_raw: invalid_def_raw | 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block | 'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_def_raw())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('def'))
and
(n := self.name())
and
(t := self.type_params(),)
and
(self.expect_forced(self.expect('('), "'('"))
and
(params := self.params(),)
and
(self.expect(')'))
and
(a := self._tmp_53(),)
and
(self.expect_forced(self.expect(':'), "':'"))
and
(tc := self.func_type_comment(),)
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ( ast . FunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , type_params = t or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) if sys . version_info >= ( 3 , 12 ) else ast . FunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
self._reset(mark)
if (
(self.expect('async'))
and
(self.expect('def'))
and
(n := self.name())
and
(t := self.type_params(),)
and
(self.expect_forced(self.expect('('), "'('"))
and
(params := self.params(),)
and
(self.expect(')'))
and
(a := self._tmp_54(),)
and
(self.expect_forced(self.expect(':'), "':'"))
and
(tc := self.func_type_comment(),)
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ( self . check_version ( ( 3 , 5 ) , "Async functions are" , ast . AsyncFunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , type_params = t or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) ) if sys . version_info >= ( 3 , 12 ) else self . check_version ( ( 3 , 5 ) , "Async functions are" , ast . AsyncFunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) ) );
self._reset(mark)
return None;
@memoize
def params(self) -> Optional[Any]:
# params: invalid_parameters | parameters
mark = self._mark()
if (
self.call_invalid_rules
and
(self.invalid_parameters())
):
return None # pragma: no cover;
self._reset(mark)
if (
(parameters := self.parameters())
):
return parameters;
self._reset(mark)
return None;
@memoize
def parameters(self) -> Optional[ast . arguments]:
# parameters: slash_no_default param_no_default* param_with_default* star_etc? | slash_with_default param_with_default* star_etc? | param_no_default+ param_with_default* star_etc? | param_with_default+ star_etc? | star_etc
mark = self._mark()
if (
(a := self.slash_no_default())
and
(b := self._loop0_55(),)
and
(c := self._loop0_56(),)
and
(d := self.star_etc(),)
):
return self . check_version ( ( 3 , 8 ) , "Positional only arguments are" , self . make_arguments ( a , [] , b , c , d ) );
self._reset(mark)
if (
(a := self.slash_with_default())
and
(b := self._loop0_57(),)
and
(c := self.star_etc(),)
):
return self . check_version ( ( 3 , 8 ) , "Positional only arguments are" , self . make_arguments ( None , a , None , b , c ) , );
self._reset(mark)
if (
(a := self._loop1_58())
and
(b := self._loop0_59(),)
and
(c := self.star_etc(),)
):
return self . make_arguments ( None , [] , a , b , c );
self._reset(mark)
if (
(a := self._loop1_60())
and
(b := self.star_etc(),)
):
return self . make_arguments ( None , [] , None , a , b );
self._reset(mark)
if (
(a := self.star_etc())
):
return self . make_arguments ( None , [] , None , None , a );
self._reset(mark)
return None;
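# The five alternatives in `parameters` correspond (illustratively) to these
# Python signature shapes, in order:
#
#     def f(a, b, /, c, d=1, *e, **g): ...   # slash_no_default ...
#     def f(a, b=1, /, c=2, *, d): ...       # slash_with_default ...
#     def f(a, b=1): ...                     # param_no_default+ ...
#     def f(a=1, b=2): ...                   # param_with_default+ ...
#     def f(*args, **kwargs): ...            # star_etc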
@memoize
def slash_no_default(self) -> Optional[List [Tuple [ast . arg , None]]]:
# slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')'
mark = self._mark()
if (
(a := self._loop1_61())
and
(self.expect('/'))
and
(self.expect(','))
):
return [( p , None ) for p in a];
self._reset(mark)
if (
(a := self._loop1_62())
and
(self.expect('/'))
and
(self.positive_lookahead(self.expect, ')'))
):
return [( p , None ) for p in a];
self._reset(mark)
return None;
@memoize
def slash_with_default(self) -> Optional[List [Tuple [ast . arg , Any]]]:
# slash_with_default: param_no_default* param_with_default+ '/' ',' | param_no_default* param_with_default+ '/' &')'
mark = self._mark()
if (
(a := self._loop0_63(),)
and
(b := self._loop1_64())
and
(self.expect('/'))
and
(self.expect(','))
):
return ( [( p , None ) for p in a] if a else [] ) + b;
self._reset(mark)
if (
(a := self._loop0_65(),)
and
(b := self._loop1_66())
and
(self.expect('/'))
and
(self.positive_lookahead(self.expect, ')'))
):
return ( [( p , None ) for p in a] if a else [] ) + b;
self._reset(mark)
return None;
@memoize
def star_etc(self) -> Optional[Tuple [Optional [ast . arg] , List [Tuple [ast . arg , Any]] , Optional [ast . arg]]]:
# star_etc: invalid_star_etc | '*' param_no_default param_maybe_default* kwds? | '*' param_no_default_star_annotation param_maybe_default* kwds? | '*' ',' param_maybe_default+ kwds? | kwds
mark = self._mark()
if (
self.call_invalid_rules
and
(self.invalid_star_etc())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('*'))
and
(a := self.param_no_default())
and
(b := self._loop0_67(),)
and
(c := self.kwds(),)
):
return ( a , b , c );
self._reset(mark)
if (
(self.expect('*'))
and
(a := self.param_no_default_star_annotation())
and
(b := self._loop0_68(),)
and
(c := self.kwds(),)
):
return ( a , b , c );
self._reset(mark)
if (
(self.expect('*'))
and
(self.expect(','))
and
(b := self._loop1_69())
and
(c := self.kwds(),)
):
return ( None , b , c );
self._reset(mark)
if (
(a := self.kwds())
):
return ( None , [] , a );
self._reset(mark)
return None;
@memoize
def kwds(self) -> Optional[ast . arg]:
# kwds: invalid_kwds | '**' param_no_default
mark = self._mark()
if (
self.call_invalid_rules
and
(self.invalid_kwds())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('**'))
and
(a := self.param_no_default())
):
return a;
self._reset(mark)
return None;
@memoize
def param_no_default(self) -> Optional[ast . arg]:
# param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')'
mark = self._mark()
if (
(a := self.param())
and
(self.expect(','))
and
(tc := self.type_comment(),)
):
return self . set_arg_type_comment ( a , tc );
self._reset(mark)
if (
(a := self.param())
and
(tc := self.type_comment(),)
and
(self.positive_lookahead(self.expect, ')'))
):
return self . set_arg_type_comment ( a , tc );
self._reset(mark)
return None;
@memoize
def param_no_default_star_annotation(self) -> Optional[ast . arg]:
# param_no_default_star_annotation: param_star_annotation ',' TYPE_COMMENT? | param_star_annotation TYPE_COMMENT? &')'
mark = self._mark()
if (
(a := self.param_star_annotation())
and
(self.expect(','))
and
(tc := self.type_comment(),)
):
return self . set_arg_type_comment ( a , tc );
self._reset(mark)
if (
(a := self.param_star_annotation())
and
(tc := self.type_comment(),)
and
(self.positive_lookahead(self.expect, ')'))
):
return self . set_arg_type_comment ( a , tc );
self._reset(mark)
return None;
@memoize
def param_with_default(self) -> Optional[Tuple [ast . arg , Any]]:
# param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? &')'
mark = self._mark()
if (
(a := self.param())
and
(c := self.default())
and
(self.expect(','))
and
(tc := self.type_comment(),)
):
return ( self . set_arg_type_comment ( a , tc ) , c );
self._reset(mark)
if (
(a := self.param())
and
(c := self.default())
and
(tc := self.type_comment(),)
and
(self.positive_lookahead(self.expect, ')'))
):
return ( self . set_arg_type_comment ( a , tc ) , c );
self._reset(mark)
return None;
@memoize
def param_maybe_default(self) -> Optional[Tuple [ast . arg , Any]]:
# param_maybe_default: param default? ',' TYPE_COMMENT? | param default? TYPE_COMMENT? &')'
mark = self._mark()
if (
(a := self.param())
and
(c := self.default(),)
and
(self.expect(','))
and
(tc := self.type_comment(),)
):
return ( self . set_arg_type_comment ( a , tc ) , c );
self._reset(mark)
if (
(a := self.param())
and
(c := self.default(),)
and
(tc := self.type_comment(),)
and
(self.positive_lookahead(self.expect, ')'))
):
return ( self . set_arg_type_comment ( a , tc ) , c );
self._reset(mark)
return None;
@memoize
def param(self) -> Optional[Any]:
# param: NAME annotation?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.name())
and
(b := self.annotation(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . arg ( arg = a . string , annotation = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def param_star_annotation(self) -> Optional[Any]:
# param_star_annotation: NAME star_annotation
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.name())
and
(b := self.star_annotation())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . arg ( arg = a . string , annotation = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def annotation(self) -> Optional[Any]:
# annotation: ':' expression
mark = self._mark()
if (
(self.expect(':'))
and
(a := self.expression())
):
return a;
self._reset(mark)
return None;
@memoize
def star_annotation(self) -> Optional[Any]:
# star_annotation: ':' star_expression
mark = self._mark()
if (
(self.expect(':'))
and
(a := self.star_expression())
):
return a;
self._reset(mark)
return None;
@memoize
def default(self) -> Optional[Any]:
# default: '=' expression | invalid_default
mark = self._mark()
if (
(self.expect('='))
and
(a := self.expression())
):
return a;
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_default())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def if_stmt(self) -> Optional[ast . If]:
# if_stmt: invalid_if_stmt | 'if' named_expression ':' block elif_stmt | 'if' named_expression ':' block else_block?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_if_stmt())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('if'))
and
(a := self.named_expression())
and
(self.expect(':'))
and
(b := self.block())
and
(c := self.elif_stmt())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('if'))
and
(a := self.named_expression())
and
(self.expect(':'))
and
(b := self.block())
and
(c := self.else_block(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def elif_stmt(self) -> Optional[List [ast . If]]:
# elif_stmt: invalid_elif_stmt | 'elif' named_expression ':' block elif_stmt | 'elif' named_expression ':' block else_block?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_elif_stmt())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('elif'))
and
(a := self.named_expression())
and
(self.expect(':'))
and
(b := self.block())
and
(c := self.elif_stmt())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return [ast . If ( test = a , body = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )];
self._reset(mark)
if (
(self.expect('elif'))
and
(a := self.named_expression())
and
(self.expect(':'))
and
(b := self.block())
and
(c := self.else_block(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return [ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )];
self._reset(mark)
return None;
@memoize
def else_block(self) -> Optional[list]:
# else_block: invalid_else_stmt | 'else' &&':' block
mark = self._mark()
if (
self.call_invalid_rules
and
(self.invalid_else_stmt())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('else'))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
):
return b;
self._reset(mark)
return None;
@memoize
def while_stmt(self) -> Optional[ast . While]:
# while_stmt: invalid_while_stmt | 'while' named_expression ':' block else_block?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_while_stmt())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('while'))
and
(a := self.named_expression())
and
(self.expect(':'))
and
(b := self.block())
and
(c := self.else_block(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . While ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def for_stmt(self) -> Optional[Union [ast . For , ast . AsyncFor]]:
# for_stmt: invalid_for_stmt | 'for' star_targets 'in' ~ star_expressions &&':' TYPE_COMMENT? block else_block? | 'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? | invalid_for_target
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_for_stmt())
):
return None # pragma: no cover;
self._reset(mark)
cut = False
if (
(self.expect('for'))
and
(t := self.star_targets())
and
(self.expect('in'))
and
(cut := True)
and
(ex := self.star_expressions())
and
(self.expect_forced(self.expect(':'), "':'"))
and
(tc := self.type_comment(),)
and
(b := self.block())
and
(el := self.else_block(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . For ( target = t , iter = ex , body = b , orelse = el or [] , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if cut:
return None;
cut = False
if (
(self.expect('async'))
and
(self.expect('for'))
and
(t := self.star_targets())
and
(self.expect('in'))
and
(cut := True)
and
(ex := self.star_expressions())
and
(self.expect(':'))
and
(tc := self.type_comment(),)
and
(b := self.block())
and
(el := self.else_block(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 5 ) , "Async for loops are" , ast . AsyncFor ( target = t , iter = ex , body = b , orelse = el or [] , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
self._reset(mark)
if cut:
return None;
if (
self.call_invalid_rules
and
(self.invalid_for_target())
):
return None # pragma: no cover;
self._reset(mark)
return None;
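# Note on the `~` (cut) operator compiled into for_stmt above: once
# `cut := True` runs (after 'in' has been parsed), a failure of the remaining
# items makes the `if cut: return None` branch bail out of the rule entirely
# instead of backtracking into later alternatives, keeping error reporting
# anchored to the for-loop form the parser has already committed to.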
@memoize
def with_stmt(self) -> Optional[Union [ast . With , ast . AsyncWith]]:
# with_stmt: invalid_with_stmt_indent | 'with' '(' ','.with_item+ ','? ')' ':' block | 'with' ','.with_item+ ':' TYPE_COMMENT? block | 'async' 'with' '(' ','.with_item+ ','? ')' ':' block | 'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block | invalid_with_stmt
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_with_stmt_indent())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('with'))
and
(self.expect('('))
and
(a := self._gather_70())
and
(self.expect(','),)
and
(self.expect(')'))
and
(self.expect(':'))
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 9 ) , "Parenthesized with items" , ast . With ( items = a , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
self._reset(mark)
if (
(self.expect('with'))
and
(a := self._gather_72())
and
(self.expect(':'))
and
(tc := self.type_comment(),)
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . With ( items = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('async'))
and
(self.expect('with'))
and
(self.expect('('))
and
(a := self._gather_74())
and
(self.expect(','),)
and
(self.expect(')'))
and
(self.expect(':'))
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 9 ) , "Parenthesized with items" , ast . AsyncWith ( items = a , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
self._reset(mark)
if (
(self.expect('async'))
and
(self.expect('with'))
and
(a := self._gather_76())
and
(self.expect(':'))
and
(tc := self.type_comment(),)
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 5 ) , "Async with statements are" , ast . AsyncWith ( items = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_with_stmt())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def with_item(self) -> Optional[ast . withitem]:
# with_item: expression 'as' star_target &(',' | ')' | ':') | invalid_with_item | expression
mark = self._mark()
if (
(e := self.expression())
and
(self.expect('as'))
and
(t := self.star_target())
and
(self.positive_lookahead(self._tmp_78, ))
):
return ast . withitem ( context_expr = e , optional_vars = t );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_with_item())
):
return None # pragma: no cover;
self._reset(mark)
if (
(e := self.expression())
):
return ast . withitem ( context_expr = e , optional_vars = None );
self._reset(mark)
return None;
@memoize
def try_stmt(self) -> Optional[ast . Try]:
# try_stmt: invalid_try_stmt | 'try' &&':' block finally_block | 'try' &&':' block except_block+ else_block? finally_block? | 'try' &&':' block except_star_block+ else_block? finally_block?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_try_stmt())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('try'))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
and
(f := self.finally_block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Try ( body = b , handlers = [] , orelse = [] , finalbody = f , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('try'))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
and
(ex := self._loop1_79())
and
(el := self.else_block(),)
and
(f := self.finally_block(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Try ( body = b , handlers = ex , orelse = el or [] , finalbody = f or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('try'))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
and
(ex := self._loop1_80())
and
(el := self.else_block(),)
and
(f := self.finally_block(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 11 ) , "Exception groups are" , ( ast . TryStar ( body = b , handlers = ex , orelse = el or [] , finalbody = f or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 11 ) else None ) );
self._reset(mark)
return None;
@memoize
def scenic_try_interrupt_stmt(self) -> Optional[s . TryInterrupt]:
# scenic_try_interrupt_stmt: 'try' &&':' block interrupt_when_block+ except_block* else_block? finally_block?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('try'))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
and
(iw := self._loop1_81())
and
(ex := self._loop0_82(),)
and
(el := self.else_block(),)
and
(f := self.finally_block(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . TryInterrupt ( body = b , interrupt_when_handlers = iw , except_handlers = ex , orelse = el or [] , finalbody = f or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
return None;
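# Example (illustrative) try-interrupt statement inside a Scenic behavior;
# each `interrupt when` clause preempts the body while its condition holds.
# The behavior names and condition are assumptions for illustration:
#
#     try:
#         do FollowLaneBehavior()
#     interrupt when (distance to obstacle) < 10:
#         do AvoidObstacle()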
@memoize
def interrupt_when_block(self) -> Optional[Any]:
# interrupt_when_block: "interrupt" "when" expression &&':' block
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("interrupt"))
and
(self.expect("when"))
and
(e := self.expression())
and
(self.expect_forced(self.expect(':'), "':'"))
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . InterruptWhenHandler ( cond = e , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def except_block(self) -> Optional[ast . ExceptHandler]:
# except_block: invalid_except_stmt_indent | 'except' expression ['as' NAME] ':' block | 'except' ':' block | invalid_except_stmt
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_except_stmt_indent())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('except'))
and
(e := self.expression())
and
(t := self._tmp_83(),)
and
(self.expect(':'))
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . ExceptHandler ( type = e , name = t , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('except'))
and
(self.expect(':'))
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . ExceptHandler ( type = None , name = None , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_except_stmt())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def except_star_block(self) -> Optional[ast . ExceptHandler]:
# except_star_block: invalid_except_star_stmt_indent | 'except' '*' expression ['as' NAME] ':' block | invalid_except_stmt
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_except_star_stmt_indent())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('except'))
and
(self.expect('*'))
and
(e := self.expression())
and
(t := self._tmp_84(),)
and
(self.expect(':'))
and
(b := self.block())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . ExceptHandler ( type = e , name = t , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_except_stmt())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def finally_block(self) -> Optional[list]:
# finally_block: invalid_finally_stmt | 'finally' &&':' block
mark = self._mark()
if (
self.call_invalid_rules
and
(self.invalid_finally_stmt())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect('finally'))
and
(self.expect_forced(self.expect(':'), "':'"))
and
(a := self.block())
):
return a;
self._reset(mark)
return None;
@memoize
def match_stmt(self) -> Optional["ast.Match"]:
# match_stmt: "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT | invalid_match_stmt
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("match"))
and
(subject := self.subject_expr())
and
(self.expect(':'))
and
(self.expect('NEWLINE'))
and
(self.expect('INDENT'))
and
(cases := self._loop1_85())
and
(self.expect('DEDENT'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Match ( subject = subject , cases = cases , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_match_stmt())
):
return None # pragma: no cover;
self._reset(mark)
return None;
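# Example (illustrative) input matched by match_stmt and the pattern rules
# below; note the grammar requires at least one case_block between INDENT and
# DEDENT:
#
#     match command.split():
#         case ["go", direction]:
#             move(direction)
#         case _:
#             pass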
@memoize
def subject_expr(self) -> Optional[Any]:
# subject_expr: star_named_expression ',' star_named_expressions? | named_expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(value := self.star_named_expression())
and
(self.expect(','))
and
(values := self.star_named_expressions(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 10 ) , "Pattern matching is" , ast . Tuple ( elts = [value] + ( values or [] ) , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
self._reset(mark)
if (
(e := self.named_expression())
):
return self . check_version ( ( 3 , 10 ) , "Pattern matching is" , e );
self._reset(mark)
return None;
@memoize
def case_block(self) -> Optional["ast.match_case"]:
# case_block: invalid_case_block | "case" patterns guard? ':' block
mark = self._mark()
if (
self.call_invalid_rules
and
(self.invalid_case_block())
):
return None # pragma: no cover;
self._reset(mark)
if (
(self.expect("case"))
and
(pattern := self.patterns())
and
(guard := self.guard(),)
and
(self.expect(':'))
and
(body := self.block())
):
return ast . match_case ( pattern = pattern , guard = guard , body = body );
self._reset(mark)
return None;
@memoize
def guard(self) -> Optional[Any]:
# guard: 'if' named_expression
mark = self._mark()
if (
(self.expect('if'))
and
(guard := self.named_expression())
):
return guard;
self._reset(mark)
return None;
@memoize
def patterns(self) -> Optional[Any]:
# patterns: open_sequence_pattern | pattern
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(patterns := self.open_sequence_pattern())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchSequence ( patterns = patterns , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(pattern := self.pattern())
):
return pattern;
self._reset(mark)
return None;
@memoize
def pattern(self) -> Optional[Any]:
# pattern: as_pattern | or_pattern
mark = self._mark()
if (
(as_pattern := self.as_pattern())
):
return as_pattern;
self._reset(mark)
if (
(or_pattern := self.or_pattern())
):
return or_pattern;
self._reset(mark)
return None;
@memoize
def as_pattern(self) -> Optional["ast.MatchAs"]:
# as_pattern: or_pattern 'as' pattern_capture_target | invalid_as_pattern
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(pattern := self.or_pattern())
and
(self.expect('as'))
and
(target := self.pattern_capture_target())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchAs ( pattern = pattern , name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_as_pattern())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def or_pattern(self) -> Optional["ast.MatchOr"]:
# or_pattern: '|'.closed_pattern+
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(patterns := self._gather_86())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchOr ( patterns = patterns , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if len ( patterns ) > 1 else patterns [0];
self._reset(mark)
return None;
@memoize
def closed_pattern(self) -> Optional[Any]:
# closed_pattern: literal_pattern | capture_pattern | wildcard_pattern | value_pattern | group_pattern | sequence_pattern | mapping_pattern | class_pattern
mark = self._mark()
if (
(literal_pattern := self.literal_pattern())
):
return literal_pattern;
self._reset(mark)
if (
(capture_pattern := self.capture_pattern())
):
return capture_pattern;
self._reset(mark)
if (
(wildcard_pattern := self.wildcard_pattern())
):
return wildcard_pattern;
self._reset(mark)
if (
(value_pattern := self.value_pattern())
):
return value_pattern;
self._reset(mark)
if (
(group_pattern := self.group_pattern())
):
return group_pattern;
self._reset(mark)
if (
(sequence_pattern := self.sequence_pattern())
):
return sequence_pattern;
self._reset(mark)
if (
(mapping_pattern := self.mapping_pattern())
):
return mapping_pattern;
self._reset(mark)
if (
(class_pattern := self.class_pattern())
):
return class_pattern;
self._reset(mark)
return None;
@memoize
def literal_pattern(self) -> Optional[Any]:
# literal_pattern: signed_number !('+' | '-') | complex_number | strings | 'None' | 'True' | 'False'
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(value := self.signed_number())
and
(self.negative_lookahead(self._tmp_88, ))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(value := self.complex_number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(value := self.strings())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('None'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchSingleton ( value = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('True'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchSingleton ( value = True , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('False'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchSingleton ( value = False , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
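# Examples (illustrative) of literal patterns: `case 42:`, `case -1.5:`,
# `case 3+4j:`, `case "done":`, `case None:`, `case True:`. The
# `!('+' | '-')` lookahead keeps a bare signed number from consuming the real
# part of a complex literal, which complex_number handles instead.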
@memoize
def literal_expr(self) -> Optional[Any]:
# literal_expr: signed_number !('+' | '-') | complex_number | strings | 'None' | 'True' | 'False'
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(signed_number := self.signed_number())
and
(self.negative_lookahead(self._tmp_89, ))
):
return signed_number;
self._reset(mark)
if (
(complex_number := self.complex_number())
):
return complex_number;
self._reset(mark)
if (
(strings := self.strings())
):
return strings;
self._reset(mark)
if (
(self.expect('None'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Constant ( value = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('True'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Constant ( value = True , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('False'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Constant ( value = False , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def complex_number(self) -> Optional[Any]:
# complex_number: signed_real_number '+' imaginary_number | signed_real_number '-' imaginary_number
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(real := self.signed_real_number())
and
(self.expect('+'))
and
(imag := self.imaginary_number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = real , op = ast . Add ( ) , right = imag , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(real := self.signed_real_number())
and
(self.expect('-'))
and
(imag := self.imaginary_number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = real , op = ast . Sub ( ) , right = imag , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def signed_number(self) -> Optional[Any]:
# signed_number: NUMBER | '-' NUMBER
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Constant ( value = ast . literal_eval ( a . string ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('-'))
and
(a := self.number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . UnaryOp ( op = ast . USub ( ) , operand = ast . Constant ( value = ast . literal_eval ( a . string ) , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
return None;
@memoize
def signed_real_number(self) -> Optional[Any]:
# signed_real_number: real_number | '-' real_number
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(real_number := self.real_number())
):
return real_number;
self._reset(mark)
if (
(self.expect('-'))
and
(real := self.real_number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . UnaryOp ( op = ast . USub ( ) , operand = real , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def real_number(self) -> Optional[ast . Constant]:
# real_number: NUMBER
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(real := self.number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Constant ( value = self . ensure_real ( real ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def imaginary_number(self) -> Optional[ast . Constant]:
# imaginary_number: NUMBER
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(imag := self.number())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Constant ( value = self . ensure_imaginary ( imag ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def capture_pattern(self) -> Optional[Any]:
# capture_pattern: pattern_capture_target
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(target := self.pattern_capture_target())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchAs ( pattern = None , name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def pattern_capture_target(self) -> Optional[str]:
# pattern_capture_target: !"_" NAME !('.' | '(' | '=')
mark = self._mark()
if (
(self.negative_lookahead(self.expect, "_"))
and
(name := self.name())
and
(self.negative_lookahead(self._tmp_90, ))
):
return name . string;
self._reset(mark)
return None;
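# The lookaheads in pattern_capture_target separate capture patterns from
# neighbouring forms (names below are illustrative): `case x:` binds x
# (capture), while `case _:` is wildcard_pattern, `case Color.RED:` is
# value_pattern (excluded by the '.' lookahead), and `case Point(0, 0):` is
# class_pattern (excluded by the '(' lookahead).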
@memoize
def wildcard_pattern(self) -> Optional["ast.MatchAs"]:
# wildcard_pattern: "_"
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("_"))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchAs ( pattern = None , name = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def value_pattern(self) -> Optional["ast.MatchValue"]:
# value_pattern: attr !('.' | '(' | '=')
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(attr := self.attr())
and
(self.negative_lookahead(self._tmp_91, ))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchValue ( value = attr , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize_left_rec
def attr(self) -> Optional[ast . Attribute]:
# attr: name_or_attr '.' NAME
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(value := self.name_or_attr())
and
(self.expect('.'))
and
(attr := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Attribute ( value = value , attr = attr . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@logger
def name_or_attr(self) -> Optional[Any]:
# name_or_attr: attr | NAME
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(attr := self.attr())
):
return attr;
self._reset(mark)
if (
(name := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Name ( id = name . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def group_pattern(self) -> Optional[Any]:
# group_pattern: '(' pattern ')'
mark = self._mark()
if (
(self.expect('('))
and
(pattern := self.pattern())
and
(self.expect(')'))
):
return pattern;
self._reset(mark)
return None;
@memoize
def sequence_pattern(self) -> Optional["ast.MatchSequence"]:
# sequence_pattern: '[' maybe_sequence_pattern? ']' | '(' open_sequence_pattern? ')'
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('['))
and
(patterns := self.maybe_sequence_pattern(),)
and
(self.expect(']'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchSequence ( patterns = patterns or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('('))
and
(patterns := self.open_sequence_pattern(),)
and
(self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchSequence ( patterns = patterns or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def open_sequence_pattern(self) -> Optional[Any]:
# open_sequence_pattern: maybe_star_pattern ',' maybe_sequence_pattern?
mark = self._mark()
if (
(pattern := self.maybe_star_pattern())
and
(self.expect(','))
and
(patterns := self.maybe_sequence_pattern(),)
):
return [pattern] + ( patterns or [] );
self._reset(mark)
return None;
@memoize
def maybe_sequence_pattern(self) -> Optional[Any]:
# maybe_sequence_pattern: ','.maybe_star_pattern+ ','?
mark = self._mark()
if (
(patterns := self._gather_92())
and
(self.expect(','),)
):
return patterns;
self._reset(mark)
return None;
@memoize
def maybe_star_pattern(self) -> Optional[Any]:
# maybe_star_pattern: star_pattern | pattern
mark = self._mark()
if (
(star_pattern := self.star_pattern())
):
return star_pattern;
self._reset(mark)
if (
(pattern := self.pattern())
):
return pattern;
self._reset(mark)
return None;
@memoize
def star_pattern(self) -> Optional[Any]:
# star_pattern: '*' pattern_capture_target | '*' wildcard_pattern
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('*'))
and
(target := self.pattern_capture_target())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchStar ( name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('*'))
and
(self.wildcard_pattern())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchStar ( name = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def mapping_pattern(self) -> Optional[Any]:
# mapping_pattern: '{' '}' | '{' double_star_pattern ','? '}' | '{' items_pattern ',' double_star_pattern ','? '}' | '{' items_pattern ','? '}'
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('{'))
and
(self.expect('}'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchMapping ( keys = [] , patterns = [] , rest = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('{'))
and
(rest := self.double_star_pattern())
and
(self.expect(','),)
and
(self.expect('}'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchMapping ( keys = [] , patterns = [] , rest = rest , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('{'))
and
(items := self.items_pattern())
and
(self.expect(','))
and
(rest := self.double_star_pattern())
and
(self.expect(','),)
and
(self.expect('}'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchMapping ( keys = [k for k , _ in items] , patterns = [p for _ , p in items] , rest = rest , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
if (
(self.expect('{'))
and
(items := self.items_pattern())
and
(self.expect(','),)
and
(self.expect('}'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchMapping ( keys = [k for k , _ in items] , patterns = [p for _ , p in items] , rest = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
return None;
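# Examples (illustrative) of the four mapping_pattern alternatives, in order:
#
#     case {}: ...                    # empty mapping
#     case {**rest}: ...              # double_star_pattern only
#     case {"x": x, **rest}: ...      # items_pattern + double_star_pattern
#     case {"x": x, "y": y}: ...      # items_pattern only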
@memoize
def items_pattern(self) -> Optional[Any]:
# items_pattern: ','.key_value_pattern+
mark = self._mark()
if (
(_gather_94 := self._gather_94())
):
return _gather_94;
self._reset(mark)
return None;
@memoize
def key_value_pattern(self) -> Optional[Any]:
# key_value_pattern: (literal_expr | attr) ':' pattern
mark = self._mark()
if (
(key := self._tmp_96())
and
(self.expect(':'))
and
(pattern := self.pattern())
):
return ( key , pattern );
self._reset(mark)
return None;
@memoize
def double_star_pattern(self) -> Optional[Any]:
# double_star_pattern: '**' pattern_capture_target
mark = self._mark()
if (
(self.expect('**'))
and
(target := self.pattern_capture_target())
):
return target;
self._reset(mark)
return None;
@memoize
def class_pattern(self) -> Optional["ast.MatchClass"]:
# class_pattern: name_or_attr '(' ')' | name_or_attr '(' positional_patterns ','? ')' | name_or_attr '(' keyword_patterns ','? ')' | name_or_attr '(' positional_patterns ',' keyword_patterns ','? ')' | invalid_class_pattern
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(cls := self.name_or_attr())
and
(self.expect('('))
and
(self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchClass ( cls = cls , patterns = [] , kwd_attrs = [] , kwd_patterns = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(cls := self.name_or_attr())
and
(self.expect('('))
and
(patterns := self.positional_patterns())
and
(self.expect(','),)
and
(self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchClass ( cls = cls , patterns = patterns , kwd_attrs = [] , kwd_patterns = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(cls := self.name_or_attr())
and
(self.expect('('))
and
(keywords := self.keyword_patterns())
and
(self.expect(','),)
and
(self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchClass ( cls = cls , patterns = [] , kwd_attrs = [k for k , _ in keywords] , kwd_patterns = [p for _ , p in keywords] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
if (
(cls := self.name_or_attr())
and
(self.expect('('))
and
(patterns := self.positional_patterns())
and
(self.expect(','))
and
(keywords := self.keyword_patterns())
and
(self.expect(','),)
and
(self.expect(')'))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . MatchClass ( cls = cls , patterns = patterns , kwd_attrs = [k for k , _ in keywords] , kwd_patterns = [p for _ , p in keywords] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_class_pattern())
):
return None # pragma: no cover;
self._reset(mark)
return None;
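# Examples (illustrative) of the class_pattern alternatives, assuming a class
# Point whose __match_args__ permits positional sub-patterns:
#
#     case Point(): ...               # no sub-patterns
#     case Point(0, 0): ...           # positional_patterns
#     case Point(x=0, y=0): ...       # keyword_patterns
#     case Point(0, y=0): ...         # positional + keyword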
@memoize
def positional_patterns(self) -> Optional[Any]:
# positional_patterns: ','.pattern+
mark = self._mark()
if (
(args := self._gather_97())
):
return args;
self._reset(mark)
return None;
@memoize
def keyword_patterns(self) -> Optional[Any]:
# keyword_patterns: ','.keyword_pattern+
mark = self._mark()
if (
(_gather_99 := self._gather_99())
):
return _gather_99;
self._reset(mark)
return None;
@memoize
def keyword_pattern(self) -> Optional[Any]:
# keyword_pattern: NAME '=' pattern
mark = self._mark()
if (
(arg := self.name())
and
(self.expect('='))
and
(value := self.pattern())
):
return ( arg . string , value );
self._reset(mark)
return None;
@memoize
def type_alias(self) -> Optional["ast.TypeAlias"]:
# type_alias: "type" NAME type_params? '=' expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("type"))
and
(n := self.name())
and
(t := self.type_params(),)
and
(self.expect('='))
and
(b := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 12 ) , "Type statement is" , ( ast . TypeAlias ( name = ast . Name ( id = n . string , ctx = Store , lineno = n . start [0] , col_offset = n . start [1] , end_lineno = n . end [0] , end_col_offset = n . end [1] , ) , type_params = t or [] , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 12 ) else None ) );
self._reset(mark)
return None;
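# Example (illustrative) PEP 695 type statement parsed by type_alias; the
# check_version call rejects it on interpreters older than 3.12:
#
#     type Vector[T] = list[tuple[T, T]]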
@memoize
def type_params(self) -> Optional[list]:
# type_params: '[' type_param_seq ']'
mark = self._mark()
if (
(self.expect('['))
and
(t := self.type_param_seq())
and
(self.expect(']'))
):
return self . check_version ( ( 3 , 12 ) , "Type parameter lists are" , t );
self._reset(mark)
return None;
@memoize
def type_param_seq(self) -> Optional[Any]:
# type_param_seq: ','.type_param+ ','?
mark = self._mark()
if (
(a := self._gather_101())
and
(self.expect(','),)
):
return a;
self._reset(mark)
return None;
@memoize
def type_param(self) -> Optional[Any]:
# type_param: NAME type_param_bound? | '*' NAME ":" expression | '*' NAME | '**' NAME ":" expression | '**' NAME
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.name())
and
(b := self.type_param_bound(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . TypeVar ( name = a . string , bound = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 12 ) else object ( );
self._reset(mark)
if (
(self.expect('*'))
and
(self.name())
and
(colon := self.expect(":"))
and
(e := self.expression())
):
return self . raise_syntax_error_starting_from ( "cannot use constraints with TypeVarTuple" if isinstance ( e , ast . Tuple ) else "cannot use bound with TypeVarTuple" , colon );
self._reset(mark)
if (
(self.expect('*'))
and
(a := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . TypeVarTuple ( name = a . string , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 12 ) else object ( );
self._reset(mark)
if (
(self.expect('**'))
and
(self.name())
and
(colon := self.expect(":"))
and
(e := self.expression())
):
return self . raise_syntax_error_starting_from ( "cannot use constraints with ParamSpec" if isinstance ( e , ast . Tuple ) else "cannot use bound with ParamSpec" , colon );
self._reset(mark)
if (
(self.expect('**'))
and
(a := self.name())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . ParamSpec ( name = a . string , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 12 ) else object ( );
self._reset(mark)
return None;
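# Illustration of the five alternatives above, in PEP 695 syntax: a
# plain parameter `T` (optionally bounded, `T: int`) becomes
# `ast.TypeVar`; `*Ts` becomes `ast.TypeVarTuple`; `**P` becomes
# `ast.ParamSpec`. The two alternatives matching a ':' after `*NAME` or
# `**NAME` exist only to raise a targeted error, since bounds and
# constraints are not allowed there (e.g. `def f[*Ts: int](): ...`).
# On interpreters before 3.12 a placeholder `object()` is returned; the
# enclosing `type_params` rule rejects the syntax via check_version.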
@memoize
def type_param_bound(self) -> Optional[Any]:
# type_param_bound: ":" expression
mark = self._mark()
if (
(self.expect(":"))
and
(e := self.expression())
):
return e;
self._reset(mark)
return None;
@memoize
def expressions(self) -> Optional[Any]:
# expressions: expression ((',' expression))+ ','? | expression ',' | expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.expression())
and
(b := self._loop1_103())
and
(self.expect(','),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Tuple ( elts = [a] + b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(a := self.expression())
and
(self.expect(','))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Tuple ( elts = [a] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(expression := self.expression())
):
return expression;
self._reset(mark)
return None;
@memoize
def expression(self) -> Optional[Any]:
# expression: invalid_scenic_instance_creation | invalid_expression | invalid_legacy_expression | disjunction 'if' disjunction 'else' disjunction | disjunction | lambdef
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_scenic_instance_creation())
):
return None # pragma: no cover;
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_expression())
):
return None # pragma: no cover;
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_legacy_expression())
):
return None # pragma: no cover;
self._reset(mark)
if (
(a := self.disjunction())
and
(self.expect('if'))
and
(b := self.disjunction())
and
(self.expect('else'))
and
(c := self.disjunction())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . IfExp ( body = a , test = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(disjunction := self.disjunction())
):
return disjunction;
self._reset(mark)
if (
(lambdef := self.lambdef())
):
return lambdef;
self._reset(mark)
return None;
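# Notes on the `expression` rule: the three `invalid_*` alternatives run
# only when `call_invalid_rules` is set, i.e. on the second parsing pass
# pegen uses purely to produce a better error message. For the
# conditional form, input `a if b else c` binds body=a, test=b,
# orelse=c in the resulting `ast.IfExp`: the test is the middle operand.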
@memoize
def scenic_temporal_expression(self) -> Optional[Any]:
# scenic_temporal_expression: invalid_expression | invalid_legacy_expression | scenic_until 'if' scenic_until 'else' scenic_until | scenic_until | lambdef
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_expression())
):
return None # pragma: no cover;
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_legacy_expression())
):
return None # pragma: no cover;
self._reset(mark)
if (
(a := self.scenic_until())
and
(self.expect('if'))
and
(b := self.scenic_until())
and
(self.expect('else'))
and
(c := self.scenic_until())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . IfExp ( body = a , test = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(scenic_until := self.scenic_until())
):
return scenic_until;
self._reset(mark)
if (
(lambdef := self.lambdef())
):
return lambdef;
self._reset(mark)
return None;
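# `scenic_temporal_expression` mirrors `expression` but routes through
# `scenic_until`, so temporal operators are admitted. Illustrative
# Scenic input (e.g. inside a `require` statement):
#     require (x > 0) until done
# The conditional alternative likewise builds an `ast.IfExp` whose
# operands may themselves be temporal formulas.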
@memoize
def yield_expr(self) -> Optional[Any]:
# yield_expr: 'yield' 'from' expression | 'yield' star_expressions?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('yield'))
and
(self.expect('from'))
and
(a := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . YieldFrom ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('yield'))
and
(a := self.star_expressions(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Yield ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
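# Illustration: `yield from gen()` builds `ast.YieldFrom`, while a bare
# `yield` or `yield x, y` builds `ast.Yield`. The trailing-comma idiom
# `(a := self.star_expressions(),)` makes that operand optional, so `a`
# may be None for a bare `yield`.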
@memoize
def star_expressions(self) -> Optional[Any]:
# star_expressions: star_expression ((',' star_expression))+ ','? | star_expression ',' | star_expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.star_expression())
and
(b := self._loop1_104())
and
(self.expect(','),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Tuple ( elts = [a] + b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(a := self.star_expression())
and
(self.expect(','))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Tuple ( elts = [a] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(star_expression := self.star_expression())
):
return star_expression;
self._reset(mark)
return None;
@memoize
def star_expression(self) -> Optional[Any]:
# star_expression: '*' bitwise_or | expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('*'))
and
(a := self.bitwise_or())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(expression := self.expression())
):
return expression;
self._reset(mark)
return None;
@memoize
def star_named_expressions(self) -> Optional[Any]:
# star_named_expressions: ','.star_named_expression+ ','?
mark = self._mark()
if (
(a := self._gather_105())
and
(self.expect(','),)
):
return a;
self._reset(mark)
return None;
@memoize
def star_named_expression(self) -> Optional[Any]:
# star_named_expression: '*' bitwise_or | named_expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('*'))
and
(a := self.bitwise_or())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(named_expression := self.named_expression())
):
return named_expression;
self._reset(mark)
return None;
@memoize
def assignment_expression(self) -> Optional[Any]:
# assignment_expression: NAME ':=' ~ expression
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
cut = False
if (
(a := self.name())
and
(self.expect(':='))
and
(cut := True)
and
(b := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return self . check_version ( ( 3 , 8 ) , "The ':=' operator is" , ast . NamedExpr ( target = ast . Name ( id = a . string , ctx = Store , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] ) , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
self._reset(mark)
if cut:
return None;
return None;
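# The `~` (cut) in the grammar compiles to the `cut` flag above: once
# ':=' has been consumed, this alternative commits, so a malformed
# right-hand side fails the rule outright instead of backtracking into
# other alternatives. `check_version` gates the walrus operator on
# Python 3.8. Illustrative input: `while (chunk := read()): ...`
# (`read` illustrative).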
@memoize
def named_expression(self) -> Optional[Any]:
# named_expression: assignment_expression | invalid_named_expression | expression !':='
mark = self._mark()
if (
(assignment_expression := self.assignment_expression())
):
return assignment_expression;
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_named_expression())
):
return None # pragma: no cover;
self._reset(mark)
if (
(a := self.expression())
and
(self.negative_lookahead(self.expect, ':='))
):
return a;
self._reset(mark)
return None;
@logger
def scenic_until(self) -> Optional[Any]:
# scenic_until: invalid_scenic_until | scenic_above_until 'until' scenic_above_until | scenic_above_until
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_scenic_until())
):
return None # pragma: no cover;
self._reset(mark)
if (
(a := self.scenic_above_until())
and
(self.expect('until'))
and
(b := self.scenic_above_until())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . UntilOp ( a , b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(scenic_above_until := self.scenic_above_until())
):
return scenic_above_until;
self._reset(mark)
return None;
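# Illustration (Scenic): `A until B` builds `s.UntilOp(A, B)`. Both
# operands are `scenic_above_until`, so `until` does not chain:
# `A until B until C` is not matched here and is left to
# `invalid_scenic_until` on the error pass.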
@logger
def scenic_above_until(self) -> Optional[Any]:
# scenic_above_until: scenic_temporal_prefix | scenic_implication
mark = self._mark()
if (
(scenic_temporal_prefix := self.scenic_temporal_prefix())
):
return scenic_temporal_prefix;
self._reset(mark)
if (
(scenic_implication := self.scenic_implication())
):
return scenic_implication;
self._reset(mark)
return None;
@memoize
def scenic_temporal_prefix(self) -> Optional[Any]:
# scenic_temporal_prefix: "next" scenic_above_until | "eventually" scenic_above_until | "always" scenic_above_until
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("next"))
and
(e := self.scenic_above_until())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . Next ( e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("eventually"))
and
(e := self.scenic_above_until())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . Eventually ( e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("always"))
and
(e := self.scenic_above_until())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . Always ( e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
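# The three prefix operators recurse into `scenic_above_until`, so they
# nest to the right: Scenic input `eventually always done` parses as
# `s.Eventually(s.Always(done))`.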
@logger
def scenic_implication(self) -> Optional[Any]:
# scenic_implication: invalid_scenic_implication | scenic_temporal_disjunction "implies" (scenic_temporal_prefix | scenic_temporal_disjunction) | scenic_temporal_disjunction
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
self.call_invalid_rules
and
(self.invalid_scenic_implication())
):
return None # pragma: no cover;
self._reset(mark)
if (
(a := self.scenic_temporal_disjunction())
and
(self.expect("implies"))
and
(b := self._tmp_107())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . ImpliesOp ( a , b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(scenic_temporal_disjunction := self.scenic_temporal_disjunction())
):
return scenic_temporal_disjunction;
self._reset(mark)
return None;
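# Illustration (Scenic): `p implies eventually q` builds
# `s.ImpliesOp(p, s.Eventually(q))`; per `_tmp_107`, the right-hand side
# may be a temporal prefix or a plain temporal disjunction.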
@memoize
def disjunction(self) -> Optional[Any]:
# disjunction: conjunction (('or' conjunction))+ | conjunction
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.conjunction())
and
(b := self._loop1_108())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BoolOp ( op = ast . Or ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(conjunction := self.conjunction())
):
return conjunction;
self._reset(mark)
return None;
@memoize
def scenic_temporal_disjunction(self) -> Optional[Any]:
# scenic_temporal_disjunction: scenic_temporal_conjunction (('or' (scenic_temporal_prefix | scenic_temporal_conjunction)))+ | scenic_temporal_conjunction
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.scenic_temporal_conjunction())
and
(b := self._loop1_109())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BoolOp ( op = ast . Or ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(scenic_temporal_conjunction := self.scenic_temporal_conjunction())
):
return scenic_temporal_conjunction;
self._reset(mark)
return None;
@memoize
def conjunction(self) -> Optional[Any]:
# conjunction: inversion (('and' inversion))+ | inversion
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.inversion())
and
(b := self._loop1_110())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BoolOp ( op = ast . And ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(inversion := self.inversion())
):
return inversion;
self._reset(mark)
return None;
@memoize
def scenic_temporal_conjunction(self) -> Optional[Any]:
# scenic_temporal_conjunction: scenic_temporal_inversion (('and' (scenic_temporal_prefix | scenic_temporal_inversion)))+ | scenic_temporal_inversion
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.scenic_temporal_inversion())
and
(b := self._loop1_111())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BoolOp ( op = ast . And ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(scenic_temporal_inversion := self.scenic_temporal_inversion())
):
return scenic_temporal_inversion;
self._reset(mark)
return None;
@memoize
def inversion(self) -> Optional[Any]:
# inversion: 'not' !("visible" inversion) inversion | comparison
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('not'))
and
(self.negative_lookahead(self._tmp_112, ))
and
(a := self.inversion())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . UnaryOp ( op = ast . Not ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(comparison := self.comparison())
):
return comparison;
self._reset(mark)
return None;
@memoize
def scenic_temporal_inversion(self) -> Optional[Any]:
# scenic_temporal_inversion: 'not' !("visible" scenic_temporal_inversion) (scenic_temporal_prefix | scenic_temporal_inversion) | scenic_temporal_group | comparison
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('not'))
and
(self.negative_lookahead(self._tmp_113, ))
and
(a := self._tmp_114())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . UnaryOp ( op = ast . Not ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(scenic_temporal_group := self.scenic_temporal_group())
):
return scenic_temporal_group;
self._reset(mark)
if (
(comparison := self.comparison())
):
return comparison;
self._reset(mark)
return None;
@memoize
def scenic_temporal_group(self) -> Optional[Any]:
# scenic_temporal_group: '(' scenic_temporal_expression ')' &('until' | 'or' | 'and' | ')' | ';' | NEWLINE)
mark = self._mark()
if (
(self.expect('('))
and
(a := self.scenic_temporal_expression())
and
(self.expect(')'))
and
(self.positive_lookahead(self._tmp_115, ))
):
return a;
self._reset(mark)
return None;
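# The trailing positive lookahead (`_tmp_115`) only accepts a
# parenthesized temporal expression when it is followed by 'until',
# 'or', 'and', ')', ';' or a NEWLINE. Anything else (say, `(x) + 1`)
# makes this rule backtrack, so the parentheses are parsed as an
# ordinary grouped expression instead.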
@memoize
def scenic_new_expr(self) -> Optional[Any]:
# scenic_new_expr: 'new' NAME scenic_specifiers?
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('new'))
and
(n := self.name())
and
(ss := self.scenic_specifiers(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . New ( className = n . string , specifiers = ss , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
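# Illustration (Scenic, names illustrative):
#     new Car at spot, with color 'red'
# yields `s.New(className='Car', specifiers=[...])`. The specifier list
# is optional; with the tuple trick on `scenic_specifiers()`, `ss` is
# None when no specifiers follow.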
@memoize
def scenic_specifiers(self) -> Optional[Any]:
# scenic_specifiers: ','.scenic_specifier+
mark = self._mark()
if (
(ss := self._gather_116())
):
return ss;
self._reset(mark)
return None;
@memoize
def scenic_specifier(self) -> Optional[Any]:
# scenic_specifier: scenic_valid_specifier | invalid_scenic_specifier
mark = self._mark()
if (
(scenic_valid_specifier := self.scenic_valid_specifier())
):
return scenic_valid_specifier;
self._reset(mark)
if (
self.call_invalid_rules
and
(self.invalid_scenic_specifier())
):
return None # pragma: no cover;
self._reset(mark)
return None;
@memoize
def scenic_valid_specifier(self) -> Optional[Any]:
# scenic_valid_specifier: 'with' NAME expression | 'at' expression | "offset" 'by' expression | "offset" "along" expression 'by' expression | scenic_specifier_position_direction expression ['by' expression] | "beyond" expression 'by' expression ['from' expression] | "visible" ['from' expression] | 'not' "visible" ['from' expression] | 'in' expression | 'on' expression | "contained" 'in' expression | "following" expression ['from' expression] 'for' expression | "facing" "toward" expression | "facing" "away" "from" expression | "facing" "directly" "toward" expression | "facing" "directly" "away" "from" expression | "facing" expression | "apparently" "facing" expression ['from' expression]
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect('with'))
and
(p := self.name())
and
(v := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . WithSpecifier ( prop = p . string , value = v , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('at'))
and
(position := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . AtSpecifier ( position = position , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("offset"))
and
(self.expect('by'))
and
(o := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . OffsetBySpecifier ( offset = o , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("offset"))
and
(self.expect("along"))
and
(d := self.expression())
and
(self.expect('by'))
and
(o := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . OffsetAlongSpecifier ( direction = d , offset = o , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(direction := self.scenic_specifier_position_direction())
and
(position := self.expression())
and
(distance := self._tmp_118(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . DirectionOfSpecifier ( direction = direction , position = position , distance = distance , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("beyond"))
and
(v := self.expression())
and
(self.expect('by'))
and
(o := self.expression())
and
(b := self._tmp_119(),)
):
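# Note: unlike the sibling alternatives in this rule, this action does
# not compute end positions or attach location info to the
# BeyondSpecifier node.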
return s . BeyondSpecifier ( position = v , offset = o , base = b );
self._reset(mark)
if (
(self.expect("visible"))
and
(b := self._tmp_120(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . VisibleSpecifier ( base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('not'))
and
(self.expect("visible"))
and
(b := self._tmp_121(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . NotVisibleSpecifier ( base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('in'))
and
(r := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . InSpecifier ( region = r , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect('on'))
and
(r := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . OnSpecifier ( region = r , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("contained"))
and
(self.expect('in'))
and
(r := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . ContainedInSpecifier ( region = r , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("following"))
and
(f := self.expression())
and
(b := self._tmp_122(),)
and
(self.expect('for'))
and
(d := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . FollowingSpecifier ( field = f , distance = d , base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("facing"))
and
(self.expect("toward"))
and
(p := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . FacingTowardSpecifier ( position = p , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("facing"))
and
(self.expect("away"))
and
(self.expect("from"))
and
(p := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . FacingAwayFromSpecifier ( position = p , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("facing"))
and
(self.expect("directly"))
and
(self.expect("toward"))
and
(p := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . FacingDirectlyTowardSpecifier ( position = p , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("facing"))
and
(self.expect("directly"))
and
(self.expect("away"))
and
(self.expect("from"))
and
(p := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . FacingDirectlyAwayFromSpecifier ( position = p , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("facing"))
and
(h := self.expression())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . FacingSpecifier ( heading = h , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("apparently"))
and
(self.expect("facing"))
and
(h := self.expression())
and
(v := self._tmp_123(),)
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . ApparentlyFacingSpecifier ( heading = h , base = v , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
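# PEG alternatives are tried in order, so the multiword "facing" forms
# ("facing toward", "facing away from", "facing directly ...") are
# listed before the bare `"facing" expression` alternative; otherwise
# `facing toward spot` could match the bare form, with `toward`
# consumed as the heading expression.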
@memoize
def scenic_specifier_position_direction(self) -> Optional[Any]:
# scenic_specifier_position_direction: "left" "of" | "right" "of" | "ahead" "of" | "behind" | "above" | "below"
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(self.expect("left"))
and
(self.expect("of"))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . LeftOf ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("right"))
and
(self.expect("of"))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . RightOf ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("ahead"))
and
(self.expect("of"))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . AheadOf ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("behind"))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . Behind ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("above"))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . Above ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(self.expect("below"))
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . Below ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize
def comparison(self) -> Optional[Any]:
# comparison: bitwise_or compare_op_bitwise_or_pair+ | bitwise_or
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_or())
and
(b := self._loop1_124())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . Compare ( left = a , ops = self . get_comparison_ops ( b ) , comparators = self . get_comparators ( b ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(bitwise_or := self.bitwise_or())
):
return bitwise_or;
self._reset(mark)
return None;
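# Chained comparisons collapse into a single node: for `a < b < c`,
# `_loop1_124` collects the `(op, operand)` pairs and
# `get_comparison_ops` / `get_comparators` split them into
# `ast.Compare(left=a, ops=[Lt, Lt], comparators=[b, c])`.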
@memoize
def compare_op_bitwise_or_pair(self) -> Optional[Any]:
# compare_op_bitwise_or_pair: eq_bitwise_or | noteq_bitwise_or | lte_bitwise_or | lt_bitwise_or | gte_bitwise_or | gt_bitwise_or | notin_bitwise_or | in_bitwise_or | isnot_bitwise_or | is_bitwise_or
mark = self._mark()
if (
(eq_bitwise_or := self.eq_bitwise_or())
):
return eq_bitwise_or;
self._reset(mark)
if (
(noteq_bitwise_or := self.noteq_bitwise_or())
):
return noteq_bitwise_or;
self._reset(mark)
if (
(lte_bitwise_or := self.lte_bitwise_or())
):
return lte_bitwise_or;
self._reset(mark)
if (
(lt_bitwise_or := self.lt_bitwise_or())
):
return lt_bitwise_or;
self._reset(mark)
if (
(gte_bitwise_or := self.gte_bitwise_or())
):
return gte_bitwise_or;
self._reset(mark)
if (
(gt_bitwise_or := self.gt_bitwise_or())
):
return gt_bitwise_or;
self._reset(mark)
if (
(notin_bitwise_or := self.notin_bitwise_or())
):
return notin_bitwise_or;
self._reset(mark)
if (
(in_bitwise_or := self.in_bitwise_or())
):
return in_bitwise_or;
self._reset(mark)
if (
(isnot_bitwise_or := self.isnot_bitwise_or())
):
return isnot_bitwise_or;
self._reset(mark)
if (
(is_bitwise_or := self.is_bitwise_or())
):
return is_bitwise_or;
self._reset(mark)
return None;
@memoize
def eq_bitwise_or(self) -> Optional[Any]:
# eq_bitwise_or: '==' bitwise_or
mark = self._mark()
if (
(self.expect('=='))
and
(a := self.bitwise_or())
):
return ( ast . Eq ( ) , a );
self._reset(mark)
return None;
@memoize
def noteq_bitwise_or(self) -> Optional[tuple]:
# noteq_bitwise_or: '!=' bitwise_or
mark = self._mark()
if (
(self.expect('!='))
and
(a := self.bitwise_or())
):
return ( ast . NotEq ( ) , a );
self._reset(mark)
return None;
@memoize
def lte_bitwise_or(self) -> Optional[Any]:
# lte_bitwise_or: '<=' bitwise_or
mark = self._mark()
if (
(self.expect('<='))
and
(a := self.bitwise_or())
):
return ( ast . LtE ( ) , a );
self._reset(mark)
return None;
@memoize
def lt_bitwise_or(self) -> Optional[Any]:
# lt_bitwise_or: '<' bitwise_or
mark = self._mark()
if (
(self.expect('<'))
and
(a := self.bitwise_or())
):
return ( ast . Lt ( ) , a );
self._reset(mark)
return None;
@memoize
def gte_bitwise_or(self) -> Optional[Any]:
# gte_bitwise_or: '>=' bitwise_or
mark = self._mark()
if (
(self.expect('>='))
and
(a := self.bitwise_or())
):
return ( ast . GtE ( ) , a );
self._reset(mark)
return None;
@memoize
def gt_bitwise_or(self) -> Optional[Any]:
# gt_bitwise_or: '>' bitwise_or
mark = self._mark()
if (
(self.expect('>'))
and
(a := self.bitwise_or())
):
return ( ast . Gt ( ) , a );
self._reset(mark)
return None;
@memoize
def notin_bitwise_or(self) -> Optional[Any]:
# notin_bitwise_or: 'not' 'in' bitwise_or
mark = self._mark()
if (
(self.expect('not'))
and
(self.expect('in'))
and
(a := self.bitwise_or())
):
return ( ast . NotIn ( ) , a );
self._reset(mark)
return None;
@memoize
def in_bitwise_or(self) -> Optional[Any]:
# in_bitwise_or: 'in' bitwise_or
mark = self._mark()
if (
(self.expect('in'))
and
(a := self.bitwise_or())
):
return ( ast . In ( ) , a );
self._reset(mark)
return None;
@memoize
def isnot_bitwise_or(self) -> Optional[Any]:
# isnot_bitwise_or: 'is' 'not' bitwise_or
mark = self._mark()
if (
(self.expect('is'))
and
(self.expect('not'))
and
(a := self.bitwise_or())
):
return ( ast . IsNot ( ) , a );
self._reset(mark)
return None;
@memoize
def is_bitwise_or(self) -> Optional[Any]:
# is_bitwise_or: 'is' bitwise_or
mark = self._mark()
if (
(self.expect('is'))
and
(a := self.bitwise_or())
):
return ( ast . Is ( ) , a );
self._reset(mark)
return None;
@memoize_left_rec
def bitwise_or(self) -> Optional[Any]:
# bitwise_or: scenic_visible_from | scenic_not_visible_from | scenic_can_see | scenic_intersects | bitwise_or '|' bitwise_xor | bitwise_xor
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(scenic_visible_from := self.scenic_visible_from())
):
return scenic_visible_from;
self._reset(mark)
if (
(scenic_not_visible_from := self.scenic_not_visible_from())
):
return scenic_not_visible_from;
self._reset(mark)
if (
(scenic_can_see := self.scenic_can_see())
):
return scenic_can_see;
self._reset(mark)
if (
(scenic_intersects := self.scenic_intersects())
):
return scenic_intersects;
self._reset(mark)
if (
(a := self.bitwise_or())
and
(self.expect('|'))
and
(b := self.bitwise_xor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . BitOr ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(bitwise_xor := self.bitwise_xor())
):
return bitwise_xor;
self._reset(mark)
return None;
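# `@memoize_left_rec` handles the left recursion in
# `bitwise_or '|' bitwise_xor`. Scenic hooks its relational operators in
# at this precedence level, so e.g. `ego can see car` becomes
# `s.CanSeeOp` and `a intersects b` becomes `s.IntersectsOp` (operand
# names illustrative).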
@logger
def scenic_visible_from(self) -> Optional[Any]:
# scenic_visible_from: bitwise_or "visible" 'from' bitwise_xor
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_or())
and
(self.expect("visible"))
and
(self.expect('from'))
and
(b := self.bitwise_xor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . VisibleFromOp ( region = a , base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@logger
def scenic_not_visible_from(self) -> Optional[Any]:
# scenic_not_visible_from: bitwise_or "not" "visible" 'from' bitwise_xor
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_or())
and
(self.expect("not"))
and
(self.expect("visible"))
and
(self.expect('from'))
and
(b := self.bitwise_xor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . NotVisibleFromOp ( region = a , base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@logger
def scenic_can_see(self) -> Optional[Any]:
# scenic_can_see: bitwise_or "can" "see" bitwise_xor
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_or())
and
(self.expect("can"))
and
(self.expect("see"))
and
(b := self.bitwise_xor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . CanSeeOp ( left = a , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@logger
def scenic_intersects(self) -> Optional[Any]:
# scenic_intersects: bitwise_or "intersects" bitwise_xor
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_or())
and
(self.expect("intersects"))
and
(b := self.bitwise_xor())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . IntersectsOp ( left = a , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
@memoize_left_rec
def bitwise_xor(self) -> Optional[Any]:
# bitwise_xor: scenic_offset_along | bitwise_xor '^' bitwise_and | bitwise_and
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(scenic_offset_along := self.scenic_offset_along())
):
return scenic_offset_along;
self._reset(mark)
if (
(a := self.bitwise_xor())
and
(self.expect('^'))
and
(b := self.bitwise_and())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return ast . BinOp ( left = a , op = ast . BitXor ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
if (
(bitwise_and := self.bitwise_and())
):
return bitwise_and;
self._reset(mark)
return None;
@logger
def scenic_offset_along(self) -> Optional[Any]:
# scenic_offset_along: bitwise_xor "offset" "along" bitwise_xor 'by' bitwise_and
mark = self._mark()
tok = self._tokenizer.peek()
start_lineno, start_col_offset = tok.start
if (
(a := self.bitwise_xor())
and
(self.expect("offset"))
and
(self.expect("along"))
and
(b := self.bitwise_xor())
and
(self.expect('by'))
and
(c := self.bitwise_and())
):
tok = self._tokenizer.get_last_non_whitespace_token()
end_lineno, end_col_offset = tok.end
return s . OffsetAlongOp ( base = a , direction = b , offset = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
self._reset(mark)
return None;
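# Illustration (Scenic, operands illustrative): `base offset along
# direction by offset` builds `s.OffsetAlongOp`; compare the specifier
# form `offset along D by O` in scenic_valid_specifier above, which
# instead builds `s.OffsetAlongSpecifier`.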
@memoize_left_rec
def bitwise_and(self) -> Optional[Any]: