#!/usr/bin/env python3.8
# @generated by pegen from /home/docs/checkouts/readthedocs.org/user_builds/scenic-lang/envs/latest/lib/python3.10/site-packages/scenic/syntax/scenic.gram
import ast
import sys
import tokenize
from typing import Any, Optional
from pegen.parser import memoize, memoize_left_rec, logger, Parser
import enum
import io
import itertools
import os
import sys
import token
from typing import (
Any, Callable, Iterator, List, Literal, NoReturn, Sequence, Tuple, TypeVar, Union
)
from pegen.tokenizer import Tokenizer
import scenic.syntax.ast as s
from scenic.core.errors import ScenicParseError
# Singleton ast nodes, created once for efficiency
Load = ast.Load()
Store = ast.Store()
Del = ast.Del()

# Type variables for the helper signatures in the Parser class below.
Node = TypeVar("Node")
# FC: any AST definition node that carries a decorator_list.
FC = TypeVar("FC", ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)

# Maps expression node types to the human-readable names used when
# reporting "cannot assign to ..." / "cannot delete ..." errors
# (see Parser.get_expr_name).
EXPR_NAME_MAPPING = {
    ast.Attribute: "attribute",
    ast.Subscript: "subscript",
    ast.Starred: "starred",
    ast.Name: "name",
    ast.List: "list",
    ast.Tuple: "tuple",
    ast.Lambda: "lambda",
    ast.Call: "function call",
    ast.BoolOp: "expression",
    ast.BinOp: "expression",
    ast.UnaryOp: "expression",
    ast.GeneratorExp: "generator expression",
    ast.Yield: "yield expression",
    ast.YieldFrom: "yield expression",
    ast.Await: "await expression",
    ast.ListComp: "list comprehension",
    ast.SetComp: "set comprehension",
    ast.DictComp: "dict comprehension",
    ast.Dict: "dict literal",
    ast.Set: "set display",
    ast.JoinedStr: "f-string expression",
    ast.FormattedValue: "f-string expression",
    ast.Compare: "comparison",
    ast.IfExp: "conditional expression",
    ast.NamedExpr: "named expression",
}
def parse_file(
    path: str,
    py_version: Optional[tuple] = None,
    token_stream_factory: Optional[
        Callable[[Callable[[], str]], Iterator[tokenize.TokenInfo]]
    ] = None,
    verbose: bool = False,
) -> ast.Module:
    """Parse a Scenic source file into an ``ast.Module``.

    Args:
        path: Path of the file to parse.
        py_version: Maximum Python version whose syntax is accepted; the
            parser clamps it to the running interpreter's version.
        token_stream_factory: Optional factory building the token stream
            from a ``readline`` callable; defaults to
            ``tokenize.generate_tokens``.
        verbose: Enable verbose parser/tokenizer tracing.

    Raises:
        ScenicParseError: If the source contains a syntax error.
    """
    # NOTE: a stray Sphinx "[docs]" link artifact was fused to this `def`
    # line in the scraped copy; it has been removed to restore valid syntax.
    with open(path) as f:
        tok_stream = (
            token_stream_factory(f.readline)
            if token_stream_factory
            else tokenize.generate_tokens(f.readline)
        )
        tokenizer = Tokenizer(tok_stream, verbose=verbose, path=path)
        parser = ScenicParser(
            tokenizer,
            verbose=verbose,
            filename=os.path.basename(path),
            py_version=py_version,
        )
        return parser.parse("file")
def parse_string(
    source: str,
    mode: Union[Literal["eval"], Literal["exec"]],
    py_version: Optional[tuple] = None,
    token_stream_factory: Optional[
        Callable[[Callable[[], str]], Iterator[tokenize.TokenInfo]]
    ] = None,
    verbose: bool = False,
    filename: str = "<unknown>",
) -> Any:
    """Parse Scenic source code given as a string.

    Args:
        source: The source text to parse.
        mode: ``"eval"`` to parse a single expression, ``"exec"`` to
            parse a whole module.
        py_version: Maximum Python version whose syntax is accepted.
        token_stream_factory: Optional factory building the token stream
            from a ``readline`` callable; defaults to
            ``tokenize.generate_tokens``.
        verbose: Enable verbose parser/tokenizer tracing.
        filename: Name used in error reports.

    Raises:
        ScenicParseError: If the source contains a syntax error.
    """
    # NOTE: a stray Sphinx "[docs]" link artifact was fused to this `def`
    # line in the scraped copy; it has been removed to restore valid syntax.
    tok_stream = (
        token_stream_factory(io.StringIO(source).readline)
        if token_stream_factory
        else tokenize.generate_tokens(io.StringIO(source).readline)
    )
    tokenizer = Tokenizer(tok_stream, verbose=verbose)
    parser = ScenicParser(tokenizer, verbose=verbose, py_version=py_version, filename=filename)
    return parser.parse(mode if mode == "eval" else "file")
class Target(enum.Enum):
    """Kind of assignment/deletion target, used to tailor error messages.

    A stray Sphinx "[docs]" link artifact was fused to the ``class`` line
    in the scraped copy; it has been removed to restore valid syntax.
    """

    FOR_TARGETS = enum.auto()
    STAR_TARGETS = enum.auto()
    DEL_TARGETS = enum.auto()
class Parser(Parser):
    """Base parser extending the pegen runtime parser with Scenic helpers.

    Adds Python-version gating, AST construction helpers used by generated
    rule actions, and error reporting that wraps ``SyntaxError`` /
    ``IndentationError`` in ``ScenicParseError``.
    """

    #: Name of the source file, used in error reports
    filename : str

    def __init__(self,
        tokenizer: Tokenizer, *,
        verbose: bool = False,
        filename: str = "<unknown>",
        py_version: Optional[tuple] = None,
    ) -> None:
        super().__init__(tokenizer, verbose=verbose)
        self.filename = filename
        # Clamp the requested version to the running interpreter: we can
        # never accept syntax newer than what this Python supports.
        self.py_version = min(py_version, sys.version_info) if py_version else sys.version_info

    def parse(self, rule: str, call_invalid_rules: bool = False) -> Optional[ast.AST]:
        """Run *rule*; on failure, re-parse with invalid rules enabled to
        produce a more specific error, falling back to a generic one."""
        self.call_invalid_rules = call_invalid_rules
        res = getattr(self, rule)()
        if res is None:
            # Grab the last token that was parsed in the first run to avoid
            # polluting a generic error report with progress made by invalid rules.
            last_token = self._tokenizer.diagnose()
            if not call_invalid_rules:
                self.call_invalid_rules = True
                # Reset the parser cache to be able to restart parsing from the
                # beginning.
                self._reset(0)  # type: ignore
                self._cache.clear()
                # The second run is expected to raise a targeted syntax error
                # from an invalid_* rule; if it does not, we fall through to
                # the generic error below.
                res = getattr(self, rule)()
            self.raise_raw_syntax_error("invalid syntax", last_token.start, last_token.end)
        return res

    def check_version(self, min_version: Tuple[int, ...], error_msg: str, node: Node) -> Node:
        """Check that the python version is high enough for a rule to apply.

        Returns *node* unchanged when allowed; raises ScenicParseError otherwise.
        """
        if self.py_version >= min_version:
            return node
        else:
            raise ScenicParseError(SyntaxError(
                f"{error_msg} is only supported in Python {min_version} and above."
            ))

    def raise_indentation_error(self, msg: str) -> None:
        """Raise an indentation error at the last parsed token."""
        last_token = self._tokenizer.diagnose()
        # tokenize columns are 0-based; SyntaxError-style args are 1-based.
        args = (self.filename, last_token.start[0], last_token.start[1] + 1, last_token.line)
        if sys.version_info >= (3, 10):
            # Python 3.10+ accepts an end position in the args tuple.
            args += (last_token.end[0], last_token.end[1] + 1)
        raise ScenicParseError(IndentationError(msg, args))

    def get_expr_name(self, node) -> str:
        """Get a descriptive name for an expression (for error messages)."""
        # See https://github.com/python/cpython/blob/master/Parser/pegen.c#L161
        assert node is not None
        node_t = type(node)
        if node_t is ast.Constant:
            v = node.value
            if v is Ellipsis:
                return "ellipsis"
            elif v is None:
                return str(v)
            # Avoid treating 1 as True through == comparison
            elif v is True:
                return str(v)
            elif v is False:
                return str(v)
            else:
                return "literal"
        try:
            return EXPR_NAME_MAPPING[node_t]
        except KeyError:
            raise ValueError(
                f"unexpected expression in assignment {type(node).__name__} "
                f"(line {node.lineno})."
            )

    def get_invalid_target(self, target: Target, node: Optional[ast.AST]) -> Optional[ast.AST]:
        """Get the meaningful invalid target for different assignment type.

        Returns the offending sub-node, or None when *node* is a valid target.
        """
        if node is None:
            return None
        # We only need to visit List and Tuple nodes recursively as those
        # are the only ones that can contain valid names in targets when
        # they are parsed as expressions. Any other kind of expression
        # that is a container (like Sets or Dicts) is directly invalid and
        # we do not need to visit it recursively.
        if isinstance(node, (ast.List, ast.Tuple)):
            for e in node.elts:
                if (inv := self.get_invalid_target(target, e)) is not None:
                    return inv
        elif isinstance(node, ast.Starred):
            if target is Target.DEL_TARGETS:
                return node
            return self.get_invalid_target(target, node.value)
        elif isinstance(node, ast.Compare):
            # This is needed, because the `a in b` in `for a in b` gets parsed
            # as a comparison, and so we need to search the left side of the comparison
            # for invalid targets.
            if target is Target.FOR_TARGETS:
                if isinstance(node.ops[0], ast.In):
                    return self.get_invalid_target(target, node.left)
                return None
            return node
        elif isinstance(node, (ast.Name, ast.Subscript, ast.Attribute)):
            return None
        else:
            return node

    def set_expr_context(self, node, context):
        """Set the context (Load, Store, Del) of an ast node."""
        node.ctx = context
        return node

    def ensure_real(self, number: ast.Constant) -> float:
        """Evaluate a NUMBER token and reject complex literals.

        NOTE(review): *number* is actually a tokenize.TokenInfo here (it has
        a ``.string`` attribute); the annotation follows the generated code.
        """
        value = ast.literal_eval(number.string)
        if type(value) is complex:
            self.raise_syntax_error_known_location("real number required in complex literal", number)
        return value

    def ensure_imaginary(self, number: ast.Constant) -> complex:
        """Evaluate a NUMBER token and require a complex (imaginary) literal."""
        value = ast.literal_eval(number.string)
        if type(value) is not complex:
            self.raise_syntax_error_known_location("imaginary number required in complex literal", number)
        return value

    def check_fstring_conversion(self, mark: tokenize.TokenInfo, name: tokenize.TokenInfo) -> tokenize.TokenInfo:
        """Validate an f-string ``!s`` / ``!r`` / ``!a`` conversion token."""
        if mark.lineno != name.lineno or mark.col_offset != name.col_offset:
            # BUG FIX: corrected the misspelling "exclamanation" in the
            # user-facing error message.
            self.raise_syntax_error_known_range(
                "f-string: conversion type must come right after the exclamation mark",
                mark,
                name
            )
        s = name.string
        if len(s) > 1 or s not in ("s", "r", "a"):
            self.raise_syntax_error_known_location(
                f"f-string: invalid conversion character '{s}': expected 's', 'r', or 'a'",
                name,
            )
        return name

    def _concat_strings_in_constant(self, parts) -> Union[str, bytes]:
        """Concatenate adjacent plain string tokens into one ast.Constant."""
        s = ast.literal_eval(parts[0].string)
        for ss in parts[1:]:
            s += ast.literal_eval(ss.string)
        args = dict(
            value=s,
            lineno=parts[0].start[0],
            col_offset=parts[0].start[1],
            end_lineno=parts[-1].end[0],
            # BUG FIX: the end column must come from the *last* part, matching
            # end_lineno above (previously used parts[0], yielding an
            # inconsistent end position for multi-token strings).
            end_col_offset=parts[-1].end[1],
        )
        if parts[0].string.startswith("u"):
            args["kind"] = "u"
        return ast.Constant(**args)

    def concatenate_strings(self, parts):
        """Concatenate multiple tokens and ast.JoinedStr"""
        # Get proper start and stop
        start = end = None
        if isinstance(parts[0], ast.JoinedStr):
            start = parts[0].lineno, parts[0].col_offset
        if isinstance(parts[-1], ast.JoinedStr):
            end = parts[-1].end_lineno, parts[-1].end_col_offset
        # Combine the different parts
        seen_joined = False
        values = []
        ss = []
        for p in parts:
            if isinstance(p, ast.JoinedStr):
                seen_joined = True
                if ss:
                    values.append(self._concat_strings_in_constant(ss))
                    ss.clear()
                values.extend(p.values)
            else:
                ss.append(p)
        if ss:
            values.append(self._concat_strings_in_constant(ss))
        # Merge adjacent Constant values produced by neighbouring parts.
        consolidated = []
        for p in values:
            if consolidated and isinstance(consolidated[-1], ast.Constant) and isinstance(p, ast.Constant):
                consolidated[-1].value += p.value
                consolidated[-1].end_lineno = p.end_lineno
                consolidated[-1].end_col_offset = p.end_col_offset
            else:
                consolidated.append(p)
        if not seen_joined and len(values) == 1 and isinstance(values[0], ast.Constant):
            return values[0]
        else:
            return ast.JoinedStr(
                values=consolidated,
                lineno=start[0] if start else values[0].lineno,
                col_offset=start[1] if start else values[0].col_offset,
                end_lineno=end[0] if end else values[-1].end_lineno,
                end_col_offset=end[1] if end else values[-1].end_col_offset,
            )

    def generate_ast_for_string(self, tokens):
        """Generate AST nodes for strings by re-parsing them with ast.parse."""
        err_args = None
        line_offset = tokens[0].start[0]
        line = line_offset
        col_offset = 0
        # Wrap the tokens in parentheses (starting on line 1) so implicit
        # string concatenation across lines parses as one expression.
        source = "(\n"
        for t in tokens:
            n_line = t.start[0] - line
            if n_line:
                col_offset = 0
            source += """\n""" * n_line + ' ' * (t.start[1] - col_offset) + t.string
            line, col_offset = t.end
        source += "\n)"
        try:
            m = ast.parse(source)
        except SyntaxError as err:
            args = (err.filename, err.lineno + line_offset - 2, err.offset, err.text)
            if sys.version_info >= (3, 10):
                args += (err.end_lineno + line_offset - 2, err.end_offset)
            err_args = (err.msg, args)
            # Ensure we do not keep the frame alive longer than necessary
            # by explicitly deleting the error once we got what we needed out
            # of it
            del err
        # Avoid getting a triple nesting in the error report that does not
        # bring anything relevant to the traceback.
        if err_args is not None:
            raise ScenicParseError(SyntaxError(*err_args))
        node = m.body[0].value
        # Since we asked Python to parse an altered source starting at line 2
        # we alter the lineno of the returned AST to recover the right line.
        # If the string starts at line 1, the AST says 2 so we need to decrement
        # by 1, hence the -2.
        ast.increment_lineno(node, line_offset - 2)
        return node

    def extract_import_level(self, tokens: List[tokenize.TokenInfo]) -> int:
        """Extract the relative import level from the tokens preceding the module name.

        '.' count for one and '...' for 3.
        """
        level = 0
        for t in tokens:
            if t.string == ".":
                level += 1
            else:
                # The tokenizer emits '...' as a single ELLIPSIS token.
                level += 3
        return level

    def set_decorators(self,
        target: FC,
        decorators: list
    ) -> FC:
        """Set the decorators on a function or class definition."""
        target.decorator_list = decorators
        return target

    def get_comparison_ops(self, pairs):
        """Extract the operator nodes from (op, comparator) pairs."""
        return [op for op, _ in pairs]

    def get_comparators(self, pairs):
        """Extract the comparator nodes from (op, comparator) pairs."""
        return [comp for _, comp in pairs]

    def set_arg_type_comment(self, arg, type_comment):
        """Attach a type comment to an argument node (pre-3.9 always sets it)."""
        if type_comment or sys.version_info < (3, 9):
            arg.type_comment = type_comment
        return arg

    def make_arguments(self,
        pos_only: Optional[List[Tuple[ast.arg, None]]],
        pos_only_with_default: List[Tuple[ast.arg, Any]],
        param_no_default: Optional[List[Tuple[ast.arg, None]]],
        param_default: Optional[List[Tuple[ast.arg, Any]]],
        after_star: Optional[Tuple[Optional[ast.arg], List[Tuple[ast.arg, Any]], Optional[ast.arg]]]
    ) -> ast.arguments:
        """Build a function definition arguments."""
        defaults = (
            [d for _, d in pos_only_with_default if d is not None]
            if pos_only_with_default else
            []
        )
        defaults += (
            [d for _, d in param_default if d is not None]
            if param_default else
            []
        )
        pos_only = pos_only or pos_only_with_default
        # Because we need to combine pos only with and without default even
        # the version with no default is a tuple
        pos_only = [p for p, _ in pos_only]
        params = (param_no_default or []) + ([p for p, _ in param_default] if param_default else [])
        # If after_star is None, make a default tuple
        after_star = after_star or (None, [], None)
        return ast.arguments(
            posonlyargs=pos_only,
            args=params,
            defaults=defaults,
            vararg=after_star[0],
            kwonlyargs=[p for p, _ in after_star[1]],
            kw_defaults=[d for _, d in after_star[1]],
            kwarg=after_star[2]
        )

    def _build_syntax_error(
        self,
        message: str,
        start: Optional[Tuple[int, int]] = None,
        end: Optional[Tuple[int, int]] = None
    ) -> "ScenicParseError":
        """Build (without raising) a ScenicParseError wrapping a SyntaxError."""
        line_from_token = start is None and end is None
        if start is None or end is None:
            tok = self._tokenizer.diagnose()
            start = start or tok.start
            end = end or tok.end
        if line_from_token:
            line = tok.line
        else:
            # End is used only to get the proper text
            line = "\n".join(
                self._tokenizer.get_lines(list(range(start[0], end[0] + 1)))
            )
        # tokenize.py index column offset from 0 while Cpython index column
        # offset at 1 when reporting SyntaxError, so we need to increment
        # the column offset when reporting the error.
        args = (self.filename, start[0], start[1] + 1, line)
        if sys.version_info >= (3, 10):
            args += (end[0], end[1] + 1)
        return ScenicParseError(SyntaxError(message, args))

    def raise_raw_syntax_error(
        self,
        message: str,
        start: Optional[Tuple[int, int]] = None,
        end: Optional[Tuple[int, int]] = None
    ) -> NoReturn:
        """Raise a syntax error at an explicit (line, column) range."""
        raise self._build_syntax_error(message, start, end)

    def make_syntax_error(self, message: str) -> "ScenicParseError":
        """Return (without raising) a syntax error at the current position."""
        return self._build_syntax_error(message)

    def expect_forced(self, res: Any, expectation: str) -> Optional[tokenize.TokenInfo]:
        """Raise "expected ..." when a forced token was not matched."""
        if res is None:
            last_token = self._tokenizer.diagnose()
            end = last_token.start
            # Token type 4 is NEWLINE; on 3.11 a NEWLINE's end would point
            # past the line, so keep the start position in that case.
            if sys.version_info >= (3, 12) or (sys.version_info >= (3, 11) and last_token.type != 4):
                end = last_token.end
            self.raise_raw_syntax_error(
                f"expected {expectation}", last_token.start, end
            )
        return res

    def raise_syntax_error(self, message: str) -> NoReturn:
        """Raise a syntax error at the last parsed token."""
        tok = self._tokenizer.diagnose()
        # Token type 4 is NEWLINE; see expect_forced for the rationale.
        raise self._build_syntax_error(message, tok.start, tok.end if sys.version_info >= (3, 12) or tok.type != 4 else tok.start)

    def raise_syntax_error_known_location(
        self, message: str, node: Union[ast.AST, tokenize.TokenInfo]
    ) -> NoReturn:
        """Raise a syntax error that occurred at a given AST node or token."""
        if isinstance(node, tokenize.TokenInfo):
            start = node.start
            end = node.end
        else:
            start = node.lineno, node.col_offset
            end = node.end_lineno, node.end_col_offset
        raise self._build_syntax_error(message, start, end)

    def raise_syntax_error_known_range(
        self,
        message: str,
        start_node: Union[ast.AST, tokenize.TokenInfo],
        end_node: Union[ast.AST, tokenize.TokenInfo]
    ) -> NoReturn:
        """Raise a syntax error spanning from *start_node* to *end_node*."""
        if isinstance(start_node, tokenize.TokenInfo):
            start = start_node.start
        else:
            start = start_node.lineno, start_node.col_offset
        if isinstance(end_node, tokenize.TokenInfo):
            end = end_node.end
        else:
            end = end_node.end_lineno, end_node.end_col_offset
        raise self._build_syntax_error(message, start, end)

    def raise_syntax_error_starting_from(
        self,
        message: str,
        start_node: Union[ast.AST, tokenize.TokenInfo]
    ) -> NoReturn:
        """Raise a syntax error from *start_node* up to the last parsed token."""
        if isinstance(start_node, tokenize.TokenInfo):
            start = start_node.start
        else:
            start = start_node.lineno, start_node.col_offset
        last_token = self._tokenizer.diagnose()
        raise self._build_syntax_error(message, start, last_token.start)

    def raise_syntax_error_invalid_target(
        self, target: Target, node: Optional[ast.AST]
    ) -> NoReturn:
        """Raise "cannot assign to/delete ..." for an invalid target.

        NOTE(review): despite the NoReturn annotation (kept from the
        generated code), this returns None when *node* is a valid target.
        """
        invalid_target = self.get_invalid_target(target, node)
        if invalid_target is None:
            return None
        if target in (Target.STAR_TARGETS, Target.FOR_TARGETS):
            msg = f"cannot assign to {self.get_expr_name(invalid_target)}"
        else:
            msg = f"cannot delete {self.get_expr_name(invalid_target)}"
        self.raise_syntax_error_known_location(msg, invalid_target)

    def raise_syntax_error_on_next_token(self, message: str) -> NoReturn:
        """Raise a syntax error at the next (not yet consumed) token."""
        next_token = self._tokenizer.peek()
        raise self._build_syntax_error(message, next_token.start, next_token.end)

    # scenic helpers
    def extend_new_specifiers(self, node: "s.New", specifiers: List[ast.AST]) -> "s.New":
        """Append extra specifiers to a Scenic `new` expression node."""
        node.specifiers.extend(specifiers)
        return node
# Keywords and soft keywords are listed at the end of the parser definition.
class ScenicParser(Parser):
@memoize
def start(self) -> Optional[Any]:
    """Entry rule: start: file"""
    mark = self._mark()
    module = self.file()
    if module:
        return module
    self._reset(mark)
    return None
@memoize
def file(self) -> Optional[ast . Module]:
    """Whole-module rule: optional statements followed by end of input."""
    # file: statements? $
    mark = self._mark()
    if (
        (a := self.statements(),)  # optional part: the 1-tuple is always truthy
        and
        (self.expect('ENDMARKER'))
    ):
        return ast . Module ( body = a or [] , type_ignores = [] );
    self._reset(mark)
    return None;
@memoize
def interactive(self) -> Optional[ast . Interactive]:
    """Interactive-mode rule: interactive: statement_newline"""
    mark = self._mark()
    body = self.statement_newline()
    if body:
        return ast.Interactive(body=body)
    self._reset(mark)
    return None
@memoize
def eval(self) -> Optional[ast . Expression]:
    """Eval-mode rule: an expression list, optional newlines, end of input."""
    # eval: expressions NEWLINE* $
    mark = self._mark()
    if (
        (a := self.expressions())
        and
        (self._loop0_1(),)  # NEWLINE*: zero-or-more, never fails
        and
        (self.expect('ENDMARKER'))
    ):
        return ast . Expression ( body = a );
    self._reset(mark)
    return None;
@memoize
def func_type(self) -> Optional[ast . FunctionType]:
    """Function type comment rule, e.g. ``(int, str) -> bool``."""
    # func_type: '(' type_expressions? ')' '->' expression NEWLINE* $
    mark = self._mark()
    if (
        (self.expect('('))
        and
        (a := self.type_expressions(),)  # optional argument types
        and
        (self.expect(')'))
        and
        (self.expect('->'))
        and
        (b := self.expression())
        and
        (self._loop0_2(),)  # NEWLINE*: zero-or-more, never fails
        and
        (self.expect('ENDMARKER'))
    ):
        return ast . FunctionType ( argtypes = a , returns = b );
    self._reset(mark)
    return None;
@memoize
def fstring(self) -> Optional[Any]:
    """Build an ast.JoinedStr from FSTRING_START/middle/FSTRING_END tokens."""
    # fstring: FSTRING_START fstring_mid* FSTRING_END
    mark = self._mark()
    # Record the start position before consuming any token.
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.fstring_start())
        and
        (b := self._loop0_3(),)  # fstring_mid*: zero-or-more middle parts
        and
        (self.fstring_end())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . JoinedStr ( values = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def statements(self) -> Optional[list]:
    """statements: statement+ — flatten the per-statement lists into one."""
    mark = self._mark()
    stmt_lists = self._loop1_4()
    if stmt_lists:
        return list(itertools.chain.from_iterable(stmt_lists))
    self._reset(mark)
    return None
@memoize
def statement(self) -> Optional[list]:
    """One statement; alternatives are tried in order (PEG ordered choice)."""
    # statement: scenic_compound_stmt | compound_stmt | scenic_stmts | simple_stmts
    mark = self._mark()
    if (
        (a := self.scenic_compound_stmt())
    ):
        return [a];
    self._reset(mark)
    if (
        (a := self.compound_stmt())
    ):
        return [a];
    self._reset(mark)
    if (
        (a := self.scenic_stmts())
    ):
        return a;
    self._reset(mark)
    if (
        (a := self.simple_stmts())
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def statement_newline(self) -> Optional[list]:
    """Single-statement rule for interactive mode; blank line yields Pass."""
    # statement_newline: compound_stmt NEWLINE | simple_stmts | NEWLINE | $
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.compound_stmt())
        and
        (self.expect('NEWLINE'))
    ):
        return [a];
    self._reset(mark)
    if (
        (simple_stmts := self.simple_stmts())
    ):
        return simple_stmts;
    self._reset(mark)
    if (
        (self.expect('NEWLINE'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # A bare NEWLINE becomes an explicit Pass node.
        return [ast . Pass ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )];
    self._reset(mark)
    if (
        (self.expect('ENDMARKER'))
    ):
        return None;
    self._reset(mark)
    return None;
@memoize
def simple_stmts(self) -> Optional[list]:
    """One or more ';'-separated simple statements ending in NEWLINE."""
    # simple_stmts: simple_stmt !';' NEWLINE | ';'.simple_stmt+ ';'? NEWLINE
    mark = self._mark()
    if (
        (a := self.simple_stmt())
        and
        (self.negative_lookahead(self.expect, ';'))  # fast path: single statement
        and
        (self.expect('NEWLINE'))
    ):
        return [a];
    self._reset(mark)
    if (
        (a := self._gather_5())  # ';'.simple_stmt+
        and
        (self.expect(';'),)  # optional trailing ';'
        and
        (self.expect('NEWLINE'))
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def scenic_stmts(self) -> Optional[list]:
    """One or more ';'-separated Scenic statements ending in NEWLINE."""
    # scenic_stmts: scenic_stmt !';' NEWLINE | ';'.scenic_stmt+ ';'? NEWLINE
    mark = self._mark()
    if (
        (a := self.scenic_stmt())
        and
        (self.negative_lookahead(self.expect, ';'))  # fast path: single statement
        and
        (self.expect('NEWLINE'))
    ):
        return [a];
    self._reset(mark)
    if (
        (a := self._gather_7())  # ';'.scenic_stmt+
        and
        (self.expect(';'),)  # optional trailing ';'
        and
        (self.expect('NEWLINE'))
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def simple_stmt(self) -> Optional[Any]:
    """A single simple (non-compound) Python statement.

    Alternatives are tried in the grammar's order; keyword lookaheads
    avoid attempting rules that cannot match.
    """
    # simple_stmt: assignment | &"type" type_alias | star_expressions | &'return' return_stmt | &('import' | 'from') import_stmt | &'raise' raise_stmt | 'pass' | &'del' del_stmt | &'yield' yield_stmt | &'assert' assert_stmt | 'break' | 'continue' | &'global' global_stmt | &'nonlocal' nonlocal_stmt
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (assignment := self.assignment())
    ):
        return assignment;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, "type"))
        and
        (type_alias := self.type_alias())
    ):
        return type_alias;
    self._reset(mark)
    if (
        (e := self.star_expressions())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Expr ( value = e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'return'))
        and
        (return_stmt := self.return_stmt())
    ):
        return return_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self._tmp_9, ))  # &('import' | 'from')
        and
        (import_stmt := self.import_stmt())
    ):
        return import_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'raise'))
        and
        (raise_stmt := self.raise_stmt())
    ):
        return raise_stmt;
    self._reset(mark)
    if (
        (self.expect('pass'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Pass ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'del'))
        and
        (del_stmt := self.del_stmt())
    ):
        return del_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'yield'))
        and
        (yield_stmt := self.yield_stmt())
    ):
        return yield_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'assert'))
        and
        (assert_stmt := self.assert_stmt())
    ):
        return assert_stmt;
    self._reset(mark)
    if (
        (self.expect('break'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Break ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('continue'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Continue ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'global'))
        and
        (global_stmt := self.global_stmt())
    ):
        return global_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'nonlocal'))
        and
        (nonlocal_stmt := self.nonlocal_stmt())
    ):
        return nonlocal_stmt;
    self._reset(mark)
    return None;
@memoize
def compound_stmt(self) -> Optional[Any]:
    """A Python compound statement (def/if/class/with/for/try/while/match).

    Keyword lookaheads gate each alternative; tried in grammar order.
    """
    # compound_stmt: &('def' | '@' | 'async') function_def | &'if' if_stmt | &('class' | '@') class_def | &('with' | 'async') with_stmt | &('for' | 'async') for_stmt | &'try' try_stmt | &'while' while_stmt | match_stmt
    mark = self._mark()
    if (
        (self.positive_lookahead(self._tmp_10, ))  # &('def' | '@' | 'async')
        and
        (function_def := self.function_def())
    ):
        return function_def;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'if'))
        and
        (if_stmt := self.if_stmt())
    ):
        return if_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self._tmp_11, ))  # &('class' | '@')
        and
        (class_def := self.class_def())
    ):
        return class_def;
    self._reset(mark)
    if (
        (self.positive_lookahead(self._tmp_12, ))  # &('with' | 'async')
        and
        (with_stmt := self.with_stmt())
    ):
        return with_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self._tmp_13, ))  # &('for' | 'async')
        and
        (for_stmt := self.for_stmt())
    ):
        return for_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'try'))
        and
        (try_stmt := self.try_stmt())
    ):
        return try_stmt;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, 'while'))
        and
        (while_stmt := self.while_stmt())
    ):
        return while_stmt;
    self._reset(mark)
    if (
        (match_stmt := self.match_stmt())
    ):
        return match_stmt;
    self._reset(mark)
    return None;
@memoize
def scenic_stmt(self) -> Optional[Any]:
    """A single Scenic-specific simple statement; alternatives in grammar order."""
    # scenic_stmt: scenic_model_stmt | scenic_tracked_assignment | scenic_param_stmt | scenic_require_stmt | scenic_record_initial_stmt | scenic_record_final_stmt | scenic_record_stmt | scenic_mutate_stmt | scenic_terminate_simulation_when_stmt | scenic_terminate_when_stmt | scenic_terminate_after_stmt | scenic_take_stmt | scenic_wait_stmt | scenic_terminate_simulation_stmt | scenic_terminate_stmt | scenic_do_choose_stmt | scenic_do_shuffle_stmt | scenic_do_for_stmt | scenic_do_until_stmt | scenic_do_stmt | scenic_abort_stmt | scenic_simulator_stmt
    mark = self._mark()
    if (
        (scenic_model_stmt := self.scenic_model_stmt())
    ):
        return scenic_model_stmt;
    self._reset(mark)
    if (
        (scenic_tracked_assignment := self.scenic_tracked_assignment())
    ):
        return scenic_tracked_assignment;
    self._reset(mark)
    if (
        (scenic_param_stmt := self.scenic_param_stmt())
    ):
        return scenic_param_stmt;
    self._reset(mark)
    if (
        (scenic_require_stmt := self.scenic_require_stmt())
    ):
        return scenic_require_stmt;
    self._reset(mark)
    if (
        (scenic_record_initial_stmt := self.scenic_record_initial_stmt())
    ):
        return scenic_record_initial_stmt;
    self._reset(mark)
    if (
        (scenic_record_final_stmt := self.scenic_record_final_stmt())
    ):
        return scenic_record_final_stmt;
    self._reset(mark)
    if (
        (scenic_record_stmt := self.scenic_record_stmt())
    ):
        return scenic_record_stmt;
    self._reset(mark)
    if (
        (scenic_mutate_stmt := self.scenic_mutate_stmt())
    ):
        return scenic_mutate_stmt;
    self._reset(mark)
    # The longer "terminate simulation when" must be tried before the
    # shorter "terminate when"/"terminate" forms (ordered choice).
    if (
        (scenic_terminate_simulation_when_stmt := self.scenic_terminate_simulation_when_stmt())
    ):
        return scenic_terminate_simulation_when_stmt;
    self._reset(mark)
    if (
        (scenic_terminate_when_stmt := self.scenic_terminate_when_stmt())
    ):
        return scenic_terminate_when_stmt;
    self._reset(mark)
    if (
        (scenic_terminate_after_stmt := self.scenic_terminate_after_stmt())
    ):
        return scenic_terminate_after_stmt;
    self._reset(mark)
    if (
        (scenic_take_stmt := self.scenic_take_stmt())
    ):
        return scenic_take_stmt;
    self._reset(mark)
    if (
        (scenic_wait_stmt := self.scenic_wait_stmt())
    ):
        return scenic_wait_stmt;
    self._reset(mark)
    if (
        (scenic_terminate_simulation_stmt := self.scenic_terminate_simulation_stmt())
    ):
        return scenic_terminate_simulation_stmt;
    self._reset(mark)
    if (
        (scenic_terminate_stmt := self.scenic_terminate_stmt())
    ):
        return scenic_terminate_stmt;
    self._reset(mark)
    # Likewise, the "do choose/shuffle/for/until" forms precede plain "do".
    if (
        (scenic_do_choose_stmt := self.scenic_do_choose_stmt())
    ):
        return scenic_do_choose_stmt;
    self._reset(mark)
    if (
        (scenic_do_shuffle_stmt := self.scenic_do_shuffle_stmt())
    ):
        return scenic_do_shuffle_stmt;
    self._reset(mark)
    if (
        (scenic_do_for_stmt := self.scenic_do_for_stmt())
    ):
        return scenic_do_for_stmt;
    self._reset(mark)
    if (
        (scenic_do_until_stmt := self.scenic_do_until_stmt())
    ):
        return scenic_do_until_stmt;
    self._reset(mark)
    if (
        (scenic_do_stmt := self.scenic_do_stmt())
    ):
        return scenic_do_stmt;
    self._reset(mark)
    if (
        (scenic_abort_stmt := self.scenic_abort_stmt())
    ):
        return scenic_abort_stmt;
    self._reset(mark)
    if (
        (scenic_simulator_stmt := self.scenic_simulator_stmt())
    ):
        return scenic_simulator_stmt;
    self._reset(mark)
    return None;
@memoize
def scenic_compound_stmt(self) -> Optional[Any]:
    """A Scenic compound statement (new-object forms, behavior/monitor/scenario
    definitions, try-interrupt, override); alternatives in grammar order."""
    # scenic_compound_stmt: scenic_tracked_assign_new_stmt | scenic_assign_new_stmt | scenic_expr_new_stmt | scenic_behavior_def | scenic_monitor_def | scenic_scenario_def | scenic_try_interrupt_stmt | scenic_override_stmt
    mark = self._mark()
    if (
        (scenic_tracked_assign_new_stmt := self.scenic_tracked_assign_new_stmt())
    ):
        return scenic_tracked_assign_new_stmt;
    self._reset(mark)
    if (
        (scenic_assign_new_stmt := self.scenic_assign_new_stmt())
    ):
        return scenic_assign_new_stmt;
    self._reset(mark)
    if (
        (scenic_expr_new_stmt := self.scenic_expr_new_stmt())
    ):
        return scenic_expr_new_stmt;
    self._reset(mark)
    if (
        (scenic_behavior_def := self.scenic_behavior_def())
    ):
        return scenic_behavior_def;
    self._reset(mark)
    if (
        (scenic_monitor_def := self.scenic_monitor_def())
    ):
        return scenic_monitor_def;
    self._reset(mark)
    if (
        (scenic_scenario_def := self.scenic_scenario_def())
    ):
        return scenic_scenario_def;
    self._reset(mark)
    if (
        (scenic_try_interrupt_stmt := self.scenic_try_interrupt_stmt())
    ):
        return scenic_try_interrupt_stmt;
    self._reset(mark)
    if (
        (scenic_override_stmt := self.scenic_override_stmt())
    ):
        return scenic_override_stmt;
    self._reset(mark)
    return None;
@memoize
def assignment(self) -> Optional[Any]:
    """Assignment statement: annotated, plain (chained), or augmented.

    The augmented-assignment alternative uses a cut (~): once the target
    and operator match, failure of the right-hand side aborts the whole
    rule instead of falling through to invalid_assignment.
    """
    # assignment: NAME ':' expression ['=' annotated_rhs] | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] | ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? | single_target augassign ~ (yield_expr | star_expressions) | invalid_assignment
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
        and
        (self.expect(':'))
        and
        (b := self.expression())
        and
        (c := self._tmp_14(),)  # optional '=' annotated_rhs
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 6 ) , "Variable annotation syntax is" , ast . AnnAssign ( target = ast . Name ( id = a . string , ctx = Store , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] , ) , annotation = b , value = c , simple = 1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
    self._reset(mark)
    if (
        (a := self._tmp_15())  # '(' single_target ')' | single_subscript_attribute_target
        and
        (self.expect(':'))
        and
        (b := self.expression())
        and
        (c := self._tmp_16(),)  # optional '=' annotated_rhs
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 6 ) , "Variable annotation syntax is" , ast . AnnAssign ( target = a , annotation = b , value = c , simple = 0 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
    self._reset(mark)
    if (
        (a := self._loop1_17())  # one or more 'target ='
        and
        (b := self._tmp_18())  # yield_expr | star_expressions
        and
        (self.negative_lookahead(self.expect, '='))
        and
        (tc := self.type_comment(),)  # optional TYPE_COMMENT
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Assign ( targets = a , value = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    cut = False
    if (
        (a := self.single_target())
        and
        (b := self.augassign())
        and
        (cut := True)  # cut: commit to this alternative
        and
        (c := self._tmp_19())  # yield_expr | star_expressions
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . AugAssign ( target = a , op = b , value = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if cut:
        return None;
    if (
        self.call_invalid_rules
        and
        (self.invalid_assignment())  # only runs on the error-reporting pass
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def annotated_rhs(self) -> Optional[Any]:
    """Right-hand side of an annotated assignment.

    annotated_rhs: yield_expr | star_expressions
    """
    mark = self._mark()
    # Ordered choice: try each alternative, resetting on failure.
    for alternative in (self.yield_expr, self.star_expressions):
        node = alternative()
        if node:
            return node
        self._reset(mark)
    return None
@memoize
def augassign(self) -> Optional[Any]:
    """Augmented-assignment operator, mapped to its ast operator node.

    augassign: '+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | '<<=' | '>>=' | '**=' | '//='
    """
    mark = self._mark()
    # Each alternative is a single token; try them in the grammar's order.
    table = (
        ('+=', ast.Add),
        ('-=', ast.Sub),
        ('*=', ast.Mult),
        ('@=', ast.MatMult),
        ('/=', ast.Div),
        ('%=', ast.Mod),
        ('&=', ast.BitAnd),
        ('|=', ast.BitOr),
        ('^=', ast.BitXor),
        ('<<=', ast.LShift),
        ('>>=', ast.RShift),
        ('**=', ast.Pow),
        ('//=', ast.FloorDiv),
    )
    for op_token, op_type in table:
        if self.expect(op_token):
            if op_token == '@=':
                # Matrix multiplication requires Python 3.5+.
                return self.check_version((3, 5), "The '@' operator is", ast.MatMult())
            return op_type()
        self._reset(mark)
    return None
@memoize
def return_stmt(self) -> Optional[ast . Return]:
    """Parse ``return_stmt: 'return' star_expressions?`` into an ast.Return."""
    # return_stmt: 'return' star_expressions?
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('return'))
        and
        # The trailing comma builds a 1-tuple, so this optional part is
        # always truthy even when star_expressions() returns None.
        (a := self.star_expressions(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Return ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def raise_stmt(self) -> Optional[ast . Raise]:
    """Parse a ``raise`` statement, with optional exception and 'from' cause."""
    # raise_stmt: 'raise' expression ['from' expression] | 'raise'
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('raise'))
        and
        (a := self.expression())
        and
        (b := self._tmp_20(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Raise ( exc = a , cause = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Bare 're-raise' form with no exception expression.
    if (
        (self.expect('raise'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Raise ( exc = None , cause = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def global_stmt(self) -> Optional[ast . Global]:
    """Parse ``global_stmt: 'global' ','.NAME+`` into an ast.Global node."""
    # global_stmt: 'global' ','.NAME+
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('global'))
        and
        (a := self._gather_21())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Global ( names = [n . string for n in a] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def nonlocal_stmt(self) -> Optional[ast . Nonlocal]:
    """Parse ``nonlocal_stmt: 'nonlocal' ','.NAME+`` into an ast.Nonlocal node."""
    # nonlocal_stmt: 'nonlocal' ','.NAME+
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('nonlocal'))
        and
        (a := self._gather_23())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Nonlocal ( names = [n . string for n in a] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def del_stmt(self) -> Optional[ast . Delete]:
    """Parse a ``del`` statement; the lookahead requires it to end the simple statement."""
    # del_stmt: 'del' del_targets &(';' | NEWLINE) | invalid_del_stmt
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('del'))
        and
        (a := self.del_targets())
        and
        (self.positive_lookahead(self._tmp_25, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Delete ( targets = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Error-recovery alternative; only tried on the second parsing pass.
    if (
        self.call_invalid_rules
        and
        (self.invalid_del_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def yield_stmt(self) -> Optional[ast . Expr]:
    """Parse a bare yield expression used as a statement, wrapped in ast.Expr."""
    # yield_stmt: yield_expr
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (y := self.yield_expr())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Expr ( value = y , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def assert_stmt(self) -> Optional[ast . Assert]:
    """Parse an ``assert`` statement with an optional message expression."""
    # assert_stmt: 'assert' expression [',' expression]
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('assert'))
        and
        (a := self.expression())
        and
        (b := self._tmp_26(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Assert ( test = a , msg = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def import_stmt(self) -> Optional[ast . Import]:
    """Parse ``import_stmt: invalid_import | import_name | import_from``.

    The invalid_import alternative is only attempted on the error-recovery
    pass (when call_invalid_rules is set).
    """
    mark = self._mark()
    if self.call_invalid_rules:
        if self.invalid_import():
            return None  # pragma: no cover
        self._reset(mark)
    node = self.import_name()
    if node:
        return node
    self._reset(mark)
    node = self.import_from()
    if node:
        return node
    self._reset(mark)
    return None
@memoize
def import_name(self) -> Optional[ast . Import]:
    """Parse ``import_name: 'import' dotted_as_names`` into an ast.Import node."""
    # import_name: 'import' dotted_as_names
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('import'))
        and
        (a := self.dotted_as_names())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Import ( names = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def import_from(self) -> Optional[ast . ImportFrom]:
    """Parse a ``from ... import ...`` statement; the second alternative
    handles purely-relative imports ('from .. import x' with no module name)."""
    # import_from: 'from' (('.' | '...'))* dotted_name 'import' import_from_targets | 'from' (('.' | '...'))+ 'import' import_from_targets
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('from'))
        and
        (a := self._loop0_27(),)
        and
        (b := self.dotted_name())
        and
        (self.expect('import'))
        and
        (c := self.import_from_targets())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . ImportFrom ( module = b , names = c , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('from'))
        and
        (a := self._loop1_28())
        and
        (self.expect('import'))
        and
        (b := self.import_from_targets())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # Pre-3.9 ast.ImportFrom requires module to be passed explicitly (as None).
        return ast . ImportFrom ( names = b , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . ImportFrom ( module = None , names = b , level = self . extract_import_level ( a ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def import_from_targets(self) -> Optional[List [ast . alias]]:
    """Parse the target list of a from-import: parenthesized names, a plain
    name list, or '*' (returned as a single alias named "*")."""
    # import_from_targets: '(' import_from_as_names ','? ')' | import_from_as_names !',' | '*' | invalid_import_from_targets
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('('))
        and
        (a := self.import_from_as_names())
        and
        (self.expect(','),)
        and
        (self.expect(')'))
    ):
        return a;
    self._reset(mark)
    # Unparenthesized list: a trailing comma is rejected via negative lookahead.
    if (
        (import_from_as_names := self.import_from_as_names())
        and
        (self.negative_lookahead(self.expect, ','))
    ):
        return import_from_as_names;
    self._reset(mark)
    if (
        (self.expect('*'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return [ast . alias ( name = "*" , asname = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )];
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_import_from_targets())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def import_from_as_names(self) -> Optional[List [ast . alias]]:
    """Parse ``','.import_from_as_name+`` into a list of aliases."""
    mark = self._mark()
    aliases = self._gather_29()
    if aliases:
        return aliases
    self._reset(mark)
    return None
@memoize
def import_from_as_name(self) -> Optional[ast . alias]:
    """Parse ``NAME ['as' NAME]`` into an ast.alias (asname is None when absent)."""
    # import_from_as_name: NAME ['as' NAME]
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
        and
        (b := self._tmp_31(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . alias ( name = a . string , asname = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def dotted_as_names(self) -> Optional[List [ast . alias]]:
    """Parse ``','.dotted_as_name+`` into a list of aliases."""
    mark = self._mark()
    aliases = self._gather_32()
    if aliases:
        return aliases
    self._reset(mark)
    return None
@memoize
def dotted_as_name(self) -> Optional[ast . alias]:
    """Parse ``dotted_name ['as' NAME]`` into an ast.alias."""
    # dotted_as_name: dotted_name ['as' NAME]
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.dotted_name())
        and
        (b := self._tmp_34(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . alias ( name = a , asname = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize_left_rec
def dotted_name(self) -> Optional[str]:
    """Parse a dotted module path into a plain string ("a.b.c").

    Left-recursive rule: memoize_left_rec iterates this method to grow the
    match, so the self-referential first alternative is intentional.
    """
    # dotted_name: dotted_name '.' NAME | NAME
    mark = self._mark()
    if (
        (a := self.dotted_name())
        and
        (self.expect('.'))
        and
        (b := self.name())
    ):
        return a + "." + b . string;
    self._reset(mark)
    if (
        (a := self.name())
    ):
        return a . string;
    self._reset(mark)
    return None;
@memoize
def block(self) -> Optional[list]:
    """Parse a statement block: an indented suite, or simple statements on one line."""
    # block: NEWLINE INDENT statements DEDENT | simple_stmts | invalid_block
    mark = self._mark()
    if (
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (a := self.statements())
        and
        (self.expect('DEDENT'))
    ):
        return a;
    self._reset(mark)
    if (
        (simple_stmts := self.simple_stmts())
    ):
        return simple_stmts;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_block())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def decorators(self) -> Optional[Any]:
    """Parse ``decorator+`` — one or more decorator lines."""
    mark = self._mark()
    found = self._loop1_35()
    if found:
        return found
    self._reset(mark)
    return None
@memoize
def decorator(self) -> Optional[Any]:
    """Parse a single decorator line; the second alternative allows arbitrary
    named expressions, which is only valid on Python 3.9+ (PEP 614)."""
    # decorator: ('@' dec_maybe_call NEWLINE) | ('@' named_expression NEWLINE)
    mark = self._mark()
    if (
        (a := self._tmp_36())
    ):
        return a;
    self._reset(mark)
    if (
        (a := self._tmp_37())
    ):
        return self . check_version ( ( 3 , 9 ) , "Generic decorator are" , a );
    self._reset(mark)
    return None;
@memoize
def dec_maybe_call(self) -> Optional[Any]:
    """Parse a decorator expression: a dotted name optionally called with arguments."""
    # dec_maybe_call: dec_primary '(' arguments? ')' | dec_primary
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (dn := self.dec_primary())
        and
        (self.expect('('))
        and
        (z := self.arguments(),)
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # arguments() yields a (positional, keyword) pair when present.
        return ast . Call ( func = dn , args = z [0] if z else [] , keywords = z [1] if z else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (dec_primary := self.dec_primary())
    ):
        return dec_primary;
    self._reset(mark)
    return None;
@memoize_left_rec
def dec_primary(self) -> Optional[Any]:
    """Parse a dotted decorator name into Attribute/Name nodes.

    Left-recursive rule handled by memoize_left_rec.
    """
    # dec_primary: dec_primary '.' NAME | NAME
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.dec_primary())
        and
        (self.expect('.'))
        and
        (b := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Name ( id = a . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def class_def(self) -> Optional[ast . ClassDef]:
    """Parse ``class_def: decorators class_def_raw | class_def_raw``."""
    mark = self._mark()
    decs = self.decorators()
    if decs:
        raw = self.class_def_raw()
        if raw:
            return self.set_decorators(raw, decs)
    self._reset(mark)
    raw = self.class_def_raw()
    if raw:
        return raw
    self._reset(mark)
    return None
@memoize
def class_def_raw(self) -> Optional[ast . ClassDef]:
    """Parse an undecorated class definition with a Scenic class body.

    The ':' is forced (&&), so its absence raises a syntax error rather
    than backtracking.
    """
    # class_def_raw: invalid_class_def_raw | 'class' NAME type_params? ['(' arguments? ')'] &&':' scenic_class_def_block
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_class_def_raw())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('class'))
        and
        (a := self.name())
        and
        (t := self.type_params(),)
        and
        (b := self._tmp_38(),)
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (c := self.scenic_class_def_block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # ast.ClassDef grew a type_params field in Python 3.12; build the
        # matching form for the running interpreter.
        return ( ast . ClassDef ( a . string , bases = b [0] if b else [] , keywords = b [1] if b else [] , body = c , decorator_list = [] , type_params = t or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) if sys . version_info >= ( 3 , 12 ) else ast . ClassDef ( a . string , bases = b [0] if b else [] , keywords = b [1] if b else [] , body = c , decorator_list = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
    self._reset(mark)
    return None;
@memoize
def scenic_class_def_block(self) -> Optional[Any]:
    """Parse a class body that may contain Scenic-specific statements."""
    # scenic_class_def_block: NEWLINE INDENT scenic_class_statements DEDENT | simple_stmts | invalid_block
    mark = self._mark()
    if (
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (a := self.scenic_class_statements())
        and
        (self.expect('DEDENT'))
    ):
        return a;
    self._reset(mark)
    if (
        (simple_stmts := self.simple_stmts())
    ):
        return simple_stmts;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_block())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def scenic_class_statements(self) -> Optional[list]:
    """Parse one or more Scenic class-body statements, flattened into one list."""
    mark = self._mark()
    groups = self._loop1_39()
    if groups:
        # Each match is itself a list of nodes; concatenate them all.
        flattened = []
        for group in groups:
            flattened.extend(group)
        return flattened
    self._reset(mark)
    return None
@memoize
def scenic_class_statement(self) -> Optional[list]:
    """Parse a single statement inside a Scenic class body.

    Always returns a list of nodes: single-node alternatives are wrapped,
    the statement-list alternatives are passed through.
    """
    mark = self._mark()
    node = self.scenic_class_property_stmt()
    if node:
        return [node]
    self._reset(mark)
    node = self.compound_stmt()
    if node:
        return [node]
    self._reset(mark)
    stmts = self.scenic_stmts()
    if stmts:
        return stmts
    self._reset(mark)
    stmts = self.simple_stmts()
    if stmts:
        return stmts
    self._reset(mark)
    return None
@memoize
def scenic_class_property_stmt(self) -> Optional[Any]:
    """Parse a Scenic property definition line, e.g. ``name[attrs]: expr``."""
    # scenic_class_property_stmt: NAME ['[' ','.scenic_class_property_attribute+ ']'] ':' expression NEWLINE
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
        and
        (b := self._tmp_40(),)
        and
        (self.expect(':'))
        and
        (c := self.expression())
        and
        (self.expect('NEWLINE'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . PropertyDef ( property = a . string , attributes = b if b is not None else [] , value = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    return None;
@memoize
def scenic_class_property_attribute(self) -> Optional[Any]:
    """Parse a property attribute keyword; anything other than
    "additive"/"dynamic"/"final" raises a forced syntax error."""
    # scenic_class_property_attribute: &&("additive" | "dynamic" | "final")
    # nullable=True
    mark = self._mark()
    if (
        (forced := self.expect_forced(self._tmp_41(), '''("additive" | "dynamic" | "final")'''))
    ):
        return forced;
    self._reset(mark)
    return None;
@memoize
def scenic_assign_new_stmt(self) -> Optional[Any]:
    """Parse an assignment whose right-hand side is a multi-line Scenic 'new' block."""
    # scenic_assign_new_stmt: ((star_targets '='))+ (scenic_new_block) !'=' TYPE_COMMENT?
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self._loop1_42())
        and
        (b := self.scenic_new_block())
        and
        (self.negative_lookahead(self.expect, '='))
        and
        (tc := self.type_comment(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Assign ( targets = a , value = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_tracked_assign_new_stmt(self) -> Optional[Any]:
    """Parse assignment of a Scenic 'new' block to a tracked name (e.g. ego)."""
    # scenic_tracked_assign_new_stmt: scenic_tracked_name '=' scenic_new_block
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.scenic_tracked_name())
        and
        (self.expect('='))
        and
        (b := self.scenic_new_block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . TrackedAssign ( target = a , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_expr_new_stmt(self) -> Optional[Any]:
    """Parse a bare Scenic 'new' block used as an expression statement."""
    # scenic_expr_new_stmt: scenic_new_block
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.scenic_new_block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Expr ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_new_block(self) -> Optional[Any]:
    """Parse a 'new' expression whose specifiers continue on indented lines."""
    # scenic_new_block: scenic_new_expr ',' NEWLINE INDENT scenic_new_block_body DEDENT
    mark = self._mark()
    if (
        (a := self.scenic_new_expr())
        and
        (self.expect(','))
        and
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (b := self.scenic_new_block_body())
        and
        (self.expect('DEDENT'))
    ):
        # Merge the indented specifiers into the head 'new' expression.
        return self . extend_new_specifiers ( a , b );
    self._reset(mark)
    return None;
@memoize
def scenic_new_block_body(self) -> Optional[Any]:
    """Parse the indented specifier lines of a 'new' block into one flat list."""
    # scenic_new_block_body: ((scenic_specifiers ',' NEWLINE))* scenic_specifiers NEWLINE | ((scenic_specifiers ',' NEWLINE))+
    mark = self._mark()
    if (
        (b := self._loop0_43(),)
        and
        (c := self.scenic_specifiers())
        and
        (self.expect('NEWLINE'))
    ):
        return list ( itertools . chain . from_iterable ( b ) ) + c;
    self._reset(mark)
    # All lines end with a trailing comma (no final unterminated line).
    if (
        (b := self._loop1_44())
    ):
        return list ( itertools . chain . from_iterable ( b ) );
    self._reset(mark)
    return None;
@memoize
def scenic_behavior_def(self) -> Optional[Any]:
    """Parse a Scenic ``behavior`` definition into an s.BehaviorDef node."""
    # scenic_behavior_def: "behavior" NAME '(' params? ')' &&':' scenic_behavior_def_block
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("behavior"))
        and
        (a := self.name())
        and
        (self.expect('('))
        and
        (b := self.params(),)
        and
        (self.expect(')'))
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (c := self.scenic_behavior_def_block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # c is the (docstring, header, body) triple from scenic_behavior_def_block.
        return s . BehaviorDef ( a . string , args = b or self . make_arguments ( None , [] , None , [] , None ) , docstring = c [0] , header = c [1] , body = c [2] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    return None;
@memoize
def scenic_behavior_def_block(self) -> Optional[Any]:
    """Parse a behavior body; returns (docstring, header, statements)."""
    # scenic_behavior_def_block: NEWLINE INDENT [STRING NEWLINE] scenic_behavior_header? scenic_behavior_statements DEDENT | invalid_block
    mark = self._mark()
    if (
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (a := self._tmp_45(),)
        and
        (b := self.scenic_behavior_header(),)
        and
        (c := self.scenic_behavior_statements())
        and
        (self.expect('DEDENT'))
    ):
        return ( a , b or [] , c );
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_block())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def scenic_behavior_statements(self) -> Optional[list]:
    """Parse one or more behavior-body statements, flattened into one list."""
    mark = self._mark()
    groups = self._loop1_46()
    if groups:
        flattened = []
        for group in groups:
            flattened.extend(group)
        return flattened
    self._reset(mark)
    return None
@memoize
def scenic_behavior_statement(self) -> Optional[list]:
    """Parse one statement inside a behavior body.

    The invalid alternative is tried first so that misplaced
    precondition/invariant lines report a dedicated error.
    """
    mark = self._mark()
    bad = self.scenic_invalid_behavior_statement()
    if bad:
        return bad
    self._reset(mark)
    stmts = self.statement()
    if stmts:
        return stmts
    self._reset(mark)
    return None
@memoize
def scenic_invalid_behavior_statement(self) -> Optional[Any]:
    """Report an error for precondition/invariant lines that appear after the
    behavior header; both alternatives raise rather than return."""
    # scenic_invalid_behavior_statement: "invariant" ':' expression | "precondition" ':' expression
    mark = self._mark()
    if (
        (a := self.expect("invariant"))
        and
        (self.expect(':'))
        and
        (a_1 := self.expression())
    ):
        return self . raise_syntax_error_known_location ( "invariant can only be set at the beginning of behavior definitions" , a );
    self._reset(mark)
    if (
        (a := self.expect("precondition"))
        and
        (self.expect(':'))
        and
        (a_1 := self.expression())
    ):
        return self . raise_syntax_error_known_location ( "precondition can only be set at the beginning of behavior definitions" , a );
    self._reset(mark)
    return None;
@memoize
def scenic_behavior_header(self) -> Optional[Any]:
    """Parse one or more precondition/invariant lines heading a behavior."""
    mark = self._mark()
    header = self._loop1_47()
    if header:
        return header
    self._reset(mark)
    return None
@memoize
def scenic_precondition_stmt(self) -> Optional[Any]:
    """Parse ``precondition: expression`` into an s.Precondition node."""
    # scenic_precondition_stmt: "precondition" ':' expression
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("precondition"))
        and
        (self.expect(':'))
        and
        (a := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Precondition ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_invariant_stmt(self) -> Optional[Any]:
    """Parse ``invariant: expression`` into an s.Invariant node."""
    # scenic_invariant_stmt: "invariant" ':' expression
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("invariant"))
        and
        (self.expect(':'))
        and
        (a := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Invariant ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_monitor_def(self) -> Optional[Any]:
    """Parse a Scenic ``monitor`` definition into an s.MonitorDef node."""
    # scenic_monitor_def: invalid_monitor | "monitor" NAME '(' params? ')' &&':' scenic_monitor_def_block
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_monitor())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect("monitor"))
        and
        (a := self.name())
        and
        (self.expect('('))
        and
        (b := self.params(),)
        and
        (self.expect(')'))
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (c := self.scenic_monitor_def_block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # c is the (docstring, statements) pair from scenic_monitor_def_block.
        return s . MonitorDef ( a . string , args = b or self . make_arguments ( None , [] , None , [] , None ) , docstring = c [0] , body = c [1] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def invalid_monitor(self) -> Optional[NoReturn]:
    """Raise a dedicated error for the legacy (Scenic 2.0) parameterless
    ``monitor Name:`` form, which lacks parentheses."""
    # invalid_monitor: "monitor" NAME ':'
    mark = self._mark()
    if (
        (self.expect("monitor"))
        and
        (self.name())
        and
        (a := self.expect(':'))
    ):
        return self . raise_syntax_error_known_location ( "2.0-style monitor must be converted to use parentheses and explicit require" , a );
    self._reset(mark)
    return None;
@memoize
def scenic_monitor_def_block(self) -> Optional[Any]:
    """Parse a monitor body; returns (docstring, statements)."""
    # scenic_monitor_def_block: NEWLINE INDENT [STRING NEWLINE] scenic_monitor_statements DEDENT
    mark = self._mark()
    if (
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (a := self._tmp_48(),)
        and
        (b := self.scenic_monitor_statements())
        and
        (self.expect('DEDENT'))
    ):
        return ( a , b );
    self._reset(mark)
    return None;
@memoize
def scenic_monitor_statements(self) -> Optional[list]:
    """Parse one or more monitor-body statements, flattened into one list."""
    mark = self._mark()
    groups = self._loop1_49()
    if groups:
        flattened = []
        for group in groups:
            flattened.extend(group)
        return flattened
    self._reset(mark)
    return None
@memoize
def scenic_scenario_def(self) -> Optional[Any]:
    """Parse a Scenic ``scenario`` definition into an s.ScenarioDef node."""
    # scenic_scenario_def: "scenario" NAME ['(' params? ')'] &&':' scenic_scenario_def_block
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("scenario"))
        and
        (a := self.name())
        and
        (b := self._tmp_50(),)
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (c := self.scenic_scenario_def_block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # c is the (docstring, header, setup, compose) tuple from the body rule.
        return s . ScenarioDef ( a . string , args = b or self . make_arguments ( None , [] , None , [] , None ) , docstring = c [0] , header = c [1] , setup = c [2] , compose = c [3] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    return None;
@memoize
def scenic_scenario_def_block(self) -> Optional[Any]:
    """Parse a scenario body; returns (docstring, header, setup, compose).

    The first alternative handles explicit setup/compose sub-blocks; the
    second treats a plain statement suite as an implicit setup block.
    """
    # scenic_scenario_def_block: NEWLINE INDENT [STRING NEWLINE] scenic_behavior_header? scenic_scenario_setup_block? scenic_scenario_compose_block? DEDENT | NEWLINE INDENT [STRING NEWLINE] statements DEDENT
    mark = self._mark()
    if (
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (a := self._tmp_51(),)
        and
        (b := self.scenic_behavior_header(),)
        and
        (c := self.scenic_scenario_setup_block(),)
        and
        (d := self.scenic_scenario_compose_block(),)
        and
        (self.expect('DEDENT'))
    ):
        return ( a , b or [] , c or [] , d or [] );
    self._reset(mark)
    if (
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (a := self._tmp_52(),)
        and
        (b := self.statements())
        and
        (self.expect('DEDENT'))
    ):
        return ( a , [] , b , [] );
    self._reset(mark)
    return None;
@memoize
def scenic_scenario_setup_block(self) -> Optional[Any]:
    """Parse ``"setup" ':' block``; the colon is mandatory (forced error)."""
    mark = self._mark()
    if (
        self.expect("setup")
        and self.expect_forced(self.expect(':'), "':'")
        and (body := self.block())
    ):
        return body
    self._reset(mark)
    return None
@memoize
def scenic_scenario_compose_block(self) -> Optional[Any]:
    """Parse ``"compose" ':' block``; the colon is mandatory (forced error)."""
    mark = self._mark()
    if (
        self.expect("compose")
        and self.expect_forced(self.expect(':'), "':'")
        and (body := self.block())
    ):
        return body
    self._reset(mark)
    return None
@memoize
def scenic_override_stmt(self) -> Optional[Any]:
    """Parse an ``override`` statement; specifiers may continue on an
    indented block after a trailing comma (second alternative)."""
    # scenic_override_stmt: "override" primary scenic_specifiers NEWLINE | "override" primary scenic_specifiers ',' NEWLINE INDENT scenic_new_block_body DEDENT
    mark = self._mark()
    if (
        (self.expect("override"))
        and
        (e := self.primary())
        and
        (ss := self.scenic_specifiers())
        and
        (self.expect('NEWLINE'))
    ):
        return s . Override ( target = e , specifiers = ss );
    self._reset(mark)
    if (
        (self.expect("override"))
        and
        (e := self.primary())
        and
        (ss := self.scenic_specifiers())
        and
        (self.expect(','))
        and
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (t := self.scenic_new_block_body())
        and
        (self.expect('DEDENT'))
    ):
        return s . Override ( target = e , specifiers = ss + t );
    self._reset(mark)
    return None;
@memoize
def function_def(self) -> Optional[Union [ast . FunctionDef , ast . AsyncFunctionDef]]:
    """Parse ``function_def: decorators function_def_raw | function_def_raw``.

    The undecorated alternative still calls set_decorators with an empty
    list so the node always carries a decorator_list.
    """
    mark = self._mark()
    decs = self.decorators()
    if decs:
        raw = self.function_def_raw()
        if raw:
            return self.set_decorators(raw, decs)
    self._reset(mark)
    raw = self.function_def_raw()
    if raw:
        return self.set_decorators(raw, [])
    self._reset(mark)
    return None
@memoize
def function_def_raw(self) -> Optional[Union [ast . FunctionDef , ast . AsyncFunctionDef]]:
    """Parse an undecorated (async) function definition.

    '(' and ':' are forced (&&), so their absence raises a syntax error
    instead of backtracking. Builds the 3.12+ node form (with type_params)
    or the older form depending on the running interpreter.
    """
    # function_def_raw: invalid_def_raw | 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block | 'async' 'def' NAME type_params? &&'(' params? ')' ['->' expression] &&':' func_type_comment? block
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_def_raw())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('def'))
        and
        (n := self.name())
        and
        (t := self.type_params(),)
        and
        (self.expect_forced(self.expect('('), "'('"))
        and
        (params := self.params(),)
        and
        (self.expect(')'))
        and
        (a := self._tmp_53(),)
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (tc := self.func_type_comment(),)
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ( ast . FunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , type_params = t or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) if sys . version_info >= ( 3 , 12 ) else ast . FunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
    self._reset(mark)
    # 'async def' variant; async functions additionally require Python 3.5+.
    if (
        (self.expect('async'))
        and
        (self.expect('def'))
        and
        (n := self.name())
        and
        (t := self.type_params(),)
        and
        (self.expect_forced(self.expect('('), "'('"))
        and
        (params := self.params(),)
        and
        (self.expect(')'))
        and
        (a := self._tmp_54(),)
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (tc := self.func_type_comment(),)
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ( self . check_version ( ( 3 , 5 ) , "Async functions are" , ast . AsyncFunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , type_params = t or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) ) if sys . version_info >= ( 3 , 12 ) else self . check_version ( ( 3 , 5 ) , "Async functions are" , ast . AsyncFunctionDef ( name = n . string , args = params or self . make_arguments ( None , [] , None , [] , None ) , returns = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) ) );
    self._reset(mark)
    return None;
@memoize
def params(self) -> Optional[Any]:
    """Parse ``params: invalid_parameters | parameters``.

    The invalid alternative is only attempted on the error-recovery pass.
    """
    mark = self._mark()
    if self.call_invalid_rules:
        if self.invalid_parameters():
            return None  # pragma: no cover
        self._reset(mark)
    result = self.parameters()
    if result:
        return result
    self._reset(mark)
    return None
@memoize
def parameters(self) -> Optional[ast . arguments]:
    # parameters: slash_no_default param_no_default* param_with_default* star_etc? | slash_with_default param_with_default* star_etc? | param_no_default+ param_with_default* star_etc? | param_with_default+ star_etc? | star_etc
    """Parse a full parameter list into an ``ast.arguments`` node.

    Alternatives are tried in grammar order; positional-only forms
    (``/`` separator) are version-gated to Python >= 3.8 via
    ``check_version``.  Trailing ``(x := rule(),)`` tuples make optional
    sub-rules always truthy inside the ``and`` chains.
    """
    mark = self._mark()
    if (
        (a := self.slash_no_default())
        and
        (b := self._loop0_55(),)
        and
        (c := self._loop0_56(),)
        and
        (d := self.star_etc(),)
    ):
        return self . check_version ( ( 3 , 8 ) , "Positional only arguments are" , self . make_arguments ( a , [] , b , c , d ) );
    self._reset(mark)
    if (
        (a := self.slash_with_default())
        and
        (b := self._loop0_57(),)
        and
        (c := self.star_etc(),)
    ):
        return self . check_version ( ( 3 , 8 ) , "Positional only arguments are" , self . make_arguments ( None , a , None , b , c ) , );
    self._reset(mark)
    if (
        (a := self._loop1_58())
        and
        (b := self._loop0_59(),)
        and
        (c := self.star_etc(),)
    ):
        return self . make_arguments ( None , [] , a , b , c );
    self._reset(mark)
    if (
        (a := self._loop1_60())
        and
        (b := self.star_etc(),)
    ):
        return self . make_arguments ( None , [] , None , a , b );
    self._reset(mark)
    if (
        (a := self.star_etc())
    ):
        return self . make_arguments ( None , [] , None , None , a );
    self._reset(mark)
    return None;
@memoize
def slash_no_default(self) -> Optional[List [Tuple [ast . arg , None]]]:
    # slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')'
    """Parse positional-only params (no defaults) ending with '/'.

    Returns ``(arg, None)`` pairs; the second alternative accepts '/'
    just before ')' via a non-consuming positive lookahead.
    """
    mark = self._mark()
    if (
        (a := self._loop1_61())
        and
        (self.expect('/'))
        and
        (self.expect(','))
    ):
        return [( p , None ) for p in a];
    self._reset(mark)
    if (
        (a := self._loop1_62())
        and
        (self.expect('/'))
        and
        (self.positive_lookahead(self.expect, ')'))
    ):
        return [( p , None ) for p in a];
    self._reset(mark)
    return None;
@memoize
def slash_with_default(self) -> Optional[List [Tuple [ast . arg , Any]]]:
    # slash_with_default: param_no_default* param_with_default+ '/' ',' | param_no_default* param_with_default+ '/' &')'
    """Parse positional-only params where at least one has a default.

    ``a`` (no-default params) may be empty; they are paired with None
    defaults and concatenated with the defaulted pairs in ``b``.
    """
    mark = self._mark()
    if (
        (a := self._loop0_63(),)
        and
        (b := self._loop1_64())
        and
        (self.expect('/'))
        and
        (self.expect(','))
    ):
        return ( [( p , None ) for p in a] if a else [] ) + b;
    self._reset(mark)
    if (
        (a := self._loop0_65(),)
        and
        (b := self._loop1_66())
        and
        (self.expect('/'))
        and
        (self.positive_lookahead(self.expect, ')'))
    ):
        return ( [( p , None ) for p in a] if a else [] ) + b;
    self._reset(mark)
    return None;
@memoize
def star_etc(self) -> Optional[Tuple [Optional [ast . arg] , List [Tuple [ast . arg , Any]] , Optional [ast . arg]]]:
    # star_etc: invalid_star_etc | '*' param_no_default param_maybe_default* kwds? | '*' param_no_default_star_annotation param_maybe_default* kwds? | '*' ',' param_maybe_default+ kwds? | kwds
    """Parse the tail of a parameter list: ``*args``, keyword-only
    params, and ``**kwargs``.

    Returns a ``(vararg, kwonly_pairs, kwarg)`` triple; ``vararg`` is
    None for the bare ``*,`` separator form.
    """
    mark = self._mark()
    if (
        self.call_invalid_rules
        and
        (self.invalid_star_etc())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (a := self.param_no_default())
        and
        (b := self._loop0_67(),)
        and
        (c := self.kwds(),)
    ):
        return ( a , b , c );
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (a := self.param_no_default_star_annotation())
        and
        (b := self._loop0_68(),)
        and
        (c := self.kwds(),)
    ):
        return ( a , b , c );
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (self.expect(','))
        and
        (b := self._loop1_69())
        and
        (c := self.kwds(),)
    ):
        return ( None , b , c );
    self._reset(mark)
    if (
        (a := self.kwds())
    ):
        return ( None , [] , a );
    self._reset(mark)
    return None;
@memoize
def kwds(self) -> Optional[ast . arg]:
    # kwds: invalid_kwds | '**' param_no_default
    """Parse a ``**kwargs`` parameter: '**' followed by a plain param.

    The ``invalid_kwds`` alternative only fires on the error-reporting
    pass and produces no value.
    """
    mark = self._mark()
    if self.call_invalid_rules and self.invalid_kwds():
        return None  # pragma: no cover
    self._reset(mark)
    if self.expect('**'):
        kwarg = self.param_no_default()
        if kwarg:
            return kwarg
    self._reset(mark)
    return None
@memoize
def param_no_default(self) -> Optional[ast . arg]:
    # param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')'
    """Parse one parameter with no default, consuming its trailing
    comma or stopping (without consuming) before ')'.

    An optional type comment is attached via ``set_arg_type_comment``.
    """
    mark = self._mark()
    if (
        (a := self.param())
        and
        (self.expect(','))
        and
        (tc := self.type_comment(),)
    ):
        return self . set_arg_type_comment ( a , tc );
    self._reset(mark)
    if (
        (a := self.param())
        and
        (tc := self.type_comment(),)
        and
        (self.positive_lookahead(self.expect, ')'))
    ):
        return self . set_arg_type_comment ( a , tc );
    self._reset(mark)
    return None;
@memoize
def param_no_default_star_annotation(self) -> Optional[ast . arg]:
    # param_no_default_star_annotation: param_star_annotation ',' TYPE_COMMENT? | param_star_annotation TYPE_COMMENT? &')'
    """Parse a ``*args``-position parameter whose annotation is a
    starred expression, with the same comma/&')' handling as
    ``param_no_default``.
    """
    mark = self._mark()
    if (
        (a := self.param_star_annotation())
        and
        (self.expect(','))
        and
        (tc := self.type_comment(),)
    ):
        return self . set_arg_type_comment ( a , tc );
    self._reset(mark)
    if (
        (a := self.param_star_annotation())
        and
        (tc := self.type_comment(),)
        and
        (self.positive_lookahead(self.expect, ')'))
    ):
        return self . set_arg_type_comment ( a , tc );
    self._reset(mark)
    return None;
@memoize
def param_with_default(self) -> Optional[Tuple [ast . arg , Any]]:
    # param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? &')'
    """Parse ``name=default`` and return an ``(arg, default)`` pair.

    The default is mandatory here; compare ``param_maybe_default``.
    """
    mark = self._mark()
    if (
        (a := self.param())
        and
        (c := self.default())
        and
        (self.expect(','))
        and
        (tc := self.type_comment(),)
    ):
        return ( self . set_arg_type_comment ( a , tc ) , c );
    self._reset(mark)
    if (
        (a := self.param())
        and
        (c := self.default())
        and
        (tc := self.type_comment(),)
        and
        (self.positive_lookahead(self.expect, ')'))
    ):
        return ( self . set_arg_type_comment ( a , tc ) , c );
    self._reset(mark)
    return None;
@memoize
def param_maybe_default(self) -> Optional[Tuple [ast . arg , Any]]:
    # param_maybe_default: param default? ',' TYPE_COMMENT? | param default? TYPE_COMMENT? &')'
    """Parse a (keyword-only) parameter whose default is optional.

    Returns ``(arg, default_or_None)``; the ``(c := self.default(),)``
    tuple keeps the ``and`` chain truthy when no default is present.
    """
    mark = self._mark()
    if (
        (a := self.param())
        and
        (c := self.default(),)
        and
        (self.expect(','))
        and
        (tc := self.type_comment(),)
    ):
        return ( self . set_arg_type_comment ( a , tc ) , c );
    self._reset(mark)
    if (
        (a := self.param())
        and
        (c := self.default(),)
        and
        (tc := self.type_comment(),)
        and
        (self.positive_lookahead(self.expect, ')'))
    ):
        return ( self . set_arg_type_comment ( a , tc ) , c );
    self._reset(mark)
    return None;
@memoize
def param(self) -> Optional[Any]:
    # param: NAME annotation?
    """Parse a bare parameter: a name with an optional ':' annotation.

    Source locations span from the name token to the last
    non-whitespace token consumed.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
        and
        (b := self.annotation(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . arg ( arg = a . string , annotation = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def param_star_annotation(self) -> Optional[Any]:
    # param_star_annotation: NAME star_annotation
    """Parse a starred-annotation parameter (e.g. ``*args: *Ts``).

    Returns an ``ast.arg`` carrying the starred annotation, or None
    (with the tokenizer rewound) when the rule does not match.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
        and
        (b := self.star_annotation())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # Fix: ast.arg's field is "annotation" (singular).  The generated
        # keyword "annotations" set a bogus attribute and left the real
        # annotation field unset, silently dropping the starred annotation
        # from the AST (and raising TypeError on Python 3.13+).
        return ast . arg ( arg = a . string , annotation = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def annotation(self) -> Optional[Any]:
    # annotation: ':' expression
    """Parse a parameter annotation: ':' followed by an expression."""
    mark = self._mark()
    if self.expect(':'):
        expr = self.expression()
        if expr:
            return expr
    self._reset(mark)
    return None
@memoize
def star_annotation(self) -> Optional[Any]:
    # star_annotation: ':' star_expression
    """Parse a starred annotation: ':' followed by a star_expression."""
    mark = self._mark()
    if self.expect(':'):
        expr = self.star_expression()
        if expr:
            return expr
    self._reset(mark)
    return None
@memoize
def default(self) -> Optional[Any]:
    # default: '=' expression | invalid_default
    """Parse a parameter default value: '=' followed by an expression.

    ``invalid_default`` runs only on the error-reporting pass to raise
    a better SyntaxError; it never yields a value.
    """
    mark = self._mark()
    if self.expect('='):
        value = self.expression()
        if value:
            return value
    self._reset(mark)
    if self.call_invalid_rules and self.invalid_default():
        return None  # pragma: no cover
    self._reset(mark)
    return None
@memoize
def if_stmt(self) -> Optional[ast . If]:
    # if_stmt: invalid_if_stmt | 'if' named_expression ':' block elif_stmt | 'if' named_expression ':' block else_block?
    """Parse an ``if`` statement into an ``ast.If`` node.

    ``elif`` chains come back from ``elif_stmt`` as a one-element list
    of nested ``ast.If`` nodes and are attached as ``orelse``.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start  # location of the 'if' keyword
    if (
        self.call_invalid_rules
        and
        (self.invalid_if_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('if'))
        and
        (a := self.named_expression())
        and
        (self.expect(':'))
        and
        (b := self.block())
        and
        (c := self.elif_stmt())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('if'))
        and
        (a := self.named_expression())
        and
        (self.expect(':'))
        and
        (b := self.block())
        and
        (c := self.else_block(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def elif_stmt(self) -> Optional[List [ast . If]]:
    # elif_stmt: invalid_elif_stmt | 'elif' named_expression ':' block elif_stmt | 'elif' named_expression ':' block else_block?
    """Parse an ``elif`` chain recursively.

    Each ``elif`` becomes an ``ast.If`` wrapped in a single-element
    list so it can slot directly into the parent's ``orelse``.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_elif_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('elif'))
        and
        (a := self.named_expression())
        and
        (self.expect(':'))
        and
        (b := self.block())
        and
        (c := self.elif_stmt())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return [ast . If ( test = a , body = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )];
    self._reset(mark)
    if (
        (self.expect('elif'))
        and
        (a := self.named_expression())
        and
        (self.expect(':'))
        and
        (b := self.block())
        and
        (c := self.else_block(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return [ast . If ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset )];
    self._reset(mark)
    return None;
@memoize
def else_block(self) -> Optional[list]:
    # else_block: invalid_else_stmt | 'else' &&':' block
    """Parse an ``else:`` suite and return its statement list.

    The ':' is a forced token (``&&``): once 'else' is seen, a missing
    colon is a hard syntax error rather than a backtrack.
    """
    mark = self._mark()
    if self.call_invalid_rules and self.invalid_else_stmt():
        return None  # pragma: no cover
    self._reset(mark)
    if self.expect('else') and self.expect_forced(self.expect(':'), "':'"):
        body = self.block()
        if body:
            return body
    self._reset(mark)
    return None
@memoize
def while_stmt(self) -> Optional[ast . While]:
    # while_stmt: invalid_while_stmt | 'while' named_expression ':' block else_block?
    """Parse a ``while`` statement (optional ``else``) into
    ``ast.While``.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_while_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('while'))
        and
        (a := self.named_expression())
        and
        (self.expect(':'))
        and
        (b := self.block())
        and
        (c := self.else_block(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . While ( test = a , body = b , orelse = c or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def for_stmt(self) -> Optional[Union [ast . For , ast . AsyncFor]]:
    # for_stmt: invalid_for_stmt | 'for' star_targets 'in' ~ star_expressions &&':' TYPE_COMMENT? block else_block? | 'async' 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? | invalid_for_target
    """Parse a (possibly async) ``for`` statement.

    The grammar cut (``~``, implemented via ``cut := True``) commits to
    the alternative once 'in' is consumed: if the rest fails, the rule
    fails outright instead of trying later alternatives.  Async form is
    version-gated to Python >= 3.5.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_for_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    cut = False
    if (
        (self.expect('for'))
        and
        (t := self.star_targets())
        and
        (self.expect('in'))
        and
        (cut := True)
        and
        (ex := self.star_expressions())
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (tc := self.type_comment(),)
        and
        (b := self.block())
        and
        (el := self.else_block(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . For ( target = t , iter = ex , body = b , orelse = el or [] , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if cut:
        return None;
    cut = False
    if (
        (self.expect('async'))
        and
        (self.expect('for'))
        and
        (t := self.star_targets())
        and
        (self.expect('in'))
        and
        (cut := True)
        and
        (ex := self.star_expressions())
        and
        (self.expect(':'))
        and
        (tc := self.type_comment(),)
        and
        (b := self.block())
        and
        (el := self.else_block(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 5 ) , "Async for loops are" , ast . AsyncFor ( target = t , iter = ex , body = b , orelse = el or [] , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
    self._reset(mark)
    if cut:
        return None;
    if (
        self.call_invalid_rules
        and
        (self.invalid_for_target())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def with_stmt(self) -> Optional[Union [ast . With , ast . AsyncWith]]:
    # with_stmt: invalid_with_stmt_indent | 'with' '(' ','.with_item+ ','? ')' ':' block | 'with' ','.with_item+ ':' TYPE_COMMENT? block | 'async' 'with' '(' ','.with_item+ ','? ')' ':' block | 'async' 'with' ','.with_item+ ':' TYPE_COMMENT? block | invalid_with_stmt
    """Parse a (possibly async) ``with`` statement.

    Parenthesized multi-item forms are version-gated to Python >= 3.9,
    async forms to >= 3.5.  ``_gather_*`` helpers parse the
    comma-separated ``with_item`` lists.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_with_stmt_indent())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('with'))
        and
        (self.expect('('))
        and
        (a := self._gather_70())
        and
        (self.expect(','),)
        and
        (self.expect(')'))
        and
        (self.expect(':'))
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 9 ) , "Parenthesized with items" , ast . With ( items = a , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
    self._reset(mark)
    if (
        (self.expect('with'))
        and
        (a := self._gather_72())
        and
        (self.expect(':'))
        and
        (tc := self.type_comment(),)
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . With ( items = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('async'))
        and
        (self.expect('with'))
        and
        (self.expect('('))
        and
        (a := self._gather_74())
        and
        (self.expect(','),)
        and
        (self.expect(')'))
        and
        (self.expect(':'))
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 9 ) , "Parenthesized with items" , ast . AsyncWith ( items = a , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
    self._reset(mark)
    if (
        (self.expect('async'))
        and
        (self.expect('with'))
        and
        (a := self._gather_76())
        and
        (self.expect(':'))
        and
        (tc := self.type_comment(),)
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 5 ) , "Async with statements are" , ast . AsyncWith ( items = a , body = b , type_comment = tc , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_with_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def with_item(self) -> Optional[ast . withitem]:
    # with_item: expression 'as' star_target &(',' | ')' | ':') | invalid_with_item | expression
    """Parse one item of a with statement, optionally with 'as' target.

    The 'as' form requires a following ',', ')' or ':' (non-consuming
    lookahead); otherwise the bare-expression form is used.
    """
    mark = self._mark()
    ctx = self.expression()
    if ctx and self.expect('as'):
        target = self.star_target()
        if target and self.positive_lookahead(self._tmp_78, ):
            return ast . withitem ( context_expr = ctx , optional_vars = target )
    self._reset(mark)
    if self.call_invalid_rules and self.invalid_with_item():
        return None  # pragma: no cover
    self._reset(mark)
    ctx = self.expression()
    if ctx:
        return ast . withitem ( context_expr = ctx , optional_vars = None )
    self._reset(mark)
    return None
@memoize
def try_stmt(self) -> Optional[ast . Try]:
    # try_stmt: invalid_try_stmt | 'try' &&':' block finally_block | 'try' &&':' block except_block+ else_block? finally_block? | 'try' &&':' block except_star_block+ else_block? finally_block?
    """Parse a ``try`` statement.

    Alternatives: try/finally only; try/except(+else/finally); and
    try/except* (exception groups, version-gated to Python >= 3.11,
    built as ``ast.TryStar`` only when the host interpreter has it).
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_try_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('try'))
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (b := self.block())
        and
        (f := self.finally_block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Try ( body = b , handlers = [] , orelse = [] , finalbody = f , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('try'))
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (b := self.block())
        and
        (ex := self._loop1_79())
        and
        (el := self.else_block(),)
        and
        (f := self.finally_block(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Try ( body = b , handlers = ex , orelse = el or [] , finalbody = f or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('try'))
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (b := self.block())
        and
        (ex := self._loop1_80())
        and
        (el := self.else_block(),)
        and
        (f := self.finally_block(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 11 ) , "Exception groups are" , ( ast . TryStar ( body = b , handlers = ex , orelse = el or [] , finalbody = f or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 11 ) else None ) );
    self._reset(mark)
    return None;
@memoize
def scenic_try_interrupt_stmt(self) -> Optional[s . TryInterrupt]:
    # scenic_try_interrupt_stmt: 'try' &&':' block interrupt_when_block+ except_block* else_block? finally_block?
    """Parse Scenic's try/interrupt-when statement.

    Requires at least one ``interrupt when`` handler; plain except
    handlers, else, and finally are optional.  Builds a Scenic
    ``TryInterrupt`` node rather than a standard ast node.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('try'))
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (b := self.block())
        and
        (iw := self._loop1_81())
        and
        (ex := self._loop0_82(),)
        and
        (el := self.else_block(),)
        and
        (f := self.finally_block(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . TryInterrupt ( body = b , interrupt_when_handlers = iw , except_handlers = ex , orelse = el or [] , finalbody = f or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    return None;
@memoize
def interrupt_when_block(self) -> Optional[Any]:
    # interrupt_when_block: "interrupt" "when" expression &&':' block
    """Parse one ``interrupt when <cond>:`` handler (Scenic-specific).

    "interrupt" and "when" are soft keywords (string-matched), and the
    ':' is forced once the condition has been parsed.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("interrupt"))
        and
        (self.expect("when"))
        and
        (e := self.expression())
        and
        (self.expect_forced(self.expect(':'), "':'"))
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . InterruptWhenHandler ( cond = e , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def except_block(self) -> Optional[ast . ExceptHandler]:
    # except_block: invalid_except_stmt_indent | 'except' expression ['as' NAME] ':' block | 'except' ':' block | invalid_except_stmt
    """Parse one ``except`` clause into ``ast.ExceptHandler``.

    Handles both typed (``except E [as n]:``) and bare (``except:``)
    forms; ``_tmp_83`` parses the optional ``as NAME`` part.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_except_stmt_indent())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('except'))
        and
        (e := self.expression())
        and
        (t := self._tmp_83(),)
        and
        (self.expect(':'))
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . ExceptHandler ( type = e , name = t , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('except'))
        and
        (self.expect(':'))
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . ExceptHandler ( type = None , name = None , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_except_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def except_star_block(self) -> Optional[ast . ExceptHandler]:
    # except_star_block: invalid_except_star_stmt_indent | 'except' '*' expression ['as' NAME] ':' block | invalid_except_stmt
    """Parse one ``except*`` (exception-group) clause.

    Same shape as ``except_block`` but requires '*' and a type; the
    3.11 version gate is applied by the enclosing ``try_stmt`` rule.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_except_star_stmt_indent())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('except'))
        and
        (self.expect('*'))
        and
        (e := self.expression())
        and
        (t := self._tmp_84(),)
        and
        (self.expect(':'))
        and
        (b := self.block())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . ExceptHandler ( type = e , name = t , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_except_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def finally_block(self) -> Optional[list]:
    # finally_block: invalid_finally_stmt | 'finally' &&':' block
    """Parse a ``finally:`` suite and return its statement list.

    The ':' is forced: after 'finally', a missing colon is a hard
    syntax error rather than a backtrack.
    """
    mark = self._mark()
    if self.call_invalid_rules and self.invalid_finally_stmt():
        return None  # pragma: no cover
    self._reset(mark)
    if self.expect('finally') and self.expect_forced(self.expect(':'), "':'"):
        body = self.block()
        if body:
            return body
    self._reset(mark)
    return None
@memoize
def match_stmt(self) -> Optional["ast.Match"]:
    # match_stmt: "match" subject_expr ':' NEWLINE INDENT case_block+ DEDENT | invalid_match_stmt
    """Parse a ``match`` statement into ``ast.Match``.

    "match" is a soft keyword; the body must be an indented block of
    one or more ``case`` clauses (NEWLINE/INDENT/DEDENT tokens).
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("match"))
        and
        (subject := self.subject_expr())
        and
        (self.expect(':'))
        and
        (self.expect('NEWLINE'))
        and
        (self.expect('INDENT'))
        and
        (cases := self._loop1_85())
        and
        (self.expect('DEDENT'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Match ( subject = subject , cases = cases , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_match_stmt())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def subject_expr(self) -> Optional[Any]:
    # subject_expr: star_named_expression ',' star_named_expressions? | named_expression
    """Parse the subject of a ``match`` statement.

    A trailing comma makes the subject a Tuple; either way the result
    is version-gated to Python >= 3.10.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (value := self.star_named_expression())
        and
        (self.expect(','))
        and
        (values := self.star_named_expressions(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 10 ) , "Pattern matching is" , ast . Tuple ( elts = [value] + ( values or [] ) , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
    self._reset(mark)
    if (
        (e := self.named_expression())
    ):
        return self . check_version ( ( 3 , 10 ) , "Pattern matching is" , e );
    self._reset(mark)
    return None;
@memoize
def case_block(self) -> Optional["ast.match_case"]:
    # case_block: invalid_case_block | "case" patterns guard? ':' block
    """Parse one ``case <patterns> [if <guard>]:`` clause.

    "case" is a soft keyword; the guard is optional (note the
    always-truthy ``(guard := self.guard(),)`` tuple).
    """
    mark = self._mark()
    if (
        self.call_invalid_rules
        and
        (self.invalid_case_block())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect("case"))
        and
        (pattern := self.patterns())
        and
        (guard := self.guard(),)
        and
        (self.expect(':'))
        and
        (body := self.block())
    ):
        return ast . match_case ( pattern = pattern , guard = guard , body = body );
    self._reset(mark)
    return None;
@memoize
def guard(self) -> Optional[Any]:
    # guard: 'if' named_expression
    """Parse a case-clause guard: 'if' followed by a named expression."""
    mark = self._mark()
    if self.expect('if'):
        cond = self.named_expression()
        if cond:
            return cond
    self._reset(mark)
    return None
@memoize
def patterns(self) -> Optional[Any]:
    # patterns: open_sequence_pattern | pattern
    """Parse the pattern list of a case clause.

    An unparenthesized comma-separated ("open") sequence becomes a
    ``MatchSequence``; otherwise a single pattern is returned as-is.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (patterns := self.open_sequence_pattern())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchSequence ( patterns = patterns , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (pattern := self.pattern())
    ):
        return pattern;
    self._reset(mark)
    return None;
@memoize
def pattern(self) -> Optional[Any]:
    # pattern: as_pattern | or_pattern
    """Parse a single match pattern: an as-pattern or an or-pattern."""
    mark = self._mark()
    for alternative in (self.as_pattern, self.or_pattern):
        node = alternative()
        if node:
            return node
        self._reset(mark)
    return None
@memoize
def as_pattern(self) -> Optional["ast.MatchAs"]:
    # as_pattern: or_pattern 'as' pattern_capture_target | invalid_as_pattern
    """Parse ``<pattern> as <name>`` into ``ast.MatchAs``."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (pattern := self.or_pattern())
        and
        (self.expect('as'))
        and
        (target := self.pattern_capture_target())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchAs ( pattern = pattern , name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_as_pattern())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def or_pattern(self) -> Optional["ast.MatchOr"]:
    # or_pattern: '|'.closed_pattern+
    """Parse '|'-separated patterns.

    A single pattern is returned unwrapped; two or more become an
    ``ast.MatchOr``.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (patterns := self._gather_86())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchOr ( patterns = patterns , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if len ( patterns ) > 1 else patterns [0];
    self._reset(mark)
    return None;
@memoize
def closed_pattern(self) -> Optional[Any]:
    # closed_pattern: literal_pattern | capture_pattern | wildcard_pattern | value_pattern | group_pattern | sequence_pattern | mapping_pattern | class_pattern
    """Parse a closed (non-open-sequence) pattern.

    Tries each pattern kind in grammar order, rewinding the tokenizer
    after every failed alternative.
    """
    mark = self._mark()
    alternatives = (
        self.literal_pattern,
        self.capture_pattern,
        self.wildcard_pattern,
        self.value_pattern,
        self.group_pattern,
        self.sequence_pattern,
        self.mapping_pattern,
        self.class_pattern,
    )
    for alternative in alternatives:
        node = alternative()
        if node:
            return node
        self._reset(mark)
    return None
@memoize
def literal_pattern(self) -> Optional[Any]:
    # literal_pattern: signed_number !('+' | '-') | complex_number | strings | 'None' | 'True' | 'False'
    """Parse a literal pattern.

    Numbers/strings become ``MatchValue``; None/True/False become
    ``MatchSingleton``.  The negative lookahead after ``signed_number``
    keeps a complex literal like ``1+2j`` from being split.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (value := self.signed_number())
        and
        (self.negative_lookahead(self._tmp_88, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (value := self.complex_number())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (value := self.strings())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchValue ( value = value , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('None'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchSingleton ( value = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('True'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchSingleton ( value = True , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('False'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchSingleton ( value = False , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def literal_expr(self) -> Optional[Any]:
    # literal_expr: signed_number !('+' | '-') | complex_number | strings | 'None' | 'True' | 'False'
    """Parse a literal as a plain expression node (used inside
    patterns, e.g. mapping-pattern keys).

    Unlike ``literal_pattern``, numbers/strings pass through unwrapped
    and None/True/False become ``ast.Constant``.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (signed_number := self.signed_number())
        and
        (self.negative_lookahead(self._tmp_89, ))
    ):
        return signed_number;
    self._reset(mark)
    if (
        (complex_number := self.complex_number())
    ):
        return complex_number;
    self._reset(mark)
    if (
        (strings := self.strings())
    ):
        return strings;
    self._reset(mark)
    if (
        (self.expect('None'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Constant ( value = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('True'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Constant ( value = True , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('False'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Constant ( value = False , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def complex_number(self) -> Optional[Any]:
    # complex_number: signed_real_number '+' imaginary_number | signed_real_number '-' imaginary_number
    """Parse a complex literal like ``1+2j`` or ``-1.5-3j``.

    Represented as a ``BinOp`` (Add/Sub) of the real part and the
    imaginary constant, mirroring CPython's pattern grammar.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (real := self.signed_real_number())
        and
        (self.expect('+'))
        and
        (imag := self.imaginary_number())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BinOp ( left = real , op = ast . Add ( ) , right = imag , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (real := self.signed_real_number())
        and
        (self.expect('-'))
        and
        (imag := self.imaginary_number())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BinOp ( left = real , op = ast . Sub ( ) , right = imag , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def signed_number(self) -> Optional[Any]:
    # signed_number: NUMBER | '-' NUMBER
    """Parse NUMBER or '-' NUMBER into Constant or UnaryOp(USub, Constant)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.number())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # literal_eval of the token text yields the int/float/complex value.
        return ast . Constant ( value = ast . literal_eval ( a . string ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('-'))
        and
        (a := self.number())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # The inner Constant spans only the NUMBER token; the UnaryOp spans the '-' too.
        return ast . UnaryOp ( op = ast . USub ( ) , operand = ast . Constant ( value = ast . literal_eval ( a . string ) , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    return None;
@memoize
def signed_real_number(self) -> Optional[Any]:
    # signed_real_number: real_number | '-' real_number
    """Parse a real number with an optional leading minus sign."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (real_number := self.real_number())
    ):
        return real_number;
    self._reset(mark)
    if (
        (self.expect('-'))
        and
        (real := self.real_number())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . UnaryOp ( op = ast . USub ( ) , operand = real , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def real_number(self) -> Optional[ast . Constant]:
    # real_number: NUMBER
    """Parse a NUMBER token, validated by ``ensure_real`` (rejects imaginary)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (real := self.number())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Constant ( value = self . ensure_real ( real ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def imaginary_number(self) -> Optional[ast . Constant]:
    # imaginary_number: NUMBER
    """Parse a NUMBER token, validated by ``ensure_imaginary`` (requires 'j' suffix)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (imag := self.number())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Constant ( value = self . ensure_imaginary ( imag ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def capture_pattern(self) -> Optional[Any]:
    # capture_pattern: pattern_capture_target
    """Parse a bare-name capture pattern as ``MatchAs(pattern=None, name=target)``."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (target := self.pattern_capture_target())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchAs ( pattern = None , name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def pattern_capture_target(self) -> Optional[str]:
    # pattern_capture_target: !"_" NAME !('.' | '(' | '=')
    """Return a NAME string valid as a capture target, or None.

    Excludes the wildcard ``_`` and names followed by '.', '(' or '='
    (those belong to value/class patterns or keyword patterns instead).
    """
    mark = self._mark()
    if (
        (self.negative_lookahead(self.expect, "_"))
        and
        (name := self.name())
        and
        (self.negative_lookahead(self._tmp_90, ))
    ):
        return name . string;
    self._reset(mark)
    return None;
@memoize
def wildcard_pattern(self) -> Optional["ast.MatchAs"]:
    # wildcard_pattern: "_"
    """Parse the wildcard pattern ``_`` as a bare ``ast.MatchAs``.

    Fix: the generated action passed ``target=None``, but ``ast.MatchAs``
    has fields ``pattern`` and ``name`` — the unknown keyword was stored
    as a stray attribute (and raises a DeprecationWarning on Python 3.13+)
    while ``name`` was left unset.  Pass ``name=None`` explicitly; for a
    wildcard both ``pattern`` and ``name`` must be None.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("_"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchAs ( pattern = None , name = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def value_pattern(self) -> Optional["ast.MatchValue"]:
    # value_pattern: attr !('.' | '(' | '=')
    """Parse a dotted-name value pattern as ``MatchValue``; None on failure."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (attr := self.attr())
        and
        # The dotted name must not continue ('.'), open a class pattern ('('),
        # or start a keyword pattern ('=').
        (self.negative_lookahead(self._tmp_91, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchValue ( value = attr , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize_left_rec
def attr(self) -> Optional[ast . Attribute]:
    # attr: name_or_attr '.' NAME
    """Parse dotted attribute access used in match patterns.

    Left-recursive via ``name_or_attr``; ``memoize_left_rec`` grows the
    match iteratively, so ``a.b.c`` nests as ``Attribute(Attribute(a, b), c)``.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (value := self.name_or_attr())
        and
        (self.expect('.'))
        and
        (attr := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Attribute ( value = value , attr = attr . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@logger
def name_or_attr(self) -> Optional[Any]:
    # name_or_attr: attr | NAME
    """Parse a dotted attribute chain or a plain NAME (as ``ast.Name``)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (attr := self.attr())
    ):
        return attr;
    self._reset(mark)
    if (
        (name := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Name ( id = name . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def group_pattern(self) -> Optional[Any]:
    # group_pattern: '(' pattern ')'
    """Parse a parenthesized pattern; the parentheses add no AST node."""
    mark = self._mark()
    if (
        (self.expect('('))
        and
        (pattern := self.pattern())
        and
        (self.expect(')'))
    ):
        return pattern;
    self._reset(mark)
    return None;
@memoize
def sequence_pattern(self) -> Optional["ast.MatchSequence"]:
    # sequence_pattern: '[' maybe_sequence_pattern? ']' | '(' open_sequence_pattern? ')'
    """Parse a bracketed or parenthesized sequence pattern as ``MatchSequence``."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('['))
        and
        # Trailing comma wraps the optional item in a 1-tuple, so the
        # conjunction stays truthy even when the inner rule returns None.
        (patterns := self.maybe_sequence_pattern(),)
        and
        (self.expect(']'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchSequence ( patterns = patterns or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('('))
        and
        (patterns := self.open_sequence_pattern(),)
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchSequence ( patterns = patterns or [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def open_sequence_pattern(self) -> Optional[Any]:
    # open_sequence_pattern: maybe_star_pattern ',' maybe_sequence_pattern?
    """Parse an unbracketed sequence pattern (requires at least one comma)."""
    mark = self._mark()
    if (
        (pattern := self.maybe_star_pattern())
        and
        (self.expect(','))
        and
        # 1-tuple wrapping keeps an absent tail from failing the match.
        (patterns := self.maybe_sequence_pattern(),)
    ):
        return [pattern] + ( patterns or [] );
    self._reset(mark)
    return None;
@memoize
def maybe_sequence_pattern(self) -> Optional[Any]:
    # maybe_sequence_pattern: ','.maybe_star_pattern+ ','?
    """Parse a comma-separated list of (possibly starred) patterns, optional trailing comma."""
    mark = self._mark()
    if (
        (patterns := self._gather_92())
        and
        # 1-tuple wrapping makes the trailing comma optional.
        (self.expect(','),)
    ):
        return patterns;
    self._reset(mark)
    return None;
@memoize
def maybe_star_pattern(self) -> Optional[Any]:
    # maybe_star_pattern: star_pattern | pattern
    """Parse either a starred pattern or a plain pattern."""
    mark = self._mark()
    if (
        (star_pattern := self.star_pattern())
    ):
        return star_pattern;
    self._reset(mark)
    if (
        (pattern := self.pattern())
    ):
        return pattern;
    self._reset(mark)
    return None;
@memoize
def star_pattern(self) -> Optional[Any]:
    # star_pattern: '*' pattern_capture_target | '*' wildcard_pattern
    """Parse ``*name`` or ``*_`` as ``ast.MatchStar``.

    Fix: the ``*_`` alternative passed ``target=None``, but ``ast.MatchStar``
    has a single field ``name`` — the unknown keyword was stored as a stray
    attribute (DeprecationWarning on Python 3.13+) while ``name`` was left
    unset.  Pass ``name=None`` explicitly, mirroring the first alternative.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('*'))
        and
        (target := self.pattern_capture_target())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchStar ( name = target , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (self.wildcard_pattern())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchStar ( name = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def mapping_pattern(self) -> Optional[Any]:
    # mapping_pattern: '{' '}' | '{' double_star_pattern ','? '}' | '{' items_pattern ',' double_star_pattern ','? '}' | '{' items_pattern ','? '}'
    """Parse a mapping pattern as ``ast.MatchMapping``.

    ``items`` is a list of (key, pattern) pairs; ``rest`` captures a
    trailing ``**name`` if present.  Alternatives are ordered from empty
    to most specific; each failure rewinds to ``mark``.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('{'))
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchMapping ( keys = [] , patterns = [] , rest = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('{'))
        and
        (rest := self.double_star_pattern())
        and
        # 1-tuple wrapping makes the trailing comma optional.
        (self.expect(','),)
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchMapping ( keys = [] , patterns = [] , rest = rest , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('{'))
        and
        (items := self.items_pattern())
        and
        (self.expect(','))
        and
        (rest := self.double_star_pattern())
        and
        (self.expect(','),)
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchMapping ( keys = [k for k , _ in items] , patterns = [p for _ , p in items] , rest = rest , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    if (
        (self.expect('{'))
        and
        (items := self.items_pattern())
        and
        (self.expect(','),)
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchMapping ( keys = [k for k , _ in items] , patterns = [p for _ , p in items] , rest = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    return None;
@memoize
def items_pattern(self) -> Optional[Any]:
    # items_pattern: ','.key_value_pattern+
    """Parse a comma-separated list of key/value patterns for a mapping pattern."""
    mark = self._mark()
    if (
        (_gather_94 := self._gather_94())
    ):
        return _gather_94;
    self._reset(mark)
    return None;
@memoize
def key_value_pattern(self) -> Optional[Any]:
    # key_value_pattern: (literal_expr | attr) ':' pattern
    """Parse one ``key: pattern`` entry; returns a (key, pattern) tuple."""
    mark = self._mark()
    if (
        (key := self._tmp_96())
        and
        (self.expect(':'))
        and
        (pattern := self.pattern())
    ):
        return ( key , pattern );
    self._reset(mark)
    return None;
@memoize
def double_star_pattern(self) -> Optional[Any]:
    # double_star_pattern: '**' pattern_capture_target
    """Parse ``**name`` in a mapping pattern; returns the capture name string."""
    mark = self._mark()
    if (
        (self.expect('**'))
        and
        (target := self.pattern_capture_target())
    ):
        return target;
    self._reset(mark)
    return None;
@memoize
def class_pattern(self) -> Optional["ast.MatchClass"]:
    # class_pattern: name_or_attr '(' ')' | name_or_attr '(' positional_patterns ','? ')' | name_or_attr '(' keyword_patterns ','? ')' | name_or_attr '(' positional_patterns ',' keyword_patterns ','? ')' | invalid_class_pattern
    """Parse a class pattern as ``ast.MatchClass``.

    Alternatives cover: no arguments, positional-only, keyword-only, and
    positional + keyword arguments; ``invalid_class_pattern`` runs only in
    the second (error-reporting) pass, when ``call_invalid_rules`` is set.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (cls := self.name_or_attr())
        and
        (self.expect('('))
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchClass ( cls = cls , patterns = [] , kwd_attrs = [] , kwd_patterns = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (cls := self.name_or_attr())
        and
        (self.expect('('))
        and
        (patterns := self.positional_patterns())
        and
        # 1-tuple wrapping makes the trailing comma optional.
        (self.expect(','),)
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchClass ( cls = cls , patterns = patterns , kwd_attrs = [] , kwd_patterns = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (cls := self.name_or_attr())
        and
        (self.expect('('))
        and
        (keywords := self.keyword_patterns())
        and
        (self.expect(','),)
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchClass ( cls = cls , patterns = [] , kwd_attrs = [k for k , _ in keywords] , kwd_patterns = [p for _ , p in keywords] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    if (
        (cls := self.name_or_attr())
        and
        (self.expect('('))
        and
        (patterns := self.positional_patterns())
        and
        (self.expect(','))
        and
        (keywords := self.keyword_patterns())
        and
        (self.expect(','),)
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . MatchClass ( cls = cls , patterns = patterns , kwd_attrs = [k for k , _ in keywords] , kwd_patterns = [p for _ , p in keywords] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_class_pattern())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def positional_patterns(self) -> Optional[Any]:
    # positional_patterns: ','.pattern+
    """Parse the comma-separated positional patterns of a class pattern."""
    mark = self._mark()
    if (
        (args := self._gather_97())
    ):
        return args;
    self._reset(mark)
    return None;
@memoize
def keyword_patterns(self) -> Optional[Any]:
    # keyword_patterns: ','.keyword_pattern+
    """Parse the comma-separated keyword patterns of a class pattern."""
    mark = self._mark()
    if (
        (_gather_99 := self._gather_99())
    ):
        return _gather_99;
    self._reset(mark)
    return None;
@memoize
def keyword_pattern(self) -> Optional[Any]:
    # keyword_pattern: NAME '=' pattern
    """Parse one ``name=pattern`` entry; returns an (attr-name, pattern) tuple."""
    mark = self._mark()
    if (
        (arg := self.name())
        and
        (self.expect('='))
        and
        (value := self.pattern())
    ):
        return ( arg . string , value );
    self._reset(mark)
    return None;
@memoize
def type_alias(self) -> Optional["ast.TypeAlias"]:
    # type_alias: "type" NAME type_params? '=' expression
    """Parse a PEP 695 ``type X = ...`` statement.

    ``check_version`` rejects the construct when the target Python version
    is below 3.12; the node itself is only built when the *running*
    interpreter is 3.12+, since ``ast.TypeAlias`` does not exist earlier.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("type"))
        and
        (n := self.name())
        and
        # 1-tuple wrapping makes the type-parameter list optional.
        (t := self.type_params(),)
        and
        (self.expect('='))
        and
        (b := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 12 ) , "Type statement is" , ( ast . TypeAlias ( name = ast . Name ( id = n . string , ctx = Store , lineno = n . start [0] , col_offset = n . start [1] , end_lineno = n . end [0] , end_col_offset = n . end [1] , ) , type_params = t or [] , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 12 ) else None ) );
    self._reset(mark)
    return None;
@memoize
def type_params(self) -> Optional[list]:
    # type_params: '[' type_param_seq ']'
    """Parse a PEP 695 ``[T, ...]`` type-parameter list (3.12+ only)."""
    mark = self._mark()
    if (
        (self.expect('['))
        and
        (t := self.type_param_seq())
        and
        (self.expect(']'))
    ):
        return self . check_version ( ( 3 , 12 ) , "Type parameter lists are" , t );
    self._reset(mark)
    return None;
@memoize
def type_param_seq(self) -> Optional[Any]:
    # type_param_seq: ','.type_param+ ','?
    """Parse a comma-separated list of type parameters, optional trailing comma."""
    mark = self._mark()
    if (
        (a := self._gather_101())
        and
        (self.expect(','),)
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def type_param(self) -> Optional[Any]:
    # type_param: NAME type_param_bound? | '*' NAME ":" expression | '*' NAME | '**' NAME ":" expression | '**' NAME
    """Parse one PEP 695 type parameter: TypeVar, TypeVarTuple or ParamSpec.

    Bounds/constraints after ``*``/``**`` names are syntax errors and are
    reported via ``raise_syntax_error_starting_from``.  On interpreters
    older than 3.12 (where the ast classes don't exist) a placeholder
    ``object()`` is returned instead of a node.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
        and
        (b := self.type_param_bound(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . TypeVar ( name = a . string , bound = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 12 ) else object ( );
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (self.name())
        and
        (colon := self.expect(":"))
        and
        (e := self.expression())
    ):
        # A tuple after ':' means constraints; anything else is a bound.
        return self . raise_syntax_error_starting_from ( "cannot use constraints with TypeVarTuple" if isinstance ( e , ast . Tuple ) else "cannot use bound with TypeVarTuple" , colon );
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (a := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . TypeVarTuple ( name = a . string , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 12 ) else object ( );
    self._reset(mark)
    if (
        (self.expect('**'))
        and
        (self.name())
        and
        (colon := self.expect(":"))
        and
        (e := self.expression())
    ):
        return self . raise_syntax_error_starting_from ( "cannot use constraints with ParamSpec" if isinstance ( e , ast . Tuple ) else "cannot use bound with ParamSpec" , colon );
    self._reset(mark)
    if (
        (self.expect('**'))
        and
        (a := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . ParamSpec ( name = a . string , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 12 ) else object ( );
    self._reset(mark)
    return None;
@memoize
def type_param_bound(self) -> Optional[Any]:
    # type_param_bound: ":" expression
    """Parse the ``: bound`` clause of a type parameter; returns the expression."""
    mark = self._mark()
    if (
        (self.expect(":"))
        and
        (e := self.expression())
    ):
        return e;
    self._reset(mark)
    return None;
@memoize
def expressions(self) -> Optional[Any]:
    # expressions: expression ((',' expression))+ ','? | expression ',' | expression
    """Parse one or more comma-separated expressions.

    Two or more items, or a single item with a trailing comma, build an
    ``ast.Tuple``; a lone expression is returned unchanged.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.expression())
        and
        (b := self._loop1_103())
        and
        (self.expect(','),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Tuple ( elts = [a] + b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.expression())
        and
        (self.expect(','))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Tuple ( elts = [a] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (expression := self.expression())
    ):
        return expression;
    self._reset(mark)
    return None;
@memoize
def expression(self) -> Optional[Any]:
    # expression: invalid_scenic_instance_creation | invalid_expression | invalid_legacy_expression | disjunction 'if' disjunction 'else' disjunction | disjunction | lambdef
    """Parse a full expression: conditional expression, disjunction or lambda.

    The ``invalid_*`` alternatives only run during the second parsing pass
    (``call_invalid_rules``) to produce targeted syntax-error messages.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_scenic_instance_creation())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_expression())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_legacy_expression())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (a := self.disjunction())
        and
        (self.expect('if'))
        and
        (b := self.disjunction())
        and
        (self.expect('else'))
        and
        (c := self.disjunction())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . IfExp ( body = a , test = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (disjunction := self.disjunction())
    ):
        return disjunction;
    self._reset(mark)
    if (
        (lambdef := self.lambdef())
    ):
        return lambdef;
    self._reset(mark)
    return None;
@memoize
def scenic_temporal_expression(self) -> Optional[Any]:
    # scenic_temporal_expression: invalid_expression | invalid_legacy_expression | scenic_until 'if' scenic_until 'else' scenic_until | scenic_until | lambdef
    """Parse a Scenic temporal expression; mirrors ``expression`` but with
    ``scenic_until`` (temporal operators allowed) in place of ``disjunction``.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_expression())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_legacy_expression())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (a := self.scenic_until())
        and
        (self.expect('if'))
        and
        (b := self.scenic_until())
        and
        (self.expect('else'))
        and
        (c := self.scenic_until())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . IfExp ( body = a , test = b , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (scenic_until := self.scenic_until())
    ):
        return scenic_until;
    self._reset(mark)
    if (
        (lambdef := self.lambdef())
    ):
        return lambdef;
    self._reset(mark)
    return None;
@memoize
def yield_expr(self) -> Optional[Any]:
    # yield_expr: 'yield' 'from' expression | 'yield' star_expressions?
    """Parse ``yield from x`` (YieldFrom) or ``yield [x, ...]`` (Yield)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('yield'))
        and
        (self.expect('from'))
        and
        (a := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . YieldFrom ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('yield'))
        and
        # 1-tuple wrapping: a bare ``yield`` (value None) still matches.
        (a := self.star_expressions(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Yield ( value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def star_expressions(self) -> Optional[Any]:
    # star_expressions: star_expression ((',' star_expression))+ ','? | star_expression ',' | star_expression
    """Parse comma-separated (possibly starred) expressions.

    Multiple items or a trailing comma produce an ``ast.Tuple``; a single
    bare item is returned as-is.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.star_expression())
        and
        (b := self._loop1_104())
        and
        (self.expect(','),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Tuple ( elts = [a] + b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.star_expression())
        and
        (self.expect(','))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Tuple ( elts = [a] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (star_expression := self.star_expression())
    ):
        return star_expression;
    self._reset(mark)
    return None;
@memoize
def star_expression(self) -> Optional[Any]:
    # star_expression: '*' bitwise_or | expression
    """Parse ``*expr`` as ``ast.Starred`` or fall back to a plain expression."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('*'))
        and
        (a := self.bitwise_or())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (expression := self.expression())
    ):
        return expression;
    self._reset(mark)
    return None;
@memoize
def star_named_expressions(self) -> Optional[Any]:
    # star_named_expressions: ','.star_named_expression+ ','?
    """Parse a comma-separated list of (possibly starred) named expressions."""
    mark = self._mark()
    if (
        (a := self._gather_105())
        and
        (self.expect(','),)
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def star_named_expression(self) -> Optional[Any]:
    # star_named_expression: '*' bitwise_or | named_expression
    """Parse ``*expr`` as ``ast.Starred`` or fall back to a named expression."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('*'))
        and
        (a := self.bitwise_or())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (named_expression := self.named_expression())
    ):
        return named_expression;
    self._reset(mark)
    return None;
@memoize
def assignment_expression(self) -> Optional[Any]:
    # assignment_expression: NAME ':=' ~ expression
    """Parse a walrus expression ``name := value`` as ``ast.NamedExpr``.

    The grammar cut (``~``) means that once ':=' is consumed, failure of
    the right-hand side aborts the rule without trying other alternatives.
    ``check_version`` rejects the operator for target versions below 3.8.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    cut = False
    if (
        (a := self.name())
        and
        (self.expect(':='))
        and
        (cut := True)
        and
        (b := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return self . check_version ( ( 3 , 8 ) , "The ':=' operator is" , ast . NamedExpr ( target = ast . Name ( id = a . string , ctx = Store , lineno = a . start [0] , col_offset = a . start [1] , end_lineno = a . end [0] , end_col_offset = a . end [1] ) , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , ) );
    self._reset(mark)
    if cut:
        return None;
    return None;
@memoize
def named_expression(self) -> Optional[Any]:
    # named_expression: assignment_expression | invalid_named_expression | expression !':='
    """Parse an expression that may be a walrus assignment.

    The trailing negative lookahead for ':=' prevents a plain expression
    from matching where a (possibly invalid) walrus should be reported.
    """
    mark = self._mark()
    if (
        (assignment_expression := self.assignment_expression())
    ):
        return assignment_expression;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_named_expression())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (a := self.expression())
        and
        (self.negative_lookahead(self.expect, ':='))
    ):
        return a;
    self._reset(mark)
    return None;
@logger
def scenic_until(self) -> Optional[Any]:
    # scenic_until: invalid_scenic_until | scenic_above_until 'until' scenic_above_until | scenic_above_until
    """Parse Scenic's binary temporal ``until`` operator (``s.UntilOp``)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_scenic_until())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (a := self.scenic_above_until())
        and
        (self.expect('until'))
        and
        (b := self.scenic_above_until())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . UntilOp ( a , b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (scenic_above_until := self.scenic_above_until())
    ):
        return scenic_above_until;
    self._reset(mark)
    return None;
@logger
def scenic_above_until(self) -> Optional[Any]:
    # scenic_above_until: scenic_temporal_prefix | scenic_implication
    """Parse the precedence level above ``until``: a temporal prefix or an implication."""
    mark = self._mark()
    if (
        (scenic_temporal_prefix := self.scenic_temporal_prefix())
    ):
        return scenic_temporal_prefix;
    self._reset(mark)
    if (
        (scenic_implication := self.scenic_implication())
    ):
        return scenic_implication;
    self._reset(mark)
    return None;
@memoize
def scenic_temporal_prefix(self) -> Optional[Any]:
    # scenic_temporal_prefix: "next" scenic_above_until | "eventually" scenic_above_until | "always" scenic_above_until
    """Parse a unary temporal prefix operator: next / eventually / always."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("next"))
        and
        (e := self.scenic_above_until())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Next ( e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("eventually"))
        and
        (e := self.scenic_above_until())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Eventually ( e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("always"))
        and
        (e := self.scenic_above_until())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Always ( e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@logger
def scenic_implication(self) -> Optional[Any]:
    # scenic_implication: invalid_scenic_implication | scenic_temporal_disjunction "implies" (scenic_temporal_prefix | scenic_temporal_disjunction) | scenic_temporal_disjunction
    """Parse Scenic's ``implies`` operator (``s.ImpliesOp``), or fall through."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        self.call_invalid_rules
        and
        (self.invalid_scenic_implication())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (a := self.scenic_temporal_disjunction())
        and
        (self.expect("implies"))
        and
        (b := self._tmp_107())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . ImpliesOp ( a , b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (scenic_temporal_disjunction := self.scenic_temporal_disjunction())
    ):
        return scenic_temporal_disjunction;
    self._reset(mark)
    return None;
@memoize
def disjunction(self) -> Optional[Any]:
    # disjunction: conjunction (('or' conjunction))+ | conjunction
    """Parse ``a or b or ...`` as one flat ``BoolOp(Or)``; lone conjunction passes through."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.conjunction())
        and
        (b := self._loop1_108())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BoolOp ( op = ast . Or ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (conjunction := self.conjunction())
    ):
        return conjunction;
    self._reset(mark)
    return None;
@memoize
def scenic_temporal_disjunction(self) -> Optional[Any]:
    # scenic_temporal_disjunction: scenic_temporal_conjunction (('or' (scenic_temporal_prefix | scenic_temporal_conjunction)))+ | scenic_temporal_conjunction
    """Temporal variant of ``disjunction``; operands may be temporal prefixes."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.scenic_temporal_conjunction())
        and
        (b := self._loop1_109())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BoolOp ( op = ast . Or ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (scenic_temporal_conjunction := self.scenic_temporal_conjunction())
    ):
        return scenic_temporal_conjunction;
    self._reset(mark)
    return None;
@memoize
def conjunction(self) -> Optional[Any]:
    # conjunction: inversion (('and' inversion))+ | inversion
    """Parse ``a and b and ...`` as one flat ``BoolOp(And)``; lone inversion passes through."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.inversion())
        and
        (b := self._loop1_110())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BoolOp ( op = ast . And ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (inversion := self.inversion())
    ):
        return inversion;
    self._reset(mark)
    return None;
@memoize
def scenic_temporal_conjunction(self) -> Optional[Any]:
    """Parse an `and` chain that may contain Scenic temporal operators on the right."""
    # scenic_temporal_conjunction: scenic_temporal_inversion (('and' (scenic_temporal_prefix | scenic_temporal_inversion)))+ | scenic_temporal_inversion
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.scenic_temporal_inversion())
        and
        (b := self._loop1_111())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BoolOp ( op = ast . And ( ) , values = [a] + b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (scenic_temporal_inversion := self.scenic_temporal_inversion())
    ):
        return scenic_temporal_inversion;
    self._reset(mark)
    return None;
@memoize
def inversion(self) -> Optional[Any]:
    """Parse `not x`; the lookahead excludes `not visible ...` which is a Scenic operator."""
    # inversion: 'not' !("visible" inversion) inversion | comparison
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('not'))
        and
        (self.negative_lookahead(self._tmp_112, ))
        and
        (a := self.inversion())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . UnaryOp ( op = ast . Not ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (comparison := self.comparison())
    ):
        return comparison;
    self._reset(mark)
    return None;
@memoize
def scenic_temporal_inversion(self) -> Optional[Any]:
    """Parse `not` in a temporal context, also allowing temporal prefixes and groups."""
    # scenic_temporal_inversion: 'not' !("visible" scenic_temporal_inversion) (scenic_temporal_prefix | scenic_temporal_inversion) | scenic_temporal_group | comparison
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('not'))
        and
        (self.negative_lookahead(self._tmp_113, ))
        and
        (a := self._tmp_114())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . UnaryOp ( op = ast . Not ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (scenic_temporal_group := self.scenic_temporal_group())
    ):
        return scenic_temporal_group;
    self._reset(mark)
    if (
        (comparison := self.comparison())
    ):
        return comparison;
    self._reset(mark)
    return None;
@memoize
def scenic_temporal_group(self) -> Optional[Any]:
    """Parse a parenthesized temporal expression; the positive lookahead restricts what may follow."""
    # scenic_temporal_group: '(' scenic_temporal_expression ')' &('until' | 'or' | 'and' | ')' | ';' | NEWLINE)
    mark = self._mark()
    if (
        (self.expect('('))
        and
        (a := self.scenic_temporal_expression())
        and
        (self.expect(')'))
        and
        (self.positive_lookahead(self._tmp_115, ))
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def scenic_new_expr(self) -> Optional[Any]:
    """Parse `new ClassName [specifiers]` into an s.New node (specifiers optional)."""
    # scenic_new_expr: 'new' NAME scenic_specifiers?
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('new'))
        and
        (n := self.name())
        and
        (ss := self.scenic_specifiers(),)  # trailing comma: optional item, always truthy
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . New ( className = n . string , specifiers = ss , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_specifiers(self) -> Optional[Any]:
    """Parse a comma-separated list of one or more specifiers."""
    # scenic_specifiers: ','.scenic_specifier+
    mark = self._mark()
    if (
        (ss := self._gather_116())
    ):
        return ss;
    self._reset(mark)
    return None;
@memoize
def scenic_specifier(self) -> Optional[Any]:
    """Parse one specifier; the invalid-rule branch only runs during error reporting."""
    # scenic_specifier: scenic_valid_specifier | invalid_scenic_specifier
    mark = self._mark()
    if (
        (scenic_valid_specifier := self.scenic_valid_specifier())
    ):
        return scenic_valid_specifier;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_scenic_specifier())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def scenic_valid_specifier(self) -> Optional[Any]:
    """Parse one Scenic object specifier (`with`, `at`, `offset by`, `facing`, ...).

    Tries each grammar alternative in order, resetting the tokenizer mark on
    failure, and returns the matching s.* specifier AST node (or None).
    Every alternative attaches source-location info from the first token to
    the last non-whitespace token consumed.

    Fix: the `beyond ... by ... [from ...]` alternative previously returned
    s.BeyondSpecifier without location kwargs, unlike all of its siblings,
    losing position info for downstream error reporting.
    """
    # scenic_valid_specifier: 'with' NAME expression | 'at' expression | "offset" 'by' expression | "offset" "along" expression 'by' expression | scenic_specifier_position_direction expression ['by' expression] | "beyond" expression 'by' expression ['from' expression] | "visible" ['from' expression] | 'not' "visible" ['from' expression] | 'in' expression | 'on' expression | "contained" 'in' expression | "following" expression ['from' expression] 'for' expression | "facing" "toward" expression | "facing" "away" "from" expression | "facing" "directly" "toward" expression | "facing" "directly" "away" "from" expression | "facing" expression | "apparently" "facing" expression ['from' expression]
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('with'))
        and
        (p := self.name())
        and
        (v := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . WithSpecifier ( prop = p . string , value = v , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('at'))
        and
        (position := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . AtSpecifier ( position = position , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("offset"))
        and
        (self.expect('by'))
        and
        (o := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . OffsetBySpecifier ( offset = o , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("offset"))
        and
        (self.expect("along"))
        and
        (d := self.expression())
        and
        (self.expect('by'))
        and
        (o := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . OffsetAlongSpecifier ( direction = d , offset = o , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (direction := self.scenic_specifier_position_direction())
        and
        (position := self.expression())
        and
        (distance := self._tmp_118(),)  # optional `by <distance>`
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . DirectionOfSpecifier ( direction = direction , position = position , distance = distance , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("beyond"))
        and
        (v := self.expression())
        and
        (self.expect('by'))
        and
        (o := self.expression())
        and
        (b := self._tmp_119(),)  # optional `from <base>`
    ):
        # FIX: attach location info like every other alternative in this rule.
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . BeyondSpecifier ( position = v , offset = o , base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("visible"))
        and
        (b := self._tmp_120(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . VisibleSpecifier ( base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('not'))
        and
        (self.expect("visible"))
        and
        (b := self._tmp_121(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . NotVisibleSpecifier ( base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('in'))
        and
        (r := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . InSpecifier ( region = r , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('on'))
        and
        (r := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . OnSpecifier ( region = r , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("contained"))
        and
        (self.expect('in'))
        and
        (r := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . ContainedInSpecifier ( region = r , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("following"))
        and
        (f := self.expression())
        and
        (b := self._tmp_122(),)
        and
        (self.expect('for'))
        and
        (d := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . FollowingSpecifier ( field = f , distance = d , base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("facing"))
        and
        (self.expect("toward"))
        and
        (p := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . FacingTowardSpecifier ( position = p , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("facing"))
        and
        (self.expect("away"))
        and
        (self.expect("from"))
        and
        (p := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . FacingAwayFromSpecifier ( position = p , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("facing"))
        and
        (self.expect("directly"))
        and
        (self.expect("toward"))
        and
        (p := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . FacingDirectlyTowardSpecifier ( position = p , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("facing"))
        and
        (self.expect("directly"))
        and
        (self.expect("away"))
        and
        (self.expect("from"))
        and
        (p := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . FacingDirectlyAwayFromSpecifier ( position = p , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("facing"))
        and
        (h := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . FacingSpecifier ( heading = h , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("apparently"))
        and
        (self.expect("facing"))
        and
        (h := self.expression())
        and
        (v := self._tmp_123(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . ApparentlyFacingSpecifier ( heading = h , base = v , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_specifier_position_direction(self) -> Optional[Any]:
    """Parse a positional-direction keyword into the matching s.* direction node."""
    # scenic_specifier_position_direction: "left" "of" | "right" "of" | "ahead" "of" | "behind" | "above" | "below"
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("left"))
        and
        (self.expect("of"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . LeftOf ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("right"))
        and
        (self.expect("of"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RightOf ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("ahead"))
        and
        (self.expect("of"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . AheadOf ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("behind"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Behind ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("above"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Above ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("below"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Below ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def comparison(self) -> Optional[Any]:
    """Parse a chained comparison into one ast.Compare from (op, operand) pairs."""
    # comparison: bitwise_or compare_op_bitwise_or_pair+ | bitwise_or
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.bitwise_or())
        and
        (b := self._loop1_124())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Compare ( left = a , ops = self . get_comparison_ops ( b ) , comparators = self . get_comparators ( b ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (bitwise_or := self.bitwise_or())
    ):
        return bitwise_or;
    self._reset(mark)
    return None;
@memoize
def compare_op_bitwise_or_pair(self) -> Optional[Any]:
    """Parse one comparison-operator + operand pair, trying each operator in order."""
    # compare_op_bitwise_or_pair: eq_bitwise_or | noteq_bitwise_or | lte_bitwise_or | lt_bitwise_or | gte_bitwise_or | gt_bitwise_or | notin_bitwise_or | in_bitwise_or | isnot_bitwise_or | is_bitwise_or
    mark = self._mark()
    if (
        (eq_bitwise_or := self.eq_bitwise_or())
    ):
        return eq_bitwise_or;
    self._reset(mark)
    if (
        (noteq_bitwise_or := self.noteq_bitwise_or())
    ):
        return noteq_bitwise_or;
    self._reset(mark)
    if (
        (lte_bitwise_or := self.lte_bitwise_or())
    ):
        return lte_bitwise_or;
    self._reset(mark)
    if (
        (lt_bitwise_or := self.lt_bitwise_or())
    ):
        return lt_bitwise_or;
    self._reset(mark)
    if (
        (gte_bitwise_or := self.gte_bitwise_or())
    ):
        return gte_bitwise_or;
    self._reset(mark)
    if (
        (gt_bitwise_or := self.gt_bitwise_or())
    ):
        return gt_bitwise_or;
    self._reset(mark)
    if (
        (notin_bitwise_or := self.notin_bitwise_or())
    ):
        return notin_bitwise_or;
    self._reset(mark)
    if (
        (in_bitwise_or := self.in_bitwise_or())
    ):
        return in_bitwise_or;
    self._reset(mark)
    if (
        (isnot_bitwise_or := self.isnot_bitwise_or())
    ):
        return isnot_bitwise_or;
    self._reset(mark)
    if (
        (is_bitwise_or := self.is_bitwise_or())
    ):
        return is_bitwise_or;
    self._reset(mark)
    return None;
@memoize
def eq_bitwise_or(self) -> Optional[Any]:
    """Parse `== x`, returning the (ast.Eq, operand) pair."""
    # eq_bitwise_or: '==' bitwise_or
    mark = self._mark()
    if (
        (self.expect('=='))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . Eq ( ) , a );
    self._reset(mark)
    return None;
@memoize
def noteq_bitwise_or(self) -> Optional[tuple]:
    """Parse `!= x`, returning the (ast.NotEq, operand) pair."""
    # noteq_bitwise_or: '!=' bitwise_or
    mark = self._mark()
    if (
        (self.expect('!='))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . NotEq ( ) , a );
    self._reset(mark)
    return None;
@memoize
def lte_bitwise_or(self) -> Optional[Any]:
    """Parse `<= x`, returning the (ast.LtE, operand) pair."""
    # lte_bitwise_or: '<=' bitwise_or
    mark = self._mark()
    if (
        (self.expect('<='))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . LtE ( ) , a );
    self._reset(mark)
    return None;
@memoize
def lt_bitwise_or(self) -> Optional[Any]:
    """Parse `< x`, returning the (ast.Lt, operand) pair."""
    # lt_bitwise_or: '<' bitwise_or
    mark = self._mark()
    if (
        (self.expect('<'))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . Lt ( ) , a );
    self._reset(mark)
    return None;
@memoize
def gte_bitwise_or(self) -> Optional[Any]:
    """Parse `>= x`, returning the (ast.GtE, operand) pair."""
    # gte_bitwise_or: '>=' bitwise_or
    mark = self._mark()
    if (
        (self.expect('>='))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . GtE ( ) , a );
    self._reset(mark)
    return None;
@memoize
def gt_bitwise_or(self) -> Optional[Any]:
    """Parse `> x`, returning the (ast.Gt, operand) pair."""
    # gt_bitwise_or: '>' bitwise_or
    mark = self._mark()
    if (
        (self.expect('>'))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . Gt ( ) , a );
    self._reset(mark)
    return None;
@memoize
def notin_bitwise_or(self) -> Optional[Any]:
    """Parse `not in x`, returning the (ast.NotIn, operand) pair."""
    # notin_bitwise_or: 'not' 'in' bitwise_or
    mark = self._mark()
    if (
        (self.expect('not'))
        and
        (self.expect('in'))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . NotIn ( ) , a );
    self._reset(mark)
    return None;
@memoize
def in_bitwise_or(self) -> Optional[Any]:
    """Parse `in x`, returning the (ast.In, operand) pair."""
    # in_bitwise_or: 'in' bitwise_or
    mark = self._mark()
    if (
        (self.expect('in'))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . In ( ) , a );
    self._reset(mark)
    return None;
@memoize
def isnot_bitwise_or(self) -> Optional[Any]:
    """Parse `is not x`, returning the (ast.IsNot, operand) pair."""
    # isnot_bitwise_or: 'is' 'not' bitwise_or
    mark = self._mark()
    if (
        (self.expect('is'))
        and
        (self.expect('not'))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . IsNot ( ) , a );
    self._reset(mark)
    return None;
@memoize
def is_bitwise_or(self) -> Optional[Any]:
    """Parse `is x`, returning the (ast.Is, operand) pair."""
    # is_bitwise_or: 'is' bitwise_or
    mark = self._mark()
    if (
        (self.expect('is'))
        and
        (a := self.bitwise_or())
    ):
        return ( ast . Is ( ) , a );
    self._reset(mark)
    return None;
@memoize_left_rec
def bitwise_or(self) -> Optional[Any]:
    """Left-recursive `|` rule, extended with Scenic's visible-from/can-see/intersects operators."""
    # bitwise_or: scenic_visible_from | scenic_not_visible_from | scenic_can_see | scenic_intersects | bitwise_or '|' bitwise_xor | bitwise_xor
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (scenic_visible_from := self.scenic_visible_from())
    ):
        return scenic_visible_from;
    self._reset(mark)
    if (
        (scenic_not_visible_from := self.scenic_not_visible_from())
    ):
        return scenic_not_visible_from;
    self._reset(mark)
    if (
        (scenic_can_see := self.scenic_can_see())
    ):
        return scenic_can_see;
    self._reset(mark)
    if (
        (scenic_intersects := self.scenic_intersects())
    ):
        return scenic_intersects;
    self._reset(mark)
    if (
        (a := self.bitwise_or())
        and
        (self.expect('|'))
        and
        (b := self.bitwise_xor())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BinOp ( left = a , op = ast . BitOr ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (bitwise_xor := self.bitwise_xor())
    ):
        return bitwise_xor;
    self._reset(mark)
    return None;
@logger
def scenic_visible_from(self) -> Optional[Any]:
    """Parse `<region> visible from <base>` into s.VisibleFromOp."""
    # scenic_visible_from: bitwise_or "visible" 'from' bitwise_xor
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.bitwise_or())
        and
        (self.expect("visible"))
        and
        (self.expect('from'))
        and
        (b := self.bitwise_xor())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . VisibleFromOp ( region = a , base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@logger
def scenic_not_visible_from(self) -> Optional[Any]:
    """Parse `<region> not visible from <base>` into s.NotVisibleFromOp."""
    # scenic_not_visible_from: bitwise_or "not" "visible" 'from' bitwise_xor
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.bitwise_or())
        and
        (self.expect("not"))
        and
        (self.expect("visible"))
        and
        (self.expect('from'))
        and
        (b := self.bitwise_xor())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . NotVisibleFromOp ( region = a , base = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@logger
def scenic_can_see(self) -> Optional[Any]:
    """Parse `<left> can see <right>` into s.CanSeeOp."""
    # scenic_can_see: bitwise_or "can" "see" bitwise_xor
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.bitwise_or())
        and
        (self.expect("can"))
        and
        (self.expect("see"))
        and
        (b := self.bitwise_xor())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . CanSeeOp ( left = a , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@logger
def scenic_intersects(self) -> Optional[Any]:
    """Parse `<left> intersects <right>` into s.IntersectsOp."""
    # scenic_intersects: bitwise_or "intersects" bitwise_xor
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.bitwise_or())
        and
        (self.expect("intersects"))
        and
        (b := self.bitwise_xor())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . IntersectsOp ( left = a , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize_left_rec
def bitwise_xor(self) -> Optional[Any]:
    """Left-recursive `^` rule, extended with Scenic's `offset along` operator."""
    # bitwise_xor: scenic_offset_along | bitwise_xor '^' bitwise_and | bitwise_and
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (scenic_offset_along := self.scenic_offset_along())
    ):
        return scenic_offset_along;
    self._reset(mark)
    if (
        (a := self.bitwise_xor())
        and
        (self.expect('^'))
        and
        (b := self.bitwise_and())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BinOp ( left = a , op = ast . BitXor ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (bitwise_and := self.bitwise_and())
    ):
        return bitwise_and;
    self._reset(mark)
    return None;
@logger
def scenic_offset_along(self) -> Optional[Any]:
    """Parse `<base> offset along <direction> by <offset>` into s.OffsetAlongOp."""
    # scenic_offset_along: bitwise_xor "offset" "along" bitwise_xor 'by' bitwise_and
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.bitwise_xor())
        and
        (self.expect("offset"))
        and
        (self.expect("along"))
        and
        (b := self.bitwise_xor())
        and
        (self.expect('by'))
        and
        (c := self.bitwise_and())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . OffsetAlongOp ( base = a , direction = b , offset = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize_left_rec
def bitwise_and(self) -> Optional[Any]:
    """Left-recursive `&` rule, extended with Scenic's `relative to` operator."""
    # bitwise_and: scenic_relative_to | bitwise_and '&' shift_expr | shift_expr
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (scenic_relative_to := self.scenic_relative_to())
    ):
        return scenic_relative_to;
    self._reset(mark)
    if (
        (a := self.bitwise_and())
        and
        (self.expect('&'))
        and
        (b := self.shift_expr())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BinOp ( left = a , op = ast . BitAnd ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (shift_expr := self.shift_expr())
    ):
        return shift_expr;
    self._reset(mark)
    return None;
@logger
def scenic_relative_to(self) -> Optional[Any]:
    """Parse `<left> relative to <right>` or `<left> offset by <right>` into s.RelativeToOp."""
    # scenic_relative_to: bitwise_and ("relative" 'to' | "offset" 'by') shift_expr
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.bitwise_and())
        and
        (self._tmp_125())
        and
        (b := self.shift_expr())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RelativeToOp ( left = a , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize_left_rec
def shift_expr(self) -> Optional[Any]:
    """Left-recursive shift rule, extended with Scenic's `at` and prefix operators."""
    # shift_expr: scenic_at | shift_expr '<<' sum | shift_expr '>>' sum | scenic_prefix_operators
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (scenic_at := self.scenic_at())
    ):
        return scenic_at;
    self._reset(mark)
    if (
        (a := self.shift_expr())
        and
        (self.expect('<<'))
        and
        (b := self.sum())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BinOp ( left = a , op = ast . LShift ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.shift_expr())
        and
        (self.expect('>>'))
        and
        (b := self.sum())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . BinOp ( left = a , op = ast . RShift ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (scenic_prefix_operators := self.scenic_prefix_operators())
    ):
        return scenic_prefix_operators;
    self._reset(mark)
    return None;
@logger
def scenic_at(self) -> Optional[Any]:
    """Parse `<field> at <point>` into s.FieldAtOp."""
    # scenic_at: shift_expr 'at' sum
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.shift_expr())
        and
        (self.expect('at'))
        and
        (b := self.sum())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . FieldAtOp ( left = a , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_prefix_operators(self) -> Optional[Any]:
    """Parse Scenic's prefix operators (`relative position of`, `distance`, `visible`, ...).

    Alternatives are tried in grammar order; longer forms with an explicit
    `from`/`of` base are tried before the shorter default-base forms, and the
    rule falls through to `sum` when no prefix operator matches.
    """
    # scenic_prefix_operators: "relative" "position" "of" expression 'from' scenic_prefix_operators | "relative" "position" "of" scenic_prefix_operators | "relative" "heading" "of" expression 'from' scenic_prefix_operators | "relative" "heading" "of" scenic_prefix_operators | "apparent" "heading" "of" expression 'from' scenic_prefix_operators | "apparent" "heading" "of" scenic_prefix_operators | &"distance" scenic_distance_from_op | "distance" "past" expression 'of' scenic_prefix_operators | "distance" "past" scenic_prefix_operators | &"angle" scenic_angle_from_op | &"altitude" scenic_altitude_from_op | "follow" expression 'from' expression 'for' scenic_prefix_operators | "visible" scenic_prefix_operators | 'not' "visible" scenic_prefix_operators | scenic_position_of_op_position 'of' scenic_prefix_operators | sum
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("relative"))
        and
        (self.expect("position"))
        and
        (self.expect("of"))
        and
        (e1 := self.expression())
        and
        (self.expect('from'))
        and
        (e2 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RelativePositionOp ( target = e1 , base = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("relative"))
        and
        (self.expect("position"))
        and
        (self.expect("of"))
        and
        (e1 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RelativePositionOp ( target = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("relative"))
        and
        (self.expect("heading"))
        and
        (self.expect("of"))
        and
        (e1 := self.expression())
        and
        (self.expect('from'))
        and
        (e2 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RelativeHeadingOp ( target = e1 , base = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("relative"))
        and
        (self.expect("heading"))
        and
        (self.expect("of"))
        and
        (e1 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RelativeHeadingOp ( target = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("apparent"))
        and
        (self.expect("heading"))
        and
        (self.expect("of"))
        and
        (e1 := self.expression())
        and
        (self.expect('from'))
        and
        (e2 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . ApparentHeadingOp ( target = e1 , base = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("apparent"))
        and
        (self.expect("heading"))
        and
        (self.expect("of"))
        and
        (e1 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . ApparentHeadingOp ( target = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, "distance"))
        and
        (scenic_distance_from_op := self.scenic_distance_from_op())
    ):
        return scenic_distance_from_op;
    self._reset(mark)
    if (
        (self.expect("distance"))
        and
        (self.expect("past"))
        and
        (e1 := self.expression())
        and
        (self.expect('of'))
        and
        (e2 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . DistancePastOp ( target = e1 , base = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("distance"))
        and
        (self.expect("past"))
        and
        (e1 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . DistancePastOp ( target = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, "angle"))
        and
        (scenic_angle_from_op := self.scenic_angle_from_op())
    ):
        return scenic_angle_from_op;
    self._reset(mark)
    if (
        (self.positive_lookahead(self.expect, "altitude"))
        and
        (scenic_altitude_from_op := self.scenic_altitude_from_op())
    ):
        return scenic_altitude_from_op;
    self._reset(mark)
    if (
        (self.expect("follow"))
        and
        (e1 := self.expression())
        and
        (self.expect('from'))
        and
        (e2 := self.expression())
        and
        (self.expect('for'))
        and
        (e3 := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . FollowOp ( target = e1 , base = e2 , distance = e3 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("visible"))
        and
        (e := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . VisibleOp ( region = e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('not'))
        and
        (self.expect("visible"))
        and
        (e := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . NotVisibleOp ( region = e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (p := self.scenic_position_of_op_position())
        and
        (self.expect('of'))
        and
        (e := self.scenic_prefix_operators())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . PositionOfOp ( position = p , target = e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (sum := self.sum())
    ):
        return sum;
    self._reset(mark)
    return None;
    @memoize
    def scenic_distance_from_op(self) -> Optional[Any]:
        # scenic_distance_from_op: "distance" 'from' expression 'to' scenic_prefix_operators | "distance" 'to' expression 'from' scenic_prefix_operators | "distance" ('to' | 'from') scenic_prefix_operators
        """Parse a Scenic ``distance from/to`` operator.

        Alternatives are tried in order with backtracking; each failed
        alternative rewinds the tokenizer via ``self._reset(mark)``.
        Returns an ``s.DistanceFromOp`` AST node (with source positions
        spanning first to last consumed token) or ``None`` on no match.

        NOTE: @generated by pegen from scenic.gram -- edit the grammar,
        not this file.
        """
        mark = self._mark()
        # Remember the start position before consuming anything.
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        # Alt 1: "distance" 'from' <target> 'to' <base>
        if (
            (self.expect("distance"))
            and
            (self.expect('from'))
            and
            (e1 := self.expression())
            and
            (self.expect('to'))
            and
            (e2 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . DistanceFromOp ( target = e1 , base = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 2: "distance" 'to' <target> 'from' <base> (same node, same roles)
        if (
            (self.expect("distance"))
            and
            (self.expect('to'))
            and
            (e1 := self.expression())
            and
            (self.expect('from'))
            and
            (e2 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . DistanceFromOp ( target = e1 , base = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 3: "distance" ('to'|'from') <target> -- no explicit base
        # (_tmp_126 is the generated helper matching 'to' | 'from').
        if (
            (self.expect("distance"))
            and
            (self._tmp_126())
            and
            (e1 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . DistanceFromOp ( target = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
    @memoize
    def scenic_angle_from_op(self) -> Optional[Any]:
        # scenic_angle_from_op: "angle" 'from' expression 'to' scenic_prefix_operators | "angle" 'to' expression 'from' scenic_prefix_operators | "angle" 'to' scenic_prefix_operators | "angle" 'from' scenic_prefix_operators
        """Parse a Scenic ``angle from/to`` operator.

        Four alternatives, tried in order with backtracking.  Note the
        role swap: with ``from X to Y``, X is ``base`` and Y is ``target``;
        with ``to X from Y``, X is ``target`` and Y is ``base``.  The
        one-argument forms omit the other role.  Returns ``s.AngleFromOp``
        or ``None``.  (@generated by pegen -- do not hand-edit.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        # Alt 1: "angle" 'from' <base> 'to' <target>
        if (
            (self.expect("angle"))
            and
            (self.expect('from'))
            and
            (e1 := self.expression())
            and
            (self.expect('to'))
            and
            (e2 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . AngleFromOp ( base = e1 , target = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 2: "angle" 'to' <target> 'from' <base>
        if (
            (self.expect("angle"))
            and
            (self.expect('to'))
            and
            (e1 := self.expression())
            and
            (self.expect('from'))
            and
            (e2 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . AngleFromOp ( target = e1 , base = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 3: "angle" 'to' <target> (implicit base)
        if (
            (self.expect("angle"))
            and
            (self.expect('to'))
            and
            (e1 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . AngleFromOp ( target = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 4: "angle" 'from' <base> (implicit target)
        if (
            (self.expect("angle"))
            and
            (self.expect('from'))
            and
            (e1 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . AngleFromOp ( base = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
    @memoize
    def scenic_altitude_from_op(self) -> Optional[Any]:
        # scenic_altitude_from_op: "altitude" 'from' expression 'to' scenic_prefix_operators | "altitude" 'to' expression 'from' scenic_prefix_operators | "altitude" 'to' scenic_prefix_operators | "altitude" 'from' scenic_prefix_operators
        """Parse a Scenic ``altitude from/to`` operator.

        Structurally identical to ``scenic_angle_from_op`` but builds
        ``s.AltitudeFromOp``: ``from X to Y`` binds base=X/target=Y,
        ``to X from Y`` binds target=X/base=Y, and the one-argument forms
        omit the other role.  Returns ``None`` on no match.
        (@generated by pegen -- do not hand-edit.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        # Alt 1: "altitude" 'from' <base> 'to' <target>
        if (
            (self.expect("altitude"))
            and
            (self.expect('from'))
            and
            (e1 := self.expression())
            and
            (self.expect('to'))
            and
            (e2 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . AltitudeFromOp ( base = e1 , target = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 2: "altitude" 'to' <target> 'from' <base>
        if (
            (self.expect("altitude"))
            and
            (self.expect('to'))
            and
            (e1 := self.expression())
            and
            (self.expect('from'))
            and
            (e2 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . AltitudeFromOp ( target = e1 , base = e2 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 3: "altitude" 'to' <target>
        if (
            (self.expect("altitude"))
            and
            (self.expect('to'))
            and
            (e1 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . AltitudeFromOp ( target = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 4: "altitude" 'from' <base>
        if (
            (self.expect("altitude"))
            and
            (self.expect('from'))
            and
            (e1 := self.scenic_prefix_operators())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . AltitudeFromOp ( base = e1 , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
    @memoize
    def scenic_position_of_op_position(self) -> Optional[Any]:
        # scenic_position_of_op_position: "top" "front" "left" | "top" "front" "right" | "top" "back" "left" | "top" "back" "right" | "bottom" "front" "left" | "bottom" "front" "right" | "bottom" "back" "left" | "bottom" "back" "right" | "front" "left" | "front" "right" | "back" "left" | "back" "right" | "front" | "back" | "left" | "right" | "top" | "bottom"
        """Parse the position selector of a ``<position> of <object>`` operator.

        Matches one of 18 keyword combinations and returns the matching
        marker node from ``scenic.syntax.ast`` (e.g. ``s.TopFrontLeft``,
        ``s.FrontRight``, ``s.Back``), or ``None`` on no match.

        Ordering matters: the three-word alternatives are tried before the
        two-word ones, which are tried before the single keywords, so the
        longest match wins (PEG alternatives are ordered, not longest-match
        by default).  (@generated by pegen -- do not hand-edit.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        # --- three-keyword corners ---
        if (
            (self.expect("top"))
            and
            (self.expect("front"))
            and
            (self.expect("left"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . TopFrontLeft ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("top"))
            and
            (self.expect("front"))
            and
            (self.expect("right"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . TopFrontRight ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("top"))
            and
            (self.expect("back"))
            and
            (self.expect("left"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . TopBackLeft ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("top"))
            and
            (self.expect("back"))
            and
            (self.expect("right"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . TopBackRight ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("bottom"))
            and
            (self.expect("front"))
            and
            (self.expect("left"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . BottomFrontLeft ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("bottom"))
            and
            (self.expect("front"))
            and
            (self.expect("right"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . BottomFrontRight ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("bottom"))
            and
            (self.expect("back"))
            and
            (self.expect("left"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . BottomBackLeft ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("bottom"))
            and
            (self.expect("back"))
            and
            (self.expect("right"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . BottomBackRight ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # --- two-keyword edges ---
        if (
            (self.expect("front"))
            and
            (self.expect("left"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . FrontLeft ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("front"))
            and
            (self.expect("right"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . FrontRight ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("back"))
            and
            (self.expect("left"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . BackLeft ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("back"))
            and
            (self.expect("right"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . BackRight ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # --- single-keyword faces ---
        if (
            (self.expect("front"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Front ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("back"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Back ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("left"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Left ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("right"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Right ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("top"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Top ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("bottom"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Bottom ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
    @memoize_left_rec
    def sum(self) -> Optional[Any]:
        # sum: sum '+' term | sum '-' term | term
        """Parse an additive expression (left-recursive rule ``sum``).

        Left recursion is handled by pegen's ``@memoize_left_rec``, which
        iteratively grows the match, so ``a - b - c`` parses left-associative
        as ``(a - b) - c``.  Returns an ``ast.BinOp`` with ``Add``/``Sub``,
        a bare ``term``, or ``None``.  (The method name shadows the builtin
        ``sum`` but is fixed by the grammar rule name; @generated code.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        # Alt 1: sum '+' term
        if (
            (a := self.sum())
            and
            (self.expect('+'))
            and
            (b := self.term())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = a , op = ast . Add ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 2: sum '-' term
        if (
            (a := self.sum())
            and
            (self.expect('-'))
            and
            (b := self.term())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = a , op = ast . Sub ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        # Alt 3: fall through to the next precedence level.
        if (
            (term := self.term())
        ):
            return term;
        self._reset(mark)
        return None;
    @memoize_left_rec
    def term(self) -> Optional[Any]:
        # term: scenic_vector | scenic_deg | term '*' factor | term '/' factor | term '//' factor | term '%' factor | term '@' factor | factor
        """Parse a multiplicative expression (left-recursive rule ``term``).

        Scenic-specific alternatives come first: ``scenic_vector``
        (``x @ y`` vector literal) and ``scenic_deg`` (``x deg``).  Because
        PEG alternatives are ordered, ``scenic_vector`` normally wins over
        the plain ``term '@' factor`` alternative below; the latter is kept
        by the generator and wraps its result in ``check_version`` (``@``
        requires Python >= 3.5).  Returns an AST node or ``None``.
        (@generated by pegen -- do not hand-edit.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (scenic_vector := self.scenic_vector())
        ):
            return scenic_vector;
        self._reset(mark)
        if (
            (scenic_deg := self.scenic_deg())
        ):
            return scenic_deg;
        self._reset(mark)
        if (
            (a := self.term())
            and
            (self.expect('*'))
            and
            (b := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = a , op = ast . Mult ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (a := self.term())
            and
            (self.expect('/'))
            and
            (b := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = a , op = ast . Div ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (a := self.term())
            and
            (self.expect('//'))
            and
            (b := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = a , op = ast . FloorDiv ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (a := self.term())
            and
            (self.expect('%'))
            and
            (b := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = a , op = ast . Mod ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (a := self.term())
            and
            (self.expect('@'))
            and
            (b := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 5 ) , "The '@' operator is" , ast . BinOp ( left = a , op = ast . MatMult ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
        self._reset(mark)
        if (
            (factor := self.factor())
        ):
            return factor;
        self._reset(mark)
        return None;
    @logger
    def scenic_vector(self) -> Optional[Any]:
        # scenic_vector: term '@' factor
        """Parse a Scenic vector literal ``<term> @ <factor>``.

        Returns an ``s.VectorOp`` node or ``None``.  Decorated with
        ``@logger`` (trace-only, no memoization) because the rule is a thin
        single-alternative wrapper.  (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.term())
            and
            (self.expect('@'))
            and
            (b := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . VectorOp ( left = a , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
    @logger
    def scenic_deg(self) -> Optional[Any]:
        # scenic_deg: term "deg"
        """Parse a Scenic degrees postfix ``<term> deg``.

        Returns an ``s.DegOp`` wrapping the operand, or ``None``.
        (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.term())
            and
            (self.expect("deg"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . DegOp ( operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
    @memoize
    def factor(self) -> Optional[Any]:
        # factor: '+' factor | '-' factor | '~' factor | power
        """Parse a unary expression: ``+x``, ``-x``, ``~x``, or a ``power``.

        Right-recursive, so ``--x`` parses as ``-(-x)``.  Returns an
        ``ast.UnaryOp`` (UAdd/USub/Invert) or the underlying ``power``
        node, or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (self.expect('+'))
            and
            (a := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . UnaryOp ( op = ast . UAdd ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect('-'))
            and
            (a := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . UnaryOp ( op = ast . USub ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect('~'))
            and
            (a := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . UnaryOp ( op = ast . Invert ( ) , operand = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (power := self.power())
        ):
            return power;
        self._reset(mark)
        return None;
    @memoize
    def power(self) -> Optional[Any]:
        # power: await_primary '**' factor | scenic_new
        """Parse an exponentiation ``base ** exponent`` or fall through.

        The exponent is a ``factor`` (right side can itself be unary/power),
        giving ``**`` its right-associative binding.  The fallback is
        ``scenic_new`` (Scenic's object-creation rule) rather than plain
        ``await_primary``.  Returns ``ast.BinOp(Pow)``, the fallback node,
        or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.await_primary())
            and
            (self.expect('**'))
            and
            (b := self.factor())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . BinOp ( left = a , op = ast . Pow ( ) , right = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (scenic_new := self.scenic_new())
        ):
            return scenic_new;
        self._reset(mark)
        return None;
    @memoize
    def scenic_new(self) -> Optional[Any]:
        # scenic_new: scenic_new_expr | await_primary
        """Try a Scenic ``new``-object expression, else a plain primary.

        Pure dispatch rule: returns whichever sub-rule matches first,
        or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        if (
            (scenic_new_expr := self.scenic_new_expr())
        ):
            return scenic_new_expr;
        self._reset(mark)
        if (
            (await_primary := self.await_primary())
        ):
            return await_primary;
        self._reset(mark)
        return None;
    @memoize
    def await_primary(self) -> Optional[Any]:
        # await_primary: 'await' primary | primary
        """Parse an optional ``await`` prefix on a primary expression.

        ``await`` results are passed through ``check_version`` since await
        expressions require Python >= 3.5.  Returns ``ast.Await``, the bare
        ``primary``, or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (self.expect('await'))
            and
            (a := self.primary())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return self . check_version ( ( 3 , 5 ) , "Await expressions are" , ast . Await ( a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
        self._reset(mark)
        if (
            (primary := self.primary())
        ):
            return primary;
        self._reset(mark)
        return None;
    @memoize_left_rec
    def primary(self) -> Optional[Any]:
        # primary: primary '.' NAME | primary genexp | primary '(' arguments? ')' | primary '[' slices ']' | atom
        """Parse postfix expressions (left-recursive rule ``primary``).

        Handles attribute access (``a.b``), a call with a bare generator
        argument (``f(x for x in y)``), a normal call with optional
        arguments, and subscription (``a[b]``); the base case is ``atom``.
        Left recursion via ``@memoize_left_rec`` makes trailers
        left-associative (``a.b.c`` is ``(a.b).c``).  Returns the
        corresponding ``ast`` node or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.primary())
            and
            (self.expect('.'))
            and
            (b := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (b := self.genexp())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Call ( func = a , args = [b] , keywords = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (self.expect('('))
            and
            # The trailing-comma 1-tuple keeps the `and` chain truthy even
            # when the optional `arguments?` is absent (b == (None,) is truthy,
            # and b[0]/b[1] below come from the matched (args, keywords) pair).
            (b := self.arguments(),)
            and
            (self.expect(')'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Call ( func = a , args = b [0] if b else [] , keywords = b [1] if b else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
        self._reset(mark)
        if (
            (a := self.primary())
            and
            (self.expect('['))
            and
            (b := self.slices())
            and
            (self.expect(']'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Subscript ( value = a , slice = b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (atom := self.atom())
        ):
            return atom;
        self._reset(mark)
        return None;
    @memoize
    def slices(self) -> Optional[Any]:
        # slices: slice !',' | ','.(slice | starred_expression)+ ','?
        """Parse the contents of a subscript ``[...]``.

        A single slice not followed by ``,`` is returned as-is; otherwise a
        comma-separated list is collected (via generated helper
        ``_gather_127``) and wrapped in a Tuple on Python >= 3.9, or in the
        legacy ``ast.ExtSlice``/``ast.Index`` forms for older targets.
        Returns the slice node or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        # Alt 1: a lone slice, with !',' lookahead so tuples fall through.
        if (
            (a := self.slice())
            and
            (self.negative_lookahead(self.expect, ','))
        ):
            return a;
        self._reset(mark)
        if (
            (a := self._gather_127())
            and
            # Optional trailing comma (1-tuple trick keeps the chain truthy).
            (self.expect(','),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Tuple ( elts = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ( ast . ExtSlice ( dims = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if any ( isinstance ( e , ast . Slice ) for e in a ) else ast . Index ( value = ast . Tuple ( elts = [e . value for e in a] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) );
        self._reset(mark)
        return None;
    @memoize
    def slice(self) -> Optional[Any]:
        # slice: expression? ':' expression? [':' expression?] | named_expression
        """Parse one slice item: ``lower? : upper? (: step?)?`` or an expression.

        The optional parts use the ``(x := f(),)`` 1-tuple trick, so absent
        bounds become ``None`` while the chain stays truthy.  A plain
        expression is wrapped in legacy ``ast.Index`` on Python < 3.9.
        Returns ``ast.Slice``, the (possibly wrapped) expression, or ``None``.
        (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (a := self.expression(),)
            and
            (self.expect(':'))
            and
            (b := self.expression(),)
            and
            # _tmp_129 is the generated helper for the optional ':' expression?
            (c := self._tmp_129(),)
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Slice ( lower = a , upper = b , step = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (a := self.named_expression())
        ):
            return a if sys . version_info >= ( 3 , 9 ) or isinstance ( a , ast . Slice ) else ast . Index ( value = a , lineno = a . lineno , col_offset = a . col_offset , end_lineno = a . end_lineno , end_col_offset = a . end_col_offset );
        self._reset(mark)
        return None;
    @memoize
    def atom(self) -> Optional[Any]:
        # atom: "initial" "scenario" | NAME | 'True' | 'False' | 'None' | &(STRING | FSTRING_START) strings | NUMBER | &'(' (tuple | group | genexp) | &'[' (list | listcomp) | &'{' (dict | set | dictcomp | setcomp) | '...'
        """Parse an atomic expression.

        Scenic's ``initial scenario`` keyword pair is tried first, then the
        standard Python atoms: names, the three keyword constants, strings
        (guarded by a lookahead for STRING/FSTRING_START), numbers,
        parenthesized/bracketed/braced forms (each guarded by a lookahead on
        the opening delimiter so only one dispatch helper runs), and ``...``.
        Constants carry an explicit ``kind=None`` on Python < 3.9.
        Returns an AST node or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (self.expect("initial"))
            and
            (self.expect("scenario"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . InitialScenario ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (a := self.name())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Name ( id = a . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect('True'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = True , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = True , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect('False'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = False , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = False , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect('None'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = None , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.positive_lookahead(self._tmp_130, ))
            and
            (strings := self.strings())
        ):
            return strings;
        self._reset(mark)
        if (
            (a := self.number())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = ast . literal_eval ( a . string ) , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = ast . literal_eval ( a . string ) , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.positive_lookahead(self.expect, '('))
            and
            (_tmp_131 := self._tmp_131())
        ):
            return _tmp_131;
        self._reset(mark)
        if (
            (self.positive_lookahead(self.expect, '['))
            and
            (_tmp_132 := self._tmp_132())
        ):
            return _tmp_132;
        self._reset(mark)
        if (
            (self.positive_lookahead(self.expect, '{'))
            and
            (_tmp_133 := self._tmp_133())
        ):
            return _tmp_133;
        self._reset(mark)
        if (
            (self.expect('...'))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Constant ( value = Ellipsis , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . Constant ( value = Ellipsis , kind = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
    @memoize
    def group(self) -> Optional[Any]:
        # group: '(' (yield_expr | named_expression) ')' | invalid_group
        """Parse a parenthesized yield or named expression.

        Returns the inner expression node unchanged (the parentheses add no
        AST node).  ``invalid_group`` only runs on the error-reporting second
        pass (``self.call_invalid_rules``) and raises a syntax error rather
        than returning a value.  (@generated by pegen.)
        """
        mark = self._mark()
        if (
            (self.expect('('))
            and
            (a := self._tmp_134())
            and
            (self.expect(')'))
        ):
            return a;
        self._reset(mark)
        if (
            self.call_invalid_rules
            and
            (self.invalid_group())
        ):
            return None # pragma: no cover;
        self._reset(mark)
        return None;
    @memoize
    def lambdef(self) -> Optional[Any]:
        # lambdef: 'lambda' lambda_params? ':' expression
        """Parse a ``lambda`` expression.

        When the parameter list is absent, an empty ``ast.arguments`` is
        synthesized via ``make_arguments``.  Returns ``ast.Lambda`` or
        ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (self.expect('lambda'))
            and
            # Optional params: the 1-tuple trick keeps the chain truthy.
            (a := self.lambda_params(),)
            and
            (self.expect(':'))
            and
            (b := self.expression())
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return ast . Lambda ( args = a or self . make_arguments ( None , [] , None , [] , ( None , [] , None ) ) , body = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
    @memoize
    def lambda_params(self) -> Optional[Any]:
        # lambda_params: invalid_lambda_parameters | lambda_parameters
        """Parse a lambda parameter list.

        The ``invalid_*`` alternative only runs on the error-reporting
        second pass and exists to produce a good syntax-error message.
        Returns the ``ast.arguments`` from ``lambda_parameters`` or ``None``.
        (@generated by pegen.)
        """
        mark = self._mark()
        if (
            self.call_invalid_rules
            and
            (self.invalid_lambda_parameters())
        ):
            return None # pragma: no cover;
        self._reset(mark)
        if (
            (lambda_parameters := self.lambda_parameters())
        ):
            return lambda_parameters;
        self._reset(mark)
        return None;
    @memoize
    def lambda_parameters(self) -> Optional[ast . arguments]:
        # lambda_parameters: lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? | lambda_slash_with_default lambda_param_with_default* lambda_star_etc? | lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? | lambda_param_with_default+ lambda_star_etc? | lambda_star_etc
        """Assemble the full lambda parameter list into ``ast.arguments``.

        Five alternatives mirror the legal orderings of positional-only
        (``/``), plain, defaulted, and star parameters; each feeds its
        pieces to ``self.make_arguments(slash_no_default,
        slash_with_default, plain, with_default, star_etc)``.  The
        generated ``_loopN``/``_loop1`` helpers collect the repeated
        sub-rules.  Returns ``ast.arguments`` or ``None``.
        (@generated by pegen.)
        """
        mark = self._mark()
        # Alt 1: positional-only group without defaults first.
        if (
            (a := self.lambda_slash_no_default())
            and
            (b := self._loop0_135(),)
            and
            (c := self._loop0_136(),)
            and
            (d := self.lambda_star_etc(),)
        ):
            return self . make_arguments ( a , [] , b , c , d );
        self._reset(mark)
        # Alt 2: positional-only group with defaults first.
        if (
            (a := self.lambda_slash_with_default())
            and
            (b := self._loop0_137(),)
            and
            (c := self.lambda_star_etc(),)
        ):
            return self . make_arguments ( None , a , None , b , c );
        self._reset(mark)
        # Alt 3: one or more plain params, then defaulted ones.
        if (
            (a := self._loop1_138())
            and
            (b := self._loop0_139(),)
            and
            (c := self.lambda_star_etc(),)
        ):
            return self . make_arguments ( None , [] , a , b , c );
        self._reset(mark)
        # Alt 4: only defaulted params.
        if (
            (a := self._loop1_140())
            and
            (b := self.lambda_star_etc(),)
        ):
            return self . make_arguments ( None , [] , None , a , b );
        self._reset(mark)
        # Alt 5: only *args / keyword-only / **kwargs.
        if (
            (a := self.lambda_star_etc())
        ):
            return self . make_arguments ( None , [] , None , [] , a );
        self._reset(mark)
        return None;
    @memoize
    def lambda_slash_no_default(self) -> Optional[List [Tuple [ast . arg , None]]]:
        # lambda_slash_no_default: lambda_param_no_default+ '/' ',' | lambda_param_no_default+ '/' &':'
        """Parse positional-only lambda params (no defaults) ending in '/'.

        The ``/`` must be followed by ``,`` (more params) or, via lookahead
        only, by ``:`` (end of the param list; the colon is not consumed).
        Each param is paired with ``None`` (no default) for
        ``make_arguments``.  Returns the list of pairs or ``None``.
        (@generated by pegen.)
        """
        mark = self._mark()
        if (
            (a := self._loop1_141())
            and
            (self.expect('/'))
            and
            (self.expect(','))
        ):
            return [( p , None ) for p in a];
        self._reset(mark)
        if (
            (a := self._loop1_142())
            and
            (self.expect('/'))
            and
            (self.positive_lookahead(self.expect, ':'))
        ):
            return [( p , None ) for p in a];
        self._reset(mark)
        return None;
    @memoize
    def lambda_slash_with_default(self) -> Optional[List [Tuple [ast . arg , Any]]]:
        # lambda_slash_with_default: lambda_param_no_default* lambda_param_with_default+ '/' ',' | lambda_param_no_default* lambda_param_with_default+ '/' &':'
        """Parse positional-only lambda params where at least one has a default.

        Zero or more no-default params may precede the defaulted ones; the
        group ends with ``/`` followed by ``,`` or (lookahead only) ``:``.
        No-default params are normalized to ``(param, None)`` pairs and
        concatenated with the defaulted pairs.  Returns the combined list
        or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        if (
            (a := self._loop0_143(),)
            and
            (b := self._loop1_144())
            and
            (self.expect('/'))
            and
            (self.expect(','))
        ):
            return ( [( p , None ) for p in a] if a else [] ) + b;
        self._reset(mark)
        if (
            (a := self._loop0_145(),)
            and
            (b := self._loop1_146())
            and
            (self.expect('/'))
            and
            (self.positive_lookahead(self.expect, ':'))
        ):
            return ( [( p , None ) for p in a] if a else [] ) + b;
        self._reset(mark)
        return None;
    @memoize
    def lambda_star_etc(self) -> Optional[Tuple [Optional [ast . arg] , List [Tuple [ast . arg , Any]] , Optional [ast . arg]]]:
        # lambda_star_etc: invalid_lambda_star_etc | '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? | '*' ',' lambda_param_no_default+ lambda_kwds? | lambda_kwds
        """Parse the star section of a lambda parameter list.

        Returns a ``(vararg, kwonly_pairs, kwarg)`` triple for
        ``make_arguments``: ``*args`` + keyword-only params, bare ``*,`` +
        keyword-only params (vararg None), or just ``**kwargs``.  The
        ``invalid_*`` alternative only runs on the error-reporting second
        pass.  Returns ``None`` on no match.  (@generated by pegen.)
        """
        mark = self._mark()
        if (
            self.call_invalid_rules
            and
            (self.invalid_lambda_star_etc())
        ):
            return None # pragma: no cover;
        self._reset(mark)
        # Alt: '*' <vararg> <kwonly...> <**kwargs?>
        if (
            (self.expect('*'))
            and
            (a := self.lambda_param_no_default())
            and
            (b := self._loop0_147(),)
            and
            (c := self.lambda_kwds(),)
        ):
            return ( a , b , c );
        self._reset(mark)
        # Alt: bare '*,' -- keyword-only params with no vararg.
        if (
            (self.expect('*'))
            and
            (self.expect(','))
            and
            (b := self._loop1_148())
            and
            (c := self.lambda_kwds(),)
        ):
            return ( None , b , c );
        self._reset(mark)
        # Alt: only '**kwargs'.
        if (
            (a := self.lambda_kwds())
        ):
            return ( None , [] , a );
        self._reset(mark)
        return None;
    @memoize
    def lambda_kwds(self) -> Optional[ast . arg]:
        # lambda_kwds: invalid_lambda_kwds | '**' lambda_param_no_default
        """Parse a lambda ``**kwargs`` parameter.

        Returns the ``ast.arg`` following ``**`` or ``None``.  The
        ``invalid_*`` alternative only runs on the error-reporting second
        pass.  (@generated by pegen.)
        """
        mark = self._mark()
        if (
            self.call_invalid_rules
            and
            (self.invalid_lambda_kwds())
        ):
            return None # pragma: no cover;
        self._reset(mark)
        if (
            (self.expect('**'))
            and
            (a := self.lambda_param_no_default())
        ):
            return a;
        self._reset(mark)
        return None;
    @memoize
    def lambda_param_no_default(self) -> Optional[ast . arg]:
        # lambda_param_no_default: lambda_param ',' | lambda_param &':'
        """Parse one lambda parameter with no default.

        The param must be terminated by ``,`` (consumed) or by ``:``
        (checked via lookahead only, so the colon stays for the caller).
        Returns the ``ast.arg`` or ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        if (
            (a := self.lambda_param())
            and
            (self.expect(','))
        ):
            return a;
        self._reset(mark)
        if (
            (a := self.lambda_param())
            and
            (self.positive_lookahead(self.expect, ':'))
        ):
            return a;
        self._reset(mark)
        return None;
    @memoize
    def lambda_param_with_default(self) -> Optional[Tuple [ast . arg , Any]]:
        # lambda_param_with_default: lambda_param default ',' | lambda_param default &':'
        """Parse one lambda parameter that has a default value.

        Same termination rule as ``lambda_param_no_default`` (``,`` consumed,
        ``:`` lookahead only).  Returns an ``(arg, default_expr)`` pair or
        ``None``.  (@generated by pegen.)
        """
        mark = self._mark()
        if (
            (a := self.lambda_param())
            and
            (c := self.default())
            and
            (self.expect(','))
        ):
            return ( a , c );
        self._reset(mark)
        if (
            (a := self.lambda_param())
            and
            (c := self.default())
            and
            (self.positive_lookahead(self.expect, ':'))
        ):
            return ( a , c );
        self._reset(mark)
        return None;
@memoize
def lambda_param_maybe_default(self) -> Optional[Tuple [ast . arg , Any]]:
    # lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':'
    """Parse one lambda parameter with an optional default; return (arg, default-or-None).

    ``(c := self.default(),)`` keeps the match chain truthy when no default
    is present.
    """
    mark = self._mark()
    if (
        (a := self.lambda_param())
        and
        (c := self.default(),)
        and
        (self.expect(','))
    ):
        return ( a , c );
    self._reset(mark)
    if (
        (a := self.lambda_param())
        and
        (c := self.default(),)
        and
        (self.positive_lookahead(self.expect, ':'))
    ):
        return ( a , c );
    self._reset(mark)
    return None;
@memoize
def lambda_param(self) -> Optional[ast . arg]:
    # lambda_param: NAME
    """Parse a bare NAME as a lambda parameter and build its ast.arg.

    Python < 3.9 requires an explicit ``type_comment`` keyword on ast.arg,
    hence the version-split construction.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . arg ( arg = a . string , annotation = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset ) if sys . version_info >= ( 3 , 9 ) else ast . arg ( arg = a . string , annotation = None , type_comment = None , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_model_stmt(self) -> Optional[Any]:
    # scenic_model_stmt: "model" dotted_name
    """Parse a Scenic ``model <dotted.name>`` statement into an s.Model node."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("model"))
        and
        (a := self.dotted_name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Model ( name = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_tracked_assignment(self) -> Optional[Any]:
    # scenic_tracked_assignment: scenic_tracked_name '=' expression
    """Parse an assignment to a tracked name (``ego`` / ``workspace``) into s.TrackedAssign."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.scenic_tracked_name())
        and
        (self.expect('='))
        and
        (b := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . TrackedAssign ( target = a , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_tracked_name(self) -> Optional[Any]:
    # scenic_tracked_name: "ego" | "workspace"
    """Parse one of Scenic's tracked names; return s.Ego or s.Workspace."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("ego"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Ego ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect("workspace"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Workspace ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_param_stmt(self) -> Optional[Any]:
    # scenic_param_stmt: "param" (','.scenic_param_stmt_param+)
    """Parse a ``param`` statement with a comma-separated parameter list into s.Param."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("param"))
        and
        (elts := self._gather_149())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Param ( elts = elts , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_param_stmt_param(self) -> Optional[Any]:
    # scenic_param_stmt_param: scenic_param_stmt_id '=' expression
    """Parse one ``name = expression`` entry of a ``param`` statement into s.parameter."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (name := self.scenic_param_stmt_id())
        and
        (self.expect('='))
        and
        (e := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . parameter ( name , e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_param_stmt_id(self) -> Optional[Any]:
    # scenic_param_stmt_id: NAME | STRING
    """Parse a parameter identifier: a NAME, or a STRING with its quotes stripped."""
    mark = self._mark()
    if (
        (a := self.name())
    ):
        return a . string;
    self._reset(mark)
    if (
        (a := self.string())
    ):
        # [1:-1] drops the surrounding quote characters of the token text.
        return a . string [1 : - 1];
    self._reset(mark)
    return None;
@memoize
def scenic_require_stmt(self) -> Optional[Any]:
    # scenic_require_stmt: 'require' "monitor" expression ['as' scenic_require_stmt_name] | invalid_scenic_require_prob | 'require' ['[' NUMBER ']'] scenic_temporal_expression ['as' scenic_require_stmt_name]
    """Parse a ``require`` statement.

    Alternatives, in order: ``require monitor <expr> [as name]`` →
    s.RequireMonitor; a diagnostic-only invalid-probability rule; and
    ``require [<prob>] <temporal expr> [as name]`` → s.Require.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('require'))
        and
        (self.expect("monitor"))
        and
        (e := self.expression())
        and
        (n := self._tmp_151(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RequireMonitor ( monitor = e , name = n , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Diagnostic-only alternative for a malformed probability annotation.
    if (
        self.call_invalid_rules
        and
        (self.invalid_scenic_require_prob())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('require'))
        and
        (p := self._tmp_152(),)
        and
        (e := self.scenic_temporal_expression())
        and
        (n := self._tmp_153(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Require ( cond = e , prob = p , name = n , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_require_stmt_name(self) -> Optional[Any]:
    # scenic_require_stmt_name: (NAME | NUMBER) | STRING
    """Parse the ``as <name>`` label of a require/record statement as plain text."""
    mark = self._mark()
    if (
        (a := self._tmp_154())
    ):
        return a . string;
    self._reset(mark)
    if (
        (a := self.string())
    ):
        # [1:-1] drops the surrounding quote characters of the token text.
        return a . string [1 : - 1];
    self._reset(mark)
    return None;
@memoize
def scenic_record_stmt(self) -> Optional[Any]:
    # scenic_record_stmt: "record" expression ['as' scenic_require_stmt_name]
    """Parse a ``record <expr> [as name]`` statement into s.Record."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("record"))
        and
        (e := self.expression())
        and
        (n := self._tmp_155(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Record ( value = e , name = n , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_record_initial_stmt(self) -> Optional[Any]:
    # scenic_record_initial_stmt: "record" "initial" expression ['as' scenic_require_stmt_name]
    """Parse a ``record initial <expr> [as name]`` statement into s.RecordInitial."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("record"))
        and
        (self.expect("initial"))
        and
        (e := self.expression())
        and
        (n := self._tmp_156(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RecordInitial ( value = e , name = n , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_record_final_stmt(self) -> Optional[Any]:
    # scenic_record_final_stmt: "record" "final" expression ['as' scenic_require_stmt_name]
    """Parse a ``record final <expr> [as name]`` statement into s.RecordFinal."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("record"))
        and
        (self.expect("final"))
        and
        (e := self.expression())
        and
        (n := self._tmp_157(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . RecordFinal ( value = e , name = n , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_mutate_stmt(self) -> Optional[Any]:
    # scenic_mutate_stmt: "mutate" (','.scenic_mutate_stmt_id+)? ['by' expression]
    """Parse a ``mutate [ids] [by expr]`` statement into s.Mutate.

    Both the id list and the scale are optional; a missing id list is
    normalized to [] when building the node.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("mutate"))
        and
        (elts := self._gather_158(),)
        and
        (scale := self._tmp_160(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Mutate ( elts = elts if elts is not None else [] , scale = scale , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_mutate_stmt_id(self) -> Optional[Any]:
    # scenic_mutate_stmt_id: NAME
    """Parse one mutate target NAME as an ast.Name in Load context."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Name ( id = a . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_abort_stmt(self) -> Optional[Any]:
    # scenic_abort_stmt: "abort"
    """Parse the ``abort`` keyword into an s.Abort node."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("abort"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Abort ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_take_stmt(self) -> Optional[Any]:
    # scenic_take_stmt: "take" (','.expression+)
    """Parse a ``take <expr, ...>`` action statement into s.Take."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("take"))
        and
        (elts := self._gather_161())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Take ( elts = elts , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_wait_stmt(self) -> Optional[Any]:
    # scenic_wait_stmt: "wait"
    """Parse the ``wait`` keyword into an s.Wait node."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("wait"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Wait ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_terminate_simulation_when_stmt(self) -> Optional[Any]:
    # scenic_terminate_simulation_when_stmt: "terminate" "simulation" "when" expression ['as' scenic_require_stmt_name]
    """Parse ``terminate simulation when <expr> [as name]`` into s.TerminateSimulationWhen."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("terminate"))
        and
        (self.expect("simulation"))
        and
        (self.expect("when"))
        and
        (v := self.expression())
        and
        (n := self._tmp_163(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . TerminateSimulationWhen ( v , name = n , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_terminate_when_stmt(self) -> Optional[Any]:
    # scenic_terminate_when_stmt: "terminate" "when" expression ['as' scenic_require_stmt_name]
    """Parse ``terminate when <expr> [as name]`` into s.TerminateWhen."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("terminate"))
        and
        (self.expect("when"))
        and
        (v := self.expression())
        and
        (n := self._tmp_164(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . TerminateWhen ( v , name = n , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_terminate_after_stmt(self) -> Optional[Any]:
    # scenic_terminate_after_stmt: "terminate" "after" scenic_dynamic_duration
    """Parse ``terminate after <duration>`` into s.TerminateAfter."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("terminate"))
        and
        (self.expect("after"))
        and
        (v := self.scenic_dynamic_duration())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . TerminateAfter ( v , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_terminate_simulation_stmt(self) -> Optional[Any]:
    # scenic_terminate_simulation_stmt: "terminate" "simulation"
    """Parse ``terminate simulation`` into s.TerminateSimulation."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("terminate"))
        and
        (self.expect("simulation"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . TerminateSimulation ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_terminate_stmt(self) -> Optional[Any]:
    # scenic_terminate_stmt: "terminate"
    """Parse a bare ``terminate`` keyword into s.Terminate."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("terminate"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Terminate ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_do_choose_stmt(self) -> Optional[Any]:
    # scenic_do_choose_stmt: 'do' "choose" (','.expression+)
    """Parse ``do choose <expr, ...>`` into s.DoChoose."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('do'))
        and
        (self.expect("choose"))
        and
        (e := self._gather_165())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . DoChoose ( e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_do_shuffle_stmt(self) -> Optional[Any]:
    # scenic_do_shuffle_stmt: 'do' "shuffle" (','.expression+)
    """Parse ``do shuffle <expr, ...>`` into s.DoShuffle."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('do'))
        and
        (self.expect("shuffle"))
        and
        (e := self._gather_167())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . DoShuffle ( e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_do_for_stmt(self) -> Optional[Any]:
    # scenic_do_for_stmt: 'do' (','.expression+) 'for' scenic_dynamic_duration
    """Parse ``do <expr, ...> for <duration>`` into s.DoFor."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('do'))
        and
        (e := self._gather_169())
        and
        (self.expect('for'))
        and
        (u := self.scenic_dynamic_duration())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . DoFor ( elts = e , duration = u , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_dynamic_duration(self) -> Optional[Any]:
    # scenic_dynamic_duration: expression "seconds" | expression "steps" | invalid_scenic_dynamic_duration
    """Parse a duration: ``<expr> seconds`` → s.Seconds or ``<expr> steps`` → s.Steps."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (v := self.expression())
        and
        (self.expect("seconds"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Seconds ( v , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (v := self.expression())
        and
        (self.expect("steps"))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Steps ( v , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Diagnostic-only alternative for a malformed duration.
    if (
        self.call_invalid_rules
        and
        (self.invalid_scenic_dynamic_duration())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def scenic_do_until_stmt(self) -> Optional[Any]:
    # scenic_do_until_stmt: 'do' (','.disjunction+) 'until' expression
    """Parse ``do <behaviors> until <expr>`` into s.DoUntil."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('do'))
        and
        (e := self._gather_171())
        and
        (self.expect('until'))
        and
        (cond := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . DoUntil ( elts = e , cond = cond , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_do_stmt(self) -> Optional[Any]:
    # scenic_do_stmt: 'do' (','.expression+)
    """Parse a plain ``do <expr, ...>`` statement into s.Do."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('do'))
        and
        (e := self._gather_173())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Do ( elts = e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def scenic_simulator_stmt(self) -> Optional[Any]:
    # scenic_simulator_stmt: "simulator" expression
    """Parse a ``simulator <expr>`` statement into s.Simulator."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect("simulator"))
        and
        (e := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return s . Simulator ( value = e , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def fstring_mid(self) -> Optional[Any]:
    # fstring_mid: fstring_replacement_field | FSTRING_MIDDLE
    """Parse one segment of an f-string body: a replacement field, or a literal chunk as ast.Constant."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (fstring_replacement_field := self.fstring_replacement_field())
    ):
        return fstring_replacement_field;
    self._reset(mark)
    if (
        (t := self.fstring_middle())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Constant ( value = t . string , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def fstring_replacement_field(self) -> Optional[Any]:
    # fstring_replacement_field: '{' (yield_expr | star_expressions) "="? fstring_conversion? fstring_full_format_spec? '}' | invalid_replacement_field
    """Parse an f-string ``{...}`` replacement field into ast.FormattedValue.

    The conversion field encodes: explicit ``!x`` conversion takes priority;
    a bare ``=`` debug marker implies ``!r`` (``b'r'[0]``); otherwise -1
    means no conversion.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('{'))
        and
        (a := self._tmp_175())
        and
        (debug_expr := self.expect("="),)
        and
        (conversion := self.fstring_conversion(),)
        and
        (format := self.fstring_full_format_spec(),)
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . FormattedValue ( value = a , conversion = ( conversion . decode ( ) [0] if conversion else ( b'r' [0] if debug_expr else - 1 ) ) , format_spec = format , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Diagnostic-only alternative for a malformed replacement field.
    if (
        self.call_invalid_rules
        and
        (self.invalid_replacement_field())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def fstring_conversion(self) -> Optional[int]:
    # fstring_conversion: "!" NAME
    """Parse an f-string ``!s``/``!r``/``!a`` conversion, validated by check_fstring_conversion."""
    mark = self._mark()
    if (
        (conv_token := self.expect("!"))
        and
        (conv := self.name())
    ):
        return self . check_fstring_conversion ( conv_token , conv );
    self._reset(mark)
    return None;
@memoize
def fstring_full_format_spec(self) -> Optional[Any]:
    # fstring_full_format_spec: ':' fstring_format_spec*
    """Parse the ``:<format>`` part of a replacement field into ast.JoinedStr.

    An empty or single-empty-constant spec is normalized to an empty
    values list.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect(':'))
        and
        (spec := self._loop0_176(),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . JoinedStr ( values = spec if spec and ( len ( spec ) > 1 or spec [0] . value ) else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    return None;
@memoize
def fstring_format_spec(self) -> Optional[Any]:
    # fstring_format_spec: FSTRING_MIDDLE | fstring_replacement_field
    """Parse one format-spec segment: a literal chunk as ast.Constant, or a nested replacement field."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (t := self.fstring_middle())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Constant ( value = t . string , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (fstring_replacement_field := self.fstring_replacement_field())
    ):
        return fstring_replacement_field;
    self._reset(mark)
    return None;
@memoize
def strings(self) -> Optional[Any]:
    # strings: ((fstring | STRING))+
    """Parse one or more adjacent (f-)string literals and concatenate them.

    Python >= 3.12 tokenizes f-strings natively, so a different helper is
    used per interpreter version.
    """
    mark = self._mark()
    if (
        (a := self._loop1_177())
    ):
        return self . concatenate_strings ( a ) if sys . version_info >= ( 3 , 12 ) else self . generate_ast_for_string ( a );
    self._reset(mark)
    return None;
@memoize
def list(self) -> Optional[ast . List]:
    # list: '[' star_named_expressions? ']' | '**' expression '=' expression
    """Parse a list display into ast.List (Load context).

    The second alternative exists only to report ``**expr = expr`` as a
    syntax error with a precise range.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('['))
        and
        (a := self.star_named_expressions(),)
        and
        (self.expect(']'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . List ( elts = a or [] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.expect('**'))
        and
        (self.expression())
        and
        (self.expect('='))
        and
        (b := self.expression())
    ):
        return self . raise_syntax_error_known_range ( "cannot assign to keyword argument unpacking" , a , b );
    self._reset(mark)
    return None;
@memoize
def tuple(self) -> Optional[ast . Tuple]:
    # tuple: '(' [star_named_expression ',' star_named_expressions?] ')'
    """Parse a parenthesized tuple display (possibly empty) into ast.Tuple (Load context)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('('))
        and
        (a := self._tmp_178(),)
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Tuple ( elts = a or [] , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def set(self) -> Optional[ast . Set]:
    # set: '{' star_named_expressions '}'
    """Parse a non-empty set display into ast.Set (``{}`` is a dict, not a set)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('{'))
        and
        (a := self.star_named_expressions())
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Set ( elts = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def dict(self) -> Optional[ast . Dict]:
    # dict: '{' double_starred_kvpairs? '}' | '{' invalid_double_starred_kvpairs '}'
    """Parse a dict display into ast.Dict.

    ``**mapping`` entries arrive as (None, value) pairs, matching the
    ast.Dict convention of a None key for dict unpacking.
    """
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('{'))
        and
        (a := self.double_starred_kvpairs(),)
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Dict ( keys = [kv [0] for kv in ( a or [] )] , values = [kv [1] for kv in ( a or [] )] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Diagnostic-only alternative for malformed key/value pairs.
    if (
        self.call_invalid_rules
        and
        (self.expect('{'))
        and
        (self.invalid_double_starred_kvpairs())
        and
        (self.expect('}'))
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def double_starred_kvpairs(self) -> Optional[list]:
    # double_starred_kvpairs: ','.double_starred_kvpair+ ','?
    """Parse a comma-separated list of dict entries, tolerating a trailing comma."""
    mark = self._mark()
    if (
        (a := self._gather_179())
        and
        (self.expect(','),)
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def double_starred_kvpair(self) -> Optional[Any]:
    # double_starred_kvpair: '**' bitwise_or | kvpair
    """Parse one dict entry: ``**mapping`` as (None, value), or a plain key:value pair."""
    mark = self._mark()
    if (
        (self.expect('**'))
        and
        (a := self.bitwise_or())
    ):
        return ( None , a );
    self._reset(mark)
    if (
        (kvpair := self.kvpair())
    ):
        return kvpair;
    self._reset(mark)
    return None;
@memoize
def kvpair(self) -> Optional[tuple]:
    # kvpair: expression ':' expression
    """Parse a ``key: value`` pair; return it as a (key, value) tuple."""
    mark = self._mark()
    if (
        (a := self.expression())
        and
        (self.expect(':'))
        and
        (b := self.expression())
    ):
        return ( a , b );
    self._reset(mark)
    return None;
@memoize
def for_if_clauses(self) -> Optional[List [ast . comprehension]]:
    # for_if_clauses: for_if_clause+
    """Parse one or more comprehension clauses; return the list of ast.comprehension nodes."""
    mark = self._mark()
    if (
        (a := self._loop1_181())
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def for_if_clause(self) -> Optional[ast . comprehension]:
    # for_if_clause: 'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))* | 'for' star_targets 'in' ~ disjunction (('if' disjunction))* | invalid_for_target
    """Parse one ``[async] for ... in ... [if ...]*`` comprehension clause.

    ``cut := True`` implements the grammar's ``~`` commit operator: once
    'in' has been consumed, failure of the rest of the alternative aborts
    the whole rule instead of backtracking to later alternatives.
    """
    mark = self._mark()
    cut = False
    if (
        (self.expect('async'))
        and
        (self.expect('for'))
        and
        (a := self.star_targets())
        and
        (self.expect('in'))
        and
        (cut := True)
        and
        (b := self.disjunction())
        and
        (c := self._loop0_182(),)
    ):
        # Async comprehensions need Python >= 3.6; check_version raises otherwise.
        return self . check_version ( ( 3 , 6 ) , "Async comprehensions are" , ast . comprehension ( target = a , iter = b , ifs = c , is_async = 1 ) );
    self._reset(mark)
    if cut:
        return None;
    cut = False
    if (
        (self.expect('for'))
        and
        (a := self.star_targets())
        and
        (self.expect('in'))
        and
        (cut := True)
        and
        (b := self.disjunction())
        and
        (c := self._loop0_183(),)
    ):
        return ast . comprehension ( target = a , iter = b , ifs = c , is_async = 0 );
    self._reset(mark)
    if cut:
        return None;
    # Diagnostic-only alternative for an invalid assignment target.
    if (
        self.call_invalid_rules
        and
        (self.invalid_for_target())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def listcomp(self) -> Optional[ast . ListComp]:
    # listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension
    """Parse a list comprehension into ast.ListComp."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('['))
        and
        (a := self.named_expression())
        and
        (b := self.for_if_clauses())
        and
        (self.expect(']'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . ListComp ( elt = a , generators = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Diagnostic-only alternative for a malformed comprehension.
    if (
        self.call_invalid_rules
        and
        (self.invalid_comprehension())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def setcomp(self) -> Optional[ast . SetComp]:
    # setcomp: '{' named_expression for_if_clauses '}' | invalid_comprehension
    """Parse a set comprehension into ast.SetComp."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('{'))
        and
        (a := self.named_expression())
        and
        (b := self.for_if_clauses())
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . SetComp ( elt = a , generators = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Diagnostic-only alternative for a malformed comprehension.
    if (
        self.call_invalid_rules
        and
        (self.invalid_comprehension())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def genexp(self) -> Optional[ast . GeneratorExp]:
    # genexp: '(' (assignment_expression | expression !':=') for_if_clauses ')' | invalid_comprehension
    """Parse a parenthesized generator expression into ast.GeneratorExp."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('('))
        and
        (a := self._tmp_184())
        and
        (b := self.for_if_clauses())
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . GeneratorExp ( elt = a , generators = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Diagnostic-only alternative for a malformed comprehension.
    if (
        self.call_invalid_rules
        and
        (self.invalid_comprehension())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def dictcomp(self) -> Optional[ast . DictComp]:
    # dictcomp: '{' kvpair for_if_clauses '}' | invalid_dict_comprehension
    """Parse a dict comprehension into ast.DictComp; kvpair yields the (key, value) pair."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('{'))
        and
        (a := self.kvpair())
        and
        (b := self.for_if_clauses())
        and
        (self.expect('}'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . DictComp ( key = a [0] , value = a [1] , generators = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    # Diagnostic-only alternative for a malformed dict comprehension.
    if (
        self.call_invalid_rules
        and
        (self.invalid_dict_comprehension())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def arguments(self) -> Optional[Tuple [list , list]]:
    # arguments: args ','? &')' | invalid_arguments
    """Parse a call's argument list; must be followed by ')' (lookahead only)."""
    mark = self._mark()
    if (
        (a := self.args())
        and
        (self.expect(','),)
        and
        (self.positive_lookahead(self.expect, ')'))
    ):
        return a;
    self._reset(mark)
    # Diagnostic-only alternative for malformed arguments.
    if (
        self.call_invalid_rules
        and
        (self.invalid_arguments())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def args(self) -> Optional[Tuple [list , list]]:
    # args: ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs] | kwargs
    """Parse call arguments; return (positional-args, keyword-args).

    ast.Starred entries from the keyword section are ``*args`` and belong
    with the positionals; the rest are ast.keyword nodes.
    """
    mark = self._mark()
    if (
        (a := self._gather_185())
        and
        (b := self._tmp_187(),)
    ):
        return ( a + ( [e for e in b if isinstance ( e , ast . Starred )] if b else [] ) , ( [e for e in b if not isinstance ( e , ast . Starred )] if b else [] ) );
    self._reset(mark)
    if (
        (a := self.kwargs())
    ):
        return ( [e for e in a if isinstance ( e , ast . Starred )] , [e for e in a if not isinstance ( e , ast . Starred )] );
    self._reset(mark)
    return None;
@memoize
def kwargs(self) -> Optional[list]:
    # kwargs: ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ | ','.kwarg_or_starred+ | ','.kwarg_or_double_starred+
    """Parse the keyword-argument section of a call; return a flat node list."""
    mark = self._mark()
    # Both sections present: kwarg/*-args, then kwarg/**-args.
    if (
        (a := self._gather_188())
        and
        (self.expect(','))
        and
        (b := self._gather_190())
    ):
        return a + b;
    self._reset(mark)
    if (
        (_gather_192 := self._gather_192())
    ):
        return _gather_192;
    self._reset(mark)
    if (
        (_gather_194 := self._gather_194())
    ):
        return _gather_194;
    self._reset(mark)
    return None;
@memoize
def starred_expression(self) -> Optional[Any]:
    # starred_expression: invalid_starred_expression | '*' expression
    """Parse a ``*expression`` unpacking into ast.Starred (Load context)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    # Diagnostic-only alternative for a malformed starred expression.
    if (
        self.call_invalid_rules
        and
        (self.invalid_starred_expression())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (a := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Starred ( value = a , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def kwarg_or_starred(self) -> Optional[Any]:
    # kwarg_or_starred: invalid_kwarg | NAME '=' expression | starred_expression
    """Parse ``name=value`` (ast.keyword) or a ``*args`` unpacking (ast.Starred)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    # Diagnostic-only alternative for a malformed keyword argument.
    if (
        self.call_invalid_rules
        and
        (self.invalid_kwarg())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (a := self.name())
        and
        (self.expect('='))
        and
        (b := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . keyword ( arg = a . string , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.starred_expression())
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def kwarg_or_double_starred(self) -> Optional[Any]:
    # kwarg_or_double_starred: invalid_kwarg | NAME '=' expression | '**' expression
    """Parse ``name=value`` or ``**mapping``; both become ast.keyword (arg=None for ``**``)."""
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    # Diagnostic-only alternative for a malformed keyword argument.
    if (
        self.call_invalid_rules
        and
        (self.invalid_kwarg())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (a := self.name())
        and
        (self.expect('='))
        and
        (b := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . keyword ( arg = a . string , value = b , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('**'))
        and
        (a := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . keyword ( arg = None , value = a , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def star_targets(self) -> Optional[Any]:
    """Parse assignment targets; a lone target is returned as-is, multiples become a Store Tuple."""
    # star_targets: star_target !',' | star_target ((',' star_target))* ','?
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.star_target())
        and
        (self.negative_lookahead(self.expect, ','))
    ):
        return a;
    self._reset(mark)
    if (
        (a := self.star_target())
        and
        # The trailing comma wraps the result in a 1-tuple, which is always
        # truthy: the repetition / optional comma may match zero times.
        (b := self._loop0_196(),)
        and
        (self.expect(','),)
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Tuple ( elts = [a] + b , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def star_targets_list_seq(self) -> Optional[list]:
    """Parse a comma-separated sequence of star targets (with optional trailing comma)."""
    # star_targets_list_seq: ','.star_target+ ','?
    mark = self._mark()
    if (
        (a := self._gather_197())
        and
        (self.expect(','),)
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def star_targets_tuple_seq(self) -> Optional[list]:
    """Parse targets forming a tuple; a single target requires a trailing comma."""
    # star_targets_tuple_seq: star_target ((',' star_target))+ ','? | star_target ','
    mark = self._mark()
    if (
        (a := self.star_target())
        and
        (b := self._loop1_199())
        and
        (self.expect(','),)
    ):
        return [a] + b;
    self._reset(mark)
    if (
        (a := self.star_target())
        and
        (self.expect(','))
    ):
        return [a];
    self._reset(mark)
    return None;
@memoize
def star_target(self) -> Optional[Any]:
    """Parse one assignment target, possibly prefixed with `*` (→ ast.Starred in Store context)."""
    # star_target: '*' (!'*' star_target) | target_with_star_atom
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (self.expect('*'))
        and
        (a := self._tmp_200())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Starred ( value = self . set_expr_context ( a , Store ) , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (target_with_star_atom := self.target_with_star_atom())
    ):
        return target_with_star_atom;
    self._reset(mark)
    return None;
@memoize
def target_with_star_atom(self) -> Optional[Any]:
    """Parse an attribute/subscript assignment target (Store context) or fall back to star_atom."""
    # target_with_star_atom: t_primary '.' NAME !t_lookahead | t_primary '[' slices ']' !t_lookahead | star_atom
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.t_primary())
        and
        (self.expect('.'))
        and
        (b := self.name())
        and
        # No '(' '[' '.' may follow: otherwise this is a longer t_primary chain.
        (self.negative_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Attribute ( value = a , attr = b . string , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.t_primary())
        and
        (self.expect('['))
        and
        (b := self.slices())
        and
        (self.expect(']'))
        and
        (self.negative_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Subscript ( value = a , slice = b , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (star_atom := self.star_atom())
    ):
        return star_atom;
    self._reset(mark)
    return None;
@memoize
def star_atom(self) -> Optional[Any]:
    """Parse an atomic assignment target: a name, or a parenthesized/bracketed target group."""
    # star_atom: NAME | '(' target_with_star_atom ')' | '(' star_targets_tuple_seq? ')' | '[' star_targets_list_seq? ']'
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Name ( id = a . string , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('('))
        and
        (a := self.target_with_star_atom())
        and
        (self.expect(')'))
    ):
        # Parentheses around a single target do not create a tuple.
        return self . set_expr_context ( a , Store );
    self._reset(mark)
    if (
        (self.expect('('))
        and
        (a := self.star_targets_tuple_seq(),)
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Tuple ( elts = a , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('['))
        and
        (a := self.star_targets_list_seq(),)
        and
        (self.expect(']'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . List ( elts = a , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def single_target(self) -> Optional[Any]:
    """Parse a single (non-tuple) assignment target, e.g. for annotated assignments."""
    # single_target: single_subscript_attribute_target | NAME | '(' single_target ')'
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (single_subscript_attribute_target := self.single_subscript_attribute_target())
    ):
        return single_subscript_attribute_target;
    self._reset(mark)
    if (
        (a := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Name ( id = a . string , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('('))
        and
        (a := self.single_target())
        and
        (self.expect(')'))
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def single_subscript_attribute_target(self) -> Optional[Any]:
    """Parse an `obj.attr` or `obj[slices]` assignment target (Store context)."""
    # single_subscript_attribute_target: t_primary '.' NAME !t_lookahead | t_primary '[' slices ']' !t_lookahead
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.t_primary())
        and
        (self.expect('.'))
        and
        (b := self.name())
        and
        (self.negative_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Attribute ( value = a , attr = b . string , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.t_primary())
        and
        (self.expect('['))
        and
        (b := self.slices())
        and
        (self.expect(']'))
        and
        (self.negative_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Subscript ( value = a , slice = b , ctx = Store , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize_left_rec
def t_primary(self) -> Optional[Any]:
    """Left-recursive primary used inside targets: attribute, subscript, and call chains (Load)."""
    # t_primary: t_primary '.' NAME &t_lookahead | t_primary '[' slices ']' &t_lookahead | t_primary genexp &t_lookahead | t_primary '(' arguments? ')' &t_lookahead | atom &t_lookahead
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.t_primary())
        and
        (self.expect('.'))
        and
        (b := self.name())
        and
        # Positive lookahead: the chain must continue with '(' '[' or '.'.
        (self.positive_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Attribute ( value = a , attr = b . string , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.t_primary())
        and
        (self.expect('['))
        and
        (b := self.slices())
        and
        (self.expect(']'))
        and
        (self.positive_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Subscript ( value = a , slice = b , ctx = Load , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.t_primary())
        and
        (b := self.genexp())
        and
        (self.positive_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # A bare generator expression argument: f(x for x in y).
        return ast . Call ( func = a , args = [b] , keywords = [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.t_primary())
        and
        (self.expect('('))
        and
        (b := self.arguments(),)
        and
        (self.expect(')'))
        and
        (self.positive_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        # arguments() returns a (args, keywords) pair, or None when empty.
        return ast . Call ( func = a , args = b [0] if b else [] , keywords = b [1] if b else [] , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset , );
    self._reset(mark)
    if (
        (a := self.atom())
        and
        (self.positive_lookahead(self.t_lookahead, ))
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def t_lookahead(self) -> Optional[Any]:
    """Match one of '(' '[' '.' — used only as a lookahead by the target rules."""
    # t_lookahead: '(' | '[' | '.'
    mark = self._mark()
    if (
        (literal := self.expect('('))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect('['))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect('.'))
    ):
        return literal;
    self._reset(mark)
    return None;
@memoize
def del_targets(self) -> Optional[Any]:
    """Parse a comma-separated list of `del` targets (optional trailing comma)."""
    # del_targets: ','.del_target+ ','?
    mark = self._mark()
    if (
        (a := self._gather_201())
        and
        (self.expect(','),)
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def del_target(self) -> Optional[Any]:
    """Parse one `del` target: attribute, subscript, or atomic target — all in Del context."""
    # del_target: t_primary '.' NAME !t_lookahead | t_primary '[' slices ']' !t_lookahead | del_t_atom
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.t_primary())
        and
        (self.expect('.'))
        and
        (b := self.name())
        and
        (self.negative_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Attribute ( value = a , attr = b . string , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (a := self.t_primary())
        and
        (self.expect('['))
        and
        (b := self.slices())
        and
        (self.expect(']'))
        and
        (self.negative_lookahead(self.t_lookahead, ))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Subscript ( value = a , slice = b , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (del_t_atom := self.del_t_atom())
    ):
        return del_t_atom;
    self._reset(mark)
    return None;
@memoize
def del_t_atom(self) -> Optional[Any]:
    """Parse an atomic `del` target: name, parenthesized target, tuple, or list (Del context)."""
    # del_t_atom: NAME | '(' del_target ')' | '(' del_targets? ')' | '[' del_targets? ']'
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.name())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Name ( id = a . string , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('('))
        and
        (a := self.del_target())
        and
        (self.expect(')'))
    ):
        return self . set_expr_context ( a , Del );
    self._reset(mark)
    if (
        (self.expect('('))
        and
        (a := self.del_targets(),)
        and
        (self.expect(')'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . Tuple ( elts = a , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (self.expect('['))
        and
        (a := self.del_targets(),)
        and
        (self.expect(']'))
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        return ast . List ( elts = a , ctx = Del , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    return None;
@memoize
def type_expressions(self) -> Optional[list]:
    """Parse the expression list of a function type comment, allowing `*` / `**` entries last."""
    # type_expressions: ','.expression+ ',' '*' expression ',' '**' expression | ','.expression+ ',' '*' expression | ','.expression+ ',' '**' expression | '*' expression ',' '**' expression | '*' expression | '**' expression | ','.expression+
    mark = self._mark()
    # Alternatives are ordered longest-first so the greediest match wins.
    if (
        (a := self._gather_203())
        and
        (self.expect(','))
        and
        (self.expect('*'))
        and
        (b := self.expression())
        and
        (self.expect(','))
        and
        (self.expect('**'))
        and
        (c := self.expression())
    ):
        return a + [b , c];
    self._reset(mark)
    if (
        (a := self._gather_205())
        and
        (self.expect(','))
        and
        (self.expect('*'))
        and
        (b := self.expression())
    ):
        return a + [b];
    self._reset(mark)
    if (
        (a := self._gather_207())
        and
        (self.expect(','))
        and
        (self.expect('**'))
        and
        (b := self.expression())
    ):
        return a + [b];
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (a := self.expression())
        and
        (self.expect(','))
        and
        (self.expect('**'))
        and
        (b := self.expression())
    ):
        return [a , b];
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (a := self.expression())
    ):
        return [a];
    self._reset(mark)
    if (
        (self.expect('**'))
        and
        (a := self.expression())
    ):
        return [a];
    self._reset(mark)
    if (
        (a := self._gather_209())
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def func_type_comment(self) -> Optional[Any]:
    """Parse an optional function type comment; returns its string or the raw token."""
    # func_type_comment: NEWLINE TYPE_COMMENT &(NEWLINE INDENT) | invalid_double_type_comments | TYPE_COMMENT
    mark = self._mark()
    if (
        (self.expect('NEWLINE'))
        and
        (t := self.type_comment())
        and
        # Only a type comment immediately before the indented body counts.
        (self.positive_lookahead(self._tmp_211, ))
    ):
        return t . string;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.invalid_double_type_comments())
    ):
        return None # pragma: no cover;
    self._reset(mark)
    if (
        (type_comment := self.type_comment())
    ):
        return type_comment;
    self._reset(mark)
    return None;
@memoize
def invalid_arguments(self) -> Optional[NoReturn]:
    """Error-recovery rule: diagnose malformed call-argument lists with targeted SyntaxErrors."""
    # invalid_arguments: args ',' '*' | expression for_if_clauses ',' [args | expression for_if_clauses] | NAME '=' expression for_if_clauses | [(args ',')] NAME '=' &(',' | ')') | args for_if_clauses | args ',' expression for_if_clauses | args ',' args
    mark = self._mark()
    if (
        (a := self.args())
        and
        (self.expect(','))
        and
        (self.expect('*'))
    ):
        # args() returns (positional, keyword) lists; anchor on the last node parsed.
        return self . raise_syntax_error_known_location ( "iterable argument unpacking follows keyword argument unpacking" , a [1] [- 1] if a [1] else a [0] [- 1] , );
    self._reset(mark)
    if (
        (a := self.expression())
        and
        (b := self.for_if_clauses())
        and
        (self.expect(','))
        and
        (self._tmp_212(),)
    ):
        return self . raise_syntax_error_known_range ( "Generator expression must be parenthesized" , a , ( b [- 1] . ifs [- 1] if b [- 1] . ifs else b [- 1] . iter ) );
    self._reset(mark)
    if (
        (a := self.name())
        and
        (b := self.expect('='))
        and
        (self.expression())
        and
        (self.for_if_clauses())
    ):
        return self . raise_syntax_error_known_range ( "invalid syntax. Maybe you meant '==' or ':=' instead of '='?" , a , b );
    self._reset(mark)
    if (
        (self._tmp_213(),)
        and
        (a := self.name())
        and
        (b := self.expect('='))
        and
        (self.positive_lookahead(self._tmp_214, ))
    ):
        return self . raise_syntax_error_known_range ( "expected argument value expression" , a , b );
    self._reset(mark)
    if (
        (a := self.args())
        and
        (b := self.for_if_clauses())
    ):
        # Only an error when more than one positional precedes the comprehension.
        return self . raise_syntax_error_known_range ( "Generator expression must be parenthesized" , a [0] [- 1] , ( b [- 1] . ifs [- 1] if b [- 1] . ifs else b [- 1] . iter ) , ) if len ( a [0] ) > 1 else None;
    self._reset(mark)
    if (
        (self.args())
        and
        (self.expect(','))
        and
        (a := self.expression())
        and
        (b := self.for_if_clauses())
    ):
        return self . raise_syntax_error_known_range ( "Generator expression must be parenthesized" , a , ( b [- 1] . ifs [- 1] if b [- 1] . ifs else b [- 1] . iter ) , );
    self._reset(mark)
    if (
        (a := self.args())
        and
        (self.expect(','))
        and
        (self.args())
    ):
        return self . raise_syntax_error ( "positional argument follows keyword argument unpacking" if a [1] [- 1] . arg is None else "positional argument follows keyword argument" , );
    self._reset(mark)
    return None;
@memoize
def invalid_kwarg(self) -> Optional[NoReturn]:
    """Error-recovery rule: diagnose bad keyword-argument syntax (e.g. `True=...`, `x+1=...`)."""
    # invalid_kwarg: ('True' | 'False' | 'None') '=' | NAME '=' expression for_if_clauses | !(NAME '=') expression '='
    mark = self._mark()
    if (
        (a := self._tmp_215())
        and
        (b := self.expect('='))
    ):
        return self . raise_syntax_error_known_range ( f"cannot assign to {a.string}" , a , b );
    self._reset(mark)
    if (
        (a := self.name())
        and
        (b := self.expect('='))
        and
        (self.expression())
        and
        (self.for_if_clauses())
    ):
        return self . raise_syntax_error_known_range ( "invalid syntax. Maybe you meant '==' or ':=' instead of '='?" , a , b );
    self._reset(mark)
    if (
        (self.negative_lookahead(self._tmp_216, ))
        and
        (a := self.expression())
        and
        (b := self.expect('='))
    ):
        return self . raise_syntax_error_known_range ( "expression cannot contain assignment, perhaps you meant \"==\"?" , a , b , );
    self._reset(mark)
    return None;
@memoize
def invalid_scenic_instance_creation(self) -> Optional[NoReturn]:
    """Scenic error rule: a class name followed by a specifier suggests a missing `new`."""
    # invalid_scenic_instance_creation: NAME scenic_valid_specifier
    mark = self._mark()
    if (
        (n := self.name())
        and
        (s := self.scenic_valid_specifier())
    ):
        return self . raise_syntax_error_known_range ( "invalid syntax. Perhaps you forgot 'new'?" , n , s );
    self._reset(mark)
    return None;
@memoize
def invalid_scenic_specifier(self) -> Optional[NoReturn]:
    """Scenic error rule: report an unrecognized specifier name."""
    # invalid_scenic_specifier: NAME
    mark = self._mark()
    if (
        (n := self.name())
    ):
        return self . raise_syntax_error_known_location ( "invalid specifier." , n );
    self._reset(mark)
    return None;
@memoize
def expression_without_invalid(self) -> Optional[ast . AST]:
    """Parse an expression with invalid-syntax rules temporarily disabled (avoids recursion
    while producing diagnostics); restores the previous flag on every exit path."""
    # expression_without_invalid: disjunction 'if' disjunction 'else' expression | disjunction | lambdef
    _prev_call_invalid = self.call_invalid_rules
    self.call_invalid_rules = False
    mark = self._mark()
    tok = self._tokenizer.peek()
    start_lineno, start_col_offset = tok.start
    if (
        (a := self.disjunction())
        and
        (self.expect('if'))
        and
        (b := self.disjunction())
        and
        (self.expect('else'))
        and
        (c := self.expression())
    ):
        tok = self._tokenizer.get_last_non_whitespace_token()
        end_lineno, end_col_offset = tok.end
        self.call_invalid_rules = _prev_call_invalid
        return ast . IfExp ( body = b , test = a , orelse = c , lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
    self._reset(mark)
    if (
        (disjunction := self.disjunction())
    ):
        self.call_invalid_rules = _prev_call_invalid
        return disjunction;
    self._reset(mark)
    if (
        (lambdef := self.lambdef())
    ):
        self.call_invalid_rules = _prev_call_invalid
        return lambdef;
    self._reset(mark)
    self.call_invalid_rules = _prev_call_invalid
    return None;
@memoize
def invalid_legacy_expression(self) -> Optional[Any]:
    """Error-recovery rule: detect Python-2 style `print x` / `exec x` and raise a
    SyntaxError suggesting the parenthesized call; matches but returns None for
    any other NAME followed by expressions."""
    # invalid_legacy_expression: NAME !'(' star_expressions
    mark = self._mark()
    if (
        (a := self.name())
        and
        (self.negative_lookahead(self.expect, '('))
        and
        (b := self.star_expressions())
    ):
        # Fix: removed the stray space before the period that the code generator
        # left inside the f-string (it previously rendered as
        # "Missing parentheses in call to 'print' . Did you mean ...").
        return self . raise_syntax_error_known_range ( f"Missing parentheses in call to '{a.string}'. Did you mean {a.string}(...)?" , a , b , ) if a . string in ( "exec" , "print" ) else None;
    self._reset(mark)
    return None;
@memoize
def invalid_expression(self) -> Optional[NoReturn]:
    """Error-recovery rule: common expression mistakes (missing comma, `if` without `else`,
    bare lambda inside an f-string)."""
    # invalid_expression: !(NAME STRING | SOFT_KEYWORD) disjunction !SOFT_KEYWORD expression_without_invalid | disjunction 'if' disjunction !('else' | ':') | 'lambda' lambda_params? ':' &(FSTRING_MIDDLE | fstring_replacement_field)
    mark = self._mark()
    if (
        (self.negative_lookahead(self._tmp_217, ))
        and
        (a := self.disjunction())
        and
        (self.negative_lookahead(self.soft_keyword, ))
        and
        (b := self.expression_without_invalid())
    ):
        # `print x` / `exec x` is handled by invalid_legacy_expression instead.
        return ( self . raise_syntax_error_known_range ( "invalid syntax. Perhaps you forgot a comma?" , a , b ) if not isinstance ( a , ast . Name ) or a . id not in ( "print" , "exec" ) else None );
    self._reset(mark)
    if (
        (a := self.disjunction())
        and
        (self.expect('if'))
        and
        (b := self.disjunction())
        and
        (self.negative_lookahead(self._tmp_218, ))
    ):
        return self . raise_syntax_error_known_range ( "expected 'else' after 'if' expression" , a , b );
    self._reset(mark)
    if (
        (a := self.expect('lambda'))
        and
        (self.lambda_params(),)
        and
        (b := self.expect(':'))
        and
        (self.positive_lookahead(self._tmp_219, ))
    ):
        return self . raise_syntax_error_known_range ( "f-string: lambda expressions are not allowed without parentheses" , a , b );
    self._reset(mark)
    return None;
@memoize
def invalid_named_expression(self) -> Optional[NoReturn]:
    """Error-recovery rule: bad walrus targets and `=` used where `==`/`:=` was meant."""
    # invalid_named_expression: expression ':=' expression | NAME '=' bitwise_or !('=' | ':=') | !(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')
    mark = self._mark()
    if (
        (a := self.expression())
        and
        (self.expect(':='))
        and
        (self.expression())
    ):
        return self . raise_syntax_error_known_location ( f"cannot use assignment expressions with {self.get_expr_name(a)}" , a );
    self._reset(mark)
    if (
        (a := self.name())
        and
        (self.expect('='))
        and
        (b := self.bitwise_or())
        and
        (self.negative_lookahead(self._tmp_220, ))
    ):
        # Suppressed inside recursive rules to avoid raising from a speculative parse.
        return ( None if self . in_recursive_rule else self . raise_syntax_error_known_range ( "invalid syntax. Maybe you meant '==' or ':=' instead of '='?" , a , b ) );
    self._reset(mark)
    if (
        (self.negative_lookahead(self._tmp_221, ))
        and
        (a := self.bitwise_or())
        and
        (self.expect('='))
        and
        (self.bitwise_or())
        and
        (self.negative_lookahead(self._tmp_222, ))
    ):
        return ( None if self . in_recursive_rule else self . raise_syntax_error_known_location ( f"cannot assign to {self.get_expr_name(a)} here. Maybe you meant '==' instead of '='?" , a ) );
    self._reset(mark)
    return None;
@memoize
def invalid_scenic_until(self) -> Optional[NoReturn]:
    """Scenic error rule: reject chained `until` (it is strictly binary)."""
    # invalid_scenic_until: scenic_temporal_disjunction 'until' scenic_implication
    mark = self._mark()
    if (
        (a := self.scenic_temporal_disjunction())
        and
        (self.expect('until'))
        and
        (self.scenic_implication())
    ):
        return self . raise_syntax_error_known_location ( f"`until` must take exactly two operands" , a );
    self._reset(mark)
    return None;
@memoize_left_rec
def invalid_scenic_implication(self) -> Optional[NoReturn]:
    """Scenic error rule: reject chained `implies` (it is strictly binary)."""
    # invalid_scenic_implication: scenic_until "implies" scenic_implication
    mark = self._mark()
    if (
        (a := self.scenic_until())
        and
        (self.expect("implies"))
        and
        (self.scenic_implication())
    ):
        return self . raise_syntax_error_known_location ( f"`implies` must take exactly two operands" , a );
    self._reset(mark)
    return None;
@memoize
def invalid_scenic_require_prob(self) -> Optional[NoReturn]:
    """Scenic error rule: the probability in `require[p] ...` must be a NUMBER literal."""
    # invalid_scenic_require_prob: 'require' '[' !(NUMBER ']') expression ']' scenic_temporal_expression ['as' scenic_require_stmt_name]
    mark = self._mark()
    if (
        (self.expect('require'))
        and
        (self.expect('['))
        and
        # Reject only when the bracket contents are NOT a plain number.
        (self.negative_lookahead(self._tmp_223, ))
        and
        (p := self.expression())
        and
        (self.expect(']'))
        and
        (self.scenic_temporal_expression())
        and
        (self._tmp_224(),)
    ):
        return self . raise_syntax_error_known_location ( f"'require' probability must be a constant" , p );
    self._reset(mark)
    return None;
@memoize
def invalid_scenic_dynamic_duration(self) -> Optional[NoReturn]:
    """Scenic error rule: a bare duration expression is missing its unit."""
    # invalid_scenic_dynamic_duration: expression
    mark = self._mark()
    if (
        (e := self.expression())
    ):
        return self . raise_syntax_error_known_location ( "duration must specify a unit (seconds or steps)" , e );
    self._reset(mark)
    return None;
@memoize
def invalid_assignment(self) -> Optional[NoReturn]:
    """Error-recovery rule: diagnose illegal assignment and annotation targets."""
    # invalid_assignment: invalid_ann_assign_target ':' expression | star_named_expression ',' star_named_expressions* ':' expression | expression ':' expression | ((star_targets '='))* star_expressions '=' | ((star_targets '='))* yield_expr '=' | star_expressions augassign (yield_expr | star_expressions)
    mark = self._mark()
    if (
        self.call_invalid_rules
        and
        (a := self.invalid_ann_assign_target())
        and
        (self.expect(':'))
        and
        (self.expression())
    ):
        return self . raise_syntax_error_known_location ( f"only single target (not {self.get_expr_name(a)}) can be annotated" , a );
    self._reset(mark)
    if (
        (a := self.star_named_expression())
        and
        (self.expect(','))
        and
        (self._loop0_225(),)
        and
        (self.expect(':'))
        and
        (self.expression())
    ):
        return self . raise_syntax_error_known_location ( "only single target (not tuple) can be annotated" , a );
    self._reset(mark)
    if (
        (a := self.expression())
        and
        (self.expect(':'))
        and
        (self.expression())
    ):
        return self . raise_syntax_error_known_location ( "illegal target for annotation" , a );
    self._reset(mark)
    if (
        (self._loop0_226(),)
        and
        (a := self.star_expressions())
        and
        (self.expect('='))
    ):
        return self . raise_syntax_error_invalid_target ( Target . STAR_TARGETS , a );
    self._reset(mark)
    if (
        (self._loop0_227(),)
        and
        (a := self.yield_expr())
        and
        (self.expect('='))
    ):
        return self . raise_syntax_error_known_location ( "assignment to yield expression not possible" , a );
    self._reset(mark)
    if (
        (a := self.star_expressions())
        and
        (self.augassign())
        and
        (self._tmp_228())
    ):
        return self . raise_syntax_error_known_location ( f"'{self.get_expr_name(a)}' is an illegal expression for augmented assignment" , a );
    self._reset(mark)
    return None;
@memoize
def invalid_ann_assign_target(self) -> Optional[ast . AST]:
    """Helper for invalid_assignment: match a list/tuple (possibly parenthesized) used as
    an annotation target, returning the offending node."""
    # invalid_ann_assign_target: list | tuple | '(' invalid_ann_assign_target ')'
    mark = self._mark()
    if (
        (a := self.list())
    ):
        return a;
    self._reset(mark)
    if (
        (a := self.tuple())
    ):
        return a;
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.expect('('))
        and
        (a := self.invalid_ann_assign_target())
        and
        (self.expect(')'))
    ):
        return a;
    self._reset(mark)
    return None;
@memoize
def invalid_del_stmt(self) -> Optional[NoReturn]:
    """Error-recovery rule: `del` applied to something that is not a valid target."""
    # invalid_del_stmt: 'del' star_expressions
    mark = self._mark()
    if (
        (self.expect('del'))
        and
        (a := self.star_expressions())
    ):
        return self . raise_syntax_error_invalid_target ( Target . DEL_TARGETS , a );
    self._reset(mark)
    return None;
@memoize
def invalid_block(self) -> Optional[NoReturn]:
    """Error-recovery rule: a newline not followed by INDENT where a block was required."""
    # invalid_block: NEWLINE !INDENT
    mark = self._mark()
    if (
        (self.expect('NEWLINE'))
        and
        (self.negative_lookahead(self.expect, 'INDENT'))
    ):
        return self . raise_indentation_error ( "expected an indented block" );
    self._reset(mark)
    return None;
@memoize
def invalid_comprehension(self) -> Optional[NoReturn]:
    """Error-recovery rule: starred or unparenthesized-tuple targets inside comprehensions."""
    # invalid_comprehension: ('[' | '(' | '{') starred_expression for_if_clauses | ('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses | ('[' | '{') star_named_expression ',' for_if_clauses
    mark = self._mark()
    if (
        (self._tmp_229())
        and
        (a := self.starred_expression())
        and
        (self.for_if_clauses())
    ):
        return self . raise_syntax_error_known_location ( "iterable unpacking cannot be used in comprehension" , a );
    self._reset(mark)
    if (
        (self._tmp_230())
        and
        (a := self.star_named_expression())
        and
        (self.expect(','))
        and
        (b := self.star_named_expressions())
        and
        (self.for_if_clauses())
    ):
        return self . raise_syntax_error_known_range ( "did you forget parentheses around the comprehension target?" , a , b [- 1] );
    self._reset(mark)
    if (
        (self._tmp_231())
        and
        (a := self.star_named_expression())
        and
        (b := self.expect(','))
        and
        (self.for_if_clauses())
    ):
        return self . raise_syntax_error_known_range ( "did you forget parentheses around the comprehension target?" , a , b );
    self._reset(mark)
    return None;
@memoize
def invalid_dict_comprehension(self) -> Optional[NoReturn]:
    """Error-recovery rule: `**` unpacking inside a dict comprehension."""
    # invalid_dict_comprehension: '{' '**' bitwise_or for_if_clauses '}'
    mark = self._mark()
    if (
        (self.expect('{'))
        and
        (a := self.expect('**'))
        and
        (self.bitwise_or())
        and
        (self.for_if_clauses())
        and
        (self.expect('}'))
    ):
        return self . raise_syntax_error_known_location ( "dict unpacking cannot be used in dict comprehension" , a );
    self._reset(mark)
    return None;
@memoize
def invalid_parameters(self) -> Optional[NoReturn]:
    """Error-recovery rule: malformed `def` parameter lists (misplaced '/' and '*',
    default-ordering violations, parenthesized parameters)."""
    # invalid_parameters: "/" ',' | (slash_no_default | slash_with_default) param_maybe_default* '/' | slash_no_default? param_no_default* invalid_parameters_helper param_no_default | param_no_default* '(' param_no_default+ ','? ')' | [(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/' | param_maybe_default+ '/' '*'
    mark = self._mark()
    if (
        (a := self.expect("/"))
        and
        (self.expect(','))
    ):
        return self . raise_syntax_error_known_location ( "at least one argument must precede /" , a );
    self._reset(mark)
    if (
        (self._tmp_232())
        and
        (self._loop0_233(),)
        and
        (a := self.expect('/'))
    ):
        return self . raise_syntax_error_known_location ( "/ may appear only once" , a );
    self._reset(mark)
    if (
        self.call_invalid_rules
        and
        (self.slash_no_default(),)
        and
        (self._loop0_234(),)
        and
        (self.invalid_parameters_helper())
        and
        (a := self.param_no_default())
    ):
        return self . raise_syntax_error_known_location ( "parameter without a default follows parameter with a default" , a );
    self._reset(mark)
    if (
        (self._loop0_235(),)
        and
        (a := self.expect('('))
        and
        (self._loop1_236())
        and
        (self.expect(','),)
        and
        (b := self.expect(')'))
    ):
        return self . raise_syntax_error_known_range ( "Function parameters cannot be parenthesized" , a , b );
    self._reset(mark)
    if (
        (self._tmp_237(),)
        and
        (self._loop0_238(),)
        and
        (self.expect('*'))
        and
        (self._tmp_239())
        and
        (self._loop0_240(),)
        and
        (a := self.expect('/'))
    ):
        return self . raise_syntax_error_known_location ( "/ must be ahead of *" , a );
    self._reset(mark)
    if (
        (self._loop1_241())
        and
        (self.expect('/'))
        and
        (a := self.expect('*'))
    ):
        return self . raise_syntax_error_known_location ( "expected comma between / and *" , a );
    self._reset(mark)
    return None;
@memoize
def invalid_default(self) -> Optional[Any]:
    """Error-recovery rule: `=` with no default value before ')' or ','."""
    # invalid_default: '=' &(')' | ',')
    mark = self._mark()
    if (
        (a := self.expect('='))
        and
        (self.positive_lookahead(self._tmp_242, ))
    ):
        return self . raise_syntax_error_known_location ( "expected default value expression" , a );
    self._reset(mark)
    return None;
@memoize
def invalid_star_etc(self) -> Optional[Any]:
    """Error-recovery rule: misuse of `*` in parameter lists (bare `*` at the end,
    default on *args, duplicate `*`)."""
    # invalid_star_etc: '*' (')' | ',' (')' | '**')) | '*' ',' TYPE_COMMENT | '*' param '=' | '*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')
    mark = self._mark()
    if (
        (a := self.expect('*'))
        and
        (self._tmp_243())
    ):
        return self . raise_syntax_error_known_location ( "named arguments must follow bare *" , a );
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (self.expect(','))
        and
        (self.type_comment())
    ):
        return self . raise_syntax_error ( "bare * has associated type comment" );
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (self.param())
        and
        (a := self.expect('='))
    ):
        return self . raise_syntax_error_known_location ( "var-positional argument cannot have default value" , a );
    self._reset(mark)
    if (
        (self.expect('*'))
        and
        (self._tmp_244())
        and
        (self._loop0_245(),)
        and
        (a := self.expect('*'))
        and
        (self._tmp_246())
    ):
        return self . raise_syntax_error_known_location ( "* argument may appear only once" , a );
    self._reset(mark)
    return None;
@memoize
def invalid_kwds(self) -> Optional[Any]:
    """Error-recovery rule: misuse of `**kwargs` (default value, or parameters after it)."""
    # invalid_kwds: '**' param '=' | '**' param ',' param | '**' param ',' ('*' | '**' | '/')
    mark = self._mark()
    if (
        (self.expect('**'))
        and
        (self.param())
        and
        (a := self.expect('='))
    ):
        return self . raise_syntax_error_known_location ( "var-keyword argument cannot have default value" , a );
    self._reset(mark)
    if (
        (self.expect('**'))
        and
        (self.param())
        and
        (self.expect(','))
        and
        (a := self.param())
    ):
        return self . raise_syntax_error_known_location ( "arguments cannot follow var-keyword argument" , a );
    self._reset(mark)
    if (
        (self.expect('**'))
        and
        (self.param())
        and
        (self.expect(','))
        and
        (a := self._tmp_247())
    ):
        return self . raise_syntax_error_known_location ( "arguments cannot follow var-keyword argument" , a );
    self._reset(mark)
    return None;
@memoize
def invalid_parameters_helper(self) -> Optional[Any]:
    """Match the defaulted-parameter prefix used by the invalid_parameters rule.

    Returns a list (a single slash_with_default wrapped in a list, or one
    or more param_with_default), or None when neither matches.
    """
    # invalid_parameters_helper: slash_with_default | param_with_default+
    mark = self._mark()
    if ((a := self.slash_with_default())):
        return [a];
    self._reset(mark)
    if ((a := self._loop1_248())):
        return a;
    self._reset(mark)
    return None;
@memoize
def invalid_lambda_parameters(self) -> Optional[NoReturn]:
    """Diagnose malformed lambda parameter lists; mirrors invalid_parameters for 'def'."""
    # invalid_lambda_parameters: "/" ',' | (lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/' | lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default | lambda_param_no_default* '(' ','.lambda_param+ ','? ')' | [(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/' | lambda_param_maybe_default+ '/' '*'
    mark = self._mark()
    if ((a := self.expect("/")) and (self.expect(','))):
        return self.raise_syntax_error_known_location("at least one argument must precede /", a);
    self._reset(mark)
    if ((self._tmp_249()) and (self._loop0_250(),) and (a := self.expect('/'))):
        return self.raise_syntax_error_known_location("/ may appear only once", a);
    self._reset(mark)
    # this alternative recurses through another invalid_* rule, so it only
    # runs on the second (error-reporting) pass, guarded by call_invalid_rules
    if (self.call_invalid_rules
            and (self.lambda_slash_no_default(),)
            and (self._loop0_251(),)
            and (self.invalid_lambda_parameters_helper())
            and (a := self.lambda_param_no_default())):
        return self.raise_syntax_error_known_location("parameter without a default follows parameter with a default", a);
    self._reset(mark)
    if ((self._loop0_252(),) and (a := self.expect('(')) and (self._gather_253())
            and (self.expect(','),) and (b := self.expect(')'))):
        return self.raise_syntax_error_known_range("Lambda expression parameters cannot be parenthesized", a, b);
    self._reset(mark)
    if ((self._tmp_255(),) and (self._loop0_256(),) and (self.expect('*'))
            and (self._tmp_257()) and (self._loop0_258(),) and (a := self.expect('/'))):
        return self.raise_syntax_error_known_location("/ must be ahead of *", a);
    self._reset(mark)
    if ((self._loop1_259()) and (self.expect('/')) and (a := self.expect('*'))):
        return self.raise_syntax_error_known_location("expected comma between / and *", a);
    self._reset(mark)
    return None;
@memoize
def invalid_lambda_parameters_helper(self) -> Optional[NoReturn]:
    """Match the defaulted-parameter prefix used by invalid_lambda_parameters.

    Returns a list, or None when neither alternative matches.
    """
    # invalid_lambda_parameters_helper: lambda_slash_with_default | lambda_param_with_default+
    mark = self._mark()
    if ((a := self.lambda_slash_with_default())):
        return [a];
    self._reset(mark)
    if ((a := self._loop1_260())):
        return a;
    self._reset(mark)
    return None;
@memoize
def invalid_lambda_star_etc(self) -> Optional[NoReturn]:
    """Diagnose malformed '*' entries in a lambda parameter list."""
    # invalid_lambda_star_etc: '*' (':' | ',' (':' | '**')) | '*' lambda_param '=' | '*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')
    mark = self._mark()
    if ((self.expect('*')) and (self._tmp_261())):
        return self.raise_syntax_error("named arguments must follow bare *");
    self._reset(mark)
    if ((self.expect('*')) and (self.lambda_param()) and (a := self.expect('='))):
        return self.raise_syntax_error_known_location("var-positional argument cannot have default value", a);
    self._reset(mark)
    if ((self.expect('*')) and (self._tmp_262()) and (self._loop0_263(),)
            and (a := self.expect('*')) and (self._tmp_264())):
        return self.raise_syntax_error_known_location("* argument may appear only once", a);
    self._reset(mark)
    return None;
@memoize
def invalid_lambda_kwds(self) -> Optional[Any]:
    """Diagnose malformed '**' (var-keyword) entries in a lambda parameter list."""
    # invalid_lambda_kwds: '**' lambda_param '=' | '**' lambda_param ',' lambda_param | '**' lambda_param ',' ('*' | '**' | '/')
    mark = self._mark()
    if ((self.expect('**')) and (self.lambda_param()) and (a := self.expect('='))):
        return self.raise_syntax_error_known_location("var-keyword argument cannot have default value", a);
    self._reset(mark)
    if ((self.expect('**')) and (self.lambda_param()) and (self.expect(','))
            and (a := self.lambda_param())):
        return self.raise_syntax_error_known_location("arguments cannot follow var-keyword argument", a);
    self._reset(mark)
    if ((self.expect('**')) and (self.lambda_param()) and (self.expect(','))
            and (a := self._tmp_265())):
        return self.raise_syntax_error_known_location("arguments cannot follow var-keyword argument", a);
    self._reset(mark)
    return None;
@memoize
def invalid_double_type_comments(self) -> Optional[NoReturn]:
    """Reject two consecutive type comments on a function definition."""
    # invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT
    mark = self._mark()
    if ((self.type_comment()) and (self.expect('NEWLINE')) and (self.type_comment())
            and (self.expect('NEWLINE')) and (self.expect('INDENT'))):
        return self.raise_syntax_error("Cannot have two type comments on def");
    self._reset(mark)
    return None;
@memoize
def invalid_with_item(self) -> Optional[NoReturn]:
    """Reject a with-item whose 'as' target is not a valid assignment target."""
    # invalid_with_item: expression 'as' expression &(',' | ')' | ':')
    mark = self._mark()
    # the positive lookahead keeps this from firing mid-expression
    if ((self.expression()) and (self.expect('as')) and (a := self.expression())
            and (self.positive_lookahead(self._tmp_266, ))):
        return self.raise_syntax_error_invalid_target(Target.STAR_TARGETS, a);
    self._reset(mark)
    return None;
@memoize
def invalid_for_target(self) -> Optional[NoReturn]:
    """Reject an invalid assignment target after 'for' (optionally 'async for')."""
    # invalid_for_target: 'async'? 'for' star_expressions
    mark = self._mark()
    # (self.expect('async'),) — 1-tuple is always truthy: 'async' is optional
    if ((self.expect('async'),) and (self.expect('for')) and (a := self.star_expressions())):
        return self.raise_syntax_error_invalid_target(Target.FOR_TARGETS, a);
    self._reset(mark)
    return None;
@memoize
def invalid_group(self) -> Optional[NoReturn]:
    """Reject '(' *expr ')' and '(' **expr ')' — starred expressions cannot be parenthesized alone."""
    # invalid_group: '(' starred_expression ')' | '(' '**' expression ')'
    mark = self._mark()
    if ((self.expect('(')) and (a := self.starred_expression()) and (self.expect(')'))):
        return self.raise_syntax_error_known_location("cannot use starred expression here", a);
    self._reset(mark)
    if ((self.expect('(')) and (a := self.expect('**')) and (self.expression())
            and (self.expect(')'))):
        return self.raise_syntax_error_known_location("cannot use double starred expression here", a);
    self._reset(mark)
    return None;
@memoize
def invalid_import(self) -> Optional[Any]:
    """Diagnose 'import a from b' — the reversed form of 'from b import a'."""
    # invalid_import: 'import' ','.dotted_name+ 'from' dotted_name
    mark = self._mark()
    if ((a := self.expect('import')) and (self._gather_267()) and (self.expect('from'))
            and (self.dotted_name())):
        return self.raise_syntax_error_starting_from("Did you mean to use 'from ... import ...' instead?", a);
    self._reset(mark)
    return None;
@memoize
def invalid_import_from_targets(self) -> Optional[NoReturn]:
    """Reject a trailing comma in unparenthesized 'from x import a, b,'."""
    # invalid_import_from_targets: import_from_as_names ',' NEWLINE
    mark = self._mark()
    if ((self.import_from_as_names()) and (self.expect(',')) and (self.expect('NEWLINE'))):
        return self.raise_syntax_error("trailing comma not allowed without surrounding parentheses");
    self._reset(mark)
    return None;
@memoize
def invalid_with_stmt(self) -> Optional[None]:
    """Force a ':' after a with-statement header.

    expect_forced raises when the forced token is absent, so the
    ``return None`` lines are unreachable on the error path (hence the
    'pragma: no cover' markers).
    """
    # invalid_with_stmt: 'async'? 'with' ','.(expression ['as' star_target])+ &&':' | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' &&':'
    mark = self._mark()
    if ((self.expect('async'),) and (self.expect('with')) and (self._gather_269())
            and (self.expect_forced(self.expect(':'), "':'"))):
        return None # pragma: no cover;
    self._reset(mark)
    if ((self.expect('async'),) and (self.expect('with')) and (self.expect('('))
            and (self._gather_271()) and (self.expect(','),) and (self.expect(')'))
            and (self.expect_forced(self.expect(':'), "':'"))):
        return None # pragma: no cover;
    self._reset(mark)
    return None;
@memoize
def invalid_with_stmt_indent(self) -> Optional[NoReturn]:
    """Report a missing indented block after a (possibly parenthesized) with statement."""
    # invalid_with_stmt_indent: 'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT | 'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT
    mark = self._mark()
    if ((self.expect('async'),) and (a := self.expect('with')) and (self._gather_273())
            and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'with' statement on line {a.start[0]}");
    self._reset(mark)
    if ((self.expect('async'),) and (a := self.expect('with')) and (self.expect('('))
            and (self._gather_275()) and (self.expect(','),) and (self.expect(')'))
            and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'with' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_try_stmt(self) -> Optional[NoReturn]:
    """Diagnose malformed try statements: missing body, missing handler, or mixed except/except*."""
    # invalid_try_stmt: 'try' ':' NEWLINE !INDENT | 'try' ':' block !('except' | 'finally') | 'try' ':' block* except_block+ 'except' '*' expression ['as' NAME] ':' | 'try' ':' block* except_star_block+ 'except' [expression ['as' NAME]] ':'
    mark = self._mark()
    if ((a := self.expect('try')) and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'try' statement on line {a.start[0]}",);
    self._reset(mark)
    if ((self.expect('try')) and (self.expect(':')) and (self.block())
            and (self.negative_lookahead(self._tmp_277, ))):
        return self.raise_syntax_error("expected 'except' or 'finally' block");
    self._reset(mark)
    # plain except handlers followed by an 'except*' handler
    if ((self.expect('try')) and (self.expect(':')) and (self._loop0_278(),)
            and (self._loop1_279()) and (a := self.expect('except'))
            and (b := self.expect('*')) and (self.expression()) and (self._tmp_280(),)
            and (self.expect(':'))):
        return self.raise_syntax_error_known_range("cannot have both 'except' and 'except*' on the same 'try'", a, b);
    self._reset(mark)
    # 'except*' handlers followed by a plain 'except' handler
    if ((self.expect('try')) and (self.expect(':')) and (self._loop0_281(),)
            and (self._loop1_282()) and (a := self.expect('except'))
            and (self._tmp_283(),) and (self.expect(':'))):
        return self.raise_syntax_error_known_location("cannot have both 'except' and 'except*' on the same 'try'", a);
    self._reset(mark)
    return None;
@memoize
def invalid_except_stmt(self) -> Optional[None]:
    """Diagnose malformed 'except' / 'except*' clause headers."""
    # invalid_except_stmt: 'except' '*'? expression ',' expressions ['as' NAME] ':' | 'except' '*'? expression ['as' NAME] NEWLINE | 'except' '*'? NEWLINE | 'except' '*' (NEWLINE | ':')
    mark = self._mark()
    # comma-separated exception types without parentheses
    if ((self.expect('except')) and (self.expect('*'),) and (a := self.expression())
            and (self.expect(',')) and (self.expressions()) and (self._tmp_284(),)
            and (self.expect(':'))):
        return self.raise_syntax_error_starting_from("multiple exception types must be parenthesized", a);
    self._reset(mark)
    if ((self.expect('except')) and (self.expect('*'),) and (self.expression())
            and (self._tmp_285(),) and (self.expect('NEWLINE'))):
        return self.raise_syntax_error("expected ':'");
    self._reset(mark)
    if ((self.expect('except')) and (self.expect('*'),) and (self.expect('NEWLINE'))):
        return self.raise_syntax_error("expected ':'");
    self._reset(mark)
    # here '*' is mandatory: a bare 'except*' with no exception type
    if ((self.expect('except')) and (self.expect('*')) and (self._tmp_286())):
        return self.raise_syntax_error("expected one or more exception types");
    self._reset(mark)
    return None;
@memoize
def invalid_finally_stmt(self) -> Optional[NoReturn]:
    """Report a missing indented block after a 'finally:' clause."""
    # invalid_finally_stmt: 'finally' ':' NEWLINE !INDENT
    mark = self._mark()
    if ((a := self.expect('finally')) and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'finally' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_except_stmt_indent(self) -> Optional[NoReturn]:
    """Report a missing indented block after an 'except' clause (with or without a type)."""
    # invalid_except_stmt_indent: 'except' expression ['as' NAME] ':' NEWLINE !INDENT | 'except' ':' NEWLINE !INDENT
    mark = self._mark()
    if ((a := self.expect('except')) and (self.expression()) and (self._tmp_287(),)
            and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'except' statement on line {a.start[0]}");
    self._reset(mark)
    if ((a := self.expect('except')) and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'except' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_except_star_stmt_indent(self) -> Optional[Any]:
    """Report a missing indented block after an 'except*' clause."""
    # invalid_except_star_stmt_indent: 'except' '*' expression ['as' NAME] ':' NEWLINE !INDENT
    mark = self._mark()
    if ((a := self.expect('except')) and (self.expect('*')) and (self.expression())
            and (self._tmp_288(),) and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'except*' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_match_stmt(self) -> Optional[NoReturn]:
    """Diagnose malformed match statements; check_version gates the 3.10+ feature message."""
    # invalid_match_stmt: "match" subject_expr !':' | "match" subject_expr ':' NEWLINE !INDENT
    mark = self._mark()
    if ((self.expect("match")) and (self.subject_expr())
            and (self.negative_lookahead(self.expect, ':'))):
        return self.check_version((3, 10), "Pattern matching is", self.raise_syntax_error("expected ':'"));
    self._reset(mark)
    if ((a := self.expect("match")) and (self.subject_expr()) and (self.expect(':'))
            and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.check_version((3, 10), "Pattern matching is", self.raise_indentation_error(f"expected an indented block after 'match' statement on line {a.start[0]}"));
    self._reset(mark)
    return None;
@memoize
def invalid_case_block(self) -> Optional[NoReturn]:
    """Diagnose a 'case' clause missing its ':' or its indented block."""
    # invalid_case_block: "case" patterns guard? !':' | "case" patterns guard? ':' NEWLINE !INDENT
    mark = self._mark()
    if ((self.expect("case")) and (self.patterns()) and (self.guard(),)
            and (self.negative_lookahead(self.expect, ':'))):
        return self.raise_syntax_error("expected ':'");
    self._reset(mark)
    if ((a := self.expect("case")) and (self.patterns()) and (self.guard(),)
            and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'case' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_as_pattern(self) -> Optional[NoReturn]:
    """Reject invalid capture targets after 'as' in a match pattern ('_' or a non-name)."""
    # invalid_as_pattern: or_pattern 'as' "_" | or_pattern 'as' !NAME expression
    mark = self._mark()
    if ((self.or_pattern()) and (self.expect('as')) and (a := self.expect("_"))):
        return self.raise_syntax_error_known_location("cannot use '_' as a target", a);
    self._reset(mark)
    if ((self.or_pattern()) and (self.expect('as'))
            and (self.negative_lookahead(self.name, )) and (a := self.expression())):
        return self.raise_syntax_error_known_location("invalid pattern target", a);
    self._reset(mark)
    return None;
@memoize
def invalid_class_pattern(self) -> Optional[NoReturn]:
    """Reject positional sub-patterns that appear after keyword sub-patterns in a class pattern."""
    # invalid_class_pattern: name_or_attr '(' invalid_class_argument_pattern
    mark = self._mark()
    # guarded: only attempted on the error-reporting pass
    if (self.call_invalid_rules and (self.name_or_attr()) and (self.expect('('))
            and (a := self.invalid_class_argument_pattern())):
        # a is the offending positional_patterns list; span first..last
        return self.raise_syntax_error_known_range("positional patterns follow keyword patterns", a[0], a[- 1]);
    self._reset(mark)
    return None;
@memoize
def invalid_class_argument_pattern(self) -> Optional[list]:
    """Match keyword patterns followed by positional patterns; return the misplaced positionals."""
    # invalid_class_argument_pattern: [positional_patterns ','] keyword_patterns ',' positional_patterns
    mark = self._mark()
    if ((self._tmp_289(),) and (self.keyword_patterns()) and (self.expect(','))
            and (a := self.positional_patterns())):
        return a;
    self._reset(mark)
    return None;
@memoize
def invalid_if_stmt(self) -> Optional[NoReturn]:
    """Diagnose an 'if' missing its ':' or its indented block."""
    # invalid_if_stmt: 'if' named_expression NEWLINE | 'if' named_expression ':' NEWLINE !INDENT
    mark = self._mark()
    if ((self.expect('if')) and (self.named_expression()) and (self.expect('NEWLINE'))):
        return self.raise_syntax_error("expected ':'");
    self._reset(mark)
    # a_1 is bound but unused — an artifact of the code generator
    if ((a := self.expect('if')) and (a_1 := self.named_expression())
            and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'if' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_elif_stmt(self) -> Optional[NoReturn]:
    """Diagnose an 'elif' missing its ':' or its indented block."""
    # invalid_elif_stmt: 'elif' named_expression NEWLINE | 'elif' named_expression ':' NEWLINE !INDENT
    mark = self._mark()
    if ((self.expect('elif')) and (self.named_expression()) and (self.expect('NEWLINE'))):
        return self.raise_syntax_error("expected ':'");
    self._reset(mark)
    if ((a := self.expect('elif')) and (self.named_expression()) and (self.expect(':'))
            and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'elif' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_else_stmt(self) -> Optional[NoReturn]:
    """Report a missing indented block after an 'else:' clause."""
    # invalid_else_stmt: 'else' ':' NEWLINE !INDENT
    mark = self._mark()
    if ((a := self.expect('else')) and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'else' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_while_stmt(self) -> Optional[NoReturn]:
    """Diagnose a 'while' missing its ':' or its indented block."""
    # invalid_while_stmt: 'while' named_expression NEWLINE | 'while' named_expression ':' NEWLINE !INDENT
    mark = self._mark()
    if ((self.expect('while')) and (self.named_expression()) and (self.expect('NEWLINE'))):
        return self.raise_syntax_error("expected ':'");
    self._reset(mark)
    if ((a := self.expect('while')) and (self.named_expression()) and (self.expect(':'))
            and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'while' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_for_stmt(self) -> Optional[NoReturn]:
    """Diagnose a 'for' missing its ':' or its indented block."""
    # invalid_for_stmt: ASYNC? 'for' star_targets 'in' star_expressions NEWLINE | 'async'? 'for' star_targets 'in' star_expressions ':' NEWLINE !INDENT
    mark = self._mark()
    # NOTE(review): the first alternative matches the token type 'ASYNC' while
    # the second matches the keyword literal 'async', mirroring the grammar
    # comment above — presumably both resolve via expect(); confirm against
    # the generator if touching this.
    if ((self.expect('ASYNC'),) and (self.expect('for')) and (self.star_targets())
            and (self.expect('in')) and (self.star_expressions())
            and (self.expect('NEWLINE'))):
        return self.raise_syntax_error("expected ':'");
    self._reset(mark)
    if ((self.expect('async'),) and (a := self.expect('for')) and (self.star_targets())
            and (self.expect('in')) and (self.star_expressions()) and (self.expect(':'))
            and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after 'for' statement on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_def_raw(self) -> Optional[NoReturn]:
    """Report a missing indented block after a complete 'def' header."""
    # invalid_def_raw: 'async'? 'def' NAME type_params? '(' params? ')' ['->' expression] ':' NEWLINE !INDENT
    mark = self._mark()
    if ((self.expect('async'),) and (a := self.expect('def')) and (self.name())
            and (self.type_params(),) and (self.expect('(')) and (self.params(),)
            and (self.expect(')')) and (self._tmp_290(),) and (self.expect(':'))
            and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after function definition on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_class_def_raw(self) -> Optional[NoReturn]:
    """Diagnose a 'class' header missing its ':' or its indented block."""
    # invalid_class_def_raw: 'class' NAME type_params? ['(' arguments? ')'] NEWLINE | 'class' NAME type_params? ['(' arguments? ')'] ':' NEWLINE !INDENT
    mark = self._mark()
    if ((self.expect('class')) and (self.name()) and (self.type_params(),)
            and (self._tmp_291(),) and (self.expect('NEWLINE'))):
        return self.raise_syntax_error("expected ':'");
    self._reset(mark)
    if ((a := self.expect('class')) and (self.name()) and (self.type_params(),)
            and (self._tmp_292(),) and (self.expect(':')) and (self.expect('NEWLINE'))
            and (self.negative_lookahead(self.expect, 'INDENT'))):
        return self.raise_indentation_error(f"expected an indented block after class definition on line {a.start[0]}");
    self._reset(mark)
    return None;
@memoize
def invalid_double_starred_kvpairs(self) -> Optional[None]:
    """Diagnose malformed key/value pairs in a dict display."""
    # invalid_double_starred_kvpairs: ','.double_starred_kvpair+ ',' invalid_kvpair | expression ':' '*' bitwise_or | expression ':' &('}' | ',')
    mark = self._mark()
    # delegates to invalid_kvpair, which raises; guarded by call_invalid_rules
    if (self.call_invalid_rules and (self._gather_293()) and (self.expect(','))
            and (self.invalid_kvpair())):
        return None # pragma: no cover;
    self._reset(mark)
    if ((self.expression()) and (self.expect(':')) and (a := self.expect('*'))
            and (self.bitwise_or())):
        return self.raise_syntax_error_starting_from("cannot use a starred expression in a dictionary value", a);
    self._reset(mark)
    if ((self.expression()) and (a := self.expect(':'))
            and (self.positive_lookahead(self._tmp_295, ))):
        return self.raise_syntax_error_known_location("expression expected after dictionary key and ':'", a);
    self._reset(mark)
    return None;
@memoize
def invalid_kvpair(self) -> Optional[None]:
    """Diagnose a single malformed dict key/value pair."""
    # invalid_kvpair: expression !(':') | expression ':' '*' bitwise_or | expression ':' &('}' | ',') | expression ':'
    mark = self._mark()
    # key with no ':' at all — report across the key's own source span
    if ((a := self.expression()) and (self.negative_lookahead(self.expect, ':'))):
        return self.raise_raw_syntax_error("':' expected after dictionary key", (a.lineno, a.col_offset), (a.end_lineno, a.end_col_offset));
    self._reset(mark)
    if ((self.expression()) and (self.expect(':')) and (a := self.expect('*'))
            and (self.bitwise_or())):
        return self.raise_syntax_error_starting_from("cannot use a starred expression in a dictionary value", a);
    self._reset(mark)
    if ((self.expression()) and (a := self.expect(':'))
            and (self.positive_lookahead(self._tmp_296, ))):
        return self.raise_syntax_error_known_location("expression expected after dictionary key and ':'", a);
    self._reset(mark)
    if ((self.expression()) and (a := self.expect(':'))):
        return self.raise_syntax_error_known_location("expression expected after dictionary key and ':'", a);
    self._reset(mark)
    return None;
@memoize
def invalid_starred_expression(self) -> Optional[Any]:
    """Reject assignment to an iterable-unpacking argument ('*expr = expr')."""
    # invalid_starred_expression: '*' expression '=' expression
    mark = self._mark()
    if ((a := self.expect('*')) and (self.expression()) and (self.expect('='))
            and (b := self.expression())):
        return self.raise_syntax_error_known_range("cannot assign to iterable argument unpacking", a, b);
    self._reset(mark)
    return None;
@memoize
def invalid_replacement_field(self) -> Optional[Any]:
    """Diagnose malformed f-string replacement fields ('{...}').

    Eleven alternatives, from the most specific to the most general,
    covering: a missing expression before '=', '!', ':' or '}'; no valid
    expression after '{'; a bad or missing terminator after the expression;
    a bad conversion character; and a missing closing '}'.
    """
    # invalid_replacement_field: '{' '=' | '{' '!' | '{' ':' | '{' '}' | '{' !(yield_expr | star_expressions) | '{' (yield_expr | star_expressions) !('=' | '!' | ':' | '}') | '{' (yield_expr | star_expressions) '=' !('!' | ':' | '}') | '{' (yield_expr | star_expressions) '='? invalid_conversion_character | '{' (yield_expr | star_expressions) '='? ['!' NAME] !(':' | '}') | '{' (yield_expr | star_expressions) '='? ['!' NAME] ':' fstring_format_spec* !'}' | '{' (yield_expr | star_expressions) '='? ['!' NAME] !'}'
    mark = self._mark()
    if ((self.expect('{')) and (a := self.expect('='))):
        return self.raise_syntax_error_known_location("f-string: valid expression required before '='", a);
    self._reset(mark)
    if ((self.expect('{')) and (a := self.expect('!'))):
        return self.raise_syntax_error_known_location("f-string: valid expression required before '!'", a);
    self._reset(mark)
    if ((self.expect('{')) and (a := self.expect(':'))):
        return self.raise_syntax_error_known_location("f-string: valid expression required before ':'", a);
    self._reset(mark)
    if ((self.expect('{')) and (a := self.expect('}'))):
        return self.raise_syntax_error_known_location("f-string: valid expression required before '}'", a);
    self._reset(mark)
    if ((self.expect('{')) and (self.negative_lookahead(self._tmp_297, ))):
        return self.raise_syntax_error_on_next_token("f-string: expecting a valid expression after '{'");
    self._reset(mark)
    if ((self.expect('{')) and (self._tmp_298())
            and (self.negative_lookahead(self._tmp_299, ))):
        return self.raise_syntax_error_on_next_token("f-string: expecting '=', or '!', or ':', or '}'");
    self._reset(mark)
    if ((self.expect('{')) and (self._tmp_300()) and (self.expect('='))
            and (self.negative_lookahead(self._tmp_301, ))):
        return self.raise_syntax_error_on_next_token("f-string: expecting '!', or ':', or '}'");
    self._reset(mark)
    # invalid_conversion_character raises; guarded by call_invalid_rules
    if (self.call_invalid_rules and (self.expect('{')) and (self._tmp_302())
            and (self.expect('='),) and (self.invalid_conversion_character())):
        return None # pragma: no cover;
    self._reset(mark)
    if ((self.expect('{')) and (self._tmp_303()) and (self.expect('='),)
            and (self._tmp_304(),) and (self.negative_lookahead(self._tmp_305, ))):
        return self.raise_syntax_error_on_next_token("f-string: expecting ':' or '}'");
    self._reset(mark)
    if ((self.expect('{')) and (self._tmp_306()) and (self.expect('='),)
            and (self._tmp_307(),) and (self.expect(':')) and (self._loop0_308(),)
            and (self.negative_lookahead(self.expect, '}'))):
        return self.raise_syntax_error_on_next_token("f-string: expecting '}', or format specs");
    self._reset(mark)
    if ((self.expect('{')) and (self._tmp_309()) and (self.expect('='),)
            and (self._tmp_310(),) and (self.negative_lookahead(self.expect, '}'))):
        return self.raise_syntax_error_on_next_token("f-string: expecting '}'");
    self._reset(mark)
    return None;
@memoize
def invalid_conversion_character(self) -> Optional[Any]:
    """Diagnose a missing or non-name conversion character after '!' in an f-string field."""
    # invalid_conversion_character: '!' &(':' | '}') | '!' !NAME
    mark = self._mark()
    if ((self.expect('!')) and (self.positive_lookahead(self._tmp_311, ))):
        return self.raise_syntax_error_on_next_token("f-string: missing conversion character");
    self._reset(mark)
    if ((self.expect('!')) and (self.negative_lookahead(self.name, ))):
        return self.raise_syntax_error_on_next_token("f-string: invalid conversion character");
    self._reset(mark)
    return None;
@memoize
def _loop0_1(self) -> Optional[Any]:
    """Collect zero or more NEWLINE tokens (always succeeds)."""
    # _loop0_1: NEWLINE
    last_good = self._mark()
    collected = []
    while (nl := self.expect('NEWLINE')):
        collected.append(nl)
        last_good = self._mark()
    self._reset(last_good)
    return collected
@memoize
def _loop0_2(self) -> Optional[Any]:
    """Collect zero or more NEWLINE tokens (always succeeds)."""
    # _loop0_2: NEWLINE
    last_good = self._mark()
    collected = []
    while (nl := self.expect('NEWLINE')):
        collected.append(nl)
        last_good = self._mark()
    self._reset(last_good)
    return collected
@memoize
def _loop0_3(self) -> Optional[Any]:
    """Collect zero or more fstring_mid segments (always succeeds)."""
    # _loop0_3: fstring_mid
    last_good = self._mark()
    segments = []
    while (segment := self.fstring_mid()):
        segments.append(segment)
        last_good = self._mark()
    self._reset(last_good)
    return segments
@memoize
def _loop1_4(self) -> Optional[Any]:
    """Collect statements; callers treat the empty list (falsy) as failure, giving one-or-more semantics."""
    # _loop1_4: statement
    last_good = self._mark()
    stmts = []
    while (stmt := self.statement()):
        stmts.append(stmt)
        last_good = self._mark()
    self._reset(last_good)
    return stmts
@memoize
def _loop0_6(self) -> Optional[Any]:
    """Collect zero or more ';'-prefixed simple statements (separator discarded)."""
    # _loop0_6: ';' simple_stmt
    last_good = self._mark()
    stmts = []
    while self.expect(';') and (stmt := self.simple_stmt()):
        stmts.append(stmt)
        last_good = self._mark()
    # drop any partially consumed trailing separator
    self._reset(last_good)
    return stmts
@memoize
def _gather_5(self) -> Optional[Any]:
    """Match simple_stmt (';' simple_stmt)* and return the combined list, or None."""
    # _gather_5: simple_stmt _loop0_6
    start = self._mark()
    head = self.simple_stmt()
    if head is not None:
        tail = self._loop0_6()
        if tail is not None:
            return [head] + tail
    self._reset(start)
    return None
@memoize
def _loop0_8(self) -> Optional[Any]:
    """Collect zero or more ';'-prefixed Scenic statements (separator discarded)."""
    # _loop0_8: ';' scenic_stmt
    last_good = self._mark()
    stmts = []
    while self.expect(';') and (stmt := self.scenic_stmt()):
        stmts.append(stmt)
        last_good = self._mark()
    self._reset(last_good)
    return stmts
@memoize
def _gather_7(self) -> Optional[Any]:
    """Match scenic_stmt (';' scenic_stmt)* and return the combined list, or None."""
    # _gather_7: scenic_stmt _loop0_8
    start = self._mark()
    head = self.scenic_stmt()
    if head is not None:
        tail = self._loop0_8()
        if tail is not None:
            return [head] + tail
    self._reset(start)
    return None
@memoize
def _tmp_9(self) -> Optional[Any]:
    """Match the keyword 'import' or 'from'; return the token, else None."""
    # _tmp_9: 'import' | 'from'
    start = self._mark()
    for keyword in ('import', 'from'):
        tok = self.expect(keyword)
        if tok:
            return tok
        self._reset(start)
    return None
@memoize
def _tmp_10(self) -> Optional[Any]:
    """Match 'def', '@' or 'async'; return the token, else None."""
    # _tmp_10: 'def' | '@' | 'async'
    start = self._mark()
    for keyword in ('def', '@', 'async'):
        tok = self.expect(keyword)
        if tok:
            return tok
        self._reset(start)
    return None
@memoize
def _tmp_11(self) -> Optional[Any]:
    """Match 'class' or '@'; return the token, else None."""
    # _tmp_11: 'class' | '@'
    start = self._mark()
    for keyword in ('class', '@'):
        tok = self.expect(keyword)
        if tok:
            return tok
        self._reset(start)
    return None
@memoize
def _tmp_12(self) -> Optional[Any]:
    """Match 'with' or 'async'; return the token, else None."""
    # _tmp_12: 'with' | 'async'
    start = self._mark()
    for keyword in ('with', 'async'):
        tok = self.expect(keyword)
        if tok:
            return tok
        self._reset(start)
    return None
@memoize
def _tmp_13(self) -> Optional[Any]:
    """Match 'for' or 'async'; return the token, else None."""
    # _tmp_13: 'for' | 'async'
    start = self._mark()
    for keyword in ('for', 'async'):
        tok = self.expect(keyword)
        if tok:
            return tok
        self._reset(start)
    return None
@memoize
def _tmp_14(self) -> Optional[Any]:
    """Match '=' annotated_rhs; return the right-hand-side node, else None."""
    # _tmp_14: '=' annotated_rhs
    start = self._mark()
    if self.expect('='):
        rhs = self.annotated_rhs()
        if rhs:
            return rhs
    self._reset(start)
    return None
@memoize
def _tmp_15(self) -> Optional[Any]:
    """Match a parenthesized single_target, or a single_subscript_attribute_target."""
    # _tmp_15: '(' single_target ')' | single_subscript_attribute_target
    start = self._mark()
    if self.expect('('):
        inner = self.single_target()
        if inner and self.expect(')'):
            return inner
    self._reset(start)
    target = self.single_subscript_attribute_target()
    if target:
        return target
    self._reset(start)
    return None
@memoize
def _tmp_16(self) -> Optional[Any]:
    """Match '=' annotated_rhs; return the right-hand-side node, else None."""
    # _tmp_16: '=' annotated_rhs
    start = self._mark()
    if self.expect('='):
        rhs = self.annotated_rhs()
        if rhs:
            return rhs
    self._reset(start)
    return None
@memoize
def _loop1_17(self) -> Optional[Any]:
    """Collect (star_targets '=') groups; empty list is falsy, so callers get one-or-more semantics."""
    # _loop1_17: (star_targets '=')
    last_good = self._mark()
    groups = []
    while (group := self._tmp_312()):
        groups.append(group)
        last_good = self._mark()
    self._reset(last_good)
    return groups
@memoize
def _tmp_18(self) -> Optional[Any]:
    """Match a yield expression or star_expressions; return the node, else None."""
    # _tmp_18: yield_expr | star_expressions
    start = self._mark()
    for rule in (self.yield_expr, self.star_expressions):
        node = rule()
        if node:
            return node
        self._reset(start)
    return None
@memoize
def _tmp_19(self) -> Optional[Any]:
    """Match a yield expression or star_expressions; return the node, else None."""
    # _tmp_19: yield_expr | star_expressions
    start = self._mark()
    for rule in (self.yield_expr, self.star_expressions):
        node = rule()
        if node:
            return node
        self._reset(start)
    return None
@memoize
def _tmp_20(self) -> Optional[Any]:
    """Match 'from' expression (raise-from clause); return the expression, else None."""
    # _tmp_20: 'from' expression
    start = self._mark()
    if self.expect('from'):
        cause = self.expression()
        if cause:
            return cause
    self._reset(start)
    return None
@memoize
def _loop0_22(self) -> Optional[Any]:
    """Collect zero or more ','-prefixed NAME tokens (separator discarded)."""
    # _loop0_22: ',' NAME
    last_good = self._mark()
    names = []
    while self.expect(',') and (item := self.name()):
        names.append(item)
        last_good = self._mark()
    self._reset(last_good)
    return names
@memoize
def _gather_21(self) -> Optional[Any]:
    """Match NAME (',' NAME)* and return the combined list, or None."""
    # _gather_21: NAME _loop0_22
    start = self._mark()
    head = self.name()
    if head is not None:
        tail = self._loop0_22()
        if tail is not None:
            return [head] + tail
    self._reset(start)
    return None
@memoize
def _loop0_24(self) -> Optional[Any]:
    """Collect zero or more ','-prefixed NAME tokens (separator discarded)."""
    # _loop0_24: ',' NAME
    last_good = self._mark()
    names = []
    while self.expect(',') and (item := self.name()):
        names.append(item)
        last_good = self._mark()
    self._reset(last_good)
    return names
@memoize
def _gather_23(self) -> Optional[Any]:
# _gather_23: NAME _loop0_24
mark = self._mark()
if (
(elem := self.name())
is not None
and
(seq := self._loop0_24())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_25(self) -> Optional[Any]:
# _tmp_25: ';' | NEWLINE
mark = self._mark()
if (
(literal := self.expect(';'))
):
return literal;
self._reset(mark)
if (
(_newline := self.expect('NEWLINE'))
):
return _newline;
self._reset(mark)
return None;
@memoize
def _tmp_26(self) -> Optional[Any]:
# _tmp_26: ',' expression
mark = self._mark()
if (
(self.expect(','))
and
(z := self.expression())
):
return z;
self._reset(mark)
return None;
@memoize
def _loop0_27(self) -> Optional[Any]:
# _loop0_27: ('.' | '...')
mark = self._mark()
children = []
while (
(_tmp_313 := self._tmp_313())
):
children.append(_tmp_313)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_28(self) -> Optional[Any]:
# _loop1_28: ('.' | '...')
mark = self._mark()
children = []
while (
(_tmp_314 := self._tmp_314())
):
children.append(_tmp_314)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_30(self) -> Optional[Any]:
# _loop0_30: ',' import_from_as_name
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.import_from_as_name())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_29(self) -> Optional[Any]:
# _gather_29: import_from_as_name _loop0_30
mark = self._mark()
if (
(elem := self.import_from_as_name())
is not None
and
(seq := self._loop0_30())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_31(self) -> Optional[Any]:
# _tmp_31: 'as' NAME
mark = self._mark()
if (
(self.expect('as'))
and
(z := self.name())
):
return z . string;
self._reset(mark)
return None;
@memoize
def _loop0_33(self) -> Optional[Any]:
# _loop0_33: ',' dotted_as_name
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.dotted_as_name())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_32(self) -> Optional[Any]:
# _gather_32: dotted_as_name _loop0_33
mark = self._mark()
if (
(elem := self.dotted_as_name())
is not None
and
(seq := self._loop0_33())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_34(self) -> Optional[Any]:
# _tmp_34: 'as' NAME
mark = self._mark()
if (
(self.expect('as'))
and
(z := self.name())
):
return z . string;
self._reset(mark)
return None;
@memoize
def _loop1_35(self) -> Optional[Any]:
# _loop1_35: decorator
mark = self._mark()
children = []
while (
(decorator := self.decorator())
):
children.append(decorator)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_36(self) -> Optional[Any]:
# _tmp_36: '@' dec_maybe_call NEWLINE
mark = self._mark()
if (
(self.expect('@'))
and
(f := self.dec_maybe_call())
and
(self.expect('NEWLINE'))
):
return f;
self._reset(mark)
return None;
@memoize
def _tmp_37(self) -> Optional[Any]:
# _tmp_37: '@' named_expression NEWLINE
mark = self._mark()
if (
(self.expect('@'))
and
(f := self.named_expression())
and
(self.expect('NEWLINE'))
):
return f;
self._reset(mark)
return None;
    @memoize
    def _tmp_38(self) -> Optional[Any]:
        # _tmp_38: '(' arguments? ')'
        mark = self._mark()
        if (
            (self.expect('('))
            and
            # The trailing comma builds a 1-tuple, which is always truthy,
            # making the 'arguments' part optional; z itself may be None.
            # NOTE(review): when '()' matches with no arguments, z is None,
            # so a truthiness-testing caller reads this as "no match" —
            # presumably intended by the grammar; confirm against callers.
            (z := self.arguments(),)
            and
            (self.expect(')'))
        ):
            return z;
        self._reset(mark)
        return None;
@memoize
def _loop1_39(self) -> Optional[Any]:
# _loop1_39: scenic_class_statement
mark = self._mark()
children = []
while (
(scenic_class_statement := self.scenic_class_statement())
):
children.append(scenic_class_statement)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_40(self) -> Optional[Any]:
# _tmp_40: '[' ','.scenic_class_property_attribute+ ']'
mark = self._mark()
if (
(self.expect('['))
and
(attrs := self._gather_315())
and
(self.expect(']'))
):
return attrs;
self._reset(mark)
return None;
    @memoize
    def _tmp_41(self) -> Optional[Any]:
        # _tmp_41: "additive" | "dynamic" | "final"
        # Parse a Scenic property-annotation keyword and wrap it in the
        # matching Scenic AST node, carrying source-location information.
        mark = self._mark()
        # Record the start position of the upcoming token before matching
        # any alternative; all three branches share it.
        tok = self._tokenizer.peek()
        start_lineno, start_col_offset = tok.start
        if (
            (self.expect("additive"))
        ):
            # End position comes from the last non-whitespace token consumed.
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Additive ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("dynamic"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Dynamic ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        if (
            (self.expect("final"))
        ):
            tok = self._tokenizer.get_last_non_whitespace_token()
            end_lineno, end_col_offset = tok.end
            return s . Final ( lineno=start_lineno, col_offset=start_col_offset, end_lineno=end_lineno, end_col_offset=end_col_offset );
        self._reset(mark)
        return None;
@memoize
def _loop1_42(self) -> Optional[Any]:
# _loop1_42: (star_targets '=')
mark = self._mark()
children = []
while (
(_tmp_317 := self._tmp_317())
):
children.append(_tmp_317)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_43(self) -> Optional[Any]:
# _loop0_43: (scenic_specifiers ',' NEWLINE)
mark = self._mark()
children = []
while (
(_tmp_318 := self._tmp_318())
):
children.append(_tmp_318)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_44(self) -> Optional[Any]:
# _loop1_44: (scenic_specifiers ',' NEWLINE)
mark = self._mark()
children = []
while (
(_tmp_319 := self._tmp_319())
):
children.append(_tmp_319)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_45(self) -> Optional[Any]:
# _tmp_45: STRING NEWLINE
mark = self._mark()
if (
(x := self.string())
and
(self.expect('NEWLINE'))
):
return x . string;
self._reset(mark)
return None;
@memoize
def _loop1_46(self) -> Optional[Any]:
# _loop1_46: scenic_behavior_statement
mark = self._mark()
children = []
while (
(scenic_behavior_statement := self.scenic_behavior_statement())
):
children.append(scenic_behavior_statement)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_47(self) -> Optional[Any]:
# _loop1_47: ((scenic_precondition_stmt | scenic_invariant_stmt) NEWLINE)
mark = self._mark()
children = []
while (
(_tmp_320 := self._tmp_320())
):
children.append(_tmp_320)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_48(self) -> Optional[Any]:
# _tmp_48: STRING NEWLINE
mark = self._mark()
if (
(x := self.string())
and
(self.expect('NEWLINE'))
):
return x . string;
self._reset(mark)
return None;
@memoize
def _loop1_49(self) -> Optional[Any]:
# _loop1_49: statement
mark = self._mark()
children = []
while (
(statement := self.statement())
):
children.append(statement)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_50(self) -> Optional[Any]:
# _tmp_50: '(' params? ')'
mark = self._mark()
if (
(self.expect('('))
and
(z := self.params(),)
and
(self.expect(')'))
):
return z;
self._reset(mark)
return None;
@memoize
def _tmp_51(self) -> Optional[Any]:
# _tmp_51: STRING NEWLINE
mark = self._mark()
if (
(x := self.string())
and
(self.expect('NEWLINE'))
):
return x . string;
self._reset(mark)
return None;
@memoize
def _tmp_52(self) -> Optional[Any]:
# _tmp_52: STRING NEWLINE
mark = self._mark()
if (
(x := self.string())
and
(self.expect('NEWLINE'))
):
return x . string;
self._reset(mark)
return None;
@memoize
def _tmp_53(self) -> Optional[Any]:
# _tmp_53: '->' expression
mark = self._mark()
if (
(self.expect('->'))
and
(z := self.expression())
):
return z;
self._reset(mark)
return None;
@memoize
def _tmp_54(self) -> Optional[Any]:
# _tmp_54: '->' expression
mark = self._mark()
if (
(self.expect('->'))
and
(z := self.expression())
):
return z;
self._reset(mark)
return None;
@memoize
def _loop0_55(self) -> Optional[Any]:
# _loop0_55: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_56(self) -> Optional[Any]:
# _loop0_56: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_57(self) -> Optional[Any]:
# _loop0_57: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_58(self) -> Optional[Any]:
# _loop1_58: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_59(self) -> Optional[Any]:
# _loop0_59: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_60(self) -> Optional[Any]:
# _loop1_60: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_61(self) -> Optional[Any]:
# _loop1_61: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_62(self) -> Optional[Any]:
# _loop1_62: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_63(self) -> Optional[Any]:
# _loop0_63: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_64(self) -> Optional[Any]:
# _loop1_64: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_65(self) -> Optional[Any]:
# _loop0_65: param_no_default
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_66(self) -> Optional[Any]:
# _loop1_66: param_with_default
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_67(self) -> Optional[Any]:
# _loop0_67: param_maybe_default
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_68(self) -> Optional[Any]:
# _loop0_68: param_maybe_default
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_69(self) -> Optional[Any]:
# _loop1_69: param_maybe_default
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_71(self) -> Optional[Any]:
# _loop0_71: ',' with_item
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.with_item())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_70(self) -> Optional[Any]:
# _gather_70: with_item _loop0_71
mark = self._mark()
if (
(elem := self.with_item())
is not None
and
(seq := self._loop0_71())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_73(self) -> Optional[Any]:
# _loop0_73: ',' with_item
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.with_item())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_72(self) -> Optional[Any]:
# _gather_72: with_item _loop0_73
mark = self._mark()
if (
(elem := self.with_item())
is not None
and
(seq := self._loop0_73())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_75(self) -> Optional[Any]:
# _loop0_75: ',' with_item
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.with_item())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_74(self) -> Optional[Any]:
# _gather_74: with_item _loop0_75
mark = self._mark()
if (
(elem := self.with_item())
is not None
and
(seq := self._loop0_75())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_77(self) -> Optional[Any]:
# _loop0_77: ',' with_item
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.with_item())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_76(self) -> Optional[Any]:
# _gather_76: with_item _loop0_77
mark = self._mark()
if (
(elem := self.with_item())
is not None
and
(seq := self._loop0_77())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_78(self) -> Optional[Any]:
# _tmp_78: ',' | ')' | ':'
mark = self._mark()
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(')'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(':'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _loop1_79(self) -> Optional[Any]:
# _loop1_79: except_block
mark = self._mark()
children = []
while (
(except_block := self.except_block())
):
children.append(except_block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_80(self) -> Optional[Any]:
# _loop1_80: except_star_block
mark = self._mark()
children = []
while (
(except_star_block := self.except_star_block())
):
children.append(except_star_block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_81(self) -> Optional[Any]:
# _loop1_81: interrupt_when_block
mark = self._mark()
children = []
while (
(interrupt_when_block := self.interrupt_when_block())
):
children.append(interrupt_when_block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_82(self) -> Optional[Any]:
# _loop0_82: except_block
mark = self._mark()
children = []
while (
(except_block := self.except_block())
):
children.append(except_block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_83(self) -> Optional[Any]:
# _tmp_83: 'as' NAME
mark = self._mark()
if (
(self.expect('as'))
and
(z := self.name())
):
return z . string;
self._reset(mark)
return None;
@memoize
def _tmp_84(self) -> Optional[Any]:
# _tmp_84: 'as' NAME
mark = self._mark()
if (
(self.expect('as'))
and
(z := self.name())
):
return z . string;
self._reset(mark)
return None;
@memoize
def _loop1_85(self) -> Optional[Any]:
# _loop1_85: case_block
mark = self._mark()
children = []
while (
(case_block := self.case_block())
):
children.append(case_block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_87(self) -> Optional[Any]:
# _loop0_87: '|' closed_pattern
mark = self._mark()
children = []
while (
(self.expect('|'))
and
(elem := self.closed_pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_86(self) -> Optional[Any]:
# _gather_86: closed_pattern _loop0_87
mark = self._mark()
if (
(elem := self.closed_pattern())
is not None
and
(seq := self._loop0_87())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_88(self) -> Optional[Any]:
# _tmp_88: '+' | '-'
mark = self._mark()
if (
(literal := self.expect('+'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('-'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_89(self) -> Optional[Any]:
# _tmp_89: '+' | '-'
mark = self._mark()
if (
(literal := self.expect('+'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('-'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_90(self) -> Optional[Any]:
# _tmp_90: '.' | '(' | '='
mark = self._mark()
if (
(literal := self.expect('.'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('('))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('='))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_91(self) -> Optional[Any]:
# _tmp_91: '.' | '(' | '='
mark = self._mark()
if (
(literal := self.expect('.'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('('))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('='))
):
return literal;
self._reset(mark)
return None;
@memoize
def _loop0_93(self) -> Optional[Any]:
# _loop0_93: ',' maybe_star_pattern
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.maybe_star_pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_92(self) -> Optional[Any]:
# _gather_92: maybe_star_pattern _loop0_93
mark = self._mark()
if (
(elem := self.maybe_star_pattern())
is not None
and
(seq := self._loop0_93())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_95(self) -> Optional[Any]:
# _loop0_95: ',' key_value_pattern
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.key_value_pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_94(self) -> Optional[Any]:
# _gather_94: key_value_pattern _loop0_95
mark = self._mark()
if (
(elem := self.key_value_pattern())
is not None
and
(seq := self._loop0_95())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_96(self) -> Optional[Any]:
# _tmp_96: literal_expr | attr
mark = self._mark()
if (
(literal_expr := self.literal_expr())
):
return literal_expr;
self._reset(mark)
if (
(attr := self.attr())
):
return attr;
self._reset(mark)
return None;
@memoize
def _loop0_98(self) -> Optional[Any]:
# _loop0_98: ',' pattern
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_97(self) -> Optional[Any]:
# _gather_97: pattern _loop0_98
mark = self._mark()
if (
(elem := self.pattern())
is not None
and
(seq := self._loop0_98())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_100(self) -> Optional[Any]:
# _loop0_100: ',' keyword_pattern
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.keyword_pattern())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_99(self) -> Optional[Any]:
# _gather_99: keyword_pattern _loop0_100
mark = self._mark()
if (
(elem := self.keyword_pattern())
is not None
and
(seq := self._loop0_100())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_102(self) -> Optional[Any]:
# _loop0_102: ',' type_param
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.type_param())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_101(self) -> Optional[Any]:
# _gather_101: type_param _loop0_102
mark = self._mark()
if (
(elem := self.type_param())
is not None
and
(seq := self._loop0_102())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop1_103(self) -> Optional[Any]:
# _loop1_103: (',' expression)
mark = self._mark()
children = []
while (
(_tmp_321 := self._tmp_321())
):
children.append(_tmp_321)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_104(self) -> Optional[Any]:
# _loop1_104: (',' star_expression)
mark = self._mark()
children = []
while (
(_tmp_322 := self._tmp_322())
):
children.append(_tmp_322)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_106(self) -> Optional[Any]:
# _loop0_106: ',' star_named_expression
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.star_named_expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_105(self) -> Optional[Any]:
# _gather_105: star_named_expression _loop0_106
mark = self._mark()
if (
(elem := self.star_named_expression())
is not None
and
(seq := self._loop0_106())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_107(self) -> Optional[Any]:
# _tmp_107: scenic_temporal_prefix | scenic_temporal_disjunction
mark = self._mark()
if (
(scenic_temporal_prefix := self.scenic_temporal_prefix())
):
return scenic_temporal_prefix;
self._reset(mark)
if (
(scenic_temporal_disjunction := self.scenic_temporal_disjunction())
):
return scenic_temporal_disjunction;
self._reset(mark)
return None;
@memoize
def _loop1_108(self) -> Optional[Any]:
# _loop1_108: ('or' conjunction)
mark = self._mark()
children = []
while (
(_tmp_323 := self._tmp_323())
):
children.append(_tmp_323)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_109(self) -> Optional[Any]:
# _loop1_109: ('or' (scenic_temporal_prefix | scenic_temporal_conjunction))
mark = self._mark()
children = []
while (
(_tmp_324 := self._tmp_324())
):
children.append(_tmp_324)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_110(self) -> Optional[Any]:
# _loop1_110: ('and' inversion)
mark = self._mark()
children = []
while (
(_tmp_325 := self._tmp_325())
):
children.append(_tmp_325)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_111(self) -> Optional[Any]:
# _loop1_111: ('and' (scenic_temporal_prefix | scenic_temporal_inversion))
mark = self._mark()
children = []
while (
(_tmp_326 := self._tmp_326())
):
children.append(_tmp_326)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_112(self) -> Optional[Any]:
# _tmp_112: "visible" inversion
mark = self._mark()
if (
(literal := self.expect("visible"))
and
(inversion := self.inversion())
):
return [literal, inversion];
self._reset(mark)
return None;
@memoize
def _tmp_113(self) -> Optional[Any]:
# _tmp_113: "visible" scenic_temporal_inversion
mark = self._mark()
if (
(literal := self.expect("visible"))
and
(scenic_temporal_inversion := self.scenic_temporal_inversion())
):
return [literal, scenic_temporal_inversion];
self._reset(mark)
return None;
@memoize
def _tmp_114(self) -> Optional[Any]:
# _tmp_114: scenic_temporal_prefix | scenic_temporal_inversion
mark = self._mark()
if (
(scenic_temporal_prefix := self.scenic_temporal_prefix())
):
return scenic_temporal_prefix;
self._reset(mark)
if (
(scenic_temporal_inversion := self.scenic_temporal_inversion())
):
return scenic_temporal_inversion;
self._reset(mark)
return None;
@memoize
def _tmp_115(self) -> Optional[Any]:
# _tmp_115: 'until' | 'or' | 'and' | ')' | ';' | NEWLINE
mark = self._mark()
if (
(literal := self.expect('until'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('or'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('and'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(')'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(';'))
):
return literal;
self._reset(mark)
if (
(_newline := self.expect('NEWLINE'))
):
return _newline;
self._reset(mark)
return None;
@memoize
def _loop0_117(self) -> Optional[Any]:
# _loop0_117: ',' scenic_specifier
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.scenic_specifier())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_116(self) -> Optional[Any]:
# _gather_116: scenic_specifier _loop0_117
mark = self._mark()
if (
(elem := self.scenic_specifier())
is not None
and
(seq := self._loop0_117())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_118(self) -> Optional[Any]:
# _tmp_118: 'by' expression
mark = self._mark()
if (
(self.expect('by'))
and
(e := self.expression())
):
return e;
self._reset(mark)
return None;
@memoize
def _tmp_119(self) -> Optional[Any]:
# _tmp_119: 'from' expression
mark = self._mark()
if (
(self.expect('from'))
and
(a := self.expression())
):
return a;
self._reset(mark)
return None;
@memoize
def _tmp_120(self) -> Optional[Any]:
# _tmp_120: 'from' expression
mark = self._mark()
if (
(self.expect('from'))
and
(r := self.expression())
):
return r;
self._reset(mark)
return None;
@memoize
def _tmp_121(self) -> Optional[Any]:
# _tmp_121: 'from' expression
mark = self._mark()
if (
(self.expect('from'))
and
(r := self.expression())
):
return r;
self._reset(mark)
return None;
@memoize
def _tmp_122(self) -> Optional[Any]:
# _tmp_122: 'from' expression
mark = self._mark()
if (
(self.expect('from'))
and
(e := self.expression())
):
return e;
self._reset(mark)
return None;
@memoize
def _tmp_123(self) -> Optional[Any]:
# _tmp_123: 'from' expression
mark = self._mark()
if (
(self.expect('from'))
and
(a := self.expression())
):
return a;
self._reset(mark)
return None;
@memoize
def _loop1_124(self) -> Optional[Any]:
# _loop1_124: compare_op_bitwise_or_pair
mark = self._mark()
children = []
while (
(compare_op_bitwise_or_pair := self.compare_op_bitwise_or_pair())
):
children.append(compare_op_bitwise_or_pair)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_125(self) -> Optional[Any]:
# _tmp_125: "relative" 'to' | "offset" 'by'
mark = self._mark()
if (
(literal := self.expect("relative"))
and
(literal_1 := self.expect('to'))
):
return [literal, literal_1];
self._reset(mark)
if (
(literal := self.expect("offset"))
and
(literal_1 := self.expect('by'))
):
return [literal, literal_1];
self._reset(mark)
return None;
@memoize
def _tmp_126(self) -> Optional[Any]:
# _tmp_126: 'to' | 'from'
mark = self._mark()
if (
(literal := self.expect('to'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('from'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _loop0_128(self) -> Optional[Any]:
# _loop0_128: ',' (slice | starred_expression)
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self._tmp_327())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_127(self) -> Optional[Any]:
# _gather_127: (slice | starred_expression) _loop0_128
mark = self._mark()
if (
(elem := self._tmp_327())
is not None
and
(seq := self._loop0_128())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
    @memoize
    def _tmp_129(self) -> Optional[Any]:
        # _tmp_129: ':' expression?
        mark = self._mark()
        if (
            (self.expect(':'))
            and
            # Trailing comma makes a 1-tuple (always truthy), so the
            # expression is optional; d itself may be None when absent.
            # NOTE(review): d = None here reads as "no match" to callers
            # testing truthiness — presumably intended; confirm.
            (d := self.expression(),)
        ):
            return d;
        self._reset(mark)
        return None;
@memoize
def _tmp_130(self) -> Optional[Any]:
# _tmp_130: STRING | FSTRING_START
mark = self._mark()
if (
(string := self.string())
):
return string;
self._reset(mark)
if (
(fstring_start := self.fstring_start())
):
return fstring_start;
self._reset(mark)
return None;
@memoize
def _tmp_131(self) -> Optional[Any]:
# _tmp_131: tuple | group | genexp
mark = self._mark()
if (
(tuple := self.tuple())
):
return tuple;
self._reset(mark)
if (
(group := self.group())
):
return group;
self._reset(mark)
if (
(genexp := self.genexp())
):
return genexp;
self._reset(mark)
return None;
@memoize
def _tmp_132(self) -> Optional[Any]:
# _tmp_132: list | listcomp
mark = self._mark()
if (
(list := self.list())
):
return list;
self._reset(mark)
if (
(listcomp := self.listcomp())
):
return listcomp;
self._reset(mark)
return None;
@memoize
def _tmp_133(self) -> Optional[Any]:
# _tmp_133: dict | set | dictcomp | setcomp
mark = self._mark()
if (
(dict := self.dict())
):
return dict;
self._reset(mark)
if (
(set := self.set())
):
return set;
self._reset(mark)
if (
(dictcomp := self.dictcomp())
):
return dictcomp;
self._reset(mark)
if (
(setcomp := self.setcomp())
):
return setcomp;
self._reset(mark)
return None;
@memoize
def _tmp_134(self) -> Optional[Any]:
# _tmp_134: yield_expr | named_expression
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(named_expression := self.named_expression())
):
return named_expression;
self._reset(mark)
return None;
@memoize
def _loop0_135(self) -> Optional[Any]:
# _loop0_135: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_136(self) -> Optional[Any]:
# _loop0_136: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_137(self) -> Optional[Any]:
# _loop0_137: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_138(self) -> Optional[Any]:
# _loop1_138: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_139(self) -> Optional[Any]:
# _loop0_139: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_140(self) -> Optional[Any]:
# _loop1_140: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_141(self) -> Optional[Any]:
# _loop1_141: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_142(self) -> Optional[Any]:
# _loop1_142: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_143(self) -> Optional[Any]:
# _loop0_143: lambda_param_no_default
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_144(self) -> Optional[Any]:
# _loop1_144: lambda_param_with_default
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_145(self) -> Optional[Any]:
# _loop0_145: lambda_param_no_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_146(self) -> Optional[Any]:
# _loop1_146: lambda_param_with_default
# One-or-more: an empty result [] is falsy, so callers treat it as failure.
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_147(self) -> Optional[Any]:
# _loop0_147: lambda_param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_148(self) -> Optional[Any]:
# _loop1_148: lambda_param_maybe_default
# One-or-more: an empty result [] is falsy, so callers treat it as failure.
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_150(self) -> Optional[Any]:
# _loop0_150: ',' scenic_param_stmt_param
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.scenic_param_stmt_param())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_149(self) -> Optional[Any]:
# _gather_149: scenic_param_stmt_param _loop0_150
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.scenic_param_stmt_param())
is not None
and
(seq := self._loop0_150())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_151(self) -> Optional[Any]:
# _tmp_151: 'as' scenic_require_stmt_name
# Match the whole sequence or reset and return None.
mark = self._mark()
if (
(literal := self.expect('as'))
and
(scenic_require_stmt_name := self.scenic_require_stmt_name())
):
return [literal, scenic_require_stmt_name];
self._reset(mark)
return None;
@memoize
def _tmp_152(self) -> Optional[Any]:
# _tmp_152: '[' NUMBER ']'
# Matches a bracketed NUMBER token and returns its text converted to float.
mark = self._mark()
if (
(self.expect('['))
and
(a := self.number())
and
(self.expect(']'))
):
return float ( a . string );
self._reset(mark)
return None;
@memoize
def _tmp_153(self) -> Optional[Any]:
# _tmp_153: 'as' scenic_require_stmt_name
# Match the whole sequence or reset and return None; yields just the name.
mark = self._mark()
if (
(self.expect('as'))
and
(a := self.scenic_require_stmt_name())
):
return a;
self._reset(mark)
return None;
@memoize
def _tmp_154(self) -> Optional[Any]:
# _tmp_154: NAME | NUMBER
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(name := self.name())
):
return name;
self._reset(mark)
if (
(number := self.number())
):
return number;
self._reset(mark)
return None;
@memoize
def _tmp_155(self) -> Optional[Any]:
# _tmp_155: 'as' scenic_require_stmt_name
# Match the whole sequence or reset and return None; yields just the name.
mark = self._mark()
if (
(self.expect('as'))
and
(a := self.scenic_require_stmt_name())
):
return a;
self._reset(mark)
return None;
@memoize
def _tmp_156(self) -> Optional[Any]:
# _tmp_156: 'as' scenic_require_stmt_name
# Match the whole sequence or reset and return None; yields just the name.
mark = self._mark()
if (
(self.expect('as'))
and
(a := self.scenic_require_stmt_name())
):
return a;
self._reset(mark)
return None;
@memoize
def _tmp_157(self) -> Optional[Any]:
# _tmp_157: 'as' scenic_require_stmt_name
# Match the whole sequence or reset and return None; yields just the name.
mark = self._mark()
if (
(self.expect('as'))
and
(a := self.scenic_require_stmt_name())
):
return a;
self._reset(mark)
return None;
@memoize
def _loop0_159(self) -> Optional[Any]:
# _loop0_159: ',' scenic_mutate_stmt_id
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.scenic_mutate_stmt_id())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_158(self) -> Optional[Any]:
# _gather_158: scenic_mutate_stmt_id _loop0_159
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.scenic_mutate_stmt_id())
is not None
and
(seq := self._loop0_159())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_160(self) -> Optional[Any]:
# _tmp_160: 'by' expression
# Match the whole sequence or reset and return None; yields the expression.
mark = self._mark()
if (
(self.expect('by'))
and
(x := self.expression())
):
return x;
self._reset(mark)
return None;
@memoize
def _loop0_162(self) -> Optional[Any]:
# _loop0_162: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_161(self) -> Optional[Any]:
# _gather_161: expression _loop0_162
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_162())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_163(self) -> Optional[Any]:
# _tmp_163: 'as' scenic_require_stmt_name
# Match the whole sequence or reset and return None; yields just the name.
mark = self._mark()
if (
(self.expect('as'))
and
(a := self.scenic_require_stmt_name())
):
return a;
self._reset(mark)
return None;
@memoize
def _tmp_164(self) -> Optional[Any]:
# _tmp_164: 'as' scenic_require_stmt_name
# Match the whole sequence or reset and return None; yields just the name.
mark = self._mark()
if (
(self.expect('as'))
and
(a := self.scenic_require_stmt_name())
):
return a;
self._reset(mark)
return None;
@memoize
def _loop0_166(self) -> Optional[Any]:
# _loop0_166: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_165(self) -> Optional[Any]:
# _gather_165: expression _loop0_166
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_166())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_168(self) -> Optional[Any]:
# _loop0_168: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_167(self) -> Optional[Any]:
# _gather_167: expression _loop0_168
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_168())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_170(self) -> Optional[Any]:
# _loop0_170: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_169(self) -> Optional[Any]:
# _gather_169: expression _loop0_170
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_170())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_172(self) -> Optional[Any]:
# _loop0_172: ',' disjunction
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.disjunction())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_171(self) -> Optional[Any]:
# _gather_171: disjunction _loop0_172
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.disjunction())
is not None
and
(seq := self._loop0_172())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_174(self) -> Optional[Any]:
# _loop0_174: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_173(self) -> Optional[Any]:
# _gather_173: expression _loop0_174
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_174())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_175(self) -> Optional[Any]:
# _tmp_175: yield_expr | star_expressions
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _loop0_176(self) -> Optional[Any]:
# _loop0_176: fstring_format_spec
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(fstring_format_spec := self.fstring_format_spec())
):
children.append(fstring_format_spec)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_177(self) -> Optional[Any]:
# _loop1_177: (fstring | STRING)
# One-or-more of the parenthesized group; the group lives in helper _tmp_328.
mark = self._mark()
children = []
while (
(_tmp_328 := self._tmp_328())
):
children.append(_tmp_328)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_178(self) -> Optional[Any]:
# _tmp_178: star_named_expression ',' star_named_expressions?
# The one-tuple "(z := ...,)" is always truthy, making the last part optional.
mark = self._mark()
if (
(y := self.star_named_expression())
and
(self.expect(','))
and
(z := self.star_named_expressions(),)
):
return [y] + ( z or [] );
self._reset(mark)
return None;
@memoize
def _loop0_180(self) -> Optional[Any]:
# _loop0_180: ',' double_starred_kvpair
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.double_starred_kvpair())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_179(self) -> Optional[Any]:
# _gather_179: double_starred_kvpair _loop0_180
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.double_starred_kvpair())
is not None
and
(seq := self._loop0_180())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop1_181(self) -> Optional[Any]:
# _loop1_181: for_if_clause
# One-or-more: an empty result [] is falsy, so callers treat it as failure.
mark = self._mark()
children = []
while (
(for_if_clause := self.for_if_clause())
):
children.append(for_if_clause)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_182(self) -> Optional[Any]:
# _loop0_182: ('if' disjunction)
# Zero-or-more of the parenthesized group; the group lives in helper _tmp_329.
mark = self._mark()
children = []
while (
(_tmp_329 := self._tmp_329())
):
children.append(_tmp_329)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_183(self) -> Optional[Any]:
# _loop0_183: ('if' disjunction)
# Zero-or-more of the parenthesized group; the group lives in helper _tmp_330.
mark = self._mark()
children = []
while (
(_tmp_330 := self._tmp_330())
):
children.append(_tmp_330)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_184(self) -> Optional[Any]:
# _tmp_184: assignment_expression | expression !':='
# Second alternative requires a negative lookahead: expression NOT followed by ':='.
mark = self._mark()
if (
(assignment_expression := self.assignment_expression())
):
return assignment_expression;
self._reset(mark)
if (
(expression := self.expression())
and
(self.negative_lookahead(self.expect, ':='))
):
return expression;
self._reset(mark)
return None;
@memoize
def _loop0_186(self) -> Optional[Any]:
# _loop0_186: ',' (starred_expression | (assignment_expression | expression !':=') !'=')
# List tail: repeat "',' <group>"; the group lives in helper _tmp_331.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self._tmp_331())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_185(self) -> Optional[Any]:
# _gather_185: (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_186
# Non-empty ','-separated list: head group plus companion loop tail.
mark = self._mark()
if (
(elem := self._tmp_331())
is not None
and
(seq := self._loop0_186())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_187(self) -> Optional[Any]:
# _tmp_187: ',' kwargs
# Match the whole sequence or reset and return None; yields the kwargs.
mark = self._mark()
if (
(self.expect(','))
and
(k := self.kwargs())
):
return k;
self._reset(mark)
return None;
@memoize
def _loop0_189(self) -> Optional[Any]:
# _loop0_189: ',' kwarg_or_starred
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.kwarg_or_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_188(self) -> Optional[Any]:
# _gather_188: kwarg_or_starred _loop0_189
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.kwarg_or_starred())
is not None
and
(seq := self._loop0_189())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_191(self) -> Optional[Any]:
# _loop0_191: ',' kwarg_or_double_starred
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.kwarg_or_double_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_190(self) -> Optional[Any]:
# _gather_190: kwarg_or_double_starred _loop0_191
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.kwarg_or_double_starred())
is not None
and
(seq := self._loop0_191())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_193(self) -> Optional[Any]:
# _loop0_193: ',' kwarg_or_starred
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.kwarg_or_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_192(self) -> Optional[Any]:
# _gather_192: kwarg_or_starred _loop0_193
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.kwarg_or_starred())
is not None
and
(seq := self._loop0_193())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_195(self) -> Optional[Any]:
# _loop0_195: ',' kwarg_or_double_starred
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.kwarg_or_double_starred())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_194(self) -> Optional[Any]:
# _gather_194: kwarg_or_double_starred _loop0_195
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.kwarg_or_double_starred())
is not None
and
(seq := self._loop0_195())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_196(self) -> Optional[Any]:
# _loop0_196: (',' star_target)
# Zero-or-more of the parenthesized group; the group lives in helper _tmp_332.
mark = self._mark()
children = []
while (
(_tmp_332 := self._tmp_332())
):
children.append(_tmp_332)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_198(self) -> Optional[Any]:
# _loop0_198: ',' star_target
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.star_target())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_197(self) -> Optional[Any]:
# _gather_197: star_target _loop0_198
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.star_target())
is not None
and
(seq := self._loop0_198())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop1_199(self) -> Optional[Any]:
# _loop1_199: (',' star_target)
# One-or-more of the parenthesized group; the group lives in helper _tmp_333.
mark = self._mark()
children = []
while (
(_tmp_333 := self._tmp_333())
):
children.append(_tmp_333)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_200(self) -> Optional[Any]:
# _tmp_200: !'*' star_target
# Negative lookahead: succeed only when the next token is NOT '*'.
mark = self._mark()
if (
(self.negative_lookahead(self.expect, '*'))
and
(star_target := self.star_target())
):
return star_target;
self._reset(mark)
return None;
@memoize
def _loop0_202(self) -> Optional[Any]:
# _loop0_202: ',' del_target
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.del_target())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_201(self) -> Optional[Any]:
# _gather_201: del_target _loop0_202
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.del_target())
is not None
and
(seq := self._loop0_202())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_204(self) -> Optional[Any]:
# _loop0_204: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_203(self) -> Optional[Any]:
# _gather_203: expression _loop0_204
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_204())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_206(self) -> Optional[Any]:
# _loop0_206: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_205(self) -> Optional[Any]:
# _gather_205: expression _loop0_206
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_206())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_208(self) -> Optional[Any]:
# _loop0_208: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_207(self) -> Optional[Any]:
# _gather_207: expression _loop0_208
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_208())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_210(self) -> Optional[Any]:
# _loop0_210: ',' expression
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.expression())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_209(self) -> Optional[Any]:
# _gather_209: expression _loop0_210
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.expression())
is not None
and
(seq := self._loop0_210())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_211(self) -> Optional[Any]:
# _tmp_211: NEWLINE INDENT
# Match the whole token sequence or reset and return None.
mark = self._mark()
if (
(_newline := self.expect('NEWLINE'))
and
(_indent := self.expect('INDENT'))
):
return [_newline, _indent];
self._reset(mark)
return None;
@memoize
def _tmp_212(self) -> Optional[Any]:
# _tmp_212: args | expression for_if_clauses
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(args := self.args())
):
return args;
self._reset(mark)
if (
(expression := self.expression())
and
(for_if_clauses := self.for_if_clauses())
):
return [expression, for_if_clauses];
self._reset(mark)
return None;
@memoize
def _tmp_213(self) -> Optional[Any]:
# _tmp_213: args ','
# Match the whole sequence or reset and return None.
mark = self._mark()
if (
(args := self.args())
and
(literal := self.expect(','))
):
return [args, literal];
self._reset(mark)
return None;
@memoize
def _tmp_214(self) -> Optional[Any]:
# _tmp_214: ',' | ')'
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(')'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_215(self) -> Optional[Any]:
# _tmp_215: 'True' | 'False' | 'None'
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect('True'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('False'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('None'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_216(self) -> Optional[Any]:
# _tmp_216: NAME '='
# Match the whole sequence or reset and return None.
mark = self._mark()
if (
(name := self.name())
and
(literal := self.expect('='))
):
return [name, literal];
self._reset(mark)
return None;
@memoize
def _tmp_217(self) -> Optional[Any]:
# _tmp_217: NAME STRING | SOFT_KEYWORD
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(name := self.name())
and
(string := self.string())
):
return [name, string];
self._reset(mark)
if (
(soft_keyword := self.soft_keyword())
):
return soft_keyword;
self._reset(mark)
return None;
@memoize
def _tmp_218(self) -> Optional[Any]:
# _tmp_218: 'else' | ':'
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect('else'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(':'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_219(self) -> Optional[Any]:
# _tmp_219: FSTRING_MIDDLE | fstring_replacement_field
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(fstring_middle := self.fstring_middle())
):
return fstring_middle;
self._reset(mark)
if (
(fstring_replacement_field := self.fstring_replacement_field())
):
return fstring_replacement_field;
self._reset(mark)
return None;
@memoize
def _tmp_220(self) -> Optional[Any]:
# _tmp_220: '=' | ':='
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect('='))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(':='))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_221(self) -> Optional[Any]:
# _tmp_221: list | tuple | genexp | 'True' | 'None' | 'False'
# Ordered choice (generated code shadows builtins list/tuple locally on purpose).
mark = self._mark()
if (
(list := self.list())
):
return list;
self._reset(mark)
if (
(tuple := self.tuple())
):
return tuple;
self._reset(mark)
if (
(genexp := self.genexp())
):
return genexp;
self._reset(mark)
if (
(literal := self.expect('True'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('None'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('False'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_222(self) -> Optional[Any]:
# _tmp_222: '=' | ':='
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect('='))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(':='))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_223(self) -> Optional[Any]:
# _tmp_223: NUMBER ']'
# Match the whole sequence or reset and return None.
mark = self._mark()
if (
(number := self.number())
and
(literal := self.expect(']'))
):
return [number, literal];
self._reset(mark)
return None;
@memoize
def _tmp_224(self) -> Optional[Any]:
# _tmp_224: 'as' scenic_require_stmt_name
# Match the whole sequence or reset and return None.
mark = self._mark()
if (
(literal := self.expect('as'))
and
(scenic_require_stmt_name := self.scenic_require_stmt_name())
):
return [literal, scenic_require_stmt_name];
self._reset(mark)
return None;
@memoize
def _loop0_225(self) -> Optional[Any]:
# _loop0_225: star_named_expressions
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(star_named_expressions := self.star_named_expressions())
):
children.append(star_named_expressions)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_226(self) -> Optional[Any]:
# _loop0_226: (star_targets '=')
# Zero-or-more of the parenthesized group; the group lives in helper _tmp_334.
mark = self._mark()
children = []
while (
(_tmp_334 := self._tmp_334())
):
children.append(_tmp_334)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_227(self) -> Optional[Any]:
# _loop0_227: (star_targets '=')
# Zero-or-more of the parenthesized group; the group lives in helper _tmp_335.
mark = self._mark()
children = []
while (
(_tmp_335 := self._tmp_335())
):
children.append(_tmp_335)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_228(self) -> Optional[Any]:
# _tmp_228: yield_expr | star_expressions
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _tmp_229(self) -> Optional[Any]:
# _tmp_229: '[' | '(' | '{'
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect('['))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('('))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('{'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_230(self) -> Optional[Any]:
# _tmp_230: '[' | '{'
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect('['))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('{'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_231(self) -> Optional[Any]:
# _tmp_231: '[' | '{'
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect('['))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('{'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_232(self) -> Optional[Any]:
# _tmp_232: slash_no_default | slash_with_default
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(slash_no_default := self.slash_no_default())
):
return slash_no_default;
self._reset(mark)
if (
(slash_with_default := self.slash_with_default())
):
return slash_with_default;
self._reset(mark)
return None;
@memoize
def _loop0_233(self) -> Optional[Any]:
# _loop0_233: param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_234(self) -> Optional[Any]:
# _loop0_234: param_no_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_235(self) -> Optional[Any]:
# _loop0_235: param_no_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_236(self) -> Optional[Any]:
# _loop1_236: param_no_default
# One-or-more: an empty result [] is falsy, so callers treat it as failure.
mark = self._mark()
children = []
while (
(param_no_default := self.param_no_default())
):
children.append(param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_237(self) -> Optional[Any]:
# _tmp_237: slash_no_default | slash_with_default
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(slash_no_default := self.slash_no_default())
):
return slash_no_default;
self._reset(mark)
if (
(slash_with_default := self.slash_with_default())
):
return slash_with_default;
self._reset(mark)
return None;
@memoize
def _loop0_238(self) -> Optional[Any]:
# _loop0_238: param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_239(self) -> Optional[Any]:
# _tmp_239: ',' | param_no_default
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
if (
(param_no_default := self.param_no_default())
):
return param_no_default;
self._reset(mark)
return None;
@memoize
def _loop0_240(self) -> Optional[Any]:
# _loop0_240: param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_241(self) -> Optional[Any]:
# _loop1_241: param_maybe_default
# One-or-more: an empty result [] is falsy, so callers treat it as failure.
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_242(self) -> Optional[Any]:
# _tmp_242: ')' | ','
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect(')'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_243(self) -> Optional[Any]:
# _tmp_243: ')' | ',' (')' | '**')
# Ordered choice; the second alternative's group lives in helper _tmp_336.
mark = self._mark()
if (
(literal := self.expect(')'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(','))
and
(_tmp_336 := self._tmp_336())
):
return [literal, _tmp_336];
self._reset(mark)
return None;
@memoize
def _tmp_244(self) -> Optional[Any]:
# _tmp_244: param_no_default | ','
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(param_no_default := self.param_no_default())
):
return param_no_default;
self._reset(mark)
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
return None;
@memoize
def _loop0_245(self) -> Optional[Any]:
# _loop0_245: param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(param_maybe_default := self.param_maybe_default())
):
children.append(param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_246(self) -> Optional[Any]:
# _tmp_246: param_no_default | ','
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(param_no_default := self.param_no_default())
):
return param_no_default;
self._reset(mark)
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_247(self) -> Optional[Any]:
# _tmp_247: '*' | '**' | '/'
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect('*'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('**'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('/'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _loop1_248(self) -> Optional[Any]:
# _loop1_248: param_with_default
# One-or-more: an empty result [] is falsy, so callers treat it as failure.
mark = self._mark()
children = []
while (
(param_with_default := self.param_with_default())
):
children.append(param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_249(self) -> Optional[Any]:
# _tmp_249: lambda_slash_no_default | lambda_slash_with_default
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(lambda_slash_no_default := self.lambda_slash_no_default())
):
return lambda_slash_no_default;
self._reset(mark)
if (
(lambda_slash_with_default := self.lambda_slash_with_default())
):
return lambda_slash_with_default;
self._reset(mark)
return None;
@memoize
def _loop0_250(self) -> Optional[Any]:
# _loop0_250: lambda_param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_251(self) -> Optional[Any]:
# _loop0_251: lambda_param_no_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_252(self) -> Optional[Any]:
# _loop0_252: lambda_param_no_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(lambda_param_no_default := self.lambda_param_no_default())
):
children.append(lambda_param_no_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop0_254(self) -> Optional[Any]:
# _loop0_254: ',' lambda_param
# List tail: repeat "',' <elem>", collecting only the elements.
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.lambda_param())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_253(self) -> Optional[Any]:
# _gather_253: lambda_param _loop0_254
# Non-empty ','-separated list: head element plus companion loop tail.
mark = self._mark()
if (
(elem := self.lambda_param())
is not None
and
(seq := self._loop0_254())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_255(self) -> Optional[Any]:
# _tmp_255: lambda_slash_no_default | lambda_slash_with_default
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(lambda_slash_no_default := self.lambda_slash_no_default())
):
return lambda_slash_no_default;
self._reset(mark)
if (
(lambda_slash_with_default := self.lambda_slash_with_default())
):
return lambda_slash_with_default;
self._reset(mark)
return None;
@memoize
def _loop0_256(self) -> Optional[Any]:
# _loop0_256: lambda_param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_257(self) -> Optional[Any]:
# _tmp_257: ',' | lambda_param_no_default
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
if (
(lambda_param_no_default := self.lambda_param_no_default())
):
return lambda_param_no_default;
self._reset(mark)
return None;
@memoize
def _loop0_258(self) -> Optional[Any]:
# _loop0_258: lambda_param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_259(self) -> Optional[Any]:
# _loop1_259: lambda_param_maybe_default
# One-or-more: an empty result [] is falsy, so callers treat it as failure.
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_260(self) -> Optional[Any]:
# _loop1_260: lambda_param_with_default
# One-or-more: an empty result [] is falsy, so callers treat it as failure.
mark = self._mark()
children = []
while (
(lambda_param_with_default := self.lambda_param_with_default())
):
children.append(lambda_param_with_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_261(self) -> Optional[Any]:
# _tmp_261: ':' | ',' (':' | '**')
# Ordered choice; the second alternative's group lives in helper _tmp_337.
mark = self._mark()
if (
(literal := self.expect(':'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(','))
and
(_tmp_337 := self._tmp_337())
):
return [literal, _tmp_337];
self._reset(mark)
return None;
@memoize
def _tmp_262(self) -> Optional[Any]:
# _tmp_262: lambda_param_no_default | ','
# Ordered choice; tokenizer position is reset between alternatives.
mark = self._mark()
if (
(lambda_param_no_default := self.lambda_param_no_default())
):
return lambda_param_no_default;
self._reset(mark)
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
return None;
@memoize
def _loop0_263(self) -> Optional[Any]:
# _loop0_263: lambda_param_maybe_default
# Zero-or-more matches; rewinds past the final failed attempt (may return []).
mark = self._mark()
children = []
while (
(lambda_param_maybe_default := self.lambda_param_maybe_default())
):
children.append(lambda_param_maybe_default)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_264(self) -> Optional[Any]:
# _tmp_264: lambda_param_no_default | ','
mark = self._mark()
if (
(lambda_param_no_default := self.lambda_param_no_default())
):
return lambda_param_no_default;
self._reset(mark)
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_265(self) -> Optional[Any]:
    # _tmp_265: '*' | '**' | '/'
    # Accept exactly one of the three parameter-list punctuation tokens,
    # trying them in grammar order and rewinding between attempts.
    mark = self._mark()
    if (
        (literal := self.expect('*'))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect('**'))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect('/'))
    ):
        return literal;
    self._reset(mark)
    return None;
@memoize
def _tmp_266(self) -> Optional[Any]:
    # _tmp_266: ',' | ')' | ':'
    # Accept any single one of ',', ')' or ':'; used as a lookahead/recovery
    # group by the generated grammar.
    mark = self._mark()
    if (
        (literal := self.expect(','))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect(')'))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect(':'))
    ):
        return literal;
    self._reset(mark)
    return None;
@memoize
def _loop0_268(self) -> Optional[Any]:
    # _loop0_268: ',' dotted_name
    # Separator-loop template: match repeated ", dotted_name" pairs,
    # keeping only the dotted_name elements (the ',' result is discarded).
    mark = self._mark()
    children = []
    while (
        (self.expect(','))
        and
        (elem := self.dotted_name())
    ):
        children.append(elem)
        mark = self._mark()
    # Rewind past a trailing ',' that was consumed but not followed by a
    # dotted_name.
    self._reset(mark)
    return children;
@memoize
def _gather_267(self) -> Optional[Any]:
    # _gather_267: dotted_name _loop0_268
    # Gather template: one dotted_name followed by zero or more
    # ','-separated dotted_names, flattened into a single list.
    mark = self._mark()
    if (
        (elem := self.dotted_name())
        is not None
        and
        # `is not None` (not truthiness) so that an empty tail list still
        # counts as success.
        (seq := self._loop0_268())
        is not None
    ):
        return [elem] + seq;
    self._reset(mark)
    return None;
@memoize
def _loop0_270(self) -> Optional[Any]:
# _loop0_270: ',' (expression ['as' star_target])
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self._tmp_338())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_269(self) -> Optional[Any]:
# _gather_269: (expression ['as' star_target]) _loop0_270
mark = self._mark()
if (
(elem := self._tmp_338())
is not None
and
(seq := self._loop0_270())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_272(self) -> Optional[Any]:
# _loop0_272: ',' (expressions ['as' star_target])
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self._tmp_339())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_271(self) -> Optional[Any]:
# _gather_271: (expressions ['as' star_target]) _loop0_272
mark = self._mark()
if (
(elem := self._tmp_339())
is not None
and
(seq := self._loop0_272())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_274(self) -> Optional[Any]:
# _loop0_274: ',' (expression ['as' star_target])
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self._tmp_340())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_273(self) -> Optional[Any]:
# _gather_273: (expression ['as' star_target]) _loop0_274
mark = self._mark()
if (
(elem := self._tmp_340())
is not None
and
(seq := self._loop0_274())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _loop0_276(self) -> Optional[Any]:
# _loop0_276: ',' (expressions ['as' star_target])
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self._tmp_341())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_275(self) -> Optional[Any]:
# _gather_275: (expressions ['as' star_target]) _loop0_276
mark = self._mark()
if (
(elem := self._tmp_341())
is not None
and
(seq := self._loop0_276())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_277(self) -> Optional[Any]:
    # _tmp_277: 'except' | 'finally'
    # Accept either keyword that may follow a try-block; used by the
    # grammar when parsing try statements.
    mark = self._mark()
    if (
        (literal := self.expect('except'))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect('finally'))
    ):
        return literal;
    self._reset(mark)
    return None;
@memoize
def _loop0_278(self) -> Optional[Any]:
# _loop0_278: block
mark = self._mark()
children = []
while (
(block := self.block())
):
children.append(block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_279(self) -> Optional[Any]:
# _loop1_279: except_block
mark = self._mark()
children = []
while (
(except_block := self.except_block())
):
children.append(except_block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_280(self) -> Optional[Any]:
    # _tmp_280: 'as' NAME
    # Sequence template: both parts must match or the whole group fails
    # and the token stream is rewound.
    mark = self._mark()
    if (
        (literal := self.expect('as'))
        and
        (name := self.name())
    ):
        return [literal, name];
    self._reset(mark)
    return None;
@memoize
def _loop0_281(self) -> Optional[Any]:
# _loop0_281: block
mark = self._mark()
children = []
while (
(block := self.block())
):
children.append(block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _loop1_282(self) -> Optional[Any]:
# _loop1_282: except_star_block
mark = self._mark()
children = []
while (
(except_star_block := self.except_star_block())
):
children.append(except_star_block)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_283(self) -> Optional[Any]:
    # _tmp_283: expression ['as' NAME]
    mark = self._mark()
    if (
        (expression := self.expression())
        and
        # The trailing comma builds a one-element tuple, which is always
        # truthy — this is how the generator encodes an *optional* part:
        # the group succeeds whether or not 'as NAME' is present.
        (opt := self._tmp_342(),)
    ):
        return [expression, opt];
    self._reset(mark)
    return None;
@memoize
def _tmp_284(self) -> Optional[Any]:
# _tmp_284: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(name := self.name())
):
return [literal, name];
self._reset(mark)
return None;
@memoize
def _tmp_285(self) -> Optional[Any]:
# _tmp_285: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(name := self.name())
):
return [literal, name];
self._reset(mark)
return None;
@memoize
def _tmp_286(self) -> Optional[Any]:
# _tmp_286: NEWLINE | ':'
mark = self._mark()
if (
(_newline := self.expect('NEWLINE'))
):
return _newline;
self._reset(mark)
if (
(literal := self.expect(':'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_287(self) -> Optional[Any]:
# _tmp_287: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(name := self.name())
):
return [literal, name];
self._reset(mark)
return None;
@memoize
def _tmp_288(self) -> Optional[Any]:
# _tmp_288: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(name := self.name())
):
return [literal, name];
self._reset(mark)
return None;
@memoize
def _tmp_289(self) -> Optional[Any]:
# _tmp_289: positional_patterns ','
mark = self._mark()
if (
(positional_patterns := self.positional_patterns())
and
(literal := self.expect(','))
):
return [positional_patterns, literal];
self._reset(mark)
return None;
@memoize
def _tmp_290(self) -> Optional[Any]:
# _tmp_290: '->' expression
mark = self._mark()
if (
(literal := self.expect('->'))
and
(expression := self.expression())
):
return [literal, expression];
self._reset(mark)
return None;
@memoize
def _tmp_291(self) -> Optional[Any]:
    # _tmp_291: '(' arguments? ')'
    # A parenthesized, possibly-empty argument list.  The `(..., )` tuple
    # wrapper makes the optional `arguments` part always succeed.
    mark = self._mark()
    if (
        (literal := self.expect('('))
        and
        (opt := self.arguments(),)
        and
        (literal_1 := self.expect(')'))
    ):
        return [literal, opt, literal_1];
    self._reset(mark)
    return None;
@memoize
def _tmp_292(self) -> Optional[Any]:
# _tmp_292: '(' arguments? ')'
mark = self._mark()
if (
(literal := self.expect('('))
and
(opt := self.arguments(),)
and
(literal_1 := self.expect(')'))
):
return [literal, opt, literal_1];
self._reset(mark)
return None;
@memoize
def _loop0_294(self) -> Optional[Any]:
# _loop0_294: ',' double_starred_kvpair
mark = self._mark()
children = []
while (
(self.expect(','))
and
(elem := self.double_starred_kvpair())
):
children.append(elem)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _gather_293(self) -> Optional[Any]:
# _gather_293: double_starred_kvpair _loop0_294
mark = self._mark()
if (
(elem := self.double_starred_kvpair())
is not None
and
(seq := self._loop0_294())
is not None
):
return [elem] + seq;
self._reset(mark)
return None;
@memoize
def _tmp_295(self) -> Optional[Any]:
# _tmp_295: '}' | ','
mark = self._mark()
if (
(literal := self.expect('}'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_296(self) -> Optional[Any]:
# _tmp_296: '}' | ','
mark = self._mark()
if (
(literal := self.expect('}'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(','))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_297(self) -> Optional[Any]:
# _tmp_297: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _tmp_298(self) -> Optional[Any]:
# _tmp_298: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _tmp_299(self) -> Optional[Any]:
    # _tmp_299: '=' | '!' | ':' | '}'
    # One of the four tokens that can terminate the expression part of an
    # f-string replacement field; tried in grammar order.
    mark = self._mark()
    if (
        (literal := self.expect('='))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect('!'))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect(':'))
    ):
        return literal;
    self._reset(mark)
    if (
        (literal := self.expect('}'))
    ):
        return literal;
    self._reset(mark)
    return None;
@memoize
def _tmp_300(self) -> Optional[Any]:
# _tmp_300: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _tmp_301(self) -> Optional[Any]:
# _tmp_301: '!' | ':' | '}'
mark = self._mark()
if (
(literal := self.expect('!'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect(':'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('}'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_302(self) -> Optional[Any]:
# _tmp_302: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _tmp_303(self) -> Optional[Any]:
# _tmp_303: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _tmp_304(self) -> Optional[Any]:
# _tmp_304: '!' NAME
mark = self._mark()
if (
(literal := self.expect('!'))
and
(name := self.name())
):
return [literal, name];
self._reset(mark)
return None;
@memoize
def _tmp_305(self) -> Optional[Any]:
# _tmp_305: ':' | '}'
mark = self._mark()
if (
(literal := self.expect(':'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('}'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_306(self) -> Optional[Any]:
# _tmp_306: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _tmp_307(self) -> Optional[Any]:
# _tmp_307: '!' NAME
mark = self._mark()
if (
(literal := self.expect('!'))
and
(name := self.name())
):
return [literal, name];
self._reset(mark)
return None;
@memoize
def _loop0_308(self) -> Optional[Any]:
# _loop0_308: fstring_format_spec
mark = self._mark()
children = []
while (
(fstring_format_spec := self.fstring_format_spec())
):
children.append(fstring_format_spec)
mark = self._mark()
self._reset(mark)
return children;
@memoize
def _tmp_309(self) -> Optional[Any]:
# _tmp_309: yield_expr | star_expressions
mark = self._mark()
if (
(yield_expr := self.yield_expr())
):
return yield_expr;
self._reset(mark)
if (
(star_expressions := self.star_expressions())
):
return star_expressions;
self._reset(mark)
return None;
@memoize
def _tmp_310(self) -> Optional[Any]:
# _tmp_310: '!' NAME
mark = self._mark()
if (
(literal := self.expect('!'))
and
(name := self.name())
):
return [literal, name];
self._reset(mark)
return None;
@memoize
def _tmp_311(self) -> Optional[Any]:
# _tmp_311: ':' | '}'
mark = self._mark()
if (
(literal := self.expect(':'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('}'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_312(self) -> Optional[Any]:
    # _tmp_312: star_targets '='
    # Match an assignment target followed by '='; only the target `z` is
    # returned — the '=' token is consumed but deliberately discarded.
    mark = self._mark()
    if (
        (z := self.star_targets())
        and
        (self.expect('='))
    ):
        return z;
    self._reset(mark)
    return None;
@memoize
def _tmp_313(self) -> Optional[Any]:
# _tmp_313: '.' | '...'
mark = self._mark()
if (
(literal := self.expect('.'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('...'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_314(self) -> Optional[Any]:
# _tmp_314: '.' | '...'
mark = self._mark()
if (
(literal := self.expect('.'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('...'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _loop0_316(self) -> Optional[Any]:
    # _loop0_316: ',' scenic_class_property_attribute
    # Scenic extension: collect the ','-separated tail of a class property
    # attribute list, discarding the separators.
    mark = self._mark()
    children = []
    while (
        (self.expect(','))
        and
        (elem := self.scenic_class_property_attribute())
    ):
        children.append(elem)
        mark = self._mark()
    self._reset(mark)
    return children;
@memoize
def _gather_315(self) -> Optional[Any]:
    # _gather_315: scenic_class_property_attribute _loop0_316
    # One scenic_class_property_attribute plus its ','-separated tail,
    # flattened into a single list; `is not None` lets an empty tail pass.
    mark = self._mark()
    if (
        (elem := self.scenic_class_property_attribute())
        is not None
        and
        (seq := self._loop0_316())
        is not None
    ):
        return [elem] + seq;
    self._reset(mark)
    return None;
@memoize
def _tmp_317(self) -> Optional[Any]:
# _tmp_317: star_targets '='
mark = self._mark()
if (
(z := self.star_targets())
and
(self.expect('='))
):
return z;
self._reset(mark)
return None;
@memoize
def _tmp_318(self) -> Optional[Any]:
    # _tmp_318: scenic_specifiers ',' NEWLINE
    # Scenic extension: a specifier list with a trailing comma at end of
    # line.  Only the specifiers `x` are returned; the ',' and NEWLINE
    # tokens are consumed and discarded.
    mark = self._mark()
    if (
        (x := self.scenic_specifiers())
        and
        (self.expect(','))
        and
        (self.expect('NEWLINE'))
    ):
        return x;
    self._reset(mark)
    return None;
@memoize
def _tmp_319(self) -> Optional[Any]:
# _tmp_319: scenic_specifiers ',' NEWLINE
mark = self._mark()
if (
(x := self.scenic_specifiers())
and
(self.expect(','))
and
(self.expect('NEWLINE'))
):
return x;
self._reset(mark)
return None;
@memoize
def _tmp_320(self) -> Optional[Any]:
    # _tmp_320: (scenic_precondition_stmt | scenic_invariant_stmt) NEWLINE
    # Scenic extension: one precondition or invariant statement (parsed by
    # the nested helper _tmp_343) terminated by a newline; the NEWLINE
    # token itself is discarded.
    mark = self._mark()
    if (
        (x := self._tmp_343())
        and
        (self.expect('NEWLINE'))
    ):
        return x;
    self._reset(mark)
    return None;
@memoize
def _tmp_321(self) -> Optional[Any]:
# _tmp_321: ',' expression
mark = self._mark()
if (
(self.expect(','))
and
(c := self.expression())
):
return c;
self._reset(mark)
return None;
@memoize
def _tmp_322(self) -> Optional[Any]:
# _tmp_322: ',' star_expression
mark = self._mark()
if (
(self.expect(','))
and
(c := self.star_expression())
):
return c;
self._reset(mark)
return None;
@memoize
def _tmp_323(self) -> Optional[Any]:
# _tmp_323: 'or' conjunction
mark = self._mark()
if (
(self.expect('or'))
and
(c := self.conjunction())
):
return c;
self._reset(mark)
return None;
@memoize
def _tmp_324(self) -> Optional[Any]:
# _tmp_324: 'or' (scenic_temporal_prefix | scenic_temporal_conjunction)
mark = self._mark()
if (
(self.expect('or'))
and
(c := self._tmp_344())
):
return c;
self._reset(mark)
return None;
@memoize
def _tmp_325(self) -> Optional[Any]:
# _tmp_325: 'and' inversion
mark = self._mark()
if (
(self.expect('and'))
and
(c := self.inversion())
):
return c;
self._reset(mark)
return None;
@memoize
def _tmp_326(self) -> Optional[Any]:
# _tmp_326: 'and' (scenic_temporal_prefix | scenic_temporal_inversion)
mark = self._mark()
if (
(self.expect('and'))
and
(c := self._tmp_345())
):
return c;
self._reset(mark)
return None;
@memoize
def _tmp_327(self) -> Optional[Any]:
# _tmp_327: slice | starred_expression
mark = self._mark()
if (
(slice := self.slice())
):
return slice;
self._reset(mark)
if (
(starred_expression := self.starred_expression())
):
return starred_expression;
self._reset(mark)
return None;
@memoize
def _tmp_328(self) -> Optional[Any]:
# _tmp_328: fstring | STRING
mark = self._mark()
if (
(fstring := self.fstring())
):
return fstring;
self._reset(mark)
if (
(string := self.string())
):
return string;
self._reset(mark)
return None;
@memoize
def _tmp_329(self) -> Optional[Any]:
# _tmp_329: 'if' disjunction
mark = self._mark()
if (
(self.expect('if'))
and
(z := self.disjunction())
):
return z;
self._reset(mark)
return None;
@memoize
def _tmp_330(self) -> Optional[Any]:
# _tmp_330: 'if' disjunction
mark = self._mark()
if (
(self.expect('if'))
and
(z := self.disjunction())
):
return z;
self._reset(mark)
return None;
@memoize
def _tmp_331(self) -> Optional[Any]:
    # _tmp_331: starred_expression | (assignment_expression | expression !':=') !'='
    mark = self._mark()
    if (
        (starred_expression := self.starred_expression())
    ):
        return starred_expression;
    self._reset(mark)
    if (
        (_tmp_346 := self._tmp_346())
        and
        # !'=' — succeed only if the next token is NOT '='; the lookahead
        # consumes no input.  This keeps keyword arguments (name=value)
        # from matching as plain positional expressions.
        (self.negative_lookahead(self.expect, '='))
    ):
        return _tmp_346;
    self._reset(mark)
    return None;
@memoize
def _tmp_332(self) -> Optional[Any]:
# _tmp_332: ',' star_target
mark = self._mark()
if (
(self.expect(','))
and
(c := self.star_target())
):
return c;
self._reset(mark)
return None;
@memoize
def _tmp_333(self) -> Optional[Any]:
# _tmp_333: ',' star_target
mark = self._mark()
if (
(self.expect(','))
and
(c := self.star_target())
):
return c;
self._reset(mark)
return None;
@memoize
def _tmp_334(self) -> Optional[Any]:
# _tmp_334: star_targets '='
mark = self._mark()
if (
(star_targets := self.star_targets())
and
(literal := self.expect('='))
):
return [star_targets, literal];
self._reset(mark)
return None;
@memoize
def _tmp_335(self) -> Optional[Any]:
# _tmp_335: star_targets '='
mark = self._mark()
if (
(star_targets := self.star_targets())
and
(literal := self.expect('='))
):
return [star_targets, literal];
self._reset(mark)
return None;
@memoize
def _tmp_336(self) -> Optional[Any]:
# _tmp_336: ')' | '**'
mark = self._mark()
if (
(literal := self.expect(')'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('**'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_337(self) -> Optional[Any]:
# _tmp_337: ':' | '**'
mark = self._mark()
if (
(literal := self.expect(':'))
):
return literal;
self._reset(mark)
if (
(literal := self.expect('**'))
):
return literal;
self._reset(mark)
return None;
@memoize
def _tmp_338(self) -> Optional[Any]:
# _tmp_338: expression ['as' star_target]
mark = self._mark()
if (
(expression := self.expression())
and
(opt := self._tmp_347(),)
):
return [expression, opt];
self._reset(mark)
return None;
@memoize
def _tmp_339(self) -> Optional[Any]:
# _tmp_339: expressions ['as' star_target]
mark = self._mark()
if (
(expressions := self.expressions())
and
(opt := self._tmp_348(),)
):
return [expressions, opt];
self._reset(mark)
return None;
@memoize
def _tmp_340(self) -> Optional[Any]:
# _tmp_340: expression ['as' star_target]
mark = self._mark()
if (
(expression := self.expression())
and
(opt := self._tmp_349(),)
):
return [expression, opt];
self._reset(mark)
return None;
@memoize
def _tmp_341(self) -> Optional[Any]:
# _tmp_341: expressions ['as' star_target]
mark = self._mark()
if (
(expressions := self.expressions())
and
(opt := self._tmp_350(),)
):
return [expressions, opt];
self._reset(mark)
return None;
@memoize
def _tmp_342(self) -> Optional[Any]:
# _tmp_342: 'as' NAME
mark = self._mark()
if (
(literal := self.expect('as'))
and
(name := self.name())
):
return [literal, name];
self._reset(mark)
return None;
@memoize
def _tmp_343(self) -> Optional[Any]:
    # _tmp_343: scenic_precondition_stmt | scenic_invariant_stmt
    # Scenic extension: ordered choice between the two statement kinds
    # allowed in a behavior/scenario header block.
    mark = self._mark()
    if (
        (scenic_precondition_stmt := self.scenic_precondition_stmt())
    ):
        return scenic_precondition_stmt;
    self._reset(mark)
    if (
        (scenic_invariant_stmt := self.scenic_invariant_stmt())
    ):
        return scenic_invariant_stmt;
    self._reset(mark)
    return None;
@memoize
def _tmp_344(self) -> Optional[Any]:
# _tmp_344: scenic_temporal_prefix | scenic_temporal_conjunction
mark = self._mark()
if (
(scenic_temporal_prefix := self.scenic_temporal_prefix())
):
return scenic_temporal_prefix;
self._reset(mark)
if (
(scenic_temporal_conjunction := self.scenic_temporal_conjunction())
):
return scenic_temporal_conjunction;
self._reset(mark)
return None;
@memoize
def _tmp_345(self) -> Optional[Any]:
# _tmp_345: scenic_temporal_prefix | scenic_temporal_inversion
mark = self._mark()
if (
(scenic_temporal_prefix := self.scenic_temporal_prefix())
):
return scenic_temporal_prefix;
self._reset(mark)
if (
(scenic_temporal_inversion := self.scenic_temporal_inversion())
):
return scenic_temporal_inversion;
self._reset(mark)
return None;
@memoize
def _tmp_346(self) -> Optional[Any]:
    # _tmp_346: assignment_expression | expression !':='
    mark = self._mark()
    if (
        (assignment_expression := self.assignment_expression())
    ):
        return assignment_expression;
    self._reset(mark)
    if (
        (expression := self.expression())
        and
        # !':=' — reject a bare expression immediately followed by the
        # walrus operator, so that `x := v` is only ever parsed via the
        # assignment_expression alternative above.
        (self.negative_lookahead(self.expect, ':='))
    ):
        return expression;
    self._reset(mark)
    return None;
@memoize
def _tmp_347(self) -> Optional[Any]:
# _tmp_347: 'as' star_target
mark = self._mark()
if (
(literal := self.expect('as'))
and
(star_target := self.star_target())
):
return [literal, star_target];
self._reset(mark)
return None;
@memoize
def _tmp_348(self) -> Optional[Any]:
# _tmp_348: 'as' star_target
mark = self._mark()
if (
(literal := self.expect('as'))
and
(star_target := self.star_target())
):
return [literal, star_target];
self._reset(mark)
return None;
@memoize
def _tmp_349(self) -> Optional[Any]:
# _tmp_349: 'as' star_target
mark = self._mark()
if (
(literal := self.expect('as'))
and
(star_target := self.star_target())
):
return [literal, star_target];
self._reset(mark)
return None;
@memoize
def _tmp_350(self) -> Optional[Any]:
# _tmp_350: 'as' star_target
mark = self._mark()
if (
(literal := self.expect('as'))
and
(star_target := self.star_target())
):
return [literal, star_target];
self._reset(mark)
return None;
# Reserved words of the Scenic grammar: KEYWORDS are hard keywords (never
# usable as identifiers), SOFT_KEYWORDS are context-sensitive and may still
# appear as ordinary names.  Both tuples are emitted by pegen from the
# grammar file named in the header; do not edit by hand.
KEYWORDS = ('False', 'None', 'True', 'and', 'as', 'assert', 'async', 'at', 'await', 'break', 'by', 'class', 'continue', 'def', 'del', 'do', 'elif', 'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in', 'is', 'lambda', 'new', 'nonlocal', 'not', 'of', 'on', 'or', 'pass', 'raise', 'require', 'return', 'to', 'try', 'until', 'while', 'with', 'yield')
SOFT_KEYWORDS = ('_', 'abort', 'above', 'additive', 'after', 'ahead', 'along', 'altitude', 'always', 'angle', 'apparent', 'apparently', 'away', 'back', 'behavior', 'behind', 'below', 'beyond', 'bottom', 'can', 'case', 'choose', 'compose', 'contained', 'deg', 'directly', 'distance', 'dynamic', 'ego', 'eventually', 'facing', 'final', 'follow', 'following', 'from', 'front', 'heading', 'implies', 'initial', 'interrupt', 'intersects', 'invariant', 'left', 'match', 'model', 'monitor', 'mutate', 'next', 'not', 'of', 'offset', 'override', 'param', 'past', 'position', 'precondition', 'record', 'relative', 'right', 'scenario', 'seconds', 'see', 'setup', 'shuffle', 'simulation', 'simulator', 'steps', 'take', 'terminate', 'top', 'toward', 'type', 'visible', 'wait', 'when', 'workspace')
# Allow the generated module to be run as a script: pegen's simple_parser_main
# wires up command-line argument handling and drives ScenicParser directly.
if __name__ == '__main__':
    from pegen.parser import simple_parser_main
    simple_parser_main(ScenicParser)