Add fixers to lib2to3 (#10003)

This commit is contained in:
Avasam
2023-04-28 00:56:56 -04:00
committed by GitHub
parent a7748a9dd1
commit fb4bf034f3
65 changed files with 833 additions and 77 deletions

View File

@@ -0,0 +1,28 @@
from _typeshed import Incomplete, SupportsGetItem
from collections import defaultdict
from collections.abc import Iterable
from .fixer_base import BaseFix
from .pytree import Leaf, Node
class BMNode:
    # One state of the bottom-up matching automaton built by BottomMatcher.
    count: Incomplete
    transition_table: Incomplete  # maps tokens to successor BMNodes
    fixers: Incomplete  # fixers that complete a pattern at this node
    id: Incomplete
    content: str
    def __init__(self) -> None: ...
class BottomMatcher:
    # Accumulates fixer patterns into one automaton and matches them
    # bottom-up against the leaves of a parse tree.
    match: Incomplete
    root: Incomplete  # root BMNode of the automaton
    nodes: Incomplete
    fixers: Incomplete
    logger: Incomplete
    def __init__(self) -> None: ...
    def add_fixer(self, fixer: BaseFix) -> None: ...
    def add(self, pattern: SupportsGetItem[int | slice, Incomplete] | None, start: BMNode) -> list[BMNode]: ...
    # Returns, per fixer, the tree nodes its pattern matched.
    def run(self, leaves: Iterable[Leaf]) -> defaultdict[BaseFix, list[Node | Leaf]]: ...
    def print_ac(self) -> None: ...
def type_repr(type_num: int) -> str | int: ...

View File

@@ -0,0 +1,43 @@
from _typeshed import Incomplete, StrPath
from abc import ABCMeta, abstractmethod
from collections.abc import MutableMapping
from typing import ClassVar, TypeVar
from typing_extensions import Literal
from .pytree import Base, Leaf, Node
_N = TypeVar("_N", bound=Base)
class BaseFix:
    # Base class for all 2to3 fixers; subclasses must implement transform().
    PATTERN: ClassVar[str | None]  # pattern source text compiled by compile_pattern()
    pattern: Incomplete | None
    pattern_tree: Incomplete | None
    options: Incomplete | None
    filename: Incomplete | None
    numbers: Incomplete
    used_names: Incomplete
    order: ClassVar[Literal["post", "pre"]]  # tree-traversal order for this fixer
    explicit: ClassVar[bool]  # True when the fixer runs only if explicitly requested
    run_order: ClassVar[int]  # lower numbers run earlier
    keep_line_order: ClassVar[bool]
    BM_compatible: ClassVar[bool]  # True when usable with the bottom-up matcher
    syms: Incomplete
    log: Incomplete
    def __init__(self, options: MutableMapping[str, Incomplete], log: list[str]) -> None: ...
    def compile_pattern(self) -> None: ...
    def set_filename(self, filename: StrPath) -> None: ...
    # Returns False on no match, else a results dict keyed by pattern names.
    def match(self, node: _N) -> Literal[False] | dict[str, _N]: ...
    @abstractmethod
    def transform(self, node: Base, results: dict[str, Base]) -> Node | Leaf | None: ...
    def new_name(self, template: str = "xxx_todo_changeme") -> str: ...
    first_log: bool
    def log_message(self, message: str) -> None: ...
    def cannot_convert(self, node: Base, reason: str | None = None) -> None: ...
    def warning(self, node: Base, reason: str) -> None: ...
    def start_tree(self, tree: Node, filename: StrPath) -> None: ...
    def finish_tree(self, tree: Node, filename: StrPath) -> None: ...
class ConditionalFix(BaseFix, metaclass=ABCMeta):
    # Fixer skipped when the dotted name in skip_on has been imported.
    skip_on: ClassVar[str | None]
    def start_tree(self, __tree: Node, __filename: StrPath) -> None: ...
    def should_skip(self, node: Base) -> bool: ...

View File

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixApply(fixer_base.BaseFix):
    # Stub for the 2to3 "apply" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,11 @@
from typing import ClassVar
from typing_extensions import Literal
from ..fixer_base import BaseFix
# Mapping of deprecated assert-method names to their replacements.
NAMES: dict[str, str]

class FixAsserts(BaseFix):
    # Stub for the 2to3 "asserts" fixer.
    BM_compatible: ClassVar[Literal[False]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixBasestring(fixer_base.BaseFix):
    # Stub for the 2to3 "basestring" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[Literal["'basestring'"]]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixBuffer(fixer_base.BaseFix):
    # Stub for the 2to3 "buffer" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
# Call names exempt from wrapping in iter()/list().
iter_exempt: set[str]

class FixDict(fixer_base.BaseFix):
    # Stub for the 2to3 "dict" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...
    P1: ClassVar[str]  # pattern source; p1 is its compiled form
    p1: ClassVar[Incomplete]
    P2: ClassVar[str]
    p2: ClassVar[Incomplete]
    def in_special_context(self, node, isiter): ...

View File

@@ -0,0 +1,15 @@
from collections.abc import Generator, Iterable
from typing import ClassVar, TypeVar
from typing_extensions import Literal
from .. import fixer_base
from ..pytree import Base
_N = TypeVar("_N", bound=Base)
# Yields (except-clause, colon) node pairs from a try statement's children.
def find_excepts(nodes: Iterable[_N]) -> Generator[tuple[_N, _N], None, None]: ...

class FixExcept(fixer_base.BaseFix):
    # Stub for the 2to3 "except" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixExec(fixer_base.BaseFix):
    # Stub for the 2to3 "exec" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixExecfile(fixer_base.BaseFix):
    # Stub for the 2to3 "execfile" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,14 @@
from _typeshed import Incomplete, StrPath
from lib2to3 import fixer_base
from typing import ClassVar
from typing_extensions import Literal
from ..pytree import Node
class FixExitfunc(fixer_base.BaseFix):
    # Stub for the 2to3 "exitfunc" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def __init__(self, *args) -> None: ...
    sys_import: Incomplete | None  # set in start_tree; tracks the `import sys` node
    def start_tree(self, tree: Node, filename: StrPath) -> None: ...
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,10 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixFilter(fixer_base.ConditionalFix):
    # Stub for the 2to3 "filter" fixer; skipped when
    # future_builtins.filter is imported (see skip_on).
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    skip_on: ClassVar[Literal["future_builtins.filter"]]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixFuncattrs(fixer_base.BaseFix):
    # Stub for the 2to3 "funcattrs" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixFuture(fixer_base.BaseFix):
    # Stub for the 2to3 "future" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixGetcwdu(fixer_base.BaseFix):
    # Stub for the 2to3 "getcwdu" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixHasKey(fixer_base.BaseFix):
    # Stub for the 2to3 "has_key" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,16 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
# Pattern fragments shared by the idiom patterns below.
CMP: str
TYPE: str

class FixIdioms(fixer_base.BaseFix):
    # Stub for the 2to3 "idioms" fixer; dispatches to one of the
    # transform_* methods depending on which idiom matched.
    BM_compatible: ClassVar[Literal[False]]
    PATTERN: ClassVar[str]
    def match(self, node): ...
    def transform(self, node, results): ...
    def transform_isinstance(self, node, results): ...
    def transform_while(self, node, results) -> None: ...
    def transform_sort(self, node, results) -> None: ...

View File

@@ -0,0 +1,17 @@
from _typeshed import StrPath
from collections.abc import Generator
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
from ..pytree import Node
# Yields the dotted module names referenced by an import statement's subtree.
def traverse_imports(names) -> Generator[str, None, None]: ...

class FixImport(fixer_base.BaseFix):
    # Stub for the 2to3 "import" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    skip: bool  # set in start_tree
    def start_tree(self, tree: Node, name: StrPath) -> None: ...
    def transform(self, node, results): ...
    def probably_a_local_import(self, imp_name): ...

View File

@@ -0,0 +1,22 @@
from _typeshed import StrPath
from collections.abc import Generator
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
from ..pytree import Node
# Old module name -> new module name.
MAPPING: dict[str, str]

def alternates(members): ...
def build_pattern(mapping=...) -> Generator[str, None, None]: ...

class FixImports(fixer_base.BaseFix):
    # Stub for the 2to3 "imports" fixer; PATTERN is built dynamically
    # from `mapping`, hence the overridden build/compile/match methods.
    BM_compatible: ClassVar[Literal[True]]
    mapping = MAPPING  # noqa: F821
    def build_pattern(self): ...
    def compile_pattern(self) -> None: ...
    def match(self, node): ...
    replace: dict[str, str]  # set in start_tree
    def start_tree(self, tree: Node, filename: StrPath) -> None: ...
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,6 @@
from . import fix_imports
# Second batch of old module name -> new module name renames.
MAPPING: dict[str, str]

class FixImports2(fix_imports.FixImports):
    # Stub for the 2to3 "imports2" fixer; reuses FixImports machinery
    # with this module's MAPPING.
    mapping = MAPPING  # noqa: F821

View File

@@ -0,0 +1,12 @@
from _typeshed import Incomplete
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
# Pre-compiled pattern used to detect an enclosing eval() call.
context: Incomplete

class FixInput(fixer_base.BaseFix):
    # Stub for the 2to3 "input" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,10 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixIntern(fixer_base.BaseFix):
    # Stub for the 2to3 "intern" fixer.
    BM_compatible: ClassVar[Literal[True]]
    order: ClassVar[Literal["pre"]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixIsinstance(fixer_base.BaseFix):
    # Stub for the 2to3 "isinstance" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,10 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixItertools(fixer_base.BaseFix):
    # Stub for the 2to3 "itertools" fixer.
    BM_compatible: ClassVar[Literal[True]]
    it_funcs: str  # pattern alternation of the itertools function names handled
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,8 @@
from lib2to3 import fixer_base
from typing import ClassVar
from typing_extensions import Literal
class FixItertoolsImports(fixer_base.BaseFix):
    # Stub for the 2to3 "itertools_imports" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,8 @@
from lib2to3 import fixer_base
from typing import ClassVar
from typing_extensions import Literal
class FixLong(fixer_base.BaseFix):
    # Stub for the 2to3 "long" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[Literal["'long'"]]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,10 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixMap(fixer_base.ConditionalFix):
    # Stub for the 2to3 "map" fixer; skipped when future_builtins.map
    # is imported (see skip_on).
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    skip_on: ClassVar[Literal["future_builtins.map"]]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,18 @@
from collections.abc import Generator
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
from ..pytree import Base
# Module-level tree-rewriting helpers used by FixMetaclass.
def has_metaclass(parent): ...
def fixup_parse_tree(cls_node) -> None: ...
def fixup_simple_stmt(parent, i, stmt_node) -> None: ...
def remove_trailing_newline(node) -> None: ...
def find_metas(cls_node) -> Generator[tuple[Base, int, Base], None, None]: ...
def fixup_indent(suite) -> None: ...

class FixMetaclass(fixer_base.BaseFix):
    # Stub for the 2to3 "metaclass" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,11 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
# Old method-attribute name -> new attribute name.
MAP: dict[str, str]

class FixMethodattrs(fixer_base.BaseFix):
    # Stub for the 2to3 "methodattrs" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixNe(fixer_base.BaseFix):
    # Stub for the 2to3 "ne" fixer; matches by token type rather than a
    # PATTERN string, hence the overridden match() and no PATTERN.
    BM_compatible: ClassVar[Literal[False]]
    def match(self, node): ...
    def transform(self, node, results): ...

View File

@@ -0,0 +1,20 @@
from _typeshed import StrPath
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
from ..pytree import Node
# Warning text emitted when next is shadowed by an assignment.
bind_warning: str

class FixNext(fixer_base.BaseFix):
    # Stub for the 2to3 "next" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    order: ClassVar[Literal["pre"]]
    shadowed_next: bool  # set in start_tree
    def start_tree(self, tree: Node, filename: StrPath) -> None: ...
    def transform(self, node, results) -> None: ...

# Module-level helpers for detecting assignments to `next`.
def is_assign_target(node): ...
def find_assign(node): ...
def is_subtree(root, node): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixNonzero(fixer_base.BaseFix):
    # Stub for the 2to3 "nonzero" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixNumliterals(fixer_base.BaseFix):
    # Stub for the 2to3 "numliterals" fixer; matches by token type
    # rather than a PATTERN string, hence the overridden match().
    BM_compatible: ClassVar[Literal[False]]
    def match(self, node): ...
    def transform(self, node, results): ...

View File

@@ -0,0 +1,13 @@
from lib2to3 import fixer_base
from typing import ClassVar
from typing_extensions import Literal
# Decorator factory recording the suggested replacement invocation.
def invocation(s): ...

class FixOperator(fixer_base.BaseFix):
    # Stub for the 2to3 "operator" fixer.
    BM_compatible: ClassVar[Literal[True]]
    order: ClassVar[Literal["pre"]]
    methods: str  # pattern alternation of handled operator.* method names
    obj: str
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixParen(fixer_base.BaseFix):
    # Stub for the 2to3 "paren" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,13 @@
from _typeshed import Incomplete
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
# Pre-compiled pattern matching a parenthesized expression.
parend_expr: Incomplete

class FixPrint(fixer_base.BaseFix):
    # Stub for the 2to3 "print" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...
    def add_kwarg(self, l_nodes, s_kwd, n_expr) -> None: ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixRaise(fixer_base.BaseFix):
    # Stub for the 2to3 "raise" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixRawInput(fixer_base.BaseFix):
    # Stub for the 2to3 "raw_input" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,9 @@
from lib2to3 import fixer_base
from typing import ClassVar
from typing_extensions import Literal
class FixReduce(fixer_base.BaseFix):
    # Stub for the 2to3 "reduce" fixer.
    BM_compatible: ClassVar[Literal[True]]
    order: ClassVar[Literal["pre"]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,10 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixReload(fixer_base.BaseFix):
    # Stub for the 2to3 "reload" fixer.
    BM_compatible: ClassVar[Literal[True]]
    order: ClassVar[Literal["pre"]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,18 @@
from collections.abc import Generator
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
# module -> {old attribute -> new attribute}.
MAPPING: dict[str, dict[str, str]]
# (module, old attribute) -> new attribute, derived from MAPPING.
LOOKUP: dict[tuple[str, str], str]

def alternates(members): ...
def build_pattern() -> Generator[str, None, None]: ...

class FixRenames(fixer_base.BaseFix):
    # Stub for the 2to3 "renames" fixer.
    BM_compatible: ClassVar[Literal[True]]
    order: ClassVar[Literal["pre"]]
    PATTERN: ClassVar[str]
    def match(self, node): ...
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixRepr(fixer_base.BaseFix):
    # Stub for the 2to3 "repr" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,8 @@
from lib2to3 import fixer_base
from typing import ClassVar
from typing_extensions import Literal
class FixSetLiteral(fixer_base.BaseFix):
    # Stub for the 2to3 "set_literal" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixStandarderror(fixer_base.BaseFix):
    # Stub for the 2to3 "standarderror" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,10 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixSysExc(fixer_base.BaseFix):
    # Stub for the 2to3 "sys_exc" fixer.
    exc_info: ClassVar[list[str]]  # the sys.exc_* attribute names handled
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixThrow(fixer_base.BaseFix):
    # Stub for the 2to3 "throw" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,18 @@
from _typeshed import Incomplete
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
def is_docstring(stmt): ...

class FixTupleParams(fixer_base.BaseFix):
    # Stub for the 2to3 "tuple_params" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...
    def transform_lambda(self, node, results) -> None: ...

# Module-level helpers for flattening tuple parameter lists.
def simplify_args(node): ...
def find_params(node): ...
def map_to_index(param_list, prefix=..., d: Incomplete | None = ...): ...
def tuple_name(param_list): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixTypes(fixer_base.BaseFix):
    # Stub for the 2to3 "types" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,13 @@
from _typeshed import StrPath
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
from ..pytree import Node
class FixUnicode(fixer_base.BaseFix):
    # Stub for the 2to3 "unicode" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[Literal["STRING | 'unicode' | 'unichr'"]]  # type: ignore[name-defined]  # Name "STRING" is not defined
    unicode_literals: bool  # set in start_tree
    def start_tree(self, tree: Node, filename: StrPath) -> None: ...
    def transform(self, node, results): ...

View File

@@ -0,0 +1,15 @@
from collections.abc import Generator
from typing_extensions import Literal
from .fix_imports import FixImports
# old urllib/urllib2 module -> [(new module, [moved member names]), ...].
MAPPING: dict[str, list[tuple[Literal["urllib.request", "urllib.parse", "urllib.error"], list[str]]]]

def build_pattern() -> Generator[str, None, None]: ...

class FixUrllib(FixImports):
    # Stub for the 2to3 "urllib" fixer; dispatches to one of the
    # transform_* methods depending on the kind of reference matched.
    def build_pattern(self): ...
    def transform_import(self, node, results) -> None: ...
    def transform_member(self, node, results): ...
    def transform_dot(self, node, results) -> None: ...
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,13 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
from ..pytree import Leaf
class FixWsComma(fixer_base.BaseFix):
    # Stub for the 2to3 "ws_comma" fixer.
    BM_compatible: ClassVar[Literal[False]]
    PATTERN: ClassVar[str]
    COMMA: Leaf  # template leaves used when rewriting separators
    COLON: Leaf
    SEPS: tuple[Leaf, Leaf]
    def transform(self, node, results): ...

View File

@@ -0,0 +1,21 @@
from _typeshed import Incomplete, StrPath
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
from ..pytree import Node
class FixXrange(fixer_base.BaseFix):
    # Stub for the 2to3 "xrange" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    # Populated in start_tree, cleared in finish_tree — hence Optional.
    transformed_xranges: set[Incomplete] | None
    def start_tree(self, tree: Node, filename: StrPath) -> None: ...
    def finish_tree(self, tree: Node, filename: StrPath) -> None: ...
    def transform(self, node, results): ...
    def transform_xrange(self, node, results) -> None: ...
    def transform_range(self, node, results): ...
    P1: ClassVar[str]  # pattern source; p1 is its compiled form
    p1: ClassVar[Incomplete]
    P2: ClassVar[str]
    p2: ClassVar[Incomplete]
    def in_special_context(self, node): ...

View File

@@ -0,0 +1,9 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixXreadlines(fixer_base.BaseFix):
    # Stub for the 2to3 "xreadlines" fixer.
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    def transform(self, node, results) -> None: ...

View File

@@ -0,0 +1,10 @@
from typing import ClassVar
from typing_extensions import Literal
from .. import fixer_base
class FixZip(fixer_base.ConditionalFix):
    # Stub for the 2to3 "zip" fixer; skipped when future_builtins.zip
    # is imported (see skip_on).
    BM_compatible: ClassVar[Literal[True]]
    PATTERN: ClassVar[str]
    skip_on: ClassVar[Literal["future_builtins.zip"]]
    def transform(self, node, results): ...

43
stdlib/lib2to3/main.pyi Normal file
View File

@@ -0,0 +1,43 @@
from _typeshed import FileDescriptorOrPath
from collections.abc import Container, Iterable, Iterator, Mapping, Sequence
from logging import _ExcInfoType
from typing import AnyStr
from typing_extensions import Literal
from . import refactor as refactor
# Yields unified-diff lines between the two texts.
def diff_texts(a: str, b: str, filename: str) -> Iterator[str]: ...

class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
    # Refactoring tool used by the 2to3 command line.
    nobackups: bool
    show_diffs: bool
    def __init__(
        self,
        fixers: Iterable[str],
        options: Mapping[str, object] | None,
        explicit: Container[str] | None,
        nobackups: bool,
        show_diffs: bool,
        input_base_dir: str = "",
        output_dir: str = "",
        append_suffix: str = "",
    ) -> None: ...
    # Same as super.log_error and Logger.error
    def log_error(  # type: ignore[override]
        self,
        msg: str,
        *args: Iterable[str],
        exc_info: _ExcInfoType = None,
        stack_info: bool = False,
        stacklevel: int = 1,
        extra: Mapping[str, object] | None = None,
    ) -> None: ...
    # Same as super.write_file but without default values
    def write_file(  # type: ignore[override]
        self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None
    ) -> None: ...
    # filename has to be str
    def print_output(self, old: str, new: str, filename: str, equal: bool) -> None: ...  # type: ignore[override]

def warn(msg: object) -> None: ...
# Entry point for 2to3; returns a process exit status.
def main(fixer_pkg: str, args: Sequence[AnyStr] | None = None) -> Literal[0, 1, 2]: ...

View File

@@ -1,8 +1,9 @@
from collections.abc import Callable
from lib2to3.pgen2.grammar import Grammar
from lib2to3.pytree import _RawNode
from typing import Any
from typing_extensions import TypeAlias
from ..pytree import _RawNode
from .grammar import Grammar
# This is imported in several lib2to3/pgen2 submodules
_Convert: TypeAlias = Callable[[Grammar, _RawNode], Any] # noqa: Y047

View File

@@ -1,10 +1,11 @@
from _typeshed import StrPath
from collections.abc import Iterable
from lib2to3.pgen2 import _Convert
from lib2to3.pgen2.grammar import Grammar
from lib2to3.pytree import _NL
from logging import Logger
from typing import IO, Any
from typing import IO
from ..pytree import _NL
from . import _Convert
from .grammar import Grammar
__all__ = ["Driver", "load_grammar"]
@@ -13,7 +14,9 @@ class Driver:
logger: Logger
convert: _Convert
def __init__(self, grammar: Grammar, convert: _Convert | None = None, logger: Logger | None = None) -> None: ...
def parse_tokens(self, tokens: Iterable[Any], debug: bool = False) -> _NL: ...
def parse_tokens(
self, tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]], debug: bool = False
) -> _NL: ...
def parse_stream_raw(self, stream: IO[str], debug: bool = False) -> _NL: ...
def parse_stream(self, stream: IO[str], debug: bool = False) -> _NL: ...
def parse_file(self, filename: StrPath, encoding: str | None = None, debug: bool = False) -> _NL: ...

View File

@@ -1,11 +1,12 @@
from _typeshed import Incomplete
from collections.abc import Sequence
from lib2to3.pgen2 import _Convert
from lib2to3.pgen2.grammar import _DFAS, Grammar
from lib2to3.pytree import _NL, _RawNode
from typing import Any
from typing_extensions import TypeAlias
_Context: TypeAlias = Sequence[Any]
from ..pytree import _NL, _RawNode
from . import _Convert
from .grammar import _DFAS, Grammar
_Context: TypeAlias = Sequence[Incomplete]
class ParseError(Exception):
msg: str

View File

@@ -1,8 +1,9 @@
from _typeshed import StrPath
from _typeshed import Incomplete, StrPath
from collections.abc import Iterable, Iterator
from lib2to3.pgen2 import grammar
from lib2to3.pgen2.tokenize import _TokenInfo
from typing import IO, Any, NoReturn
from typing import IO, NoReturn, overload
from . import grammar
from .tokenize import _TokenInfo
class PgenGrammar(grammar.Grammar): ...
@@ -26,19 +27,22 @@ class ParserGenerator:
def parse_alt(self) -> tuple[NFAState, NFAState]: ...
def parse_item(self) -> tuple[NFAState, NFAState]: ...
def parse_atom(self) -> tuple[NFAState, NFAState]: ...
def expect(self, type: int, value: Any | None = None) -> str: ...
def expect(self, type: int, value: str | None = None) -> str: ...
def gettoken(self) -> None: ...
def raise_error(self, msg: str, *args: Any) -> NoReturn: ...
@overload
def raise_error(self, msg: object) -> NoReturn: ...
@overload
def raise_error(self, msg: str, *args: object) -> NoReturn: ...
class NFAState:
arcs: list[tuple[str | None, NFAState]]
def addarc(self, next: NFAState, label: str | None = None) -> None: ...
class DFAState:
nfaset: dict[NFAState, Any]
nfaset: dict[NFAState, Incomplete]
isfinal: bool
arcs: dict[str, DFAState]
def __init__(self, nfaset: dict[NFAState, Any], final: NFAState) -> None: ...
def __init__(self, nfaset: dict[NFAState, Incomplete], final: NFAState) -> None: ...
def addarc(self, next: DFAState, label: str) -> None: ...
def unifystate(self, old: DFAState, new: DFAState) -> None: ...
def __eq__(self, other: DFAState) -> bool: ... # type: ignore[override]

View File

@@ -1,7 +1,8 @@
from collections.abc import Callable, Iterable, Iterator
from lib2to3.pgen2.token import *
from typing_extensions import TypeAlias
from .token import *
__all__ = [
"AMPER",
"AMPEREQUAL",

View File

@@ -1,5 +1,6 @@
import sys
from lib2to3.pgen2.grammar import Grammar
from .pgen2.grammar import Grammar
class Symbols:
def __init__(self, grammar: Grammar) -> None: ...

View File

@@ -1,16 +1,19 @@
from collections.abc import Iterator
from lib2to3.pgen2.grammar import Grammar
from typing import Any
from typing_extensions import Self, TypeAlias
from _typeshed import Incomplete, SupportsGetItem, SupportsLenAndGetItem, Unused
from abc import abstractmethod
from collections.abc import Iterable, Iterator, MutableSequence
from typing_extensions import Final, Self, TypeAlias
from .fixer_base import BaseFix
from .pgen2.grammar import Grammar
_NL: TypeAlias = Node | Leaf
_Context: TypeAlias = tuple[str, int, int]
_Results: TypeAlias = dict[str, _NL]
_RawNode: TypeAlias = tuple[int, str, _Context, list[_NL] | None]
HUGE: int
HUGE: Final = 0x7FFFFFFF
def type_repr(type_num: int) -> str: ...
def type_repr(type_num: int) -> str | int: ...
class Base:
type: int
@@ -20,10 +23,14 @@ class Base:
was_changed: bool
was_checked: bool
def __eq__(self, other: object) -> bool: ...
def _eq(self, other: Self) -> bool: ...
@abstractmethod
def _eq(self, other: Base) -> bool: ...
@abstractmethod
def clone(self) -> Self: ...
def post_order(self) -> Iterator[_NL]: ...
def pre_order(self) -> Iterator[_NL]: ...
@abstractmethod
def post_order(self) -> Iterator[Self]: ...
@abstractmethod
def pre_order(self) -> Iterator[Self]: ...
def replace(self, new: _NL | list[_NL]) -> None: ...
def get_lineno(self) -> int: ...
def changed(self) -> None: ...
@@ -37,15 +44,23 @@ class Base:
def get_suffix(self) -> str: ...
class Node(Base):
fixers_applied: list[Any]
fixers_applied: MutableSequence[BaseFix] | None
# Is Unbound until set in refactor.RefactoringTool
future_features: frozenset[Incomplete]
# Is Unbound until set in pgen2.parse.Parser.pop
used_names: set[str]
def __init__(
self,
type: int,
children: list[_NL],
context: Any | None = None,
children: Iterable[_NL],
context: Unused = None,
prefix: str | None = None,
fixers_applied: list[Any] | None = None,
fixers_applied: MutableSequence[BaseFix] | None = None,
) -> None: ...
def _eq(self, other: Base) -> bool: ...
def clone(self) -> Node: ...
def post_order(self) -> Iterator[Self]: ...
def pre_order(self) -> Iterator[Self]: ...
def set_child(self, i: int, child: _NL) -> None: ...
def insert_child(self, i: int, child: _NL) -> None: ...
def append_child(self, child: _NL) -> None: ...
@@ -55,10 +70,19 @@ class Leaf(Base):
lineno: int
column: int
value: str
fixers_applied: list[Any]
fixers_applied: MutableSequence[BaseFix]
def __init__(
self, type: int, value: str, context: _Context | None = None, prefix: str | None = None, fixers_applied: list[Any] = []
self,
type: int,
value: str,
context: _Context | None = None,
prefix: str | None = None,
fixers_applied: MutableSequence[BaseFix] = [],
) -> None: ...
def _eq(self, other: Base) -> bool: ...
def clone(self) -> Leaf: ...
def post_order(self) -> Iterator[Self]: ...
def pre_order(self) -> Iterator[Self]: ...
def __unicode__(self) -> str: ...
def convert(gr: Grammar, raw_node: _RawNode) -> _NL: ...
@@ -69,8 +93,8 @@ class BasePattern:
name: str | None
def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns
def match(self, node: _NL, results: _Results | None = None) -> bool: ...
def match_seq(self, nodes: list[_NL], results: _Results | None = None) -> bool: ...
def generate_matches(self, nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ...
def match_seq(self, nodes: SupportsLenAndGetItem[_NL], results: _Results | None = None) -> bool: ...
def generate_matches(self, nodes: SupportsGetItem[int, _NL]) -> Iterator[tuple[int, _Results]]: ...
class LeafPattern(BasePattern):
def __init__(self, type: int | None = None, content: str | None = None, name: str | None = None) -> None: ...
@@ -87,4 +111,6 @@ class WildcardPattern(BasePattern):
class NegatedPattern(BasePattern):
def __init__(self, content: str | None = None) -> None: ...
def generate_matches(patterns: list[BasePattern], nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ...
def generate_matches(
patterns: SupportsGetItem[int | slice, BasePattern] | None, nodes: SupportsGetItem[int | slice, _NL]
) -> Iterator[tuple[int, _Results]]: ...

View File

@@ -1,12 +1,16 @@
from _typeshed import FileDescriptorOrPath, StrPath, SupportsGetItem
from collections.abc import Container, Generator, Iterable, Mapping
from logging import Logger
from typing import Any, ClassVar, NoReturn
from typing_extensions import TypeAlias
from logging import Logger, _ExcInfoType
from multiprocessing import JoinableQueue
from multiprocessing.synchronize import Lock
from typing import Any, ClassVar, NoReturn, overload
from typing_extensions import Final
from .btm_matcher import BottomMatcher
from .fixer_base import BaseFix
from .pgen2.driver import Driver
from .pgen2.grammar import Grammar
_Driver: TypeAlias = Any # really lib2to3.driver.Driver
_BottomMatcher: TypeAlias = Any # really lib2to3.btm_matcher.BottomMatcher
from .pytree import Node
def get_all_fix_names(fixer_pkg: str, remove_prefix: bool = True) -> list[str]: ...
def get_fixers_from_package(pkg_name: str) -> list[str]: ...
@@ -21,53 +25,59 @@ class RefactoringTool:
options: dict[str, Any]
grammar: Grammar
write_unchanged_files: bool
errors: list[Any]
errors: list[tuple[str, Iterable[str], dict[str, _ExcInfoType]]]
logger: Logger
fixer_log: list[Any]
fixer_log: list[str]
wrote: bool
driver: _Driver
pre_order: Any
post_order: Any
files: list[Any]
BM: _BottomMatcher
bmi_pre_order: list[Any]
bmi_post_order: list[Any]
driver: Driver
pre_order: list[BaseFix]
post_order: list[BaseFix]
files: list[StrPath]
BM: BottomMatcher
bmi_pre_order: list[BaseFix]
bmi_post_order: list[BaseFix]
def __init__(
self, fixer_names: Iterable[str], options: Mapping[str, Any] | None = None, explicit: Container[str] | None = None
self, fixer_names: Iterable[str], options: Mapping[str, object] | None = None, explicit: Container[str] | None = None
) -> None: ...
def get_fixers(self) -> tuple[list[Any], list[Any]]: ...
def log_error(self, msg: str, *args: Any, **kwds: Any) -> NoReturn: ...
def log_message(self, msg: str, *args: Any) -> None: ...
def log_debug(self, msg: str, *args: Any) -> None: ...
def print_output(self, old_text: str, new_text: str, filename: str, equal): ...
def get_fixers(self) -> tuple[list[BaseFix], list[BaseFix]]: ...
def log_error(self, msg: str, *args: Iterable[str], **kwargs: _ExcInfoType) -> NoReturn: ...
@overload
def log_message(self, msg: object) -> None: ...
@overload
def log_message(self, msg: str, *args: object) -> None: ...
@overload
def log_debug(self, msg: object) -> None: ...
@overload
def log_debug(self, msg: str, *args: object) -> None: ...
def print_output(self, old_text: str, new_text: str, filename: StrPath, equal: bool) -> None: ...
def refactor(self, items: Iterable[str], write: bool = False, doctests_only: bool = False) -> None: ...
def refactor_dir(self, dir_name: str, write: bool = False, doctests_only: bool = False) -> None: ...
def _read_python_source(self, filename: str) -> tuple[str, str]: ...
def refactor_file(self, filename: str, write: bool = False, doctests_only: bool = False) -> None: ...
def refactor_string(self, data: str, name: str): ...
def _read_python_source(self, filename: FileDescriptorOrPath) -> tuple[str, str]: ...
def refactor_file(self, filename: StrPath, write: bool = False, doctests_only: bool = False) -> None: ...
def refactor_string(self, data: str, name: str) -> Node | None: ...
def refactor_stdin(self, doctests_only: bool = False) -> None: ...
def refactor_tree(self, tree, name: str) -> bool: ...
def traverse_by(self, fixers, traversal) -> None: ...
def refactor_tree(self, tree: Node, name: str) -> bool: ...
def traverse_by(self, fixers: SupportsGetItem[int, Iterable[BaseFix]] | None, traversal: Iterable[Node]) -> None: ...
def processed_file(
self, new_text: str, filename: str, old_text: str | None = None, write: bool = False, encoding: str | None = None
self, new_text: str, filename: StrPath, old_text: str | None = None, write: bool = False, encoding: str | None = None
) -> None: ...
def write_file(self, new_text: str, filename: str, old_text: str, encoding: str | None = None) -> None: ...
PS1: ClassVar[str]
PS2: ClassVar[str]
def refactor_docstring(self, input: str, filename: str) -> str: ...
def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: str) -> list[str]: ...
def write_file(self, new_text: str, filename: FileDescriptorOrPath, old_text: str, encoding: str | None = None) -> None: ...
PS1: Final = ">>> "
PS2: Final = "... "
def refactor_docstring(self, input: str, filename: StrPath) -> str: ...
def refactor_doctest(self, block: list[str], lineno: int, indent: int, filename: StrPath) -> list[str]: ...
def summarize(self) -> None: ...
def parse_block(self, block: Iterable[str], lineno: int, indent: int): ...
def parse_block(self, block: Iterable[str], lineno: int, indent: int) -> Node: ...
def wrap_toks(
self, block: Iterable[str], lineno: int, indent: int
) -> Generator[tuple[Any, Any, tuple[int, int], tuple[int, int], str], None, None]: ...
) -> Generator[tuple[int, str, tuple[int, int], tuple[int, int], str], None, None]: ...
def gen_lines(self, block: Iterable[str], indent: int) -> Generator[str, None, None]: ...
class MultiprocessingUnsupported(Exception): ...
class MultiprocessRefactoringTool(RefactoringTool):
queue: Any | None
output_lock: Any | None
queue: JoinableQueue[None | tuple[Iterable[str], bool | int]] | None
output_lock: Lock | None
def refactor(
self, items: Iterable[str], write: bool = False, doctests_only: bool = False, num_processes: int = 1
) -> None: ...