Prefer to use ellipsis over pass (#2292)

Author: Yusuke Miyazaki
Date: 2018-06-29 02:29:45 +09:00
Committed by: Jelle Zijlstra
Parent: 187aaaced9
Commit: 581705d9ee
15 changed files with 47 additions and 86 deletions
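For context, a minimal sketch of the convention this commit applies to the stubs (hypothetical class name, not one of the stubs touched below): in .pyi stub files an otherwise empty body is written as a literal ellipsis, which can share the line with the class header, rather than as an indented pass.

# Before: empty stub body spelled with an indented `pass`
class Widget(object):
    pass

# After: the ellipsis form preferred by this commit
class Widget(object): ...

Type checkers treat both spellings as an empty body; the ellipsis form is the conventional one in stubs and saves a line per class.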


@@ -12,8 +12,6 @@ def scanstring(a, b, *args, **kwargs) -> tuple:
     raise TypeError()
-class Encoder(object):
-    pass
+class Encoder(object): ...
-class Scanner(object):
-    pass
+class Scanner(object): ...


@@ -33,13 +33,10 @@ SEEK_END = ... # type: int
 class IOBase(_io._IOBase): ...
-class RawIOBase(_io._RawIOBase, IOBase):
-    pass
+class RawIOBase(_io._RawIOBase, IOBase): ...
-class BufferedIOBase(_io._BufferedIOBase, IOBase):
-    pass
+class BufferedIOBase(_io._BufferedIOBase, IOBase): ...
 # Note: In the actual io.py, TextIOBase subclasses IOBase.
 # (Which we don't do here because we don't want to subclass both TextIO and BinaryIO.)
-class TextIOBase(_io._TextIOBase):
-    pass
+class TextIOBase(_io._TextIOBase): ...


@@ -15,11 +15,8 @@ class LockType:
     def __enter__(self) -> LockType: ...
     def __exit__(self, typ: Any, value: Any, traceback: Any) -> None: ...
-class _local(object):
-    pass
-class _localdummy(object):
-    pass
+class _local(object): ...
+class _localdummy(object): ...
 def start_new(function: Callable[..., Any], args: Any, kwargs: Any = ...) -> int: ...
 def start_new_thread(function: Callable[..., Any], args: Any, kwargs: Any = ...) -> int: ...


@@ -122,11 +122,9 @@ def tokenize(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str,
 def tokenize_loop(readline: Callable[[], str], tokeneater: Callable[[Tuple[int, str, _Pos, _Pos, str]], None]) -> None: ...
 def untokenize(iterable: Iterable[_TokenType]) -> str: ...
-class StopTokenizing(Exception):
-    pass
+class StopTokenizing(Exception): ...
-class TokenError(Exception):
-    pass
+class TokenError(Exception): ...
 class Untokenizer:
     prev_col = ... # type: int