Be stricter about mypy needing error codes

These make it clearer what is being ignored and make it
harder to accidentally ignore more than expected.
This commit is contained in:
Peter Law
2023-02-13 19:45:11 +00:00
parent 4eba7d697f
commit 171fd33cb6
3 changed files with 9 additions and 6 deletions

View File

@@ -106,14 +106,14 @@ class Grammar(Generic[_NodeT]):
if file_io is None: if file_io is None:
if code is None: if code is None:
file_io = FileIO(path) # type: ignore file_io = FileIO(path) # type: ignore[arg-type]
else: else:
file_io = KnownContentFileIO(path, code) file_io = KnownContentFileIO(path, code)
if cache and file_io.path is not None: if cache and file_io.path is not None:
module_node = load_module(self._hashed, file_io, cache_path=cache_path) module_node = load_module(self._hashed, file_io, cache_path=cache_path)
if module_node is not None: if module_node is not None:
return module_node # type: ignore return module_node # type: ignore[no-any-return]
if code is None: if code is None:
code = file_io.read() code = file_io.read()
@@ -132,7 +132,7 @@ class Grammar(Generic[_NodeT]):
module_node = module_cache_item.node module_node = module_cache_item.node
old_lines = module_cache_item.lines old_lines = module_cache_item.lines
if old_lines == lines: if old_lines == lines:
return module_node # type: ignore return module_node # type: ignore[no-any-return]
new_node = self._diff_parser( new_node = self._diff_parser(
self._pgen_grammar, self._tokenizer, module_node self._pgen_grammar, self._tokenizer, module_node
@@ -144,7 +144,7 @@ class Grammar(Generic[_NodeT]):
# Never pickle in pypy, it's slow as hell. # Never pickle in pypy, it's slow as hell.
pickling=cache and not is_pypy, pickling=cache and not is_pypy,
cache_path=cache_path) cache_path=cache_path)
return new_node # type: ignore return new_node # type: ignore[no-any-return]
tokens = self._tokenizer(lines) tokens = self._tokenizer(lines)
@@ -160,7 +160,7 @@ class Grammar(Generic[_NodeT]):
# Never pickle in pypy, it's slow as hell. # Never pickle in pypy, it's slow as hell.
pickling=cache and not is_pypy, pickling=cache and not is_pypy,
cache_path=cache_path) cache_path=cache_path)
return root_node # type: ignore return root_node # type: ignore[no-any-return]
def _get_token_namespace(self): def _get_token_namespace(self):
ns = self._token_namespace ns = self._token_namespace

View File

@@ -276,7 +276,7 @@ def generate_grammar(bnf_grammar: str, token_namespace) -> Grammar:
dfa_state.transitions[transition] = DFAPlan(next_dfa) dfa_state.transitions[transition] = DFAPlan(next_dfa)
_calculate_tree_traversal(rule_to_dfas) _calculate_tree_traversal(rule_to_dfas)
return Grammar(start_nonterminal, rule_to_dfas, reserved_strings) # type: ignore return Grammar(start_nonterminal, rule_to_dfas, reserved_strings) # type: ignore[arg-type]
def _make_transition(token_namespace, reserved_syntax_strings, label): def _make_transition(token_namespace, reserved_syntax_strings, label):

View File

@@ -13,6 +13,9 @@ ignore =
[mypy] [mypy]
show_error_codes = true
enable_error_code = ignore-without-code
disallow_subclassing_any = True disallow_subclassing_any = True
# Avoid creating future gotchas emerging from bad typing # Avoid creating future gotchas emerging from bad typing