Mirror of https://github.com/davidhalter/jedi.git

Commit: NO_CONTEXTS -> NO_VALUES
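The diff below is a mechanical rename across the inference package: the empty-result sentinel NO_CONTEXTS from jedi.inference.base_value becomes NO_VALUES, while its definition (ContextSet([])) and every call-site pattern stay the same. (File names and leading whitespace inside the hunks were not preserved by the mirror; hunk headers are kept as extracted.) As orientation only, here is a simplified sketch of the sentinel pattern the diff touches -- the names mirror the diff, but this is not jedi's actual ContextSet implementation:

# Simplified sketch, not jedi's real ContextSet: an immutable set-like
# inference result whose single empty instance is returned when nothing
# can be inferred and accumulated with |= at call sites.
class ContextSet:
    def __init__(self, iterable=()):
        self._set = frozenset(iterable)

    def __or__(self, other):
        return ContextSet(self._set | other._set)

    def __bool__(self):
        return bool(self._set)

    def __iter__(self):
        return iter(self._set)

    @classmethod
    def from_sets(cls, sets):
        result = cls()
        for s in sets:
            result |= s
        return result


NO_VALUES = ContextSet([])   # previously spelled NO_CONTEXTS


def infer(candidates):
    # Typical call-site shape seen throughout the diff: start from the
    # empty sentinel, union partial results in, fall back to NO_VALUES.
    result = NO_VALUES
    for value_set in candidates:
        result |= value_set
    return result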
@@ -9,7 +9,7 @@ from parso.python.parser import Parser
 from parso.python import tree

 from jedi._compatibility import u, Parameter
-from jedi.inference.base_value import NO_CONTEXTS
+from jedi.inference.base_value import NO_VALUES
 from jedi.inference.syntax_tree import infer_atom
 from jedi.inference.helpers import infer_call_of_leaf
 from jedi.inference.compiled import get_string_value_set
@@ -143,7 +143,7 @@ def infer_goto_definition(infer_state, value, leaf):
 return infer_state.goto_definitions(value, leaf)

 parent = leaf.parent
-definitions = NO_CONTEXTS
+definitions = NO_VALUES
 if parent.type == 'atom':
 # e.g. `(a + b)`
 definitions = value.infer_node(leaf.parent)

@@ -76,7 +76,7 @@ from jedi.inference.cache import infer_state_function_cache
 from jedi.inference import helpers
 from jedi.inference.names import TreeNameDefinition, ParamName
 from jedi.inference.base_value import ContextualizedName, ContextualizedNode, \
-ContextSet, NO_CONTEXTS, iterate_values
+ContextSet, NO_VALUES, iterate_values
 from jedi.inference.value import ClassContext, FunctionContext, \
 AnonymousInstance, BoundMethod
 from jedi.inference.value.iterable import CompForContext
@@ -208,7 +208,7 @@ class InferState(object):
 for name_dict in name_dicts:
 name_dict[if_name.value] = definitions
 if len(name_dicts) > 1:
-result = NO_CONTEXTS
+result = NO_VALUES
 for name_dict in name_dicts:
 with helpers.predefine_names(value, if_stmt, name_dict):
 result |= infer_node(value, element)
@@ -233,7 +233,7 @@ class InferState(object):
 return infer_node(value, element)
 return self._infer_element_cached(value, element)

-@infer_state_function_cache(default=NO_CONTEXTS)
+@infer_state_function_cache(default=NO_VALUES)
 def _infer_element_cached(self, value, element):
 return infer_node(value, element)

@@ -9,7 +9,7 @@ from jedi.inference import analysis
 from jedi.inference.lazy_value import LazyKnownContext, LazyKnownContexts, \
 LazyTreeContext, get_merged_lazy_value
 from jedi.inference.names import ParamName, TreeNameDefinition
-from jedi.inference.base_value import NO_CONTEXTS, ContextSet, ContextualizedNode
+from jedi.inference.base_value import NO_VALUES, ContextSet, ContextualizedNode
 from jedi.inference.value import iterable
 from jedi.inference.cache import infer_state_as_method_param_cache
 from jedi.inference.param import get_executed_params_and_issues, ExecutedParam
@@ -64,7 +64,7 @@ def repack_with_argument_clinic(string, keep_arguments_param=False, keep_callbac
 clinic_args
 ))
 except ParamIssue:
-return NO_CONTEXTS
+return NO_VALUES
 else:
 return func(value, *args, **kwargs)

@@ -98,7 +98,7 @@ def _iterate_argument_clinic(infer_state, arguments, parameters):
 name, len(parameters), i)
 raise ParamIssue

-value_set = NO_CONTEXTS if argument is None else argument.infer()
+value_set = NO_VALUES if argument is None else argument.infer()

 if not value_set and not optional:
 # For the stdlib we always want values. If we don't get them,

@@ -137,7 +137,7 @@ class Context(HelperContextMixin, BaseContext):
 valueualized_node.node,
 message="TypeError: '%s' object is not subscriptable" % self
 )
-return NO_CONTEXTS
+return NO_VALUES

 def py__iter__(self, valueualized_node=None):
 if valueualized_node is not None:
@@ -196,11 +196,11 @@ class Context(HelperContextMixin, BaseContext):

 def py__call__(self, arguments):
 debug.warning("no execution possible %s", self)
-return NO_CONTEXTS
+return NO_VALUES

 def py__stop_iteration_returns(self):
 debug.warning("Not possible to return the stop iterations of %s", self)
-return NO_CONTEXTS
+return NO_VALUES

 def get_qualified_names(self):
 # Returns Optional[Tuple[str, ...]]
@@ -346,7 +346,7 @@ def _getitem(value, index_values, valueualized_node):
 # The actual getitem call.
 simple_getitem = getattr(value, 'py__simple_getitem__', None)

-result = NO_CONTEXTS
+result = NO_VALUES
 unused_values = set()
 for index_value in index_values:
 if simple_getitem is not None:
@@ -426,7 +426,7 @@ class ContextSet(BaseContextSet):
 return [sig for c in self._set for sig in c.get_signatures()]


-NO_CONTEXTS = ContextSet([])
+NO_VALUES = ContextSet([])


 def iterator_to_value_set(func):

@@ -11,7 +11,7 @@ from jedi.cache import underscore_memoization, memoize_method
 from jedi.inference.filters import AbstractFilter
 from jedi.inference.names import AbstractNameDefinition, ContextNameMixin, \
 ParamNameInterface
-from jedi.inference.base_value import Context, ContextSet, NO_CONTEXTS
+from jedi.inference.base_value import Context, ContextSet, NO_VALUES
 from jedi.inference.lazy_value import LazyKnownContext
 from jedi.inference.compiled.access import _sentinel
 from jedi.inference.cache import infer_state_function_cache
@@ -185,7 +185,7 @@ class CompiledObject(Context):
 with reraise_getitem_errors(IndexError, KeyError, TypeError):
 access = self.access_handle.py__simple_getitem__(index)
 if access is None:
-return NO_CONTEXTS
+return NO_VALUES

 return ContextSet([create_from_access_path(self.infer_state, access)])

@@ -265,7 +265,7 @@ class CompiledObject(Context):
 return create_from_access_path(self.infer_state, self.access_handle.negate())

 def get_metaclasses(self):
-return NO_CONTEXTS
+return NO_VALUES


 class CompiledName(AbstractNameDefinition):
@@ -323,7 +323,7 @@ class SignatureParamName(ParamNameInterface, AbstractNameDefinition):
 def infer(self):
 p = self._signature_param
 infer_state = self.parent_value.infer_state
-values = NO_CONTEXTS
+values = NO_VALUES
 if p.has_default:
 values = ContextSet([create_from_access_path(infer_state, p.default)])
 if p.has_annotation:
@@ -348,7 +348,7 @@ class UnresolvableParamName(ParamNameInterface, AbstractNameDefinition):
 return string

 def infer(self):
-return NO_CONTEXTS
+return NO_VALUES


 class CompiledContextName(ContextNameMixin, AbstractNameDefinition):
@@ -369,7 +369,7 @@ class EmptyCompiledName(AbstractNameDefinition):
 self.string_name = name

 def infer(self):
-return NO_CONTEXTS
+return NO_VALUES


 class CompiledObjectFilter(AbstractFilter):

@@ -26,7 +26,7 @@ from jedi import debug
 from jedi.inference.utils import indent_block
 from jedi.inference.cache import infer_state_method_cache
 from jedi.inference.base_value import iterator_to_value_set, ContextSet, \
-NO_CONTEXTS
+NO_VALUES
 from jedi.inference.lazy_value import LazyKnownContexts


@@ -281,7 +281,7 @@ def infer_param(execution_value, param):
 module_value = execution_value.get_root_value()
 func = param.get_parent_function()
 if func.type == 'lambdef':
-return NO_CONTEXTS
+return NO_VALUES

 types = infer_docstring(execution_value.py__doc__())
 if isinstance(execution_value, FunctionExecutionContext) \

@@ -27,7 +27,7 @@ from jedi.inference.helpers import is_stdlib_path
 from jedi.inference.utils import to_list
 from jedi.parser_utils import get_parent_scope
 from jedi.inference.value import ModuleContext, instance
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS
+from jedi.inference.base_value import ContextSet, NO_VALUES
 from jedi.inference import recursion


@@ -50,7 +50,7 @@ class DynamicExecutedParams(object):
 # more or less self referencing.
 if allowed:
 return ContextSet.from_sets(p.infer() for p in self._executed_params)
-return NO_CONTEXTS
+return NO_VALUES


 @debug.increase_indent

@@ -27,7 +27,7 @@ from jedi.inference import helpers
 from jedi.inference.value import iterable
 from jedi.inference.filters import get_global_filters
 from jedi.inference.names import TreeNameDefinition
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS
+from jedi.inference.base_value import ContextSet, NO_VALUES
 from jedi.parser_utils import is_scope, get_parent_scope
 from jedi.inference.gradual.conversion import convert_values

@@ -61,7 +61,7 @@ class NameFinder(object):
 node=self._name,
 )
 if check is flow_analysis.UNREACHABLE:
-return NO_CONTEXTS
+return NO_VALUES
 return self._found_predefined_types

 types = self._names_to_types(names, attribute_lookup)
@@ -280,7 +280,7 @@ def _check_isinstance_type(value, element, search_name):
 except AssertionError:
 return None

-value_set = NO_CONTEXTS
+value_set = NO_VALUES
 for cls_or_tup in lazy_value_cls.infer():
 if isinstance(cls_or_tup, iterable.Sequence) and cls_or_tup.array_type == 'tuple':
 for lazy_value in cls_or_tup.py__iter__():

@@ -11,7 +11,7 @@ from parso import ParserSyntaxError, parse

 from jedi._compatibility import force_unicode
 from jedi.inference.cache import infer_state_method_cache
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS
+from jedi.inference.base_value import ContextSet, NO_VALUES
 from jedi.inference.gradual.typing import TypeVar, LazyGenericClass, \
 AbstractAnnotatedClass
 from jedi.inference.gradual.typing import GenericClass
@@ -47,7 +47,7 @@ def infer_annotation(value, annotation):
 def _infer_annotation_string(value, string, index=None):
 node = _get_forward_reference_node(value, string)
 if node is None:
-return NO_CONTEXTS
+return NO_VALUES

 value_set = value.infer_node(node)
 if index is not None:
@@ -142,11 +142,11 @@ def _infer_param(execution_value, param):
 node = param.parent.parent
 comment = parser_utils.get_following_comment_same_line(node)
 if comment is None:
-return NO_CONTEXTS
+return NO_VALUES

 match = re.match(r"^#\s*type:\s*\(([^#]*)\)\s*->", comment)
 if not match:
-return NO_CONTEXTS
+return NO_VALUES
 params_comments = _split_comment_param_declaration(match.group(1))

 # Find the specific param being investigated
@@ -162,10 +162,10 @@ def _infer_param(execution_value, param):
 if isinstance(execution_value.var_args, InstanceArguments):
 if index == 0:
 # Assume it's self, which is already handled
-return NO_CONTEXTS
+return NO_VALUES
 index -= 1
 if index >= len(params_comments):
-return NO_CONTEXTS
+return NO_VALUES

 param_comment = params_comments[index]
 return _infer_annotation_string(
@@ -203,18 +203,18 @@ def infer_return_types(function_execution_value):
 node = function_execution_value.tree_node
 comment = parser_utils.get_following_comment_same_line(node)
 if comment is None:
-return NO_CONTEXTS
+return NO_VALUES

 match = re.match(r"^#\s*type:\s*\([^#]*\)\s*->\s*([^#]*)", comment)
 if not match:
-return NO_CONTEXTS
+return NO_VALUES

 return _infer_annotation_string(
 function_execution_value.function_value.get_default_param_value(),
 match.group(1).strip()
 ).execute_annotation()
 if annotation is None:
-return NO_CONTEXTS
+return NO_VALUES

 value = function_execution_value.function_value.get_default_param_value()
 unknown_type_vars = list(find_unknown_type_vars(value, annotation))

@@ -1,6 +1,6 @@
 from jedi import debug
 from jedi.inference.base_value import ContextSet, \
-NO_CONTEXTS
+NO_VALUES
 from jedi.inference.utils import to_list
 from jedi.inference.gradual.stub_value import StubModuleContext

@@ -16,7 +16,7 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):

 qualified_names = stub_value.get_qualified_names()
 if qualified_names is None:
-return NO_CONTEXTS
+return NO_VALUES

 was_bound_method = stub_value.is_bound_method()
 if was_bound_method:
@@ -60,7 +60,7 @@ def _try_stub_to_python_names(names, prefer_stub_to_compiled=False):

 name_list = name.get_qualified_names()
 if name_list is None:
-values = NO_CONTEXTS
+values = NO_VALUES
 else:
 values = _infer_from_stub(
 module,
@@ -112,7 +112,7 @@ def _python_to_stub_names(names, fallback_to_python=False):
 continue

 name_list = name.get_qualified_names()
-stubs = NO_CONTEXTS
+stubs = NO_VALUES
 if name_list is not None:
 stub_module = _load_stub_module(module)
 if stub_module is not None:
@@ -150,7 +150,7 @@ def convert_values(values, only_stubs=False, prefer_stubs=False, ignore_compiled
 if only_stubs or prefer_stubs:
 return ContextSet.from_sets(
 to_stub(value)
-or (ContextSet({value}) if prefer_stubs else NO_CONTEXTS)
+or (ContextSet({value}) if prefer_stubs else NO_VALUES)
 for value in values
 )
 else:
@@ -173,7 +173,7 @@ def to_stub(value):
 qualified_names = value.get_qualified_names()
 stub_module = _load_stub_module(value.get_root_value())
 if stub_module is None or qualified_names is None:
-return NO_CONTEXTS
+return NO_VALUES

 was_bound_method = value.is_bound_method()
 if was_bound_method:

@@ -5,7 +5,7 @@ from functools import wraps
 from jedi.file_io import FileIO
 from jedi._compatibility import FileNotFoundError, cast_path
 from jedi.parser_utils import get_cached_code_lines
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS
+from jedi.inference.base_value import ContextSet, NO_VALUES
 from jedi.inference.gradual.stub_value import TypingModuleWrapper, StubModuleContext

 _jedi_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -150,7 +150,7 @@ def _try_to_load_stub(infer_state, import_names, python_value_set,
 if parent_module_value is None and len(import_names) > 1:
 try:
 parent_module_value = _try_to_load_stub_cached(
-infer_state, import_names[:-1], NO_CONTEXTS,
+infer_state, import_names[:-1], NO_VALUES,
 parent_module_value=None, sys_path=sys_path)
 except KeyError:
 pass

@@ -9,7 +9,7 @@ from jedi._compatibility import unicode, force_unicode
 from jedi import debug
 from jedi.inference.cache import infer_state_method_cache
 from jedi.inference.compiled import builtin_from_name
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS, Context, \
+from jedi.inference.base_value import ContextSet, NO_VALUES, Context, \
 iterator_to_value_set, ContextWrapper, LazyContextWrapper
 from jedi.inference.lazy_value import LazyKnownContexts
 from jedi.inference.value.iterable import SequenceLiteralContext
@@ -286,7 +286,7 @@ class _ContainerBase(_WithIndexBase):
 return values

 debug.warning('No param #%s found for annotation %s', index, self._index_value)
-return NO_CONTEXTS
+return NO_VALUES


 class Callable(_ContainerBase):
@@ -313,7 +313,7 @@ class Tuple(_ContainerBase):
 return self._get_getitem_values(index).execute_annotation()

 debug.dbg('The getitem type on Tuple was %s' % index)
-return NO_CONTEXTS
+return NO_VALUES

 def py__iter__(self, valueualized_node=None):
 if self._is_homogenous():
@@ -343,7 +343,7 @@ class Protocol(_ContainerBase):
 class Any(_BaseTypingContext):
 def execute_annotation(self):
 debug.warning('Used Any - returned no results')
-return NO_CONTEXTS
+return NO_VALUES


 class TypeVarClass(_BaseTypingContext):
@@ -355,7 +355,7 @@ class TypeVarClass(_BaseTypingContext):
 # The name must be given, otherwise it's useless.
 if var_name is None or key is not None:
 debug.warning('Found a variable without a name %s', arguments)
-return NO_CONTEXTS
+return NO_VALUES

 return ContextSet([TypeVar.create_cached(
 self.infer_state,
@@ -424,7 +424,7 @@ class TypeVar(_BaseTypingContext):
 if self._constraints_lazy_values:
 return self.constraints
 debug.warning('Tried to infer the TypeVar %s without a given type', self._var_name)
-return NO_CONTEXTS
+return NO_VALUES

 def is_same_class(self, other):
 # Everything can match an undefined type var.
@@ -466,7 +466,7 @@ class NewTypeFunction(_BaseTypingContext):
 next(ordered_args, (None, None))
 _, second_arg = next(ordered_args, (None, None))
 if second_arg is None:
-return NO_CONTEXTS
+return NO_VALUES
 return ContextSet(
 NewType(
 self.infer_state,
@@ -602,7 +602,7 @@ class AbstractAnnotatedClass(ClassMixin, ContextWrapper):
 changed = False
 new_generics = []
 for generic_set in self.get_generics():
-values = NO_CONTEXTS
+values = NO_VALUES
 for generic in generic_set:
 if isinstance(generic, (AbstractAnnotatedClass, TypeVar)):
 result = generic.define_generics(type_var_dict)
@@ -678,7 +678,7 @@ class LazyAnnotatedBaseClass(object):
 def _remap_type_vars(self, base):
 filter = self._class_value.get_type_var_filter()
 for type_var_set in base.get_generics():
-new = NO_CONTEXTS
+new = NO_VALUES
 for type_var in type_var_set:
 if isinstance(type_var, TypeVar):
 names = filter.get(type_var.py__name__())

@@ -30,7 +30,7 @@ from jedi.inference import analysis
 from jedi.inference.utils import unite
 from jedi.inference.cache import infer_state_method_cache
 from jedi.inference.names import ImportName, SubModuleName
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS
+from jedi.inference.base_value import ContextSet, NO_VALUES
 from jedi.inference.gradual.typeshed import import_module_decorator
 from jedi.inference.value.module import iter_module_names
 from jedi.plugins import plugin_manager
@@ -56,7 +56,7 @@ class ModuleCache(object):

 # This memoization is needed, because otherwise we will infinitely loop on
 # certain imports.
-@infer_state_method_cache(default=NO_CONTEXTS)
+@infer_state_method_cache(default=NO_VALUES)
 def infer_import(value, tree_name, is_goto=False):
 module_value = value.get_root_value()
 import_node = search_ancestor(tree_name, 'import_name', 'import_from')
@@ -84,7 +84,7 @@ def infer_import(value, tree_name, is_goto=False):
 # scopes = [NestedImportModule(module, import_node)]

 if not types:
-return NO_CONTEXTS
+return NO_VALUES

 if from_import_name is not None:
 types = unite(
@@ -279,7 +279,7 @@ class Importer(object):

 def follow(self):
 if not self.import_path or not self._infer_possible:
-return NO_CONTEXTS
+return NO_VALUES

 import_names = tuple(
 force_unicode(i.value if isinstance(i, tree.Name) else i)
@@ -299,7 +299,7 @@ class Importer(object):
 if not value_set:
 message = 'No module named ' + '.'.join(import_names)
 _add_error(self.module_value, name, message)
-return NO_CONTEXTS
+return NO_VALUES
 return value_set

 def _get_module_names(self, search_path=None, in_module=None):
@@ -381,7 +381,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
 if import_names[0] in settings.auto_import_modules:
 module = _load_builtin_module(infer_state, import_names, sys_path)
 if module is None:
-return NO_CONTEXTS
+return NO_VALUES
 return ContextSet([module])

 module_name = '.'.join(import_names)
@@ -395,13 +395,13 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
 is_global_search=True,
 )
 if is_pkg is None:
-return NO_CONTEXTS
+return NO_VALUES
 else:
 try:
 method = parent_module_value.py__path__
 except AttributeError:
 # The module is not a package.
-return NO_CONTEXTS
+return NO_VALUES
 else:
 paths = method()
 for path in paths:
@@ -418,7 +418,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
 if is_pkg is not None:
 break
 else:
-return NO_CONTEXTS
+return NO_VALUES

 if isinstance(file_io_or_ns, ImplicitNSInfo):
 from jedi.inference.value.namespace import ImplicitNamespaceContext
@@ -430,7 +430,7 @@ def import_module(infer_state, import_names, parent_module_value, sys_path):
 elif file_io_or_ns is None:
 module = _load_builtin_module(infer_state, import_names, sys_path)
 if module is None:
-return NO_CONTEXTS
+return NO_VALUES
 else:
 module = _load_python_module(
 infer_state, file_io_or_ns, sys_path,

@@ -1,4 +1,4 @@
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS
+from jedi.inference.base_value import ContextSet, NO_VALUES
 from jedi.common.utils import monkeypatch


@@ -30,7 +30,7 @@ class LazyUnknownContext(AbstractLazyContext):
 super(LazyUnknownContext, self).__init__(None)

 def infer(self):
-return NO_CONTEXTS
+return NO_VALUES


 class LazyTreeContext(AbstractLazyContext):

@@ -3,7 +3,7 @@ from abc import abstractmethod
 from parso.tree import search_ancestor

 from jedi._compatibility import Parameter
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS
+from jedi.inference.base_value import ContextSet, NO_VALUES
 from jedi.cache import memoize_method


@@ -72,7 +72,7 @@ class AbstractArbitraryName(AbstractNameDefinition):
 self.parent_value = infer_state.builtins_module

 def infer(self):
-return NO_CONTEXTS
+return NO_VALUES


 class AbstractTreeName(AbstractNameDefinition):
@@ -240,7 +240,7 @@ class ParamName(BaseTreeParamName):
 def infer_annotation(self, execute_annotation=True):
 node = self.annotation_node
 if node is None:
-return NO_CONTEXTS
+return NO_VALUES
 values = self.parent_value.parent_value.infer_node(node)
 if execute_annotation:
 values = values.execute_annotation()
@@ -249,7 +249,7 @@ class ParamName(BaseTreeParamName):
 def infer_default(self):
 node = self.default_node
 if node is None:
-return NO_CONTEXTS
+return NO_VALUES
 return self.parent_value.parent_value.infer_node(node)

 @property

@@ -29,7 +29,7 @@ therefore the quality might not always be maximal.
 from contextlib import contextmanager

 from jedi import debug
-from jedi.inference.base_value import NO_CONTEXTS
+from jedi.inference.base_value import NO_VALUES


 recursion_limit = 15
@@ -75,7 +75,7 @@ def execution_allowed(infer_state, node):
 pushed_nodes.pop()


-def execution_recursion_decorator(default=NO_CONTEXTS):
+def execution_recursion_decorator(default=NO_VALUES):
 def decorator(func):
 def wrapper(self, **kwargs):
 detector = self.infer_state.execution_recursion_detector

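Several hunks in this commit change decorator defaults such as infer_state_function_cache(default=NO_VALUES) and, in the hunk just above, execution_recursion_decorator(default=NO_VALUES). Purely as illustration -- a hypothetical, much-simplified decorator reusing the ContextSet/NO_VALUES sketch from the top of this page, not jedi's actual implementation -- the role of that default is to hand back the empty sentinel instead of None whenever the wrapped inference call cannot run, typically because it is already in progress and would recurse:

def recursion_guard(default=NO_VALUES):
    # Hypothetical stand-in for a decorator taking default=NO_VALUES;
    # only the default-fallback idea is meant to match the real code.
    def decorator(func):
        in_progress = set()

        def wrapper(self, *args):
            key = (id(self),) + tuple(id(a) for a in args)
            if key in in_progress:
                return default          # bail out with the empty sentinel
            in_progress.add(key)
            try:
                return func(self, *args)
            finally:
                in_progress.discard(key)
        return wrapper
    return decorator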
@@ -8,7 +8,7 @@ from parso.python import tree
 from jedi._compatibility import force_unicode, unicode
 from jedi import debug
 from jedi import parser_utils
-from jedi.inference.base_value import ContextSet, NO_CONTEXTS, ContextualizedNode, \
+from jedi.inference.base_value import ContextSet, NO_VALUES, ContextualizedNode, \
 ContextualizedName, iterator_to_value_set, iterate_values
 from jedi.inference.lazy_value import LazyTreeContext
 from jedi.inference import compiled
@@ -46,7 +46,7 @@ def _limit_value_infers(func):
 infer_state.inferred_element_counts[n] += 1
 if infer_state.inferred_element_counts[n] > 300:
 debug.warning('In value %s there were too many inferences.', n)
-return NO_CONTEXTS
+return NO_VALUES
 except KeyError:
 infer_state.inferred_element_counts[n] = 1
 return func(value, *args, **kwargs)
@@ -55,7 +55,7 @@ def _limit_value_infers(func):


 def _py__stop_iteration_returns(generators):
-results = NO_CONTEXTS
+results = NO_VALUES
 for generator in generators:
 try:
 method = generator.py__stop_iteration_returns
@@ -143,7 +143,7 @@ def infer_node(value, element):
 return generators.py__stop_iteration_returns()

 # Generator.send() is not implemented.
-return NO_CONTEXTS
+return NO_VALUES
 elif typ == 'namedexpr_test':
 return infer_node(value, element.children[2])
 else:
@@ -210,11 +210,11 @@ def infer_atom(value, atom):
 return ContextSet([compiled.builtin_from_name(value.infer_state, atom.value)])
 elif atom.value == 'print':
 # print e.g. could be inferred like this in Python 2.7
-return NO_CONTEXTS
+return NO_VALUES
 elif atom.value == 'yield':
 # Contrary to yield from, yield can just appear alone to return a
 # value when used with `.send()`.
-return NO_CONTEXTS
+return NO_VALUES
 assert False, 'Cannot infer the keyword %s' % atom

 elif isinstance(atom, tree.Literal):
@@ -287,7 +287,7 @@ def infer_expr_stmt(value, stmt, seek_name=None):

 if allowed:
 return _infer_expr_stmt(value, stmt, seek_name)
-return NO_CONTEXTS
+return NO_VALUES


 @debug.increase_indent
@@ -385,7 +385,7 @@ def infer_factor(value_set, operator):
 def _literals_to_types(infer_state, result):
 # Changes literals ('a', 1, 1.0, etc) to its type instances (str(),
 # int(), float(), etc).
-new_result = NO_CONTEXTS
+new_result = NO_VALUES
 for typ in result:
 if is_literal(typ):
 # Literals are only valid as long as the operations are
@@ -400,7 +400,7 @@ def _literals_to_types(infer_state, result):
 def _infer_comparison(infer_state, value, left_values, operator, right_values):
 if not left_values or not right_values:
 # illegal slices e.g. cause left/right_result to be None
-result = (left_values or NO_CONTEXTS) | (right_values or NO_CONTEXTS)
+result = (left_values or NO_VALUES) | (right_values or NO_VALUES)
 return _literals_to_types(infer_state, result)
 else:
 # I don't think there's a reasonable chance that a string
@@ -512,7 +512,7 @@ def _infer_comparison_part(infer_state, value, left, operator, right):

 return ContextSet([_bool_to_value(infer_state, True), _bool_to_value(infer_state, False)])
 elif str_operator == 'in':
-return NO_CONTEXTS
+return NO_VALUES

 def check(obj):
 """Checks if a Jedi object is either a float or an int."""
@@ -548,7 +548,7 @@ def _remove_statements(infer_state, value, stmt, name):

 @plugin_manager.decorate()
 def tree_name_to_values(infer_state, value, tree_name):
-value_set = NO_CONTEXTS
+value_set = NO_VALUES
 module_node = value.get_root_value().tree_node
 # First check for annotations, like: `foo: int = 3`
 if module_node is not None:
@@ -620,7 +620,7 @@ def tree_name_to_values(infer_state, value, tree_name):
 exceptions = value.infer_node(tree_name.get_previous_sibling().get_previous_sibling())
 types = exceptions.execute_with_values()
 elif node.type == 'param':
-types = NO_CONTEXTS
+types = NO_VALUES
 else:
 raise ValueError("Should not happen. type: %s" % typ)
 return types
@@ -684,7 +684,7 @@ def check_tuple_assignments(infer_state, valueualized_name, value_set):
 iterated = value_set.iterate(cn)
 if isinstance(index, slice):
 # For no star unpacking is not possible.
-return NO_CONTEXTS
+return NO_VALUES
 for _ in range(index + 1):
 try:
 lazy_value = next(iterated)
@@ -693,7 +693,7 @@ def check_tuple_assignments(infer_state, valueualized_name, value_set):
 # would allow this loop to run for a very long time if the
 # index number is high. Therefore break if the loop is
 # finished.
-return NO_CONTEXTS
+return NO_VALUES
 value_set = lazy_value.infer()
 return value_set

@@ -12,7 +12,7 @@ from jedi.inference.signature import TreeSignature
 from jedi.inference.arguments import AnonymousArguments
 from jedi.inference.filters import ParserTreeFilter, FunctionExecutionFilter
 from jedi.inference.names import ContextName, AbstractNameDefinition, ParamName
-from jedi.inference.base_value import ContextualizedNode, NO_CONTEXTS, \
+from jedi.inference.base_value import ContextualizedNode, NO_VALUES, \
 ContextSet, TreeContext, ContextWrapper
 from jedi.inference.lazy_value import LazyKnownContexts, LazyKnownContext, \
 LazyTreeContext
@@ -180,7 +180,7 @@ class FunctionExecutionContext(TreeContext):
 self.function_value = function_value
 self.var_args = var_args

-@infer_state_method_cache(default=NO_CONTEXTS)
+@infer_state_method_cache(default=NO_VALUES)
 @recursion.execution_recursion_decorator()
 def get_return_values(self, check_yields=False):
 funcdef = self.tree_node
@@ -188,7 +188,7 @@ class FunctionExecutionContext(TreeContext):
 return self.infer_node(funcdef.children[-1])

 if check_yields:
-value_set = NO_CONTEXTS
+value_set = NO_VALUES
 returns = get_yield_exprs(self.infer_state, funcdef)
 else:
 returns = funcdef.iter_return_stmts()
@@ -331,13 +331,13 @@ class FunctionExecutionContext(TreeContext):
 if is_coroutine:
 if is_generator:
 if infer_state.environment.version_info < (3, 6):
-return NO_CONTEXTS
+return NO_VALUES
 async_generator_classes = infer_state.typing_module \
 .py__getattribute__('AsyncGenerator')

 yield_values = self.merge_yield_values(is_async=True)
 # The contravariant doesn't seem to be defined.
-generics = (yield_values.py__class__(), NO_CONTEXTS)
+generics = (yield_values.py__class__(), NO_VALUES)
 return ContextSet(
 # In Python 3.6 AsyncGenerator is still a class.
 GenericClass(c, generics)
@@ -345,11 +345,11 @@ class FunctionExecutionContext(TreeContext):
 ).execute_annotation()
 else:
 if infer_state.environment.version_info < (3, 5):
-return NO_CONTEXTS
+return NO_VALUES
 async_classes = infer_state.typing_module.py__getattribute__('Coroutine')
 return_values = self.get_return_values()
 # Only the first generic is relevant.
-generics = (return_values.py__class__(), NO_CONTEXTS, NO_CONTEXTS)
+generics = (return_values.py__class__(), NO_VALUES, NO_VALUES)
 return ContextSet(
 GenericClass(c, generics) for c in async_classes
 ).execute_annotation()
@@ -368,7 +368,7 @@ class OverloadedFunctionContext(FunctionMixin, ContextWrapper):
 def py__call__(self, arguments):
 debug.dbg("Execute overloaded function %s", self._wrapped_value, color='BLUE')
 function_executions = []
-value_set = NO_CONTEXTS
+value_set = NO_VALUES
 matched = False
 for f in self._overloaded_functions:
 function_execution = f.get_function_execution(arguments)
@@ -382,7 +382,7 @@ class OverloadedFunctionContext(FunctionMixin, ContextWrapper):

 if self.infer_state.is_analysis:
 # In this case we want precision.
-return NO_CONTEXTS
+return NO_VALUES
 return ContextSet.from_sets(fe.infer() for fe in function_executions)

 def get_signature_functions(self):

@@ -7,7 +7,7 @@ from jedi.inference.compiled.value import CompiledObjectFilter
 from jedi.inference.helpers import values_from_qualified_names
 from jedi.inference.filters import AbstractFilter
 from jedi.inference.names import ContextName, TreeNameDefinition
-from jedi.inference.base_value import Context, NO_CONTEXTS, ContextSet, \
+from jedi.inference.base_value import Context, NO_VALUES, ContextSet, \
 iterator_to_value_set, ContextWrapper
 from jedi.inference.lazy_value import LazyKnownContext, LazyKnownContexts
 from jedi.inference.cache import infer_state_method_cache
@@ -249,7 +249,7 @@ class CompiledInstance(AbstractInstanceContext):
 def get_first_non_keyword_argument_values(self):
 key, lazy_value = next(self._original_var_args.unpack(), ('', None))
 if key is not None:
-return NO_CONTEXTS
+return NO_VALUES

 return lazy_value.infer()

@@ -37,7 +37,7 @@ from jedi.inference.utils import safe_property, to_list
|
|||||||
from jedi.inference.cache import infer_state_method_cache
|
from jedi.inference.cache import infer_state_method_cache
|
||||||
from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \
|
from jedi.inference.filters import ParserTreeFilter, LazyAttributeOverwrite, \
|
||||||
publish_method
|
publish_method
|
||||||
from jedi.inference.base_value import ContextSet, Context, NO_CONTEXTS, \
|
from jedi.inference.base_value import ContextSet, Context, NO_VALUES, \
|
||||||
TreeContext, ContextualizedNode, iterate_values, HelperContextMixin, _sentinel
|
TreeContext, ContextualizedNode, iterate_values, HelperContextMixin, _sentinel
|
||||||
from jedi.parser_utils import get_sync_comp_fors
|
from jedi.parser_utils import get_sync_comp_fors
|
||||||
|
|
||||||
@@ -381,7 +381,7 @@ class SequenceLiteralContext(Sequence):
|
|||||||
"""
|
"""
|
||||||
if self.array_type == u'dict':
|
if self.array_type == u'dict':
|
||||||
# Get keys.
|
# Get keys.
|
||||||
types = NO_CONTEXTS
|
types = NO_VALUES
|
||||||
for k, _ in self.get_tree_entries():
|
for k, _ in self.get_tree_entries():
|
||||||
types |= self._defining_value.infer_node(k)
|
types |= self._defining_value.infer_node(k)
|
||||||
# We don't know which dict index comes first, therefore always
|
# We don't know which dict index comes first, therefore always
|
||||||
@@ -652,12 +652,12 @@ def check_array_additions(value, sequence):
|
|||||||
""" Just a mapper function for the internal _check_array_additions """
|
""" Just a mapper function for the internal _check_array_additions """
|
||||||
if sequence.array_type not in ('list', 'set'):
|
if sequence.array_type not in ('list', 'set'):
|
||||||
# TODO also check for dict updates
|
# TODO also check for dict updates
|
||||||
return NO_CONTEXTS
|
return NO_VALUES
|
||||||
|
|
||||||
return _check_array_additions(value, sequence)
|
return _check_array_additions(value, sequence)
|
||||||
|
|
||||||
|
|
||||||
@infer_state_method_cache(default=NO_CONTEXTS)
|
@infer_state_method_cache(default=NO_VALUES)
|
||||||
@debug.increase_indent
|
@debug.increase_indent
|
||||||
def _check_array_additions(value, sequence):
|
def _check_array_additions(value, sequence):
|
||||||
"""
|
"""
|
||||||
@@ -672,7 +672,7 @@ def _check_array_additions(value, sequence):
     module_value = value.get_root_value()
     if not settings.dynamic_array_additions or isinstance(module_value, compiled.CompiledObject):
         debug.dbg('Dynamic array search aborted.', color='MAGENTA')
-        return NO_CONTEXTS
+        return NO_VALUES
 
     def find_additions(value, arglist, add_name):
         params = list(arguments.TreeArguments(value.infer_state, value, arglist).unpack())
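The infer_state_method_cache(default=NO_VALUES) decorator in the hunk above (and again in a later hunk) takes an empty value set as its default. A plausible reading, and an assumption on my part rather than something this diff states, is that the default is what re-entrant calls see while the real result is still being computed, so recursive inference bottoms out at "no values" instead of looping. A self-contained sketch of a memoizing decorator with that behaviour; the names and details are illustrative, not jedi's cache implementation.

import functools

def method_cache(default):
    # Assumed semantics: plant `default` in the memo before computing, so a
    # recursive call on the same key gets it instead of recursing forever.
    def decorator(func):
        memo = {}

        @functools.wraps(func)
        def wrapper(*args):
            if args in memo:
                return memo[args]
            memo[args] = default
            result = func(*args)
            memo[args] = result
            return result
        return wrapper
    return decorator


NO_VALUES = frozenset()


@method_cache(default=NO_VALUES)
def infer(node):
    # The self-referential call terminates with NO_VALUES on the first pass.
    return infer(node) | frozenset([node])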
@@ -47,7 +47,7 @@ from jedi.inference.filters import ParserTreeFilter
 from jedi.inference.names import TreeNameDefinition, ContextName
 from jedi.inference.arguments import unpack_arglist, ValuesArguments
 from jedi.inference.base_value import ContextSet, iterator_to_value_set, \
-    NO_CONTEXTS
+    NO_VALUES
 from jedi.inference.value.function import FunctionAndClassBase
 from jedi.plugins import plugin_manager
 
@@ -307,7 +307,7 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
             a different type var name.
             """
             for type_var in self.list_type_vars():
-                yield type_var_dict.get(type_var.py__name__(), NO_CONTEXTS)
+                yield type_var_dict.get(type_var.py__name__(), NO_VALUES)
 
         if type_var_dict:
             return ContextSet([GenericClass(
@@ -321,7 +321,7 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
         debug.dbg('Unprocessed metaclass %s', metaclass)
         return []
 
-    @infer_state_method_cache(default=NO_CONTEXTS)
+    @infer_state_method_cache(default=NO_VALUES)
     def get_metaclasses(self):
         args = self._get_bases_arguments()
         if args is not None:
@@ -337,4 +337,4 @@ class ClassContext(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBa
                 values = value.get_metaclasses()
                 if values:
                     return values
-        return NO_CONTEXTS
+        return NO_VALUES
@@ -22,7 +22,7 @@ from jedi.inference import analysis
 from jedi.inference import compiled
 from jedi.inference.value.instance import BoundMethod, InstanceArguments
 from jedi.inference.base_value import ContextualizedNode, \
-    NO_CONTEXTS, ContextSet, ContextWrapper, LazyContextWrapper
+    NO_VALUES, ContextSet, ContextWrapper, LazyContextWrapper
 from jedi.inference.value import ClassContext, ModuleContext, \
     FunctionExecutionContext
 from jedi.inference.value.klass import ClassMixin
@@ -152,7 +152,7 @@ def _follow_param(infer_state, arguments, index):
     try:
         key, lazy_value = list(arguments.unpack())[index]
     except IndexError:
-        return NO_CONTEXTS
+        return NO_VALUES
     else:
         return lazy_value.infer()
 
@@ -172,7 +172,7 @@ def argument_clinic(string, want_obj=False, want_value=False,
             callback = kwargs.pop('callback')
             assert not kwargs  # Python 2...
             debug.dbg('builtin start %s' % obj, color='MAGENTA')
-            result = NO_CONTEXTS
+            result = NO_VALUES
             if want_value:
                 kwargs['value'] = arguments.value
             if want_obj:
@@ -197,7 +197,7 @@ def builtins_property(objects, types, obj, arguments):
     key, lazy_value = next(property_args, (None, None))
     if key is not None or lazy_value is None:
         debug.warning('property expected a first param, not %s', arguments)
-        return NO_CONTEXTS
+        return NO_VALUES
 
     return lazy_value.infer().py__call__(arguments=ValuesArguments([objects]))
 
@@ -231,14 +231,14 @@ def builtins_getattr(objects, names, defaults=None):
                 continue
             else:
                 return obj.py__getattribute__(force_unicode(string))
-    return NO_CONTEXTS
+    return NO_VALUES
 
 
 @argument_clinic('object[, bases, dict], /')
 def builtins_type(objects, bases, dicts):
     if bases or dicts:
         # It's a type creation... maybe someday...
-        return NO_CONTEXTS
+        return NO_VALUES
     else:
         return objects.py__class__()
 
@@ -276,7 +276,7 @@ def builtins_super(types, objects, value):
         # class, it can be an anecestor from long ago.
         return ContextSet({SuperInstance(instance.infer_state, instance)})
 
-    return NO_CONTEXTS
+    return NO_VALUES
 
 
 class ReversedObject(AttributeOverwrite):
@@ -443,7 +443,7 @@ def collections_namedtuple(obj, arguments, callback):
     # TODO here we only use one of the types, we should use all.
     param_values = _follow_param(infer_state, arguments, 1)
     if not param_values:
-        return NO_CONTEXTS
+        return NO_VALUES
     _fields = list(param_values)[0]
     string = get_str_or_none(_fields)
     if string is not None:
@@ -456,7 +456,7 @@ def collections_namedtuple(obj, arguments, callback):
         ]
         fields = [f for f in fields if f is not None]
     else:
-        return NO_CONTEXTS
+        return NO_VALUES
 
     # Build source code
     code = _NAMEDTUPLE_CLASS_TEMPLATE.format(
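The "if not param_values:" guard and the bare "return NO_VALUES" in the else branch above work because an empty value set is falsy: callers signal "nothing could be inferred" by handing back the same empty set rather than None. A small self-contained sketch of that control flow, with a plain frozenset standing in for jedi's value set and hypothetical helper names:

NO_VALUES = frozenset()  # stand-in sentinel, not jedi's actual object

def follow_param(params, index):
    # Hand back the empty set instead of raising or returning None when the
    # requested argument is missing, mirroring _follow_param in the diff.
    try:
        value = params[index]
    except IndexError:
        return NO_VALUES
    return frozenset([value])


def infer_namedtuple_fields(params):
    param_values = follow_param(params, 1)
    if not param_values:  # an empty set is falsy, so the bail-out is cheap
        return NO_VALUES
    return param_values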
@@ -515,7 +515,7 @@ class PartialObject(object):
     def py__call__(self, arguments):
         func = self._get_function(self._arguments.unpack())
         if func is None:
-            return NO_CONTEXTS
+            return NO_VALUES
 
         return func.execute(
             MergedPartialArguments(self._arguments, arguments)
@@ -576,7 +576,7 @@ def _dataclass(obj, arguments, callback):
             return ContextSet([DataclassWrapper(c)])
         else:
             return ContextSet([obj])
-    return NO_CONTEXTS
+    return NO_VALUES
 
 
 class DataclassWrapper(ContextWrapper, ClassMixin):
@@ -625,7 +625,7 @@ class DataclassParamName(BaseTreeParamName):
 
     def infer(self):
        if self.annotation_node is None:
-            return NO_CONTEXTS
+            return NO_VALUES
        else:
            return self.parent_value.infer_node(self.annotation_node)
 
@@ -637,7 +637,7 @@ class ItemGetterCallable(ContextWrapper):
 
     @repack_with_argument_clinic('item, /')
     def py__call__(self, item_value_set):
-        value_set = NO_CONTEXTS
+        value_set = NO_VALUES
         for args_value in self._args_value_set:
             lazy_values = list(args_value.py__iter__())
             if len(lazy_values) == 1:
@@ -745,8 +745,8 @@ _implemented = {
         'deepcopy': _return_first_param,
     },
     'json': {
-        'load': lambda obj, arguments, callback: NO_CONTEXTS,
-        'loads': lambda obj, arguments, callback: NO_CONTEXTS,
+        'load': lambda obj, arguments, callback: NO_VALUES,
+        'loads': lambda obj, arguments, callback: NO_VALUES,
     },
     'collections': {
         'namedtuple': collections_namedtuple,
@@ -773,7 +773,7 @@ _implemented = {
         # The _alias function just leads to some annoying type inference.
         # Therefore, just make it return nothing, which leads to the stubs
         # being used instead. This only matters for 3.7+.
-        '_alias': lambda obj, arguments, callback: NO_CONTEXTS,
+        '_alias': lambda obj, arguments, callback: NO_VALUES,
     },
     'dataclasses': {
         # For now this works at least better than Jedi trying to understand it.
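The _implemented table these last hunks touch maps module names to function names to override callables, and an override that returns the empty value set is how an entry opts out of inferring anything (which, per the comment above, lets the stubs be used instead). A schematic, hypothetical sketch of that dispatch shape, not jedi's actual plugin machinery:

NO_VALUES = frozenset()  # stand-in for jedi.inference.base_value.NO_VALUES

# Hypothetical override table in the spirit of _implemented.
overrides = {
    'json': {
        # Deliberately infer nothing; callers fall back to other sources.
        'load': lambda obj, arguments, callback: NO_VALUES,
        'loads': lambda obj, arguments, callback: NO_VALUES,
    },
}


def execute_override(module_name, func_name, obj, arguments, callback):
    # None means "no override registered"; an empty set from a handler means
    # "the override ran but inferred nothing".
    try:
        handler = overrides[module_name][func_name]
    except KeyError:
        return None
    return handler(obj, arguments, callback)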