forked from VimPlug/jedi
Remove unicode literals from code base
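The u'' string prefix is a no-op on Python 3 (string literals are already unicode), so dropping it from every literal changes nothing at runtime; it only removes a Python 2 leftover. A minimal sanity check, assuming any Python 3 interpreter:

    >>> u'jedi' == 'jedi'
    True
    >>> type(u'jedi') is str
    True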
docs/conf.py | 16
@@ -41,8 +41,8 @@ source_encoding = 'utf-8'
 master_doc = 'index'
 
 # General information about the project.
-project = u'Jedi'
-copyright = u'jedi contributors'
+project = 'Jedi'
+copyright = 'jedi contributors'
 
 import jedi
 from jedi.utils import version_info
@@ -203,8 +203,8 @@ latex_elements = {
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-    ('index', 'Jedi.tex', u'Jedi Documentation',
-     u'Jedi contributors', 'manual'),
+    ('index', 'Jedi.tex', 'Jedi Documentation',
+     'Jedi contributors', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
@@ -233,8 +233,8 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    ('index', 'jedi', u'Jedi Documentation',
-     [u'Jedi contributors'], 1)
+    ('index', 'jedi', 'Jedi Documentation',
+     ['Jedi contributors'], 1)
 ]
 
 # If true, show URL addresses after external links.
@@ -247,8 +247,8 @@ man_pages = [
 # (source start file, target name, title, author,
 # dir menu entry, description, category)
 texinfo_documents = [
-    ('index', 'Jedi', u'Jedi Documentation',
-     u'Jedi contributors', 'Jedi', 'Awesome Python autocompletion library.',
+    ('index', 'Jedi', 'Jedi Documentation',
+     'Jedi contributors', 'Jedi', 'Awesome Python autocompletion library.',
      'Miscellaneous'),
 ]
 
@@ -576,8 +576,8 @@ def _complete_getattr(user_context, instance):
     will write it like this anyway and the other ones, well they are just
     out of luck I guess :) ~dave.
     """
-    names = (instance.get_function_slot_names(u'__getattr__')
-             or instance.get_function_slot_names(u'__getattribute__'))
+    names = (instance.get_function_slot_names('__getattr__')
+             or instance.get_function_slot_names('__getattribute__'))
     functions = ValueSet.from_sets(
         name.infer()
         for name in names
@@ -7,7 +7,7 @@ from jedi.inference.helpers import get_str_or_none
 
 
 class PathName(StringName):
-    api_type = u'path'
+    api_type = 'path'
 
 
 def complete_file_name(inference_state, module_context, start_leaf, quote, string,
@@ -7,7 +7,7 @@ from pydoc_data import topics as pydoc_topics
 
 
 class KeywordName(AbstractArbitraryName):
-    api_type = u'keyword'
+    api_type = 'keyword'
 
     def py__doc__(self):
         return imitate_pydoc(self.string_name)
@@ -18,7 +18,7 @@ _sentinel = object()
 
 
 class StringName(AbstractArbitraryName):
-    api_type = u'string'
+    api_type = 'string'
     is_value_name = False
 
 
@@ -123,14 +123,14 @@ class InferenceState(object):
     @property
     @inference_state_function_cache()
     def builtins_module(self):
-        module_name = u'builtins'
+        module_name = 'builtins'
         builtins_module, = self.import_module((module_name,), sys_path=())
         return builtins_module
 
     @property
     @inference_state_function_cache()
     def typing_module(self):
-        typing_module, = self.import_module((u'typing',))
+        typing_module, = self.import_module(('typing',))
         return typing_module
 
     def reset_recursion_limitations(self):
@@ -93,7 +93,7 @@ class HelperValueMixin(object):
         return values
 
     def py__await__(self):
-        await_value_set = self.py__getattribute__(u"__await__")
+        await_value_set = self.py__getattribute__("__await__")
         if not await_value_set:
             debug.warning('Tried to run __await__ on value %s', self)
         return await_value_set.execute_with_values()
@@ -53,7 +53,7 @@ def create_simple_object(inference_state, obj):
 
 
 def get_string_value_set(inference_state):
-    return builtin_from_name(inference_state, u'str').execute_with_values()
+    return builtin_from_name(inference_state, 'str').execute_with_values()
 
 
 def load_module(inference_state, dotted_name, **kwargs):
@@ -155,14 +155,14 @@ def create_access_path(inference_state, obj):
 
 def get_api_type(obj):
     if inspect.isclass(obj):
-        return u'class'
+        return 'class'
     elif inspect.ismodule(obj):
-        return u'module'
+        return 'module'
     elif inspect.isbuiltin(obj) or inspect.ismethod(obj) \
             or inspect.ismethoddescriptor(obj) or inspect.isfunction(obj):
-        return u'function'
+        return 'function'
     # Everything else...
-    return u'instance'
+    return 'instance'
 
 
 class DirectObjectAccess(object):
@@ -57,7 +57,7 @@ class CompiledValue(Value):
             ).execute_annotation()
 
         try:
-            self.access_handle.getattr_paths(u'__call__')
+            self.access_handle.getattr_paths('__call__')
         except AttributeError:
             return super(CompiledValue, self).py__call__(arguments)
         else:
@@ -500,7 +500,7 @@ class CompiledValueFilter(AbstractFilter):
 
         # ``dir`` doesn't include the type names.
         if not self.is_instance and needs_type_completions:
-            for filter in builtin_from_name(self._inference_state, u'type').get_filters():
+            for filter in builtin_from_name(self._inference_state, 'type').get_filters():
                 names += filter.values()
         return names
 
@@ -516,11 +516,11 @@ class CompiledValueFilter(AbstractFilter):
 
 
 docstr_defaults = {
-    'floating point number': u'float',
-    'character': u'str',
-    'integer': u'int',
-    'dictionary': u'dict',
-    'string': u'str',
+    'floating point number': 'float',
+    'character': 'str',
+    'integer': 'int',
+    'dictionary': 'dict',
+    'string': 'str',
 }
 
 
@@ -550,7 +550,7 @@ def _parse_function_doc(doc):
         # UnboundLocalError for undefined end in last line
         debug.dbg('no brackets found - no param')
         end = 0
-        param_str = u''
+        param_str = ''
     else:
         # remove square brackets, that show an optional param ( = None)
         def change_options(m):
@@ -568,9 +568,9 @@ def _parse_function_doc(doc):
     param_str = param_str.replace('-', '_')  # see: isinstance.__doc__
 
     # parse return value
-    r = re.search(u'-[>-]* ', doc[end:end + 7])
+    r = re.search('-[>-]* ', doc[end:end + 7])
     if r is None:
-        ret = u''
+        ret = ''
     else:
         index = end + r.end()
         # get result type, which can contain newlines
@@ -245,7 +245,7 @@ class MergedFilter(object):
 
 class _BuiltinMappedMethod(ValueWrapper):
     """``Generator.__next__`` ``dict.values`` methods and so on."""
-    api_type = u'function'
+    api_type = 'function'
 
     def __init__(self, value, method, builtin_func):
         super(_BuiltinMappedMethod, self).__init__(builtin_func)
@@ -263,7 +263,7 @@ class SpecialMethodFilter(DictFilter):
     classes like Generator (for __next__, etc).
     """
     class SpecialMethodName(AbstractNameDefinition):
-        api_type = u'function'
+        api_type = 'function'
 
         def __init__(self, parent_context, string_name, callable_, builtin_value):
             self.parent_context = parent_context
@@ -53,7 +53,7 @@ def _infer_annotation_string(context, string, index=None):
     value_set = context.infer_node(node)
     if index is not None:
         value_set = value_set.filter(
-            lambda value: value.array_type == u'tuple'  # noqa
+            lambda value: value.array_type == 'tuple'  # noqa
             and len(list(value.py__iter__())) >= index
         ).py__simple_getitem__(index)
     return value_set
@@ -230,7 +230,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
                 else:
                     continue
 
-                if py_class.api_type != u'class':
+                if py_class.api_type != 'class':
                     # Functions & modules don't have an MRO and we're not
                     # expecting a Callable (those are handled separately within
                     # TypingClassValueWithIndex).
@@ -309,7 +309,7 @@ class _GenericInstanceWrapper(ValueWrapper):
                 except IndexError:
                     pass
             elif cls.py__name__() == 'Iterator':
-                return ValueSet([builtin_from_name(self.inference_state, u'None')])
+                return ValueSet([builtin_from_name(self.inference_state, 'None')])
         return self._wrapped_value.py__stop_iteration_returns()
 
     def get_type_hint(self, add_class_info=True):
@@ -326,7 +326,7 @@ class _PseudoTreeNameClass(Value):
     this class. Essentially this class makes it possible to goto that `Tuple`
     name, without affecting anything else negatively.
     """
-    api_type = u'class'
+    api_type = 'class'
 
     def __init__(self, parent_context, tree_name):
         super(_PseudoTreeNameClass, self).__init__(
@@ -356,7 +356,7 @@ class _PseudoTreeNameClass(Value):
     def py__class__(self):
         # This might not be 100% correct, but it is good enough. The details of
         # the typing library are not really an issue for Jedi.
-        return builtin_from_name(self.inference_state, u'type')
+        return builtin_from_name(self.inference_state, 'type')
 
     @property
     def name(self):
@@ -423,7 +423,7 @@ class BaseTypingInstance(LazyValueWrapper):
         return ValueName(self, self._tree_name)
 
     def _get_wrapped_value(self):
-        object_, = builtin_from_name(self.inference_state, u'object').execute_annotation()
+        object_, = builtin_from_name(self.inference_state, 'object').execute_annotation()
         return object_
 
     def __repr__(self):
@@ -109,7 +109,7 @@ def import_module_decorator(func):
             # ``os``.
             python_value_set = ValueSet.from_sets(
                 func(inference_state, (n,), None, sys_path,)
-                for n in [u'posixpath', u'ntpath', u'macpath', u'os2emxpath']
+                for n in ['posixpath', 'ntpath', 'macpath', 'os2emxpath']
             )
         else:
             python_value_set = ValueSet.from_sets(
@@ -71,7 +71,7 @@ class TypingModuleName(NameWrapper):
         elif name == 'TYPE_CHECKING':
             # This is needed for e.g. imports that are only available for type
             # checking or are in cycles. The user can then check this variable.
-            yield builtin_from_name(inference_state, u'True')
+            yield builtin_from_name(inference_state, 'True')
         elif name == 'overload':
             yield OverloadFunction.create_cached(
                 inference_state, self.parent_context, self.tree_name)
@@ -110,7 +110,7 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
             # Optional is basically just saying it's either None or the actual
             # type.
             return self.gather_annotation_classes().execute_annotation() \
-                | ValueSet([builtin_from_name(self.inference_state, u'None')])
+                | ValueSet([builtin_from_name(self.inference_state, 'None')])
         elif string_name == 'Type':
             # The type is actually already given in the index_value
             return self._generics_manager[0]
@@ -153,7 +153,7 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
             # Optional[T] is equivalent to Union[T, None]. In Jedi unions
             # are represented by members within a ValueSet, so we extract
             # the T from the Optional[T] by removing the None value.
-            none = builtin_from_name(self.inference_state, u'None')
+            none = builtin_from_name(self.inference_state, 'None')
             return annotation_generics[0].infer_type_vars(
                 value_set.filter(lambda x: x != none),
             )
@@ -372,7 +372,7 @@ class _ParamMixin(object):
 
 
 class ParamNameInterface(_ParamMixin):
-    api_type = u'param'
+    api_type = 'param'
 
     def get_kind(self):
         raise NotImplementedError
@@ -227,7 +227,7 @@ def _infer_node(context, element):
             if element.value != '...':
                 origin = element.parent
                 raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin))
-            return ValueSet([compiled.builtin_from_name(inference_state, u'Ellipsis')])
+            return ValueSet([compiled.builtin_from_name(inference_state, 'Ellipsis')])
         elif typ == 'dotted_name':
             value_set = infer_atom(context, element.children[0])
             for next_name in element.children[2::2]:
@@ -319,7 +319,7 @@ def infer_atom(context, atom):
         value_set = infer_atom(context, atom.children[0])
         for string in atom.children[1:]:
             right = infer_atom(context, string)
-            value_set = _infer_comparison(context, value_set, u'+', right)
+            value_set = _infer_comparison(context, value_set, '+', right)
         return value_set
     elif atom.type == 'fstring':
         return compiled.get_string_value_set(state)
@@ -737,7 +737,7 @@ def tree_name_to_values(inference_state, context, tree_name):
         types = infer_expr_stmt(context, node, tree_name)
     elif typ == 'with_stmt':
         value_managers = context.infer_node(node.get_test_node_from_name(tree_name))
-        enter_methods = value_managers.py__getattribute__(u'__enter__')
+        enter_methods = value_managers.py__getattribute__('__enter__')
         return enter_methods.execute_with_values()
     elif typ in ('import_from', 'import_name'):
         types = imports.infer_import(context, tree_name)
@@ -25,7 +25,7 @@ from jedi.inference.gradual.generics import TupleGenericManager
 
 class LambdaName(AbstractNameDefinition):
     string_name = '<lambda>'
-    api_type = u'function'
+    api_type = 'function'
 
     def __init__(self, lambda_value):
         self._lambda_value = lambda_value
@@ -54,7 +54,7 @@ class FunctionAndClassBase(TreeValue):
 
 
 class FunctionMixin(object):
-    api_type = u'function'
+    api_type = 'function'
 
     def get_filters(self, origin_scope=None):
         cls = self.py__class__()
@@ -160,7 +160,7 @@ class FunctionValue(FunctionMixin, FunctionAndClassBase, metaclass=CachedMetaCla
         return function
 
     def py__class__(self):
-        c, = values_from_qualified_names(self.inference_state, u'types', u'FunctionType')
+        c, = values_from_qualified_names(self.inference_state, 'types', 'FunctionType')
         return c
 
     def get_default_param_context(self):
@@ -237,7 +237,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
                 try:
                     children = r.children
                 except AttributeError:
-                    ctx = compiled.builtin_from_name(self.inference_state, u'None')
+                    ctx = compiled.builtin_from_name(self.inference_state, 'None')
                     value_set |= ValueSet([ctx])
                 else:
                     value_set |= self.infer_node(children[1])
@@ -249,7 +249,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
     def _get_yield_lazy_value(self, yield_expr):
         if yield_expr.type == 'keyword':
             # `yield` just yields None.
-            ctx = compiled.builtin_from_name(self.inference_state, u'None')
+            ctx = compiled.builtin_from_name(self.inference_state, 'None')
             yield LazyKnownValue(ctx)
             return
 
@@ -88,7 +88,7 @@ class MethodExecutionContext(FunctionExecutionContext):
 
 
 class AbstractInstanceValue(Value):
-    api_type = u'instance'
+    api_type = 'instance'
 
     def __init__(self, inference_state, parent_context, class_value):
         super(AbstractInstanceValue, self).__init__(inference_state, parent_context)
@@ -234,12 +234,12 @@ class _BaseTreeInstance(AbstractInstanceValue):
         # other way around.
         if is_big_annoying_library(self.parent_context):
             return NO_VALUES
-        names = (self.get_function_slot_names(u'__getattr__')
-                 or self.get_function_slot_names(u'__getattribute__'))
+        names = (self.get_function_slot_names('__getattr__')
+                 or self.get_function_slot_names('__getattribute__'))
         return self.execute_function_slots(names, name)
 
     def py__getitem__(self, index_value_set, contextualized_node):
-        names = self.get_function_slot_names(u'__getitem__')
+        names = self.get_function_slot_names('__getitem__')
         if not names:
             return super(_BaseTreeInstance, self).py__getitem__(
                 index_value_set,
@@ -250,7 +250,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
         return ValueSet.from_sets(name.infer().execute(args) for name in names)
 
     def py__iter__(self, contextualized_node=None):
-        iter_slot_names = self.get_function_slot_names(u'__iter__')
+        iter_slot_names = self.get_function_slot_names('__iter__')
         if not iter_slot_names:
             return super(_BaseTreeInstance, self).py__iter__(contextualized_node)
 
@@ -258,7 +258,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
         for generator in self.execute_function_slots(iter_slot_names):
             if generator.is_instance() and not generator.is_compiled():
                 # `__next__` logic.
-                name = u'__next__'
+                name = '__next__'
                 next_slot_names = generator.get_function_slot_names(name)
                 if next_slot_names:
                     yield LazyKnownValues(
@@ -272,7 +272,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
         return iterate()
 
     def py__call__(self, arguments):
-        names = self.get_function_slot_names(u'__call__')
+        names = self.get_function_slot_names('__call__')
         if not names:
             # Means the Instance is not callable.
             return super(_BaseTreeInstance, self).py__call__(arguments)
@@ -290,10 +290,10 @@ class _BaseTreeInstance(AbstractInstanceValue):
             if result is not NotImplemented:
                 return result
 
-        names = self.get_function_slot_names(u'__get__')
+        names = self.get_function_slot_names('__get__')
         if names:
             if instance is None:
-                instance = compiled.builtin_from_name(self.inference_state, u'None')
+                instance = compiled.builtin_from_name(self.inference_state, 'None')
             return self.execute_function_slots(names, instance, class_value)
         else:
             return ValueSet([self])
@@ -461,7 +461,7 @@ class BoundMethod(FunctionMixin, ValueWrapper):
         )
 
     def py__class__(self):
-        c, = values_from_qualified_names(self.inference_state, u'types', u'MethodType')
+        c, = values_from_qualified_names(self.inference_state, 'types', 'MethodType')
         return c
 
     def _get_arguments(self, arguments):
@@ -21,7 +21,7 @@ from jedi.inference.value.dynamic_arrays import check_array_additions
 
 class IterableMixin(object):
     def py__stop_iteration_returns(self):
-        return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
+        return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
 
     # At the moment, safe values are simple values like "foo", 1 and not
     # lists/dicts. Therefore as a small speed optimization we can just do the
@@ -58,7 +58,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
         return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
 
     def py__stop_iteration_returns(self):
-        return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
+        return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
 
     @property
     def name(self):
@@ -173,7 +173,7 @@ class _DictMixin(object):
 
 
 class Sequence(LazyAttributeOverwrite, IterableMixin):
-    api_type = u'instance'
+    api_type = 'instance'
 
     @property
     def name(self):
@@ -219,7 +219,7 @@ class _BaseComprehension(ComprehensionMixin):
 
 
 class ListComprehension(_BaseComprehension, Sequence):
-    array_type = u'list'
+    array_type = 'list'
 
     def py__simple_getitem__(self, index):
         if isinstance(index, slice):
@@ -232,7 +232,7 @@ class ListComprehension(_BaseComprehension, Sequence):
 
 
 class SetComprehension(_BaseComprehension, Sequence):
-    array_type = u'set'
+    array_type = 'set'
 
 
 class GeneratorComprehension(_BaseComprehension, GeneratorBase):
@@ -250,7 +250,7 @@ class _DictKeyMixin(object):
 
 
 class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
-    array_type = u'dict'
+    array_type = 'dict'
 
     def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
         assert sync_comp_for_node.type == 'sync_comp_for'
@@ -307,9 +307,9 @@ class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
 
 class SequenceLiteralValue(Sequence):
     _TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist'
-    mapping = {'(': u'tuple',
-               '[': u'list',
-               '{': u'set'}
+    mapping = {'(': 'tuple',
+               '[': 'list',
+               '{': 'set'}
 
     def __init__(self, inference_state, defining_context, atom):
         super(SequenceLiteralValue, self).__init__(inference_state)
@@ -317,13 +317,13 @@ class SequenceLiteralValue(Sequence):
         self._defining_context = defining_context
 
         if self.atom.type in self._TUPLE_LIKE:
-            self.array_type = u'tuple'
+            self.array_type = 'tuple'
         else:
             self.array_type = SequenceLiteralValue.mapping[atom.children[0]]
             """The builtin name of the array (list, set, tuple or dict)."""
 
     def _get_generics(self):
-        if self.array_type == u'tuple':
+        if self.array_type == 'tuple':
             return tuple(x.infer().py__class__() for x in self.py__iter__())
         return super(SequenceLiteralValue, self)._get_generics()
 
@@ -415,7 +415,7 @@ class SequenceLiteralValue(Sequence):
 
 
 class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
-    array_type = u'dict'
+    array_type = 'dict'
 
     def __init__(self, inference_state, defining_context, atom):
         super(SequenceLiteralValue, self).__init__(inference_state)
@@ -427,7 +427,7 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
         compiled_value_index = compiled.create_simple_object(self.inference_state, index)
         for key, value in self.get_tree_entries():
             for k in self._defining_context.infer_node(key):
-                for key_v in k.execute_operation(compiled_value_index, u'=='):
+                for key_v in k.execute_operation(compiled_value_index, '=='):
                     if key_v.get_safe_value():
                         return self._defining_context.infer_node(value)
         raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
@@ -503,15 +503,15 @@ class _FakeSequence(Sequence):
 
 
 class FakeTuple(_FakeSequence):
-    array_type = u'tuple'
+    array_type = 'tuple'
 
 
 class FakeList(_FakeSequence):
-    array_type = u'tuple'
+    array_type = 'tuple'
 
 
 class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
-    array_type = u'dict'
+    array_type = 'dict'
 
     def __init__(self, inference_state, dct):
         super(FakeDict, self).__init__(inference_state)
@@ -144,7 +144,7 @@ class ClassMixin(object):
         return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)])
 
     def py__class__(self):
-        return compiled.builtin_from_name(self.inference_state, u'type')
+        return compiled.builtin_from_name(self.inference_state, 'type')
 
     @property
     def name(self):
@@ -204,7 +204,7 @@ class ClassMixin(object):
             )
         if not is_instance and include_type_when_class:
             from jedi.inference.compiled import builtin_from_name
-            type_ = builtin_from_name(self.inference_state, u'type')
+            type_ = builtin_from_name(self.inference_state, 'type')
             assert isinstance(type_, ClassValue)
             if type_ != self:
                 # We are not using execute_with_values here, because the
@@ -319,7 +319,7 @@ class ClassMixin(object):
 
 
 class ClassValue(ClassMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
-    api_type = u'class'
+    api_type = 'class'
 
     @inference_state_method_cache()
     def list_type_vars(self):
@@ -16,7 +16,7 @@ class _ModuleAttributeName(AbstractNameDefinition):
     """
     For module attributes like __file__, __str__ and so on.
     """
-    api_type = u'instance'
+    api_type = 'instance'
 
     def __init__(self, parent_module, string_name, string_value=None):
         self.parent_context = parent_module
@@ -70,7 +70,7 @@ class ModuleMixin(SubModuleDictMixin):
             yield star_filter
 
     def py__class__(self):
-        c, = values_from_qualified_names(self.inference_state, u'types', u'ModuleType')
+        c, = values_from_qualified_names(self.inference_state, 'types', 'ModuleType')
         return c
 
     def is_module(self):
@@ -134,7 +134,7 @@ class ModuleMixin(SubModuleDictMixin):
 
 
 class ModuleValue(ModuleMixin, TreeValue):
-    api_type = u'module'
+    api_type = 'module'
 
     def __init__(self, inference_state, module_node, code_lines, file_io=None,
                  string_names=None, is_package=False):
@@ -23,7 +23,7 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
     # Is a module like every other module, because if you import an empty
     # folder foobar it will be available as an object:
     # <module 'foobar' (namespace)>.
-    api_type = u'module'
+    api_type = 'module'
     parent_context = None
 
     def __init__(self, inference_state, string_names, paths):
@@ -6,14 +6,14 @@ def import_module(callback):
     def wrapper(inference_state, import_names, module_context, *args, **kwargs):
         if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
             # New style.
-            ipath = (u'flask_' + import_names[2]),
+            ipath = ('flask_' + import_names[2]),
             value_set = callback(inference_state, ipath, None, *args, **kwargs)
             if value_set:
                 return value_set
-            value_set = callback(inference_state, (u'flaskext',), None, *args, **kwargs)
+            value_set = callback(inference_state, ('flaskext',), None, *args, **kwargs)
             return callback(
                 inference_state,
-                (u'flaskext', import_names[2]),
+                ('flaskext', import_names[2]),
                 next(iter(value_set)),
                 *args, **kwargs
             )
@@ -421,7 +421,7 @@ def collections_namedtuple(value, arguments, callback):
     inference_state = value.inference_state
 
     # Process arguments
-    name = u'jedi_unknown_namedtuple'
+    name = 'jedi_unknown_namedtuple'
     for c in _follow_param(inference_state, arguments, 0):
         x = get_str_or_none(c)
         if x is not None:
@@ -451,7 +451,7 @@ def collections_namedtuple(value, arguments, callback):
         typename=name,
         field_names=tuple(fields),
         num_fields=len(fields),
-        arg_list=repr(tuple(fields)).replace("u'", "").replace("'", "")[1:-1],
+        arg_list=repr(tuple(fields)).replace("'", "")[1:-1],
         repr_fmt='',
         field_defs='\n'.join(_NAMEDTUPLE_FIELD_TEMPLATE.format(index=index, name=name)
                              for index, name in enumerate(fields))
@@ -727,7 +727,7 @@ def _create_string_input_function(func):
 @argument_clinic('*args, /', want_callback=True)
 def _os_path_join(args_set, callback):
     if len(args_set) == 1:
-        string = u''
+        string = ''
         sequence, = args_set
         is_first = True
         for lazy_value in sequence.py__iter__():
@@ -43,7 +43,7 @@ def test_versions(version):
 
 def test_load_module(inference_state):
     access_path = inference_state.compiled_subprocess.load_module(
-        dotted_name=u'math',
+        dotted_name='math',
         sys_path=inference_state.get_sys_path()
     )
     name, access_handle = access_path.accesses[0]
@@ -564,7 +564,7 @@ def test_param_annotation_completion(class_is_findable):
     ]
 )
 def test_dict_completion(code, column, expected):
-    strs = {'asdf': 1, u"""foo""": 2, r'fbar': 3}
+    strs = {'asdf': 1, """foo""": 2, r'fbar': 3}
     mixed = {1: 2, 1.10: 4, None: 6, r'a\sdf': 8, b'foo': 9}
 
     namespaces = [locals(), {'implicit': {1000: 3}}]
@@ -12,22 +12,22 @@ from jedi.inference.syntax_tree import _infer_comparison_part
 
 
 def test_simple(inference_state, environment):
-    obj = compiled.create_simple_object(inference_state, u'_str_')
-    upper, = obj.py__getattribute__(u'upper')
+    obj = compiled.create_simple_object(inference_state, '_str_')
+    upper, = obj.py__getattribute__('upper')
     objs = list(upper.execute_with_values())
     assert len(objs) == 1
    assert objs[0].name.string_name == 'str'
 
 
 def test_builtin_loading(inference_state):
-    string, = inference_state.builtins_module.py__getattribute__(u'str')
-    from_name, = string.py__getattribute__(u'__init__')
+    string, = inference_state.builtins_module.py__getattribute__('str')
+    from_name, = string.py__getattribute__('__init__')
     assert from_name.tree_node
     assert not from_name.py__doc__()  # It's a stub
 
 
 def test_next_docstr(inference_state):
-    next_ = compiled.builtin_from_name(inference_state, u'next')
+    next_ = compiled.builtin_from_name(inference_state, 'next')
     assert next_.tree_node is not None
     assert next_.py__doc__() == ''  # It's a stub
     for non_stub in _stub_to_python_value_set(next_):
@@ -48,9 +48,9 @@ def test_doc(inference_state):
     Even CompiledValue docs always return empty docstrings - not None, that's
     just a Jedi API definition.
     """
-    str_ = compiled.create_simple_object(inference_state, u'')
+    str_ = compiled.create_simple_object(inference_state, '')
     # Equals `''.__getnewargs__`
-    obj, = str_.py__getattribute__(u'__getnewargs__')
+    obj, = str_.py__getattribute__('__getnewargs__')
     assert obj.py__doc__() == ''
 
 
@@ -162,7 +162,7 @@ def test_operation(Script, inference_state, create_compiled_object):
     false, true = _infer_comparison_part(
         inference_state, b.parent_context,
         left=list(b.execute_with_values())[0],
-        operator=u'is not',
+        operator='is not',
         right=b,
     )
     assert false.py__name__() == 'bool'
@@ -50,8 +50,8 @@ def test_find_module_package_zipped(Script, inference_state, environment):
 
     file_io, is_package = inference_state.compiled_subprocess.get_module_info(
         sys_path=sys_path,
-        string=u'pkg',
-        full_name=u'pkg'
+        string='pkg',
+        full_name='pkg'
     )
     assert file_io is not None
     assert file_io.path.endswith(os.path.join('pkg.zip', 'pkg', '__init__.py'))
@@ -104,8 +104,8 @@ def test_find_module_not_package_zipped(Script, inference_state, environment):
 
     file_io, is_package = inference_state.compiled_subprocess.get_module_info(
         sys_path=sys_path,
-        string=u'not_pkg',
-        full_name=u'not_pkg'
+        string='not_pkg',
+        full_name='not_pkg'
    )
     assert file_io.path.endswith(os.path.join('not_pkg.zip', 'not_pkg.py'))
     assert is_package is False