Remove unicode literals from code base

Dave Halter
2020-07-02 10:43:14 +02:00
parent f1366b8a74
commit 5ab351dc8f
29 changed files with 105 additions and 105 deletions
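Background for the change: in Python 3 the u'' literal prefix is accepted for backwards compatibility but has no effect, since every string literal is already a unicode str. Once Python 2 compatibility is no longer a concern, dropping the prefix is purely cosmetic and behaviour-preserving. A minimal sketch of the equivalence (not part of the diff below):

    # Python 3: the u prefix is redundant; both spellings denote the same str value.
    assert u'Jedi' == 'Jedi'
    assert type(u'Jedi') is str
    # Only on Python 2 did the prefix matter ('Jedi' was bytes, u'Jedi' was unicode).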

View File

@@ -41,8 +41,8 @@ source_encoding = 'utf-8'
master_doc = 'index'
# General information about the project.
-project = u'Jedi'
-copyright = u'jedi contributors'
+project = 'Jedi'
+copyright = 'jedi contributors'
import jedi
from jedi.utils import version_info
@@ -203,8 +203,8 @@ latex_elements = {
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
-('index', 'Jedi.tex', u'Jedi Documentation',
-u'Jedi contributors', 'manual'),
+('index', 'Jedi.tex', 'Jedi Documentation',
+'Jedi contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -233,8 +233,8 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
-('index', 'jedi', u'Jedi Documentation',
-[u'Jedi contributors'], 1)
+('index', 'jedi', 'Jedi Documentation',
+['Jedi contributors'], 1)
]
# If true, show URL addresses after external links.
@@ -247,8 +247,8 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
-('index', 'Jedi', u'Jedi Documentation',
-u'Jedi contributors', 'Jedi', 'Awesome Python autocompletion library.',
+('index', 'Jedi', 'Jedi Documentation',
+'Jedi contributors', 'Jedi', 'Awesome Python autocompletion library.',
'Miscellaneous'),
]

View File

@@ -576,8 +576,8 @@ def _complete_getattr(user_context, instance):
will write it like this anyway and the other ones, well they are just
out of luck I guess :) ~dave.
"""
-names = (instance.get_function_slot_names(u'__getattr__')
-or instance.get_function_slot_names(u'__getattribute__'))
+names = (instance.get_function_slot_names('__getattr__')
+or instance.get_function_slot_names('__getattribute__'))
functions = ValueSet.from_sets(
name.infer()
for name in names

View File

@@ -7,7 +7,7 @@ from jedi.inference.helpers import get_str_or_none
class PathName(StringName):
-api_type = u'path'
+api_type = 'path'
def complete_file_name(inference_state, module_context, start_leaf, quote, string,

View File

@@ -7,7 +7,7 @@ from pydoc_data import topics as pydoc_topics
class KeywordName(AbstractArbitraryName):
-api_type = u'keyword'
+api_type = 'keyword'
def py__doc__(self):
return imitate_pydoc(self.string_name)

View File

@@ -18,7 +18,7 @@ _sentinel = object()
class StringName(AbstractArbitraryName):
-api_type = u'string'
+api_type = 'string'
is_value_name = False

View File

@@ -123,14 +123,14 @@ class InferenceState(object):
@property
@inference_state_function_cache()
def builtins_module(self):
-module_name = u'builtins'
+module_name = 'builtins'
builtins_module, = self.import_module((module_name,), sys_path=())
return builtins_module
@property
@inference_state_function_cache()
def typing_module(self):
-typing_module, = self.import_module((u'typing',))
+typing_module, = self.import_module(('typing',))
return typing_module
def reset_recursion_limitations(self):

View File

@@ -93,7 +93,7 @@ class HelperValueMixin(object):
return values
def py__await__(self):
-await_value_set = self.py__getattribute__(u"__await__")
+await_value_set = self.py__getattribute__("__await__")
if not await_value_set:
debug.warning('Tried to run __await__ on value %s', self)
return await_value_set.execute_with_values()

View File

@@ -53,7 +53,7 @@ def create_simple_object(inference_state, obj):
def get_string_value_set(inference_state):
-return builtin_from_name(inference_state, u'str').execute_with_values()
+return builtin_from_name(inference_state, 'str').execute_with_values()
def load_module(inference_state, dotted_name, **kwargs):

View File

@@ -155,14 +155,14 @@ def create_access_path(inference_state, obj):
def get_api_type(obj):
if inspect.isclass(obj):
-return u'class'
+return 'class'
elif inspect.ismodule(obj):
-return u'module'
+return 'module'
elif inspect.isbuiltin(obj) or inspect.ismethod(obj) \
or inspect.ismethoddescriptor(obj) or inspect.isfunction(obj):
-return u'function'
+return 'function'
# Everything else...
-return u'instance'
+return 'instance'
class DirectObjectAccess(object):

View File

@@ -57,7 +57,7 @@ class CompiledValue(Value):
).execute_annotation()
try:
-self.access_handle.getattr_paths(u'__call__')
+self.access_handle.getattr_paths('__call__')
except AttributeError:
return super(CompiledValue, self).py__call__(arguments)
else:
@@ -500,7 +500,7 @@ class CompiledValueFilter(AbstractFilter):
# ``dir`` doesn't include the type names.
if not self.is_instance and needs_type_completions:
-for filter in builtin_from_name(self._inference_state, u'type').get_filters():
+for filter in builtin_from_name(self._inference_state, 'type').get_filters():
names += filter.values()
return names
@@ -516,11 +516,11 @@ class CompiledValueFilter(AbstractFilter):
docstr_defaults = {
-'floating point number': u'float',
-'character': u'str',
-'integer': u'int',
-'dictionary': u'dict',
-'string': u'str',
+'floating point number': 'float',
+'character': 'str',
+'integer': 'int',
+'dictionary': 'dict',
+'string': 'str',
}
@@ -550,7 +550,7 @@ def _parse_function_doc(doc):
# UnboundLocalError for undefined end in last line
debug.dbg('no brackets found - no param')
end = 0
-param_str = u''
+param_str = ''
else:
# remove square brackets, that show an optional param ( = None)
def change_options(m):
@@ -568,9 +568,9 @@ def _parse_function_doc(doc):
param_str = param_str.replace('-', '_') # see: isinstance.__doc__
# parse return value
-r = re.search(u'-[>-]* ', doc[end:end + 7])
+r = re.search('-[>-]* ', doc[end:end + 7])
if r is None:
-ret = u''
+ret = ''
else:
index = end + r.end()
# get result type, which can contain newlines

View File

@@ -245,7 +245,7 @@ class MergedFilter(object):
class _BuiltinMappedMethod(ValueWrapper):
"""``Generator.__next__`` ``dict.values`` methods and so on."""
-api_type = u'function'
+api_type = 'function'
def __init__(self, value, method, builtin_func):
super(_BuiltinMappedMethod, self).__init__(builtin_func)
@@ -263,7 +263,7 @@ class SpecialMethodFilter(DictFilter):
classes like Generator (for __next__, etc).
"""
class SpecialMethodName(AbstractNameDefinition):
-api_type = u'function'
+api_type = 'function'
def __init__(self, parent_context, string_name, callable_, builtin_value):
self.parent_context = parent_context

View File

@@ -53,7 +53,7 @@ def _infer_annotation_string(context, string, index=None):
value_set = context.infer_node(node)
if index is not None:
value_set = value_set.filter(
-lambda value: value.array_type == u'tuple' # noqa
+lambda value: value.array_type == 'tuple' # noqa
and len(list(value.py__iter__())) >= index
).py__simple_getitem__(index)
return value_set

View File

@@ -230,7 +230,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
else:
continue
-if py_class.api_type != u'class':
+if py_class.api_type != 'class':
# Functions & modules don't have an MRO and we're not
# expecting a Callable (those are handled separately within
# TypingClassValueWithIndex).
@@ -309,7 +309,7 @@ class _GenericInstanceWrapper(ValueWrapper):
except IndexError:
pass
elif cls.py__name__() == 'Iterator':
-return ValueSet([builtin_from_name(self.inference_state, u'None')])
+return ValueSet([builtin_from_name(self.inference_state, 'None')])
return self._wrapped_value.py__stop_iteration_returns()
def get_type_hint(self, add_class_info=True):
@@ -326,7 +326,7 @@ class _PseudoTreeNameClass(Value):
this class. Essentially this class makes it possible to goto that `Tuple`
name, without affecting anything else negatively.
"""
-api_type = u'class'
+api_type = 'class'
def __init__(self, parent_context, tree_name):
super(_PseudoTreeNameClass, self).__init__(
@@ -356,7 +356,7 @@ class _PseudoTreeNameClass(Value):
def py__class__(self):
# This might not be 100% correct, but it is good enough. The details of
# the typing library are not really an issue for Jedi.
-return builtin_from_name(self.inference_state, u'type')
+return builtin_from_name(self.inference_state, 'type')
@property
def name(self):
@@ -423,7 +423,7 @@ class BaseTypingInstance(LazyValueWrapper):
return ValueName(self, self._tree_name)
def _get_wrapped_value(self):
-object_, = builtin_from_name(self.inference_state, u'object').execute_annotation()
+object_, = builtin_from_name(self.inference_state, 'object').execute_annotation()
return object_
def __repr__(self):

View File

@@ -109,7 +109,7 @@ def import_module_decorator(func):
# ``os``.
python_value_set = ValueSet.from_sets(
func(inference_state, (n,), None, sys_path,)
-for n in [u'posixpath', u'ntpath', u'macpath', u'os2emxpath']
+for n in ['posixpath', 'ntpath', 'macpath', 'os2emxpath']
)
else:
python_value_set = ValueSet.from_sets(

View File

@@ -71,7 +71,7 @@ class TypingModuleName(NameWrapper):
elif name == 'TYPE_CHECKING':
# This is needed for e.g. imports that are only available for type
# checking or are in cycles. The user can then check this variable.
-yield builtin_from_name(inference_state, u'True')
+yield builtin_from_name(inference_state, 'True')
elif name == 'overload':
yield OverloadFunction.create_cached(
inference_state, self.parent_context, self.tree_name)
@@ -110,7 +110,7 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
# Optional is basically just saying it's either None or the actual
# type.
return self.gather_annotation_classes().execute_annotation() \
-| ValueSet([builtin_from_name(self.inference_state, u'None')])
+| ValueSet([builtin_from_name(self.inference_state, 'None')])
elif string_name == 'Type':
# The type is actually already given in the index_value
return self._generics_manager[0]
@@ -153,7 +153,7 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
# Optional[T] is equivalent to Union[T, None]. In Jedi unions
# are represented by members within a ValueSet, so we extract
# the T from the Optional[T] by removing the None value.
-none = builtin_from_name(self.inference_state, u'None')
+none = builtin_from_name(self.inference_state, 'None')
return annotation_generics[0].infer_type_vars(
value_set.filter(lambda x: x != none),
)

View File

@@ -372,7 +372,7 @@ class _ParamMixin(object):
class ParamNameInterface(_ParamMixin):
-api_type = u'param'
+api_type = 'param'
def get_kind(self):
raise NotImplementedError

View File

@@ -227,7 +227,7 @@ def _infer_node(context, element):
if element.value != '...':
origin = element.parent
raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin))
-return ValueSet([compiled.builtin_from_name(inference_state, u'Ellipsis')])
+return ValueSet([compiled.builtin_from_name(inference_state, 'Ellipsis')])
elif typ == 'dotted_name':
value_set = infer_atom(context, element.children[0])
for next_name in element.children[2::2]:
@@ -319,7 +319,7 @@ def infer_atom(context, atom):
value_set = infer_atom(context, atom.children[0])
for string in atom.children[1:]:
right = infer_atom(context, string)
-value_set = _infer_comparison(context, value_set, u'+', right)
+value_set = _infer_comparison(context, value_set, '+', right)
return value_set
elif atom.type == 'fstring':
return compiled.get_string_value_set(state)
@@ -737,7 +737,7 @@ def tree_name_to_values(inference_state, context, tree_name):
types = infer_expr_stmt(context, node, tree_name)
elif typ == 'with_stmt':
value_managers = context.infer_node(node.get_test_node_from_name(tree_name))
-enter_methods = value_managers.py__getattribute__(u'__enter__')
+enter_methods = value_managers.py__getattribute__('__enter__')
return enter_methods.execute_with_values()
elif typ in ('import_from', 'import_name'):
types = imports.infer_import(context, tree_name)

View File

@@ -25,7 +25,7 @@ from jedi.inference.gradual.generics import TupleGenericManager
class LambdaName(AbstractNameDefinition):
string_name = '<lambda>'
-api_type = u'function'
+api_type = 'function'
def __init__(self, lambda_value):
self._lambda_value = lambda_value
@@ -54,7 +54,7 @@ class FunctionAndClassBase(TreeValue):
class FunctionMixin(object):
-api_type = u'function'
+api_type = 'function'
def get_filters(self, origin_scope=None):
cls = self.py__class__()
@@ -160,7 +160,7 @@ class FunctionValue(FunctionMixin, FunctionAndClassBase, metaclass=CachedMetaCla
return function
def py__class__(self):
-c, = values_from_qualified_names(self.inference_state, u'types', u'FunctionType')
+c, = values_from_qualified_names(self.inference_state, 'types', 'FunctionType')
return c
def get_default_param_context(self):
@@ -237,7 +237,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
try:
children = r.children
except AttributeError:
-ctx = compiled.builtin_from_name(self.inference_state, u'None')
+ctx = compiled.builtin_from_name(self.inference_state, 'None')
value_set |= ValueSet([ctx])
else:
value_set |= self.infer_node(children[1])
@@ -249,7 +249,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
def _get_yield_lazy_value(self, yield_expr):
if yield_expr.type == 'keyword':
# `yield` just yields None.
-ctx = compiled.builtin_from_name(self.inference_state, u'None')
+ctx = compiled.builtin_from_name(self.inference_state, 'None')
yield LazyKnownValue(ctx)
return

View File

@@ -88,7 +88,7 @@ class MethodExecutionContext(FunctionExecutionContext):
class AbstractInstanceValue(Value):
-api_type = u'instance'
+api_type = 'instance'
def __init__(self, inference_state, parent_context, class_value):
super(AbstractInstanceValue, self).__init__(inference_state, parent_context)
@@ -234,12 +234,12 @@ class _BaseTreeInstance(AbstractInstanceValue):
# other way around.
if is_big_annoying_library(self.parent_context):
return NO_VALUES
-names = (self.get_function_slot_names(u'__getattr__')
-or self.get_function_slot_names(u'__getattribute__'))
+names = (self.get_function_slot_names('__getattr__')
+or self.get_function_slot_names('__getattribute__'))
return self.execute_function_slots(names, name)
def py__getitem__(self, index_value_set, contextualized_node):
-names = self.get_function_slot_names(u'__getitem__')
+names = self.get_function_slot_names('__getitem__')
if not names:
return super(_BaseTreeInstance, self).py__getitem__(
index_value_set,
@@ -250,7 +250,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
return ValueSet.from_sets(name.infer().execute(args) for name in names)
def py__iter__(self, contextualized_node=None):
-iter_slot_names = self.get_function_slot_names(u'__iter__')
+iter_slot_names = self.get_function_slot_names('__iter__')
if not iter_slot_names:
return super(_BaseTreeInstance, self).py__iter__(contextualized_node)
@@ -258,7 +258,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
for generator in self.execute_function_slots(iter_slot_names):
if generator.is_instance() and not generator.is_compiled():
# `__next__` logic.
-name = u'__next__'
+name = '__next__'
next_slot_names = generator.get_function_slot_names(name)
if next_slot_names:
yield LazyKnownValues(
@@ -272,7 +272,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
return iterate()
def py__call__(self, arguments):
-names = self.get_function_slot_names(u'__call__')
+names = self.get_function_slot_names('__call__')
if not names:
# Means the Instance is not callable.
return super(_BaseTreeInstance, self).py__call__(arguments)
@@ -290,10 +290,10 @@ class _BaseTreeInstance(AbstractInstanceValue):
if result is not NotImplemented:
return result
-names = self.get_function_slot_names(u'__get__')
+names = self.get_function_slot_names('__get__')
if names:
if instance is None:
-instance = compiled.builtin_from_name(self.inference_state, u'None')
+instance = compiled.builtin_from_name(self.inference_state, 'None')
return self.execute_function_slots(names, instance, class_value)
else:
return ValueSet([self])
@@ -461,7 +461,7 @@ class BoundMethod(FunctionMixin, ValueWrapper):
)
def py__class__(self):
-c, = values_from_qualified_names(self.inference_state, u'types', u'MethodType')
+c, = values_from_qualified_names(self.inference_state, 'types', 'MethodType')
return c
def _get_arguments(self, arguments):

View File

@@ -21,7 +21,7 @@ from jedi.inference.value.dynamic_arrays import check_array_additions
class IterableMixin(object):
def py__stop_iteration_returns(self):
-return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
+return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
# At the moment, safe values are simple values like "foo", 1 and not
# lists/dicts. Therefore as a small speed optimization we can just do the
@@ -58,7 +58,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
def py__stop_iteration_returns(self):
-return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
+return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
@property
def name(self):
@@ -173,7 +173,7 @@ class _DictMixin(object):
class Sequence(LazyAttributeOverwrite, IterableMixin):
-api_type = u'instance'
+api_type = 'instance'
@property
def name(self):
@@ -219,7 +219,7 @@ class _BaseComprehension(ComprehensionMixin):
class ListComprehension(_BaseComprehension, Sequence):
-array_type = u'list'
+array_type = 'list'
def py__simple_getitem__(self, index):
if isinstance(index, slice):
@@ -232,7 +232,7 @@ class ListComprehension(_BaseComprehension, Sequence):
class SetComprehension(_BaseComprehension, Sequence):
-array_type = u'set'
+array_type = 'set'
class GeneratorComprehension(_BaseComprehension, GeneratorBase):
@@ -250,7 +250,7 @@ class _DictKeyMixin(object):
class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
-array_type = u'dict'
+array_type = 'dict'
def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
assert sync_comp_for_node.type == 'sync_comp_for'
@@ -307,9 +307,9 @@ class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
class SequenceLiteralValue(Sequence):
_TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist'
-mapping = {'(': u'tuple',
-'[': u'list',
-'{': u'set'}
+mapping = {'(': 'tuple',
+'[': 'list',
+'{': 'set'}
def __init__(self, inference_state, defining_context, atom):
super(SequenceLiteralValue, self).__init__(inference_state)
@@ -317,13 +317,13 @@ class SequenceLiteralValue(Sequence):
self._defining_context = defining_context
if self.atom.type in self._TUPLE_LIKE:
-self.array_type = u'tuple'
+self.array_type = 'tuple'
else:
self.array_type = SequenceLiteralValue.mapping[atom.children[0]]
"""The builtin name of the array (list, set, tuple or dict)."""
def _get_generics(self):
-if self.array_type == u'tuple':
+if self.array_type == 'tuple':
return tuple(x.infer().py__class__() for x in self.py__iter__())
return super(SequenceLiteralValue, self)._get_generics()
@@ -415,7 +415,7 @@ class SequenceLiteralValue(Sequence):
class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
-array_type = u'dict'
+array_type = 'dict'
def __init__(self, inference_state, defining_context, atom):
super(SequenceLiteralValue, self).__init__(inference_state)
@@ -427,7 +427,7 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
compiled_value_index = compiled.create_simple_object(self.inference_state, index)
for key, value in self.get_tree_entries():
for k in self._defining_context.infer_node(key):
-for key_v in k.execute_operation(compiled_value_index, u'=='):
+for key_v in k.execute_operation(compiled_value_index, '=='):
if key_v.get_safe_value():
return self._defining_context.infer_node(value)
raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
@@ -503,15 +503,15 @@ class _FakeSequence(Sequence):
class FakeTuple(_FakeSequence):
-array_type = u'tuple'
+array_type = 'tuple'
class FakeList(_FakeSequence):
-array_type = u'tuple'
+array_type = 'tuple'
class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
-array_type = u'dict'
+array_type = 'dict'
def __init__(self, inference_state, dct):
super(FakeDict, self).__init__(inference_state)

View File

@@ -144,7 +144,7 @@ class ClassMixin(object):
return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)])
def py__class__(self):
-return compiled.builtin_from_name(self.inference_state, u'type')
+return compiled.builtin_from_name(self.inference_state, 'type')
@property
def name(self):
@@ -204,7 +204,7 @@ class ClassMixin(object):
)
if not is_instance and include_type_when_class:
from jedi.inference.compiled import builtin_from_name
-type_ = builtin_from_name(self.inference_state, u'type')
+type_ = builtin_from_name(self.inference_state, 'type')
assert isinstance(type_, ClassValue)
if type_ != self:
# We are not using execute_with_values here, because the
@@ -319,7 +319,7 @@ class ClassMixin(object):
class ClassValue(ClassMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
-api_type = u'class'
+api_type = 'class'
@inference_state_method_cache()
def list_type_vars(self):

View File

@@ -16,7 +16,7 @@ class _ModuleAttributeName(AbstractNameDefinition):
"""
For module attributes like __file__, __str__ and so on.
"""
-api_type = u'instance'
+api_type = 'instance'
def __init__(self, parent_module, string_name, string_value=None):
self.parent_context = parent_module
@@ -70,7 +70,7 @@ class ModuleMixin(SubModuleDictMixin):
yield star_filter
def py__class__(self):
-c, = values_from_qualified_names(self.inference_state, u'types', u'ModuleType')
+c, = values_from_qualified_names(self.inference_state, 'types', 'ModuleType')
return c
def is_module(self):
@@ -134,7 +134,7 @@ class ModuleMixin(SubModuleDictMixin):
class ModuleValue(ModuleMixin, TreeValue):
-api_type = u'module'
+api_type = 'module'
def __init__(self, inference_state, module_node, code_lines, file_io=None,
string_names=None, is_package=False):

View File

@@ -23,7 +23,7 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
# Is a module like every other module, because if you import an empty
# folder foobar it will be available as an object:
# <module 'foobar' (namespace)>.
-api_type = u'module'
+api_type = 'module'
parent_context = None
def __init__(self, inference_state, string_names, paths):

View File

@@ -6,14 +6,14 @@ def import_module(callback):
def wrapper(inference_state, import_names, module_context, *args, **kwargs):
if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
# New style.
-ipath = (u'flask_' + import_names[2]),
+ipath = ('flask_' + import_names[2]),
value_set = callback(inference_state, ipath, None, *args, **kwargs)
if value_set:
return value_set
-value_set = callback(inference_state, (u'flaskext',), None, *args, **kwargs)
+value_set = callback(inference_state, ('flaskext',), None, *args, **kwargs)
return callback(
inference_state,
-(u'flaskext', import_names[2]),
+('flaskext', import_names[2]),
next(iter(value_set)),
*args, **kwargs
)

View File

@@ -421,7 +421,7 @@ def collections_namedtuple(value, arguments, callback):
inference_state = value.inference_state
# Process arguments
-name = u'jedi_unknown_namedtuple'
+name = 'jedi_unknown_namedtuple'
for c in _follow_param(inference_state, arguments, 0):
x = get_str_or_none(c)
if x is not None:
@@ -451,7 +451,7 @@ def collections_namedtuple(value, arguments, callback):
typename=name,
field_names=tuple(fields),
num_fields=len(fields),
-arg_list=repr(tuple(fields)).replace("u'", "").replace("'", "")[1:-1],
+arg_list=repr(tuple(fields)).replace("'", "")[1:-1],
repr_fmt='',
field_defs='\n'.join(_NAMEDTUPLE_FIELD_TEMPLATE.format(index=index, name=name)
for index, name in enumerate(fields))
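The arg_list simplification above works because on Python 3 the repr() of a tuple of str carries no u prefixes, so the extra .replace("u'", "") had become a no-op. A quick illustration with made-up field names:

    # Python 3: repr(('x', 'y')) == "('x', 'y')", no u prefixes to strip,
    # so a single quote-removal pass is enough to build the argument list.
    fields = ('x', 'y')
    arg_list = repr(tuple(fields)).replace("'", "")[1:-1]
    assert arg_list == 'x, y'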
@@ -727,7 +727,7 @@ def _create_string_input_function(func):
@argument_clinic('*args, /', want_callback=True)
def _os_path_join(args_set, callback):
if len(args_set) == 1:
-string = u''
+string = ''
sequence, = args_set
is_first = True
for lazy_value in sequence.py__iter__():

View File

@@ -43,7 +43,7 @@ def test_versions(version):
def test_load_module(inference_state):
access_path = inference_state.compiled_subprocess.load_module(
-dotted_name=u'math',
+dotted_name='math',
sys_path=inference_state.get_sys_path()
)
name, access_handle = access_path.accesses[0]

View File

@@ -564,7 +564,7 @@ def test_param_annotation_completion(class_is_findable):
]
)
def test_dict_completion(code, column, expected):
-strs = {'asdf': 1, u"""foo""": 2, r'fbar': 3}
+strs = {'asdf': 1, """foo""": 2, r'fbar': 3}
mixed = {1: 2, 1.10: 4, None: 6, r'a\sdf': 8, b'foo': 9}
namespaces = [locals(), {'implicit': {1000: 3}}]

View File

@@ -12,22 +12,22 @@ from jedi.inference.syntax_tree import _infer_comparison_part
def test_simple(inference_state, environment):
-obj = compiled.create_simple_object(inference_state, u'_str_')
-upper, = obj.py__getattribute__(u'upper')
+obj = compiled.create_simple_object(inference_state, '_str_')
+upper, = obj.py__getattribute__('upper')
objs = list(upper.execute_with_values())
assert len(objs) == 1
assert objs[0].name.string_name == 'str'
def test_builtin_loading(inference_state):
-string, = inference_state.builtins_module.py__getattribute__(u'str')
-from_name, = string.py__getattribute__(u'__init__')
+string, = inference_state.builtins_module.py__getattribute__('str')
+from_name, = string.py__getattribute__('__init__')
assert from_name.tree_node
assert not from_name.py__doc__() # It's a stub
def test_next_docstr(inference_state):
-next_ = compiled.builtin_from_name(inference_state, u'next')
+next_ = compiled.builtin_from_name(inference_state, 'next')
assert next_.tree_node is not None
assert next_.py__doc__() == '' # It's a stub
for non_stub in _stub_to_python_value_set(next_):
@@ -48,9 +48,9 @@ def test_doc(inference_state):
Even CompiledValue docs always return empty docstrings - not None, that's
just a Jedi API definition.
"""
-str_ = compiled.create_simple_object(inference_state, u'')
+str_ = compiled.create_simple_object(inference_state, '')
# Equals `''.__getnewargs__`
-obj, = str_.py__getattribute__(u'__getnewargs__')
+obj, = str_.py__getattribute__('__getnewargs__')
assert obj.py__doc__() == ''
@@ -162,7 +162,7 @@ def test_operation(Script, inference_state, create_compiled_object):
false, true = _infer_comparison_part(
inference_state, b.parent_context,
left=list(b.execute_with_values())[0],
-operator=u'is not',
+operator='is not',
right=b,
)
assert false.py__name__() == 'bool'

View File

@@ -50,8 +50,8 @@ def test_find_module_package_zipped(Script, inference_state, environment):
file_io, is_package = inference_state.compiled_subprocess.get_module_info(
sys_path=sys_path,
-string=u'pkg',
-full_name=u'pkg'
+string='pkg',
+full_name='pkg'
)
assert file_io is not None
assert file_io.path.endswith(os.path.join('pkg.zip', 'pkg', '__init__.py'))
@@ -104,8 +104,8 @@ def test_find_module_not_package_zipped(Script, inference_state, environment):
file_io, is_package = inference_state.compiled_subprocess.get_module_info(
sys_path=sys_path,
-string=u'not_pkg',
-full_name=u'not_pkg'
+string='not_pkg',
+full_name='not_pkg'
)
assert file_io.path.endswith(os.path.join('not_pkg.zip', 'not_pkg.py'))
assert is_package is False