Remove unicode literals from code base

Dave Halter
2020-07-02 10:43:14 +02:00
parent f1366b8a74
commit 5ab351dc8f
29 changed files with 105 additions and 105 deletions
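The u'' prefix is a no-op in Python 3: PEP 414 re-added it purely so that shared Python 2/3 codebases could keep their literals, and both spellings produce the same str object. A minimal sanity check (assuming any Python 3 interpreter) illustrating why dropping the prefix cannot change behavior:

    # The prefixed and unprefixed literals are the same value and the same type.
    assert u'builtins' == 'builtins'
    assert type(u'builtins') is type('builtins') is str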

View File

@@ -123,14 +123,14 @@ class InferenceState(object):
@property
@inference_state_function_cache()
def builtins_module(self):
-module_name = u'builtins'
+module_name = 'builtins'
builtins_module, = self.import_module((module_name,), sys_path=())
return builtins_module
@property
@inference_state_function_cache()
def typing_module(self):
-typing_module, = self.import_module((u'typing',))
+typing_module, = self.import_module(('typing',))
return typing_module
def reset_recursion_limitations(self):

View File

@@ -93,7 +93,7 @@ class HelperValueMixin(object):
return values
def py__await__(self):
await_value_set = self.py__getattribute__(u"__await__")
await_value_set = self.py__getattribute__("__await__")
if not await_value_set:
debug.warning('Tried to run __await__ on value %s', self)
return await_value_set.execute_with_values()

View File

@@ -53,7 +53,7 @@ def create_simple_object(inference_state, obj):
def get_string_value_set(inference_state):
-return builtin_from_name(inference_state, u'str').execute_with_values()
+return builtin_from_name(inference_state, 'str').execute_with_values()
def load_module(inference_state, dotted_name, **kwargs):

View File

@@ -155,14 +155,14 @@ def create_access_path(inference_state, obj):
def get_api_type(obj):
if inspect.isclass(obj):
-return u'class'
+return 'class'
elif inspect.ismodule(obj):
-return u'module'
+return 'module'
elif inspect.isbuiltin(obj) or inspect.ismethod(obj) \
or inspect.ismethoddescriptor(obj) or inspect.isfunction(obj):
-return u'function'
+return 'function'
# Everything else...
-return u'instance'
+return 'instance'
class DirectObjectAccess(object):

View File

@@ -57,7 +57,7 @@ class CompiledValue(Value):
).execute_annotation()
try:
-self.access_handle.getattr_paths(u'__call__')
+self.access_handle.getattr_paths('__call__')
except AttributeError:
return super(CompiledValue, self).py__call__(arguments)
else:
@@ -500,7 +500,7 @@ class CompiledValueFilter(AbstractFilter):
# ``dir`` doesn't include the type names.
if not self.is_instance and needs_type_completions:
-for filter in builtin_from_name(self._inference_state, u'type').get_filters():
+for filter in builtin_from_name(self._inference_state, 'type').get_filters():
names += filter.values()
return names
@@ -516,11 +516,11 @@ class CompiledValueFilter(AbstractFilter):
docstr_defaults = {
-'floating point number': u'float',
-'character': u'str',
-'integer': u'int',
-'dictionary': u'dict',
-'string': u'str',
+'floating point number': 'float',
+'character': 'str',
+'integer': 'int',
+'dictionary': 'dict',
+'string': 'str',
}
@@ -550,7 +550,7 @@ def _parse_function_doc(doc):
# UnboundLocalError for undefined end in last line
debug.dbg('no brackets found - no param')
end = 0
-param_str = u''
+param_str = ''
else:
# remove square brackets, that show an optional param ( = None)
def change_options(m):
@@ -568,9 +568,9 @@ def _parse_function_doc(doc):
param_str = param_str.replace('-', '_') # see: isinstance.__doc__
# parse return value
-r = re.search(u'-[>-]* ', doc[end:end + 7])
+r = re.search('-[>-]* ', doc[end:end + 7])
if r is None:
-ret = u''
+ret = ''
else:
index = end + r.end()
# get result type, which can contain newlines

View File

@@ -245,7 +245,7 @@ class MergedFilter(object):
class _BuiltinMappedMethod(ValueWrapper):
"""``Generator.__next__`` ``dict.values`` methods and so on."""
-api_type = u'function'
+api_type = 'function'
def __init__(self, value, method, builtin_func):
super(_BuiltinMappedMethod, self).__init__(builtin_func)
@@ -263,7 +263,7 @@ class SpecialMethodFilter(DictFilter):
classes like Generator (for __next__, etc).
"""
class SpecialMethodName(AbstractNameDefinition):
-api_type = u'function'
+api_type = 'function'
def __init__(self, parent_context, string_name, callable_, builtin_value):
self.parent_context = parent_context

View File

@@ -53,7 +53,7 @@ def _infer_annotation_string(context, string, index=None):
value_set = context.infer_node(node)
if index is not None:
value_set = value_set.filter(
-lambda value: value.array_type == u'tuple' # noqa
+lambda value: value.array_type == 'tuple' # noqa
and len(list(value.py__iter__())) >= index
).py__simple_getitem__(index)
return value_set

View File

@@ -230,7 +230,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
else:
continue
-if py_class.api_type != u'class':
+if py_class.api_type != 'class':
# Functions & modules don't have an MRO and we're not
# expecting a Callable (those are handled separately within
# TypingClassValueWithIndex).
@@ -309,7 +309,7 @@ class _GenericInstanceWrapper(ValueWrapper):
except IndexError:
pass
elif cls.py__name__() == 'Iterator':
-return ValueSet([builtin_from_name(self.inference_state, u'None')])
+return ValueSet([builtin_from_name(self.inference_state, 'None')])
return self._wrapped_value.py__stop_iteration_returns()
def get_type_hint(self, add_class_info=True):
@@ -326,7 +326,7 @@ class _PseudoTreeNameClass(Value):
this class. Essentially this class makes it possible to goto that `Tuple`
name, without affecting anything else negatively.
"""
-api_type = u'class'
+api_type = 'class'
def __init__(self, parent_context, tree_name):
super(_PseudoTreeNameClass, self).__init__(
@@ -356,7 +356,7 @@ class _PseudoTreeNameClass(Value):
def py__class__(self):
# This might not be 100% correct, but it is good enough. The details of
# the typing library are not really an issue for Jedi.
-return builtin_from_name(self.inference_state, u'type')
+return builtin_from_name(self.inference_state, 'type')
@property
def name(self):
@@ -423,7 +423,7 @@ class BaseTypingInstance(LazyValueWrapper):
return ValueName(self, self._tree_name)
def _get_wrapped_value(self):
-object_, = builtin_from_name(self.inference_state, u'object').execute_annotation()
+object_, = builtin_from_name(self.inference_state, 'object').execute_annotation()
return object_
def __repr__(self):

View File

@@ -109,7 +109,7 @@ def import_module_decorator(func):
# ``os``.
python_value_set = ValueSet.from_sets(
func(inference_state, (n,), None, sys_path,)
-for n in [u'posixpath', u'ntpath', u'macpath', u'os2emxpath']
+for n in ['posixpath', 'ntpath', 'macpath', 'os2emxpath']
)
else:
python_value_set = ValueSet.from_sets(

View File

@@ -71,7 +71,7 @@ class TypingModuleName(NameWrapper):
elif name == 'TYPE_CHECKING':
# This is needed for e.g. imports that are only available for type
# checking or are in cycles. The user can then check this variable.
-yield builtin_from_name(inference_state, u'True')
+yield builtin_from_name(inference_state, 'True')
elif name == 'overload':
yield OverloadFunction.create_cached(
inference_state, self.parent_context, self.tree_name)
@@ -110,7 +110,7 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
# Optional is basically just saying it's either None or the actual
# type.
return self.gather_annotation_classes().execute_annotation() \
-| ValueSet([builtin_from_name(self.inference_state, u'None')])
+| ValueSet([builtin_from_name(self.inference_state, 'None')])
elif string_name == 'Type':
# The type is actually already given in the index_value
return self._generics_manager[0]
@@ -153,7 +153,7 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
# Optional[T] is equivalent to Union[T, None]. In Jedi unions
# are represented by members within a ValueSet, so we extract
# the T from the Optional[T] by removing the None value.
-none = builtin_from_name(self.inference_state, u'None')
+none = builtin_from_name(self.inference_state, 'None')
return annotation_generics[0].infer_type_vars(
value_set.filter(lambda x: x != none),
)

View File

@@ -372,7 +372,7 @@ class _ParamMixin(object):
class ParamNameInterface(_ParamMixin):
-api_type = u'param'
+api_type = 'param'
def get_kind(self):
raise NotImplementedError

View File

@@ -227,7 +227,7 @@ def _infer_node(context, element):
if element.value != '...':
origin = element.parent
raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin))
-return ValueSet([compiled.builtin_from_name(inference_state, u'Ellipsis')])
+return ValueSet([compiled.builtin_from_name(inference_state, 'Ellipsis')])
elif typ == 'dotted_name':
value_set = infer_atom(context, element.children[0])
for next_name in element.children[2::2]:
@@ -319,7 +319,7 @@ def infer_atom(context, atom):
value_set = infer_atom(context, atom.children[0])
for string in atom.children[1:]:
right = infer_atom(context, string)
-value_set = _infer_comparison(context, value_set, u'+', right)
+value_set = _infer_comparison(context, value_set, '+', right)
return value_set
elif atom.type == 'fstring':
return compiled.get_string_value_set(state)
@@ -737,7 +737,7 @@ def tree_name_to_values(inference_state, context, tree_name):
types = infer_expr_stmt(context, node, tree_name)
elif typ == 'with_stmt':
value_managers = context.infer_node(node.get_test_node_from_name(tree_name))
-enter_methods = value_managers.py__getattribute__(u'__enter__')
+enter_methods = value_managers.py__getattribute__('__enter__')
return enter_methods.execute_with_values()
elif typ in ('import_from', 'import_name'):
types = imports.infer_import(context, tree_name)

View File

@@ -25,7 +25,7 @@ from jedi.inference.gradual.generics import TupleGenericManager
class LambdaName(AbstractNameDefinition):
string_name = '<lambda>'
-api_type = u'function'
+api_type = 'function'
def __init__(self, lambda_value):
self._lambda_value = lambda_value
@@ -54,7 +54,7 @@ class FunctionAndClassBase(TreeValue):
class FunctionMixin(object):
-api_type = u'function'
+api_type = 'function'
def get_filters(self, origin_scope=None):
cls = self.py__class__()
@@ -160,7 +160,7 @@ class FunctionValue(FunctionMixin, FunctionAndClassBase, metaclass=CachedMetaCla
return function
def py__class__(self):
-c, = values_from_qualified_names(self.inference_state, u'types', u'FunctionType')
+c, = values_from_qualified_names(self.inference_state, 'types', 'FunctionType')
return c
def get_default_param_context(self):
@@ -237,7 +237,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
try:
children = r.children
except AttributeError:
-ctx = compiled.builtin_from_name(self.inference_state, u'None')
+ctx = compiled.builtin_from_name(self.inference_state, 'None')
value_set |= ValueSet([ctx])
else:
value_set |= self.infer_node(children[1])
@@ -249,7 +249,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
def _get_yield_lazy_value(self, yield_expr):
if yield_expr.type == 'keyword':
# `yield` just yields None.
-ctx = compiled.builtin_from_name(self.inference_state, u'None')
+ctx = compiled.builtin_from_name(self.inference_state, 'None')
yield LazyKnownValue(ctx)
return

View File

@@ -88,7 +88,7 @@ class MethodExecutionContext(FunctionExecutionContext):
class AbstractInstanceValue(Value):
-api_type = u'instance'
+api_type = 'instance'
def __init__(self, inference_state, parent_context, class_value):
super(AbstractInstanceValue, self).__init__(inference_state, parent_context)
@@ -234,12 +234,12 @@ class _BaseTreeInstance(AbstractInstanceValue):
# other way around.
if is_big_annoying_library(self.parent_context):
return NO_VALUES
-names = (self.get_function_slot_names(u'__getattr__')
-or self.get_function_slot_names(u'__getattribute__'))
+names = (self.get_function_slot_names('__getattr__')
+or self.get_function_slot_names('__getattribute__'))
return self.execute_function_slots(names, name)
def py__getitem__(self, index_value_set, contextualized_node):
-names = self.get_function_slot_names(u'__getitem__')
+names = self.get_function_slot_names('__getitem__')
if not names:
return super(_BaseTreeInstance, self).py__getitem__(
index_value_set,
@@ -250,7 +250,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
return ValueSet.from_sets(name.infer().execute(args) for name in names)
def py__iter__(self, contextualized_node=None):
-iter_slot_names = self.get_function_slot_names(u'__iter__')
+iter_slot_names = self.get_function_slot_names('__iter__')
if not iter_slot_names:
return super(_BaseTreeInstance, self).py__iter__(contextualized_node)
@@ -258,7 +258,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
for generator in self.execute_function_slots(iter_slot_names):
if generator.is_instance() and not generator.is_compiled():
# `__next__` logic.
-name = u'__next__'
+name = '__next__'
next_slot_names = generator.get_function_slot_names(name)
if next_slot_names:
yield LazyKnownValues(
@@ -272,7 +272,7 @@ class _BaseTreeInstance(AbstractInstanceValue):
return iterate()
def py__call__(self, arguments):
-names = self.get_function_slot_names(u'__call__')
+names = self.get_function_slot_names('__call__')
if not names:
# Means the Instance is not callable.
return super(_BaseTreeInstance, self).py__call__(arguments)
@@ -290,10 +290,10 @@ class _BaseTreeInstance(AbstractInstanceValue):
if result is not NotImplemented:
return result
-names = self.get_function_slot_names(u'__get__')
+names = self.get_function_slot_names('__get__')
if names:
if instance is None:
-instance = compiled.builtin_from_name(self.inference_state, u'None')
+instance = compiled.builtin_from_name(self.inference_state, 'None')
return self.execute_function_slots(names, instance, class_value)
else:
return ValueSet([self])
@@ -461,7 +461,7 @@ class BoundMethod(FunctionMixin, ValueWrapper):
)
def py__class__(self):
-c, = values_from_qualified_names(self.inference_state, u'types', u'MethodType')
+c, = values_from_qualified_names(self.inference_state, 'types', 'MethodType')
return c
def _get_arguments(self, arguments):

View File

@@ -21,7 +21,7 @@ from jedi.inference.value.dynamic_arrays import check_array_additions
class IterableMixin(object):
def py__stop_iteration_returns(self):
-return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
+return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
# At the moment, safe values are simple values like "foo", 1 and not
# lists/dicts. Therefore as a small speed optimization we can just do the
@@ -58,7 +58,7 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
def py__stop_iteration_returns(self):
-return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
+return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
@property
def name(self):
@@ -173,7 +173,7 @@ class _DictMixin(object):
class Sequence(LazyAttributeOverwrite, IterableMixin):
-api_type = u'instance'
+api_type = 'instance'
@property
def name(self):
@@ -219,7 +219,7 @@ class _BaseComprehension(ComprehensionMixin):
class ListComprehension(_BaseComprehension, Sequence):
-array_type = u'list'
+array_type = 'list'
def py__simple_getitem__(self, index):
if isinstance(index, slice):
@@ -232,7 +232,7 @@ class ListComprehension(_BaseComprehension, Sequence):
class SetComprehension(_BaseComprehension, Sequence):
-array_type = u'set'
+array_type = 'set'
class GeneratorComprehension(_BaseComprehension, GeneratorBase):
@@ -250,7 +250,7 @@ class _DictKeyMixin(object):
class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
-array_type = u'dict'
+array_type = 'dict'
def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
assert sync_comp_for_node.type == 'sync_comp_for'
@@ -307,9 +307,9 @@ class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
class SequenceLiteralValue(Sequence):
_TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist'
-mapping = {'(': u'tuple',
-'[': u'list',
-'{': u'set'}
+mapping = {'(': 'tuple',
+'[': 'list',
+'{': 'set'}
def __init__(self, inference_state, defining_context, atom):
super(SequenceLiteralValue, self).__init__(inference_state)
@@ -317,13 +317,13 @@ class SequenceLiteralValue(Sequence):
self._defining_context = defining_context
if self.atom.type in self._TUPLE_LIKE:
-self.array_type = u'tuple'
+self.array_type = 'tuple'
else:
self.array_type = SequenceLiteralValue.mapping[atom.children[0]]
"""The builtin name of the array (list, set, tuple or dict)."""
def _get_generics(self):
-if self.array_type == u'tuple':
+if self.array_type == 'tuple':
return tuple(x.infer().py__class__() for x in self.py__iter__())
return super(SequenceLiteralValue, self)._get_generics()
@@ -415,7 +415,7 @@ class SequenceLiteralValue(Sequence):
class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
-array_type = u'dict'
+array_type = 'dict'
def __init__(self, inference_state, defining_context, atom):
super(SequenceLiteralValue, self).__init__(inference_state)
@@ -427,7 +427,7 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
compiled_value_index = compiled.create_simple_object(self.inference_state, index)
for key, value in self.get_tree_entries():
for k in self._defining_context.infer_node(key):
-for key_v in k.execute_operation(compiled_value_index, u'=='):
+for key_v in k.execute_operation(compiled_value_index, '=='):
if key_v.get_safe_value():
return self._defining_context.infer_node(value)
raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
@@ -503,15 +503,15 @@ class _FakeSequence(Sequence):
class FakeTuple(_FakeSequence):
-array_type = u'tuple'
+array_type = 'tuple'
class FakeList(_FakeSequence):
-array_type = u'tuple'
+array_type = 'tuple'
class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
-array_type = u'dict'
+array_type = 'dict'
def __init__(self, inference_state, dct):
super(FakeDict, self).__init__(inference_state)

View File

@@ -144,7 +144,7 @@ class ClassMixin(object):
return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)])
def py__class__(self):
-return compiled.builtin_from_name(self.inference_state, u'type')
+return compiled.builtin_from_name(self.inference_state, 'type')
@property
def name(self):
@@ -204,7 +204,7 @@ class ClassMixin(object):
)
if not is_instance and include_type_when_class:
from jedi.inference.compiled import builtin_from_name
-type_ = builtin_from_name(self.inference_state, u'type')
+type_ = builtin_from_name(self.inference_state, 'type')
assert isinstance(type_, ClassValue)
if type_ != self:
# We are not using execute_with_values here, because the
@@ -319,7 +319,7 @@ class ClassMixin(object):
class ClassValue(ClassMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
-api_type = u'class'
+api_type = 'class'
@inference_state_method_cache()
def list_type_vars(self):

View File

@@ -16,7 +16,7 @@ class _ModuleAttributeName(AbstractNameDefinition):
"""
For module attributes like __file__, __str__ and so on.
"""
-api_type = u'instance'
+api_type = 'instance'
def __init__(self, parent_module, string_name, string_value=None):
self.parent_context = parent_module
@@ -70,7 +70,7 @@ class ModuleMixin(SubModuleDictMixin):
yield star_filter
def py__class__(self):
-c, = values_from_qualified_names(self.inference_state, u'types', u'ModuleType')
+c, = values_from_qualified_names(self.inference_state, 'types', 'ModuleType')
return c
def is_module(self):
@@ -134,7 +134,7 @@ class ModuleMixin(SubModuleDictMixin):
class ModuleValue(ModuleMixin, TreeValue):
-api_type = u'module'
+api_type = 'module'
def __init__(self, inference_state, module_node, code_lines, file_io=None,
string_names=None, is_package=False):

View File

@@ -23,7 +23,7 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
# Is a module like every other module, because if you import an empty
# folder foobar it will be available as an object:
# <module 'foobar' (namespace)>.
-api_type = u'module'
+api_type = 'module'
parent_context = None
def __init__(self, inference_state, string_names, paths):