"""
Contains all classes and functions to deal with lists, dicts, generators and
iterators in general.
Array modifications
*******************
If the content of an array (``set``/``list``) is requested somewhere, the
current module will be checked for appearances of ``arr.append``,
``arr.insert``, etc. If the ``arr`` name points to an actual array, the
content will be added.
This can be really CPU intensive, as you can imagine, because |jedi| has to
follow **every** ``append`` and check whether it's the right array. However,
this works pretty well, because in *slow* cases the recursion detector and
other settings will stop this process.
It is important to note that:
1. Array modifications work only in the current module.
2. Jedi only checks array additions; ``list.pop``, etc. are ignored.
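
For example, given::

    arr = []
    arr.append(1)

asking for the contents of ``arr`` afterwards will also include ``int``,
because the ``append`` call above is found and evaluated.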
"""
from jedi.common import unite, safe_property
from jedi import debug
from jedi import settings
from jedi._compatibility import unicode, zip_longest, is_py3
from jedi.parser import tree
from jedi.evaluate import compiled
from jedi.evaluate import helpers
from jedi.evaluate.cache import memoize_default
from jedi.evaluate import analysis
from jedi.evaluate import pep0484
from jedi import common
from jedi.evaluate.filters import DictFilter, AbstractNameDefinition
from jedi.evaluate import context
from jedi.evaluate import precedence
class AbstractSequence(context.Context):
def __init__(self, evaluator):
super(AbstractSequence, self).__init__(evaluator, evaluator.BUILTINS)
def get_filters(self, search_global, until_position=None, origin_scope=None):
raise NotImplementedError
@property
def name(self):
return compiled.CompiledContextName(self, self.array_type)
class BuiltinMethod(object):
"""``Generator.__next__`` ``dict.values`` methods and so on."""
def __init__(self, builtin_context, method, builtin_func):
self._builtin_context = builtin_context
self._method = method
self._builtin_func = builtin_func
def py__call__(self, params):
return self._method(self._builtin_context)
def __getattr__(self, name):
return getattr(self._builtin_func, name)
class SpecialMethodFilter(DictFilter):
"""
A filter for methods that are defined in this module on the corresponding
classes like Generator (for __next__, etc).
"""
class SpecialMethodName(AbstractNameDefinition):
api_type = 'function'
def __init__(self, parent_context, string_name, callable_, builtin_context):
self.parent_context = parent_context
self.string_name = string_name
self._callable = callable_
self._builtin_context = builtin_context
def infer(self):
filter = next(self._builtin_context.get_filters())
# We can take the first index, because on builtin methods there's
# always only going to be one name. The same is true for the
# inferred values.
builtin_func = filter.get(self.string_name)[0].infer().pop()
return set([BuiltinMethod(self.parent_context, self._callable, builtin_func)])
def __init__(self, context, dct, builtin_context):
super(SpecialMethodFilter, self).__init__(dct)
self.context = context
self._builtin_context = builtin_context
"""
        This context is what will be used to introspect the name, whereas the
other context will be used to execute the function.
We distinguish, because we have to.
"""
def _convert(self, name, value):
return self.SpecialMethodName(self.context, name, value, self._builtin_context)
def has_builtin_methods(cls):
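    """
    Class decorator that collects all methods registered with
    ``register_builtin_method`` into a ``cls.builtin_methods`` dict.
    """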
cls.builtin_methods = {}
for func in cls.__dict__.values():
try:
cls.builtin_methods.update(func.registered_builtin_methods)
except AttributeError:
pass
return cls
def register_builtin_method(method_name, python_version_match=None):
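    """
    Register the decorated function as the implementation of the builtin
    method ``method_name``. If ``python_version_match`` is given, the
    registration only happens on that major Python version (2 or 3).
    """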
def wrapper(func):
        if python_version_match and python_version_match != 2 + int(is_py3):
            # Some methods only apply to one major Python version.
return func
dct = func.__dict__.setdefault('registered_builtin_methods', {})
dct[method_name] = func
return func
return wrapper
@has_builtin_methods
class GeneratorMixin(object):
array_type = None
@register_builtin_method('send')
@register_builtin_method('next', python_version_match=2)
@register_builtin_method('__next__', python_version_match=3)
def py__next__(self):
# TODO add TypeError if params are given.
return unite(lazy_context.infer() for lazy_context in self.py__iter__())
@memoize_default()
def names_dicts(self, search_global=False): # is always False
gen_obj = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
yield self._get_names_dict(gen_obj.names_dict)
def get_filters(self, search_global, until_position=None, origin_scope=None):
gen_obj = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
yield SpecialMethodFilter(self, self.builtin_methods, gen_obj)
for filter in gen_obj.get_filters(search_global):
yield filter
def py__bool__(self):
return True
def py__class__(self):
gen_obj = compiled.get_special_object(self.evaluator, 'GENERATOR_OBJECT')
return gen_obj.py__class__()
class Generator(GeneratorMixin, context.Context):
"""Handling of `yield` functions."""
def __init__(self, evaluator, func_execution_context):
super(Generator, self).__init__(evaluator)
self._func_execution_context = func_execution_context
def py__iter__(self):
return self._func_execution_context.get_yield_values()
def __repr__(self):
return "<%s of %s>" % (type(self).__name__, self._func_execution_context)
class Comprehension(AbstractSequence):
@staticmethod
def from_atom(evaluator, context, atom):
bracket = atom.children[0]
if bracket == '{':
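            # In a dict comprehension ``{k: v for ...}`` the second child of
            # the dictorsetmaker is ':'; in a set comprehension it isn't.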
if atom.children[1].children[1] == ':':
cls = DictComprehension
else:
cls = SetComprehension
elif bracket == '(':
cls = GeneratorComprehension
elif bracket == '[':
cls = ListComprehension
return cls(evaluator, context, atom)
def __init__(self, evaluator, defining_context, atom):
super(Comprehension, self).__init__(evaluator)
self._defining_context = defining_context
self._atom = atom
def _get_comprehension(self):
# The atom contains a testlist_comp
return self._atom.children[1]
def _get_comp_for(self):
# The atom contains a testlist_comp
return self._get_comprehension().children[1]
@memoize_default()
def _eval_node(self, index=0):
"""
The first part `x + 1` of the list comprehension:
[x + 1 for x in foo]
"""
comp_for = self._get_comp_for()
# For nested comprehensions we need to search the last one.
node = self._get_comprehension().children[index]
last_comp = list(comp_for.get_comp_fors())[-1]
        # TODO raise NotImplementedError('should not need to copy...')
return helpers.deep_ast_copy(node, parent=last_comp)
def _nested(self, comp_fors):
evaluator = self.evaluator
comp_for = comp_fors[0]
input_node = comp_for.children[3]
input_types = self._defining_context.eval_node(input_node)
iterated = py__iter__(evaluator, input_types, input_node)
exprlist = comp_for.children[1]
for i, lazy_context in enumerate(iterated):
types = lazy_context.infer()
dct = unpack_tuple_to_dict(evaluator, types, exprlist)
with helpers.predefine_names(self._defining_context, comp_for, dct):
try:
for result in self._nested(comp_fors[1:]):
yield result
except IndexError:
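                    # ``comp_fors`` is exhausted (the base case of the
                    # recursion), so evaluate the comprehension's entry
                    # node(s) itself.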
iterated = self._defining_context.eval_node(self._eval_node())
if self.array_type == 'dict':
yield iterated, self._defining_context.eval_node(self._eval_node(2))
else:
yield iterated
@memoize_default(default=[])
@common.to_list
def _iterate(self):
comp_fors = tuple(self._get_comp_for().get_comp_fors())
for result in self._nested(comp_fors):
yield result
def py__iter__(self):
for set_ in self._iterate():
yield context.LazyKnownContexts(set_)
def __repr__(self):
return "<%s of %s>" % (type(self).__name__, self._atom)
@has_builtin_methods
class ArrayMixin(object):
@memoize_default()
def names_dicts(self, search_global=False): # Always False.
        # ``self.array_type`` is a string with the type, e.g. 'list'.
scope = compiled.builtin_from_name(self.evaluator, self.array_type)
# builtins only have one class -> [0]
scopes = self.evaluator.execute_evaluated(scope, self)
names_dicts = list(scopes)[0].names_dicts(search_global)
yield self._get_names_dict(names_dicts[1])
def get_filters(self, search_global, until_position=None, origin_scope=None):
        # ``self.array_type`` is a string with the type, e.g. 'list'.
compiled_obj = compiled.builtin_from_name(self.evaluator, self.array_type)
yield SpecialMethodFilter(self, self.builtin_methods, compiled_obj)
for typ in compiled_obj.execute_evaluated(self):
for filter in typ.get_filters():
yield filter
# TODO this should be used.
#yield DictFilter(self._get_names_dict(names_dicts[1]))
def py__bool__(self):
return None # We don't know the length, because of appends.
def py__class__(self):
return compiled.builtin_from_name(self.evaluator, self.array_type)
@safe_property
def parent(self):
return self.evaluator.BUILTINS
def dict_values(self):
return unite(self._defining_context.eval_node(v) for k, v in self._items())
class DICT(object):
@register_builtin_method('values')
def _imitate_values(self):
items = self.dict_values()
return create_evaluated_sequence_set(self.evaluator, items, sequence_type='list')
@register_builtin_method('items')
def _imitate_items(self):
        items = [set([FakeSequence(self.evaluator, 'tuple', (k, v))])
                 for k, v in self._items()]
return create_evaluated_sequence_set(self.evaluator, *items, sequence_type='list')
class ListComprehension(ArrayMixin, Comprehension):
array_type = 'list'
def py__getitem__(self, index):
if isinstance(index, slice):
return set([self])
all_types = list(self.py__iter__())
return all_types[index].infer()
class SetComprehension(ArrayMixin, Comprehension):
array_type = 'set'
@has_builtin_methods
class DictComprehension(ArrayMixin, Comprehension):
array_type = 'dict'
def _get_comp_for(self):
return self._get_comprehension().children[3]
def py__iter__(self):
raise NotImplementedError
for keys, values in self._iterate():
yield keys
def py__getitem__(self, index):
for keys, values in self._iterate():
for k in keys:
if isinstance(k, compiled.CompiledObject):
if k.obj == index:
return values
return self.dict_values()
def dict_values(self):
return unite(values for keys, values in self._iterate())
@register_builtin_method('items')
def _imitate_items(self):
        items = set(FakeSequence(self.evaluator, 'tuple',
                                 (AlreadyEvaluated(keys), AlreadyEvaluated(values)))
for keys, values in self._iterate())
return create_evaluated_sequence_set(self.evaluator, items, sequence_type='list')
class GeneratorComprehension(GeneratorMixin, Comprehension):
pass
class ArrayLiteralContext(ArrayMixin, AbstractSequence):
mapping = {'(': 'tuple',
'[': 'list',
'{': 'dict'}
def __init__(self, evaluator, defining_context, atom):
super(ArrayLiteralContext, self).__init__(evaluator)
self.atom = atom
self._defining_context = defining_context
if self.atom.type in ('testlist_star_expr', 'testlist'):
self.array_type = 'tuple'
else:
self.array_type = ArrayLiteralContext.mapping[atom.children[0]]
"""The builtin name of the array (list, set, tuple or dict)."""
c = self.atom.children
array_node = c[1]
if self.array_type == 'dict' and array_node != '}' \
and (not hasattr(array_node, 'children') or ':' not in array_node.children):
self.array_type = 'set'
def py__getitem__(self, index):
"""Here the index is an int/str. Raises IndexError/KeyError."""
if self.array_type == 'dict':
for key, value in self._items():
for k in self._defining_context.eval_node(key):
if isinstance(k, compiled.CompiledObject) \
and index == k.obj:
return self._defining_context.eval_node(value)
raise KeyError('No key found in dictionary %s.' % self)
# Can raise an IndexError
if isinstance(index, slice):
return set([self])
else:
return self._defining_context.eval_node(self._items()[index])
def py__iter__(self):
"""
While values returns the possible values for any array field, this
function returns the value for a certain index.
"""
if self.array_type == 'dict':
# Get keys.
types = set()
for k, _ in self._items():
types |= self._defining_context.eval_node(k)
# We don't know which dict index comes first, therefore always
# yield all the types.
for _ in types:
yield context.LazyKnownContexts(types)
else:
for node in self._items():
yield context.LazyTreeContext(self._defining_context, node)
additions = check_array_additions(self.evaluator, self)
if additions:
yield additions
def _values(self):
"""Returns a list of a list of node."""
if self.array_type == 'dict':
return unite(v for k, v in self._items())
else:
return self._items()
def _items(self):
c = self.atom.children
if self.atom.type in ('testlist_star_expr', 'testlist'):
return c[::2]
array_node = c[1]
if array_node in (']', '}', ')'):
return [] # Direct closing bracket, doesn't contain items.
if tree.is_node(array_node, 'testlist_comp'):
return array_node.children[::2]
elif tree.is_node(array_node, 'dictorsetmaker'):
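            # For a dict literal ``{1: 2, 3: 4}`` the children of the
            # dictorsetmaker are roughly ``[1, ':', 2, ',', 3, ':', 4]``;
            # a set literal contains no ':' operators.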
kv = []
iterator = iter(array_node.children)
for key in iterator:
op = next(iterator, None)
if op is None or op == ',':
kv.append(key) # A set.
else:
assert op == ':' # A dict.
kv.append((key, next(iterator)))
next(iterator, None) # Possible comma.
return kv
else:
return [array_node]
def exact_key_items(self):
"""
Returns a generator of tuples like dict.items(), where the key is
resolved (as a string) and the values are still LazyContexts.
"""
for key_node, value in self._items():
for key in self._defining_context.eval_node(key_node):
if precedence.is_string(key):
yield key.obj, context.LazyTreeContext(self._defining_context, value)
def __repr__(self):
return "<%s of %s>" % (self.__class__.__name__, self.atom)
class _FakeArray(ArrayLiteralContext):
def __init__(self, evaluator, container, type):
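        # Intentionally skip ArrayLiteralContext.__init__; a fake array has
        # no atom to parse.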
super(ArrayLiteralContext, self).__init__(evaluator)
self.array_type = type
self.atom = container
# TODO is this class really needed?
class ImplicitTuple(_FakeArray):
def __init__(self, evaluator, testlist):
super(ImplicitTuple, self).__init__(evaluator, testlist, 'tuple')
raise NotImplementedError
self._testlist = testlist
def _items(self):
return self._testlist.children[::2]
class FakeSequence(_FakeArray):
def __init__(self, evaluator, array_type, lazy_context_list):
"""
type should be one of "tuple", "list"
"""
super(FakeSequence, self).__init__(evaluator, None, array_type)
self._lazy_context_list = lazy_context_list
def _items(self):
raise DeprecationWarning
return self._context_list
def py__getitem__(self, index):
return self._lazy_context_list[index].infer()
def py__iter__(self):
return self._lazy_context_list
def __repr__(self):
return "<%s of %s>" % (type(self).__name__, self._lazy_context_list)
def create_evaluated_sequence_set(evaluator, *types_order, **kwargs):
"""
    ``sequence_type`` would be a keyword-only argument, which doesn't exist in
    Python 2. For backwards compatibility we pull it out of ``**kwargs`` instead.
"""
sequence_type = kwargs.pop('sequence_type')
assert not kwargs
sets = tuple(AlreadyEvaluated(types) for types in types_order)
    return set([FakeSequence(evaluator, sequence_type, sets)])
class AlreadyEvaluated(frozenset):
"""A simple container to add already evaluated objects to an array."""
def __init__(self, *args, **kwargs):
raise DeprecationWarning
def get_code(self, normalized=False):
# For debugging purposes.
return str(self)
class MergedNodes(frozenset):
def __init__(self, *args, **kwargs):
raise DeprecationWarning
class FakeDict(_FakeArray):
def __init__(self, evaluator, dct):
super(FakeDict, self).__init__(evaluator, dct, 'dict')
self._dct = dct
def py__iter__(self):
for key in self._dct:
yield context.LazyKnownContext(compiled.create(self.evaluator, key))
def py__getitem__(self, index):
return self._dct[index].infer()
def dict_values(self):
return unite(lazy_context.infer() for lazy_context in self._dct.values())
def _items(self):
raise DeprecationWarning
for key, values in self._dct.items():
            # TODO this is not proper. A key could map to multiple values?!
yield key, values[0]
def exact_key_items(self):
return self._dct.items()
class MergedArray(_FakeArray):
def __init__(self, evaluator, arrays):
        super(MergedArray, self).__init__(evaluator, arrays, arrays[-1].array_type)
self._arrays = arrays
def py__iter__(self):
for array in self._arrays:
for lazy_context in array.py__iter__():
yield lazy_context
def py__getitem__(self, index):
return unite(lazy_context.infer() for lazy_context in self.py__iter__())
def _items(self):
for array in self._arrays:
for a in array._items():
yield a
def __len__(self):
return sum(len(a) for a in self._arrays)
def unpack_tuple_to_dict(evaluator, types, exprlist):
"""
Unpacking tuple assignments in for statements and expr_stmts.
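
    As a rough sketch: for ``a, b = 1, ''`` this returns something like
    ``{'a': <set of int contexts>, 'b': <set of str contexts>}``, i.e. every
    target name mapped to its set of inferred contexts.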
"""
if exprlist.type == 'name':
return {exprlist.value: types}
elif exprlist.type == 'atom' and exprlist.children[0] in '([':
return unpack_tuple_to_dict(evaluator, types, exprlist.children[1])
elif exprlist.type in ('testlist', 'testlist_comp', 'exprlist',
'testlist_star_expr'):
dct = {}
parts = iter(exprlist.children[::2])
n = 0
for lazy_context in py__iter__(evaluator, types, exprlist):
n += 1
try:
part = next(parts)
except StopIteration:
analysis.add(evaluator, 'value-error-too-many-values', part,
message="ValueError: too many values to unpack (expected %s)" % n)
else:
dct.update(unpack_tuple_to_dict(evaluator, lazy_context.infer(), part))
has_parts = next(parts, None)
if types and has_parts is not None:
analysis.add(evaluator, 'value-error-too-few-values', has_parts,
message="ValueError: need more than %s values to unpack" % n)
return dct
elif exprlist.type == 'power' or exprlist.type == 'atom_expr':
# Something like ``arr[x], var = ...``.
        # This is something that is not yet supported and would also be
        # difficult to write into a dict.
return {}
elif exprlist.type == 'star_expr': # `a, *b, c = x` type unpackings
# Currently we're not supporting them.
return {}
raise NotImplementedError
def py__iter__(evaluator, types, node=None):
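    """
    Iterate all iterable ``types`` in parallel and yield one merged lazy
    context per position; ``zip_longest`` pads iterables of unequal length.
    """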
debug.dbg('py__iter__')
type_iters = []
for typ in types:
try:
iter_method = typ.py__iter__
except AttributeError:
if node is not None:
analysis.add(evaluator, 'type-error-not-iterable', node,
message="TypeError: '%s' object is not iterable" % typ)
else:
type_iters.append(iter_method())
for lazy_contexts in zip_longest(*type_iters):
yield context.get_merged_lazy_context(
[l for l in lazy_contexts if l is not None]
)
def py__iter__types(evaluator, types, node=None):
"""
    Calls ``py__iter__``, but ignores the ordering and just returns the union
    of all types it contains.
"""
return unite(lazy_context.infer() for lazy_context in py__iter__(evaluator, types, node))
def py__getitem__(evaluator, context, types, trailer):
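    """
    Evaluate a subscript trailer like ``x[...]`` and return the union of all
    possible result contexts.
    """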
from jedi.evaluate.representation import ClassContext
result = set()
trailer_op, node, trailer_cl = trailer.children
assert trailer_op == "["
assert trailer_cl == "]"
# special case: PEP0484 typing module, see
# https://github.com/davidhalter/jedi/issues/663
for typ in list(types):
if isinstance(typ, ClassContext):
typing_module_types = pep0484.py__getitem__(context, typ, node)
if typing_module_types is not None:
types.remove(typ)
result |= typing_module_types
if not types:
# all consumed by special cases
return result
for index in create_index_types(evaluator, context, node):
if isinstance(index, (compiled.CompiledObject, Slice)):
index = index.obj
if type(index) not in (float, int, str, unicode, slice):
# If the index is not clearly defined, we have to get all the
            # possibilities.
for typ in list(types):
                if isinstance(typ, AbstractSequence) and typ.array_type == 'dict':
types.remove(typ)
result |= typ.dict_values()
return result | py__iter__types(evaluator, types)
for typ in types:
# The actual getitem call.
try:
getitem = typ.py__getitem__
except AttributeError:
analysis.add(evaluator, 'type-error-not-subscriptable', trailer_op,
message="TypeError: '%s' object is not subscriptable" % typ)
else:
try:
result |= getitem(index)
except IndexError:
result |= py__iter__types(evaluator, set([typ]))
except KeyError:
# Must be a dict. Lists don't raise KeyErrors.
result |= typ.dict_values()
return result
def check_array_additions(evaluator, array):
""" Just a mapper function for the internal _check_array_additions """
    if array.array_type not in ('list', 'set'):
# TODO also check for dict updates
return set()
is_list = array.type == 'list'
try:
current_module = array.atom.get_parent_until()
except AttributeError:
# If there's no get_parent_until, it's a FakeSequence or another Fake
# type. Those fake types are used inside Jedi's engine. No values may
# be added to those after their creation.
return set()
return _check_array_additions(evaluator, array, current_module, is_list)
@memoize_default(default=set(), evaluator_is_first_arg=True)
@debug.increase_indent
def _check_array_additions(evaluator, compare_array, module, is_list):
"""
    Checks if an array has "add" (append, insert, extend) statements:
>>> a = [""]
>>> a.append(1)
"""
debug.dbg('Dynamic array search for %s' % compare_array, color='MAGENTA')
if not settings.dynamic_array_additions or isinstance(module, compiled.CompiledObject):
debug.dbg('Dynamic array search aborted.', color='MAGENTA')
return set()
def check_additions(arglist, add_name):
params = list(param.Arguments(evaluator, context, arglist).unpack())
result = set()
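        # ``arr.insert(index, value)`` takes the index as its first argument,
        # which is not a value to be added, so strip it.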
if add_name in ['insert']:
params = params[1:]
if add_name in ['append', 'add', 'insert']:
for key, nodes in params:
result |= unite(evaluator.eval_element(node) for node in nodes)
elif add_name in ['extend', 'update']:
for key, nodes in params:
for node in nodes:
types = evaluator.eval_element(node)
result |= py__iter__types(evaluator, types, node)
return result
from jedi.evaluate import representation as er, param
def get_execution_parent(element):
""" Used to get an Instance/FunctionExecution parent """
        if isinstance(element, ArrayLiteralContext):
node = element.atom
else:
# Is an Instance with an
# Arguments([AlreadyEvaluated([_ArrayInstance])]) inside
# Yeah... I know... It's complicated ;-)
node = list(element.var_args.argument_node[0])[0].var_args.trailer
if isinstance(node, er.InstanceElement) or node is None:
return node
return node.get_parent_until(er.FunctionExecution)
temp_param_add, settings.dynamic_params_for_other_modules = \
settings.dynamic_params_for_other_modules, False
search_names = ['append', 'extend', 'insert'] if is_list else ['add', 'update']
comp_arr_parent = get_execution_parent(compare_array)
added_types = set()
for add_name in search_names:
try:
possible_names = module.used_names[add_name]
except KeyError:
continue
else:
for name in possible_names:
                # Check if the original scope is an execution. If it is, one
                # can search for the same statement that is in the module
# dict. Executions are somewhat special in jedi, since they
# literally copy the contents of a function.
if isinstance(comp_arr_parent, er.FunctionExecution):
if comp_arr_parent.start_pos < name.start_pos < comp_arr_parent.end_pos:
name = comp_arr_parent.name_for_position(name.start_pos)
else:
# Don't check definitions that are not defined in the
# same function. This is not "proper" anyway. It also
# improves Jedi's speed for array lookups, since we
# don't have to check the whole source tree anymore.
continue
trailer = name.parent
power = trailer.parent
trailer_pos = power.children.index(trailer)
try:
execution_trailer = power.children[trailer_pos + 1]
except IndexError:
continue
else:
if execution_trailer.type != 'trailer' \
or execution_trailer.children[0] != '(' \
or execution_trailer.children[1] == ')':
continue
power = helpers.call_of_leaf(name, cut_own_trailer=True)
# InstanceElements are special, because they don't get copied,
# but have this wrapper around them.
if isinstance(comp_arr_parent, er.InstanceElement):
power = er.get_instance_el(evaluator, comp_arr_parent.instance, power)
if evaluator.recursion_detector.push_stmt(power):
# Check for recursion. Possible by using 'extend' in
# combination with function calls.
continue
try:
if compare_array in evaluator.eval_element(power):
# The arrays match. Now add the results
added_types |= check_additions(execution_trailer.children[1], add_name)
finally:
evaluator.recursion_detector.pop_stmt()
# reset settings
settings.dynamic_params_for_other_modules = temp_param_add
debug.dbg('Dynamic array result %s' % added_types, color='MAGENTA')
return added_types
def check_array_instances(evaluator, instance):
"""Used for set() and list() instances."""
if not settings.dynamic_array_additions:
return instance.var_args
ai = _ArrayInstance(evaluator, instance)
from jedi.evaluate import param
return param.Arguments(evaluator, instance, [AlreadyEvaluated([ai])])
class _ArrayInstance(object):
"""
    Used for calls to ``set()`` and ``list()``.
This is definitely a hack, but a good one :-)
It makes it possible to use set/list conversions.
In contrast to Array, ListComprehension and all other iterable types, this
is something that is only used inside `evaluate/compiled/fake/builtins.py`
and therefore doesn't need `names_dicts`, `py__bool__` and so on, because
we don't use these operations in `builtins.py`.
"""
def __init__(self, evaluator, instance):
self.evaluator = evaluator
self.instance = instance
self.var_args = instance.var_args
def py__iter__(self):
raise NotImplementedError
try:
_, first_nodes = next(self.var_args.unpack())
except StopIteration:
types = set()
else:
types = unite(self.evaluator.eval_element(node) for node in first_nodes)
for types in py__iter__(self.evaluator, types, first_nodes[0]):
yield types
module = self.var_args.get_parent_until()
if module is None:
return
is_list = str(self.instance.name) == 'list'
additions = _check_array_additions(self.evaluator, self.instance, module, is_list)
if additions:
yield additions
class Slice(context.Context):
def __init__(self, context, start, stop, step):
super(Slice, self).__init__(
context.evaluator,
parent_context=context.evaluator.BUILTINS
)
self._context = context
# all of them are either a Precedence or None.
self._start = start
self._stop = stop
self._step = step
@property
def obj(self):
"""
        Imitate CompiledObject.obj behavior and return a builtin ``slice()``
object.
"""
def get(element):
if element is None:
return None
result = self._context.eval_node(element)
if len(result) != 1:
                # For simplicity, we want slices to be clearly defined with
# one type. Otherwise we will return an empty slice object.
raise IndexError
try:
return list(result)[0].obj
except AttributeError:
return None
try:
return slice(get(self._start), get(self._stop), get(self._step))
except IndexError:
return slice(None, None, None)
def create_index_types(evaluator, context, index):
"""
Handles slices in subscript nodes.
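
    A rough sketch of the parser shape: for ``array[1:3:2]`` the subscript
    node's children are ``[1, ':', 3, sliceop]``, so up to three boundary
    nodes are collected below and missing ones are padded with ``None``.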
"""
if index == ':':
# Like array[:]
return set([Slice(context, None, None, None)])
elif tree.is_node(index, 'subscript'): # subscript is a slice operation.
# Like array[:3]
result = []
for el in index.children:
if el == ':':
if not result:
result.append(None)
elif tree.is_node(el, 'sliceop'):
if len(el.children) == 2:
result.append(el.children[1])
else:
result.append(el)
result += [None] * (3 - len(result))
return set([Slice(context, *result)])
# No slices
return context.eval_node(index)