mirror of
https://github.com/davidhalter/jedi.git
synced 2025-12-07 14:34:31 +08:00
Add a test to check if numpy tensorflow stuff is now cached, see #1116
This commit is contained in:
@@ -15,6 +15,8 @@ Changelog

- Completion for "proxies" works now. These are classes that have a
  ``__getattr__(self, name)`` method that does a ``return getattr(x, name)``.
- Understanding of Pytest fixtures.
- Tensorflow, Numpy and Pandas completions should now be about 4-10x faster
  after loading them initially.
- Big **Script API Changes**:
  - The line and column parameters of ``jedi.Script`` are now deprecated
  - ``completions`` deprecated, use ``complete`` instead
@@ -1,16 +1,17 @@
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from itertools import count
|
||||
|
||||
import pytest
|
||||
|
||||
from . import helpers
|
||||
from . import run
|
||||
from . import refactor
|
||||
|
||||
import jedi
|
||||
from jedi.api.environment import InterpreterEnvironment
|
||||
from jedi.inference.compiled.value import create_from_access_path
|
||||
from jedi.inference.imports import _load_python_module
|
||||
from jedi.file_io import KnownContentFileIO
|
||||
from jedi.inference.base_value import ValueSet
|
||||
|
||||
|
||||
def pytest_addoption(parser):
|
||||
@@ -144,3 +145,16 @@ def create_compiled_object(inference_state):
|
||||
inference_state,
|
||||
inference_state.compiled_subprocess.create_simple_object(obj)
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def module_injector():
    """Return a callable that registers a fake module with an inference state.

    The callable takes ``(inference_state, names, code)``, builds an in-memory
    module from ``code`` under a unique synthetic path, loads it, and inserts
    it into the inference state's module cache under the dotted-name tuple
    ``names`` so later imports resolve to it.
    """
    serial = count()

    def inject(inference_state, names, code):
        # ``names`` must be a tuple of dotted-name parts, e.g. ('numpy',).
        assert isinstance(names, tuple)
        path = '/foo/bar/module-injector-%s.py' % next(serial)
        file_io = KnownContentFileIO(path, code)
        module = _load_python_module(inference_state, file_io, names)
        inference_state.module_cache.add(names, ValueSet([module]))

    return inject
|
||||
|
||||
@@ -392,3 +392,33 @@ def test_fuzzy_match():
|
||||
|
||||
def test_ellipsis_completion(Script):
    """An Ellipsis literal (``...``) must not offer any completions."""
    completions = Script('...').complete()
    assert completions == []
|
||||
|
||||
|
||||
def test_completion_cache(Script, module_injector):
    """
    For some modules like numpy, tensorflow or pandas we cache docstrings and
    types to avoid them slowing us down, because they are huge.
    """
    script = Script('import numpy; numpy.foo')
    module_injector(script._inference_state, ('numpy',), 'def foo(a): "doc"')
    c, = script.complete()
    assert c.name == 'foo'
    assert c.type == 'function'
    assert c.docstring() == 'foo(a)\n\ndoc'

    # Replace the injected module with a completely different implementation
    # under the same name: ``foo`` is now a class, not a function.
    code = dedent('''\
        class foo:
            'doc2'
            def __init__(self):
                pass
        ''')
    script = Script('import numpy; numpy.foo')
    module_injector(script._inference_state, ('numpy',), code)
    # The outputs should still be the same, because name/type/docstring come
    # from the cache populated by the first completion above.
    c, = script.complete()
    assert c.name == 'foo'
    assert c.type == 'function'
    assert c.docstring() == 'foo(a)\n\ndoc'
    # Actually inferring the value bypasses the completion cache, so the new
    # class definition is visible here.
    cls, = c.infer()
    assert cls.type == 'class'
    assert cls.docstring() == 'foo()\n\ndoc2'
|
||||
|
||||
Reference in New Issue
Block a user