Mirror of https://github.com/davidhalter/jedi.git
synced 2025-12-08 14:54:47 +08:00
Compare commits
135 Commits
Commit SHA1s:
51f4a99a1e 93c14d2e6e 57aefed6ea 8a4b079d0f 62cbcb0844 d8420d0f72 886279fb6d ff3a7f367f 1f70e3301e
a34c348a55 972123c9c9 6455a14841 8d9e3ab3a7 048173e467 1947e7dd56 01d8da8f73 6ea5ad7b19 cd4ca74d7a
67d6262f45 5f19237a3e f2444b4be5 7028bbb5d5 3699ba0aa7 72d34f3d7d a28bd24bef 54cb64292c d421b920fa
c137eb6918 d67facc922 7023b645b1 b5120cc90b 483e78993d 3dbcd2c6de ca36fcfa4b 825c6b93bf c22585c6f2
431d1e104d adcd6ade8b 32a1dd33a6 9ea01bcc69 77cfefc1cc ff7d6c6e4c 6ee33bd385 0fbc2aafa3 fe7e350051
b814ca2951 aae2a8e3ed 67e0bec597 c71e06fcb3 bbd5bcf3ca d888c1b266 83d0e23800 dc4e48d7c7 664b10a5c6
36a4b7d48c b0025ee6ba fac0b7f068 aeadba7cad fd0e6aed96 c89fa8e927 00e23ddcee 66e97e5b93 0f5ea3de5f
e47bbbb851 eaab706038 41455480be 0a670d10dd 6b73d5c1bf a3fed3b6a6 66c52b4bc7 89f9a3a7f1 3a30008cc4
b0d5fc2bd0 6e5db3f479 85780111e0 0ba48bbb9d 26f7878d97 8027e1b162 78a53bf005 8485df416d 94e78340e1
f454989859 e779f23ac7 3c40363a39 a6cf2c338a 2a7311c1a0 81427e4408 804e4b0ca2 3475ccfbd3 9723a0eed0
658f80fa1e 31c2c508c3 6c9cab2f8e 0a6ad1010c 3a60943f6e 4d1e00c3ab e15f51ecc1 eaa66b3dbb 239d9e0b22
40e1e3f560 c243608ac6 e25750ecef 1a306fddbf ec425ed2af fa1e9ce9a7 8447d7f3e4 27e13e4072 9fd4aab5da
8b0d391ac1 fa0c064841 9e2089ef1e 85c7f14562 695f0832b4 cfb7e300af f5faca014f 7ff0d2d595 c28b337278
128695bd8e e194ab5951 c0ac341750 486695d479 8cb1b76ea4 e7755651a4 0c7384edc3 8f15f38949 96af7e4077
929fa9b452 08c5ab821f b6f761f13c 72cf41f4c9 3602c10916 601bfb3493 021f081d8a 54af6fa86d f193ae67e9
.github/workflows/ci.yml (vendored, 16 changes)

@@ -7,20 +7,20 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-20.04, windows-2019]
-        python-version: ["3.10", "3.9", "3.8", "3.7", "3.6"]
+        python-version: ["3.11", "3.10", "3.9", "3.8", "3.7", "3.6"]
-        environment: ['3.8', '3.10', '3.9', '3.7', '3.6', 'interpreter']
+        environment: ['3.8', '3.11', '3.10', '3.9', '3.7', '3.6', 'interpreter']
     steps:
     - name: Checkout code
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
       with:
         submodules: recursive

-    - uses: actions/setup-python@v2
+    - uses: actions/setup-python@v4
       if: ${{ matrix.environment != 'interpreter' }}
       with:
         python-version: ${{ matrix.environment }}

-    - uses: actions/setup-python@v2
+    - uses: actions/setup-python@v4
       with:
         python-version: ${{ matrix.python-version }}

@@ -36,7 +36,7 @@ jobs:
     runs-on: ubuntu-20.04
     steps:
     - name: Checkout code
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
      with:
        submodules: recursive

@@ -46,14 +46,14 @@ jobs:
     - name: Run tests
       run: |
         python -m flake8 jedi setup.py
-        python -m mypy jedi sith.py
+        python -m mypy jedi sith.py setup.py

   coverage:
     runs-on: ubuntu-20.04

     steps:
     - name: Checkout code
-      uses: actions/checkout@v2
+      uses: actions/checkout@v3
       with:
         submodules: recursive
.gitignore (vendored, 1 change)

@@ -14,3 +14,4 @@ record.json
 /.pytest_cache
 /.mypy_cache
 /venv/
+.nvimrc
@@ -1,2 +1,11 @@
+version: 2
+
 python:
-  pip_install: true
+  install:
+    - method: pip
+      path: .
+      extra_requirements:
+        - docs
+
+submodules:
+  include: all
@@ -1,4 +1,4 @@
 Main Authors
 ------------

 - David Halter (@davidhalter) <davidhalter88@gmail.com>

@@ -62,6 +62,7 @@ Code Contributors
 - Andrii Kolomoiets (@muffinmad)
 - Leo Ryu (@Leo-Ryu)
 - Joseph Birkner (@josephbirkner)
+- Márcio Mazza (@marciomazza)

 And a few more "anonymous" contributors.

@@ -6,6 +6,25 @@ Changelog
 Unreleased
 ++++++++++

+0.19.0 (2023-07-29)
++++++++++++++++++++
+
+- Python 3.11 support
+- Massive improvements in performance for ``Interpreter`` (e.g. IPython) users.
+  This especially affects ``pandas`` users with large datasets.
+- Add ``jedi.settings.allow_unsafe_interpreter_executions`` to make it easier
+  for IPython users to avoid unsafe executions.
+
+0.18.2 (2022-11-21)
++++++++++++++++++++
+
+- Added dataclass-equivalent for attrs.define
+- Find fixtures from Pytest entrypoints; Examples of pytest plugins installed
+  like this are pytest-django, pytest-sugar and Faker.
+- Fixed Project.search, when a venv was involved, which is why for example
+  `:Pyimport django.db` did not work in some cases in jedi-vim.
+- And many smaller bugfixes
+
 0.18.1 (2021-11-17)
 +++++++++++++++++++

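The ``allow_unsafe_interpreter_executions`` setting added above is the one the ``Interpreter`` hunk further down reads into ``allow_unsafe_executions``. A minimal sketch of how a REPL integration might use it; the namespace and the completion prefix are invented, and results vary by environment:

import jedi

# Disable executions jedi considers unsafe (custom __getattr__/__getitem__,
# properties, ...) before asking for interpreter completions.
jedi.settings.allow_unsafe_interpreter_executions = False

namespace = {'numbers': [1, 2, 3]}
completions = jedi.Interpreter('numbers.app', [namespace]).complete()
print([c.name for c in completions])  # e.g. ['append']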
@@ -42,7 +42,7 @@ Jedi can currently be used with the following editors/projects:
 - `GNOME Builder`_ (with support for GObject Introspection)
 - Gedit (gedi_)
 - wdb_ - Web Debugger
-- `Eric IDE`_ (Available as a plugin)
+- `Eric IDE`_
 - `IPython 6.0.0+ <https://ipython.readthedocs.io/en/stable/whatsnew/version6.html>`_
 - `xonsh shell <https://xon.sh/contents.html>`_ has `jedi extension <https://xon.sh/xontribs.html#jedi>`_

@@ -51,7 +51,8 @@ and many more!
 There are a few language servers that use Jedi:

 - `jedi-language-server <https://github.com/pappasam/jedi-language-server>`_
-- `python-language-server <https://github.com/palantir/python-language-server>`_
+- `python-language-server <https://github.com/palantir/python-language-server>`_ (currently unmaintained)
+- `python-lsp-server <https://github.com/python-lsp/python-lsp-server>`_ (fork from python-language-server)
 - `anakin-language-server <https://github.com/muffinmad/anakin-language-server>`_

 Here are some pictures taken from jedi-vim_:
SECURITY.md (new file, 9 lines)

@@ -0,0 +1,9 @@
+# Security Policy
+
+If security issues arise, we will try to fix those as soon as possible.
+
+Due to Jedi's nature, Security Issues will probably be extremely rare, but we will neverless treat them seriously.
+
+## Reporting Security Problems
+
+If you need to report a security vulnerability, please send an email to davidhalter88@gmail.com. Typically, I will respond in the next few business days.
@@ -35,7 +35,7 @@ to write my own version of a completion engine.
 The first idea was to execute non-dangerous code. But I soon realized, that
 this would not work. So I started to build a static analysis tool.
 The biggest problem that I had at the time was that I did not know a thing
-about parsers.I did not did not even know the word static analysis. It turns
+about parsers. I did not even know the word static analysis. It turns
 out they are the foundation of a good static analysis tool. I of course did not
 know that and tried to write my own poor version of a parser that I ended up
 throwing away two years later.

@@ -53,7 +53,7 @@ quick and is pretty much feature complete.

 --------

-I will leave you with a small annectote that happend in 2012, if I remember
+I will leave you with a small anecdote that happened in 2012, if I remember
 correctly. After I explained Guido van Rossum, how some parts of my
 auto-completion work, he said:

@@ -77,7 +77,7 @@ Performance Issues

 Importing ``numpy`` can be quite slow sometimes, as well as loading the
 builtins the first time. If you want to speed things up, you could preload
-libriaries in |jedi|, with :func:`.preload_module`. However, once loaded, this
+libraries in |jedi|, with :func:`.preload_module`. However, once loaded, this
 should not be a problem anymore. The same is true for huge modules like
 ``PySide``, ``wx``, ``tensorflow``, ``pandas``, etc.

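A hedged sketch of the preloading this section recommends; ``preload_module`` is re-exported from the top-level ``jedi`` package (see the ``__init__.py`` hunk below), and the module names here are only examples:

import jedi

# Warm jedi's caches for heavy libraries once at startup, so the first
# completion request does not pay the whole import/analysis cost.
jedi.preload_module('numpy', 'pandas')

# Later completions on these modules reuse the cached analysis
# (assuming the libraries are installed).
print([c.name for c in jedi.Script('import numpy\nnumpy.arr').complete()])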
@@ -4,7 +4,7 @@ Using Jedi
 ==========

 |jedi| is can be used with a variety of :ref:`plugins <editor-plugins>`,
-`language servers <language-servers>` and other software.
+:ref:`language servers <language-servers>` and other software.
 It is also possible to use |jedi| in the :ref:`Python shell or with IPython
 <repl-completion>`.

@@ -16,7 +16,8 @@ Language Servers
 --------------

 - `jedi-language-server <https://github.com/pappasam/jedi-language-server>`_
-- `python-language-server <https://github.com/palantir/python-language-server>`_
+- `python-language-server <https://github.com/palantir/python-language-server>`_ (currently unmaintained)
+- `python-lsp-server <https://github.com/python-lsp/python-lsp-server>`_ (fork from python-language-server)
 - `anakin-language-server <https://github.com/muffinmad/anakin-language-server>`_

 .. _editor-plugins:

@@ -86,7 +87,7 @@ Gedit
 Eric IDE
 ~~~~~~~~

-- `Eric IDE`_ (Available as a plugin)
+- `Eric IDE`_

 Web Debugger
 ~~~~~~~~~~~~
|||||||
@@ -27,7 +27,7 @@ ad
|
|||||||
load
|
load
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__version__ = '0.18.1'
|
__version__ = '0.19.0'
|
||||||
|
|
||||||
from jedi.api import Script, Interpreter, set_debug_function, preload_module
|
from jedi.api import Script, Interpreter, set_debug_function, preload_module
|
||||||
from jedi import settings
|
from jedi import settings
|
||||||
|
|||||||
@@ -206,6 +206,7 @@ class Script:
|
|||||||
before magic methods and name mangled names that start with ``__``.
|
before magic methods and name mangled names that start with ``__``.
|
||||||
:rtype: list of :class:`.Completion`
|
:rtype: list of :class:`.Completion`
|
||||||
"""
|
"""
|
||||||
|
self._inference_state.reset_recursion_limitations()
|
||||||
with debug.increase_indent_cm('complete'):
|
with debug.increase_indent_cm('complete'):
|
||||||
completion = Completion(
|
completion = Completion(
|
||||||
self._inference_state, self._get_module_context(), self._code_lines,
|
self._inference_state, self._get_module_context(), self._code_lines,
|
||||||
@@ -215,6 +216,7 @@ class Script:
|
|||||||
|
|
||||||
@validate_line_column
|
@validate_line_column
|
||||||
def infer(self, line=None, column=None, *, only_stubs=False, prefer_stubs=False):
|
def infer(self, line=None, column=None, *, only_stubs=False, prefer_stubs=False):
|
||||||
|
self._inference_state.reset_recursion_limitations()
|
||||||
"""
|
"""
|
||||||
Return the definitions of under the cursor. It is basically a wrapper
|
Return the definitions of under the cursor. It is basically a wrapper
|
||||||
around Jedi's type inference.
|
around Jedi's type inference.
|
||||||
@@ -260,6 +262,7 @@ class Script:
|
|||||||
@validate_line_column
|
@validate_line_column
|
||||||
def goto(self, line=None, column=None, *, follow_imports=False, follow_builtin_imports=False,
|
def goto(self, line=None, column=None, *, follow_imports=False, follow_builtin_imports=False,
|
||||||
only_stubs=False, prefer_stubs=False):
|
only_stubs=False, prefer_stubs=False):
|
||||||
|
self._inference_state.reset_recursion_limitations()
|
||||||
"""
|
"""
|
||||||
Goes to the name that defined the object under the cursor. Optionally
|
Goes to the name that defined the object under the cursor. Optionally
|
||||||
you can follow imports.
|
you can follow imports.
|
||||||
@@ -365,10 +368,17 @@ class Script:
|
|||||||
|
|
||||||
:rtype: list of :class:`.Name`
|
:rtype: list of :class:`.Name`
|
||||||
"""
|
"""
|
||||||
|
self._inference_state.reset_recursion_limitations()
|
||||||
definitions = self.goto(line, column, follow_imports=True)
|
definitions = self.goto(line, column, follow_imports=True)
|
||||||
if definitions:
|
if definitions:
|
||||||
return definitions
|
return definitions
|
||||||
leaf = self._module_node.get_leaf_for_position((line, column))
|
leaf = self._module_node.get_leaf_for_position((line, column))
|
||||||
|
|
||||||
|
if leaf is not None and leaf.end_pos == (line, column) and leaf.type == 'newline':
|
||||||
|
next_ = leaf.get_next_leaf()
|
||||||
|
if next_ is not None and next_.start_pos == leaf.end_pos:
|
||||||
|
leaf = next_
|
||||||
|
|
||||||
if leaf is not None and leaf.type in ('keyword', 'operator', 'error_leaf'):
|
if leaf is not None and leaf.type in ('keyword', 'operator', 'error_leaf'):
|
||||||
def need_pydoc():
|
def need_pydoc():
|
||||||
if leaf.value in ('(', ')', '[', ']'):
|
if leaf.value in ('(', ')', '[', ']'):
|
||||||
@@ -400,6 +410,7 @@ class Script:
|
|||||||
the current module only.
|
the current module only.
|
||||||
:rtype: list of :class:`.Name`
|
:rtype: list of :class:`.Name`
|
||||||
"""
|
"""
|
||||||
|
self._inference_state.reset_recursion_limitations()
|
||||||
|
|
||||||
def _references(include_builtins=True, scope='project'):
|
def _references(include_builtins=True, scope='project'):
|
||||||
if scope not in ('project', 'file'):
|
if scope not in ('project', 'file'):
|
||||||
@@ -434,6 +445,7 @@ class Script:
|
|||||||
|
|
||||||
:rtype: list of :class:`.Signature`
|
:rtype: list of :class:`.Signature`
|
||||||
"""
|
"""
|
||||||
|
self._inference_state.reset_recursion_limitations()
|
||||||
pos = line, column
|
pos = line, column
|
||||||
call_details = helpers.get_signature_details(self._module_node, pos)
|
call_details = helpers.get_signature_details(self._module_node, pos)
|
||||||
if call_details is None:
|
if call_details is None:
|
||||||
@@ -553,6 +565,7 @@ class Script:
|
|||||||
return parso_to_jedi_errors(self._inference_state.grammar, self._module_node)
|
return parso_to_jedi_errors(self._inference_state.grammar, self._module_node)
|
||||||
|
|
||||||
def _names(self, all_scopes=False, definitions=True, references=False):
|
def _names(self, all_scopes=False, definitions=True, references=False):
|
||||||
|
self._inference_state.reset_recursion_limitations()
|
||||||
# Set line/column to a random position, because they don't matter.
|
# Set line/column to a random position, because they don't matter.
|
||||||
module_context = self._get_module_context()
|
module_context = self._get_module_context()
|
||||||
defs = [
|
defs = [
|
||||||
@@ -581,7 +594,7 @@ class Script:
|
|||||||
@validate_line_column
|
@validate_line_column
|
||||||
def extract_variable(self, line, column, *, new_name, until_line=None, until_column=None):
|
def extract_variable(self, line, column, *, new_name, until_line=None, until_column=None):
|
||||||
"""
|
"""
|
||||||
Moves an expression to a new statemenet.
|
Moves an expression to a new statement.
|
||||||
|
|
||||||
For example if you have the cursor on ``foo`` and provide a
|
For example if you have the cursor on ``foo`` and provide a
|
||||||
``new_name`` called ``bar``::
|
``new_name`` called ``bar``::
|
||||||
@@ -708,7 +721,6 @@ class Interpreter(Script):
|
|||||||
:param namespaces: A list of namespace dictionaries such as the one
|
:param namespaces: A list of namespace dictionaries such as the one
|
||||||
returned by :func:`globals` and :func:`locals`.
|
returned by :func:`globals` and :func:`locals`.
|
||||||
"""
|
"""
|
||||||
_allow_descriptor_getattr_default = True
|
|
||||||
|
|
||||||
def __init__(self, code, namespaces, *, project=None, **kwds):
|
def __init__(self, code, namespaces, *, project=None, **kwds):
|
||||||
try:
|
try:
|
||||||
@@ -729,7 +741,16 @@ class Interpreter(Script):
|
|||||||
super().__init__(code, environment=environment, project=project, **kwds)
|
super().__init__(code, environment=environment, project=project, **kwds)
|
||||||
|
|
||||||
self.namespaces = namespaces
|
self.namespaces = namespaces
|
||||||
self._inference_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default
|
self._inference_state.allow_unsafe_executions = \
|
||||||
|
settings.allow_unsafe_interpreter_executions
|
||||||
|
# Dynamic params search is important when we work on functions that are
|
||||||
|
# called by other pieces of code. However for interpreter completions
|
||||||
|
# this is not important at all, because the current code is always new
|
||||||
|
# and will never be called by something.
|
||||||
|
# Also sometimes this logic goes a bit too far like in
|
||||||
|
# https://github.com/ipython/ipython/issues/13866, where it takes
|
||||||
|
# seconds to do a simple completion.
|
||||||
|
self._inference_state.do_dynamic_params_search = False
|
||||||
|
|
||||||
@cache.memoize_method
|
@cache.memoize_method
|
||||||
def _get_module_context(self):
|
def _get_module_context(self):
|
||||||
|
|||||||
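The hunks above touch most of Script's public entry points (complete, infer, goto, help, get_references, get_signatures), each of which now resets the recursion limitations first. A small usage sketch of that public API; the source string, path and positions are made up for illustration:

import jedi

source = "import json\njson.lo"
script = jedi.Script(source, path='example.py')

# Completion at the end of line 2 ("json.lo").
print([c.name for c in script.complete(2, 7)])   # e.g. ['load', 'loads']

# Type inference on the `json` name in line 1.
print(script.infer(1, 8))                        # the stdlib json module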
@@ -105,8 +105,7 @@ class BaseName:
             # Compiled modules should not return a module path even if they
             # have one.
             path: Optional[Path] = self._get_module_context().py__file__()
-            if path is not None:
-                return path
+            return path

         return None

@@ -15,9 +15,9 @@ from jedi.inference.compiled.subprocess import CompiledSubprocess, \

 import parso

-_VersionInfo = namedtuple('VersionInfo', 'major minor micro')
+_VersionInfo = namedtuple('VersionInfo', 'major minor micro')  # type: ignore[name-match]

-_SUPPORTED_PYTHONS = ['3.10', '3.9', '3.8', '3.7', '3.6']
+_SUPPORTED_PYTHONS = ['3.11', '3.10', '3.9', '3.8', '3.7', '3.6']
 _SAFE_PATHS = ['/usr/bin', '/usr/local/bin']
 _CONDA_VAR = 'CONDA_PREFIX'
 _CURRENT_VERSION = '%s.%s' % (sys.version_info.major, sys.version_info.minor)

@@ -384,8 +384,7 @@ def _get_executable_path(path, safe=True):


 def _get_executables_from_windows_registry(version):
-    # https://github.com/python/typeshed/pull/3794 adds winreg
-    import winreg  # type: ignore[import]
+    import winreg

     # TODO: support Python Anaconda.
     sub_keys = [
@@ -23,7 +23,7 @@ class RefactoringError(_JediError):
     Refactorings can fail for various reasons. So if you work with refactorings
     like :meth:`.Script.rename`, :meth:`.Script.inline`,
     :meth:`.Script.extract_variable` and :meth:`.Script.extract_function`, make
-    sure to catch these. The descriptions in the errors are ususally valuable
+    sure to catch these. The descriptions in the errors are usually valuable
     for end users.

     A typical ``RefactoringError`` would tell the user that inlining is not
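A hedged sketch of the catch-the-error pattern this docstring recommends; the code, path and new name are invented, and RefactoringError is assumed to live in jedi.api.exceptions:

import jedi
from jedi.api.exceptions import RefactoringError

script = jedi.Script("value = 1\nprint(value)\n", path='example.py')
try:
    refactoring = script.rename(1, 0, new_name='result')
    print(refactoring.get_diff())      # show the proposed change instead of applying it
except RefactoringError as e:
    # The error message is meant to be shown to the end user as-is.
    print(f"Rename failed: {e}")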
@@ -280,7 +280,7 @@ class CallDetails:
     def count_positional_arguments(self):
         count = 0
         for star_count, key_start, had_equal in self._list_arguments()[:-1]:
-            if star_count:
+            if star_count or key_start:
                 break
             count += 1
         return count

@@ -306,7 +306,7 @@ def _iter_arguments(nodes, position):
             first = node.children[0]
             second = node.children[1]
             if second == '=':
-                if second.start_pos < position:
+                if second.start_pos < position and first.type == 'name':
                     yield 0, first.value, True
             else:
                 yield 0, remove_after_pos(first), False
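The count_positional_arguments fix stops counting as soon as either a starred argument or a keyword argument appears. An illustrative, standalone sketch of that rule; it models arguments as plain tuples rather than jedi's internal parser nodes:

def count_positional_arguments(arguments):
    # arguments describe a call such as f(1, 2, *rest, x=3) as
    # [(0, None), (0, None), (1, None), (0, 'x')]: (star_count, keyword_name).
    count = 0
    for star_count, keyword_name in arguments[:-1]:  # last item is the one being typed
        if star_count or keyword_name:
            # *args/**kwargs and keyword arguments end the positional prefix.
            break
        count += 1
    return count

print(count_positional_arguments([(0, None), (0, None), (0, 'x'), (0, None)]))  # 2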
@@ -5,8 +5,7 @@ from typing import Dict, Optional
 from jedi.inference.names import AbstractArbitraryName

 try:
-    # https://github.com/python/typeshed/pull/4351 adds pydoc_data
-    from pydoc_data import topics  # type: ignore[import]
+    from pydoc_data import topics
     pydoc_topics: Optional[Dict[str, str]] = topics.topics
 except ImportError:
     # Python 3.6.8 embeddable does not have pydoc_data.
@@ -352,9 +352,8 @@ class Project:
         # 3. Search for modules on sys.path
         sys_path = [
             p for p in self._get_sys_path(inference_state)
-            # Exclude folders that are handled by recursing of the Python
-            # folders.
-            if not p.startswith(str(self._path))
+            # Exclude the current folder which is handled by recursing the folders.
+            if p != self._path
         ]
         names = list(iter_module_names(inference_state, empty_module_context, sys_path))
         yield from search_in_module(

@@ -433,7 +432,6 @@ def get_default_project(path=None):
             probable_path = dir

     if probable_path is not None:
-        # TODO search for setup.py etc
         return Project(probable_path)

     if first_no_init_file is not None:
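Project.search is the public entry point that uses the sys_path filtering shown above. A hedged sketch; the project directory and the search string are placeholders:

from pathlib import Path
import jedi

# Point jedi at a project root; search() yields Name objects for matching
# modules, classes and functions in the project and on its sys.path.
project = jedi.Project(Path('~/src/my_project').expanduser())
for name in project.search('json.dumps'):
    print(name.module_path, name.line, name.description)
    break  # the generator can be large, stop after the first hit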
@@ -5,6 +5,7 @@ from typing import Dict, Iterable, Tuple
 from parso import split_lines

 from jedi.api.exceptions import RefactoringError
+from jedi.inference.value.namespace import ImplicitNSName

 EXPRESSION_PARTS = (
     'or_test and_test not_test comparison '

@@ -42,11 +43,17 @@ class ChangedFile:
         if self._from_path is None:
             from_p = ''
         else:
-            from_p = self._from_path.relative_to(project_path)
+            try:
+                from_p = self._from_path.relative_to(project_path)
+            except ValueError:  # Happens it the path is not on th project_path
+                from_p = self._from_path
         if self._to_path is None:
             to_p = ''
         else:
-            to_p = self._to_path.relative_to(project_path)
+            try:
+                to_p = self._to_path.relative_to(project_path)
+            except ValueError:
+                to_p = self._to_path
         diff = difflib.unified_diff(
             old_lines, new_lines,
             fromfile=str(from_p),

@@ -96,7 +103,12 @@ class Refactoring:
                 to_path=calculate_to_path(path),
                 module_node=next(iter(map_)).get_root_node(),
                 node_to_str_map=map_
-            ) for path, map_ in sorted(self._file_to_node_changes.items())
+            )
+            # We need to use `or`, because the path can be None
+            for path, map_ in sorted(
+                self._file_to_node_changes.items(),
+                key=lambda x: x[0] or Path("")
+            )
         }

     def get_renames(self) -> Iterable[Tuple[Path, Path]]:

@@ -110,7 +122,7 @@ class Refactoring:
         project_path = self._inference_state.project.path
         for from_, to in self.get_renames():
             text += 'rename from %s\nrename to %s\n' \
-                % (from_.relative_to(project_path), to.relative_to(project_path))
+                % (_try_relative_to(from_, project_path), _try_relative_to(to, project_path))

         return text + ''.join(f.get_diff() for f in self.get_changed_files().values())

@@ -140,13 +152,16 @@ def rename(inference_state, definitions, new_name):
         raise RefactoringError("There is no name under the cursor")

     for d in definitions:
+        # This private access is ok in a way. It's not public to
+        # protect Jedi users from seeing it.
         tree_name = d._name.tree_name
-        if d.type == 'module' and tree_name is None:
-            p = None if d.module_path is None else Path(d.module_path)
+        if d.type == 'module' and tree_name is None and d.module_path is not None:
+            p = Path(d.module_path)
             file_renames.add(_calculate_rename(p, new_name))
+        elif isinstance(d._name, ImplicitNSName):
+            for p in d._name._value.py__path__():
+                file_renames.add(_calculate_rename(Path(p), new_name))
         else:
-            # This private access is ok in a way. It's not public to
-            # protect Jedi users from seeing it.
             if tree_name is not None:
                 fmap = file_tree_name_map.setdefault(d.module_path, {})
                 fmap[tree_name] = tree_name.prefix + new_name

@@ -240,3 +255,10 @@ def _remove_indent_of_prefix(prefix):
     Removes the last indentation of a prefix, e.g. " \n \n " becomes " \n \n".
     """
     return ''.join(split_lines(prefix, keepends=True)[:-1])
+
+
+def _try_relative_to(path: Path, base: Path) -> Path:
+    try:
+        return path.relative_to(base)
+    except ValueError:
+        return path
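The new _try_relative_to helper exists because pathlib.Path.relative_to raises ValueError when the path is not underneath the base directory. A standalone demonstration with invented paths:

from pathlib import Path

base = Path('/home/user/project')
inside = Path('/home/user/project/src/app.py')
outside = Path('/usr/lib/python3.11/json/__init__.py')

print(inside.relative_to(base))    # src/app.py

try:
    outside.relative_to(base)
except ValueError:
    # Fall back to the absolute path, which is exactly what _try_relative_to does.
    print(outside)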
@@ -36,8 +36,11 @@ def complete_dict(module_context, code_lines, leaf, position, string, fuzzy):
         string = cut_value_at_position(leaf, position)

     context = module_context.create_context(bracket_leaf)
-    before_bracket_leaf = bracket_leaf.get_previous_leaf()
-    if before_bracket_leaf.type in ('atom', 'trailer', 'name'):
+    before_node = before_bracket_leaf = bracket_leaf.get_previous_leaf()
+    if before_node in (')', ']', '}'):
+        before_node = before_node.parent
+    if before_node.type in ('atom', 'trailer', 'name'):
         values = infer_call_of_leaf(context, before_bracket_leaf)
         return list(_completions_for_dicts(
             module_context.inference_state,
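complete_dict backs jedi's dict-key completion; the new before_node handling lets it also work when the subscript follows a closing bracket. A hedged interactive-style sketch; the namespace is invented and the exact completion names depend on the jedi version:

import jedi

namespace = {'config': {'host': 'localhost', 'port': 8080}}

# Completing right after the opening bracket should offer the literal keys.
completions = jedi.Interpreter("config['", [namespace]).complete()
print([c.name for c in completions])  # e.g. the 'host' and 'port' keys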
@@ -90,7 +90,7 @@ class InferenceState:
         self.compiled_subprocess = environment.get_inference_state_subprocess(self)
         self.grammar = environment.get_grammar()

-        self.latest_grammar = parso.load_grammar(version='3.7')
+        self.latest_grammar = parso.load_grammar(version='3.11')
         self.memoize_cache = {}  # for memoize decorators
         self.module_cache = imports.ModuleCache()  # does the job of `sys.modules`.
         self.stub_module_cache = {}  # Dict[Tuple[str, ...], Optional[ModuleValue]]

@@ -99,10 +99,11 @@ class InferenceState:
         self.mixed_cache = {}  # see `inference.compiled.mixed._create()`
         self.analysis = []
         self.dynamic_params_depth = 0
+        self.do_dynamic_params_search = settings.dynamic_params
         self.is_analysis = False
         self.project = project
         self.access_cache = {}
-        self.allow_descriptor_getattr = False
+        self.allow_unsafe_executions = False
         self.flow_analysis_enabled = True

         self.reset_recursion_limitations()

@@ -125,7 +126,7 @@ class InferenceState:
     @inference_state_function_cache()
     def builtins_module(self):
         module_name = 'builtins'
-        builtins_module, = self.import_module((module_name,), sys_path=())
+        builtins_module, = self.import_module((module_name,), sys_path=[])
         return builtins_module

     @property  # type: ignore[misc]
@@ -297,7 +297,7 @@ class Value(HelperValueMixin):
         just the `_T` generic parameter.

         `value_set`: represents the actual argument passed to the parameter
-        we're inferrined for, or (for recursive calls) their types. In the
+        we're inferred for, or (for recursive calls) their types. In the
         above example this would first be the representation of the list
         `[1]` and then, when recursing, just of `1`.
         """
@@ -9,7 +9,7 @@ import re
 import builtins
 import typing
 from pathlib import Path
-from typing import Optional
+from typing import Optional, Tuple

 from jedi.inference.compiled.getattr_static import getattr_static

@@ -40,7 +40,7 @@ NOT_CLASS_TYPES = (
 MethodDescriptorType = type(str.replace)
 WrapperDescriptorType = type(set.__iter__)
 # `object.__subclasshook__` is an already executed descriptor.
-object_class_dict = type.__dict__["__dict__"].__get__(object)
+object_class_dict = type.__dict__["__dict__"].__get__(object)  # type: ignore[index]
 ClassMethodDescriptorType = type(object_class_dict['__subclasshook__'])

 _sentinel = object()

@@ -147,7 +147,7 @@ class AccessPath:
         self.accesses = accesses


-def create_access_path(inference_state, obj):
+def create_access_path(inference_state, obj) -> AccessPath:
     access = create_access(inference_state, obj)
     return AccessPath(access.get_access_path_tuples())

@@ -175,7 +175,7 @@ class DirectObjectAccess:
     def _create_access(self, obj):
         return create_access(self._inference_state, obj)

-    def _create_access_path(self, obj):
+    def _create_access_path(self, obj) -> AccessPath:
         return create_access_path(self._inference_state, obj)

     def py__bool__(self):

@@ -230,8 +230,8 @@ class DirectObjectAccess:
             return [annotation]
         return None

-    def py__simple_getitem__(self, index):
-        if type(self._obj) not in ALLOWED_GETITEM_TYPES:
+    def py__simple_getitem__(self, index, *, safe=True):
+        if safe and type(self._obj) not in ALLOWED_GETITEM_TYPES:
             # Get rid of side effects, we won't call custom `__getitem__`s.
             return None

@@ -329,33 +329,37 @@ class DirectObjectAccess:
         except TypeError:
             return False

-    def is_allowed_getattr(self, name, safe=True):
+    def is_allowed_getattr(self, name, safe=True) -> Tuple[bool, bool, Optional[AccessPath]]:
         # TODO this API is ugly.
-        if not safe:
-            # Unsafe is mostly used to check for __getattr__/__getattribute__.
-            # getattr_static works for properties, but the underscore methods
-            # are just ignored (because it's safer and avoids more code
-            # execution). See also GH #1378.
-
-            # Avoid warnings, see comment in the next function.
-            with warnings.catch_warnings(record=True):
-                warnings.simplefilter("always")
-                try:
-                    return hasattr(self._obj, name), False
-                except Exception:
-                    # Obviously has an attribute (propably a property) that
-                    # gets executed, so just avoid all exceptions here.
-                    return False, False
         try:
             attr, is_get_descriptor = getattr_static(self._obj, name)
         except AttributeError:
-            return False, False
+            if not safe:
+                # Unsafe is mostly used to check for __getattr__/__getattribute__.
+                # getattr_static works for properties, but the underscore methods
+                # are just ignored (because it's safer and avoids more code
+                # execution). See also GH #1378.
+
+                # Avoid warnings, see comment in the next function.
+                with warnings.catch_warnings(record=True):
+                    warnings.simplefilter("always")
+                    try:
+                        return hasattr(self._obj, name), False, None
+                    except Exception:
+                        # Obviously has an attribute (probably a property) that
+                        # gets executed, so just avoid all exceptions here.
+                        pass
+            return False, False, None
         else:
             if is_get_descriptor and type(attr) not in ALLOWED_DESCRIPTOR_ACCESS:
+                if isinstance(attr, property):
+                    if hasattr(attr.fget, '__annotations__'):
+                        a = DirectObjectAccess(self._inference_state, attr.fget)
+                        return True, True, a.get_return_annotation()
                 # In case of descriptors that have get methods we cannot return
                 # it's value, because that would mean code execution.
-                return True, True
-        return True, False
+                return True, True, None
+        return True, False, None

     def getattr_paths(self, name, default=_sentinel):
         try:

@@ -515,7 +519,7 @@ class DirectObjectAccess:
         # the signature. In that case we just want a simple escape for now.
         raise ValueError

-    def get_return_annotation(self):
+    def get_return_annotation(self) -> Optional[AccessPath]:
         try:
             o = self._obj.__annotations__.get('return')
         except AttributeError:
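The safety logic above leans on static attribute lookup so that properties and __getattr__ hooks are not executed during completion. A standalone illustration using the stdlib's inspect.getattr_static (jedi ships its own getattr_static variant, but the idea shown here is the same):

import inspect

class Lazy:
    @property
    def expensive(self):
        print('side effect!')   # imagine network or file IO here
        return 42

obj = Lazy()

# Normal attribute access triggers the property...
_ = obj.expensive                          # prints "side effect!"

# ...while the static lookup returns the descriptor without calling it.
descriptor = inspect.getattr_static(obj, 'expensive')
print(type(descriptor))                    # <class 'property'>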
@@ -34,7 +34,7 @@ class MixedObject(ValueWrapper):

     This combined logic makes it possible to provide more powerful REPL
     completion. It allows side effects that are not noticable with the default
-    parser structure to still be completeable.
+    parser structure to still be completable.

     The biggest difference from CompiledValue to MixedObject is that we are
     generally dealing with Python code and not with C code. This will generate

@@ -142,9 +142,9 @@ class MixedObjectFilter(compiled.CompiledValueFilter):
         super().__init__(inference_state, compiled_value)
         self._tree_value = tree_value

-    def _create_name(self, name):
+    def _create_name(self, *args, **kwargs):
         return MixedName(
-            super()._create_name(name),
+            super()._create_name(*args, **kwargs),
             self._tree_value,
         )

@@ -267,7 +267,7 @@ def _find_syntax_node_name(inference_state, python_object):
 @inference_state_function_cache()
 def _create(inference_state, compiled_value, module_context):
     # TODO accessing this is bad, but it probably doesn't matter that much,
-    # because we're working with interpreteters only here.
+    # because we're working with interpreters only here.
     python_object = compiled_value.access_handle.access._obj
     result = _find_syntax_node_name(inference_state, python_object)
     if result is None:
@@ -21,11 +21,11 @@ class _ExactImporter(MetaPathFinder):
     def __init__(self, path_dct):
         self._path_dct = path_dct

-    def find_module(self, fullname, path=None):
+    def find_spec(self, fullname, path=None, target=None):
         if path is None and fullname in self._path_dct:
             p = self._path_dct[fullname]
-            loader = PathFinder.find_module(fullname, path=[p])
-            return loader
+            spec = PathFinder.find_spec(fullname, path=[p], target=target)
+            return spec
         return None

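The hunk above ports a meta path finder from the removed find_module API to find_spec. A minimal standalone sketch of the same pattern, pinning selected module names to specific directories; the mapping is invented:

import sys
from importlib.abc import MetaPathFinder
from importlib.machinery import PathFinder


class ExactImporter(MetaPathFinder):
    """Resolve selected top-level modules from pinned directories only."""

    def __init__(self, path_dct):
        self._path_dct = path_dct  # e.g. {'mypkg': '/opt/pinned'}

    def find_spec(self, fullname, path=None, target=None):
        if path is None and fullname in self._path_dct:
            # Delegate to the normal path-based finder, restricted to our directory.
            p = self._path_dct[fullname]
            return PathFinder.find_spec(fullname, path=[p], target=target)
        return None  # let the remaining finders handle everything else


sys.meta_path.insert(0, ExactImporter({'mypkg': '/opt/pinned'}))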
@@ -2,7 +2,6 @@ import sys
 import os
 import inspect
 import importlib
-import warnings
 from pathlib import Path
 from zipfile import ZipFile
 from zipimport import zipimporter, ZipImportError

@@ -151,7 +150,11 @@ def _find_module(string, path=None, full_name=None, is_global_search=True):

             spec = find_spec(string, p)
             if spec is not None:
+                if spec.origin == "frozen":
+                    continue
+
                 loader = spec.loader

                 if loader is None and not spec.has_location:
                     # This is a namespace package.
                     full_name = string if not path else full_name

@@ -163,17 +166,16 @@ def _find_module(string, path=None, full_name=None, is_global_search=True):


 def _find_module_py33(string, path=None, loader=None, full_name=None, is_global_search=True):
-    loader = loader or importlib.machinery.PathFinder.find_module(string, path)
+    if not loader:
+        spec = importlib.machinery.PathFinder.find_spec(string, path)
+        if spec is not None:
+            loader = spec.loader

     if loader is None and path is None:  # Fallback to find builtins
         try:
-            with warnings.catch_warnings(record=True):
-                # Mute "DeprecationWarning: Use importlib.util.find_spec()
-                # instead." While we should replace that in the future, it's
-                # probably good to wait until we deprecate Python 3.3, since
-                # it was added in Python 3.4 and find_loader hasn't been
-                # removed in 3.6.
-                loader = importlib.find_loader(string)
+            spec = importlib.util.find_spec(string)
+            if spec is not None:
+                loader = spec.loader
         except ValueError as e:
             # See #491. Importlib might raise a ValueError, to avoid this, we
             # just raise an ImportError to fix the issue.
@@ -51,7 +51,6 @@ class CompiledValue(Value):
     def py__call__(self, arguments):
         return_annotation = self.access_handle.get_return_annotation()
         if return_annotation is not None:
-            # TODO the return annotation may also be a string.
             return create_from_access_path(
                 self.inference_state,
                 return_annotation

@@ -163,7 +162,10 @@ class CompiledValue(Value):
     def py__simple_getitem__(self, index):
         with reraise_getitem_errors(IndexError, KeyError, TypeError):
             try:
-                access = self.access_handle.py__simple_getitem__(index)
+                access = self.access_handle.py__simple_getitem__(
+                    index,
+                    safe=not self.inference_state.allow_unsafe_executions
+                )
             except AttributeError:
                 return super().py__simple_getitem__(index)
         if access is None:

@@ -311,11 +313,12 @@ class CompiledModule(CompiledValue):


 class CompiledName(AbstractNameDefinition):
-    def __init__(self, inference_state, parent_value, name):
+    def __init__(self, inference_state, parent_value, name, is_descriptor):
         self._inference_state = inference_state
         self.parent_context = parent_value.as_context()
         self._parent_value = parent_value
         self.string_name = name
+        self.is_descriptor = is_descriptor

     def py__doc__(self):
         return self.infer_compiled_value().py__doc__()

@@ -342,6 +345,11 @@ class CompiledName(AbstractNameDefinition):

     @property
     def api_type(self):
+        if self.is_descriptor:
+            # In case of properties we want to avoid executions as much as
+            # possible. Since the api_type can be wrong for other reasons
+            # anyway, we just return instance here.
+            return "instance"
         return self.infer_compiled_value().api_type

     def infer(self):

@@ -432,9 +440,10 @@ class CompiledValueFilter(AbstractFilter):

     def get(self, name):
         access_handle = self.compiled_value.access_handle
+        safe = not self._inference_state.allow_unsafe_executions
         return self._get(
             name,
-            lambda name, safe: access_handle.is_allowed_getattr(name, safe=safe),
+            lambda name: access_handle.is_allowed_getattr(name, safe=safe),
             lambda name: name in access_handle.dir(),
             check_has_attribute=True
         )

@@ -443,30 +452,34 @@ class CompiledValueFilter(AbstractFilter):
         """
         To remove quite a few access calls we introduced the callback here.
         """
-        if self._inference_state.allow_descriptor_getattr:
-            pass
-
-        has_attribute, is_descriptor = allowed_getattr_callback(
+        has_attribute, is_descriptor, property_return_annotation = allowed_getattr_callback(
             name,
-            safe=not self._inference_state.allow_descriptor_getattr
         )
+        if property_return_annotation is not None:
+            values = create_from_access_path(
+                self._inference_state,
+                property_return_annotation
+            ).execute_annotation()
+            if values:
+                return [CompiledValueName(v, name) for v in values]
+
         if check_has_attribute and not has_attribute:
             return []

         if (is_descriptor or not has_attribute) \
-                and not self._inference_state.allow_descriptor_getattr:
+                and not self._inference_state.allow_unsafe_executions:
             return [self._get_cached_name(name, is_empty=True)]

         if self.is_instance and not in_dir_callback(name):
             return []
-        return [self._get_cached_name(name)]
+        return [self._get_cached_name(name, is_descriptor=is_descriptor)]

     @memoize_method
-    def _get_cached_name(self, name, is_empty=False):
+    def _get_cached_name(self, name, is_empty=False, *, is_descriptor=False):
         if is_empty:
             return EmptyCompiledName(self._inference_state, name)
         else:
-            return self._create_name(name)
+            return self._create_name(name, is_descriptor=is_descriptor)

     def values(self):
         from jedi.inference.compiled import builtin_from_name

@@ -480,7 +493,7 @@ class CompiledValueFilter(AbstractFilter):
         for name in dir_infos:
             names += self._get(
                 name,
-                lambda name, safe: dir_infos[name],
+                lambda name: dir_infos[name],
                 lambda name: name in dir_infos,
             )

@@ -490,11 +503,12 @@ class CompiledValueFilter(AbstractFilter):
             names += filter.values()
         return names

-    def _create_name(self, name):
+    def _create_name(self, name, is_descriptor):
         return CompiledName(
             self._inference_state,
             self.compiled_value,
-            name
+            name,
+            is_descriptor,
         )

     def __repr__(self):
@@ -66,11 +66,11 @@ def dynamic_param_lookup(function_value, param_index):
     have to look for all calls to ``func`` to find out what ``foo`` possibly
     is.
     """
-    funcdef = function_value.tree_node
-
-    if not settings.dynamic_params:
+    if not function_value.inference_state.do_dynamic_params_search:
         return NO_VALUES

+    funcdef = function_value.tree_node
+
     path = function_value.get_root_context().py__file__()
     if path is not None and is_stdlib_path(path):
         # We don't want to search for references in the stdlib. Usually people
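do_dynamic_params_search is seeded from jedi.settings.dynamic_params (see the InferenceState hunk earlier), so the global switch still works for Script users while Interpreter now always disables the search. A hedged sketch of turning it off for speed; the analysed snippet is invented:

import jedi

# Dynamic param search infers parameter types from call sites; it improves
# results but can be slow on large code bases.
jedi.settings.dynamic_params = False

script = jedi.Script("def add(a, b):\n    return a + b\n\nadd(1, 2)\n")
print(script.infer(2, 15))  # inference for `b` no longer scans call sites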
@@ -402,6 +402,10 @@ def find_type_from_comment_hint_for(context, node, name):


 def find_type_from_comment_hint_with(context, node, name):
+    if len(node.children) > 4:
+        # In case there are multiple with_items, we do not want a type hint for
+        # now.
+        return []
     assert len(node.children[1].children) == 3, \
         "Can only be here when children[1] is 'foo() as f'"
     varlist = node.children[1].children[2]
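For context, the function above handles PEP 484 style type comments on with statements; the new guard simply skips statements that have more than one context manager. Illustrative source a type checker or jedi might see (the files are created first so the snippet runs):

from pathlib import Path
from typing import IO

Path('data.txt').write_text('hello')
Path('a.txt').write_text('a')
Path('b.txt').write_text('b')

# Single with-item: the `# type:` comment can be attached to `f`.
with open('data.txt') as f:  # type: IO[str]
    print(f.read())

# Multiple with-items: the guard above bails out instead of guessing
# which variable the comment describes.
with open('a.txt') as a, open('b.txt') as b:  # type: IO[str]
    pass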
@@ -86,6 +86,8 @@ class StubFilter(ParserTreeFilter):
             # Imports in stub files are only public if they have an "as"
             # export.
             definition = name.get_definition()
+            if definition is None:
+                return False
             if definition.type in ('import_from', 'import_name'):
                 if name.parent.type not in ('import_as_name', 'dotted_as_name'):
                     return False
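The rule this filter implements is the stub-file convention that an imported name is only public when it is re-exported with the redundant "as" form. A small illustrative .pyi snippet; the package and module names are invented:

# mypkg/__init__.pyi  (illustrative stub)

# Private: a plain import is treated as an implementation detail,
# so completion on `mypkg.` should hide it.
from ._helpers import internal_helper

# Public re-exports: the `import X as X` form marks the name as API.
from ._core import Engine as Engine
from ._core import run as run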
@@ -32,7 +32,7 @@ _TYPE_ALIAS_TYPES = {
|
|||||||
'DefaultDict': 'collections.defaultdict',
|
'DefaultDict': 'collections.defaultdict',
|
||||||
'Deque': 'collections.deque',
|
'Deque': 'collections.deque',
|
||||||
}
|
}
|
||||||
_PROXY_TYPES = 'Optional Union ClassVar'.split()
|
_PROXY_TYPES = 'Optional Union ClassVar Annotated'.split()
|
||||||
|
|
||||||
|
|
||||||
class TypingModuleName(NameWrapper):
|
class TypingModuleName(NameWrapper):
|
||||||
@@ -113,7 +113,7 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
|
|||||||
elif string_name == 'Type':
|
elif string_name == 'Type':
|
||||||
# The type is actually already given in the index_value
|
# The type is actually already given in the index_value
|
||||||
return self._generics_manager[0]
|
return self._generics_manager[0]
|
||||||
elif string_name == 'ClassVar':
|
elif string_name in ['ClassVar', 'Annotated']:
|
||||||
# For now don't do anything here, ClassVars are always used.
|
# For now don't do anything here, ClassVars are always used.
|
||||||
return self._generics_manager[0].execute_annotation()
|
return self._generics_manager[0].execute_annotation()
|
||||||
|
|
||||||
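The two hunks above add `Annotated` to the proxy types so that only its first argument is executed as the annotation, mirroring the existing `ClassVar` handling. A minimal sketch of what that enables (illustrative only, assumes Python 3.9+ where `typing.Annotated` exists):

    from typing import Annotated

    def greet(name: Annotated[str, "metadata is ignored by inference"]) -> str:
        # With the change above, Jedi infers `name` as `str`, so completions
        # such as `name.upper` work inside this function body.
        return name.upper()

    print(greet("world"))
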
@@ -294,6 +294,9 @@ class Callable(BaseTypingInstance):
 from jedi.inference.gradual.annotation import infer_return_for_callable
 return infer_return_for_callable(arguments, param_values, result_values)

+def py__get__(self, instance, class_value):
+return ValueSet([self])


 class Tuple(BaseTypingInstance):
 def _is_homogenous(self):

@@ -248,7 +248,7 @@ class ValueNameMixin:

 def get_defining_qualified_value(self):
 context = self.parent_context
-if context.is_module() or context.is_class():
+if context is not None and (context.is_module() or context.is_class()):
 return self.parent_context.get_value() # Might be None
 return None


@@ -180,26 +180,34 @@ def _check_fs(inference_state, file_io, regex):
 return m.as_context()


-def gitignored_lines(folder_io, file_io):
+def gitignored_paths(folder_io, file_io):
-ignored_paths = set()
+ignored_paths_abs = set()
-ignored_names = set()
+ignored_paths_rel = set()

 for l in file_io.read().splitlines():
-if not l or l.startswith(b'#'):
+if not l or l.startswith(b'#') or l.startswith(b'!') or b'*' in l:
 continue

-p = l.decode('utf-8', 'ignore')
+p = l.decode('utf-8', 'ignore').rstrip('/')
-if p.startswith('/'):
+if '/' in p:
-name = p[1:]
+name = p.lstrip('/')
-if name.endswith(os.path.sep):
+ignored_paths_abs.add(os.path.join(folder_io.path, name))
-name = name[:-1]
-ignored_paths.add(os.path.join(folder_io.path, name))
 else:
-ignored_names.add(p)
+name = p
-return ignored_paths, ignored_names
+ignored_paths_rel.add((folder_io.path, name))

+return ignored_paths_abs, ignored_paths_rel


+def expand_relative_ignore_paths(folder_io, relative_paths):
+curr_path = folder_io.path
+return {os.path.join(curr_path, p[1]) for p in relative_paths if curr_path.startswith(p[0])}


 def recurse_find_python_folders_and_files(folder_io, except_paths=()):
 except_paths = set(except_paths)
+except_paths_relative = set()

 for root_folder_io, folder_ios, file_ios in folder_io.walk():
 # Delete folders that we don't want to iterate over.
 for file_io in file_ios:

@@ -209,14 +217,21 @@ def recurse_find_python_folders_and_files(folder_io, except_paths=()):
 yield None, file_io

 if path.name == '.gitignore':
-ignored_paths, ignored_names = \
+ignored_paths_abs, ignored_paths_rel = gitignored_paths(
-gitignored_lines(root_folder_io, file_io)
+root_folder_io, file_io
-except_paths |= ignored_paths
+)
+except_paths |= ignored_paths_abs
+except_paths_relative |= ignored_paths_rel

+except_paths_relative_expanded = expand_relative_ignore_paths(
+root_folder_io, except_paths_relative
+)

 folder_ios[:] = [
 folder_io
 for folder_io in folder_ios
 if folder_io.path not in except_paths
+and folder_io.path not in except_paths_relative_expanded
 and folder_io.get_base_name() not in _IGNORE_FOLDERS
 ]
 for folder_io in folder_ios:
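A standalone sketch of the ignore handling introduced above (illustrative only; `split_gitignore_lines` is a hypothetical stand-in for `gitignored_paths`, which reads bytes from a FileIO): entries containing a slash become absolute ignores anchored at the .gitignore's folder, while bare names are kept relative and re-anchored for every folder visited below it.

    import os

    def split_gitignore_lines(folder_path, lines):
        ignored_abs, ignored_rel = set(), set()
        for line in lines:
            # Negations and glob patterns are skipped, as in the diff above.
            if not line or line.startswith('#') or line.startswith('!') or '*' in line:
                continue
            p = line.rstrip('/')
            if '/' in p:
                ignored_abs.add(os.path.join(folder_path, p.lstrip('/')))
            else:
                ignored_rel.add((folder_path, p))
        return ignored_abs, ignored_rel

    abs_paths, rel_paths = split_gitignore_lines(
        '/repo', ['build/', '/docs/_build', 'venv', '*.egg-info', '!keep.py'])
    # While walking, relative names apply to the .gitignore's folder and below.
    curr = '/repo/src'
    expanded = {os.path.join(curr, name) for base, name in rel_paths
                if curr.startswith(base)}
    print(sorted(abs_paths))  # ['/repo/docs/_build']
    print(sorted(expanded))   # ['/repo/src/build', '/repo/src/venv']
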
@@ -2,6 +2,7 @@
 Functions inferring the syntax tree.
 """
 import copy
+import itertools

 from parso.python import tree


@@ -328,8 +329,8 @@ def infer_atom(context, atom):
 c = atom.children
 # Parentheses without commas are not tuples.
 if c[0] == '(' and not len(c) == 2 \
-and not(c[1].type == 'testlist_comp'
+and not (c[1].type == 'testlist_comp'
 and len(c[1].children) > 1):
 return context.infer_node(c[1])

 try:

@@ -515,10 +516,20 @@ def _literals_to_types(inference_state, result):

 def _infer_comparison(context, left_values, operator, right_values):
 state = context.inference_state
+if isinstance(operator, str):
+operator_str = operator
+else:
+operator_str = str(operator.value)
 if not left_values or not right_values:
 # illegal slices e.g. cause left/right_result to be None
 result = (left_values or NO_VALUES) | (right_values or NO_VALUES)
 return _literals_to_types(state, result)
+elif operator_str == "|" and all(
+value.is_class() or value.is_compiled()
+for value in itertools.chain(left_values, right_values)
+):
+# ^^^ A naive hack for PEP 604
+return ValueSet.from_sets((left_values, right_values))
 else:
 # I don't think there's a reasonable chance that a string
 # operation is still correct, once we pass something like six
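The `operator_str == "|"` branch above is the naive PEP 604 support: when both operands of `|` are classes (or compiled values), the result is treated as the union of both sides rather than a real `__or__` call. A short end-to-end sketch of the effect through the public `jedi.Script` API (illustrative, assumes a jedi build containing this change):

    import jedi

    source = "x: int | str\nx."
    names = {c.name for c in jedi.Script(source).complete()}
    # Members of both int and str are offered for the PEP 604 union.
    print('bit_length' in names, 'upper' in names)
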
@@ -16,7 +16,7 @@ settings will stop this process.

 It is important to note that:

-1. Array modfications work only in the current module.
+1. Array modifications work only in the current module.
 2. Jedi only checks Array additions; ``list.pop``, etc are ignored.
 """
 from jedi import debug

@@ -78,6 +78,8 @@ class ClassName(TreeNameDefinition):
 type_ = super().api_type
 if type_ == 'function':
 definition = self.tree_name.get_definition()
+if definition is None:
+return type_
 if function_is_property(definition):
 # This essentially checks if there is an @property before
 # the function. @property could be something different, but

@@ -118,21 +120,6 @@ class ClassFilter(ParserTreeFilter):
 return False

 def _access_possible(self, name):
-# Filter for ClassVar variables
-# TODO this is not properly done, yet. It just checks for the string
-# ClassVar in the annotation, which can be quite imprecise. If we
-# wanted to do this correct, we would have to infer the ClassVar.
-if not self._is_instance:
-expr_stmt = name.get_definition()
-if expr_stmt is not None and expr_stmt.type == 'expr_stmt':
-annassign = expr_stmt.children[1]
-if annassign.type == 'annassign':
-# If there is an =, the variable is obviously also
-# defined on the class.
-if 'ClassVar' not in annassign.children[1].get_code() \
-and '=' not in annassign.children:
-return False

 # Filter for name mangling of private variables like __foo
 return not name.value.startswith('__') or name.value.endswith('__') \
 or self._equals_origin_scope()

@@ -1,8 +1,10 @@
+import sys
+from typing import List
 from pathlib import Path

 from parso.tree import search_ancestor
 from jedi.inference.cache import inference_state_method_cache
-from jedi.inference.imports import load_module_from_path
+from jedi.inference.imports import goto_import, load_module_from_path
 from jedi.inference.filters import ParserTreeFilter
 from jedi.inference.base_value import NO_VALUES, ValueSet
 from jedi.inference.helpers import infer_call_of_leaf

@@ -131,6 +133,36 @@ def _is_pytest_func(func_name, decorator_nodes):
 or any('fixture' in n.get_code() for n in decorator_nodes)


+def _find_pytest_plugin_modules() -> List[List[str]]:
+"""
+Finds pytest plugin modules hooked by setuptools entry points
+
+See https://docs.pytest.org/en/stable/how-to/writing_plugins.html#setuptools-entry-points
+"""
+if sys.version_info >= (3, 8):
+from importlib.metadata import entry_points
+
+if sys.version_info >= (3, 10):
+pytest_entry_points = entry_points(group="pytest11")
+else:
+pytest_entry_points = entry_points().get("pytest11", ())
+
+if sys.version_info >= (3, 9):
+return [ep.module.split(".") for ep in pytest_entry_points]
+else:
+# Python 3.8 doesn't have `EntryPoint.module`. Implement equivalent
+# to what Python 3.9 does (with additional None check to placate `mypy`)
+matches = [
+ep.pattern.match(ep.value)
+for ep in pytest_entry_points
+]
+return [x.group('module').split(".") for x in matches if x]
+
+else:
+from pkg_resources import iter_entry_points
+return [ep.module_name.split(".") for ep in iter_entry_points(group="pytest11")]


 @inference_state_method_cache()
 def _iter_pytest_modules(module_context, skip_own_module=False):
 if not skip_own_module:

@@ -159,7 +191,7 @@ def _iter_pytest_modules(module_context, skip_own_module=False):
 break
 last_folder = folder # keep track of the last found parent name

-for names in _PYTEST_FIXTURE_MODULES:
+for names in _PYTEST_FIXTURE_MODULES + _find_pytest_plugin_modules():
 for module_value in module_context.inference_state.import_module(names):
 yield module_value.as_context()

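The entry-point lookup added above can be reproduced on its own as follows (assumes Python 3.10+, where `importlib.metadata.entry_points` accepts a `group` keyword; older interpreters take the fallback branches shown in the diff):

    from importlib.metadata import entry_points

    # Every installed pytest plugin registers itself under the "pytest11" group.
    plugin_modules = [ep.module.split(".") for ep in entry_points(group="pytest11")]
    print(plugin_modules)  # e.g. [['pytest_django', 'plugin']] when pytest-django is installed
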
@@ -167,14 +199,28 @@ def _iter_pytest_modules(module_context, skip_own_module=False):
 class FixtureFilter(ParserTreeFilter):
 def _filter(self, names):
 for name in super()._filter(names):
-funcdef = name.parent
+# look for fixture definitions of imported names
-# Class fixtures are not supported
+if name.parent.type == "import_from":
-if funcdef.type == 'funcdef':
+imported_names = goto_import(self.parent_context, name)
-decorated = funcdef.parent
+if any(
-if decorated.type == 'decorated' and self._is_fixture(decorated):
+self._is_fixture(iname.parent_context, iname.tree_name)
+for iname in imported_names
+# discard imports of whole modules, that have no tree_name
+if iname.tree_name
+):
 yield name

-def _is_fixture(self, decorated):
+elif self._is_fixture(self.parent_context, name):
+yield name

+def _is_fixture(self, context, name):
+funcdef = name.parent
+# Class fixtures are not supported
+if funcdef.type != "funcdef":
+return False
+decorated = funcdef.parent
+if decorated.type != "decorated":
+return False
 decorators = decorated.children[0]
 if decorators.type == 'decorators':
 decorators = decorators.children

@@ -191,11 +237,12 @@ class FixtureFilter(ParserTreeFilter):
 last_leaf = last_trailer.get_last_leaf()
 if last_leaf == ')':
 values = infer_call_of_leaf(
-self.parent_context, last_leaf, cut_own_trailer=True)
+context, last_leaf, cut_own_trailer=True
+)
 else:
-values = self.parent_context.infer_node(dotted_name)
+values = context.infer_node(dotted_name)
 else:
-values = self.parent_context.infer_node(dotted_name)
+values = context.infer_node(dotted_name)
 for value in values:
 if value.name.get_qualified_names(include_module_names=True) \
 == ('_pytest', 'fixtures', 'fixture'):

@@ -803,6 +803,17 @@ _implemented = {
 # For now this works at least better than Jedi trying to understand it.
 'dataclass': _dataclass
 },
+# attrs exposes declaration interface roughly compatible with dataclasses
+# via attrs.define, attrs.frozen and attrs.mutable
+# https://www.attrs.org/en/stable/names.html
+'attr': {
+'define': _dataclass,
+'frozen': _dataclass,
+},
+'attrs': {
+'define': _dataclass,
+'frozen': _dataclass,
+},
 'os.path': {
 'dirname': _create_string_input_function(os.path.dirname),
 'abspath': _create_string_input_function(os.path.abspath),
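Because `attr.define`/`attrs.define` and `frozen` are now routed through the dataclass plugin, classes declared with attrs get the same synthesized `__init__` treatment as dataclasses. Illustrative usage only (assumes the third-party `attrs` package, whose `define` API exists since attrs 20.1):

    import attr

    @attr.define
    class Point:
        x: int
        y: int = 0

    # Jedi can now offer signature help for the generated __init__(x, y=0).
    p = Point(1, 2)
    print(p.x, p.y)
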
@@ -143,6 +143,15 @@ This improves autocompletion for libraries that use ``setattr`` or
 ``globals()`` modifications a lot.
 """

+allow_unsafe_interpreter_executions = True
+"""
+Controls whether descriptors are evaluated when using an Interpreter. This is
+something you might want to control when using Jedi from a Repl (e.g. IPython)
+
+Generally this setting allows Jedi to execute __getitem__ and descriptors like
+`property`.
+"""

 # ----------------
 # Caching Validity
 # ----------------
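A usage sketch for the new setting (the name is taken from the hunk above): switching it off tells `jedi.Interpreter` not to execute descriptors such as `property` or `__getitem__` while completing, so inference falls back to return annotations, which the interpreter tests later in this diff exercise.

    import jedi
    import jedi.settings

    # Hedged example: disable descriptor execution for REPL completion.
    jedi.settings.allow_unsafe_interpreter_executions = False

    class WithProperty:
        @property
        def prop(self) -> str:
            return ""

    ns = {'obj': WithProperty()}
    # str members are still found through the `-> str` annotation.
    print([c.name for c in jedi.Interpreter('obj.prop.upp', [ns]).complete()])
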
@@ -2,7 +2,7 @@
 Utilities for end-users.
 """

-import __main__ # type: ignore[import]
+import __main__
 from collections import namedtuple
 import logging
 import traceback

@@ -28,6 +28,12 @@ max-line-length = 100


 [mypy]
+# Exclude our copies of external stubs
+exclude = ^jedi/third_party

+show_error_codes = true
+enable_error_code = ignore-without-code

 # Ensure generics are explicit about what they are (e.g: `List[str]` rather than
 # just `List`)
 disallow_any_generics = True

setup.py (41 lines changed)
@@ -32,7 +32,8 @@ setup(name='jedi',
 long_description=readme,
 packages=find_packages(exclude=['test', 'test.*']),
 python_requires='>=3.6',
-install_requires=['parso>=0.8.0,<0.9.0'],
+# Python 3.11 grammar is added to parso in 0.8.3
+install_requires=['parso>=0.8.3,<0.9.0'],
 extras_require={
 'testing': [
 'pytest<7.0.0',

@@ -41,10 +42,43 @@ setup(name='jedi',
 # coloroma for colored debug output
 'colorama',
 'Django<3.1', # For now pin this.
+'attrs',
 ],
 'qa': [
-'flake8==3.8.3',
+# latest version supporting Python 3.6
-'mypy==0.782',
+'flake8==5.0.4',
+# latest version supporting Python 3.6
+'mypy==0.971',
+# Arbitrary pins, latest at the time of pinning
+'types-setuptools==67.2.0.1',
+],
+'docs': [
+# Just pin all of these.
+'Jinja2==2.11.3',
+'MarkupSafe==1.1.1',
+'Pygments==2.8.1',
+'alabaster==0.7.12',
+'babel==2.9.1',
+'chardet==4.0.0',
+'commonmark==0.8.1',
+'docutils==0.17.1',
+'future==0.18.2',
+'idna==2.10',
+'imagesize==1.2.0',
+'mock==1.0.1',
+'packaging==20.9',
+'pyparsing==2.4.7',
+'pytz==2021.1',
+'readthedocs-sphinx-ext==2.1.4',
+'recommonmark==0.5.0',
+'requests==2.25.1',
+'six==1.15.0',
+'snowballstemmer==2.1.0',
+'sphinx==1.8.5',
+'sphinx-rtd-theme==0.4.3',
+'sphinxcontrib-serializinghtml==1.1.4',
+'sphinxcontrib-websupport==1.2.4',
+'urllib3==1.26.4',
 ],
 },
 package_data={'jedi': ['*.pyi', 'third_party/typeshed/LICENSE',

@@ -62,6 +96,7 @@ setup(name='jedi',
 'Programming Language :: Python :: 3.8',
 'Programming Language :: Python :: 3.9',
 'Programming Language :: Python :: 3.10',
+'Programming Language :: Python :: 3.11',
 'Topic :: Software Development :: Libraries :: Python Modules',
 'Topic :: Text Editors :: Integrated Development Environments (IDE)',
 'Topic :: Utilities',
@@ -47,8 +47,6 @@ async def awaitable_test():
 #? str()
 foo

-# python >= 3.6

 async def asgen():
 yield 1
 await asyncio.sleep(0)

@@ -413,6 +413,10 @@ with Foo() as f3:
 with Foo() as f3:
 f3

+with open("a"), open("b") as bfile:
+#? ['flush']
+bfile.flush

 # -----------------
 # Avoiding multiple definitions
 # -----------------

@@ -23,7 +23,7 @@ def inheritance_fixture():


 @pytest.fixture
-def testdir(testdir):
+def capsysbinary(capsysbinary):
-#? ['chdir']
+#? ['close']
-testdir.chdir
+capsysbinary.clos
-return testdir
+return capsysbinary

@@ -5,6 +5,7 @@ import uuid
 from django.db import models
 from django.contrib.auth.models import User
 from django.db.models.query_utils import DeferredAttribute
+from django.db.models.manager import BaseManager


 class TagManager(models.Manager):

@@ -1,5 +1,3 @@
-# python >= 3.6

 class Foo:
 bar = 1


@@ -110,4 +110,4 @@ class Test(object):
 # nocond lambdas make no sense at all.

 #? int()
-[a for a in [1,2] if lambda: 3][0]
+[a for a in [1,2] if (lambda: 3)][0]

@@ -76,7 +76,7 @@ from import_tree.pkg.mod1 import not_existant,
 #? 22 ['mod1', 'base']
 from import_tree.pkg. import mod1
 #? 17 ['mod1', 'mod2', 'random', 'pkg', 'references', 'rename1', 'rename2', 'classes', 'globals', 'recurse_class1', 'recurse_class2', 'invisible_pkg', 'flow_import']
-from import_tree. import pkg
+from import_tree. import new_pkg

 #? 18 ['pkg']
 from import_tree.p import pkg
test/completion/pep0484_decorators.py (new file, 50 lines)
@@ -0,0 +1,50 @@
+""" Pep-0484 type hinted decorators """

+from typing import Callable


+def decorator(func):
+def wrapper(*a, **k):
+return str(func(*a, **k))
+return wrapper


+def typed_decorator(func: Callable[..., int]) -> Callable[..., str]:
+...

+# Functions

+@decorator
+def plain_func() -> int:
+return 4

+#? str()
+plain_func()


+@typed_decorator
+def typed_func() -> int:
+return 4

+#? str()
+typed_func()


+# Methods

+class X:
+@decorator
+def plain_method(self) -> int:
+return 4

+@typed_decorator
+def typed_method(self) -> int:
+return 4

+inst = X()

+#? str()
+inst.plain_method()

+#? str()
+inst.typed_method()
@@ -27,13 +27,13 @@ class PlainClass(object):


 tpl = ("1", 2)
-tpl_typed = ("2", 3) # type: Tuple[str, int]
+tpl_typed: Tuple[str, int] = ("2", 3)

 collection = {"a": 1}
-collection_typed = {"a": 1} # type: Dict[str, int]
+collection_typed: Dict[str, int] = {"a": 1}

-list_of_ints = [42] # type: List[int]
+list_of_ints: List[int] = [42]
-list_of_funcs = [foo] # type: List[Callable[[T], T]]
+list_of_funcs: List[Callable[[T], T]] = [foo]

 custom_generic = CustomGeneric(123.45)


@@ -19,12 +19,12 @@ T_co = TypeVar('T_co', covariant=True)
 V = TypeVar('V')


-just_float = 42. # type: float
+just_float: float = 42.
-optional_float = 42. # type: Optional[float]
+optional_float: Optional[float] = 42.
-list_of_ints = [42] # type: List[int]
+list_of_ints: List[int] = [42]
-list_of_floats = [42.] # type: List[float]
+list_of_floats: List[float] = [42.]
-list_of_optional_floats = [x or None for x in list_of_floats] # type: List[Optional[float]]
+list_of_optional_floats: List[Optional[float]] = [x or None for x in list_of_floats]
-list_of_ints_and_strs = [42, 'abc'] # type: List[Union[int, str]]
+list_of_ints_and_strs: List[Union[int, str]] = [42, 'abc']

 # Test that simple parameters are handled
 def list_t_to_list_t(the_list: List[T]) -> List[T]:

@@ -48,7 +48,7 @@ for z in list_t_to_list_t(list_of_ints_and_strs):
 z


-list_of_int_type = [int] # type: List[Type[int]]
+list_of_int_type: List[Type[int]] = [int]

 # Test that nested parameters are handled
 def list_optional_t_to_list_t(the_list: List[Optional[T]]) -> List[T]:

@@ -85,7 +85,7 @@ def optional_list_t_to_list_t(x: Optional[List[T]]) -> List[T]:
 return x if x is not None else []


-optional_list_float = None # type: Optional[List[float]]
+optional_list_float: Optional[List[float]] = None
 for xc in optional_list_t_to_list_t(optional_list_float):
 #? float()
 xc

@@ -134,7 +134,7 @@ def list_tuple_t_to_tuple_list_t(the_list: List[Tuple[T]]) -> Tuple[List[T], ...
 return tuple(list(x) for x in the_list)


-list_of_int_tuples = [(x,) for x in list_of_ints] # type: List[Tuple[int]]
+list_of_int_tuples: List[Tuple[int]] = [(x,) for x in list_of_ints]

 for b in list_tuple_t_to_tuple_list_t(list_of_int_tuples):
 #? int()

@@ -145,7 +145,7 @@ def list_tuple_t_elipsis_to_tuple_list_t(the_list: List[Tuple[T, ...]]) -> Tuple
 return tuple(list(x) for x in the_list)


-list_of_int_tuple_elipsis = [tuple(list_of_ints)] # type: List[Tuple[int, ...]]
+list_of_int_tuple_elipsis: List[Tuple[int, ...]] = [tuple(list_of_ints)]

 for b in list_tuple_t_elipsis_to_tuple_list_t(list_of_int_tuple_elipsis):
 #? int()

@@ -157,7 +157,7 @@ def foo(x: int) -> int:
 return x


-list_of_funcs = [foo] # type: List[Callable[[int], int]]
+list_of_funcs: List[Callable[[int], int]] = [foo]

 def list_func_t_to_list_func_type_t(the_list: List[Callable[[T], T]]) -> List[Callable[[Type[T]], T]]:
 def adapt(func: Callable[[T], T]) -> Callable[[Type[T]], T]:

@@ -176,7 +176,7 @@ def bar(*a, **k) -> int:
 return len(a) + len(k)


-list_of_funcs_2 = [bar] # type: List[Callable[..., int]]
+list_of_funcs_2: List[Callable[..., int]] = [bar]

 def list_func_t_passthrough(the_list: List[Callable[..., T]]) -> List[Callable[..., T]]:
 return the_list

@@ -187,7 +187,7 @@ for b in list_func_t_passthrough(list_of_funcs_2):
 b(None, x="x")


-mapping_int_str = {42: 'a'} # type: Dict[int, str]
+mapping_int_str: Dict[int, str] = {42: 'a'}

 # Test that mappings (that have more than one parameter) are handled
 def invert_mapping(mapping: Mapping[K, V]) -> Mapping[V, K]:

@@ -210,11 +210,11 @@ first(mapping_int_str)
 #? str()
 first("abc")

-some_str = NotImplemented # type: str
+some_str: str = NotImplemented
 #? str()
 first(some_str)

-annotated = [len] # type: List[ Callable[[Sequence[float]], int] ]
+annotated: List[ Callable[[Sequence[float]], int] ] = [len]
 #? int()
 first(annotated)()

@@ -237,7 +237,7 @@ for b in values(mapping_int_str):
 #
 # Tests that user-defined generic types are handled
 #
-list_ints = [42] # type: List[int]
+list_ints: List[int] = [42]

 class CustomGeneric(Generic[T_co]):
 def __init__(self, val: T_co) -> None:

@@ -248,7 +248,7 @@ class CustomGeneric(Generic[T_co]):
 def custom(x: CustomGeneric[T]) -> T:
 return x.val

-custom_instance = CustomGeneric(42) # type: CustomGeneric[int]
+custom_instance: CustomGeneric[int] = CustomGeneric(42)

 #? int()
 custom(custom_instance)

@@ -275,7 +275,7 @@ for x5 in wrap_custom(list_ints):


 # Test extraction of type from a nested custom generic type
-list_custom_instances = [CustomGeneric(42)] # type: List[CustomGeneric[int]]
+list_custom_instances: List[CustomGeneric[int]] = [CustomGeneric(42)]

 def unwrap_custom(iterable: Iterable[CustomGeneric[T]]) -> List[T]:
 return [x.val for x in iterable]

@@ -303,7 +303,7 @@ for xg in unwrap_custom(CustomGeneric(s) for s in 'abc'):


 # Test extraction of type from type parameer nested within a custom generic type
-custom_instance_list_int = CustomGeneric([42]) # type: CustomGeneric[List[int]]
+custom_instance_list_int: CustomGeneric[List[int]] = CustomGeneric([42])

 def unwrap_custom2(instance: CustomGeneric[Iterable[T]]) -> List[T]:
 return list(instance.val)

@@ -326,7 +326,7 @@ class Specialised(Mapping[int, str]):
 pass


-specialised_instance = NotImplemented # type: Specialised
+specialised_instance: Specialised = NotImplemented

 #? int()
 first(specialised_instance)

@@ -341,7 +341,7 @@ class ChildOfSpecialised(Specialised):
 pass


-child_of_specialised_instance = NotImplemented # type: ChildOfSpecialised
+child_of_specialised_instance: ChildOfSpecialised = NotImplemented

 #? int()
 first(child_of_specialised_instance)

@@ -355,13 +355,13 @@ class CustomPartialGeneric1(Mapping[str, T]):
 pass


-custom_partial1_instance = NotImplemented # type: CustomPartialGeneric1[int]
+custom_partial1_instance: CustomPartialGeneric1[int] = NotImplemented

 #? str()
 first(custom_partial1_instance)


-custom_partial1_unbound_instance = NotImplemented # type: CustomPartialGeneric1
+custom_partial1_unbound_instance: CustomPartialGeneric1 = NotImplemented

 #? str()
 first(custom_partial1_unbound_instance)

@@ -371,7 +371,7 @@ class CustomPartialGeneric2(Mapping[T, str]):
 pass


-custom_partial2_instance = NotImplemented # type: CustomPartialGeneric2[int]
+custom_partial2_instance: CustomPartialGeneric2[int] = NotImplemented

 #? int()
 first(custom_partial2_instance)

@@ -380,7 +380,7 @@ first(custom_partial2_instance)
 values(custom_partial2_instance)[0]


-custom_partial2_unbound_instance = NotImplemented # type: CustomPartialGeneric2
+custom_partial2_unbound_instance: CustomPartialGeneric2 = NotImplemented

 #? []
 first(custom_partial2_unbound_instance)

@@ -19,16 +19,16 @@ TTypeAny = TypeVar('TTypeAny', bound=Type[Any])
 TCallable = TypeVar('TCallable', bound=Callable[..., Any])

 untyped_list_str = ['abc', 'def']
-typed_list_str = ['abc', 'def'] # type: List[str]
+typed_list_str: List[str] = ['abc', 'def']

 untyped_tuple_str = ('abc',)
-typed_tuple_str = ('abc',) # type: Tuple[str]
+typed_tuple_str: Tuple[str] = ('abc',)

 untyped_tuple_str_int = ('abc', 4)
-typed_tuple_str_int = ('abc', 4) # type: Tuple[str, int]
+typed_tuple_str_int: Tuple[str, int] = ('abc', 4)

-variadic_tuple_str = ('abc',) # type: Tuple[str, ...]
+variadic_tuple_str: Tuple[str, ...] = ('abc',)
-variadic_tuple_str_int = ('abc', 4) # type: Tuple[Union[str, int], ...]
+variadic_tuple_str_int: Tuple[Union[str, int], ...] = ('abc', 4)


 def untyped_passthrough(x):
@@ -1,10 +1,9 @@
-# python >= 3.6
 from typing import List, Dict, overload, Tuple, TypeVar

 lst: list
 list_alias: List
 list_str: List[str]
-list_str: List[int]
+list_int: List[int]

 # -------------------------
 # With base classes

@@ -2,18 +2,14 @@
 Test the typing library, with docstrings and annotations
 """
 import typing
+from typing import Sequence, MutableSequence, List, Iterable, Iterator, \
+AbstractSet, Tuple, Mapping, Dict, Union, Optional

 class B:
 pass

-def we_can_has_sequence(p, q, r, s, t, u):
+def we_can_has_sequence(p: Sequence[int], q: Sequence[B], r: Sequence[int],
-"""
+s: Sequence["int"], t: MutableSequence[dict], u: List[float]):
-:type p: typing.Sequence[int]
-:type q: typing.Sequence[B]
-:type r: typing.Sequence[int]
-:type s: typing.Sequence["int"]
-:type t: typing.MutableSequence[dict]
-:type u: typing.List[float]
-"""
 #? ["count"]
 p.c
 #? int()

@@ -43,13 +39,8 @@ def we_can_has_sequence(p, q, r, s, t, u):
 #? float()
 u[1]

-def iterators(ps, qs, rs, ts):
+def iterators(ps: Iterable[int], qs: Iterator[str], rs:
-"""
+Sequence["ForwardReference"], ts: AbstractSet["float"]):
-:type ps: typing.Iterable[int]
-:type qs: typing.Iterator[str]
-:type rs: typing.Sequence["ForwardReference"]
-:type ts: typing.AbstractSet["float"]
-"""
 for p in ps:
 #? int()
 p

@@ -79,22 +70,13 @@ def iterators(ps, qs, rs, ts):
 #? float()
 t

-def sets(p, q):
+def sets(p: AbstractSet[int], q: typing.MutableSet[float]):
-"""
-:type p: typing.AbstractSet[int]
-:type q: typing.MutableSet[float]
-"""
 #? []
 p.a
 #? ["add"]
 q.a

-def tuple(p, q, r):
+def tuple(p: Tuple[int], q: Tuple[int, str, float], r: Tuple[B, ...]):
-"""
-:type p: typing.Tuple[int]
-:type q: typing.Tuple[int, str, float]
-:type r: typing.Tuple[B, ...]
-"""
 #? int()
 p[0]
 #? ['index']

@@ -127,16 +109,14 @@ class Key:
 class Value:
 pass

-def mapping(p, q, d, dd, r, s, t):
+def mapping(
-"""
+p: Mapping[Key, Value],
-:type p: typing.Mapping[Key, Value]
+q: typing.MutableMapping[Key, Value],
-:type q: typing.MutableMapping[Key, Value]
+d: Dict[Key, Value],
-:type d: typing.Dict[Key, Value]
+dd: typing.DefaultDict[Key, Value],
-:type dd: typing.DefaultDict[Key, Value]
+r: typing.KeysView[Key],
-:type r: typing.KeysView[Key]
+s: typing.ValuesView[Value],
-:type s: typing.ValuesView[Value]
+t: typing.ItemsView[Key, Value]):
-:type t: typing.ItemsView[Key, Value]
-"""
 #? []
 p.setd
 #? ["setdefault"]

@@ -198,14 +178,12 @@ def mapping(p, q, d, dd, r, s, t):
 #? Value()
 value

-def union(p, q, r, s, t):
+def union(
-"""
+p: Union[int],
-:type p: typing.Union[int]
+q: Union[int, int],
-:type q: typing.Union[int, int]
+r: Union[int, str, "int"],
-:type r: typing.Union[int, str, "int"]
+s: Union[int, typing.Union[str, "typing.Union['float', 'dict']"]],
-:type s: typing.Union[int, typing.Union[str, "typing.Union['float', 'dict']"]]
+t: Union[int, None]):
-:type t: typing.Union[int, None]
-"""
 #? int()
 p
 #? int()

@@ -217,9 +195,8 @@ def union(p, q, r, s, t):
 #? int() None
 t

-def optional(p):
+def optional(p: Optional[int]):
 """
-:type p: typing.Optional[int]
 Optional does not do anything special. However it should be recognised
 as being of that type. Jedi doesn't do anything with the extra into that
 it can be None as well

@@ -234,10 +211,7 @@ class TestDict(typing.Dict[str, int]):
 def setdud(self):
 pass

-def testdict(x):
+def testdict(x: TestDict):
-"""
-:type x: TestDict
-"""
 #? ["setdud", "setdefault"]
 x.setd
 for key in x.keys():

@@ -262,10 +236,7 @@ y = WrappingType(0) # Per https://github.com/davidhalter/jedi/issues/1015#issuec
 #? str()
 y

-def testnewtype(y):
+def testnewtype(y: WrappingType):
-"""
-:type y: WrappingType
-"""
 #? str()
 y
 #? ["upper"]

@@ -273,10 +244,7 @@ def testnewtype(y):

 WrappingType2 = typing.NewType()

-def testnewtype2(y):
+def testnewtype2(y: WrappingType2):
-"""
-:type y: WrappingType2
-"""
 #?
 y
 #? []

@@ -297,10 +265,7 @@ class TestDefaultDict(typing.DefaultDict[str, int]):
 def setdud(self):
 pass

-def testdict(x):
+def testdict(x: TestDefaultDict):
-"""
-:type x: TestDefaultDict
-"""
 #? ["setdud", "setdefault"]
 x.setd
 for key in x.keys():
@@ -1,9 +1,6 @@
 """
-PEP 526 introduced a new way of using type annotations on variables. It was
+PEP 526 introduced a way of using type annotations on variables.
-introduced in Python 3.6.
 """
-# python >= 3.6

 import typing

 asdf = ''

@@ -47,7 +44,7 @@ class Foo():
 baz: typing.ClassVar[str]


-#?
+#? int()
 Foo.bar
 #? int()
 Foo().bar

@@ -61,6 +58,7 @@ class VarClass:
 var_instance2: float
 var_class1: typing.ClassVar[str] = 1
 var_class2: typing.ClassVar[bytes]
+var_class3 = None

 def __init__(self):
 #? int()

@@ -73,15 +71,21 @@ class VarClass:
 d.var_class2
 #? []
 d.int
-#? ['var_class1', 'var_class2', 'var_instance1', 'var_instance2']
+#? ['var_class1', 'var_class2', 'var_instance1', 'var_instance2', 'var_class3']
 self.var_

+class VarClass2(VarClass):
+var_class3: typing.ClassVar[int]

-#? ['var_class1', 'var_class2', 'var_instance1']
+def __init__(self):
+#? int()
+self.var_class3

+#? ['var_class1', 'var_class2', 'var_instance1', 'var_class3', 'var_instance2']
 VarClass.var_
 #? int()
 VarClass.var_instance1
-#?
+#? float()
 VarClass.var_instance2
 #? str()
 VarClass.var_class1

@@ -91,7 +95,7 @@ VarClass.var_class2
 VarClass.int

 d = VarClass()
-#? ['var_class1', 'var_class2', 'var_instance1', 'var_instance2']
+#? ['var_class1', 'var_class2', 'var_class3', 'var_instance1', 'var_instance2']
 d.var_
 #? int()
 d.var_instance1
test/completion/pep0593_annotations.py (new file, 26 lines)
@@ -0,0 +1,26 @@
+# python >= 3.9

+from typing import Annotated

+# This is just a dummy and very meaningless thing to use with to the Annotated
+# type hint
+class Foo:
+pass

+class A:
+pass


+def annotated_function_params(
+basic: Annotated[str, Foo()],
+obj: A,
+annotated_obj: Annotated[A, Foo()],
+):
+#? str()
+basic

+#? A()
+obj

+#? A()
+annotated_obj

test/completion/pep0604.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+from pep0484_generic_parameters import list_t_to_list_t

+list_of_ints_and_strs: list[int | str]

+# Test that unions are handled
+x2 = list_t_to_list_t(list_of_ints_and_strs)[0]
+#? int() str()
+x2

+for z in list_t_to_list_t(list_of_ints_and_strs):
+#? int() str()
+z


+from pep0484_generic_passthroughs import (
+typed_variadic_tuple_generic_passthrough,
+)

+variadic_tuple_str_int: tuple[int | str, ...]

+for m in typed_variadic_tuple_generic_passthrough(variadic_tuple_str_int):
+#? str() int()
+m


+def func_returns_byteslike() -> bytes | bytearray:
+pass

+#? bytes() bytearray()
+func_returns_byteslike()


+pep604_optional_1: int | str | None
+pep604_optional_2: None | bytes

+#? int() str() None
+pep604_optional_1

+#? None bytes()
+pep604_optional_2


+pep604_in_str: "int | bytes"

+#? int() bytes()
+pep604_in_str
@@ -139,9 +139,6 @@ def test_p(monkeypatch):
 #? ['capsysbinary']
 def test_p(capsysbin

-#? ['tmpdir', 'tmpdir_factory']
-def test_p(tmpdi


 def close_parens():
 pass

@@ -183,3 +180,28 @@ def with_annot() -> Generator[float, None, None]:
 def test_with_annot(inheritance_fixture, with_annot):
 #? float()
 with_annot

+# -----------------
+# pytest external plugins
+# -----------------

+#? ['admin_user', 'admin_client']
+def test_z(admin

+#! 15 ['def admin_client']
+def test_p(admin_client):
+#? ['login', 'logout']
+admin_client.log

+@pytest.fixture
+@some_decorator
+#? ['admin_user']
+def bla(admin_u
+return

+@pytest.fixture
+@some_decorator
+#! 12 ['def admin_user']
+def bla(admin_user):
+pass


@@ -0,0 +1,6 @@
+from pytest import fixture


+@fixture()
+def admin_user():
+pass

test/examples/pytest_plugin_package/pytest_plugin/plugin.py (new file, 16 lines)
@@ -0,0 +1,16 @@
+import pytest

+from .fixtures import admin_user # noqa


+@pytest.fixture()
+def admin_client():
+return Client()


+class Client:
+def login(self, **credentials):
+...

+def logout(self):
+...

@@ -209,7 +209,7 @@ class IntegrationTestCase(BaseTestCase):
 TEST_REFERENCES: self.run_get_references,
 }
 if (self.path.endswith('pytest.py') or self.path.endswith('conftest.py')) \
-and environment.executable != os.path.realpath(sys.executable):
+and os.path.realpath(environment.executable) != os.path.realpath(sys.executable):
 # It's not guarantueed that pytest is installed in test
 # environments, if we're not running in the same environment that
 # we're already in, so just skip that case.

@@ -650,6 +650,7 @@ def test_cursor_after_signature(Script, column):
 ('abs(chr ( \nclass y: pass', 1, 8, 'abs', 0),
 ('abs(chr ( \nclass y: pass', 1, 9, 'abs', 0),
 ('abs(chr ( \nclass y: pass', 1, 10, 'chr', 0),
+('abs(foo.bar=3)', 1, 13, 'abs', 0),
 ]
 )
 def test_base_signatures(Script, code, line, column, name, index):
@@ -28,6 +28,11 @@ def test_import_keyword(Script):
|
|||||||
# unrelated to #44
|
# unrelated to #44
|
||||||
|
|
||||||
|
|
||||||
|
def test_import_keyword_after_newline(Script):
|
||||||
|
d, = Script("import x\nimport y").help(line=2, column=0)
|
||||||
|
assert d.docstring().startswith('The "import" statement')
|
||||||
|
|
||||||
|
|
||||||
def test_import_keyword_with_gotos(goto_or_infer):
|
def test_import_keyword_with_gotos(goto_or_infer):
|
||||||
assert not goto_or_infer("import x", column=0)
|
assert not goto_or_infer("import x", column=0)
|
||||||
|
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ import typing
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import jedi
|
import jedi
|
||||||
|
import jedi.settings
|
||||||
from jedi.inference.compiled import mixed
|
from jedi.inference.compiled import mixed
|
||||||
from importlib import import_module
|
from importlib import import_module
|
||||||
|
|
||||||
@@ -101,11 +102,13 @@ def test_side_effect_completion():
|
|||||||
assert foo.name == 'foo'
|
assert foo.name == 'foo'
|
||||||
|
|
||||||
|
|
||||||
def _assert_interpreter_complete(source, namespace, completions,
|
def _assert_interpreter_complete(source, namespace, completions, *, check_type=False, **kwds):
|
||||||
**kwds):
|
|
||||||
script = jedi.Interpreter(source, [namespace], **kwds)
|
script = jedi.Interpreter(source, [namespace], **kwds)
|
||||||
cs = script.complete()
|
cs = script.complete()
|
||||||
actual = [c.name for c in cs]
|
actual = [c.name for c in cs]
|
||||||
|
if check_type:
|
||||||
|
for c in cs:
|
||||||
|
c.type
|
||||||
assert sorted(actual) == sorted(completions)
|
assert sorted(actual) == sorted(completions)
|
||||||
|
|
||||||
|
|
||||||
@@ -219,7 +222,7 @@ def test__getattr__completions(allow_unsafe_getattr, class_is_findable):
|
|||||||
|
|
||||||
@pytest.fixture(params=[False, True])
|
@pytest.fixture(params=[False, True])
|
||||||
def allow_unsafe_getattr(request, monkeypatch):
|
def allow_unsafe_getattr(request, monkeypatch):
|
||||||
monkeypatch.setattr(jedi.Interpreter, '_allow_descriptor_getattr_default', request.param)
|
monkeypatch.setattr(jedi.settings, 'allow_unsafe_interpreter_executions', request.param)
|
||||||
return request.param
|
return request.param
|
||||||
|
|
||||||
|
|
||||||
@@ -610,12 +613,12 @@ def test_dict_getitem(code, types):
|
|||||||
#('for x in dunder: x', 'str'),
|
#('for x in dunder: x', 'str'),
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
def test_dunders(class_is_findable, code, expected):
|
def test_dunders(class_is_findable, code, expected, allow_unsafe_getattr):
|
||||||
from typing import Iterator
|
from typing import Iterator
|
||||||
|
|
||||||
class DunderCls:
|
class DunderCls:
|
||||||
def __getitem__(self, key) -> int:
|
def __getitem__(self, key) -> int:
|
||||||
pass
|
return 1
|
||||||
|
|
||||||
def __iter__(self, key) -> Iterator[str]:
|
def __iter__(self, key) -> Iterator[str]:
|
||||||
pass
|
pass
|
||||||
@@ -656,10 +659,12 @@ def bar():
|
|||||||
({'return': 'typing.Union[str, int]'}, ['int', 'str'], ''),
|
({'return': 'typing.Union[str, int]'}, ['int', 'str'], ''),
|
||||||
({'return': 'typing.Union["str", int]'},
|
({'return': 'typing.Union["str", int]'},
|
||||||
['int', 'str'] if sys.version_info >= (3, 9) else ['int'], ''),
|
['int', 'str'] if sys.version_info >= (3, 9) else ['int'], ''),
|
||||||
({'return': 'typing.Union["str", 1]'}, [], ''),
|
({'return': 'typing.Union["str", 1]'},
|
||||||
|
['str'] if sys.version_info >= (3, 11) else [], ''),
|
||||||
({'return': 'typing.Optional[str]'}, ['NoneType', 'str'], ''),
|
({'return': 'typing.Optional[str]'}, ['NoneType', 'str'], ''),
|
||||||
({'return': 'typing.Optional[str, int]'}, [], ''), # Takes only one arg
|
({'return': 'typing.Optional[str, int]'}, [], ''), # Takes only one arg
|
||||||
({'return': 'typing.Any'}, [], ''),
|
({'return': 'typing.Any'},
|
||||||
|
['_AnyMeta'] if sys.version_info >= (3, 11) else [], ''),
|
||||||
|
|
||||||
({'return': 'typing.Tuple[int, str]'},
|
({'return': 'typing.Tuple[int, str]'},
|
||||||
['Tuple' if sys.version_info[:2] == (3, 6) else 'tuple'], ''),
|
['Tuple' if sys.version_info[:2] == (3, 6) else 'tuple'], ''),
|
||||||
@@ -739,3 +744,114 @@ def test_param_infer_default():
     param, = abs_sig.params
     assert param.name == 'x'
     assert param.infer_default() == []
+
+
+@pytest.mark.parametrize(
+    'code, expected', [
+        ("random.triangular(", ['high=', 'low=', 'mode=']),
+        ("random.triangular(low=1, ", ['high=', 'mode=']),
+        ("random.triangular(high=1, ", ['low=', 'mode=']),
+        ("random.triangular(low=1, high=2, ", ['mode=']),
+        ("random.triangular(low=1, mode=2, ", ['high=']),
+    ],
+)
+def test_keyword_param_completion(code, expected):
+    import random
+    completions = jedi.Interpreter(code, [locals()]).complete()
+    assert expected == [c.name for c in completions if c.name.endswith('=')]
+
+
+@pytest.mark.parametrize('class_is_findable', [False, True])
+def test_avoid_descriptor_executions_if_not_necessary(class_is_findable):
+    counter = 0
+
+    class AvoidDescriptor(object):
+        @property
+        def prop(self):
+            nonlocal counter
+            counter += 1
+            return self
+
+    if not class_is_findable:
+        AvoidDescriptor.__name__ = "something_somewhere"
+    namespace = {'b': AvoidDescriptor()}
+    expected = ['prop']
+    _assert_interpreter_complete('b.pro', namespace, expected, check_type=True)
+    assert counter == 0
+    _assert_interpreter_complete('b.prop.pro', namespace, expected, check_type=True)
+    assert counter == 1
+
+
+class Hello:
+    its_me = 1
+
+
+@pytest.mark.parametrize('class_is_findable', [False, True])
+def test_try_to_use_return_annotation_for_property(class_is_findable):
+    class WithProperties(object):
+        @property
+        def with_annotation1(self) -> str:
+            raise BaseException
+
+        @property
+        def with_annotation2(self) -> 'str':
+            raise BaseException
+
+        @property
+        def with_annotation3(self) -> Hello:
+            raise BaseException
+
+        @property
+        def with_annotation4(self) -> 'Hello':
+            raise BaseException
+
+        @property
+        def with_annotation_garbage1(self) -> 'asldjflksjdfljdslkjfsl':  # noqa
+            return Hello()
+
+        @property
+        def with_annotation_garbage2(self) -> 'sdf$@@$5*+8':  # noqa
+            return Hello()
+
+        @property
+        def without_annotation(self):
+            return ""
+
+    if not class_is_findable:
+        WithProperties.__name__ = "something_somewhere"
+        Hello.__name__ = "something_somewhere_else"
+
+    namespace = {'p': WithProperties()}
+    _assert_interpreter_complete('p.without_annotation.upp', namespace, ['upper'])
+    _assert_interpreter_complete('p.with_annotation1.upp', namespace, ['upper'])
+    _assert_interpreter_complete('p.with_annotation2.upp', namespace, ['upper'])
+    _assert_interpreter_complete('p.with_annotation3.its', namespace, ['its_me'])
+    _assert_interpreter_complete('p.with_annotation4.its', namespace, ['its_me'])
+    # This is a fallback, if the annotations don't help
+    _assert_interpreter_complete('p.with_annotation_garbage1.its', namespace, ['its_me'])
+    _assert_interpreter_complete('p.with_annotation_garbage2.its', namespace, ['its_me'])
+
+
+def test_nested__getitem__():
+    d = {'foo': {'bar': 1}}
+    _assert_interpreter_complete('d["fo', locals(), ['"foo"'])
+    _assert_interpreter_complete('d["foo"]["ba', locals(), ['"bar"'])
+    _assert_interpreter_complete('(d["foo"])["ba', locals(), ['"bar"'])
+    _assert_interpreter_complete('((d["foo"]))["ba', locals(), ['"bar"'])
+
+
+@pytest.mark.parametrize('class_is_findable', [False, True])
+def test_custom__getitem__(class_is_findable, allow_unsafe_getattr):
+    class CustomGetItem:
+        def __getitem__(self, x: int):
+            return "asdf"
+
+    if not class_is_findable:
+        CustomGetItem.__name__ = "something_somewhere"
+
+    namespace = {'c': CustomGetItem()}
+    if not class_is_findable and not allow_unsafe_getattr:
+        expected = []
+    else:
+        expected = ['upper']
+    _assert_interpreter_complete('c["a"].up', namespace, expected)
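The added tests drive completion through `jedi.Interpreter`, which works against live objects rather than static sources. A small usage sketch of the keyword-argument completion covered by `test_keyword_param_completion` (the expected names match the parametrized case in the hunk above):

    import random

    import jedi

    # Completions whose names end in '=' are the keyword parameters
    # that have not been passed to the call yet.
    completions = jedi.Interpreter('random.triangular(low=1, ', [locals()]).complete()
    print([c.name for c in completions if c.name.endswith('=')])  # ['high=', 'mode=']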
@@ -178,7 +178,7 @@ def test_is_potential_project(path, expected):

     if expected is None:
         try:
-            expected = _CONTAINS_POTENTIAL_PROJECT in os.listdir(path)
+            expected = bool(set(_CONTAINS_POTENTIAL_PROJECT) & set(os.listdir(path)))
         except OSError:
             expected = False

@@ -1,10 +1,13 @@
 import os
+import shutil
 from textwrap import dedent
 from pathlib import Path
+import platform

 import pytest

 import jedi
+from test.helpers import get_example_dir


 @pytest.fixture()
@@ -51,6 +54,47 @@ def test_rename_mod(Script, dir_with_content):
         ''').format(dir=dir_with_content)


+@pytest.mark.skipif('sys.version_info[:2] < (3, 8)', message="Python 3.8 introduces dirs_exist_ok")
+def test_namespace_package(Script, tmpdir):
+    origin = get_example_dir('implicit_namespace_package')
+    shutil.copytree(origin, tmpdir.strpath, dirs_exist_ok=True)
+    sys_path = [
+        os.path.join(tmpdir.strpath, 'ns1'),
+        os.path.join(tmpdir.strpath, 'ns2')
+    ]
+    script_path = os.path.join(tmpdir.strpath, 'script.py')
+    script = Script(
+        'import pkg\n',
+        path=script_path,
+        project=jedi.Project(os.path.join(tmpdir.strpath, 'does-not-exist'), sys_path=sys_path),
+    )
+    refactoring = script.rename(line=1, new_name='new_pkg')
+    refactoring.apply()
+    old1 = os.path.join(sys_path[0], "pkg")
+    new1 = os.path.join(sys_path[0], "new_pkg")
+    old2 = os.path.join(sys_path[1], "pkg")
+    new2 = os.path.join(sys_path[1], "new_pkg")
+    assert not os.path.exists(old1)
+    assert os.path.exists(new1)
+    assert not os.path.exists(old2)
+    assert os.path.exists(new2)
+
+    changed, = iter(refactoring.get_changed_files().values())
+    assert changed.get_new_code() == "import new_pkg\n"
+
+    assert refactoring.get_diff() == dedent(f'''\
+        rename from {old1}
+        rename to {new1}
+        rename from {old2}
+        rename to {new2}
+        --- {script_path}
+        +++ {script_path}
+        @@ -1,2 +1,2 @@
+        -import pkg
+        +import new_pkg
+        ''').format(dir=dir_with_content)
+
+
 def test_rename_none_path(Script):
     refactoring = Script('foo', path=None).rename(new_name='bar')
     with pytest.raises(jedi.RefactoringError, match='on a Script with path=None'):
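The new `test_namespace_package` walks jedi's rename refactoring end to end: build a `Script`, call `rename()`, inspect the result, apply it. A minimal sketch of the same API on a plain variable (the path is hypothetical, and `apply()` is left commented out because it would write to disk):

    import jedi

    source = 'foo = 1\nbar = foo\n'
    script = jedi.Script(source, path='example_script.py')  # hypothetical path
    refactoring = script.rename(line=1, column=0, new_name='baz')
    print(refactoring.get_diff())  # unified diff renaming foo -> baz on both lines
    # refactoring.apply()          # would write the change (and any file renames) to disk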
@@ -70,3 +114,23 @@ def test_diff_without_ending_newline(Script):
         -a
         +c
         ''')
+
+
+def test_diff_path_outside_of_project(Script):
+    if platform.system().lower() == 'windows':
+        abs_path = r'D:\unknown_dir\file.py'
+    else:
+        abs_path = '/unknown_dir/file.py'
+    script = Script(
+        code='foo = 1',
+        path=abs_path,
+        project=jedi.get_default_project()
+    )
+    diff = script.rename(line=1, column=0, new_name='bar').get_diff()
+    assert diff == dedent(f'''\
+        --- {abs_path}
+        +++ {abs_path}
+        @@ -1 +1 @@
+        -foo = 1
+        +bar = 1
+        ''')
@@ -43,6 +43,9 @@ def test_implicit_namespace_package(Script):
         solution = "foo = '%s'" % solution
         assert completion.description == solution

+    c, = script_with_path('import pkg').complete()
+    assert c.docstring() == ""
+

 def test_implicit_nested_namespace_package(Script):
     code = 'from implicit_nested_namespaces.namespace.pkg.module import CONST'
@@ -297,7 +297,6 @@ def test_os_issues(Script):
     # Github issue #759
     s = 'import os, s'
    assert 'sys' in import_names(s)
-    assert 'path' not in import_names(s, column=len(s) - 1)
     assert 'os' in import_names(s, column=len(s) - 3)

     # Some more checks
@@ -356,6 +356,49 @@ def test_dataclass_signature(Script, skip_pre_python37, start, start_params):
     assert price.name == 'float'


+@pytest.mark.parametrize(
+    'start, start_params', [
+        ['@define\nclass X:', []],
+        ['@frozen\nclass X:', []],
+        ['@define(eq=True)\nclass X:', []],
+        [dedent('''
+            class Y():
+                y: int
+            @define
+            class X(Y):'''), []],
+        [dedent('''
+            @define
+            class Y():
+                y: int
+                z = 5
+            @define
+            class X(Y):'''), ['y']],
+    ]
+)
+def test_attrs_signature(Script, skip_pre_python37, start, start_params):
+    has_attrs = bool(Script('import attrs').infer())
+    if not has_attrs:
+        raise pytest.skip("attrs needed in target environment to run this test")
+
+    code = dedent('''
+        name: str
+        foo = 3
+        price: float
+        quantity: int = 0.0
+
+        X(''')
+
+    # attrs exposes two namespaces
+    code = 'from attrs import define, frozen\n' + start + code
+
+    sig, = Script(code).get_signatures()
+    assert [p.name for p in sig.params] == start_params + ['name', 'price', 'quantity']
+    quantity, = sig.params[-1].infer()
+    assert quantity.name == 'int'
+    price, = sig.params[-2].infer()
+    assert price.name == 'float'
+
+
 @pytest.mark.parametrize(
     'stmt, expected', [
         ('args = 1', 'wrapped(*args, b, c)'),
@@ -30,14 +30,16 @@ def test_paths_from_assignment(Script):
     assert paths('sys.path, other = ["a"], 2') == set()


-def test_venv_and_pths(venv_path):
+def test_venv_and_pths(venv_path, environment):
     pjoin = os.path.join

-    site_pkg_path = pjoin(venv_path, 'lib')
     if os.name == 'nt':
-        site_pkg_path = pjoin(site_pkg_path, 'site-packages')
+        if environment.version_info < (3, 11):
+            site_pkg_path = pjoin(venv_path, 'lib', 'site-packages')
+        else:
+            site_pkg_path = pjoin(venv_path, 'Lib', 'site-packages')
     else:
-        site_pkg_path = glob(pjoin(site_pkg_path, 'python*', 'site-packages'))[0]
+        site_pkg_path = glob(pjoin(venv_path, 'lib', 'python*', 'site-packages'))[0]
     shutil.rmtree(site_pkg_path)
     shutil.copytree(get_example_dir('sample_venvs', 'pth_directory'), site_pkg_path)

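The updated `test_venv_and_pths` now asks the target environment for its version before deciding where the sample venv keeps `site-packages`. A small sketch that mirrors the same branching (the helper name and the `version_info` parameter are assumptions for illustration, not jedi API):

    import os
    import sys
    from glob import glob

    def venv_site_packages(venv_path, version_info=sys.version_info):
        # Mirrors the test's expectation: 'Lib' on Windows from 3.11 on,
        # lowercase 'lib' before that; POSIX layouts include the python version.
        if os.name == 'nt':
            if version_info < (3, 11):
                return os.path.join(venv_path, 'lib', 'site-packages')
            return os.path.join(venv_path, 'Lib', 'site-packages')
        return glob(os.path.join(venv_path, 'lib', 'python*', 'site-packages'))[0]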
@@ -1,4 +1,6 @@
 import os
+import sys
+from collections import namedtuple

 import pytest

@@ -42,6 +44,40 @@ def test_completion(case, monkeypatch, environment, has_django):

     if (not has_django) and case.path.endswith('django.py'):
         pytest.skip('Needs django to be installed to run this test.')
+
+    if case.path.endswith("pytest.py"):
+        # to test finding pytest fixtures from external plugins
+        # add a stub pytest plugin to the project sys_path...
+        pytest_plugin_dir = str(helpers.get_example_dir("pytest_plugin_package"))
+        case._project.added_sys_path = [pytest_plugin_dir]
+
+        # ... and mock the entry points to include it
+        # see https://docs.pytest.org/en/stable/how-to/writing_plugins.html#setuptools-entry-points
+        if sys.version_info >= (3, 8):
+            def mock_entry_points(*, group=None):
+                import importlib.metadata
+                entries = [importlib.metadata.EntryPoint(
+                    name=None,
+                    value="pytest_plugin.plugin",
+                    group="pytest11",
+                )]
+
+                if sys.version_info >= (3, 10):
+                    assert group == "pytest11"
+                    return entries
+                else:
+                    assert group is None
+                    return {"pytest11": entries}
+
+            monkeypatch.setattr("importlib.metadata.entry_points", mock_entry_points)
+        else:
+            def mock_iter_entry_points(group):
+                assert group == "pytest11"
+                EntryPoint = namedtuple("EntryPoint", ["module_name"])
+                return [EntryPoint("pytest_plugin.plugin")]
+
+            monkeypatch.setattr("pkg_resources.iter_entry_points", mock_iter_entry_points)
+
     repo_root = helpers.root_dir
     monkeypatch.chdir(os.path.join(repo_root, 'jedi'))
     case.run(assert_case_equal, environment)
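The mocks above mirror how pytest plugin entry points are looked up on different Python versions: `importlib.metadata.entry_points(group=...)` on 3.10+, the group dict returned by `entry_points()` on 3.8/3.9, and `pkg_resources.iter_entry_points` before that. A standalone sketch of that lookup (`iter_pytest11_entry_points` is a hypothetical helper, not part of the test suite):

    import sys

    def iter_pytest11_entry_points():
        # Return the entry points that register external pytest plugins.
        if sys.version_info >= (3, 10):
            from importlib.metadata import entry_points
            return list(entry_points(group="pytest11"))
        elif sys.version_info >= (3, 8):
            from importlib.metadata import entry_points
            return list(entry_points().get("pytest11", []))
        else:
            import pkg_resources
            return list(pkg_resources.iter_entry_points(group="pytest11"))

    for ep in iter_pytest11_entry_points():
        print(ep)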