542 Commits

Author SHA1 Message Date
Joffrey Bluthé
5e06d386dd Merge 1831b60c73 into ce109a8cdf 2024-12-25 23:34:44 +00:00
bluthej
1831b60c73 Add graphviz to installed APT packages 2024-12-26 00:28:08 +01:00
Dave Halter
ce109a8cdf Fix a small fail in test_duplicated_import
2024-11-25 09:49:44 +01:00
Dave Halter
ecb922c6ff Fix a few issues around duplicated import paths, fixes #2033
2024-11-25 00:53:09 +01:00
Dave Halter
41e9e957e7 Increase Jedi version 2024-11-11 02:39:18 +01:00
Dave Halter
b225678a42 Add a release for Python 3.13
2024-11-10 23:04:28 +01:00
Dave Halter
30adf43a89 Merge pull request #2027 from WutingjiaX/feat/filterImported
Filter duplicate imports when completing
2024-10-17 21:10:55 +00:00
wutingjia
be6df62434 filter imported names during completion 2024-10-17 19:20:39 +08:00
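An illustrative sketch of how this filtering might be exercised via Jedi's public API; the exact filtering semantics are an assumption drawn from the commit titles above, not a confirmed description of the implementation.

```python
import jedi

source = (
    "from os import path\n"
    "from os import "
)
# Per the change above, names that are already imported (here `path`)
# should be filtered out of the completions offered on the second line.
names = [c.name for c in jedi.Script(source).complete(2, 15)]
print("path" in names)
```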
Dave Halter
e53359ad88 Fix a test that had issues with a minor upgrade of Python 3.12
2024-10-16 12:56:10 +02:00
Dave Halter
6e5d5b779c Enable workflow_dispatch in CI 2024-10-16 12:39:33 +02:00
Nguyễn Hồng Quân
91ffdead32 Sort completions by input resemblance. (#2018)
* Sort completions by input resemblance.

Fixes #2017

* Clean code
2024-07-15 08:15:20 +00:00
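The PR above does not include its ranking code here, so the following is only a sketch of "sort by input resemblance" using difflib; Jedi's actual scoring may differ.

```python
import difflib

def sort_by_resemblance(typed, candidates):
    """Order completion candidates by similarity to the typed text."""
    def score(name):
        return difflib.SequenceMatcher(None, typed.lower(), name.lower()).ratio()
    return sorted(candidates, key=score, reverse=True)

# Candidates that resemble the input more closely come first.
print(sort_by_resemblance("pth", ["print", "path", "pathlib"]))
```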
WutingjiaX
2859e4f409 Support inferring not expr to bool (#2016)
* support inferring some not expr

* format

---------

Co-authored-by: wutingjia <wutingjia@bytedance.com>
2024-07-12 12:58:39 +00:00
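A hedged usage sketch of what this inference change means in practice; exact output depends on the Jedi version.

```python
import jedi

source = (
    "flag = not some_unknown_value\n"
    "flag"
)
# Even though `some_unknown_value` cannot be resolved, `not <expr>` always
# yields a bool, so inferring `flag` should now report `bool`.
print([d.name for d in jedi.Script(source).infer(2, 0)])
```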
Dave Halter
8ee4c26ae4 Merge pull request #2014 from WutingjiaX/feat/in_operator
When inferring comparison operators, return a definite type instead of NO_VALUES for the in/not in operator
2024-07-10 18:03:32 +00:00
wutingjia
4d09ac07e4 When inferring comparison operators, return a definite type instead of NO_VALUES for the in/not in operator 2024-07-10 11:20:11 +08:00
Peter Law
82d1902f38 Merge branch 'python-3.13' 2024-07-06 11:39:06 +01:00
Peter Law
857c9be500 Ignore py__name__ issues for functools.partial in Python 3.13.0b3+
See https://github.com/davidhalter/jedi/issues/2012 for details.
2024-07-05 21:38:28 +01:00
Peter Law
e839683e91 Merge branch 'ensure-unique-subprocess-reference-ids' 2024-07-04 22:39:29 +01:00
Peter Law
255186376e Cope with Python 3.13 moving pathlib's implementation
Jedi passes pickles to subprocesses which are running the target
version of Python and thus may not be the same as the version
under which Jedi itself is running. In Python 3.13, pathlib is
being refactored to allow for easier extension and has thus moved
most of its internal implementation to a submodule. Unfortunately
this changes the paths of the symbols, causing pickles of those
types to fail to load in earlier versions of Python.

This commit introduces a custom unpickler which accounts for this
move, allowing bi-directional passing of pickles to work.
2024-07-02 21:37:34 +01:00
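The commit describes a custom unpickler that remaps the moved pathlib symbols. Below is a minimal sketch of that technique; the module names in the rename table are assumptions, and this is not Jedi's actual code.

```python
import io
import pickle

# Assumed rename: Python 3.13 keeps the path classes in a pathlib submodule,
# so pickles written there must be mapped back for older interpreters.
_MODULE_RENAMES = {
    "pathlib._local": "pathlib",
}

class _CompatUnpickler(pickle.Unpickler):
    """Unpickler that tolerates symbols having moved between modules."""

    def find_class(self, module, name):
        module = _MODULE_RENAMES.get(module, module)
        return super().find_class(module, name)

def loads_compat(data: bytes):
    return _CompatUnpickler(io.BytesIO(data)).load()
```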
Peter Law
a67deeb602 Fix race condition around subprocess inference state tidyup
There was a race condition due to the combination of Python's
object ids being re-usable and Jedi persisting such ids beyond
the real lifetime of some objects. This could lead to the
subprocess' view of the lifetime of `InferenceState` contexts
getting out of step with that in the parent process and
resulting in errors when removing them. It is also possible
that this could result in erroneous results being reported,
however this was not directly observed.

The race was specifically:
- `InferenceState` A created, gets id 1
- `InferenceStateSubprocess` A' created, uses `InferenceState`
  A which it stores as a weakref and an id
- `InferenceStateSubprocess` A' is used, the sub-process learns
  about an `InferenceState` with id 1
- `InferenceState` A goes away, `InferenceStateSubprocess` A' is
  not yet garbage collected
- `InferenceState` B created, gets id 1
- `InferenceStateSubprocess` B' created, uses `InferenceState` B
  which it stores as a weakref and an id
- `InferenceStateSubprocess` B' is used, the sub-process re-uses
  its entry for an `InferenceState` with id 1

At this point the order of operations between the two
`InferenceStateSubprocess` instances going away is immaterial --
both will trigger a removal of a state with id 1. As long as B'
doesn't try to use the sub-process again after the first removal
has happened then the second removal will fail.

This commit resolves the race condition by coupling the context
in the subprocess to the corresponding manager class instance
in the parent process, rather than to the consumer `InferenceState`.

See inline comments for further details.
2024-07-02 21:37:34 +01:00
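A small self-contained sketch (not Jedi code) of why keying long-lived state on id() plus a weakref is fragile: CPython may reuse the integer id of a dead object.

```python
import weakref

class InferenceState:  # stand-in for the real class
    pass

a = InferenceState()
key_a = id(a)             # the subprocess is keyed by an integer like this
ref_a = weakref.ref(a)
del a                     # A dies, but the remote entry for key_a may linger

b = InferenceState()      # CPython may hand B the memory A just freed...
print(id(b) == key_a)     # ...so B can collide with A's key (often True)
```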
Peter Law
d543d1d004 Support Python 3.13
This moves to using the 3.13 grammar as well as testing 3.13 in CI.
2024-07-02 21:37:34 +01:00
Peter Law
9d18b7c36d Document how Jedi manages its subprocesses
This is derived from my understanding of the code, plus a bit of
experimentation.
2024-07-02 21:37:34 +01:00
Peter Law
340dedd021 Use an explicit mapping for locals in this test
In Python 3.13 the `locals` function now returns a fresh mapping
each time it's called (when called in a function). We thus need
to store a reference to the mapping being used, rather than
re-fetching it each time.

Since we don't actually need to modify the locals within the scope
of the test function itself, it suffices to use our own mapping
here rather than the result of calling `locals`, which fully
isolates this test from the nature of that function.

Fixes https://github.com/davidhalter/jedi/issues/2002
2024-07-02 21:37:34 +01:00
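A short sketch of the behaviour change behind this fix (PEP 667): on Python 3.13, each locals() call inside a function returns a fresh snapshot, so code that wants a stable namespace should keep its own mapping.

```python
def demo():
    x = 1
    snapshot = locals()   # taken once and kept
    x = 2
    # On 3.13+ the stored snapshot is independent of later calls:
    # snapshot["x"] stays 1 while a fresh locals() call reports 2.
    return snapshot["x"], locals()["x"]

print(demo())
```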
Peter Law
fff6e0ce2e Drop unused member
I'm not sure where this was used in the past, however it appears
to be unused now. Removing this simplifies a change I'm about to
make to _InferenceStateProcess.
2024-07-02 21:37:34 +01:00
Peter Law
473b35e6ec Ignore more items extra in Python 3.13 2024-07-02 21:37:34 +01:00
Peter Law
a0527a5af5 Pass through the inference state id rather than recomputing it
This removes some of the coupling between the management of the
underlying process and the inference state itself, which intends
to enable changing the origin of the id. This will be useful in
the next commit.
2024-07-02 21:37:34 +01:00
Peter Law
bbbaad21e8 Clarify filter by flipping the boolean logic 2024-07-02 21:37:34 +01:00
Peter Law
ee90cd97b6 Name this list of accepted symbol differences
This should make it easier to add new entries as well as clarifying
the intent of this filter.
2024-07-02 21:37:34 +01:00
Peter Law
68e435cc66 Update 'next' signature in Python 3.13 where it's been fixed 2024-07-02 21:37:34 +01:00
Dave Halter
b69d4d87c3 Merge pull request #2006 from PeterJCLaw/lint-tests
Enable linting of our test code, but not our test data files
2024-06-30 23:31:16 +00:00
Peter Law
0fcb4468e7 Fix or ignore lints in tests 2024-06-30 19:05:38 +01:00
Peter Law
5c578e1899 Enable linting of our test code, but not our test data files 2024-06-30 19:01:03 +01:00
Peter Law
9bad42c0db Update actions/setup-python to avoid Node JS deprecations 2024-06-29 16:57:25 +01:00
Peter Law
3118462a93 Allow more recent version of pytest
pytest 7.x and 8.x work without changes.
2024-06-23 13:20:10 +01:00
Dave Halter
065580b5d4 Use the proper super call for setUp 2024-05-24 21:29:08 +02:00
Dave Halter
39c8317922 Merge pull request #1996 from frenzymadness/patch-2
Fix compatibility with pytest 8
2024-05-22 22:44:47 +00:00
Lumír 'Frenzy' Balhar
ab97e9f784 Fix compatibility with pytest 8
Fixes #1995
2024-05-22 23:13:19 +02:00
Dave Halter
f7c9ee9433 Merge pull request #1994 from HairlessVillager/patch-1
Add a windows path in `_get_executable_path()`
2024-05-06 10:06:18 +00:00
HairlessVillager
8792c6d432 Add a windows path in _get_executable_path() 2024-05-06 17:04:38 +08:00
pyscripter
a4574a50d0 Fix resolving of names that are not annotations (#1989), fixes #1988
* Fix #1988

* Fix failing code quality test.

* Fix flake8 W504 line break after binary operator. Now formatted by Black.

* Added test to test/completion/pep0484_basic.py
Addressed feedback from Dave
2024-04-25 09:11:24 +00:00
Dave Halter
f11014fc5d Wording 2024-04-21 10:40:00 +02:00
Ehsan Iran-Nejad
54a6dadde3 properties with setters are now reported as 'property' for completion (#1983)
* properties with setters are now reported as 'property' for completion

* code cleanups

* fixed test

* fixed tests

* Revert "fixed test"

This reverts commit a80c955a48.

* code quality cleanup

* so picky

* Revert "Revert "fixed test""

This reverts commit 58dfc5292e.

* updated test per maintainer comments #1983

* removed extra char
2024-02-19 12:07:47 +00:00
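A hedged usage sketch of the completion behaviour this PR describes; the exact `type` string is taken from the PR title and may vary by Jedi version.

```python
import jedi

source = (
    "class Widget:\n"
    "    @property\n"
    "    def size(self):\n"
    "        return 1\n"
    "    @size.setter\n"
    "    def size(self, value):\n"
    "        pass\n"
    "Widget().si"
)
# With this change, a property that also has a setter should be reported
# with type 'property' rather than a plain function-like type.
for completion in jedi.Script(source).complete(8, 11):
    print(completion.name, completion.type)
```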
Dave Halter
740b474eda Merge pull request #1907 from moser/support-fixtures-from-pytest-plugins
Add support for pytest fixtures from local pytest plugins.
2024-01-26 16:15:37 +00:00
Martin Vielsmaier
950ce70239 Prioritize conftest over plugins for pytest fixtures 2024-01-26 10:39:15 +01:00
Martin Vielsmaier
6982a49977 Fix codestyle 2024-01-24 13:11:26 +01:00
Martin Vielsmaier
9b8cece7ef Improve pytest plugin loading 2024-01-23 20:54:21 +01:00
Martin Vielsmaier
162034b387 Fix line length. 2024-01-23 20:54:21 +01:00
Martin Vielsmaier
7494c9495e Update contributors. 2024-01-23 20:54:21 +01:00
Martin Vielsmaier
7d77f61040 Add support for pytest fixtures from local pytest plugins. 2024-01-23 20:54:21 +01:00
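An illustrative project layout (file and module names are hypothetical) for the kind of setup this series targets: fixtures defined in a local plugin module registered via `pytest_plugins`, which Jedi's pytest support can now resolve. The snippet mixes several files and is a sketch rather than a runnable single script.

```python
# conftest.py -- registers a local plugin module (hypothetical name)
pytest_plugins = ["tests.local_plugin"]

# tests/local_plugin.py -- fixtures defined here become discoverable
import pytest

@pytest.fixture
def sample_config():
    return {"debug": True}

# tests/test_app.py -- the fixture can be completed/inferred like one
# defined directly in conftest.py
def test_uses_fixture(sample_config):
    assert sample_config["debug"] is True
```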
Dave Halter
11280ef502 Merge pull request #1977 from HairlessVillager/master
[Typo] Fix some misalignment of docstrings
2023-12-28 10:20:12 +00:00
HairlessVillager
94ec4b873a Fix some misalignment of docstrings 2023-12-28 13:26:12 +08:00
Dave Halter
f8e502f90c Apparently read the docs also needs this 2023-10-04 09:41:55 +02:00
Dave Halter
dc20f2e5a0 Try to fix readthedocs 2023-10-04 09:40:41 +02:00
Dave Halter
4e175ca82b Prepare for release 0.19.1 2023-10-02 10:55:17 +02:00
Dave Halter
30e9b7b5be Merge pull request #1961 from PeterJCLaw/docs-link
Include a link to the docs in the package metadata
2023-09-23 23:56:11 +00:00
Peter Law
83545bc9ec Include a link to the docs in the package metadata 2023-09-22 21:33:52 +01:00
Peter Law
57e7b83455 Add missing import
I suspect this got lost in a merge somewhere, probably the
combination of 7e533ca7e1 and the
Python 3.12 work in a60fdba1d4.
2023-09-22 21:29:31 +01:00
Dave Halter
0770372857 Merge pull request #1956 from PeterJCLaw/python-3.12
Support Python 3.12
2023-09-17 19:25:45 +00:00
Peter Law
7e533ca7e1 Drop redundant conditional skips for unsupported Python versions 2023-09-17 18:38:12 +01:00
Peter Law
a60fdba1d4 Adjust for the documentation change of next in Python 3.12
The signature of the builtin isn't actually changing in Python 3.12,
however its documentation has changed.
2023-09-17 18:27:53 +01:00
Dave Halter
9d399a9229 Merge pull request #1959 from davidhalter/unpin-test-django
Unpin Django in tests
2023-09-17 12:37:00 +00:00
Peter Law
770cdade00 Claim support for Python 3.12 2023-09-16 21:41:06 +01:00
Peter Law
29890c1f29 Ignore linux-only os.CLONE_* constants in Python 3.12 in import test 2023-09-16 21:41:06 +01:00
Peter Law
159566e1a0 Add Python 3.12 as a tested platform 2023-09-16 21:41:06 +01:00
Peter Law
a80618a2df Unpin Django in tests
It's not completely clear why this was pinned originally, though
at the time Jedi supported Python 2.7 as well as 3.5-3.8, so that
may have had something to do with it.

Removing this pin now seems to work in CI and unblocks some issues
we're seeing around Python 3.12 (specifically that Django<3.1
implicitly relies on distutils, which is no longer available by
default, and possibly other issues).
2023-09-16 21:40:34 +01:00
Peter Law
4bc1b6ef99 Bump use of actions/checkout to avoid Node JS deprecations 2023-09-16 18:12:28 +01:00
Peter Law
d655d65d3a Fix typo in comment 2023-09-16 18:03:56 +01:00
Dave Halter
51f4a99a1e Bump version to 0.19.0 2023-07-29 00:57:34 +02:00
Dave Halter
93c14d2e6e Add release notes for 0.19.0 2023-07-29 00:46:38 +02:00
Dave Halter
57aefed6ea Allow unsafe custom __getitem__ executions when allow unsafe executions is on 2023-07-29 00:33:09 +02:00
Dave Halter
8a4b079d0f allow_descriptor_getattr -> allow_unsafe_interpreter_executions 2023-07-29 00:06:55 +02:00
Dave Halter
62cbcb0844 Make nested dict completions possible.
See also https://github.com/ipython/ipython/issues/13866
2023-07-28 23:50:38 +02:00
Dave Halter
d8420d0f72 Add a note to the changelog 2023-07-28 22:59:03 +02:00
Dave Halter
886279fb6d Try to use the return annotations of properties, if available, fixes #1933 2023-07-28 22:35:15 +02:00
Dave Halter
ff3a7f367f Avoid evaluating properties just for the api type, improves #1933 2023-07-28 22:11:15 +02:00
Dave Halter
1f70e3301e Revert "Avoid one layer of caching that is probably useless"
This reverts commit a34c348a55.
2023-07-28 16:10:30 +02:00
Dave Halter
a34c348a55 Avoid one layer of caching that is probably useless 2023-07-28 16:05:56 +02:00
Dave Halter
972123c9c9 Introduce the property return annotation 2023-07-28 15:54:54 +02:00
Dave Halter
6455a14841 Avoid multiple getattrs instead of a single one, see also #1933 2023-07-28 15:10:37 +02:00
Dave Halter
8d9e3ab3a7 Simplify 2023-07-28 13:10:26 +02:00
Dave Halter
048173e467 Remove a piece of unimportant code, see discussion in #1933 2023-07-28 10:15:28 +02:00
Dave Halter
1947e7dd56 Avoid dynamic params search for Interpreter, fixes #1899 2023-07-27 13:49:27 +02:00
Dave Halter
01d8da8f73 Reset the recursion limitations at the start of the main Script calls, fixes #1796 2023-07-27 13:14:24 +02:00
Dave Halter
6ea5ad7b19 Fix issue around completions in with statements that have multiple with_items, fixes #1931 2023-07-27 11:54:39 +02:00
Dave Halter
cd4ca74d7a Satisfy flake8 2023-07-27 11:36:16 +02:00
Dave Halter
67d6262f45 Skip the namespace package test correctly 2023-07-27 10:07:16 +02:00
Dave Halter
5f19237a3e Fix renaming of namespace packages, fixes #1779 2023-07-27 03:09:25 +02:00
Dave Halter
f2444b4be5 Merge pull request #1943 from diegorodriguezv/patch-1
Fix language servers reference
2023-06-22 08:44:15 +00:00
diegorodriguezv
7028bbb5d5 Fix language servers reference 2023-06-21 19:06:53 -05:00
Dave Halter
3699ba0aa7 Merge pull request #1942 from lkh42t/inference-annotated
Support typing.Annotated inference
2023-06-19 00:28:27 +00:00
Luc Khai Hai
72d34f3d7d Support typing.Annotated inference 2023-06-17 20:46:03 +09:00
Peter Law
a28bd24bef Merge branch 'importlib-metadata-entry-points' 2023-05-28 12:08:09 +01:00
Peter Law
54cb64292c Support importlib.metadata entry points for newer python
pkg_resources is deprecated and liable to be dropped at some point.
2023-05-28 11:48:19 +01:00
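As a rough illustration of the switch away from pkg_resources (not Jedi's actual code; the function name is made up, and "pytest11" is only cited as the standard entry-point group that pytest plugins register under):

    # Minimal sketch: enumerate entry points with importlib.metadata (Python 3.8+).
    from importlib.metadata import entry_points

    def plugin_module_names(group="pytest11"):
        eps = entry_points()
        # entry_points() returned a mapping before Python 3.10 and a selectable
        # collection afterwards, so handle both shapes.
        selected = eps.select(group=group) if hasattr(eps, "select") else eps.get(group, [])
        return sorted({ep.value.split(":")[0] for ep in selected})

    print(plugin_module_names())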
Dave Halter
d421b920fa Merge pull request #1937 from PeterJCLaw/update-importlib-usage
Modernise importlib usage
2023-05-27 22:54:45 +00:00
Peter Law
c137eb6918 Modernise importlib usage
`find_module` is deprecated in all supported versions of Python and
is slated for removal in the upcoming 3.12. Happily it seems we
can move to the related `find_spec` and just hoist the loader from
the spec which that returns. (This is mostly what current `find_module`
implementations do anyway).
2023-05-27 22:03:51 +01:00
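A minimal sketch of the migration this commit describes (nothing here is taken from Jedi's internals): the loader that `find_module()` used to return directly is read off the `ModuleSpec` that `find_spec()` returns.

    import importlib.util

    def loader_for(name):
        # find_spec() replaces the deprecated find_module(); the loader is
        # now an attribute of the returned ModuleSpec (None if not found).
        spec = importlib.util.find_spec(name)
        return None if spec is None else spec.loader

    print(loader_for("json"))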
Peter Law
d67facc922 Merge branch 'update-github-actions' 2023-05-27 22:03:03 +01:00
Dave Halter
7023b645b1 Merge pull request #1935 from PeterJCLaw/fix-attrs-as-dataclass
Teach Jedi that `attrs`' `frozen` decorator also acts like a dataclass
2023-05-27 21:01:21 +00:00
Peter Law
b5120cc90b Update GitHub Actions for Node 16 support 2023-05-27 00:18:38 +01:00
Peter Law
483e78993d attrs' frozen decorator also acts like a dataclass
In 23.1.0 (specifically in 46053d703d)
the definition of the `frozen` decorator was tweaked slightly, such
that its type stub is separate from that for `define`. This means
that Jedi needs to be told about it as a separate member.

I've manually checked that this still works with the prior version
of `attrs`.

Fixes https://github.com/davidhalter/jedi/issues/1929
2023-05-27 00:13:55 +01:00
Peter Law
3dbcd2c6de Whitespace 2023-05-27 00:10:00 +01:00
Peter Law
ca36fcfa4b Fix typo in comment 2023-05-26 21:38:42 +01:00
Dave Halter
825c6b93bf Merge pull request #1930 from tachikoma-li/doc-fix
docs: Fix typo in acknowledgements
2023-04-21 21:55:14 +00:00
Li Li
c22585c6f2 small doc fix 2023-04-19 15:12:39 +10:00
Dave Halter
431d1e104d Merge pull request #1926 from dijonkitchen/patch-1
Update usage.rst to match Readme language servers
2023-04-12 15:50:21 +00:00
Dave Halter
adcd6ade8b Merge pull request #1927 from dijonkitchen/patch-2
docs: fix spelling
2023-04-12 15:49:40 +00:00
JC (Jonathan Chen)
32a1dd33a6 docs: fix spelling 2023-04-12 11:01:10 -04:00
JC (Jonathan Chen)
9ea01bcc69 Update usage.rst to match Readme language servers 2023-04-12 10:58:57 -04:00
Dave Halter
77cfefc1cc Add a security policy 2023-03-22 00:47:58 +01:00
Dave Halter
ff7d6c6e4c Merge pull request #1922 from zerocewl/add_pylsp_link
Added link to the python-lsp-server
2023-03-21 23:24:04 +00:00
Dave Halter
6ee33bd385 Merge pull request #1923 from dimbleby/help-after-newline
fix help when in column zero
2023-03-13 20:17:13 +00:00
David Hotham
0fbc2aafa3 fix help when in column zero 2023-03-12 14:21:09 +00:00
zerocewl
fe7e350051 Added link to the python-lsp-server 2023-03-01 16:27:11 +01:00
Dave Halter
b814ca2951 Merge pull request #1917 from PeterJCLaw/python-3.11
Support Python 3.11
2023-02-14 00:48:04 +00:00
Peter Law
aae2a8e3ed Cope with Windows virtualenvs different casing 2023-02-13 20:25:31 +00:00
Peter Law
67e0bec597 Support Python 3.11
This adds support for targeting Python 3.11 via picking up the
latest grammar from parso while also validating support for running
on 3.11 by adding it to the CI matrix.
2023-02-13 19:58:35 +00:00
Peter Law
c71e06fcb3 Clarify that this is also the latest flake8 version which supports 3.6 2023-02-13 19:57:38 +00:00
Peter Law
bbd5bcf3ca Merge branch 'update-mypy' 2023-02-13 19:57:20 +00:00
Dave Halter
d888c1b266 Merge pull request #1915 from PeterJCLaw/update-flake8
Update flake8 and fix issue found
2023-02-13 19:49:36 +00:00
Peter Law
83d0e23800 Type check setup.py too now we can 2023-02-13 19:40:16 +00:00
Peter Law
dc4e48d7c7 Be stricter about mypy needing error codes
These make it clearer what's being ignored and harder to
accidentally ignore more than expected.
2023-02-13 19:40:16 +00:00
Peter Law
664b10a5c6 Update mypy to the latest
This includes updating the ignore comments for things which mypy
now knows about or now complains about, as well as pulling in some
typeshed packages for things outside the standard library.
2023-02-13 19:40:16 +00:00
Peter Law
36a4b7d48c Update flake8 and fix issue found 2023-02-13 19:15:35 +00:00
Dave Halter
b0025ee6ba Merge pull request #1911 from krpatter-intc/allow_descriptor_getattr_official_support
Make allow_descriptor_getattr a non-private variable for more official support
2023-02-10 22:30:33 +00:00
Patterson, Kevin R
fac0b7f068 instance_allow_descriptor_getattr as public setting 2023-02-10 05:43:21 -06:00
Dave Halter
aeadba7cad Merge pull request #1910 from ghrist8p/1909-fix-sys-path-is-tuple
Replaced tuple passed as sys_path actual argument with list
2023-02-07 23:21:16 +00:00
Georgi Hristov
fd0e6aed96 Replaced tuple passed as sys_path actual argument with list
Fixes davidhalter#1909
2023-02-05 15:46:23 -08:00
Dave Halter
c89fa8e927 Merge pull request #1903 from s-t-e-v-e-n-k/python-311-string-typing
Support Python 3.11 typing changes
2023-01-10 19:57:58 +00:00
Steve Kowalik
00e23ddcee Support Python 3.11 typing changes
Python 3.11 has changed typing so that unions now return forward
references instead of erroring, and typing.Any is now an _AnyMeta class.
Correct the parameters for both of those.

Fixes #1858
2023-01-10 14:52:24 +11:00
Dave Halter
66e97e5b93 Jedi is now a fixed part of the Eric IDE 2022-12-16 15:37:22 +01:00
Dave Halter
0f5ea3de5f Revert "Removed all usages of Eric IDE, because apparently it's not using Jedi anymore"
This reverts commit e47bbbb851.
2022-12-16 15:36:31 +01:00
Dave Halter
e47bbbb851 Removed all usages of Eric IDE, because apparently it's not using Jedi anymore 2022-12-16 15:33:07 +01:00
Dave Halter
eaab706038 Prepare the release of 0.18.2 2022-11-21 23:23:46 +01:00
Dave Halter
41455480be Better search for venvs 2022-11-21 23:06:26 +01:00
Dave Halter
0a670d10dd Merge branch 'master' of github.com:davidhalter/jedi 2022-11-21 22:59:48 +01:00
Dave Halter
6b73d5c1bf Probably using the 3.10 grammar is better for stubs for now 2022-11-21 21:07:33 +01:00
Dave Halter
a3fed3b6a6 Remove a TODO that was already implemented 2022-11-14 08:39:11 +01:00
Dave Halter
66c52b4bc7 Try to fix a test for Windows 2022-11-13 23:48:43 +01:00
Dave Halter
89f9a3a7f1 Fix a Django test 2022-11-13 23:38:22 +01:00
Dave Halter
3a30008cc4 Fix keyword argument completion, fixes #1856 2022-11-13 20:26:00 +01:00
Dave Halter
b0d5fc2bd0 Fix errors around docs of namespace packages, fixes #1890, fixes #1822 2022-11-13 19:50:08 +01:00
Dave Halter
6e5db3f479 Fix a weird AttributeError, fixes #1765 2022-11-13 18:26:01 +01:00
Dave Halter
85780111e0 Use the latest grammar from parso for stubs, probably fixes #1864 2022-11-13 17:59:22 +01:00
Dave Halter
0ba48bbb9d Fix an issue with creating a diff, fixes #1757 2022-11-13 17:51:54 +01:00
Dave Halter
26f7878d97 Revert some of the logic around ClassVar completions, see #1847 2022-11-12 23:15:16 +01:00
Dave Halter
8027e1b162 Remove the ClassVar filter, see also #1847 2022-11-12 22:58:00 +01:00
Dave Halter
78a53bf005 Change a test slightly 2022-11-12 13:59:07 +01:00
Dave Halter
8485df416d Finally fix a Django test 2022-11-11 18:00:17 +01:00
Dave Halter
94e78340e1 Fix a formatting issue in CI 2022-11-11 17:54:57 +01:00
Dave Halter
f454989859 Now that ClassVars work differently fix a Django test 2022-11-11 17:52:35 +01:00
Dave Halter
e779f23ac7 Another small change towards tests 2022-11-11 17:50:05 +01:00
Dave Halter
3c40363a39 Remove another test that depends on specific pytest versions and is well covered by other tests 2022-11-11 17:47:02 +01:00
Dave Halter
a6cf2c338a Remove part of a test that is annoying to develop 2022-11-11 17:44:49 +01:00
Dave Halter
2a7311c1a0 Remove some unrelated things from .gitignore again 2022-11-11 17:15:46 +01:00
Dave Halter
81427e4408 Add a note about pytest entrypoints in CHANGELOG 2022-11-11 17:01:11 +01:00
Dave Halter
804e4b0ca2 Merge pull request #1861 from qmmp123/master
Fix: #1847
2022-11-11 16:00:39 +00:00
Dave Halter
3475ccfbd3 Merge pull request #1870 from Presburger/master
fix autocomplete crash in ycmd
2022-11-11 15:50:10 +00:00
Dave Halter
9723a0eed0 Merge pull request #1879 from marciomazza/find-external-pytest-fixtures
Find external pytest fixtures
2022-11-11 15:46:40 +00:00
Dave Halter
658f80fa1e Just pin all documentation generation dependencies 2022-11-11 16:36:23 +01:00
Dave Halter
31c2c508c3 Try to get jedi.readthedocs.org running again 2022-11-11 16:15:37 +01:00
Dave Halter
6c9cab2f8e Merge pull request #1889 from AndrewAmmerlaan/master
python3.11 compatibility
2022-10-20 19:08:52 +00:00
Andrew Ammerlaan
0a6ad1010c inference/compiled/subprocess/functions.py: Skip python3.11's frozen imports
Bug: https://github.com/davidhalter/jedi/issues/1858
Signed-off-by: Andrew Ammerlaan <andrewammerlaan@gentoo.org>
2022-10-19 16:53:17 +02:00
Dave Halter
3a60943f6e Merge pull request #1885 from asford/attrs_support
Extend dataclass constructor hinting to attrs next-gen apis.
2022-10-13 19:12:59 +00:00
Alex Ford
4d1e00c3ab Skip if attrs not in target environment.
Add check for attrs in test environment and skip if not installed.
This is patterned off the existing django tests.
2022-10-13 00:43:29 -07:00
Alex Ford
e15f51ecc1 Remove mutable from attrs signature tests 2022-10-11 17:55:57 -07:00
Alex Ford
eaa66b3dbb Update setup.py 2022-10-11 17:40:31 -07:00
Alex Ford
239d9e0b22 Add note to changelog 2022-10-11 17:40:31 -07:00
Alex Ford
40e1e3f560 Extend dataclass constructor hinting to attrs next-gen apis.
Trivially extends dataclass constructor hinting to attrs next-gen APIs.

This will stumble in cases where attrs extends beyond the standard
dataclasses API, such as complex use of defaults, converters, et al.
However, it likely covers the vast majority of cases which fall solidly
in the intersection of the two APIs.

Extension beyond these cases could use [PEP0681 dataclass_transforms],
however this is definitely a problem for another day.

[PEP0681 dataclass_transforms]: https://peps.python.org/pep-0681/

https://github.com/davidhalter/jedi/issues/1835
2022-10-11 17:40:31 -07:00
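For context, this is the shape of the attrs next-gen API the commit targets (illustrative class, assuming a recent attrs release that exposes the `attrs` namespace); completion on the constructor should then behave like it does for a dataclass:

    import attrs

    @attrs.define
    class Point:
        x: int
        y: int = 0

    p = Point(x=1, y=2)  # synthesized __init__(x: int, y: int = 0)
    print(p)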
Marcio Mazza
c243608ac6 Add your name to AUTHORS.txt 2022-09-05 17:31:14 -03:00
Marcio Mazza
e25750ecef Make code compatible with python < 3.8 2022-09-05 17:05:11 -03:00
Marcio Mazza
1a306fddbf Fix check pytest fixture from import on the right context 2022-09-04 13:12:13 -03:00
Marcio Mazza
ec425ed2af Add tests to find pytest fixtures from external plugins 2022-09-03 17:16:32 -03:00
Marcio Mazza
fa1e9ce9a7 Simplify entry points enumeration 2022-09-03 17:16:32 -03:00
Marcio Mazza
8447d7f3e4 Discard imports of modules as pytest fixtures 2022-09-03 17:16:32 -03:00
Marcio Mazza
27e13e4072 Allow for multiple returns from goto_import 2022-09-03 17:16:32 -03:00
Marcio Mazza
9fd4aab5da Find pytest fixtures from external plugins registered via setuptools entry points
Using setuptools entry points is probably the main pytest mechanism of
plugin discovery.

See https://docs.pytest.org/en/stable/how-to/writing_plugins.html#setuptools-entry-points

This extends the functionality of #791
and maybe eliminates the need for #1786.
2022-09-03 17:16:32 -03:00
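An illustrative (made-up) setup.py showing how an external pytest plugin registers itself under the "pytest11" entry-point group, which is the discovery mechanism this commit hooks into:

    from setuptools import setup

    setup(
        name="my-pytest-plugin",     # hypothetical package name
        py_modules=["my_plugin"],    # hypothetical module providing the fixtures
        entry_points={
            "pytest11": ["my_plugin = my_plugin"],
        },
    )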
Dave Halter
8b0d391ac1 Merge pull request #1876 from marciomazza/fix-skipped-tests-due-to-python-symlinks
Fix skipped collection of pytest integration test files
2022-09-03 12:36:01 +00:00
Marcio Mazza
fa0c064841 Fix skipped collection of pytest integration test files
On integration tests file collection,
the value of `environment.executable` can also be a symlink
(e.g. in a virtualenv) with a different name than `sys.executable`
but pointing to the same file
(e.g. .../bin/python3.10 and .../bin/python, respectively).

That causes the collection of `completion/pytest.py`
and `completion/conftest.py` to be skipped a lot of the time, depending on the environment
(e.g. "60 skipped" before vs. "23 skipped" after, in a local virtualenv).
2022-09-02 14:23:38 -03:00
Dave Halter
9e2089ef1e Merge pull request #1875 from marciomazza/fix-test-home-is-potential-project
Fix test where home could be a potential project
2022-09-02 09:19:52 +00:00
Marcio Mazza
85c7f14562 Fix test where home could be a potential project 2022-09-01 13:01:27 -03:00
Dave Halter
695f0832b4 Merge pull request #1871 from xzz53/fix-gitignore
Improve .gitignore handling
2022-08-22 09:59:53 +00:00
Mikhail Rudenko
cfb7e300af Improve .gitignore handling
At present, .gitignore patterns not starting with '/' are classified
as "ignored names" (as opposed to "ignored paths") and are not used for
filtering directories. But according to the spec [1], the situation
is a bit different: all patterns apply to directories (and those
ending with '/' apply to directories only). Besides that, there are two
kinds of patterns: those that match only relative to the directory where
the defining .gitignore is located (they must contain a '/' at the
beginning or in the middle), which we call "absolute", and those that
also match in all subdirectories under the directory where the defining
.gitignore is located (they must not contain '/' or contain only a
trailing '/'), which we call "relative".

This commit implements handling of both "absolute" and "relative"
.gitignore patterns according to the spec. "Absolute" patterns are
handled mostly like `ignored_paths` were handled in the previous
implementation. "Relative" patterns are collected into a distinct set
containing `(defining_gitignore_dir, pattern)` tuples. For each
traversed `root_folder_io`, all applicable "relative" patterns are
expanded into a set of plain paths, which are then used for filtering
`folder_io`s.

While at it, also fix some minor issues. Explicitly ignore negative
and wildcard patterns, since we don't handle them correctly
anyway. Also, use '/' as a path separator instead of `os.path.sep`
when dealing with .gitignore, since the spec explicitly says that '/'
must be used on all platforms.

[1] https://git-scm.com/docs/gitignore
2022-08-21 21:50:29 +03:00
Yusheng.Ma
f5faca014f fix autocomplete crash in ycmd
Signed-off-by: Yusheng.Ma <Yusheng.Ma@zilliz.com>
2022-08-17 07:53:35 +00:00
Dave Halter
7ff0d2d595 Merge pull request #1867 from timgates42/bugfix_typos
docs: Fix a few typos
2022-07-15 07:36:27 +00:00
Tim Gates
c28b337278 docs: Fix a few typos
There are small typos in:
- jedi/api/exceptions.py
- jedi/inference/base_value.py
- jedi/inference/compiled/mixed.py
- jedi/inference/value/dynamic_arrays.py

Fixes:
- Should read `usually` rather than `ususally`.
- Should read `modifications` rather than `modfications`.
- Should read `interpreters` rather than `interpreteters`.
- Should read `inferred` rather than `inferrined`.
- Should read `completable` rather than `completeable`.

Signed-off-by: Tim Gates <tim.gates@iress.com>
2022-07-15 17:29:02 +10:00
nedilmark
128695bd8e remove debug changes 2022-07-03 09:42:29 +08:00
nedilmark
e194ab5951 Fix: #1847 2022-06-18 06:13:07 +08:00
Dave Halter
c0ac341750 Replace some type comments with annotations
This was necessary, back when we supported Python 3.5
2022-05-26 23:09:28 +02:00
Dave Halter
486695d479 Merge pull request #1851 from GalaxySnail/pep604
Add a naive implementation for PEP 604
2022-05-13 12:31:54 +02:00
GalaxySnail
8cb1b76ea4 Fix typo 2022-04-14 04:02:20 +08:00
GalaxySnail
e7755651a4 Add some tests for PEP 604 2022-04-14 03:32:43 +08:00
GalaxySnail
0c7384edc3 A naive implementation for PEP 604 2022-04-14 03:32:12 +08:00
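The syntax in question, for reference (runs as written on Python 3.10+; earlier versions need `from __future__ import annotations`); the function itself is just an example:

    def parse(value: int | str) -> bytes | None:
        # PEP 604 spells Union[int, str] as int | str and Optional[bytes]
        # as bytes | None.
        if value == "":
            return None
        if isinstance(value, int):
            value = str(value)
        return value.encode()

    print(parse(42))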
Dave Halter
8f15f38949 Revert a change for Python 2.7 compatibility (see also e267f63657) 2021-12-25 14:08:44 +01:00
Dave Halter
96af7e4077 The Python 3.6 requirement is now the lowest supported version 2021-12-25 13:37:35 +01:00
Dave Halter
929fa9b452 Fix a small issue in overload tests 2021-12-25 13:18:58 +01:00
Dave Halter
08c5ab821f Merge pull request #1826 from PeterJCLaw/fix-1801-typed-decorator-on-instance-method
Make typed decorators work for instance methods
2021-12-13 02:05:55 +01:00
Peter Law
b6f761f13c Make typed decorators work for instance methods
This feels incomplete when compared to FunctionMixin.py__get__,
however it seems to work at least for the cut-down case reported.

Fixes https://github.com/davidhalter/jedi/issues/1801.
2021-12-12 18:18:55 +00:00
Peter Law
72cf41f4c9 Lambdas in comprehensions need parentheses in Python > 3.8
Fixes https://github.com/davidhalter/jedi/issues/1824.
2021-12-12 18:17:53 +00:00
Dave Halter
3602c10916 Merge pull request #1821 from tomaarsen/patch-1
Typo in docstring of `extract_variable`
2021-11-17 13:44:08 +01:00
Dave Halter
601bfb3493 The readthedocs option submodules should not be part of the Python option 2021-11-17 13:39:21 +01:00
Dave Halter
021f081d8a Submodules should be part of the readthedocs build 2021-11-17 13:38:03 +01:00
Dave Halter
54af6fa86d Try to fix docs dependencies
Docs were not building on read the docs, see also: https://github.com/sphinx-doc/sphinx/issues/9788
2021-11-17 13:33:41 +01:00
Tom Aarsen
f193ae67e9 typo: "statemenet" -> "statement" 2021-11-17 12:59:13 +01:00
Dave Halter
fae26fa7a4 Last preparations for v0.18.1 2021-11-17 01:44:27 +01:00
Dave Halter
a276710f66 Merge pull request #1820 from davidhalter/changes
Some Changes for 0.18.1
2021-11-17 01:42:55 +01:00
Dave Halter
aa8eed8da4 Merge pull request #1819 from jerluc/master
Adds support for "async with" via #1818
2021-11-17 01:36:53 +01:00
jerluc
b2e647d598 Removing invalid test for async with open(...)
See explanation in https://github.com/davidhalter/jedi/pull/1819#issuecomment-970776091
2021-11-16 16:12:43 -08:00
Dave Halter
ec9b453379 Handle defined_names for values that have no context, fixes #1744, fixes #1745 2021-11-17 01:07:28 +01:00
Dave Halter
84d086a47b Fix an issue with whitespace after a dot at the end of a file, also part of #1748 2021-11-17 00:31:46 +01:00
Dave Halter
8bc9c8cda2 Fix an issue where a slice is indexed, fixes #1748 2021-11-17 00:14:59 +01:00
Dave Halter
a17b958078 Fix infer_default for params in REPL, fixes #1738 2021-11-16 23:36:22 +01:00
Dave Halter
656ecf502d Prepare CHANGELOG for 0.18.1 2021-11-16 23:27:01 +01:00
Dave Halter
b846043117 Add 3.10 to the supported Python versions 2021-11-16 23:19:21 +01:00
Dave Halter
6fa91726bf Fix a test in Python 3.10 that's not really important anyway 2021-11-16 23:08:05 +01:00
Dave Halter
42508d9309 Fix fixture annotations for pytest
This means mostly these:

@fixture
def foo() -> Generator[int, None, None]: ...
2021-11-16 22:57:25 +01:00
jerluc
8847848a03 Adds support for "async with" via #1818 2021-11-16 13:00:24 -08:00
Dave Halter
8bd969c24a Upgrade pytest 2021-11-16 21:51:03 +01:00
Dave Halter
458bb30884 Yaml got me again 2021-11-16 21:46:00 +01:00
Dave Halter
515e07227b Try to enable Python 3.10 in CI 2021-11-16 21:44:29 +01:00
Dave Halter
6cb5804227 Revert "Upgrade Django"
This reverts commit 195695edd3.
2021-11-16 21:32:15 +01:00
Dave Halter
e580d1f4d9 Fix a stub docs issue 2021-11-16 21:27:00 +01:00
Dave Halter
195695edd3 Upgrade Django 2021-11-16 21:10:12 +01:00
Dave Halter
42c5276e04 Merge pull request #1800 from Boerde/pytest_improve_fixture_completion
Improve completion for pytest fixtures
2021-11-16 21:09:35 +01:00
Dave Halter
bb5bed4937 Merge pull request #1805 from kirat-singh/support_nested_namespace_packages
fix(import): support for nested namespace packages
2021-10-09 15:20:59 +02:00
Kirat Singh
d872eef1a7 chore: remove unnecessary for loop 2021-10-06 13:15:20 +00:00
Kirat Singh
53e837055f fix(import): support for nested namespace packages
If multiple directories in sys.path provide a nested namespace
package, then jedi would only visit the first directory which
contained the package.  Fix this by saving the remaining path list in
the ImplicitNamespaceValue and add a test for it.
2021-10-02 04:09:27 +00:00
Dave Halter
65bc1c117b Merge pull request #1795 from frenzymadness/patch-1
inspect now raises OSError for objects without source file
2021-09-02 11:22:08 +02:00
Lumír 'Frenzy' Balhar
eab1b8be8b inspect now raises OSError for objects without source file
CPython issue: https://bugs.python.org/issue44648
2021-09-01 20:50:54 +02:00
boerde
3cf98f6ba1 parameters with annotations do not need special pytest handling 2021-08-29 09:17:04 +02:00
boerde
8808b5b64b added test to override fixture return value with annotation 2021-08-29 09:14:29 +02:00
Laurent Soest
fe50352f9c annotations should be preferred even when the fixture is a generator 2021-08-28 21:04:57 +02:00
Laurent Soest
96b4330ef9 testing: added test to override generator with annotation 2021-08-28 21:02:45 +02:00
Dave Halter
1d944943c3 Merge pull request #1794 from PeterJCLaw/fix-quoted-generic-forward-refs
Fix quoted generic annotations
2021-07-25 20:02:38 +02:00
Peter Law
78a95f4751 Handle generics appearing within any quoted annotations
This hoists the solution added for return-type annotations to
also apply for input annotations so they work too.
2021-07-25 16:31:27 +01:00
Peter Law
599a1c3ee1 Handle generics appearing within quoted return annotations
This ensures that these quoted (likely forward) references in
return type annotations behave like their non-quoted equivalents.

I suspect there may be other places which will need similar
adjustments, which may mean that we should push the conversion
a layer closer to the parsing (perhaps in `py__annotations__`?).

One case I know that this doesn't solve (but which likely needs
similar adjustment) is generics in return types of comment-style
annotations. They're less likely and may not be worth supporting
since all supported Python versions can use the in-syntax spelling
for annotations at this point.
2021-07-25 15:32:22 +01:00
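The kind of quoted generic annotation these two commits cover, as a small illustrative example (names are made up):

    from typing import Dict, List

    def bucket(items: "List[int]") -> "Dict[int, List[int]]":
        # Both the parameter and return annotations are quoted strings,
        # i.e. likely forward references that contain generics.
        result: "Dict[int, List[int]]" = {}
        for item in items:
            result.setdefault(item % 3, []).append(item)
        return result

    print(bucket([1, 2, 3, 4, 5, 6]))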
Peter Law
6814a7336c Hoist common variable for additional re-use 2021-07-25 15:23:51 +01:00
Dave Halter
070f191f55 Merge pull request #1663 from PeterJCLaw/tidyups
Tidyups
2021-07-25 13:44:55 +02:00
Dave Halter
11e67ed319 Merge pull request #1793 from PeterJCLaw/fix-functools-wraps-module-scope
Fix module-scope passthrough function signatures
2021-07-25 13:43:00 +02:00
Peter Law
ab2eb570a8 Use search_ancestor for a more robust search 2021-07-24 17:27:27 +01:00
Peter Law
aa265a44e1 Have all py__file__ methods return a Path 2021-07-24 17:14:25 +01:00
Peter Law
25a3e31ca8 Add a __repr__ 2021-07-24 17:12:34 +01:00
Peter Law
87388ae00f Drop dead line 2021-07-24 17:12:34 +01:00
Peter Law
2d11e02fdb Remove redundant invalid documentation line
This is now replaced by the type signature.
2021-07-24 17:12:34 +01:00
Peter Law
392dcdf015 Fix potential bug passing exception to function excepting str
Found while adding type annotations.
2021-07-24 17:12:34 +01:00
Peter Law
b9fd84e11c Add sanity-check exception
Found by mypy while adding types.
2021-07-24 17:12:34 +01:00
Peter Law
75624f0e3c Convert more things to Python 3 idioms 2021-07-24 17:12:34 +01:00
Peter Law
6ad62e18d2 deque is in collections, not queue
Though it seems that the queue module does use it internally, which
is why this was working.
2021-07-24 17:12:34 +01:00
Peter Law
6787719c28 Ensure *args, **kwargs lookthrough works at module scope too
This means that passthrough signatures will be found for top-level
functions, which is useful whether or not they're wrapped with
`functools.wraps`.

Fixes https://github.com/davidhalter/jedi/issues/1791.
2021-07-24 16:58:34 +01:00
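A sketch of the module-level passthrough pattern this refers to (illustrative names only): the wrapper forwards *args/**kwargs, and the wrapped function's signature should be surfaced whether or not `functools.wraps` is used.

    import functools

    def log_calls(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            print("calling", func.__name__)
            return func(*args, **kwargs)
        return wrapper

    @log_calls
    def greet(name: str, punctuation: str = "!") -> str:
        return f"Hello, {name}{punctuation}"

    print(greet("world"))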
Peter Law
bb40390225 Add identifiers to these test strings
This makes it easier to work out which one fails when pytest
reports a failure. Mostly useful when introducing failing tests,
which I'm about to do.
2021-07-24 16:15:41 +01:00
Peter Law
0d15347210 Remove confusing comment
I'm assuming that this is incorrect given that there _are_ arguments
where the comment suggests there aren't any.
2021-07-24 16:14:20 +01:00
Dan Rosén
41652507b3 Fix grammar in features.rst 2021-05-06 00:38:19 +02:00
Dave Halter
41fb6a0cde Merge pull request #1772 from josephbirkner/bugfix/zip-complete
Fixed ZIP import completion.
2021-04-29 23:56:14 +02:00
Joseph Birkner
a340fe077e Fixed ZIP completion. 2021-04-29 09:52:08 +02:00
Dave Halter
dcea842ac2 Revert "Upgrade django-stubs, fixes #1750"
This reverts commit ce5619cabb.
2021-02-26 23:09:22 +01:00
Dave Halter
ce5619cabb Upgrade django-stubs, fixes #1750 2021-02-26 22:30:09 +01:00
Dave Halter
0eb6720c11 Some Python objects suck, fixes #1755 2021-02-26 21:58:47 +01:00
Dave Halter
ee30843f22 Merge pull request #1741 from sfavazza/master
BUGFIX: endless loop in pytest plugin
2021-02-01 00:41:40 +01:00
Samuele FAVAZZA
613cb08325 BUGFIX: prevent an infinite loop seeking for a "conftest.py" file 2021-01-30 16:31:26 +01:00
Aivar Annamaa
9f41153eb2 Allow tweaking Interpreter sys_path (#1734) 2021-01-23 14:38:10 +01:00
Dave Halter
387d73990b Fix issues with getitem on compiled objects that have annotations, see #1719 2021-01-17 13:48:22 +01:00
Dave Halter
47d0318fa6 Paths are the default for modules 2021-01-14 02:00:14 +01:00
Dave Halter
7555dc0d45 Get rid of cast_path 2021-01-14 01:39:51 +01:00
Dave Halter
2a8b212af7 Move the module_injector 2021-01-14 01:35:18 +01:00
Dave Halter
837cb1106a Use Path instead of str if possible 2021-01-14 01:32:57 +01:00
Dave Halter
b6fd81f1e1 Another time avoiding a memory leak, also part of #1723 2021-01-14 01:18:00 +01:00
Dave Halter
0ff532b937 Refactor docstrings 2021-01-14 01:11:50 +01:00
Dave Halter
b9067ccdbb Avoid caching parso objects, fixes #1723 2021-01-14 00:29:34 +01:00
Dave Halter
44d77523b3 Fix a test that depended on a correct cwd location and not having an x.py in a local directory 2021-01-10 16:31:37 +01:00
Dave Halter
6279791b24 Fix an issue with complete_search 2021-01-10 16:08:17 +01:00
Romain Rigaux
4597c7ebe7 Fix typo in docstring 2021-01-09 10:56:22 +01:00
Dave Halter
e6f18df1d2 unsafe -> not safe 2021-01-03 01:13:17 +01:00
Dave Halter
3428a24af0 Remove an outdated comment 2021-01-02 23:41:38 +01:00
Dave Halter
7a3d1f7cee Run CI on pull request 2021-01-02 23:40:14 +01:00
Dave Halter
8ef2ce232c Hopefully fix a Windows issue 2021-01-02 18:11:59 +01:00
Dave Halter
4ab7a53c19 Fix a compatibility issue for Python < 3.8 2021-01-02 17:37:30 +01:00
Dave Halter
c5fb2985a3 Use clearly defined project for tests to avoid scanning the 2000 typeshed files all the time 2021-01-02 15:31:57 +01:00
Dave Halter
ca2c732d66 PNGs are not text and should not be normalized 2021-01-02 12:27:24 +01:00
Dave Halter
2ec3d72151 Use "namespace" as a Name.type 2021-01-02 12:14:28 +01:00
Dave Halter
02d43caa5e Fix a wrong test about references 2021-01-02 01:17:38 +01:00
Dave Halter
55c7e4eb49 Stdlib modules should not be included in the get_references search, fixes davidhalter/jedi-vim#792 2021-01-02 00:58:50 +01:00
Dave Halter
7d160f96f6 Do not show signatures for properties, fixes #1695 2021-01-01 23:51:41 +01:00
Dave Halter
1ccc63e83d Make py__iter__ work as well for Interpreter 2021-01-01 17:58:31 +01:00
Dave Halter
971913be35 Make it possible to use __getitem__ in interpreter 2021-01-01 15:57:55 +01:00
Dave Halter
36ea6b3285 Change an import 2021-01-01 05:19:37 +01:00
Dave Halter
85f45771f1 Fix typing.NewType signature 2021-01-01 04:22:52 +01:00
Dave Halter
30e702de11 Generics don't have signatures 2021-01-01 04:09:49 +01:00
Dave Halter
778442a972 Type aliases should not have a signature 2021-01-01 03:59:28 +01:00
Dave Halter
4f34712858 Fix signatures for TypeVar and cast, fixes #1709 2021-01-01 03:59:12 +01:00
Dave Halter
d821451a64 Upgrade typeshed 2021-01-01 03:18:49 +01:00
Dave Halter
92d96ac336 actually use auto_import_modules correctly 2021-01-01 02:59:42 +01:00
Dave Halter
c64e33173a Fix an issue about properties, fixes #1705 2020-12-28 00:54:40 +01:00
Dave Halter
5d2aed34f4 Fix signatures if a decorator has no signatures, fixes #1705 2020-12-28 00:47:10 +01:00
Dave Halter
04c1c0f871 Fix an issue with api_name of class attributes, fixes #1688 2020-12-28 00:29:30 +01:00
Dave Halter
0f128c6deb Fix nested comprehension contexts, fixes #1691 2020-12-27 21:09:00 +01:00
Dave Halter
8373ef079f Remove an unnecessary comment 2020-12-26 22:43:47 +01:00
Dave Halter
227cbde169 Merge branch 'master' of github.com:davidhalter/jedi 2020-12-26 18:02:05 +01:00
Dave Halter
1f06e6f0c9 name the ci workflow in the hope that badges will then be displayed 2020-12-26 17:57:38 +01:00
Dave Halter
2d3b8ac8df Merge pull request #1715 from davidhalter/github-actions
Use GitHub Actions
2020-12-26 12:56:20 +01:00
Dave Halter
fa6072b4fa Change Python test order in CI 2020-12-26 12:39:37 +01:00
Dave Halter
aae2f7c49a Change badges from Travis/Appveyor to GitHub Actions 2020-12-26 12:37:04 +01:00
Dave Halter
52443daf12 Fix another Windows test on 3.8 2020-12-26 12:19:59 +01:00
Dave Halter
86d57edda4 Some Windows compatibility fixes 2020-12-26 11:52:47 +01:00
Dave Halter
7298350e76 Standardize line separator 2020-12-26 04:27:06 +01:00
Dave Halter
3184264b3b Try to fix windows 2020-12-26 04:16:32 +01:00
Dave Halter
d4a1657b2e Better error reporting 2020-12-26 04:03:19 +01:00
Dave Halter
bea401912f Hopefully fix Actions configuration 2020-12-26 03:42:33 +01:00
Dave Halter
3e4070bbb3 Enable Windows 2019 2020-12-26 03:35:28 +01:00
Dave Halter
3d7ad50f57 Remove travis and appveyor configs in favor of github action 2020-12-26 03:33:22 +01:00
Dave Halter
85ec94cf65 Fix pytest issues, fixes #1699 2020-12-26 03:32:17 +01:00
Dave Halter
0cc5c974f6 Try to improve GH Actions 2020-12-26 01:43:29 +01:00
Dave Halter
6f76bb945a GH actions, checkout recursive submodules 2020-12-26 01:14:17 +01:00
Dave Halter
239a3730a6 Try to add Github Actions 2020-12-26 01:03:03 +01:00
Dave Halter
8740ff2691 Ignore the mypy cache for searching folders 2020-12-25 17:35:28 +01:00
Dave Halter
4b5b2e791b Prepare release of 0.18.0 2020-12-25 11:06:15 +01:00
Dave Halter
b89f9445c2 Merge pull request #1684 from davidhalter/relative-import
Relative imports should work even if they are not within the project
2020-12-22 23:18:46 +01:00
Dave Halter
ce6ddb91de Merge pull request #1711 from davidhalter/deprecations
Remove Deprecations
2020-12-21 22:49:09 +01:00
Dave Halter
fe60b5ca13 Fix flake8 issues in sith.py 2020-12-12 12:45:27 +01:00
Dave Halter
fa2d03a4fb Mention removal of deprecations in CHANGELOG 2020-12-12 12:32:29 +01:00
Dave Halter
1b16ca0e2e Add sith.py to the files to be ignored by pytest 2020-12-12 12:25:22 +01:00
Dave Halter
f9cec89038 Merge branch 'master' into deprecations 2020-12-12 12:17:25 +01:00
Dave Halter
bc4f6ed9dd Merge branch 'master' into relative-import 2020-12-12 12:15:13 +01:00
Leo Ryu
fd435a7bbb Check if string_names is None before returning string_names (#1708)
* Check if string is None before using string_names

* Add test asserting None string_names returns an empty list

* Remove whitespace to pass flake8

* Add name to authors.txt

Co-authored-by: Leo Ryu <leo@episci.com>
2020-12-12 12:13:31 +01:00
Dave Halter
ce0ed4b8ae Improve a comment 2020-12-10 16:57:09 +01:00
Dave Halter
42a759a7ae Merge pull request #1706 from ColdGrub1384/master
Catch 'PermissionError' for unreadable directories
2020-12-07 22:34:03 +01:00
Adrian Labbé
6dcae857a7 Remove 'test_get_parent_dir_with_file' 2020-12-07 14:50:04 -03:00
Dave Halter
34792c0077 Merge pull request #1707 from Carreau/fix-1702
Add tests for #1702, for a rare numpydoc syntax.
2020-12-07 15:07:53 +01:00
Dave Halter
6df463b1e3 Merge pull request #1704 from infokiller/expose-comp-prefix-len
add Completion.get_completion_prefix_length
2020-12-07 14:44:01 +01:00
Matthias Bussonnier
4740178bdf Not all nodes have children, protect against it. 2020-12-06 18:11:49 -08:00
Matthias Bussonnier
06d6776422 Add tests for #1702, for a rare numpydoc syntax.
It looks like numpydoc, and things like masked array docstrings, use a
syntax that makes jedi crash:

    fill_value : {var}, optional
            Value used internally for the masked values.
            If ``fill_value`` is not None, it supersedes ``endwith``.

Here we add a test that jedi does not crash on it.
2020-12-06 18:08:51 -08:00
Yoni Weill
1095820006 add tests for get_completion_prefix_length 2020-12-06 21:09:03 +02:00
Adrian Labbé
47e60107b2 Add tests for 'test_get_parent_dir_with_file' and 'test_is_potential_project' 2020-12-06 15:26:20 -03:00
Adrian Labbé
12a2d10595 Catch 'OSError' instead of just 'PermissionError' 2020-12-06 15:25:46 -03:00
Yoni Weill
ccdf7eddf4 add Completion.get_completion_prefix_length
fixes #1687
2020-12-06 17:21:33 +02:00
Adrian Labbé
83d4ec9e84 Catch 'PermissionError' for unreadable directories 2020-12-05 21:00:28 -03:00
Dave Halter
69750b9bf0 Add Python 3.9 to the tested environments 2020-10-24 13:40:19 +02:00
Dave Halter
a03a093e2c change the create_stub_module stuff a bit 2020-10-24 10:41:59 +02:00
Dave Halter
6094e7b39a Fix get_line_code for stubs 2020-10-24 10:12:32 +02:00
Dave Halter
98d0a55a02 Add a few more tests for annotations on self 2020-10-23 23:32:28 +02:00
Dave Halter
6eabde1519 Fix annotations on self attributes, fixes #1681 2020-10-23 23:26:07 +02:00
Dave Halter
a4f45993f8 Simplify some things, so something like #1678 does not happen again 2020-10-23 21:38:39 +02:00
Dave Halter
49e35497ae Stop subclassing CompiledName, potentially fixes #1667 2020-10-23 21:28:08 +02:00
Dave Halter
bf310c780c Fix a recursion on imports, fixes #1677 2020-10-23 21:04:36 +02:00
Dave Halter
e671a0cb6d Fix an error with enums, fixes #1675 2020-10-23 20:25:00 +02:00
Dave Halter
a5a36a049c Fix an infer issue on literals after brackets, fixes #1657 2020-10-23 19:09:23 +02:00
Dave Halter
43ff2833f3 Make a test more reliable 2020-10-23 18:04:47 +02:00
Dave Halter
5f2f4af851 Update test/test_inference/test_imports.py
Co-authored-by: Peter Law <PeterJCLaw@gmail.com>
2020-10-21 22:32:33 +02:00
Dave Halter
bf56103428 Update jedi/inference/imports.py
Co-authored-by: Peter Law <PeterJCLaw@gmail.com>
2020-10-21 22:32:24 +02:00
Dave Halter
78e87d0ab8 Relative imports should work even if they are not within the project 2020-10-20 01:00:22 +02:00
anki-code
04572422d4 Xonsh shell has jedi extension (#1674)
* xonsh shell has jedi extension

* jedi in xonsh shell usage

* and many more! :)
2020-09-29 00:12:48 +02:00
Dave Halter
cb55b45d47 Catch an OSError on Windows 2020-09-19 22:13:45 +02:00
Dave Halter
e3fedb52f1 Remove an unused import 2020-09-19 21:40:01 +02:00
Dave Halter
c1f4e7d874 One interpreter test is different for 3.9+ 2020-09-19 21:27:55 +02:00
Dave Halter
4082728c32 Revert "Add the Python 3.9 environment"
This reverts commit 39fe9a1979.
2020-09-19 21:22:38 +02:00
Dave Halter
66e2a0fce4 implict_reexport needs to be True for parso 2020-09-19 21:15:03 +02:00
Dave Halter
39fe9a1979 Add the Python 3.9 environment 2020-09-19 20:58:03 +02:00
Dave Halter
f18493b627 Fix an interpreter test 2020-09-19 20:57:32 +02:00
Dave Halter
fa2abb5ff6 Add mypy cache to gitignore 2020-09-19 20:36:54 +02:00
Dave Halter
5b81abd537 Mention different language servers in README 2020-09-19 20:36:19 +02:00
Dave Halter
01b2e8e6b8 Merge pull request #1669 from mvanderkamp/patch-1
make contextualized_node an optional kwarg in ReversedObject
2020-09-19 20:31:50 +02:00
Mvdk
ff439039da make contextualized_node an optional kwarg
In all other py__iter__ definitions that I found, this argument is optional. It also often seems to not be passed around. I'm not sure why it was deemed mandatory here despite not being used.
2020-09-14 10:27:19 -06:00
Dave Halter
216f976fd5 Add a .readthedocs.yml to make sure that it's properly pip installed before the documentation is built 2020-08-06 00:12:50 +02:00
Dave Halter
e617c9d344 Formatting 2020-08-05 23:55:46 +02:00
Dave Halter
58ef6cd36b if_stmt test clauses should be resolved at the start of the if_stmt 2020-08-05 23:55:46 +02:00
Dave Halter
abf63d73d3 Basic implementation support for namedexpr, fixes #1647 2020-08-05 23:55:46 +02:00
Dave Halter
76c0c373da Merge pull request #1642 from PeterJCLaw/mypy
Add an initial mypy config
2020-08-05 01:09:49 +02:00
Dave Halter
209e2713fd Remove the requirements file and require latest parso 2020-08-05 00:55:57 +02:00
Dave Halter
f12ed2088a Use pathlib for file ios, because the new parso is out 2020-08-05 00:52:50 +02:00
Dave Halter
94bf83c826 Revert Django changes in a9e2cd5a74
This was probably an accident in #1646
2020-08-05 00:18:24 +02:00
Peter Law
cce3ecb1e4 Use the default handling of optionals
This is strict handling, but allows implicit declarations.
2020-08-04 21:49:42 +01:00
Dave Halter
10aa21f970 Merge branch 'master' of github.com:davidhalter/jedi 2020-08-04 18:29:26 +02:00
Dave Halter
425287055b Merge pull request #1646 from Carreau/warnings
Turn print into warning to simplify silencing them.
2020-08-04 14:47:57 +02:00
Matthias Bussonnier
a9e2cd5a74 Reformat and move imports to top level. 2020-08-03 08:24:24 -07:00
Dave Halter
2f7d0ec42c Project attributes are now read accessible 2020-08-01 18:26:26 +02:00
Matthias Bussonnier
20be4f02c8 Turn print into warning to simplify silencing them. 2020-07-27 11:28:05 -07:00
Peter Law
6364dd1511 Add explicit Optional annotation
This isn't a mypy issue -- there's no way it could otherwise know
that this `None` value is in fact an optional callable.
2020-07-26 14:43:41 +01:00
Peter Law
19b8eaea59 Link mypy issue 2020-07-26 13:26:14 +01:00
Peter Law
b892c07841 Merge branch 'master' into mypy 2020-07-26 12:25:19 +01:00
Peter Law
cefc363f64 Configure mypy and flake8 for our re-export files
This removes the need to use __all__ in these files, while also
allowing us to have strictness elsewhere in the codebase.
2020-07-26 12:20:08 +01:00
Peter Law
45c90efb5c Remove a couple of unused imports 2020-07-26 12:17:54 +01:00
Peter Law
0571e12617 These attributes aren't optional
They just don't yet have a value.
2020-07-26 12:11:34 +01:00
Peter Law
86e0e16625 Drop redundant rtype comment
This is better expressed as an annotation.
2020-07-26 12:10:59 +01:00
Peter Law
b3edda30c4 Explain why we 'type: ignore' these properties 2020-07-26 12:09:04 +01:00
Dave Halter
9d1587a41d Don't need to inherit from object anymore 2020-07-26 00:11:57 +02:00
Dave Halter
e593396417 Merge pull request #1641 from PeterJCLaw/pydoc-data-python3.6-embedable
Python 3.6 embeddable doesn't have pydoc_data
2020-07-25 01:05:38 +02:00
Peter Law
a9cb9fbb1f Give a bit more detail here 2020-07-24 21:06:30 +01:00
Peter Law
3f74981d5e Also typecheck sith 2020-07-24 21:06:30 +01:00
Peter Law
38f853cf86 Add ignores for stdlib imports only recently added 2020-07-24 21:06:30 +01:00
Peter Law
4b7e837f0f Configure the package root as implicit exports 2020-07-24 20:25:55 +01:00
Peter Law
a2d9fbcd42 Ignore this runtime-only import
I've queried this in https://github.com/python/typeshed/issues/4360,
though I suspect the answer is going to be to have an ignore comment
like this.
2020-07-24 20:25:55 +01:00
Peter Law
6315709fea Inherit from base class to placate mypy 2020-07-24 20:25:55 +01:00
Peter Law
48e5aa777b Annotate potentially missing import 2020-07-24 20:25:55 +01:00
Peter Law
69be26b16e Change subclass to function wrapper
This avoids mypy complaining that we need to provide a generic
argument to Popen, which we cannot acctually do as the implementation
of Popen does not inherit from typing.Generic.
2020-07-24 20:25:55 +01:00
Peter Law
5e509814f7 Ignore mypy not coping with decorated properties 2020-07-24 20:25:55 +01:00
Peter Law
07fbcd2262 Make this explicitly expect a Path 2020-07-24 20:25:55 +01:00
Peter Law
1c87ae378d This is a Path now 2020-07-24 20:25:55 +01:00
Peter Law
b1f95b4bf9 Annotate these attributes 2020-07-24 16:10:34 +01:00
Peter Law
7d9205d4ae This is actually optional 2020-07-24 16:10:34 +01:00
Peter Law
9b3cd15c5f Fix type clash 2020-07-24 16:10:34 +01:00
Peter Law
1418aada91 Annotate top level items mypy needs annotating 2020-07-24 16:10:34 +01:00
Peter Law
f98a9f7999 Annotate the completions cache 2020-07-24 16:10:34 +01:00
Peter Law
35c2d660cb Fix most import related mypy errors 2020-07-24 16:10:34 +01:00
Peter Law
c09e21ae4b Configure mypy
No fixes yet, this just gets the config in place.

Note: I'm assuming that we'll pick up a change to parso such that
it exposes its type stubs here. Otherwise we'll want to tweak the
imports config to ignore those errors.
2020-07-24 16:10:34 +01:00
Peter Law
480c352d33 Python 3.6 embeddable doesn't have pydoc_data
This reinstates the import check for pydoc_data for now.

Specifically I looked in the following:
- python-3.6.8-embed-amd64.zip: missing pydoc_data
- python-3.7.8-embed-amd64.zip: present
- python-3.8.5-embed-amd64.zip: present
2020-07-24 16:07:48 +01:00
Dave Halter
8f167be980 Merge branch 'master' of github.com:davidhalter/jedi 2020-07-23 01:33:06 +02:00
Dave Halter
e86afc1705 _cropped_file_size should be an int, fixes #1639 2020-07-23 01:32:37 +02:00
Dave Halter
7423c65eb5 Merge pull request #1638 from PeterJCLaw/update-flake8
Update flake8
2020-07-22 09:28:08 +02:00
Peter Law
b651c6541a Configure travis' flake8 call more explicitly
I'm basing this on '{posargs:jedi}' looking like it was a tox thing,
which we're no longer using.
2020-07-21 23:15:20 +01:00
Peter Law
403564315c Reflow test to ensure trailing space is preserved
Many editors strip trailing space, so avoid using a multiline
string where the space is actually needed.
2020-07-21 22:44:43 +01:00
Peter Law
5e6138d16f Update to flake8 3.8.x
In particular this improves support for detecting usage of various
type annotation usages and adds support for correctly parsing
type: ignore comments which contain a reason tag.
2020-07-21 21:34:58 +01:00
Peter Law
6ef18bea50 Make this noqa more specific 2020-07-21 21:34:37 +01:00
Peter Law
9505dabfef Reflow for linting 2020-07-21 21:32:22 +01:00
Peter Law
4783c065da Configure editors for uniform whitespace handling 2020-07-21 21:26:46 +01:00
Dave Halter
bb303a75c0 Fix a test 2020-07-20 23:58:46 +02:00
Dave Halter
1e633ab8ed Remove the requirements file, it should not be necessary 2020-07-20 02:19:55 +02:00
Dave Halter
89f525407a Remove the deprecation tests 2020-07-20 02:06:17 +02:00
Dave Halter
d7d42c8e39 Rewrite the deprecation handling 2020-07-20 02:04:31 +02:00
Dave Halter
abb2250bf5 Remove all deprecations 2020-07-20 02:02:41 +02:00
Dave Halter
ae2becb531 Merge branch 'pytest'
This completely removes tox from Jedi.
2020-07-20 01:46:43 +02:00
Dave Halter
14069e81fd Remove speed tests, they were only flaky and didn't really provide a value anymore 2020-07-20 01:43:29 +02:00
Dave Halter
401e8d3100 Fix issues with property searches 2020-07-20 01:40:25 +02:00
Dave Halter
e7c2c85b9f Try to fix issues with the qa and coverage steps 2020-07-20 01:29:38 +02:00
Dave Halter
784e965d3a @property now returns Name.type == 'property', fixes muffinmad/anakin-language-server#15 2020-07-20 01:20:24 +02:00
Dave Halter
10c4dbf785 Try to get rid of tox and test directly with pytest 2020-07-19 14:58:17 +02:00
Dave Halter
7281302281 The defaults for find_system_environments and get_system_environment were wrong
This happened, because of the migration to Python 3 only.
2020-07-19 14:35:40 +02:00
Dave Halter
27603f9780 Reenable a test for nested imports 2020-07-19 13:57:52 +02:00
Dave Halter
d9a90d5d5e Remove a test that no longer made sense 2020-07-19 13:55:18 +02:00
Dave Halter
9957565b37 Try to use yield from instead of yield, if possible 2020-07-19 13:34:58 +02:00
Dave Halter
5bc174bf8d Start writing CHANGELOG for the next release 2020-07-18 17:00:45 +02:00
Dave Halter
89f070ea98 Mention the mailing list instead of the github issue for updates 2020-07-17 22:35:14 +02:00
Dave Halter
04d24acb5a Merge branch 'python3' 2020-07-17 21:58:26 +02:00
Dave Halter
3b7106ae71 Fix a typo 2020-07-17 21:56:13 +02:00
Dave Halter
74116fe2ea Prepare for 0.17.2 2020-07-17 21:39:36 +02:00
Dave Halter
1233caebdc Fix a Python 3.9 issue on travis 2020-07-17 16:13:23 +02:00
Dave Halter
d78567f853 Fix a Python 3.9 issue on travis 2020-07-17 16:12:55 +02:00
Dave Halter
1ece7698c2 Merge branch 'master' into python3 2020-07-17 16:07:54 +02:00
Dave Halter
7851dff915 Properly negate with Interpreter, fixes #1636 2020-07-17 15:57:32 +02:00
Dave Halter
e4987b3e7a Fix issues with generators, fixes #1624 2020-07-17 15:57:32 +02:00
Dave Halter
d1851c369c Introduce py__next__ to have more clear way to use __next__ 2020-07-17 15:57:32 +02:00
Dave Halter
d63fbd8624 Merge pull request #1633 from mrclary/mrclary-fix-wingkinl-patch-python-environ
Fix for #1630
2020-07-17 11:26:02 +02:00
Ryan Clary
b0f664ec94 * reflect default Popen behavior by inheriting os.environ
* without passing env_vars to create_environment, GeneralizedPopen behavior is the same as before the fix to issue #1540 (803c3cb271)
* env_vars allows explicit environment variables, per PR #1619 (f9183bbf64)
2020-07-16 19:04:33 -07:00
Dave Halter
9957374508 Fix dict completions for inherited dicts, fixes #1631 2020-07-14 17:50:12 +02:00
Dave Halter
7f3a7db7e6 Refactor Interpreter completions a bit 2020-07-12 22:26:57 +02:00
Dave Halter
3ffe8475b8 Make sure the interpreter completions work better in Jupyter Notebook, fixes #1628 2020-07-12 22:20:06 +02:00
Dave Halter
396d7df314 Fix an issue with interpreter completion, see also #1628 2020-07-12 22:02:00 +02:00
Dave Halter
0c618a4456 Making sure to note that Python 2 will not be supported after 0.17.2 2020-07-12 21:22:36 +02:00
Dave Halter
c4c36d8e2e Mention in Changelog that 3.9 is now supported 2020-07-12 19:44:48 +02:00
Dave Halter
829dda3ee9 Fix another windows issue 2020-07-12 11:18:35 +02:00
Dave Halter
a16f52b9fb Fix some Windows related issues with absolute paths 2020-07-12 11:13:37 +02:00
Dave Halter
a49c062b35 Properly support Python3.9 2020-07-12 01:58:13 +02:00
Dave Halter
da15e916de Fix a doctest 2020-07-12 01:37:41 +02:00
Dave Halter
480a464179 Implement all remaining Path issues and use it instead of strings 2020-07-12 01:14:00 +02:00
Dave Halter
db0e90763b Start using pathlib.Path instead of all the os.path functions 2020-07-10 17:30:36 +02:00
Dave Halter
92af043906 Fix some subprocess issues 2020-07-02 18:39:24 +02:00
Dave Halter
806ad06d6a Use raise from instead of weird magic 2020-07-02 16:14:53 +02:00
Dave Halter
dac1fb0a06 Get rid of a few Python 2 things 2020-07-02 16:00:26 +02:00
Dave Halter
ec08506704 Remove getstate and setstate, because they are not needed anymore 2020-07-02 15:55:31 +02:00
Dave Halter
7bcb420a0a Delete a weird comment 2020-07-02 12:33:19 +02:00
Dave Halter
546b970240 Rewrite a weird super call 2020-07-02 12:31:16 +02:00
Dave Halter
24a1bbb3ca Even more super deletions 2020-07-02 12:29:10 +02:00
Dave Halter
a0de93a638 Remove super arguments 2020-07-02 10:59:59 +02:00
Dave Halter
216ce8726c Move GeneralizedPopen 2020-07-02 10:54:32 +02:00
Dave Halter
0c1ba1b305 Move the importing of modules out of compatibility 2020-07-02 10:51:49 +02:00
Dave Halter
5ab351dc8f Remove unicode literals from code base 2020-07-02 10:43:14 +02:00
Dave Halter
f1366b8a74 Remove the u() unicode function 2020-07-02 10:35:39 +02:00
Dave Halter
7f67324210 Remove a lot more Python 2 mentions and todos 2020-07-02 10:30:58 +02:00
Dave Halter
a51f667be8 Cleanse the API from Python 2 stuff 2020-07-02 10:24:44 +02:00
Dave Halter
f7b445353f Remove Python 2 compatibility functions 2020-07-02 10:14:12 +02:00
Dave Halter
46154a3ee7 Remove an unnecessary print 2020-07-02 03:35:24 +02:00
Dave Halter
0790f376ca Some Python 2 removals 2020-07-02 03:34:44 +02:00
Dave Halter
332631434c Remove some unnecessary utf-8 references 2020-07-02 03:30:41 +02:00
Dave Halter
8ee0c8593e Remove unicode usages 2020-07-02 03:26:22 +02:00
Dave Halter
5a912de937 Remove a few unicode references in tests 2020-07-02 03:18:48 +02:00
Dave Halter
ef96c4c66b Remove __future__ usages 2020-07-02 03:15:07 +02:00
Dave Halter
155a1dd3fc A mistaken deletion in appveyor 2020-07-02 03:12:03 +02:00
Dave Halter
65601b6532 Remove compatibility code from getattr_static 2020-07-02 03:09:47 +02:00
Dave Halter
6e4dfda727 Fix a minor issue 2020-07-02 03:08:07 +02:00
Dave Halter
1fbe0d8d2e Remove python_version_match from publish_method 2020-07-02 03:04:14 +02:00
Dave Halter
6e184bca97 Remove most version_info.major usages 2020-07-02 03:00:01 +02:00
Dave Halter
188fdcd34f Remove the skip_python2 fixture 2020-07-02 02:52:24 +02:00
Dave Halter
f4e537fd72 Remove a lot of sys.version_info references 2020-07-02 02:49:35 +02:00
Dave Halter
cfd8eb23b8 Remove all_suffixes from _compatibility 2020-07-02 02:32:02 +02:00
Dave Halter
57c7d61989 importlib is needed 2020-07-02 02:30:49 +02:00
Dave Halter
db28eee760 Remove py__version__ 2020-07-02 02:30:16 +02:00
Dave Halter
0cd6a8f5cc Remove is_py3 and is_py35 2020-07-02 02:23:33 +02:00
Dave Halter
17343bb57c Remove some more Python 3.5 references 2020-07-02 02:18:16 +02:00
Dave Halter
182e1e864c Remove _no_python2_support 2020-07-02 02:05:16 +02:00
Dave Halter
782c561e86 Fix the compatibility docstring 2020-07-02 02:03:34 +02:00
Dave Halter
9838040ca3 Fix a TODO 2020-07-02 01:56:23 +02:00
Dave Halter
eea35ffc31 Remove supported Pythons from environments 2020-07-02 01:52:44 +02:00
Dave Halter
b639e7fd11 Fixed a minor error with removing of force_unicode 2020-07-02 01:51:06 +02:00
Dave Halter
2c1e591718 Remove python 3.5 from appveyor 2020-07-02 01:47:57 +02:00
Dave Halter
49e4b1a0f8 Remove force_unicode 2020-07-02 01:47:21 +02:00
Dave Halter
ebfc330e86 Remove the unused utf8_repr function 2020-07-02 01:32:17 +02:00
Dave Halter
e597dcc8fd Remove a Python 2 file 2020-07-02 01:30:34 +02:00
Dave Halter
07fc1ef837 Remove the pickle compatibility stuff 2020-07-02 01:29:54 +02:00
Dave Halter
a25e192ff9 Remove shutil.which compatibility 2020-07-02 01:19:12 +02:00
Dave Halter
e6a748b1a7 Fix some directory issues 2020-07-02 01:17:35 +02:00
Dave Halter
227cf00638 Remove the __builtin__ compatibility 2020-07-02 01:15:29 +02:00
Dave Halter
a9d32fbc99 Remove literal_eval compatibility 2020-07-02 01:10:46 +02:00
Dave Halter
b5e0c1e9c6 Remove compatibility for zip_longest 2020-07-02 01:08:57 +02:00
Dave Halter
2aec4678da Remove compatibility for IsADirectoryError PermissionError NotADirectoryError 2020-07-02 01:07:06 +02:00
Dave Halter
f9a35ae42a Remove FileNotFoundError compatibility 2020-07-02 01:05:13 +02:00
Dave Halter
0538a3e224 Remove Python 2 import hacks 2020-07-02 01:01:25 +02:00
Dave Halter
64516f1b45 Remove DummyFile 2020-07-02 00:59:36 +02:00
Dave Halter
1dc83115be Remove use_metaclass 2020-07-02 00:58:30 +02:00
Dave Halter
c651109b9a Remove _compatibility.reraise 2020-07-02 00:56:30 +02:00
Dave Halter
1df98c5bd6 Remove no_unicode_pprint 2020-07-02 00:54:17 +02:00
Dave Halter
aab9fd2fbe Remove queue compatibility 2020-07-02 00:52:26 +02:00
Dave Halter
4e2ca9e5fd Remove some pickle compatibility 2020-07-02 00:50:58 +02:00
Dave Halter
395f7fc59e Remove inspect.Parameter compatibility 2020-07-02 00:44:25 +02:00
Dave Halter
4c557d4050 Remove finalize from compatibility 2020-07-02 00:40:38 +02:00
Dave Halter
86eb48a89b Remove unwrap compatibility 2020-07-02 00:40:08 +02:00
Dave Halter
3262ad4350 Remove the scandir compatibility 2020-07-02 00:38:44 +02:00
Dave Halter
fb34df3987 Remove a way for using imp to load Jedi in a subprocess 2020-07-02 00:37:09 +02:00
Dave Halter
23db298e2f Removed various 3.3/3.4/3.5 references 2020-07-02 00:34:27 +02:00
Dave Halter
9d5acf3c53 Remove the has_typing fixture 2020-07-02 00:26:28 +02:00
Dave Halter
7e295d05a1 Remove some more Python 2/3.5 references 2020-07-02 00:25:00 +02:00
Dave Halter
50b85153ce Remove a lot of test references to Python 2/3.5 2020-07-02 00:17:21 +02:00
Dave Halter
0e5869b52f Remove 2.7/3.5 from docs 2020-07-02 00:04:22 +02:00
Dave Halter
d67dfba7f5 Remove Python 2.7/3.5 support 2020-07-02 00:00:46 +02:00
Dave Halter
a3a9ae1a26 Add download badge 2020-06-27 15:15:34 +02:00
Dave Halter
e41b966283 Some test skips 2020-06-27 03:10:24 +02:00
Dave Halter
4188526e2d Revert some of the Decoratee changes 2020-06-27 02:18:31 +02:00
Dave Halter
804b0f0d06 Some more signature adjustments 2020-06-27 02:18:31 +02:00
Dave Halter
7b15f1736c Change Decoratee slightly 2020-06-27 02:18:31 +02:00
Dave Halter
4846848a1e Fix an issue with decoratee names 2020-06-27 02:18:31 +02:00
Dave Halter
344fef1e2f Add Project.path, fixes #1622 2020-06-27 02:18:31 +02:00
Dave Halter
bc23458164 Fix the of a signature with a decorator 2020-06-27 02:18:31 +02:00
Dave Halter
9a54e583e7 Fix docstrings for method decorators, fixes #1621 2020-06-27 02:18:31 +02:00
Dave Halter
59ccd2da93 Make partial use the __doc__ of its function, fixes #1621 2020-06-27 02:18:31 +02:00
Dave Halter
737c1e5792 Merge pull request #1614 from PeterJCLaw/fix-decorator-factory-passthrough
Support passing values through decorators from factories
2020-06-26 13:29:58 +02:00
Peter Law
f72adf0cbc Switch to much simpler solution for preserving unbound type vars
Co-Authored-By: Dave Halter <davidhalter88@gmail.com>
2020-06-26 11:23:35 +01:00
Peter Law
5184d0cb9c Support passing values through decorators from factories
This builds on the approach taken in https://github.com/davidhalter/jedi/pull/1613
but applies it to type vars themselves so that their type var
nature is preserved when a function returns Callable[[T], T] and
the T has an upper bound.
2020-06-26 11:22:19 +01:00
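A minimal sketch of the decorator-factory shape described above (the registry and names are invented for illustration): the factory returns Callable[[T], T] where T has a bound, and the decorated function should keep its concrete signature.

    from typing import Callable, TypeVar

    T = TypeVar("T", bound=Callable)
    registry: dict = {}

    def register(tag: str) -> Callable[[T], T]:
        def decorator(func: T) -> T:
            registry[tag] = func
            return func  # T is preserved, so greet keeps its own signature
        return decorator

    @register("greet")
    def greet(name: str) -> str:
        return f"Hello, {name}"

    print(greet("world"))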
Peter Law
2d0258db1a Add tests for class-style decorator factories 2020-06-26 11:19:51 +01:00
Dave Halter
f5e6a25542 Merge pull request #1623 from mallamanis/master
Add __matmul__ to supported operators.
2020-06-26 12:10:00 +02:00
Miltos
bc5a8ddf87 Add __matmul__ to supported operators. 2020-06-25 17:35:07 +01:00
Dave Halter
eabddb9698 Remove a print 2020-06-24 01:29:50 +02:00
Dave Halter
6fcdc44f3e Typeshed third party libraries should not be loaded if they don't actually exist in the environment, fixes #1620 2020-06-24 01:08:04 +02:00
Dave Halter
0d1a45ddc1 Add the env_vars change to CHANGELOG 2020-06-22 00:13:57 +02:00
Dave Halter
f9183bbf64 Merge pull request #1619 from mrclary/subprocess-env-vars
Provide option to pass explicit environment variables to Environment and CompiledSubprocess
2020-06-22 00:11:18 +02:00
Ryan Clary
7ec8454fc1 * Provide option to pass environment variables to Environment and CompiledSubprocess (subprocess.Popen)
* Extend this option to find_system_environments and get_system_environment without breaking API
2020-06-21 08:08:32 -07:00
Dave Halter
a3410f124a Make sure that Callables are properly represented
See also comment of https://github.com/davidhalter/jedi/pull/1614#issuecomment-647054740
2020-06-21 01:31:58 +02:00
Peter Law
3488f6b61d Add Python 3.8 to the tox env list (#1618) 2020-06-20 16:18:32 +02:00
Dave Halter
3dad9cac6b Use Python 3 in the deployment script 2020-06-20 01:19:01 +02:00
211 changed files with 4466 additions and 3820 deletions

.editorconfig (new file, 14 lines)

@@ -0,0 +1,14 @@
root = true
[*]
charset = utf-8
end_of_line = lf
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[*.py]
indent_size = 4
[*.md]
indent_size = 2

.gitattributes (new file, vendored, 10 lines)

@@ -0,0 +1,10 @@
# all end-of-lines are normalized to LF when written to the repository
# https://git-scm.com/docs/gitattributes#_text
* text=auto
# force all text files on the working dir to have LF line endings
# https://git-scm.com/docs/gitattributes#_eol
* text eol=lf
# PNGs are not text and should not be normalized
*.png -text

.github/workflows/ci.yml (new file, vendored, 75 lines)

@@ -0,0 +1,75 @@
name: ci
on: [push, pull_request, workflow_dispatch]
jobs:
tests:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-20.04, windows-2019]
python-version: ["3.13", "3.12", "3.11", "3.10", "3.9", "3.8", "3.7", "3.6"]
environment: ['3.8', '3.13', '3.12', '3.11', '3.10', '3.9', '3.7', '3.6', 'interpreter']
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-python@v5
if: ${{ matrix.environment != 'interpreter' }}
with:
python-version: ${{ matrix.environment }}
allow-prereleases: true
- uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
- name: Install dependencies
run: 'pip install .[testing]'
- name: Run tests
run: python -m pytest
env:
JEDI_TEST_ENVIRONMENT: ${{ matrix.environment }}
code-quality:
runs-on: ubuntu-20.04
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install dependencies
run: 'pip install .[qa]'
- name: Run tests
run: |
python -m flake8 jedi test setup.py
python -m mypy jedi sith.py setup.py
coverage:
runs-on: ubuntu-20.04
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install dependencies
run: 'pip install .[testing] coverage'
- name: Run tests
run: |
python -m coverage run --source jedi -m pytest
python -m coverage report
- name: Upload coverage data
run: |
pip install --quiet codecov coveralls
python -m coverage xml
python -m coverage report -m
bash <(curl -s https://codecov.io/bash) -X gcov -X coveragepy -X search -X fix -X xcode -f coverage.xml
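
The matrix above drives the test run through the ``JEDI_TEST_ENVIRONMENT`` variable. A rough local equivalent of a single matrix cell, assuming the testing extras from ``pip install .[testing]`` are installed::

    import os
    import pytest

    # Target the same environment a CI cell would, e.g. Python 3.9.
    os.environ["JEDI_TEST_ENVIRONMENT"] = "3.9"

    # Equivalent to the workflow's `python -m pytest` step.
    raise SystemExit(pytest.main([]))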

.gitignore (vendored, 3 changed lines)

@@ -2,7 +2,6 @@
*.sw?
*.pyc
.ropeproject
.tox
.coveralls.yml
.coverage
.idea
@@ -13,4 +12,6 @@ jedi.egg-info/
record.json
/.cache/
/.pytest_cache
/.mypy_cache
/venv/
.nvimrc

.readthedocs.yml (new file, 18 lines)

@@ -0,0 +1,18 @@
version: 2
python:
install:
- method: pip
path: .
extra_requirements:
- docs
submodules:
include: all
build:
os: ubuntu-22.04
tools:
python: "3.11"
apt_packages:
- graphviz


@@ -1,73 +0,0 @@
dist: xenial
language: python
python:
- 3.9-dev
- 3.8
- 3.7
- 3.6
- 3.5
- 2.7
env:
- JEDI_TEST_ENVIRONMENT=38
- JEDI_TEST_ENVIRONMENT=37
- JEDI_TEST_ENVIRONMENT=36
- JEDI_TEST_ENVIRONMENT=35
- JEDI_TEST_ENVIRONMENT=27
- JEDI_TEST_ENVIRONMENT=interpreter
matrix:
include:
- python: 3.7
env:
- TOXENV=cov-py37
- JEDI_TEST_ENVIRONMENT=37
# For now ignore pypy, there are so many issues that we don't really need
# to run it.
#- python: pypy
# The 3.9 dev build does not seem to be available end 2019.
#- python: 3.9-dev
# env:
# - JEDI_TEST_ENVIRONMENT=39
install:
- pip install --quiet tox-travis
- sudo apt-get -y install python3-venv
script:
- |
# Setup/install Python for $JEDI_TEST_ENVIRONMENT.
set -ex
test_env_version=${JEDI_TEST_ENVIRONMENT:0:1}.${JEDI_TEST_ENVIRONMENT:1:1}
if [ "$TRAVIS_PYTHON_VERSION" != "$test_env_version" ] && [ "$JEDI_TEST_ENVIRONMENT" != "interpreter" ]; then
python_bin=python$test_env_version
python_path="$(which $python_bin || true)"
if [ -z "$python_path" ]; then
# Only required for JEDI_TEST_ENVIRONMENT=38, because it's not always
# available.
download_name=python-$test_env_version
wget https://s3.amazonaws.com/travis-python-archives/binaries/ubuntu/16.04/x86_64/$download_name.tar.bz2
sudo tar xjf $download_name.tar.bz2 --directory / opt/python
ln -s "/opt/python/${test_env_version}/bin/python" /home/travis/bin/$python_bin
elif [ "${python_path#/opt/pyenv/shims}" != "$python_path" ]; then
# Activate pyenv version (required with JEDI_TEST_ENVIRONMENT=36).
pyenv_bin="$(pyenv whence --path "$python_bin" | head -n1)"
ln -s "$pyenv_bin" /home/travis/bin/$python_bin
fi
$python_bin --version
python_ver=$($python_bin -c 'import sys; print("%d%d" % sys.version_info[0:2])')
if [ "$JEDI_TEST_ENVIRONMENT" != "$python_ver" ]; then
echo "Unexpected Python version for $JEDI_TEST_ENVIRONMENT: $python_ver"
set +ex
exit 2
fi
fi
set +ex
- tox
after_script:
- |
if [ $TOXENV == "cov-py37" ]; then
pip install --quiet codecov coveralls
coverage xml
coverage report -m
coveralls
bash <(curl -s https://codecov.io/bash) -X gcov -X coveragepy -X search -X fix -X xcode -f coverage.xml
fi


@@ -1,4 +1,4 @@
Main Authors
Main Authors
------------
- David Halter (@davidhalter) <davidhalter88@gmail.com>
@@ -60,6 +60,12 @@ Code Contributors
- Max Mäusezahl (@mmaeusezahl) <maxmaeusezahl@googlemail.com>
- Vladislav Serebrennikov (@endilll)
- Andrii Kolomoiets (@muffinmad)
- Leo Ryu (@Leo-Ryu)
- Joseph Birkner (@josephbirkner)
- Márcio Mazza (@marciomazza)
- Martin Vielsmaier (@moser) <martin@vielsmaier.net>
- TingJia Wu (@WutingjiaX) <wutingjia@bytedance.com>
- Nguyễn Hồng Quân <ng.hong.quan@gmail.com>
And a few more "anonymous" contributors.


@@ -6,6 +6,72 @@ Changelog
Unreleased
++++++++++
0.19.2 (2024-11-10)
+++++++++++++++++++
- Python 3.13 support
0.19.1 (2023-10-02)
+++++++++++++++++++
- Python 3.12 support (Thanks Peter!)
0.19.0 (2023-07-29)
+++++++++++++++++++
- Python 3.11 support
- Massive improvements in performance for ``Interpreter`` (e.g. IPython) users.
This especially affects ``pandas`` users with large datasets.
- Add ``jedi.settings.allow_unsafe_interpreter_executions`` to make it easier
for IPython users to avoid unsafe executions.
0.18.2 (2022-11-21)
+++++++++++++++++++
- Added dataclass-equivalent for attrs.define
- Find fixtures from Pytest entrypoints; Examples of pytest plugins installed
like this are pytest-django, pytest-sugar and Faker.
- Fixed Project.search, when a venv was involved, which is why for example
`:Pyimport django.db` did not work in some cases in jedi-vim.
- And many smaller bugfixes
0.18.1 (2021-11-17)
+++++++++++++++++++
- Implict namespaces are now a separate types in ``Name().type``
- Python 3.10 support
- Mostly bugfixes
0.18.0 (2020-12-25)
+++++++++++++++++++
- Dropped Python 2 and Python 3.5
- Using ``pathlib.Path()`` as an output instead of ``str`` in most places:
- ``Project.path``
- ``Script.path``
- ``Definition.module_path``
- ``Refactoring.get_renames``
- ``Refactoring.get_changed_files``
- Functions with ``@property`` now return ``property`` instead of ``function``
in ``Name().type``
- Started using annotations
- Better support for the walrus operator
- Project attributes are now read accessible
- Removed all deprecations
This is likely going to be the last minor release before 1.0.
0.17.2 (2020-07-17)
+++++++++++++++++++
- Added an option to pass environment variables to ``Environment``
- ``Project(...).path`` exists now
- Support for Python 3.9
- A few bugfixes
This will be the last release that supports Python 2 and Python 3.5.
``0.18.0`` will be Python 3.6+.
0.17.1 (2020-06-20)
+++++++++++++++++++
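
To make the 0.18.0 and 0.19.0 entries above concrete, here is a small illustrative sketch of the pathlib-based paths and the newer settings flag (the code string is made up)::

    from pathlib import Path

    import jedi

    project = jedi.Project(".")
    print(isinstance(project.path, Path))   # True: Project.path is a pathlib.Path

    script = jedi.Script("import json; json.l", path="example.py", project=project)
    print(isinstance(script.path, Path))    # True: Script.path changed the same way

    # Added in 0.19.0 so Interpreter/IPython users can opt out of unsafe executions.
    jedi.settings.allow_unsafe_interpreter_executions = False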


@@ -6,8 +6,6 @@ include .coveragerc
include sith.py
include conftest.py
include pytest.ini
include tox.ini
include requirements.txt
recursive-include jedi/third_party *.pyi
include jedi/third_party/typeshed/LICENSE
include jedi/third_party/django-stubs/LICENSE.txt


@@ -10,17 +10,13 @@ Jedi - an awesome autocompletion, static analysis and refactoring library for Py
:target: https://github.com/davidhalter/jedi/issues
:alt: The resolution time is the median time an issue or pull request stays open.
.. image:: https://travis-ci.org/davidhalter/jedi.svg?branch=master
:target: https://travis-ci.org/davidhalter/jedi
:alt: Linux Tests
.. image:: https://github.com/davidhalter/jedi/workflows/ci/badge.svg?branch=master
:target: https://github.com/davidhalter/jedi/actions
:alt: Tests
.. image:: https://ci.appveyor.com/api/projects/status/mgva3bbawyma1new/branch/master?svg=true
:target: https://ci.appveyor.com/project/davidhalter/jedi/branch/master
:alt: Windows Tests
.. image:: https://coveralls.io/repos/davidhalter/jedi/badge.svg?branch=master
:target: https://coveralls.io/r/davidhalter/jedi
:alt: Coverage status
.. image:: https://pepy.tech/badge/jedi
:target: https://pepy.tech/project/jedi
:alt: PyPI Downloads
Jedi is a static analysis tool for Python that is typically used in
@@ -46,11 +42,19 @@ Jedi can currently be used with the following editors/projects:
- `GNOME Builder`_ (with support for GObject Introspection)
- Gedit (gedi_)
- wdb_ - Web Debugger
- `Eric IDE`_ (Available as a plugin)
- `Eric IDE`_
- `IPython 6.0.0+ <https://ipython.readthedocs.io/en/stable/whatsnew/version6.html>`_
- `xonsh shell <https://xon.sh/contents.html>`_ has `jedi extension <https://xon.sh/xontribs.html#jedi>`_
and many more!
There are a few language servers that use Jedi:
- `jedi-language-server <https://github.com/pappasam/jedi-language-server>`_
- `python-language-server <https://github.com/palantir/python-language-server>`_ (currently unmaintained)
- `python-lsp-server <https://github.com/python-lsp/python-lsp-server>`_ (fork from python-language-server)
- `anakin-language-server <https://github.com/muffinmad/anakin-language-server>`_
Here are some pictures taken from jedi-vim_:
.. image:: https://github.com/davidhalter/jedi/raw/master/docs/_screenshots/screenshot_complete.png
@@ -72,8 +76,9 @@ Docs are available at `https://jedi.readthedocs.org/en/latest/
and/or fixes are awesome and most welcome. Jedi uses `semantic versioning
<https://semver.org/>`_.
If you want to stay up-to-date (News / RFCs), please subscribe to this `github
thread <https://github.com/davidhalter/jedi/issues/1063>`_.:
If you want to stay **up-to-date** with releases, please **subscribe** to this
mailing list: https://groups.google.com/g/jedi-announce. To subscribe you can
simply send an empty email to ``jedi-announce+subscribe@googlegroups.com``.
Issues & Questions
==================
@@ -94,7 +99,7 @@ Features and Limitations
Jedi's features are listed here:
`Features <https://jedi.readthedocs.org/en/latest/docs/features.html>`_.
You can run Jedi on CPython 2.7 or 3.5+ but it should also
You can run Jedi on Python 3.6+ but it should also
understand code that is older than those versions. Additionally you should be
able to use `Virtualenvs <https://jedi.readthedocs.org/en/latest/docs/api.html#environments>`_
very well.

SECURITY.md (new file, 9 lines)

@@ -0,0 +1,9 @@
# Security Policy
If security issues arise, we will try to fix those as soon as possible.
Due to Jedi's nature, Security Issues will probably be extremely rare, but we will of course treat them seriously.
## Reporting Security Problems
If you need to report a security vulnerability, please send an email to davidhalter88@gmail.com. Typically, I will respond in the next few business days.


@@ -1,59 +0,0 @@
environment:
matrix:
- TOXENV: py37
PYTHON_PATH: C:\Python37
JEDI_TEST_ENVIRONMENT: 37
- TOXENV: py37
PYTHON_PATH: C:\Python37
JEDI_TEST_ENVIRONMENT: 36
- TOXENV: py37
PYTHON_PATH: C:\Python37
JEDI_TEST_ENVIRONMENT: 35
- TOXENV: py37
PYTHON_PATH: C:\Python37
JEDI_TEST_ENVIRONMENT: 27
- TOXENV: py36
PYTHON_PATH: C:\Python36
JEDI_TEST_ENVIRONMENT: 37
- TOXENV: py36
PYTHON_PATH: C:\Python36
JEDI_TEST_ENVIRONMENT: 36
- TOXENV: py36
PYTHON_PATH: C:\Python36
JEDI_TEST_ENVIRONMENT: 35
- TOXENV: py36
PYTHON_PATH: C:\Python36
JEDI_TEST_ENVIRONMENT: 27
- TOXENV: py35
PYTHON_PATH: C:\Python35
JEDI_TEST_ENVIRONMENT: 37
- TOXENV: py35
PYTHON_PATH: C:\Python35
JEDI_TEST_ENVIRONMENT: 36
- TOXENV: py35
PYTHON_PATH: C:\Python35
JEDI_TEST_ENVIRONMENT: 35
- TOXENV: py35
PYTHON_PATH: C:\Python35
JEDI_TEST_ENVIRONMENT: 27
- TOXENV: py27
PYTHON_PATH: C:\Python27
JEDI_TEST_ENVIRONMENT: 37
- TOXENV: py27
PYTHON_PATH: C:\Python27
JEDI_TEST_ENVIRONMENT: 36
- TOXENV: py27
PYTHON_PATH: C:\Python27
JEDI_TEST_ENVIRONMENT: 35
- TOXENV: py27
PYTHON_PATH: C:\Python27
JEDI_TEST_ENVIRONMENT: 27
install:
- git submodule update --init --recursive
- set PATH=%PYTHON_PATH%;%PYTHON_PATH%\Scripts;%PATH%
- pip install tox
build_script:
- tox


@@ -8,7 +8,6 @@ import pytest
import jedi
from jedi.api.environment import get_system_environment, InterpreterEnvironment
from jedi._compatibility import py_version
from test.helpers import test_dir
collect_ignore = [
@@ -17,10 +16,8 @@ collect_ignore = [
'jedi/inference/compiled/subprocess/__main__.py',
'build/',
'test/examples',
'sith.py',
]
if sys.version_info < (3, 6):
# Python 2 not supported syntax
collect_ignore.append('test/test_inference/test_mixed.py')
# The following hooks (pytest_configure, pytest_unconfigure) are used
@@ -45,7 +42,7 @@ def pytest_addoption(parser):
help="Warnings are treated as errors.")
parser.addoption("--env", action='store',
help="Execute the tests in that environment (e.g. 35 for python3.5).")
help="Execute the tests in that environment (e.g. 39 for python3.9).")
parser.addoption("--interpreter-env", "-I", action='store_true',
help="Don't use subprocesses to guarantee having safe "
"code execution. Useful for debugging.")
@@ -97,12 +94,15 @@ def clean_jedi_cache(request):
def environment(request):
version = request.config.option.env
if version is None:
version = os.environ.get('JEDI_TEST_ENVIRONMENT', str(py_version))
v = str(sys.version_info[0]) + str(sys.version_info[1])
version = os.environ.get('JEDI_TEST_ENVIRONMENT', v)
if request.config.option.interpreter_env or version == 'interpreter':
return InterpreterEnvironment()
return get_system_environment(version[0] + '.' + version[1:])
if '.' not in version:
version = version[0] + '.' + version[1:]
return get_system_environment(version)
@pytest.fixture(scope='session')
@@ -133,20 +133,18 @@ def goto_or_help(request, Script):
@pytest.fixture(scope='session', params=['goto', 'help', 'infer'])
def goto_or_help_or_infer(request, Script):
def do(code, *args, **kwargs):
return getattr(Script(code), request.param)(*args, **kwargs)
do.type = request.param
return do
@pytest.fixture(scope='session', params=['goto', 'complete', 'help'])
def goto_or_complete(request, Script):
return lambda code, *args, **kwargs: getattr(Script(code), request.param)(*args, **kwargs)
@pytest.fixture(scope='session')
def has_typing(environment):
if environment.version_info >= (3, 5, 0):
# This if is just needed to avoid that tests ever skip way more than
# they should for all Python versions.
return True
script = jedi.Script('import typing', environment=environment)
return bool(script.infer())
@pytest.fixture(scope='session')
def has_django(environment):
script = jedi.Script('import django', environment=environment)
@@ -158,14 +156,6 @@ def jedi_path():
return os.path.dirname(__file__)
@pytest.fixture()
def skip_python2(environment):
if environment.version_info.major == 2:
# This if is just needed to avoid that tests ever skip way more than
# they should for all Python versions.
pytest.skip()
@pytest.fixture()
def skip_pre_python38(environment):
if environment.version_info < (3, 8):
@@ -180,19 +170,3 @@ def skip_pre_python37(environment):
# This if is just needed to avoid that tests ever skip way more than
# they should for all Python versions.
pytest.skip()
@pytest.fixture()
def skip_pre_python35(environment):
if environment.version_info < (3, 5):
# This if is just needed to avoid that tests ever skip way more than
# they should for all Python versions.
pytest.skip()
@pytest.fixture()
def skip_pre_python36(environment):
if environment.version_info < (3, 6):
# This if is just needed to avoid that tests ever skip way more than
# they should for all Python versions.
pytest.skip()
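
The ``environment`` fixture above accepts values such as ``39``, ``3.9`` or ``interpreter``. A standalone sketch of that normalization step (hypothetical helper name, mirroring the fixture's logic)::

    import os
    import sys

    def normalize_jedi_test_environment(version=None):
        # Default to the running interpreter, e.g. '311' for Python 3.11.
        if version is None:
            default = str(sys.version_info[0]) + str(sys.version_info[1])
            version = os.environ.get('JEDI_TEST_ENVIRONMENT', default)
        if version == 'interpreter':
            return version
        # Turn '39' into '3.9'; values that already contain a dot pass through.
        if '.' not in version:
            version = version[0] + '.' + version[1:]
        return version  # ready to hand to get_system_environment()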


@@ -24,10 +24,10 @@ git checkout $BRANCH
git submodule update --init
# Test first.
tox
pytest
# Create tag
tag=v$(python -c "import $PROJECT_NAME; print($PROJECT_NAME.__version__)")
tag=v$(python3 -c "import $PROJECT_NAME; print($PROJECT_NAME.__version__)")
master_ref=$(git show-ref -s heads/$BRANCH)
tag_ref=$(git show-ref -s $tag || true)
@@ -44,7 +44,7 @@ fi
# Package and upload to PyPI
#rm -rf dist/ - Not needed anymore, because the folder is never reused.
echo `pwd`
python setup.py sdist bdist_wheel
python3 setup.py sdist bdist_wheel
# Maybe do a pip install twine before.
twine upload dist/*


@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
#
# Jedi documentation build configuration file, created by
# sphinx-quickstart on Wed Dec 26 00:11:34 2012.
#
@@ -43,8 +41,8 @@ source_encoding = 'utf-8'
master_doc = 'index'
# General information about the project.
project = u'Jedi'
copyright = u'jedi contributors'
project = 'Jedi'
copyright = 'jedi contributors'
import jedi
from jedi.utils import version_info
@@ -205,8 +203,8 @@ latex_elements = {
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Jedi.tex', u'Jedi Documentation',
u'Jedi contributors', 'manual'),
('index', 'Jedi.tex', 'Jedi Documentation',
'Jedi contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -235,8 +233,8 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'jedi', u'Jedi Documentation',
[u'Jedi contributors'], 1)
('index', 'jedi', 'Jedi Documentation',
['Jedi contributors'], 1)
]
# If true, show URL addresses after external links.
@@ -249,8 +247,8 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Jedi', u'Jedi Documentation',
u'Jedi contributors', 'Jedi', 'Awesome Python autocompletion library.',
('index', 'Jedi', 'Jedi Documentation',
'Jedi contributors', 'Jedi', 'Awesome Python autocompletion library.',
'Miscellaneous'),
]


@@ -35,7 +35,7 @@ to write my own version of a completion engine.
The first idea was to execute non-dangerous code. But I soon realized, that
this would not work. So I started to build a static analysis tool.
The biggest problem that I had at the time was that I did not know a thing
about parsers.I did not did not even know the word static analysis. It turns
about parsers. I did not even know the word static analysis. It turns
out they are the foundation of a good static analysis tool. I of course did not
know that and tried to write my own poor version of a parser that I ended up
throwing away two years later.
@@ -53,7 +53,7 @@ quick and is pretty much feature complete.
--------
I will leave you with a small annectote that happend in 2012, if I remember
I will leave you with a small anecdote that happened in 2012, if I remember
correctly. After I explained Guido van Rossum, how some parts of my
auto-completion work, he said:


@@ -107,7 +107,7 @@ Completions
>>> code = '''import json; json.l'''
>>> script = jedi.Script(code, path='example.py')
>>> script
<Script: 'example.py' <SameEnvironment: 3.5.2 in /usr>>
<Script: 'example.py' <SameEnvironment: 3.9.0 in /usr>>
>>> completions = script.complete(1, 19)
>>> completions
[<Completion: load>, <Completion: loads>]
@@ -169,6 +169,5 @@ Deprecations
The deprecation process is as follows:
1. A deprecation is announced in the next major/minor release.
2. We wait either at least a year and at least two minor releases until we
remove the deprecated functionality.
1. A deprecation is announced in any release.
2. The next major release removes the deprecated functionality.
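
Continuing the completion doctest in this excerpt, each ``Completion`` also reports the text still missing after the cursor; roughly (output values are illustrative)::

    import jedi

    script = jedi.Script("import json; json.l", path="example.py")
    for c in script.complete(1, 19):
        # c.name is the completed name, c.complete is the suffix left to type.
        print(c.name, c.complete)   # e.g. 'load' -> 'oad', 'loads' -> 'oads'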


@@ -16,7 +16,7 @@ Jedi's main API calls and features are:
Basic Features
--------------
- Python 2.7 and 3.5+ support
- Python 3.6+ support
- Ignores syntax errors and wrong indentation
- Can deal with complex module / function / class structures
- Great ``virtualenv``/``venv`` support
@@ -57,7 +57,7 @@ Supported Python Features
Limitations
-----------
In general Jedi's limit are quite high, but for very big projects or very
In general Jedi's limit is quite high, but for very big projects or very
complex code, sometimes Jedi intentionally stops type inference, to avoid
hanging for a long time.
@@ -77,7 +77,7 @@ Performance Issues
Importing ``numpy`` can be quite slow sometimes, as well as loading the
builtins the first time. If you want to speed things up, you could preload
libriaries in |jedi|, with :func:`.preload_module`. However, once loaded, this
libraries in |jedi|, with :func:`.preload_module`. However, once loaded, this
should not be a problem anymore. The same is true for huge modules like
``PySide``, ``wx``, ``tensorflow``, ``pandas``, etc.
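
As a concrete counterpart to the preloading note above (the module names are only examples)::

    import jedi

    # Parse and cache heavyweight modules up front so the first completion
    # that touches them does not pay the full parsing cost.
    jedi.preload_module('numpy', 'pandas')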


@@ -50,14 +50,6 @@ Arch Linux
You can install |jedi| directly from official Arch Linux packages:
- `python-jedi <https://www.archlinux.org/packages/community/any/python-jedi/>`__
(Python 3)
- `python2-jedi <https://www.archlinux.org/packages/community/any/python2-jedi/>`__
(Python 2)
The specified Python version just refers to the *runtime environment* for
|jedi|. Use the Python 2 version if you're running vim (or whatever editor you
use) under Python 2. Otherwise, use the Python 3 version. But whatever version
you choose, both are able to complete both Python 2 and 3 *code*.
(There is also a packaged version of the vim plugin available:
`vim-jedi at Arch Linux <https://www.archlinux.org/packages/community/any/vim-jedi/>`__.)


@@ -12,8 +12,8 @@ easy as::
python3.8 -m pytest
Tests are also run automatically on `Travis CI
<https://travis-ci.org/davidhalter/jedi/>`_.
Tests are also run automatically on `GitHub Actions
<https://github.com/davidhalter/jedi/actions>`_.
You want to add a test for |jedi|? Great! We love that. Normally you should
write your tests as :ref:`Blackbox Tests <blackbox>`. Most tests would


@@ -3,11 +3,22 @@
Using Jedi
==========
|jedi| is can be used with a variety of plugins and software. It is also possible
to use |jedi| in the :ref:`Python shell or with IPython <repl-completion>`.
|jedi| is can be used with a variety of :ref:`plugins <editor-plugins>`,
:ref:`language servers <language-servers>` and other software.
It is also possible to use |jedi| in the :ref:`Python shell or with IPython
<repl-completion>`.
Below you can also find a list of :ref:`recipes for type hinting <recipes>`.
.. _language-servers:
Language Servers
--------------
- `jedi-language-server <https://github.com/pappasam/jedi-language-server>`_
- `python-language-server <https://github.com/palantir/python-language-server>`_ (currently unmaintained)
- `python-lsp-server <https://github.com/python-lsp/python-lsp-server>`_ (fork from python-language-server)
- `anakin-language-server <https://github.com/muffinmad/anakin-language-server>`_
.. _editor-plugins:
@@ -76,13 +87,23 @@ Gedit
Eric IDE
~~~~~~~~
- `Eric IDE`_ (Available as a plugin)
- `Eric IDE`_
Web Debugger
~~~~~~~~~~~~
- wdb_
xonsh shell
~~~~~~~~~~~
Jedi is a preinstalled extension in `xonsh shell <https://xon.sh/contents.html>`_.
Run the following command to enable:
::
xontrib load jedi
and many more!
.. _repl-completion:


@@ -18,18 +18,18 @@ Jedi - an awesome autocompletion, static analysis and refactoring library for Py
:target: https://github.com/davidhalter/jedi/issues
:alt: The resolution time is the median time an issue or pull request stays open.
.. image:: https://travis-ci.org/davidhalter/jedi.svg?branch=master
:target: https://travis-ci.org/davidhalter/jedi
:alt: Linux Tests
.. image:: https://ci.appveyor.com/api/projects/status/mgva3bbawyma1new/branch/master?svg=true
:target: https://ci.appveyor.com/project/davidhalter/jedi/branch/master
:alt: Windows Tests
.. image:: https://github.com/davidhalter/jedi/workflows/ci/badge.svg?branch=master
:target: https://github.com/davidhalter/jedi/actions
:alt: Tests
.. image:: https://coveralls.io/repos/davidhalter/jedi/badge.svg?branch=master
:target: https://coveralls.io/r/davidhalter/jedi
:alt: Coverage status
.. image:: https://pepy.tech/badge/jedi
:target: https://pepy.tech/project/jedi
:alt: PyPI Downloads
`Github Repository <https://github.com/davidhalter/jedi>`_
.. automodule:: jedi
@@ -64,6 +64,9 @@ Docs
Resources
---------
If you want to stay **up-to-date** with releases, please **subscribe** to this
mailing list: https://groups.google.com/g/jedi-announce. To subscribe you can
simply send an empty email to ``jedi-announce+subscribe@googlegroups.com``.
- `Source Code on Github <https://github.com/davidhalter/jedi>`_
- `Travis Testing <https://travis-ci.org/davidhalter/jedi>`_
- `Python Package Index <https://pypi.python.org/pypi/jedi/>`_


@@ -27,10 +27,9 @@ ad
load
"""
__version__ = '0.17.1'
__version__ = '0.19.2'
from jedi.api import Script, Interpreter, set_debug_function, \
preload_module, names
from jedi.api import Script, Interpreter, set_debug_function, preload_module
from jedi import settings
from jedi.api.environment import find_virtualenvs, find_system_environments, \
get_default_environment, InvalidPythonEnvironment, create_environment, \
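
The hunk above is cut off in the middle of the import list, but it shows the environment helpers being re-exported. A brief, hedged tour of that part of the public surface::

    import jedi

    # Enumerate the Python installations Jedi can find on this machine.
    for env in jedi.find_system_environments():
        print(env.executable, env.version_info)

    # Or take the default environment and complete against it; with no
    # arguments, complete() uses the end of the given code.
    script = jedi.Script("import os; os.pa", environment=jedi.get_default_environment())
    print([c.name for c in script.complete()])   # e.g. ['pardir', 'path', ...]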


@@ -1,400 +1,28 @@
"""
To ensure compatibility from Python ``2.7`` - ``3.x``, a module has been
created. Clearly there is huge need to use conforming syntax.
This module is here to ensure compatibility of Windows/Linux/MacOS and
different Python versions.
"""
from __future__ import print_function
import atexit
import errno
import functools
import sys
import os
import re
import pkgutil
import warnings
import subprocess
import weakref
try:
import importlib
except ImportError:
pass
from zipimport import zipimporter
from jedi.file_io import KnownContentFileIO, ZipFileIO
is_py3 = sys.version_info[0] >= 3
is_py35 = is_py3 and sys.version_info[1] >= 5
py_version = int(str(sys.version_info[0]) + str(sys.version_info[1]))
if sys.version_info[:2] < (3, 5):
"""
A super-minimal shim around listdir that behave like
scandir for the information we need.
"""
class _DirEntry:
def __init__(self, name, basepath):
self.name = name
self.basepath = basepath
def is_dir(self):
path_for_name = os.path.join(self.basepath, self.name)
return os.path.isdir(path_for_name)
def scandir(dir):
return [_DirEntry(name, dir) for name in os.listdir(dir)]
else:
from os import scandir
class DummyFile(object):
def __init__(self, loader, string):
self.loader = loader
self.string = string
def read(self):
return self.loader.get_source(self.string)
def close(self):
del self.loader
def find_module_py34(string, path=None, full_name=None, is_global_search=True):
spec = None
loader = None
for finder in sys.meta_path:
if is_global_search and finder != importlib.machinery.PathFinder:
p = None
else:
p = path
try:
find_spec = finder.find_spec
except AttributeError:
# These are old-school clases that still have a different API, just
# ignore those.
continue
spec = find_spec(string, p)
if spec is not None:
loader = spec.loader
if loader is None and not spec.has_location:
# This is a namespace package.
full_name = string if not path else full_name
implicit_ns_info = ImplicitNSInfo(full_name, spec.submodule_search_locations._path)
return implicit_ns_info, True
break
return find_module_py33(string, path, loader)
def find_module_py33(string, path=None, loader=None, full_name=None, is_global_search=True):
loader = loader or importlib.machinery.PathFinder.find_module(string, path)
if loader is None and path is None: # Fallback to find builtins
try:
with warnings.catch_warnings(record=True):
# Mute "DeprecationWarning: Use importlib.util.find_spec()
# instead." While we should replace that in the future, it's
# probably good to wait until we deprecate Python 3.3, since
# it was added in Python 3.4 and find_loader hasn't been
# removed in 3.6.
loader = importlib.find_loader(string)
except ValueError as e:
# See #491. Importlib might raise a ValueError, to avoid this, we
# just raise an ImportError to fix the issue.
raise ImportError("Originally " + repr(e))
if loader is None:
raise ImportError("Couldn't find a loader for {}".format(string))
return _from_loader(loader, string)
def _from_loader(loader, string):
try:
is_package_method = loader.is_package
except AttributeError:
is_package = False
else:
is_package = is_package_method(string)
try:
get_filename = loader.get_filename
except AttributeError:
return None, is_package
else:
module_path = cast_path(get_filename(string))
# To avoid unicode and read bytes, "overwrite" loader.get_source if
# possible.
try:
f = type(loader).get_source
except AttributeError:
raise ImportError("get_source was not defined on loader")
if is_py3 and f is not importlib.machinery.SourceFileLoader.get_source:
# Unfortunately we are reading unicode here, not bytes.
# It seems hard to get bytes, because the zip importer
# logic just unpacks the zip file and returns a file descriptor
# that we cannot as easily access. Therefore we just read it as
# a string in the cases where get_source was overwritten.
code = loader.get_source(string)
else:
code = _get_source(loader, string)
if code is None:
return None, is_package
if isinstance(loader, zipimporter):
return ZipFileIO(module_path, code, cast_path(loader.archive)), is_package
return KnownContentFileIO(module_path, code), is_package
def _get_source(loader, fullname):
"""
This method is here as a replacement for SourceLoader.get_source. That
method returns unicode, but we prefer bytes.
"""
path = loader.get_filename(fullname)
try:
return loader.get_data(path)
except OSError:
raise ImportError('source not available through get_data()',
name=fullname)
def find_module_pre_py3(string, path=None, full_name=None, is_global_search=True):
# This import is here, because in other places it will raise a
# DeprecationWarning.
import imp
try:
module_file, module_path, description = imp.find_module(string, path)
module_type = description[2]
is_package = module_type is imp.PKG_DIRECTORY
if is_package:
# In Python 2 directory package imports are returned as folder
# paths, not __init__.py paths.
p = os.path.join(module_path, '__init__.py')
try:
module_file = open(p)
module_path = p
except FileNotFoundError:
pass
elif module_type != imp.PY_SOURCE:
if module_file is not None:
module_file.close()
module_file = None
if module_file is None:
return None, is_package
with module_file:
code = module_file.read()
return KnownContentFileIO(cast_path(module_path), code), is_package
except ImportError:
pass
if path is None:
path = sys.path
for item in path:
loader = pkgutil.get_importer(item)
if loader:
loader = loader.find_module(string)
if loader is not None:
return _from_loader(loader, string)
raise ImportError("No module named {}".format(string))
find_module = find_module_py34 if is_py3 else find_module_pre_py3
find_module.__doc__ = """
Provides information about a module.
This function isolates the differences in importing libraries introduced with
python 3.3 on; it gets a module name and optionally a path. It will return a
tuple containin an open file for the module (if not builtin), the filename
or the name of the module if it is a builtin one and a boolean indicating
if the module is contained in a package.
"""
class ImplicitNSInfo(object):
"""Stores information returned from an implicit namespace spec"""
def __init__(self, name, paths):
self.name = name
self.paths = paths
if is_py3:
all_suffixes = importlib.machinery.all_suffixes
else:
def all_suffixes():
# Is deprecated and raises a warning in Python 3.6.
import imp
return [suffix for suffix, _, _ in imp.get_suffixes()]
# unicode function
try:
unicode = unicode
except NameError:
unicode = str
# re-raise function
if is_py3:
def reraise(exception, traceback):
raise exception.with_traceback(traceback)
else:
eval(compile("""
def reraise(exception, traceback):
raise exception, None, traceback
""", 'blub', 'exec'))
reraise.__doc__ = """
Re-raise `exception` with a `traceback` object.
Usage::
reraise(Exception, sys.exc_info()[2])
"""
def use_metaclass(meta, *bases):
""" Create a class with a metaclass. """
if not bases:
bases = (object,)
return meta("Py2CompatibilityMetaClass", bases, {})
try:
encoding = sys.stdout.encoding
if encoding is None:
encoding = 'utf-8'
except AttributeError:
encoding = 'ascii'
def u(string, errors='strict'):
"""Cast to unicode DAMMIT!
Written because Python2 repr always implicitly casts to a string, so we
have to cast back to a unicode (and we now that we always deal with valid
unicode, because we check that in the beginning).
"""
if isinstance(string, bytes):
return unicode(string, encoding='UTF-8', errors=errors)
return string
def cast_path(obj):
"""
Take a bytes or str path and cast it to unicode.
Apparently it is perfectly fine to pass both byte and unicode objects into
the sys.path. This probably means that byte paths are normal at other
places as well.
Since this just really complicates everything and Python 2.7 will be EOL
soon anyway, just go with always strings.
"""
return u(obj, errors='replace')
def force_unicode(obj):
# Intentionally don't mix those two up, because those two code paths might
# be different in the future (maybe windows?).
return cast_path(obj)
try:
import builtins # module name in python 3
except ImportError:
import __builtin__ as builtins # noqa: F401
import ast # noqa: F401
def literal_eval(string):
return ast.literal_eval(string)
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest # Python 2 # noqa: F401
try:
FileNotFoundError = FileNotFoundError
except NameError:
FileNotFoundError = IOError
try:
IsADirectoryError = IsADirectoryError
except NameError:
IsADirectoryError = IOError
try:
PermissionError = PermissionError
except NameError:
PermissionError = IOError
try:
NotADirectoryError = NotADirectoryError
except NameError:
class NotADirectoryError(Exception):
# Don't implement this for Python 2 anymore.
pass
def no_unicode_pprint(dct):
"""
Python 2/3 dict __repr__ may be different, because of unicode differens
(with or without a `u` prefix). Normally in doctests we could use `pprint`
to sort dicts and check for equality, but here we have to write a separate
function to do that.
"""
import pprint
s = pprint.pformat(dct)
print(re.sub("u'", "'", s))
def utf8_repr(func):
"""
``__repr__`` methods in Python 2 don't allow unicode objects to be
returned. Therefore cast them to utf-8 bytes in this decorator.
"""
def wrapper(self):
result = func(self)
if isinstance(result, unicode):
return result.encode('utf-8')
else:
return result
if is_py3:
return func
else:
return wrapper
if is_py3:
import queue
else:
import Queue as queue # noqa: F401
try:
# Attempt to load the C implementation of pickle on Python 2 as it is way
# faster.
import cPickle as pickle
except ImportError:
import pickle
import pickle
from typing import Any
class Unpickler(pickle.Unpickler):
def find_class(self, module: str, name: str) -> Any:
# Python 3.13 moved pathlib implementation out of __init__.py as part of
# generalising its implementation. Ensure that we support loading
# pickles from 3.13 on older version of Python. Since 3.13 maintained a
# compatible API, pickles from older Python work natively on the newer
# version.
if module == 'pathlib._local':
module = 'pathlib'
return super().find_class(module, name)
def pickle_load(file):
try:
if is_py3:
return pickle.load(file, encoding='bytes')
return pickle.load(file)
return Unpickler(file).load()
# Python on Windows don't throw EOF errors for pipes. So reraise them with
# the correct type, which is caught upwards.
except OSError:
@@ -403,24 +31,8 @@ def pickle_load(file):
raise
def _python2_dct_keys_to_unicode(data):
"""
Python 2 stores object __dict__ entries as bytes, not unicode, correct it
here. Python 2 can deal with both, Python 3 expects unicode.
"""
if isinstance(data, tuple):
return tuple(_python2_dct_keys_to_unicode(x) for x in data)
elif isinstance(data, list):
return list(_python2_dct_keys_to_unicode(x) for x in data)
elif hasattr(data, '__dict__') and type(data.__dict__) == dict:
data.__dict__ = {unicode(k): v for k, v in data.__dict__.items()}
return data
def pickle_dump(data, file, protocol):
try:
if not is_py3:
data = _python2_dct_keys_to_unicode(data)
pickle.dump(data, file, protocol)
# On Python 3.3 flush throws sometimes an error even though the writing
# operation should be completed.
@@ -431,201 +43,3 @@ def pickle_dump(data, file, protocol):
if sys.platform == 'win32':
raise IOError(errno.EPIPE, "Broken pipe")
raise
# Determine the highest protocol version compatible for a given list of Python
# versions.
def highest_pickle_protocol(python_versions):
protocol = 4
for version in python_versions:
if version[0] == 2:
# The minimum protocol version for the versions of Python that we
# support (2.7 and 3.3+) is 2.
return 2
if version[1] < 4:
protocol = 3
return protocol
try:
from inspect import Parameter
except ImportError:
class Parameter(object):
POSITIONAL_ONLY = object()
POSITIONAL_OR_KEYWORD = object()
VAR_POSITIONAL = object()
KEYWORD_ONLY = object()
VAR_KEYWORD = object()
class GeneralizedPopen(subprocess.Popen):
def __init__(self, *args, **kwargs):
if os.name == 'nt':
try:
# Was introduced in Python 3.7.
CREATE_NO_WINDOW = subprocess.CREATE_NO_WINDOW
except AttributeError:
CREATE_NO_WINDOW = 0x08000000
kwargs['creationflags'] = CREATE_NO_WINDOW
# The child process doesn't need file descriptors except 0, 1, 2.
# This is unix only.
kwargs['close_fds'] = 'posix' in sys.builtin_module_names
super(GeneralizedPopen, self).__init__(*args, **kwargs)
# shutil.which is not available on Python 2.7.
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
"""Given a command, mode, and a PATH string, return the path which
conforms to the given mode on the PATH, or None if there is no such
file.
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
of os.environ.get("PATH"), or can be overridden with a custom search
path.
"""
# Check that a given file can be accessed with the correct mode.
# Additionally check that `file` is not a directory, as on Windows
# directories pass the os.access check.
def _access_check(fn, mode):
return (os.path.exists(fn) and os.access(fn, mode)
and not os.path.isdir(fn))
# If we're given a path with a directory part, look it up directly rather
# than referring to PATH directories. This includes checking relative to the
# current directory, e.g. ./script
if os.path.dirname(cmd):
if _access_check(cmd, mode):
return cmd
return None
if path is None:
path = os.environ.get("PATH", os.defpath)
if not path:
return None
path = path.split(os.pathsep)
if sys.platform == "win32":
# The current directory takes precedence on Windows.
if os.curdir not in path:
path.insert(0, os.curdir)
# PATHEXT is necessary to check on Windows.
pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
# See if the given file matches any of the expected path extensions.
# This will allow us to short circuit when given "python.exe".
# If it does match, only test that one, otherwise we have to try
# others.
if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
files = [cmd]
else:
files = [cmd + ext for ext in pathext]
else:
# On other platforms you don't have things like PATHEXT to tell you
# what file suffixes are executable, so just pass on cmd as-is.
files = [cmd]
seen = set()
for dir in path:
normdir = os.path.normcase(dir)
if normdir not in seen:
seen.add(normdir)
for thefile in files:
name = os.path.join(dir, thefile)
if _access_check(name, mode):
return name
return None
if not is_py3:
# Simplified backport of Python 3 weakref.finalize:
# https://github.com/python/cpython/blob/ded4737989316653469763230036b04513cb62b3/Lib/weakref.py#L502-L662
class finalize(object):
"""Class for finalization of weakrefable objects.
finalize(obj, func, *args, **kwargs) returns a callable finalizer
object which will be called when obj is garbage collected. The
first time the finalizer is called it evaluates func(*arg, **kwargs)
and returns the result. After this the finalizer is dead, and
calling it just returns None.
When the program exits any remaining finalizers will be run.
"""
# Finalizer objects don't have any state of their own.
# This ensures that they cannot be part of a ref-cycle.
__slots__ = ()
_registry = {}
def __init__(self, obj, func, *args, **kwargs):
info = functools.partial(func, *args, **kwargs)
info.weakref = weakref.ref(obj, self)
self._registry[self] = info
# To me it's an absolute mystery why in Python 2 we need _=None. It
# makes really no sense since it's never really called. Then again it
# might be called by Python 2.7 itself, but weakref.finalize is not
# documented in Python 2 and therefore shouldn't be randomly called.
# We never call this stuff with a parameter and therefore this
# parameter should not be needed. But it is. ~dave
def __call__(self, _=None):
"""Return func(*args, **kwargs) if alive."""
info = self._registry.pop(self, None)
if info:
return info()
@classmethod
def _exitfunc(cls):
if not cls._registry:
return
for finalizer in list(cls._registry):
try:
finalizer()
except Exception:
sys.excepthook(*sys.exc_info())
assert finalizer not in cls._registry
atexit.register(finalize._exitfunc)
weakref.finalize = finalize
if is_py3 and sys.version_info[1] > 5:
from inspect import unwrap
else:
# Only Python >=3.6 does properly limit the amount of unwraps. This is very
# relevant in the case of unittest.mock.patch.
# Below is the implementation of Python 3.7.
def unwrap(func, stop=None):
"""Get the object wrapped by *func*.
Follows the chain of :attr:`__wrapped__` attributes returning the last
object in the chain.
*stop* is an optional callback accepting an object in the wrapper chain
as its sole argument that allows the unwrapping to be terminated early if
the callback returns a true value. If the callback never returns a true
value, the last object in the chain is returned as usual. For example,
:func:`signature` uses this to stop unwrapping if any object in the
chain has a ``__signature__`` attribute defined.
:exc:`ValueError` is raised if a cycle is encountered.
"""
if stop is None:
def _is_wrapper(f):
return hasattr(f, '__wrapped__')
else:
def _is_wrapper(f):
return hasattr(f, '__wrapped__') and not stop(f)
f = func # remember the original func for error reporting
# Memoise by id to tolerate non-hashable objects, but store objects to
# ensure they aren't destroyed, which would allow their IDs to be reused.
memo = {id(f): f}
recursion_limit = sys.getrecursionlimit()
while _is_wrapper(func):
func = func.__wrapped__
id_func = id(func)
if (id_func in memo) or (len(memo) >= recursion_limit):
raise ValueError('wrapper loop when unwrapping {!r}'.format(f))
memo[id_func] = func
return func
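
The ``find_class`` override kept above remaps the module name that Python 3.13 pickles use for pathlib objects. The same idea as a standalone sketch (a hypothetical helper, not part of jedi's API)::

    import io
    import pickle

    class PathlibCompatUnpickler(pickle.Unpickler):
        def find_class(self, module, name):
            # Pickles written on Python 3.13 reference 'pathlib._local';
            # older interpreters only know 'pathlib'.
            if module == 'pathlib._local':
                module = 'pathlib'
            return super().find_class(module, name)

    def load_compat(raw_bytes):
        # Load a pickle that may have been produced on Python 3.13.
        return PathlibCompatUnpickler(io.BytesIO(raw_bytes)).load()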


@@ -7,15 +7,12 @@ Alternatively, if you don't need a custom function and are happy with printing
debug messages to stdout, simply call :func:`set_debug_function` without
arguments.
"""
import os
import sys
import warnings
from functools import wraps
from pathlib import Path
import parso
from parso.python import tree
from jedi._compatibility import force_unicode, cast_path, is_py3
from jedi.parser_utils import get_executable_nodes
from jedi import debug
from jedi import settings
@@ -51,19 +48,7 @@ from jedi.inference.utils import to_list
sys.setrecursionlimit(3000)
def _no_python2_support(func):
# TODO remove when removing Python 2/3.5
@wraps(func)
def wrapper(self, *args, **kwargs):
if self._inference_state.grammar.version_info < (3, 6) or sys.version_info < (3, 6):
raise NotImplementedError(
"No support for refactorings/search on Python 2/3.5"
)
return func(self, *args, **kwargs)
return wrapper
class Script(object):
class Script:
"""
A Script is the base for completions, goto or whatever you want to do with
Jedi. The counter part of this class is :class:`Interpreter`, which works
@@ -103,83 +88,33 @@ class Script(object):
:param code: The source code of the current file, separated by newlines.
:type code: str
:param line: Deprecated, please use it directly on e.g. ``.complete``
:type line: int
:param column: Deprecated, please use it directly on e.g. ``.complete``
:type column: int
:param path: The path of the file in the file system, or ``''`` if
it hasn't been saved yet.
:type path: str or None
:param encoding: Deprecated, cast to unicode yourself. The encoding of
``code``, if it is not a ``unicode`` object (default ``'utf-8'``).
:type encoding: str
:param sys_path: Deprecated, use the project parameter.
:type sys_path: typing.List[str]
:type path: str or pathlib.Path or None
:param Environment environment: Provide a predefined :ref:`Environment <environments>`
to work with a specific Python version or virtualenv.
:param Project project: Provide a :class:`.Project` to make sure finding
references works well, because the right folder is searched. There are
also ways to modify the sys path and other things.
"""
def __init__(self, code=None, line=None, column=None, path=None,
encoding=None, sys_path=None, environment=None,
project=None, source=None):
def __init__(self, code=None, *, path=None, environment=None, project=None):
self._orig_path = path
# An empty path (also empty string) should always result in no path.
self.path = os.path.abspath(path) if path else None
if isinstance(path, str):
path = Path(path)
self.path = path.absolute() if path else None
if encoding is None:
encoding = 'utf-8'
else:
warnings.warn(
"Deprecated since version 0.17.0. You should cast to valid "
"unicode yourself, especially if you are not using utf-8.",
DeprecationWarning,
stacklevel=2
)
if line is not None:
warnings.warn(
"Providing the line is now done in the functions themselves "
"like `Script(...).complete(line, column)`",
DeprecationWarning,
stacklevel=2
)
if column is not None:
warnings.warn(
"Providing the column is now done in the functions themselves "
"like `Script(...).complete(line, column)`",
DeprecationWarning,
stacklevel=2
)
if source is not None:
code = source
warnings.warn(
"Use the code keyword argument instead.",
DeprecationWarning,
stacklevel=2
)
if code is None:
if path is None:
raise ValueError("Must provide at least one of code or path")
# TODO add a better warning than the traceback!
with open(path, 'rb') as f:
code = f.read()
if sys_path is not None and not is_py3:
sys_path = list(map(force_unicode, sys_path))
if project is None:
# Load the Python grammar of the current interpreter.
project = get_default_project(
os.path.dirname(self.path) if path else None
)
# TODO deprecate and remove sys_path from the Script API.
if sys_path is not None:
project._sys_path = sys_path
warnings.warn(
"Deprecated since version 0.17.0. Use the project API instead, "
"which means Script(project=Project(dir, sys_path=sys_path)) instead.",
DeprecationWarning,
stacklevel=2
)
project = get_default_project(None if self.path is None else self.path.parent)
self._inference_state = InferenceState(
project, environment=environment, script_path=self.path
@@ -188,8 +123,7 @@ class Script(object):
self._module_node, code = self._inference_state.parse_and_get_code(
code=code,
path=self.path,
encoding=encoding,
use_latest_grammar=path and path.endswith('.pyi'),
use_latest_grammar=path and path.suffix == '.pyi',
cache=False, # No disk cache, because the current script often changes.
diff_cache=settings.fast_parser,
cache_path=settings.cache_directory,
@@ -197,7 +131,6 @@ class Script(object):
debug.speed('parsed')
self._code_lines = parso.split_lines(code, keepends=True)
self._code = code
self._pos = line, column
cache.clear_time_caches()
debug.reset_time()
@@ -220,11 +153,12 @@ class Script(object):
if self.path is None:
file_io = None
else:
file_io = KnownContentFileIO(cast_path(self.path), self._code)
if self.path is not None and self.path.endswith('.pyi'):
file_io = KnownContentFileIO(self.path, self._code)
if self.path is not None and self.path.suffix == '.pyi':
# We are in a stub file. Try to load the stub properly.
stub_module = load_proper_stub_module(
self._inference_state,
self._inference_state.latest_grammar,
file_io,
names,
self._module_node
@@ -242,7 +176,7 @@ class Script(object):
code_lines=self._code_lines,
is_package=is_package,
)
if names[0] not in ('builtins', '__builtin__', 'typing'):
if names[0] not in ('builtins', 'typing'):
# These modules are essential for Jedi, so don't overwrite them.
self._inference_state.module_cache.add(names, ValueSet([module]))
return module
@@ -258,7 +192,7 @@ class Script(object):
)
@validate_line_column
def complete(self, line=None, column=None, **kwargs):
def complete(self, line=None, column=None, *, fuzzy=False):
"""
Completes objects under the cursor.
@@ -272,9 +206,7 @@ class Script(object):
before magic methods and name mangled names that start with ``__``.
:rtype: list of :class:`.Completion`
"""
return self._complete(line, column, **kwargs)
def _complete(self, line, column, fuzzy=False): # Python 2...
self._inference_state.reset_recursion_limitations()
with debug.increase_indent_cm('complete'):
completion = Completion(
self._inference_state, self._get_module_context(), self._code_lines,
@@ -282,16 +214,8 @@ class Script(object):
)
return completion.complete()
def completions(self, fuzzy=False):
warnings.warn(
"Deprecated since version 0.16.0. Use Script(...).complete instead.",
DeprecationWarning,
stacklevel=2
)
return self.complete(*self._pos, fuzzy=fuzzy)
@validate_line_column
def infer(self, line=None, column=None, **kwargs):
def infer(self, line=None, column=None, *, only_stubs=False, prefer_stubs=False):
"""
Return the definitions of under the cursor. It is basically a wrapper
around Jedi's type inference.
@@ -307,24 +231,18 @@ class Script(object):
:param prefer_stubs: Prefer stubs to Python objects for this method.
:rtype: list of :class:`.Name`
"""
with debug.increase_indent_cm('infer'):
return self._infer(line, column, **kwargs)
def goto_definitions(self, **kwargs):
warnings.warn(
"Deprecated since version 0.16.0. Use Script(...).infer instead.",
DeprecationWarning,
stacklevel=2
)
return self.infer(*self._pos, **kwargs)
def _infer(self, line, column, only_stubs=False, prefer_stubs=False):
self._inference_state.reset_recursion_limitations()
pos = line, column
leaf = self._module_node.get_name_of_position(pos)
if leaf is None:
leaf = self._module_node.get_leaf_for_position(pos)
if leaf is None or leaf.type == 'string':
return []
if leaf.end_pos == (line, column) and leaf.type == 'operator':
next_ = leaf.get_next_leaf()
if next_.start_pos == leaf.end_pos \
and next_.type in ('number', 'string', 'keyword'):
leaf = next_
context = self._get_module_context().create_context(leaf)
@@ -341,19 +259,9 @@ class Script(object):
# the API.
return helpers.sorted_definitions(set(defs))
def goto_assignments(self, follow_imports=False, follow_builtin_imports=False, **kwargs):
warnings.warn(
"Deprecated since version 0.16.0. Use Script(...).goto instead.",
DeprecationWarning,
stacklevel=2
)
return self.goto(*self._pos,
follow_imports=follow_imports,
follow_builtin_imports=follow_builtin_imports,
**kwargs)
@validate_line_column
def goto(self, line=None, column=None, **kwargs):
def goto(self, line=None, column=None, *, follow_imports=False, follow_builtin_imports=False,
only_stubs=False, prefer_stubs=False):
"""
Goes to the name that defined the object under the cursor. Optionally
you can follow imports.
@@ -367,11 +275,7 @@ class Script(object):
:param prefer_stubs: Prefer stubs to Python objects for this method.
:rtype: list of :class:`.Name`
"""
with debug.increase_indent_cm('goto'):
return self._goto(line, column, **kwargs)
def _goto(self, line, column, follow_imports=False, follow_builtin_imports=False,
only_stubs=False, prefer_stubs=False):
self._inference_state.reset_recursion_limitations()
tree_name = self._module_node.get_name_of_position((line, column))
if tree_name is None:
# Without a name we really just want to jump to the result e.g.
@@ -407,8 +311,7 @@ class Script(object):
# Avoid duplicates
return list(set(helpers.sorted_definitions(defs)))
@_no_python2_support
def search(self, string, **kwargs):
def search(self, string, *, all_scopes=False):
"""
Searches a name in the current file. For a description of how the
search string should look like, please have a look at
@@ -419,9 +322,6 @@ class Script(object):
functions and classes.
:yields: :class:`.Name`
"""
return self._search(string, **kwargs) # Python 2 ...
def _search(self, string, all_scopes=False):
return self._search_func(string, all_scopes=all_scopes)
@to_list
@@ -468,10 +368,17 @@ class Script(object):
:rtype: list of :class:`.Name`
"""
self._inference_state.reset_recursion_limitations()
definitions = self.goto(line, column, follow_imports=True)
if definitions:
return definitions
leaf = self._module_node.get_leaf_for_position((line, column))
if leaf is not None and leaf.end_pos == (line, column) and leaf.type == 'newline':
next_ = leaf.get_next_leaf()
if next_ is not None and next_.start_pos == leaf.end_pos:
leaf = next_
if leaf is not None and leaf.type in ('keyword', 'operator', 'error_leaf'):
def need_pydoc():
if leaf.value in ('(', ')', '[', ']'):
@@ -490,14 +397,6 @@ class Script(object):
return [classes.Name(self._inference_state, name)]
return []
def usages(self, **kwargs):
warnings.warn(
"Deprecated since version 0.16.0. Use Script(...).get_references instead.",
DeprecationWarning,
stacklevel=2
)
return self.get_references(*self._pos, **kwargs)
@validate_line_column
def get_references(self, line=None, column=None, **kwargs):
"""
@@ -505,12 +404,13 @@ class Script(object):
quite hard to do for Jedi, if it is too complicated, Jedi will stop
searching.
:param include_builtins: Default ``True``. If ``False``, checks if a reference
:param include_builtins: Default ``True``. If ``False``, checks if a definition
is a builtin (e.g. ``sys``) and in that case does not return it.
:param scope: Default ``'project'``. If ``'file'``, include references in
the current module only.
:rtype: list of :class:`.Name`
"""
self._inference_state.reset_recursion_limitations()
def _references(include_builtins=True, scope='project'):
if scope not in ('project', 'file'):
@@ -528,14 +428,6 @@ class Script(object):
return helpers.sorted_definitions(definitions)
return _references(**kwargs)
def call_signatures(self):
warnings.warn(
"Deprecated since version 0.16.0. Use Script(...).get_signatures instead.",
DeprecationWarning,
stacklevel=2
)
return self.get_signatures(*self._pos)
@validate_line_column
def get_signatures(self, line=None, column=None):
"""
@@ -553,6 +445,7 @@ class Script(object):
:rtype: list of :class:`.Signature`
"""
self._inference_state.reset_recursion_limitations()
pos = line, column
call_details = helpers.get_signature_details(self._module_node, pos)
if call_details is None:
@@ -672,6 +565,7 @@ class Script(object):
return parso_to_jedi_errors(self._inference_state.grammar, self._module_node)
def _names(self, all_scopes=False, definitions=True, references=False):
self._inference_state.reset_recursion_limitations()
# Set line/column to a random position, because they don't matter.
module_context = self._get_module_context()
defs = [
@@ -685,8 +579,7 @@ class Script(object):
]
return sorted(defs, key=lambda x: x.start_pos)
@_no_python2_support
def rename(self, line=None, column=None, **kwargs):
def rename(self, line=None, column=None, *, new_name):
"""
Renames all references of the variable under the cursor.
@@ -695,16 +588,13 @@ class Script(object):
:raises: :exc:`.RefactoringError`
:rtype: :class:`.Refactoring`
"""
return self._rename(line, column, **kwargs)
def _rename(self, line, column, new_name): # Python 2...
definitions = self.get_references(line, column, include_builtins=False)
return refactoring.rename(self._inference_state, definitions, new_name)
@_no_python2_support
def extract_variable(self, line, column, **kwargs):
@validate_line_column
def extract_variable(self, line, column, *, new_name, until_line=None, until_column=None):
"""
Moves an expression to a new statemenet.
Moves an expression to a new statement.
For example if you have the cursor on ``foo`` and provide a
``new_name`` called ``bar``::
@@ -727,10 +617,6 @@ class Script(object):
:raises: :exc:`.RefactoringError`
:rtype: :class:`.Refactoring`
"""
return self._extract_variable(line, column, **kwargs) # Python 2...
@validate_line_column
def _extract_variable(self, line, column, new_name, until_line=None, until_column=None):
if until_line is None and until_column is None:
until_pos = None
else:
@@ -744,8 +630,8 @@ class Script(object):
new_name, (line, column), until_pos
)
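For illustration, assuming a small made-up snippet, an expression can be pulled out into its own variable like this::

    import jedi

    script = jedi.Script("value = 3 * (4 + 5)\n", path="example.py")
    # Extract the parenthesized expression (columns 12-19) into `total`.
    refactoring = script.extract_variable(1, 12, new_name="total",
                                          until_line=1, until_column=19)
    print(refactoring.get_diff())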
@_no_python2_support
def extract_function(self, line, column, **kwargs):
@validate_line_column
def extract_function(self, line, column, *, new_name, until_line=None, until_column=None):
"""
Moves an expression to a new function.
@@ -778,10 +664,6 @@ class Script(object):
:raises: :exc:`.RefactoringError`
:rtype: :class:`.Refactoring`
"""
return self._extract_function(line, column, **kwargs) # Python 2...
@validate_line_column
def _extract_function(self, line, column, new_name, until_line=None, until_column=None):
if until_line is None and until_column is None:
until_pos = None
else:
@@ -795,7 +677,6 @@ class Script(object):
new_name, (line, column), until_pos
)
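The same keyword-only pattern applies to ``extract_function``; a hedged sketch (positions are made up, and a ``RefactoringError`` may be raised for ranges Jedi cannot extract)::

    import jedi

    source = "def f():\n    a = 1\n    b = 2\n    return a + b\n"
    script = jedi.Script(source, path="example.py")
    # Extract the expression `a + b` on line 4 into a new function `add`.
    refactoring = script.extract_function(4, 11, new_name="add",
                                          until_line=4, until_column=16)
    print(refactoring.get_diff())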
@_no_python2_support
def inline(self, line=None, column=None):
"""
Inlines a variable under the cursor. This is basically the opposite of
@@ -840,9 +721,8 @@ class Interpreter(Script):
:param namespaces: A list of namespace dictionaries such as the one
returned by :func:`globals` and :func:`locals`.
"""
_allow_descriptor_getattr_default = True
def __init__(self, code, namespaces, **kwds):
def __init__(self, code, namespaces, *, project=None, **kwds):
try:
namespaces = [dict(n) for n in namespaces]
except Exception:
@@ -855,16 +735,32 @@ class Interpreter(Script):
if not isinstance(environment, InterpreterEnvironment):
raise TypeError("The environment needs to be an InterpreterEnvironment subclass.")
super(Interpreter, self).__init__(code, environment=environment,
project=Project(os.getcwd()), **kwds)
if project is None:
project = Project(Path.cwd())
super().__init__(code, environment=environment, project=project, **kwds)
self.namespaces = namespaces
self._inference_state.allow_descriptor_getattr = self._allow_descriptor_getattr_default
self._inference_state.allow_unsafe_executions = \
settings.allow_unsafe_interpreter_executions
# Dynamic params search is important when we work on functions that are
# called by other pieces of code. However, for interpreter completions
# this is not important at all, because the current code is always new
# and will never be called by anything else.
# Also sometimes this logic goes a bit too far like in
# https://github.com/ipython/ipython/issues/13866, where it takes
# seconds to do a simple completion.
self._inference_state.do_dynamic_params_search = False
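A small sketch of what this enables in a REPL-like setting (the namespace content is arbitrary)::

    import jedi

    namespace = {"counter": 42}
    # Completions come from the live objects in the passed namespaces.
    interp = jedi.Interpreter("counter.bit_", [namespace])
    print([c.name for c in interp.complete()])   # e.g. ['bit_length', ...]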
@cache.memoize_method
def _get_module_context(self):
if self.path is None:
file_io = None
else:
file_io = KnownContentFileIO(self.path, self._code)
tree_module_value = ModuleValue(
self._inference_state, self._module_node,
file_io=KnownContentFileIO(self.path, self._code),
file_io=file_io,
string_names=('__main__',),
code_lines=self._code_lines,
)
@@ -874,21 +770,6 @@ class Interpreter(Script):
)
def names(source=None, path=None, encoding='utf-8', all_scopes=False,
definitions=True, references=False, environment=None):
warnings.warn(
"Deprecated since version 0.16.0. Use Script(...).get_names instead.",
DeprecationWarning,
stacklevel=2
)
return Script(source, path=path, encoding=encoding).get_names(
all_scopes=all_scopes,
definitions=definitions,
references=references,
)
def preload_module(*modules):
"""
Preloading modules tells Jedi to load a module now, instead of lazy parsing
@@ -899,7 +780,7 @@ def preload_module(*modules):
"""
for m in modules:
s = "import %s as x; x." % m
Script(s, path=None).complete(1, len(s))
Script(s).complete(1, len(s))
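For example, heavyweight packages can be parsed ahead of the first completion request (the package names below are only illustrative)::

    import jedi

    jedi.preload_module("numpy", "pandas")   # parsed now instead of lazily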
def set_debug_function(func_cb=debug.print_to_stdout, warnings=True,

View File

@@ -14,21 +14,20 @@ These classes are the much biggest part of the API, because they contain
the interesting information about all operations.
"""
import re
import sys
import warnings
from pathlib import Path
from typing import Optional
from parso.python.tree import search_ancestor
from parso.tree import search_ancestor
from jedi import settings
from jedi import debug
from jedi.inference.utils import unite
from jedi.cache import memoize_method
from jedi.inference import imports
from jedi.inference.imports import ImportName
from jedi.inference.compiled.mixed import MixedName
from jedi.inference.gradual.typeshed import StubModuleValue
from jedi.inference.names import ImportName, SubModuleName
from jedi.inference.gradual.stub_value import StubModuleValue
from jedi.inference.gradual.conversion import convert_names, convert_values
from jedi.inference.base_value import ValueSet
from jedi.inference.base_value import ValueSet, HasNoContext
from jedi.api.keywords import KeywordName
from jedi.api import completion_cache
from jedi.api.helpers import filter_follow_imports
@@ -38,13 +37,17 @@ def _sort_names_by_start_pos(names):
return sorted(names, key=lambda s: s.start_pos or (0, 0))
def defined_names(inference_state, context):
def defined_names(inference_state, value):
"""
List sub-definitions (e.g., methods in class).
:type scope: Scope
:rtype: list of Name
"""
try:
context = value.as_context()
except HasNoContext:
return []
filter = next(context.get_filters())
names = [name for name in filter.values()]
return [Name(inference_state, n) for n in _sort_names_by_start_pos(names)]
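This helper backs ``Name.defined_names()``; roughly (the example source is made up)::

    import jedi

    src = "class Foo:\n    def bar(self): pass\n    baz = 1\n"
    foo = jedi.Script(src).get_names()[0]            # the class Foo
    print([d.name for d in foo.defined_names()])     # ['bar', 'baz']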
@@ -54,7 +57,7 @@ def _values_to_definitions(values):
return [Name(c.inference_state, c.name) for c in values]
class BaseName(object):
class BaseName:
"""
The base class for all definitions, completions and signatures.
"""
@@ -71,7 +74,6 @@ class BaseName(object):
'_collections': 'collections',
'_socket': 'socket',
'_sqlite3': 'sqlite3',
'__builtin__': 'builtins',
}
_tuple_mapping = dict((tuple(k.split('.')), v) for (k, v) in {
@@ -94,17 +96,16 @@ class BaseName(object):
return self._name.get_root_context()
@property
def module_path(self):
def module_path(self) -> Optional[Path]:
"""
Shows the file path of a module. e.g. ``/usr/lib/python2.7/os.py``
:rtype: str or None
Shows the file path of a module. e.g. ``/usr/lib/python3.9/os.py``
"""
module = self._get_module_context()
if module.is_stub() or not module.is_compiled():
# Compiled modules should not return a module path even if they
# have one.
return self._get_module_context().py__file__()
path: Optional[Path] = self._get_module_context().py__file__()
return path
return None
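With this change the property returns a ``pathlib.Path`` (or ``None``); for example::

    import jedi

    name = jedi.Script("import json\njson").infer(2, 2)[0]
    print(name.module_path)   # a pathlib.Path to json/__init__.py, or None for compiled modules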
@@ -129,7 +130,6 @@ class BaseName(object):
to Jedi, :meth:`jedi.Script.infer` should return a list of
definition for ``sys``, ``f``, ``C`` and ``x``.
>>> from jedi._compatibility import no_unicode_pprint
>>> from jedi import Script
>>> source = '''
... import keyword
@@ -155,7 +155,7 @@ class BaseName(object):
so that it is easy to relate the result to the source code.
>>> defs = sorted(defs, key=lambda d: d.line)
>>> no_unicode_pprint(defs) # doctest: +NORMALIZE_WHITESPACE
>>> print(defs) # doctest: +NORMALIZE_WHITESPACE
[<Name full_name='keyword', description='module keyword'>,
<Name full_name='__main__.C', description='class C'>,
<Name full_name='__main__.D', description='instance D'>,
@@ -163,7 +163,7 @@ class BaseName(object):
Finally, here is what you can get from :attr:`type`:
>>> defs = [str(d.type) for d in defs] # It's unicode and in Py2 has u before it.
>>> defs = [d.type for d in defs]
>>> defs[0]
'module'
>>> defs[1]
@@ -174,7 +174,7 @@ class BaseName(object):
'function'
Valid values for type are ``module``, ``class``, ``instance``, ``function``,
``param``, ``path``, ``keyword`` and ``statement``.
``param``, ``path``, ``keyword``, ``property`` and ``statement``.
"""
tree_name = self._name.tree_name
@@ -186,7 +186,7 @@ class BaseName(object):
tree_name.is_definition():
resolve = True
if isinstance(self._name, imports.SubModuleName) or resolve:
if isinstance(self._name, SubModuleName) or resolve:
for value in self._name.infer():
return value.api_type
return self._name.api_type
@@ -324,7 +324,6 @@ class BaseName(object):
Example:
>>> from jedi._compatibility import no_unicode_pprint
>>> from jedi import Script
>>> source = '''
... def f():
@@ -337,10 +336,10 @@ class BaseName(object):
>>> script = Script(source) # line is maximum by default
>>> defs = script.infer(column=3)
>>> defs = sorted(defs, key=lambda d: d.line)
>>> no_unicode_pprint(defs) # doctest: +NORMALIZE_WHITESPACE
>>> print(defs) # doctest: +NORMALIZE_WHITESPACE
[<Name full_name='__main__.f', description='def f'>,
<Name full_name='__main__.C', description='class C'>]
>>> str(defs[0].description) # strip literals in python2
>>> str(defs[0].description)
'def f'
>>> str(defs[1].description)
'class C'
@@ -424,7 +423,10 @@ class BaseName(object):
return False
return tree_name.is_definition() and tree_name.parent.type == 'trailer'
def goto(self, **kwargs):
@debug.increase_indent_cm('goto on name')
def goto(self, *, follow_imports=False, follow_builtin_imports=False,
only_stubs=False, prefer_stubs=False):
"""
Like :meth:`.Script.goto` (also supports the same params), but does it
for the current name. This is typically useful if you are using
@@ -437,20 +439,6 @@ class BaseName(object):
:param prefer_stubs: Prefer stubs to Python objects for this goto call.
:rtype: list of :class:`Name`
"""
with debug.increase_indent_cm('goto for %s' % self._name):
return self._goto(**kwargs)
def goto_assignments(self, **kwargs): # Python 2...
warnings.warn(
"Deprecated since version 0.16.0. Use .goto.",
DeprecationWarning,
stacklevel=2
)
return self.goto(**kwargs)
def _goto(self, follow_imports=False, follow_builtin_imports=False,
only_stubs=False, prefer_stubs=False):
if not self._name.is_value_name:
return []
@@ -465,7 +453,8 @@ class BaseName(object):
return [self if n == self._name else Name(self._inference_state, n)
for n in names]
def infer(self, **kwargs): # Python 2...
@debug.increase_indent_cm('infer on name')
def infer(self, *, only_stubs=False, prefer_stubs=False):
"""
Like :meth:`.Script.infer`, it can be useful to understand which type
the current name has.
@@ -482,10 +471,6 @@ class BaseName(object):
inference call.
:rtype: list of :class:`Name`
"""
with debug.increase_indent_cm('infer for %s' % self._name):
return self._infer(**kwargs)
def _infer(self, only_stubs=False, prefer_stubs=False):
assert not (only_stubs and prefer_stubs)
if not self._name.is_value_name:
@@ -504,28 +489,6 @@ class BaseName(object):
return [self if n == self._name else Name(self._inference_state, n)
for n in resulting_names]
@property
@memoize_method
def params(self):
warnings.warn(
"Deprecated since version 0.16.0. Use get_signatures()[...].params",
DeprecationWarning,
stacklevel=2
)
# Only return the first one. There might be multiple one, especially
# with overloading.
for signature in self._get_signatures():
return [
Name(self._inference_state, n)
for n in signature.get_param_names(resolve_stars=True)
]
if self.type == 'function' or self.type == 'class':
# Fallback, if no signatures were defined (which is probably by
# itself a bug).
return []
raise AttributeError('There are no params defined on this.')
def parent(self):
"""
Returns the parent scope of this identifier.
@@ -590,6 +553,8 @@ class BaseName(object):
return ''.join(lines[start_index:index + after + 1])
def _get_signatures(self, for_docstring=False):
if self._name.api_type == 'property':
return []
if for_docstring and self._name.api_type == 'statement' and not self.is_stub():
# For docstrings we don't resolve signatures if they are simple
# statements and not stubs. This is a speed optimization.
@@ -645,7 +610,7 @@ class Completion(BaseName):
"""
def __init__(self, inference_state, name, stack, like_name_length,
is_fuzzy, cached_name=None):
super(Completion, self).__init__(inference_state, name)
super().__init__(inference_state, name)
self._like_name_length = like_name_length
self._stack = stack
@@ -716,7 +681,7 @@ class Completion(BaseName):
# wouldn't load like > 100 Python modules anymore.
fast = False
return super(Completion, self).docstring(raw=raw, fast=fast)
return super().docstring(raw=raw, fast=fast)
def _get_docstring(self):
if self._cached_name is not None:
@@ -725,7 +690,7 @@ class Completion(BaseName):
self._name.get_public_name(),
lambda: self._get_cache()
)
return super(Completion, self)._get_docstring()
return super()._get_docstring()
def _get_docstring_signature(self):
if self._cached_name is not None:
@@ -734,13 +699,13 @@ class Completion(BaseName):
self._name.get_public_name(),
lambda: self._get_cache()
)
return super(Completion, self)._get_docstring_signature()
return super()._get_docstring_signature()
def _get_cache(self):
return (
super(Completion, self).type,
super(Completion, self)._get_docstring_signature(),
super(Completion, self)._get_docstring(),
super().type,
super()._get_docstring_signature(),
super()._get_docstring(),
)
@property
@@ -756,7 +721,25 @@ class Completion(BaseName):
lambda: self._get_cache()
)
return super(Completion, self).type
return super().type
def get_completion_prefix_length(self):
"""
Returns the length of the prefix being completed.
For example, completing ``isinstance``::
isinstan# <-- Cursor is here
would return 8, because len('isinstan') == 8.
Assuming the following function definition::
def foo(param=0):
pass
completing ``foo(par`` would return 3.
"""
return self._like_name_length
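In practice (module and name chosen only for illustration)::

    import jedi

    comp = jedi.Script("import collections\ncollections.Ord").complete()[0]
    print(comp.name, comp.get_completion_prefix_length())   # e.g. OrderedDict 3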
def __repr__(self):
return '<%s: %s>' % (type(self).__name__, self._name.get_public_name())
@@ -768,16 +751,7 @@ class Name(BaseName):
:meth:`.Script.goto` or :meth:`.Script.infer`.
"""
def __init__(self, inference_state, definition):
super(Name, self).__init__(inference_state, definition)
@property
def desc_with_module(self):
warnings.warn(
"Deprecated since version 0.17.0. No replacement for now, maybe .full_name helps",
DeprecationWarning,
stacklevel=2
)
return "%s:%s" % (self.module_name, self.description)
super().__init__(inference_state, definition)
@memoize_method
def defined_names(self):
@@ -788,7 +762,7 @@ class Name(BaseName):
"""
defs = self._name.infer()
return sorted(
unite(defined_names(self._inference_state, d.as_context()) for d in defs),
unite(defined_names(self._inference_state, d) for d in defs),
key=lambda s: s._name.start_pos or (0, 0)
)
@@ -821,7 +795,7 @@ class BaseSignature(Name):
calls.
"""
def __init__(self, inference_state, signature):
super(BaseSignature, self).__init__(inference_state, signature.name)
super().__init__(inference_state, signature.name)
self._signature = signature
@property
@@ -851,7 +825,7 @@ class Signature(BaseSignature):
:meth:`.Script.get_signatures`.
"""
def __init__(self, inference_state, signature, call_details):
super(Signature, self).__init__(inference_state, signature)
super().__init__(inference_state, signature)
self._call_details = call_details
self._signature = signature
@@ -918,8 +892,4 @@ class ParamName(Name):
:rtype: :py:attr:`inspect.Parameter.kind`
"""
if sys.version_info < (3, 5):
raise NotImplementedError(
'Python 2 is end-of-life, the new feature is not available for it'
)
return self._name.get_kind()

View File

@@ -1,12 +1,12 @@
import re
from textwrap import dedent
from inspect import Parameter
from parso.python.token import PythonTokenTypes
from parso.python import tree
from parso.tree import search_ancestor, Leaf
from parso import split_lines
from jedi._compatibility import Parameter
from jedi import debug
from jedi import settings
from jedi.api import classes
@@ -18,7 +18,8 @@ from jedi.inference import imports
from jedi.inference.base_value import ValueSet
from jedi.inference.helpers import infer_call_of_leaf, parse_dotted_names
from jedi.inference.context import get_global_filters
from jedi.inference.value import TreeInstance, ModuleValue
from jedi.inference.value import TreeInstance
from jedi.inference.docstring_utils import DocstringModule
from jedi.inference.names import ParamNameWrapper, SubModuleName
from jedi.inference.gradual.conversion import convert_values, convert_names
from jedi.parser_utils import cut_value_at_position
@@ -34,9 +35,7 @@ def _get_signature_param_names(signatures, positional_count, used_kwargs):
# Add named params
for call_sig in signatures:
for i, p in enumerate(call_sig.params):
# Allow protected access, because it's a public API.
# TODO reconsider with Python 2 drop
kind = p._name.get_kind()
kind = p.kind
if i < positional_count and kind == Parameter.POSITIONAL_OR_KEYWORD:
continue
if kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) \
@@ -51,8 +50,7 @@ def _must_be_kwarg(signatures, positional_count, used_kwargs):
must_be_kwarg = True
for signature in signatures:
for i, p in enumerate(signature.params):
# TODO reconsider with Python 2 drop
kind = p._name.get_kind()
kind = p.kind
if kind is Parameter.VAR_POSITIONAL:
# In case there were not already kwargs, the next param can
# always be a normal argument.
@@ -67,12 +65,15 @@ def _must_be_kwarg(signatures, positional_count, used_kwargs):
return must_be_kwarg
def filter_names(inference_state, completion_names, stack, like_name, fuzzy, cached_name):
def filter_names(inference_state, completion_names, stack, like_name, fuzzy,
imported_names, cached_name):
comp_dct = set()
if settings.case_insensitive_completion:
like_name = like_name.lower()
for name in completion_names:
string = name.string_name
if string in imported_names and string != like_name:
continue
if settings.case_insensitive_completion:
string = string.lower()
if helpers.match(string, like_name, fuzzy=fuzzy):
@@ -140,6 +141,11 @@ class Completion:
self._fuzzy = fuzzy
# Return list of completions in this order:
# - Beginning with what user is typing
# - Public (alphabet)
# - Private ("_xxx")
# - Dunder ("__xxx")
def complete(self):
leaf = self._module_node.get_leaf_for_position(
self._original_position,
@@ -171,14 +177,19 @@ class Completion:
cached_name, completion_names = self._complete_python(leaf)
imported_names = []
if leaf.parent is not None and leaf.parent.type in ['import_as_names', 'dotted_as_names']:
imported_names.extend(extract_imported_names(leaf.parent))
completions = list(filter_names(self._inference_state, completion_names,
self.stack, self._like_name,
self._fuzzy, cached_name=cached_name))
self._fuzzy, imported_names, cached_name=cached_name))
return (
# Removing duplicates mostly to remove False/True/None duplicates.
_remove_duplicates(prefixed_completions, completions)
+ sorted(completions, key=lambda x: (x.name.startswith('__'),
+ sorted(completions, key=lambda x: (not x.name.startswith(self._like_name),
x.name.startswith('__'),
x.name.startswith('_'),
x.name.lower()))
)
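A rough illustration of the resulting order (the exact lists depend on the Python version)::

    import jedi

    src = "class A:\n    x = 1\n    _y = 2\nA()."
    names = [c.name for c in jedi.Script(src).complete()]
    # Public names come first, then '_private', then '__dunder__' names,
    # alphabetically within each group; typically ['x', '_y', ...dunders...].
    print(names[:2])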
@@ -197,7 +208,6 @@ class Completion:
- In args: */**: no completion
- In params (also lambda): no completion before =
"""
grammar = self._inference_state.grammar
self.stack = stack = None
self._position = (
@@ -280,6 +290,10 @@ class Completion:
)
elif nonterminals[-1] in ('trailer', 'dotted_name') and nodes[-1] == '.':
dot = self._module_node.get_leaf_for_position(self._position)
if dot.type == "endmarker":
# This is a bit of a weird edge case, maybe we can somehow
# generalize this.
dot = leaf.get_previous_leaf()
cached_name, n = self._complete_trailer(dot.get_previous_leaf())
completion_names += n
elif self._is_parameter_completion():
@@ -441,6 +455,7 @@ class Completion:
- Having some doctest code that starts with `>>>`
- Having backticks that doesn't have whitespace inside it
"""
def iter_relevant_lines(lines):
include_next_line = False
for l in code_lines:
@@ -465,12 +480,12 @@ class Completion:
def _complete_code_lines(self, code_lines):
module_node = self._inference_state.grammar.parse(''.join(code_lines))
module_value = ModuleValue(
self._inference_state,
module_node,
module_value = DocstringModule(
in_module_context=self._module_context,
inference_state=self._inference_state,
module_node=module_node,
code_lines=code_lines,
)
module_value.parent_context = self._module_context
return Completion(
self._inference_state,
module_value.as_context(),
@@ -579,8 +594,8 @@ def _complete_getattr(user_context, instance):
will write it like this anyway and the other ones, well they are just
out of luck I guess :) ~dave.
"""
names = (instance.get_function_slot_names(u'__getattr__')
or instance.get_function_slot_names(u'__getattribute__'))
names = (instance.get_function_slot_names('__getattr__')
or instance.get_function_slot_names('__getattribute__'))
functions = ValueSet.from_sets(
name.infer()
for name in names
@@ -630,7 +645,7 @@ def search_in_module(inference_state, module_context, names, wanted_names,
new_names = []
for n in names:
if s == n.string_name:
if n.tree_name is not None and n.api_type == 'module' \
if n.tree_name is not None and n.api_type in ('module', 'namespace') \
and ignore_imports:
continue
new_names += complete_trailer(
@@ -663,3 +678,19 @@ def search_in_module(inference_state, module_context, names, wanted_names,
def_ = classes.Name(inference_state, n2)
if not wanted_type or wanted_type == def_.type:
yield def_
def extract_imported_names(node):
imported_names = []
if node.type in ['import_as_names', 'dotted_as_names', 'dotted_as_name', 'import_as_name']:
for index, child in enumerate(node.children):
if child.type == 'name':
if (index > 1 and node.children[index - 1].type == "keyword"
and node.children[index - 1].value == "as"):
continue
imported_names.append(child.value)
elif child.type in ('import_as_name', 'dotted_as_name'):
imported_names.extend(extract_imported_names(child))
return imported_names

View File

@@ -1,7 +1,13 @@
_cache = {}
from typing import Dict, Tuple, Callable
CacheValues = Tuple[str, str, str]
CacheValuesCallback = Callable[[], CacheValues]
def save_entry(module_name, name, cache):
_cache: Dict[str, Dict[str, CacheValues]] = {}
def save_entry(module_name: str, name: str, cache: CacheValues) -> None:
try:
module_cache = _cache[module_name]
except KeyError:
@@ -9,8 +15,8 @@ def save_entry(module_name, name, cache):
module_cache[name] = cache
def _create_get_from_cache(number):
def _get_from_cache(module_name, name, get_cache_values):
def _create_get_from_cache(number: int) -> Callable[[str, str, CacheValuesCallback], str]:
def _get_from_cache(module_name: str, name: str, get_cache_values: CacheValuesCallback) -> str:
try:
return _cache[module_name][name][number]
except KeyError:

View File

@@ -7,17 +7,22 @@ import sys
import hashlib
import filecmp
from collections import namedtuple
from shutil import which
from typing import TYPE_CHECKING
from jedi._compatibility import highest_pickle_protocol, which
from jedi.cache import memoize_method, time_cache
from jedi.inference.compiled.subprocess import CompiledSubprocess, \
InferenceStateSameProcess, InferenceStateSubprocess
import parso
_VersionInfo = namedtuple('VersionInfo', 'major minor micro')
if TYPE_CHECKING:
from jedi.inference import InferenceState
_SUPPORTED_PYTHONS = ['3.8', '3.7', '3.6', '3.5', '2.7']
_VersionInfo = namedtuple('VersionInfo', 'major minor micro') # type: ignore[name-match]
_SUPPORTED_PYTHONS = ['3.13', '3.12', '3.11', '3.10', '3.9', '3.8', '3.7', '3.6']
_SAFE_PATHS = ['/usr/bin', '/usr/local/bin']
_CONDA_VAR = 'CONDA_PREFIX'
_CURRENT_VERSION = '%s.%s' % (sys.version_info.major, sys.version_info.minor)
@@ -30,7 +35,7 @@ class InvalidPythonEnvironment(Exception):
"""
class _BaseEnvironment(object):
class _BaseEnvironment:
@memoize_method
def get_grammar(self):
version_string = '%s.%s' % (self.version_info.major, self.version_info.minor)
@@ -61,8 +66,9 @@ class Environment(_BaseEnvironment):
"""
_subprocess = None
def __init__(self, executable):
def __init__(self, executable, env_vars=None):
self._start_executable = executable
self._env_vars = env_vars
# Initialize the environment
self._get_subprocess()
@@ -71,7 +77,8 @@ class Environment(_BaseEnvironment):
return self._subprocess
try:
self._subprocess = CompiledSubprocess(self._start_executable)
self._subprocess = CompiledSubprocess(self._start_executable,
env_vars=self._env_vars)
info = self._subprocess._send(None, _get_info)
except Exception as exc:
raise InvalidPythonEnvironment(
@@ -94,23 +101,16 @@ class Environment(_BaseEnvironment):
Like :data:`sys.version_info`: a tuple to show the current
Environment's Python version.
"""
# py2 sends bytes via pickle apparently?!
if self.version_info.major == 2:
self.executable = self.executable.decode()
self.path = self.path.decode()
# Adjust pickle protocol according to host and client version.
self._subprocess._pickle_protocol = highest_pickle_protocol([
sys.version_info, self.version_info])
return self._subprocess
def __repr__(self):
version = '.'.join(str(i) for i in self.version_info)
return '<%s: %s in %s>' % (self.__class__.__name__, version, self.path)
def get_inference_state_subprocess(self, inference_state):
def get_inference_state_subprocess(
self,
inference_state: 'InferenceState',
) -> InferenceStateSubprocess:
return InferenceStateSubprocess(inference_state, self._get_subprocess())
@memoize_method
@@ -129,11 +129,12 @@ class Environment(_BaseEnvironment):
return self._get_subprocess().get_sys_path()
class _SameEnvironmentMixin(object):
class _SameEnvironmentMixin:
def __init__(self):
self._start_executable = self.executable = sys.executable
self.path = sys.prefix
self.version_info = _VersionInfo(*sys.version_info[:3])
self._env_vars = None
class SameEnvironment(_SameEnvironmentMixin, Environment):
@@ -141,7 +142,10 @@ class SameEnvironment(_SameEnvironmentMixin, Environment):
class InterpreterEnvironment(_SameEnvironmentMixin, _BaseEnvironment):
def get_inference_state_subprocess(self, inference_state):
def get_inference_state_subprocess(
self,
inference_state: 'InferenceState',
) -> InferenceStateSameProcess:
return InferenceStateSameProcess(inference_state)
def get_sys_path(self):
@@ -264,7 +268,7 @@ def _get_cached_default_environment():
return InterpreterEnvironment()
def find_virtualenvs(paths=None, **kwargs):
def find_virtualenvs(paths=None, *, safe=True, use_environment_vars=True):
"""
:param paths: A list of paths in your file system to be scanned for
Virtualenvs. It will search in these paths and potentially execute the
@@ -281,47 +285,44 @@ def find_virtualenvs(paths=None, **kwargs):
:yields: :class:`.Environment`
"""
def py27_comp(paths=None, safe=True, use_environment_vars=True):
if paths is None:
paths = []
if paths is None:
paths = []
_used_paths = set()
_used_paths = set()
if use_environment_vars:
# Using this variable should be safe, because attackers might be
# able to drop files (via git) but not environment variables.
virtual_env = _get_virtual_env_from_var()
if virtual_env is not None:
yield virtual_env
_used_paths.add(virtual_env.path)
if use_environment_vars:
# Using this variable should be safe, because attackers might be
# able to drop files (via git) but not environment variables.
virtual_env = _get_virtual_env_from_var()
if virtual_env is not None:
yield virtual_env
_used_paths.add(virtual_env.path)
conda_env = _get_virtual_env_from_var(_CONDA_VAR)
if conda_env is not None:
yield conda_env
_used_paths.add(conda_env.path)
conda_env = _get_virtual_env_from_var(_CONDA_VAR)
if conda_env is not None:
yield conda_env
_used_paths.add(conda_env.path)
for directory in paths:
if not os.path.isdir(directory):
for directory in paths:
if not os.path.isdir(directory):
continue
directory = os.path.abspath(directory)
for path in os.listdir(directory):
path = os.path.join(directory, path)
if path in _used_paths:
# A path shouldn't be inferred twice.
continue
_used_paths.add(path)
directory = os.path.abspath(directory)
for path in os.listdir(directory):
path = os.path.join(directory, path)
if path in _used_paths:
# A path shouldn't be inferred twice.
continue
_used_paths.add(path)
try:
executable = _get_executable_path(path, safe=safe)
yield Environment(executable)
except InvalidPythonEnvironment:
pass
return py27_comp(paths, **kwargs)
try:
executable = _get_executable_path(path, safe=safe)
yield Environment(executable)
except InvalidPythonEnvironment:
pass
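A sketch of how the rewritten generator is consumed (the search path is hypothetical)::

    import jedi

    # Yields $VIRTUAL_ENV/$CONDA_PREFIX environments first, then venvs found in the given folders.
    for env in jedi.find_virtualenvs(paths=["/home/user/.virtualenvs"], safe=True):
        print(env.executable, env.version_info)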
def find_system_environments():
def find_system_environments(*, env_vars=None):
"""
Ignores virtualenvs and returns the Python versions that were installed on
your system. This might return nothing, if you're running Python e.g. from
@@ -333,14 +334,14 @@ def find_system_environments():
"""
for version_string in _SUPPORTED_PYTHONS:
try:
yield get_system_environment(version_string)
yield get_system_environment(version_string, env_vars=env_vars)
except InvalidPythonEnvironment:
pass
# TODO: this function should probably return a list of environments since
# multiple Python installations can be found on a system for the same version.
def get_system_environment(version):
def get_system_environment(version, *, env_vars=None):
"""
Return the first Python environment found for a string of the form 'X.Y'
where X and Y are the major and minor versions of Python.
@@ -357,24 +358,24 @@ def get_system_environment(version):
if os.name == 'nt':
for exe in _get_executables_from_windows_registry(version):
try:
return Environment(exe)
return Environment(exe, env_vars=env_vars)
except InvalidPythonEnvironment:
pass
raise InvalidPythonEnvironment("Cannot find executable python%s." % version)
def create_environment(path, safe=True):
def create_environment(path, *, safe=True, env_vars=None):
"""
Make it possible to manually create an Environment object by specifying a
Virtualenv path or an executable path.
Virtualenv path or an executable path and optional environment variables.
:raises: :exc:`.InvalidPythonEnvironment`
:returns: :class:`.Environment`
"""
if os.path.isfile(path):
_assert_safe(path, safe)
return Environment(path)
return Environment(_get_executable_path(path, safe=safe))
return Environment(path, env_vars=env_vars)
return Environment(_get_executable_path(path, safe=safe), env_vars=env_vars)
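Sketch of passing such an environment to a ``Script`` (the virtualenv path is made up)::

    import jedi

    env = jedi.create_environment("/home/user/.virtualenvs/myproject", safe=True)
    script = jedi.Script("import os\nos.", environment=env)
    print([c.name for c in script.complete(2, 3)][:5])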
def _get_executable_path(path, safe=True):
@@ -383,10 +384,13 @@ def _get_executable_path(path, safe=True):
"""
if os.name == 'nt':
python = os.path.join(path, 'Scripts', 'python.exe')
pythons = [os.path.join(path, 'Scripts', 'python.exe'), os.path.join(path, 'python.exe')]
else:
pythons = [os.path.join(path, 'bin', 'python')]
for python in pythons:
if os.path.exists(python):
break
else:
python = os.path.join(path, 'bin', 'python')
if not os.path.exists(python):
raise InvalidPythonEnvironment("%s seems to be missing." % python)
_assert_safe(python, safe)
@@ -394,11 +398,7 @@ def _get_executable_path(path, safe=True):
def _get_executables_from_windows_registry(version):
# The winreg module is named _winreg on Python 2.
try:
import winreg
except ImportError:
import _winreg as winreg
import winreg
# TODO: support Python Anaconda.
sub_keys = [

View File

@@ -8,7 +8,7 @@ def parso_to_jedi_errors(grammar, module_node):
return [SyntaxError(e) for e in grammar.iter_errors(module_node)]
class SyntaxError(object):
class SyntaxError:
"""
Syntax errors are generated by :meth:`.Script.get_syntax_errors`.
"""

View File

@@ -23,7 +23,7 @@ class RefactoringError(_JediError):
Refactorings can fail for various reasons. So if you work with refactorings
like :meth:`.Script.rename`, :meth:`.Script.inline`,
:meth:`.Script.extract_variable` and :meth:`.Script.extract_function`, make
sure to catch these. The descriptions in the errors are ususally valuable
sure to catch these. The descriptions in the errors are usually valuable
for end users.
A typical ``RefactoringError`` would tell the user that inlining is not

View File

@@ -1,6 +1,5 @@
import os
from jedi._compatibility import FileNotFoundError, force_unicode, scandir
from jedi.api import classes
from jedi.api.strings import StringName, get_quote_ending
from jedi.api.helpers import match
@@ -8,7 +7,7 @@ from jedi.inference.helpers import get_str_or_none
class PathName(StringName):
api_type = u'path'
api_type = 'path'
def complete_file_name(inference_state, module_context, start_leaf, quote, string,
@@ -36,9 +35,9 @@ def complete_file_name(inference_state, module_context, start_leaf, quote, strin
is_in_os_path_join = False
else:
string = to_be_added + string
base_path = os.path.join(inference_state.project._path, string)
base_path = os.path.join(inference_state.project.path, string)
try:
listed = sorted(scandir(base_path), key=lambda e: e.name)
listed = sorted(os.scandir(base_path), key=lambda e: e.name)
# OSError: [Errno 36] File name too long: '...'
except (FileNotFoundError, OSError):
return
@@ -94,7 +93,7 @@ def _add_strings(context, nodes, add_slash=False):
return None
if not first and add_slash:
string += os.path.sep
string += force_unicode(s)
string += s
first = False
return string

View File

@@ -6,11 +6,11 @@ from collections import namedtuple
from textwrap import dedent
from itertools import chain
from functools import wraps
from inspect import Parameter
from parso.python.parser import Parser
from parso.python import tree
from jedi._compatibility import u, Parameter
from jedi.inference.base_value import NO_VALUES
from jedi.inference.syntax_tree import infer_atom
from jedi.inference.helpers import infer_call_of_leaf
@@ -44,7 +44,10 @@ def match(string, like_name, fuzzy=False):
def sorted_definitions(defs):
# Note: `or ''` below is required because `module_path` could be None.
return sorted(defs, key=lambda x: (x.module_path or '', x.line or 0, x.column or 0, x.name))
return sorted(defs, key=lambda x: (str(x.module_path or ''),
x.line or 0,
x.column or 0,
x.name))
def get_on_completion_name(module_node, lines, position):
@@ -84,18 +87,18 @@ def _get_code_for_stack(code_lines, leaf, position):
# If we're not on a comment simply get the previous leaf and proceed.
leaf = leaf.get_previous_leaf()
if leaf is None:
return u('') # At the beginning of the file.
return '' # At the beginning of the file.
is_after_newline = leaf.type == 'newline'
while leaf.type == 'newline':
leaf = leaf.get_previous_leaf()
if leaf is None:
return u('')
return ''
if leaf.type == 'error_leaf' or leaf.type == 'string':
if leaf.start_pos[0] < position[0]:
# On a different line, we just begin anew.
return u('')
return ''
# Error leafs cannot be parsed, completion in strings is also
# impossible.
@@ -111,7 +114,7 @@ def _get_code_for_stack(code_lines, leaf, position):
if user_stmt.start_pos[1] > position[1]:
# This means that it's actually a dedent and that means that we
# start without value (part of a suite).
return u('')
return ''
# This is basically getting the relevant lines.
return _get_code(code_lines, user_stmt.get_start_pos_of_prefix(), position)
@@ -195,15 +198,13 @@ def filter_follow_imports(names, follow_builtin_imports=False):
if found_builtin:
yield name
else:
for new_name in new_names:
yield new_name
yield from new_names
else:
yield name
class CallDetails(object):
class CallDetails:
def __init__(self, bracket_leaf, children, position):
['bracket_leaf', 'call_index', 'keyword_name_str']
self.bracket_leaf = bracket_leaf
self._children = children
self._position = position
@@ -279,7 +280,7 @@ class CallDetails(object):
def count_positional_arguments(self):
count = 0
for star_count, key_start, had_equal in self._list_arguments()[:-1]:
if star_count:
if star_count or key_start:
break
count += 1
return count
@@ -294,8 +295,7 @@ def _iter_arguments(nodes, position):
# Returns Generator[Tuple[star_count, Optional[key_start: str], had_equal]]
nodes_before = [c for c in nodes if c.start_pos < position]
if nodes_before[-1].type == 'arglist':
for x in _iter_arguments(nodes_before[-1].children, position):
yield x # Python 2 :(
yield from _iter_arguments(nodes_before[-1].children, position)
return
previous_node_yielded = False
@@ -306,7 +306,7 @@ def _iter_arguments(nodes, position):
first = node.children[0]
second = node.children[1]
if second == '=':
if second.start_pos < position:
if second.start_pos < position and first.type == 'name':
yield 0, first.value, True
else:
yield 0, remove_after_pos(first), False
@@ -320,7 +320,7 @@ def _iter_arguments(nodes, position):
else:
yield 0, None, False
stars_seen = 0
elif node.type in ('testlist', 'testlist_star_expr'): # testlist is Python 2
elif node.type == 'testlist_star_expr':
for n in node.children[::2]:
if n.type == 'star_expr':
stars_seen = 1

View File

@@ -3,6 +3,9 @@ TODO Some parts of this module are still not well documented.
"""
from jedi.inference import compiled
from jedi.inference.base_value import ValueSet
from jedi.inference.filters import ParserTreeFilter, MergedFilter
from jedi.inference.names import TreeNameDefinition
from jedi.inference.compiled import mixed
from jedi.inference.compiled.access import create_access_path
from jedi.inference.context import ModuleContext
@@ -14,15 +17,42 @@ def _create(inference_state, obj):
)
class NamespaceObject(object):
class NamespaceObject:
def __init__(self, dct):
self.__dict__ = dct
class MixedTreeName(TreeNameDefinition):
def infer(self):
"""
In an IPython notebook it is typical that some parts of the provided code
were already executed. In that case, if something is not properly
inferred, it should still be inferred from the variables that are already known.
"""
inferred = super().infer()
if not inferred:
for compiled_value in self.parent_context.mixed_values:
for f in compiled_value.get_filters():
values = ValueSet.from_sets(
n.infer() for n in f.get(self.string_name)
)
if values:
return values
return inferred
class MixedParserTreeFilter(ParserTreeFilter):
name_class = MixedTreeName
class MixedModuleContext(ModuleContext):
def __init__(self, tree_module_value, namespaces):
super(MixedModuleContext, self).__init__(tree_module_value)
self._namespace_objects = [NamespaceObject(n) for n in namespaces]
super().__init__(tree_module_value)
self.mixed_values = [
self._get_mixed_object(
_create(self.inference_state, NamespaceObject(n))
) for n in namespaces
]
def _get_mixed_object(self, compiled_value):
return mixed.MixedObject(
@@ -30,12 +60,15 @@ class MixedModuleContext(ModuleContext):
tree_value=self._value
)
def get_filters(self, *args, **kwargs):
for filter in self._value.as_context().get_filters(*args, **kwargs):
yield filter
def get_filters(self, until_position=None, origin_scope=None):
yield MergedFilter(
MixedParserTreeFilter(
parent_context=self,
until_position=until_position,
origin_scope=origin_scope
),
self.get_global_filter(),
)
for namespace_obj in self._namespace_objects:
compiled_value = _create(self.inference_state, namespace_obj)
mixed_object = self._get_mixed_object(compiled_value)
for filter in mixed_object.get_filters(*args, **kwargs):
yield filter
for mixed_object in self.mixed_values:
yield from mixed_object.get_filters(until_position, origin_scope)

View File

@@ -1,22 +1,19 @@
import pydoc
from contextlib import suppress
from typing import Dict, Optional
from jedi.inference.utils import ignored
from jedi.inference.names import AbstractArbitraryName
try:
from pydoc_data import topics as pydoc_topics
from pydoc_data import topics
pydoc_topics: Optional[Dict[str, str]] = topics.topics
except ImportError:
# Python 2
try:
import pydoc_topics
except ImportError:
# This is for Python 3 embeddable version, which dont have
# pydoc_data module in its file python3x.zip.
pydoc_topics = None
# Python 3.6.8 embeddable does not have pydoc_data.
pydoc_topics = None
class KeywordName(AbstractArbitraryName):
api_type = u'keyword'
api_type = 'keyword'
def py__doc__(self):
return imitate_pydoc(self.string_name)
@@ -30,11 +27,8 @@ def imitate_pydoc(string):
if pydoc_topics is None:
return ''
# str needed because of possible unicode stuff in py2k (pydoc doesn't work
# with unicode strings)
string = str(string)
h = pydoc.help
with ignored(KeyError):
with suppress(KeyError):
# try to access symbols
string = h.symbols[string]
string, _, related = string.partition(' ')
@@ -52,6 +46,6 @@ def imitate_pydoc(string):
return ''
try:
return pydoc_topics.topics[label].strip() if pydoc_topics else ''
return pydoc_topics[label].strip() if pydoc_topics else ''
except KeyError:
return ''

View File

@@ -7,26 +7,21 @@ flexibility to define sys paths and Python interpreters for a project,
Projects can be saved to disk and loaded again, to allow project definitions to
be used across repositories.
"""
import os
import errno
import json
import sys
from pathlib import Path
from itertools import chain
from jedi._compatibility import FileNotFoundError, PermissionError, \
IsADirectoryError, NotADirectoryError
from jedi import debug
from jedi.api.environment import get_cached_default_environment, create_environment
from jedi.api.exceptions import WrongVersion
from jedi.api.completion import search_in_module
from jedi.api.helpers import split_search_string, get_module_names
from jedi._compatibility import force_unicode
from jedi.inference.imports import load_module_from_path, \
load_namespace_from_path, iter_module_names
from jedi.inference.sys_path import discover_buildout_paths
from jedi.inference.cache import inference_state_as_method_param_cache
from jedi.inference.references import recurse_find_python_folders_and_files, search_in_file_ios
from jedi.file_io import FolderIO
from jedi.common import traverse_parents
_CONFIG_FOLDER = '.jedi'
_CONTAINS_POTENTIAL_PROJECT = \
@@ -61,11 +56,7 @@ def _remove_duplicates_from_path(path):
yield p
def _force_unicode_list(lst):
return list(map(force_unicode, lst))
class Project(object):
class Project:
"""
Projects are a simple way to manage Python folders and define how Jedi does
import resolution. It is mostly used as a parameter to :class:`.Script`.
@@ -75,11 +66,11 @@ class Project(object):
@staticmethod
def _get_config_folder_path(base_path):
return os.path.join(base_path, _CONFIG_FOLDER)
return base_path.joinpath(_CONFIG_FOLDER)
@staticmethod
def _get_json_path(base_path):
return os.path.join(Project._get_config_folder_path(base_path), 'project.json')
return Project._get_config_folder_path(base_path).joinpath('project.json')
@classmethod
def load(cls, path):
@@ -89,6 +80,8 @@ class Project(object):
:param path: The path of the directory you want to use as a project.
"""
if isinstance(path, str):
path = Path(path)
with open(cls._get_json_path(path)) as f:
version, data = json.load(f)
@@ -107,17 +100,22 @@ class Project(object):
data.pop('_environment', None)
data.pop('_django', None) # TODO make django setting public?
data = {k.lstrip('_'): v for k, v in data.items()}
data['path'] = str(data['path'])
# TODO when dropping Python 2 use pathlib.Path.mkdir(parents=True, exist_ok=True)
try:
os.makedirs(self._get_config_folder_path(self._path))
except OSError as e:
if e.errno != errno.EEXIST:
raise
self._get_config_folder_path(self._path).mkdir(parents=True, exist_ok=True)
with open(self._get_json_path(self._path), 'w') as f:
return json.dump((_SERIALIZER_VERSION, data), f)
def __init__(self, path, **kwargs):
def __init__(
self,
path,
*,
environment_path=None,
load_unsafe_extensions=False,
sys_path=None,
added_sys_path=(),
smart_sys_path=True,
) -> None:
"""
:param path: The base path for this project.
:param environment_path: The Python executable path, typically the path
@@ -136,19 +134,52 @@ class Project(object):
local directories. Otherwise you will have to rely on your packages
being properly configured on the ``sys.path``.
"""
def py2_comp(path, environment_path=None, load_unsafe_extensions=False,
sys_path=None, added_sys_path=(), smart_sys_path=True):
self._path = os.path.abspath(path)
self._environment_path = environment_path
self._sys_path = sys_path
self._smart_sys_path = smart_sys_path
self._load_unsafe_extensions = load_unsafe_extensions
self._django = False
self.added_sys_path = list(added_sys_path)
"""The sys path that is going to be added at the end of the """
if isinstance(path, str):
path = Path(path).absolute()
self._path = path
py2_comp(path, **kwargs)
self._environment_path = environment_path
if sys_path is not None:
# Remap potential pathlib.Path entries
sys_path = list(map(str, sys_path))
self._sys_path = sys_path
self._smart_sys_path = smart_sys_path
self._load_unsafe_extensions = load_unsafe_extensions
self._django = False
# Remap potential pathlib.Path entries
self.added_sys_path = list(map(str, added_sys_path))
"""The sys path that is going to be added at the end of the """
@property
def path(self):
"""
The base path for this project.
"""
return self._path
@property
def sys_path(self):
"""
The sys path provided to this project. This can be None and in that
case will be auto generated.
"""
return self._sys_path
@property
def smart_sys_path(self):
"""
Whether the sys path is calculated in a smart way, where
additional paths are added.
"""
return self._smart_sys_path
@property
def load_unsafe_extensions(self):
"""
Whether the project loads unsafe extensions.
"""
return self._load_unsafe_extensions
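A hedged sketch of constructing a ``Project`` with the new keyword-only parameters (the paths are illustrative)::

    import jedi

    project = jedi.Project("/home/user/myproject",
                           added_sys_path=["./src"],
                           smart_sys_path=True)
    script = jedi.Script("import mymodule\n",
                         path="/home/user/myproject/example.py",
                         project=project)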
@inference_state_as_method_param_cache()
def _get_base_sys_path(self, inference_state):
@@ -175,23 +206,27 @@ class Project(object):
sys_path = list(self._sys_path)
if self._smart_sys_path:
prefixed.append(self._path)
prefixed.append(str(self._path))
if inference_state.script_path is not None:
suffixed += discover_buildout_paths(inference_state, inference_state.script_path)
suffixed += map(str, discover_buildout_paths(
inference_state,
inference_state.script_path
))
if add_parent_paths:
# Collect directories in upward search by:
# 1. Skipping directories with __init__.py
# 2. Stopping immediately when above self._path
traversed = []
for parent_path in traverse_parents(inference_state.script_path):
if parent_path == self._path or not parent_path.startswith(self._path):
for parent_path in inference_state.script_path.parents:
if parent_path == self._path \
or self._path not in parent_path.parents:
break
if not add_init_paths \
and os.path.isfile(os.path.join(parent_path, "__init__.py")):
and parent_path.joinpath("__init__.py").is_file():
continue
traversed.append(parent_path)
traversed.append(str(parent_path))
# AFAIK some libraries have imports like `foo.foo.bar`, which
# leads to the conclusion to by default prefer longer paths
@@ -199,10 +234,10 @@ class Project(object):
suffixed += reversed(traversed)
if self._django:
prefixed.append(self._path)
prefixed.append(str(self._path))
path = prefixed + sys_path + suffixed
return list(_force_unicode_list(_remove_duplicates_from_path(path)))
return list(_remove_duplicates_from_path(path))
def get_environment(self):
if self._environment is None:
@@ -212,7 +247,7 @@ class Project(object):
self._environment = get_cached_default_environment()
return self._environment
def search(self, string, **kwargs):
def search(self, string, *, all_scopes=False):
"""
Searches a name in the whole project. If the project is very big,
at some point Jedi will stop searching. However it's also very much
@@ -233,7 +268,7 @@ class Project(object):
functions and classes.
:yields: :class:`.Name`
"""
return self._search(string, **kwargs)
return self._search_func(string, all_scopes=all_scopes)
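For example (the project path and search string are made up)::

    import jedi

    project = jedi.get_default_project("/home/user/myproject")
    for name in project.search("class MyClass", all_scopes=False):
        print(name.module_path, name.line, name.description)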
def complete_search(self, string, **kwargs):
"""
@@ -247,9 +282,6 @@ class Project(object):
"""
return self._search_func(string, complete=True, **kwargs)
def _search(self, string, all_scopes=False): # Python 2..
return self._search_func(string, all_scopes=all_scopes)
@_try_to_skip_duplicates
def _search_func(self, string, complete=False, all_scopes=False):
# Using a Script is the easiest way to get an empty module context.
@@ -258,16 +290,12 @@ class Project(object):
inference_state = s._inference_state
empty_module_context = s._get_module_context()
if inference_state.grammar.version_info < (3, 6) or sys.version_info < (3, 6):
raise NotImplementedError(
"No support for refactorings/search on Python 2/3.5"
)
debug.dbg('Search for string %s, complete=%s', string, complete)
wanted_type, wanted_names = split_search_string(string)
name = wanted_names[0]
stub_folder_name = name + '-stubs'
ios = recurse_find_python_folders_and_files(FolderIO(self._path))
ios = recurse_find_python_folders_and_files(FolderIO(str(self._path)))
file_ios = []
# 1. Search for modules in the current project
@@ -288,14 +316,13 @@ class Project(object):
continue
else:
file_ios.append(file_io)
file_name = os.path.basename(file_io.path)
if file_name in (name + '.py', name + '.pyi'):
if Path(file_io.path).name in (name + '.py', name + '.pyi'):
m = load_module_from_path(inference_state, file_io).as_context()
else:
continue
debug.dbg('Search of a specific module %s', m)
for x in search_in_module(
yield from search_in_module(
inference_state,
m,
names=[m.name],
@@ -304,15 +331,15 @@ class Project(object):
complete=complete,
convert=True,
ignore_imports=True,
):
yield x # Python 2...
)
# 2. Search for identifiers in the project.
for module_context in search_in_file_ios(inference_state, file_ios, name):
for module_context in search_in_file_ios(inference_state, file_ios,
name, complete=complete):
names = get_module_names(module_context.tree_node, all_scopes=all_scopes)
names = [module_context.create_name(n) for n in names]
names = _remove_imports(names)
for x in search_in_module(
yield from search_in_module(
inference_state,
module_context,
names=names,
@@ -320,18 +347,16 @@ class Project(object):
wanted_names=wanted_names,
complete=complete,
ignore_imports=True,
):
yield x # Python 2...
)
# 3. Search for modules on sys.path
sys_path = [
p for p in self._get_sys_path(inference_state)
# Exclude folders that are handled by recursing of the Python
# folders.
if not p.startswith(self._path)
# Exclude the current folder which is handled by recursing the folders.
if p != self._path
]
names = list(iter_module_names(inference_state, empty_module_context, sys_path))
for x in search_in_module(
yield from search_in_module(
inference_state,
empty_module_context,
names=names,
@@ -339,8 +364,7 @@ class Project(object):
wanted_names=wanted_names,
complete=complete,
convert=True,
):
yield x # Python 2...
)
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self._path)
@@ -348,15 +372,18 @@ class Project(object):
def _is_potential_project(path):
for name in _CONTAINS_POTENTIAL_PROJECT:
if os.path.exists(os.path.join(path, name)):
return True
try:
if path.joinpath(name).exists():
return True
except OSError:
continue
return False
def _is_django_path(directory):
""" Detects the path of the very well known Django library (if used) """
try:
with open(os.path.join(directory, 'manage.py'), 'rb') as f:
with open(directory.joinpath('manage.py'), 'rb') as f:
return b"DJANGO_SETTINGS_MODULE" in f.read()
except (FileNotFoundError, IsADirectoryError, PermissionError):
return False
@@ -373,12 +400,14 @@ def get_default_project(path=None):
``requirements.txt`` and ``MANIFEST.in``.
"""
if path is None:
path = os.getcwd()
path = Path.cwd()
elif isinstance(path, str):
path = Path(path)
check = os.path.realpath(path)
check = path.absolute()
probable_path = None
first_no_init_file = None
for dir in traverse_parents(check, include_current=True):
for dir in chain([check], check.parents):
try:
return Project.load(dir)
except (FileNotFoundError, IsADirectoryError, PermissionError):
@@ -387,11 +416,11 @@ def get_default_project(path=None):
continue
if first_no_init_file is None:
if os.path.exists(os.path.join(dir, '__init__.py')):
if dir.joinpath('__init__.py').exists():
# In the case that a __init__.py exists, it's in 99% just a
# Python package and the project sits at least one level above.
continue
else:
elif not dir.is_file():
first_no_init_file = dir
if _is_django_path(dir):
@@ -403,18 +432,17 @@ def get_default_project(path=None):
probable_path = dir
if probable_path is not None:
# TODO search for setup.py etc
return Project(probable_path)
if first_no_init_file is not None:
return Project(first_no_init_file)
curdir = path if os.path.isdir(path) else os.path.dirname(path)
curdir = path if path.is_dir() else path.parent
return Project(curdir)
def _remove_imports(names):
return [
n for n in names
if n.tree_name is None or n.api_type != 'module'
if n.tree_name is None or n.api_type not in ('module', 'namespace')
]

View File

@@ -1,11 +1,11 @@
from os.path import dirname, basename, join, relpath
import os
import re
import difflib
from pathlib import Path
from typing import Dict, Iterable, Tuple
from parso import split_lines
from jedi.api.exceptions import RefactoringError
from jedi.inference.value.namespace import ImplicitNSName
EXPRESSION_PARTS = (
'or_test and_test not_test comparison '
@@ -13,7 +13,7 @@ EXPRESSION_PARTS = (
).split()
class ChangedFile(object):
class ChangedFile:
def __init__(self, inference_state, from_path, to_path,
module_node, node_to_str_map):
self._inference_state = inference_state
@@ -39,19 +39,25 @@ class ChangedFile(object):
if new_lines[-1] != '':
new_lines[-1] += '\n'
project_path = self._inference_state.project._path
project_path = self._inference_state.project.path
if self._from_path is None:
from_p = ''
else:
from_p = relpath(self._from_path, project_path)
try:
from_p = self._from_path.relative_to(project_path)
except ValueError: # Happens if the path is not inside the project_path
from_p = self._from_path
if self._to_path is None:
to_p = ''
else:
to_p = relpath(self._to_path, project_path)
try:
to_p = self._to_path.relative_to(project_path)
except ValueError:
to_p = self._to_path
diff = difflib.unified_diff(
old_lines, new_lines,
fromfile=from_p,
tofile=to_p,
fromfile=str(from_p),
tofile=str(to_p),
)
# Apparently there's a space at the end of the diff - for whatever
# reason.
@@ -73,23 +79,21 @@ class ChangedFile(object):
return '<%s: %s>' % (self.__class__.__name__, self._from_path)
class Refactoring(object):
class Refactoring:
def __init__(self, inference_state, file_to_node_changes, renames=()):
self._inference_state = inference_state
self._renames = renames
self._file_to_node_changes = file_to_node_changes
def get_changed_files(self):
"""
Returns a path to ``ChangedFile`` map.
"""
def get_changed_files(self) -> Dict[Path, ChangedFile]:
def calculate_to_path(p):
if p is None:
return p
p = str(p)
for from_, to in renames:
if p.startswith(from_):
p = to + p[len(from_):]
return p
if p.startswith(str(from_)):
p = str(to) + p[len(str(from_)):]
return Path(p)
renames = self.get_renames()
return {
@@ -99,23 +103,26 @@ class Refactoring(object):
to_path=calculate_to_path(path),
module_node=next(iter(map_)).get_root_node(),
node_to_str_map=map_
) for path, map_ in sorted(self._file_to_node_changes.items())
)
# We need to use `or`, because the path can be None
for path, map_ in sorted(
self._file_to_node_changes.items(),
key=lambda x: x[0] or Path("")
)
}
def get_renames(self):
def get_renames(self) -> Iterable[Tuple[Path, Path]]:
"""
Files can be renamed in a refactoring.
Returns ``Iterable[Tuple[str, str]]``.
"""
return sorted(self._renames)
def get_diff(self):
text = ''
project_path = self._inference_state.project._path
project_path = self._inference_state.project.path
for from_, to in self.get_renames():
text += 'rename from %s\nrename to %s\n' \
% (relpath(from_, project_path), relpath(to, project_path))
% (_try_relative_to(from_, project_path), _try_relative_to(to, project_path))
return text + ''.join(f.get_diff() for f in self.get_changed_files().values())
@@ -127,17 +134,14 @@ class Refactoring(object):
f.apply()
for old, new in self.get_renames():
os.rename(old, new)
old.rename(new)
def _calculate_rename(path, new_name):
name = basename(path)
dir_ = dirname(path)
if name in ('__init__.py', '__init__.pyi'):
parent_dir = dirname(dir_)
return dir_, join(parent_dir, new_name)
ending = re.search(r'\.pyi?$', name).group(0)
return path, join(dir_, new_name + ending)
dir_ = path.parent
if path.name in ('__init__.py', '__init__.pyi'):
return dir_, dir_.parent.joinpath(new_name)
return path, dir_.joinpath(new_name + path.suffix)
def rename(inference_state, definitions, new_name):
@@ -148,12 +152,16 @@ def rename(inference_state, definitions, new_name):
raise RefactoringError("There is no name under the cursor")
for d in definitions:
# This private access is ok in a way. It's not public to
# protect Jedi users from seeing it.
tree_name = d._name.tree_name
if d.type == 'module' and tree_name is None:
file_renames.add(_calculate_rename(d.module_path, new_name))
if d.type == 'module' and tree_name is None and d.module_path is not None:
p = Path(d.module_path)
file_renames.add(_calculate_rename(p, new_name))
elif isinstance(d._name, ImplicitNSName):
for p in d._name._value.py__path__():
file_renames.add(_calculate_rename(Path(p), new_name))
else:
# This private access is ok in a way. It's not public to
# protect Jedi users from seeing it.
if tree_name is not None:
fmap = file_tree_name_map.setdefault(d.module_path, {})
fmap[tree_name] = tree_name.prefix + new_name
@@ -163,8 +171,8 @@ def rename(inference_state, definitions, new_name):
def inline(inference_state, names):
if not names:
raise RefactoringError("There is no name under the cursor")
if any(n.api_type == 'module' for n in names):
raise RefactoringError("Cannot inline imports or modules")
if any(n.api_type in ('module', 'namespace') for n in names):
raise RefactoringError("Cannot inline imports, modules or namespaces")
if any(n.tree_name is None for n in names):
raise RefactoringError("Cannot inline builtins/extensions")
@@ -247,3 +255,10 @@ def _remove_indent_of_prefix(prefix):
Removes the last indentation of a prefix, e.g. " \n \n " becomes " \n \n".
"""
return ''.join(split_lines(prefix, keepends=True)[:-1])
def _try_relative_to(path: Path, base: Path) -> Path:
try:
return path.relative_to(base)
except ValueError:
return path
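As an aside, the methods above form jedi's public refactoring surface. A minimal usage sketch, assuming jedi's Script.rename API; the snippet and file name are made up:

    import jedi

    source = "def old_name():\n    return 1\n\nold_name()\n"
    script = jedi.Script(source, path="example.py")  # hypothetical file

    # rename() returns a Refactoring object like the class shown above.
    refactoring = script.rename(line=1, column=4, new_name="new_name")

    print(refactoring.get_diff())   # unified diff, plus any "rename from/to" headers
    # refactoring.apply()           # would write the changes (and renames) to disk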

View File

@@ -350,8 +350,7 @@ def _find_non_global_names(nodes):
if node.type == 'trailer' and node.children[0] == '.':
continue
for x in _find_non_global_names(children): # Python 2...
yield x
yield from _find_non_global_names(children)
def _get_code_insertion_node(node, is_bound_method):

View File

@@ -9,7 +9,7 @@ just use IPython instead::
Then you will be able to use Jedi completer in your Python interpreter::
$ python
Python 2.7.2+ (default, Jul 20 2012, 22:15:08)
Python 3.9.2+ (default, Jul 20 2020, 22:15:08)
[GCC 4.6.1] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import os

View File

@@ -9,7 +9,6 @@ names in a module, but pretty much an arbitrary string.
"""
import re
from jedi._compatibility import unicode
from jedi.inference.names import AbstractArbitraryName
from jedi.inference.helpers import infer_call_of_leaf
from jedi.api.classes import Completion
@@ -19,7 +18,7 @@ _sentinel = object()
class StringName(AbstractArbitraryName):
api_type = u'string'
api_type = 'string'
is_value_name = False
@@ -37,8 +36,11 @@ def complete_dict(module_context, code_lines, leaf, position, string, fuzzy):
string = cut_value_at_position(leaf, position)
context = module_context.create_context(bracket_leaf)
before_bracket_leaf = bracket_leaf.get_previous_leaf()
if before_bracket_leaf.type in ('atom', 'trailer', 'name'):
before_node = before_bracket_leaf = bracket_leaf.get_previous_leaf()
if before_node in (')', ']', '}'):
before_node = before_node.parent
if before_node.type in ('atom', 'trailer', 'name'):
values = infer_call_of_leaf(context, before_bracket_leaf)
return list(_completions_for_dicts(
module_context.inference_state,
@@ -65,7 +67,7 @@ def _completions_for_dicts(inference_state, dicts, literal_string, cut_end_quote
def _create_repr_string(literal_string, dict_key):
if not isinstance(dict_key, (unicode, bytes)) or not literal_string:
if not isinstance(dict_key, (str, bytes)) or not literal_string:
return repr(dict_key)
r = repr(dict_key)
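The dict-key completion this implements is easiest to see from the REPL interface. A small sketch, assuming jedi's public Interpreter API; the namespace is made up and the exact completion text may differ by version:

    import jedi

    namespace = {"config": {"host": "localhost", "port": 8080}}

    # Completing right after config[' offers the existing dictionary keys.
    completions = jedi.Interpreter("config['", [namespace]).complete()
    print([c.name for c in completions])   # roughly: ["host'", "port'"]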

View File

@@ -13,14 +13,15 @@ these variables are being cleaned after every API usage.
"""
import time
from functools import wraps
from typing import Any, Dict, Tuple
from jedi import settings
from parso.cache import parser_cache
_time_caches = {}
_time_caches: Dict[str, Dict[Any, Tuple[float, Any]]] = {}
def clear_time_caches(delete_all=False):
def clear_time_caches(delete_all: bool = False) -> None:
""" Jedi caches many things, that should be completed after each completion
finishes.
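For illustration, the annotation above implies a two-level layout: cache name -> key -> (expiry timestamp, value). A minimal, generic sketch of that shape (not jedi's actual decorator):

    import time
    from typing import Any, Dict, Tuple

    _time_caches: Dict[str, Dict[Any, Tuple[float, Any]]] = {}

    def cache_set(cache_name: str, key: Any, value: Any, ttl: float = 10.0) -> None:
        # Store the value together with the timestamp at which it expires.
        _time_caches.setdefault(cache_name, {})[key] = (time.time() + ttl, value)

    def cache_get(cache_name: str, key: Any) -> Any:
        # Only return the value while its expiry timestamp lies in the future.
        entry = _time_caches.get(cache_name, {}).get(key)
        if entry is not None and entry[0] > time.time():
            return entry[1]
        return None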

View File

@@ -1,18 +1,6 @@
import os
from contextlib import contextmanager
def traverse_parents(path, include_current=False):
if not include_current:
path = os.path.dirname(path)
previous = None
while previous != path:
yield path
previous = path
path = os.path.dirname(path)
@contextmanager
def monkeypatch(obj, attribute_name, new_value):
"""

View File

@@ -1,8 +1,7 @@
import os
import time
from contextlib import contextmanager
from jedi._compatibility import encoding, is_py3, u
from typing import Callable, Optional
_inited = False
@@ -22,7 +21,7 @@ try:
raise ImportError
else:
# Use colorama for nicer console output.
from colorama import Fore, init
from colorama import Fore, init # type: ignore[import]
from colorama import initialise
def _lazy_colorama_init(): # noqa: F811
@@ -47,7 +46,7 @@ try:
_inited = True
except ImportError:
class Fore(object):
class Fore: # type: ignore[no-redef]
RED = ''
GREEN = ''
YELLOW = ''
@@ -64,7 +63,7 @@ enable_warning = False
enable_notice = False
# callback, interface: level, str
debug_function = None
debug_function: Optional[Callable[[str, str], None]] = None
_debug_indent = 0
_start_time = time.time()
@@ -97,26 +96,21 @@ def increase_indent_cm(title=None, color='MAGENTA'):
dbg('End: ' + title, color=color)
def dbg(message, *args, **kwargs):
def dbg(message, *args, color='GREEN'):
""" Looks at the stack, to see if a debug message should be printed. """
# Python 2 compatibility, because it doesn't understand default args
color = kwargs.pop('color', 'GREEN')
assert color
if debug_function and enable_notice:
i = ' ' * _debug_indent
_lazy_colorama_init()
debug_function(color, i + 'dbg: ' + message % tuple(u(repr(a)) for a in args))
debug_function(color, i + 'dbg: ' + message % tuple(repr(a) for a in args))
def warning(message, *args, **kwargs):
format = kwargs.pop('format', True)
assert not kwargs
def warning(message, *args, format=True):
if debug_function and enable_warning:
i = ' ' * _debug_indent
if format:
message = message % tuple(u(repr(a)) for a in args)
message = message % tuple(repr(a) for a in args)
debug_function('RED', i + 'warning: ' + message)
@@ -135,9 +129,4 @@ def print_to_stdout(color, str_out):
"""
col = getattr(Fore, color)
_lazy_colorama_init()
if not is_py3:
str_out = str_out.encode(encoding, 'replace')
print(col + str_out + Fore.RESET)
# debug_function = print_to_stdout
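Any callable matching the Callable[[str, str], None] shape above can be plugged in as the debug hook. A short sketch, assuming jedi's public set_debug_function helper; the sample script is arbitrary:

    import jedi

    def log_debug(color: str, message: str) -> None:
        # Receives the same (color, formatted message) pair as print_to_stdout.
        print("[%s] %s" % (color, message))

    # Installs log_debug as debug.debug_function and enables notices/warnings.
    jedi.set_debug_function(log_debug)
    jedi.Script("import os\nos.pa").complete()   # now emits dbg/warning lines through log_debug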

View File

@@ -3,7 +3,7 @@ import os
from parso import file_io
class AbstractFolderIO(object):
class AbstractFolderIO:
def __init__(self, path):
self.path = path
@@ -57,7 +57,7 @@ class FolderIO(AbstractFolderIO):
del dirs[i]
class FileIOFolderMixin(object):
class FileIOFolderMixin:
def get_parent_folder(self):
return FolderIO(os.path.dirname(self.path))
@@ -65,13 +65,13 @@ class FileIOFolderMixin(object):
class ZipFileIO(file_io.KnownContentFileIO, FileIOFolderMixin):
"""For .zip and .egg archives"""
def __init__(self, path, code, zip_path):
super(ZipFileIO, self).__init__(path, code)
super().__init__(path, code)
self._zip_path = zip_path
def get_last_modified(self):
try:
return os.path.getmtime(self._zip_path)
except OSError: # Python 3 would probably only need FileNotFoundError
except (FileNotFoundError, PermissionError, NotADirectoryError):
return None

View File

@@ -81,7 +81,7 @@ from jedi.inference.imports import follow_error_node_imports_if_possible
from jedi.plugins import plugin_manager
class InferenceState(object):
class InferenceState:
def __init__(self, project, environment=None, script_path=None):
if environment is None:
environment = project.get_environment()
@@ -90,7 +90,7 @@ class InferenceState(object):
self.compiled_subprocess = environment.get_inference_state_subprocess(self)
self.grammar = environment.get_grammar()
self.latest_grammar = parso.load_grammar(version='3.7')
self.latest_grammar = parso.load_grammar(version='3.13')
self.memoize_cache = {} # for memoize decorators
self.module_cache = imports.ModuleCache() # does the job of `sys.modules`.
self.stub_module_cache = {} # Dict[Tuple[str, ...], Optional[ModuleValue]]
@@ -99,10 +99,11 @@ class InferenceState(object):
self.mixed_cache = {} # see `inference.compiled.mixed._create()`
self.analysis = []
self.dynamic_params_depth = 0
self.do_dynamic_params_search = settings.dynamic_params
self.is_analysis = False
self.project = project
self.access_cache = {}
self.allow_descriptor_getattr = False
self.allow_unsafe_executions = False
self.flow_analysis_enabled = True
self.reset_recursion_limitations()
@@ -120,19 +121,18 @@ class InferenceState(object):
debug.dbg('execute result: %s in %s', value_set, value)
return value_set
@property
# mypy doesn't support decorated properties (https://github.com/python/mypy/issues/1362)
@property # type: ignore[misc]
@inference_state_function_cache()
def builtins_module(self):
module_name = u'builtins'
if self.environment.version_info.major == 2:
module_name = u'__builtin__'
builtins_module, = self.import_module((module_name,), sys_path=())
module_name = 'builtins'
builtins_module, = self.import_module((module_name,), sys_path=[])
return builtins_module
@property
@property # type: ignore[misc]
@inference_state_function_cache()
def typing_module(self):
typing_module, = self.import_module((u'typing',))
typing_module, = self.import_module(('typing',))
return typing_module
def reset_recursion_limitations(self):
@@ -171,6 +171,8 @@ class InferenceState(object):
return tree_name_to_values(self, context, name)
elif type_ == 'param':
return context.py__getattribute__(name.value, position=name.end_pos)
elif type_ == 'namedexpr_test':
return context.infer_node(def_)
else:
result = follow_error_node_imports_if_possible(context, name)
if result is not None:
@@ -178,14 +180,14 @@ class InferenceState(object):
return helpers.infer_call_of_leaf(context, name)
def parse_and_get_code(self, code=None, path=None, encoding='utf-8',
def parse_and_get_code(self, code=None, path=None,
use_latest_grammar=False, file_io=None, **kwargs):
if code is None:
if file_io is None:
file_io = FileIO(path)
code = file_io.read()
# We cannot just use parso, because it doesn't use errors='replace'.
code = parso.python_bytes_to_unicode(code, encoding=encoding, errors='replace')
code = parso.python_bytes_to_unicode(code, encoding='utf-8', errors='replace')
if len(code) > settings._cropped_file_size:
code = code[:settings._cropped_file_size]
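For reference, the decoding behaviour relied on above can be reproduced directly; a small sketch of parso.python_bytes_to_unicode with errors='replace' (the byte string is made up):

    import parso

    # b'\xff' is not valid UTF-8; errors='replace' maps it to U+FFFD instead of
    # raising UnicodeDecodeError, which is what the call above depends on.
    raw = b"value = 'caf\xff'\n"
    text = parso.python_bytes_to_unicode(raw, encoding='utf-8', errors='replace')
    print(repr(text))   # "value = 'caf\ufffd'\n"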

View File

@@ -3,7 +3,6 @@ Module for statical analysis.
"""
from parso.python import tree
from jedi._compatibility import force_unicode
from jedi import debug
from jedi.inference.helpers import is_string
@@ -27,7 +26,7 @@ CODES = {
}
class Error(object):
class Error:
def __init__(self, name, module_path, start_pos, message=None):
self.path = module_path
self._start_pos = start_pos
@@ -50,13 +49,10 @@ class Error(object):
first = self.__class__.__name__[0]
return first + str(CODES[self.name][0])
def __unicode__(self):
def __str__(self):
return '%s:%s:%s: %s %s' % (self.path, self.line, self.column,
self.code, self.message)
def __str__(self):
return self.__unicode__()
def __eq__(self, other):
return (self.path == other.path and self.name == other.name
and self._start_pos == other._start_pos)
@@ -193,7 +189,7 @@ def _check_for_exception_catch(node_context, jedi_name, exception, payload=None)
key, lazy_value = unpacked_args[1]
names = list(lazy_value.infer())
assert len(names) == 1 and is_string(names[0])
assert force_unicode(names[0].get_safe_value()) == payload[1].value
assert names[0].get_safe_value() == payload[1].value
# Check objects
key, lazy_value = unpacked_args[0]

View File

@@ -1,8 +1,8 @@
import re
from itertools import zip_longest
from parso.python import tree
from jedi._compatibility import zip_longest
from jedi import debug
from jedi.inference.utils import PushBackIterator
from jedi.inference import analysis
@@ -124,7 +124,7 @@ def _parse_argument_clinic(string):
allow_kwargs = True
class _AbstractArgumentsMixin(object):
class _AbstractArgumentsMixin:
def unpack(self, funcdef=None):
raise NotImplementedError
@@ -142,11 +142,8 @@ def unpack_arglist(arglist):
if arglist is None:
return
# Allow testlist here as well for Python2's class inheritance
# definitions.
if not (arglist.type in ('arglist', 'testlist') or (
# in python 3.5 **arg is an argument, not arglist
arglist.type == 'argument' and arglist.children[0] in ('*', '**'))):
if arglist.type != 'arglist' and not (
arglist.type == 'argument' and arglist.children[0] in ('*', '**')):
yield 0, arglist
return
@@ -189,16 +186,13 @@ class TreeArguments(AbstractArguments):
iterators = [_iterate_star_args(self.context, a, el, funcdef)
for a in arrays]
for values in list(zip_longest(*iterators)):
# TODO zip_longest yields None, that means this would raise
# an exception?
yield None, get_merged_lazy_value(
[v for v in values if v is not None]
)
elif star_count == 2:
arrays = self.context.infer_node(el)
for dct in arrays:
for key, values in _star_star_dict(self.context, dct, el, funcdef):
yield key, values
yield from _star_star_dict(self.context, dct, el, funcdef)
else:
if el.type == 'argument':
c = el.children
@@ -221,8 +215,7 @@ class TreeArguments(AbstractArguments):
# Reordering arguments is necessary, because star args sometimes appear
# after named argument, but in the actual order it's prepended.
for named_arg in named_args:
yield named_arg
yield from named_args
def _as_tree_tuple_objects(self):
for star_count, argument in unpack_arglist(self.argument_node):
@@ -323,8 +316,7 @@ def _iterate_star_args(context, array, input_node, funcdef=None):
except AttributeError:
pass
else:
for lazy_value in iter_():
yield lazy_value
yield from iter_()
def _star_star_dict(context, array, input_node, funcdef):

View File

@@ -8,10 +8,11 @@ just one.
"""
from functools import reduce
from operator import add
from itertools import zip_longest
from parso.python.tree import Name
from jedi import debug
from jedi._compatibility import zip_longest, unicode
from jedi.parser_utils import clean_scope_docstring
from jedi.inference.helpers import SimpleGetItemNotFound
from jedi.inference.utils import safe_property
@@ -21,7 +22,11 @@ from jedi.cache import memoize_method
sentinel = object()
class HelperValueMixin(object):
class HasNoContext(Exception):
pass
class HelperValueMixin:
def get_root_context(self):
value = self
if value.parent_context is None:
@@ -54,14 +59,12 @@ class HelperValueMixin(object):
def _get_value_filters(self, name_or_str):
origin_scope = name_or_str if isinstance(name_or_str, Name) else None
for f in self.get_filters(origin_scope=origin_scope):
yield f
yield from self.get_filters(origin_scope=origin_scope)
# This covers the case where stub files are incomplete.
if self.is_stub():
from jedi.inference.gradual.conversion import convert_values
for c in convert_values(ValueSet({self})):
for f in c.get_filters():
yield f
yield from c.get_filters()
def goto(self, name_or_str, name_context=None, analysis_errors=True):
from jedi.inference import finder
@@ -92,11 +95,14 @@ class HelperValueMixin(object):
return values
def py__await__(self):
await_value_set = self.py__getattribute__(u"__await__")
await_value_set = self.py__getattribute__("__await__")
if not await_value_set:
debug.warning('Tried to run __await__ on value %s', self)
return await_value_set.execute_with_values()
def py__name__(self):
return self.name.string_name
def iterate(self, contextualized_node=None, is_async=False):
debug.dbg('iterate %s', self)
if is_async:
@@ -109,7 +115,7 @@ class HelperValueMixin(object):
.py__getattribute__('__anext__').execute_with_values()
.py__getattribute__('__await__').execute_with_values()
.py__stop_iteration_returns()
) # noqa
) # noqa: E124
])
return self.py__iter__(contextualized_node)
@@ -172,6 +178,9 @@ class Value(HelperValueMixin):
message="TypeError: '%s' object is not iterable" % self)
return iter([])
def py__next__(self, contextualized_node=None):
return self.py__iter__(contextualized_node)
def get_signatures(self):
return []
@@ -256,15 +265,12 @@ class Value(HelperValueMixin):
return self.parent_context.is_stub()
def _as_context(self):
raise NotImplementedError('Not all values need to be converted to contexts: %s', self)
raise HasNoContext
@property
def name(self):
raise NotImplementedError
def py__name__(self):
return self.name.string_name
def get_type_hint(self, add_class_info=True):
return None
@@ -291,7 +297,7 @@ class Value(HelperValueMixin):
just the `_T` generic parameter.
`value_set`: represents the actual argument passed to the parameter
we're inferrined for, or (for recursive calls) their types. In the
we're inferred for, or (for recursive calls) their types. In the
above example this would first be the representation of the list
`[1]` and then, when recursing, just of `1`.
"""
@@ -354,14 +360,14 @@ class ValueWrapper(_ValueWrapperBase):
class TreeValue(Value):
def __init__(self, inference_state, parent_context, tree_node):
super(TreeValue, self).__init__(inference_state, parent_context)
super().__init__(inference_state, parent_context)
self.tree_node = tree_node
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.tree_node)
class ContextualizedNode(object):
class ContextualizedNode:
def __init__(self, context, node):
self.context = context
self.node = node
@@ -382,7 +388,7 @@ def _getitem(value, index_values, contextualized_node):
unused_values = set()
for index_value in index_values:
index = index_value.get_safe_value(default=None)
if type(index) in (float, int, str, unicode, slice, bytes):
if type(index) in (float, int, str, slice, bytes):
try:
result |= value.py__simple_getitem__(index)
continue
@@ -403,7 +409,7 @@ def _getitem(value, index_values, contextualized_node):
return result
class ValueSet(object):
class ValueSet:
def __init__(self, iterable):
self._set = frozenset(iterable)
for value in iterable:
@@ -435,8 +441,7 @@ class ValueSet(object):
return self._from_frozen_set(self._set & other._set)
def __iter__(self):
for element in self._set:
yield element
return iter(self._set)
def __bool__(self):
return bool(self._set)

View File

@@ -78,7 +78,7 @@ class CachedMetaClass(type):
"""
@inference_state_as_method_param_cache()
def __call__(self, *args, **kwargs):
return super(CachedMetaClass, self).__call__(*args, **kwargs)
return super().__call__(*args, **kwargs)
def inference_state_method_generator_cache():

View File

@@ -1,4 +1,6 @@
from jedi._compatibility import unicode
# This file also re-exports symbols for wider use. We configure mypy and flake8
# to be aware that this file does this.
from jedi.inference.compiled.value import CompiledValue, CompiledName, \
CompiledValueFilter, CompiledValueName, create_from_access_path
from jedi.inference.base_value import LazyValueWrapper
@@ -29,7 +31,7 @@ class ExactValue(LazyValueWrapper):
if name in ('get_safe_value', 'execute_operation', 'access_handle',
'negate', 'py__bool__', 'is_compiled'):
return getattr(self._compiled_value, name)
return super(ExactValue, self).__getattribute__(name)
return super().__getattribute__(name)
def _get_wrapped_value(self):
instance, = builtin_from_name(
@@ -45,7 +47,7 @@ def create_simple_object(inference_state, obj):
Only allows creations of objects that are easily picklable across Python
versions.
"""
assert type(obj) in (int, float, str, bytes, unicode, slice, complex, bool), obj
assert type(obj) in (int, float, str, bytes, slice, complex, bool), repr(obj)
compiled_value = create_from_access_path(
inference_state,
inference_state.compiled_subprocess.create_simple_object(obj)
@@ -54,7 +56,7 @@ def create_simple_object(inference_state, obj):
def get_string_value_set(inference_state):
return builtin_from_name(inference_state, u'str').execute_with_values()
return builtin_from_name(inference_state, 'str').execute_with_values()
def load_module(inference_state, dotted_name, **kwargs):

View File

@@ -1,17 +1,19 @@
from __future__ import print_function
import inspect
import types
import traceback
import sys
import operator as op
from collections import namedtuple
import warnings
import re
import builtins
import typing
from pathlib import Path
from typing import Optional, Tuple
from jedi._compatibility import unicode, is_py3, builtins, \
py_version, force_unicode
from jedi.inference.compiled.getattr_static import getattr_static
ALLOWED_GETITEM_TYPES = (str, list, tuple, unicode, bytes, bytearray, dict)
ALLOWED_GETITEM_TYPES = (str, list, tuple, bytes, bytearray, dict)
MethodDescriptorType = type(str.replace)
# These are not considered classes and access is granted even though they have
@@ -28,22 +30,17 @@ NOT_CLASS_TYPES = (
types.MethodType,
types.ModuleType,
types.TracebackType,
MethodDescriptorType
MethodDescriptorType,
types.MappingProxyType,
types.SimpleNamespace,
types.DynamicClassAttribute,
)
if is_py3:
NOT_CLASS_TYPES += (
types.MappingProxyType,
types.SimpleNamespace,
types.DynamicClassAttribute,
)
# Those types don't exist in typing.
MethodDescriptorType = type(str.replace)
WrapperDescriptorType = type(set.__iter__)
# `object.__subclasshook__` is an already executed descriptor.
object_class_dict = type.__dict__["__dict__"].__get__(object)
object_class_dict = type.__dict__["__dict__"].__get__(object) # type: ignore[index]
ClassMethodDescriptorType = type(object_class_dict['__subclasshook__'])
_sentinel = object()
@@ -123,13 +120,18 @@ def load_module(inference_state, dotted_name, sys_path):
__import__(dotted_name)
except ImportError:
# If a module is "corrupt" or not really a Python module or whatever.
print('Module %s not importable in path %s.' % (dotted_name, sys_path), file=sys.stderr)
warnings.warn(
"Module %s not importable in path %s." % (dotted_name, sys_path),
UserWarning,
stacklevel=2,
)
return None
except Exception:
# Since __import__ pretty much makes code execution possible, just
# catch any error here and print it.
import traceback
print("Cannot import:\n%s" % traceback.format_exc(), file=sys.stderr)
warnings.warn(
"Cannot import:\n%s" % traceback.format_exc(), UserWarning, stacklevel=2
)
return None
finally:
sys.path = temp
@@ -140,42 +142,29 @@ def load_module(inference_state, dotted_name, sys_path):
return create_access_path(inference_state, module)
class AccessPath(object):
class AccessPath:
def __init__(self, accesses):
self.accesses = accesses
# Writing both of these methods here looks a bit ridiculous. However with
# the differences of Python 2/3 it's actually necessary, because we will
# otherwise have a accesses attribute that is bytes instead of unicode.
def __getstate__(self):
return self.accesses
def __setstate__(self, value):
self.accesses = value
def create_access_path(inference_state, obj):
def create_access_path(inference_state, obj) -> AccessPath:
access = create_access(inference_state, obj)
return AccessPath(access.get_access_path_tuples())
def _force_unicode_decorator(func):
return lambda *args, **kwargs: force_unicode(func(*args, **kwargs))
def get_api_type(obj):
if inspect.isclass(obj):
return u'class'
return 'class'
elif inspect.ismodule(obj):
return u'module'
return 'module'
elif inspect.isbuiltin(obj) or inspect.ismethod(obj) \
or inspect.ismethoddescriptor(obj) or inspect.isfunction(obj):
return u'function'
return 'function'
# Everything else...
return u'instance'
return 'instance'
class DirectObjectAccess(object):
class DirectObjectAccess:
def __init__(self, inference_state, obj):
self._inference_state = inference_state
self._obj = obj
@@ -186,20 +175,20 @@ class DirectObjectAccess(object):
def _create_access(self, obj):
return create_access(self._inference_state, obj)
def _create_access_path(self, obj):
def _create_access_path(self, obj) -> AccessPath:
return create_access_path(self._inference_state, obj)
def py__bool__(self):
return bool(self._obj)
def py__file__(self):
def py__file__(self) -> Optional[Path]:
try:
return self._obj.__file__
return Path(self._obj.__file__)
except AttributeError:
return None
def py__doc__(self):
return force_unicode(inspect.getdoc(self._obj)) or u''
return inspect.getdoc(self._obj) or ''
def py__name__(self):
if not _is_class_instance(self._obj) or \
@@ -214,7 +203,7 @@ class DirectObjectAccess(object):
return None
try:
return force_unicode(cls.__name__)
return cls.__name__
except AttributeError:
return None
@@ -224,18 +213,39 @@ class DirectObjectAccess(object):
def py__getitem__all_values(self):
if isinstance(self._obj, dict):
return [self._create_access_path(v) for v in self._obj.values()]
return self.py__iter__list()
if isinstance(self._obj, (list, tuple)):
return [self._create_access_path(v) for v in self._obj]
def py__simple_getitem__(self, index):
if type(self._obj) not in ALLOWED_GETITEM_TYPES:
if self.is_instance():
cls = DirectObjectAccess(self._inference_state, self._obj.__class__)
return cls.py__getitem__all_values()
try:
getitem = self._obj.__getitem__
except AttributeError:
pass
else:
annotation = DirectObjectAccess(self._inference_state, getitem).get_return_annotation()
if annotation is not None:
return [annotation]
return None
def py__simple_getitem__(self, index, *, safe=True):
if safe and type(self._obj) not in ALLOWED_GETITEM_TYPES:
# Get rid of side effects, we won't call custom `__getitem__`s.
return None
return self._create_access_path(self._obj[index])
def py__iter__list(self):
if not hasattr(self._obj, '__getitem__'):
try:
iter_method = self._obj.__iter__
except AttributeError:
return None
else:
p = DirectObjectAccess(self._inference_state, iter_method).get_return_annotation()
if p is not None:
return [p]
if type(self._obj) not in ALLOWED_GETITEM_TYPES:
# Get rid of side effects, we won't call custom `__getitem__`s.
@@ -260,26 +270,23 @@ class DirectObjectAccess(object):
# Avoid some weird hacks that would just fail, because they cannot be
# used by pickle.
if not isinstance(paths, list) \
or not all(isinstance(p, (bytes, unicode)) for p in paths):
or not all(isinstance(p, str) for p in paths):
return None
return paths
@_force_unicode_decorator
@shorten_repr
def get_repr(self):
builtins = 'builtins', '__builtin__'
if inspect.ismodule(self._obj):
return repr(self._obj)
# Try to avoid execution of the property.
if safe_getattr(self._obj, '__module__', default='') in builtins:
if safe_getattr(self._obj, '__module__', default='') == 'builtins':
return repr(self._obj)
type_ = type(self._obj)
if type_ == type:
return type.__repr__(self._obj)
if safe_getattr(type_, '__module__', default='') in builtins:
if safe_getattr(type_, '__module__', default='') == 'builtins':
# Allow direct execution of repr for builtins.
return repr(self._obj)
return object.__repr__(self._obj)
@@ -310,10 +317,10 @@ class DirectObjectAccess(object):
name = try_to_get_name(type(self._obj))
if name is None:
return ()
return tuple(force_unicode(n) for n in name.split('.'))
return tuple(name.split('.'))
def dir(self):
return list(map(force_unicode, dir(self._obj)))
return dir(self._obj)
def has_iter(self):
try:
@@ -322,33 +329,37 @@ class DirectObjectAccess(object):
except TypeError:
return False
def is_allowed_getattr(self, name, unsafe=False):
def is_allowed_getattr(self, name, safe=True) -> Tuple[bool, bool, Optional[AccessPath]]:
# TODO this API is ugly.
if unsafe:
# Unsafe is mostly used to check for __getattr__/__getattribute__.
# getattr_static works for properties, but the underscore methods
# are just ignored (because it's safer and avoids more code
# execution). See also GH #1378.
# Avoid warnings, see comment in the next function.
with warnings.catch_warnings(record=True):
warnings.simplefilter("always")
try:
return hasattr(self._obj, name), False
except Exception:
# Obviously has an attribute (propably a property) that
# gets executed, so just avoid all exceptions here.
return False, False
try:
attr, is_get_descriptor = getattr_static(self._obj, name)
except AttributeError:
return False, False
if not safe:
# Unsafe is mostly used to check for __getattr__/__getattribute__.
# getattr_static works for properties, but the underscore methods
# are just ignored (because it's safer and avoids more code
# execution). See also GH #1378.
# Avoid warnings, see comment in the next function.
with warnings.catch_warnings(record=True):
warnings.simplefilter("always")
try:
return hasattr(self._obj, name), False, None
except Exception:
# Obviously has an attribute (probably a property) that
# gets executed, so just avoid all exceptions here.
pass
return False, False, None
else:
if is_get_descriptor and type(attr) not in ALLOWED_DESCRIPTOR_ACCESS:
if isinstance(attr, property):
if hasattr(attr.fget, '__annotations__'):
a = DirectObjectAccess(self._inference_state, attr.fget)
return True, True, a.get_return_annotation()
# In case of descriptors that have get methods we cannot return
# it's value, because that would mean code execution.
return True, True
return True, False
return True, True, None
return True, False, None
def getattr_paths(self, name, default=_sentinel):
try:
@@ -377,7 +388,7 @@ class DirectObjectAccess(object):
except AttributeError:
pass
else:
if module is not None:
if module is not None and isinstance(module, str):
try:
__import__(module)
# For some modules like _sqlite3, the __module__ for classes is
@@ -396,7 +407,7 @@ class DirectObjectAccess(object):
return [self._create_access(module), access]
def get_safe_value(self):
if type(self._obj) in (bool, bytes, float, int, str, unicode, slice) or self._obj is None:
if type(self._obj) in (bool, bytes, float, int, str, slice) or self._obj is None:
return self._obj
raise ValueError("Object is type %s and not simple" % type(self._obj))
@@ -464,9 +475,6 @@ class DirectObjectAccess(object):
"""
Returns Tuple[Optional[str], Tuple[AccessPath, ...]]
"""
if sys.version_info < (3, 5):
return None, ()
name = None
args = ()
if safe_getattr(self._obj, '__module__', default='') == 'typing':
@@ -485,8 +493,6 @@ class DirectObjectAccess(object):
return inspect.isclass(self._obj) and self._obj != type
def _annotation_to_str(self, annotation):
if py_version < 30:
return ''
return inspect.formatannotation(annotation)
def get_signature_params(self):
@@ -505,8 +511,6 @@ class DirectObjectAccess(object):
def _get_signature(self):
obj = self._obj
if py_version < 33:
raise ValueError("inspect.signature was introduced in 3.3")
try:
return inspect.signature(obj)
except (RuntimeError, TypeError):
@@ -515,7 +519,7 @@ class DirectObjectAccess(object):
# the signature. In that case we just want a simple escape for now.
raise ValueError
def get_return_annotation(self):
def get_return_annotation(self) -> Optional[AccessPath]:
try:
o = self._obj.__annotations__.get('return')
except AttributeError:
@@ -525,15 +529,9 @@ class DirectObjectAccess(object):
return None
try:
# Python 2 doesn't have typing.
import typing
except ImportError:
o = typing.get_type_hints(self._obj).get('return')
except Exception:
pass
else:
try:
o = typing.get_type_hints(self._obj).get('return')
except Exception:
pass
return self._create_access_path(o)
@@ -546,7 +544,7 @@ class DirectObjectAccess(object):
objects of an objects
"""
tuples = dict(
(force_unicode(name), self.is_allowed_getattr(name))
(name, self.is_allowed_getattr(name))
for name in self.dir()
)
return self.needs_type_completions(), tuples
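The idea behind get_return_annotation above, reading an annotation without ever calling the object, can be shown in isolation. A self-contained sketch (not the exact method):

    import typing
    from typing import Optional

    def example(a: int, b: int) -> "list":
        return [a, b]

    def safe_return_annotation(obj) -> Optional[object]:
        # Reading __annotations__ never executes obj itself.
        try:
            annotation = obj.__annotations__.get('return')
        except AttributeError:
            return None
        if isinstance(annotation, str):
            # String annotations still need resolving; get_type_hints does that,
            # and any failure is swallowed much like in the code above.
            try:
                return typing.get_type_hints(obj).get('return')
            except Exception:
                return None
        return annotation

    print(safe_return_annotation(example))   # <class 'list'>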

View File

@@ -7,7 +7,6 @@ information returned to enable Jedi to make decisions.
import types
from jedi import debug
from jedi._compatibility import py_version
_sentinel = object()
@@ -39,7 +38,7 @@ def _is_type(obj):
return True
def _shadowed_dict_newstyle(klass):
def _shadowed_dict(klass):
dict_attr = type.__dict__["__dict__"]
for entry in _static_getmro(klass):
try:
@@ -54,7 +53,7 @@ def _shadowed_dict_newstyle(klass):
return _sentinel
def _static_getmro_newstyle(klass):
def _static_getmro(klass):
mro = type.__dict__['__mro__'].__get__(klass)
if not isinstance(mro, (tuple, list)):
# There are unfortunately no tests for this, I was not able to
@@ -65,70 +64,8 @@ def _static_getmro_newstyle(klass):
return mro
if py_version >= 30:
_shadowed_dict = _shadowed_dict_newstyle
_get_type = type
_static_getmro = _static_getmro_newstyle
else:
def _shadowed_dict(klass):
"""
In Python 2 __dict__ is not overwritable:
class Foo(object): pass
setattr(Foo, '__dict__', 4)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: __dict__ must be a dictionary object
It applies to both newstyle and oldstyle classes:
class Foo(object): pass
setattr(Foo, '__dict__', 4)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
AttributeError: attribute '__dict__' of 'type' objects is not writable
It also applies to instances of those objects. However to keep things
straight forward, newstyle classes always use the complicated way of
accessing it while oldstyle classes just use getattr.
"""
if type(klass) is _oldstyle_class_type:
return getattr(klass, '__dict__', _sentinel)
return _shadowed_dict_newstyle(klass)
class _OldStyleClass:
pass
_oldstyle_instance_type = type(_OldStyleClass())
_oldstyle_class_type = type(_OldStyleClass)
def _get_type(obj):
type_ = object.__getattribute__(obj, '__class__')
if type_ is _oldstyle_instance_type:
# Somehow for old style classes we need to access it directly.
return obj.__class__
return type_
def _static_getmro(klass):
if type(klass) is _oldstyle_class_type:
def oldstyle_mro(klass):
"""
Oldstyle mro is a really simplistic way of look up mro:
https://stackoverflow.com/questions/54867/what-is-the-difference-between-old-style-and-new-style-classes-in-python
"""
yield klass
for base in klass.__bases__:
for yield_from in oldstyle_mro(base):
yield yield_from
return oldstyle_mro(klass)
return _static_getmro_newstyle(klass)
def _safe_hasattr(obj, name):
return _check_class(_get_type(obj), name) is not _sentinel
return _check_class(type(obj), name) is not _sentinel
def _safe_is_data_descriptor(obj):
@@ -151,7 +88,7 @@ def getattr_static(obj, attr, default=_sentinel):
"""
instance_result = _sentinel
if not _is_type(obj):
klass = _get_type(obj)
klass = type(obj)
dict_attr = _shadowed_dict(klass)
if (dict_attr is _sentinel or type(dict_attr) is types.MemberDescriptorType):
instance_result = _check_instance(obj, attr)
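For orientation: this module serves the same purpose as the standard library's inspect.getattr_static, fetching attributes without triggering descriptors, while additionally reporting whether a get-descriptor was involved. A small stdlib illustration of the difference:

    import inspect

    class Lazy:
        @property
        def value(self):
            print("side effect!")    # code execution jedi wants to avoid
            return 42

    obj = Lazy()

    # getattr(obj, "value") would run the property body and print "side effect!".
    attr = inspect.getattr_static(obj, "value")
    print(type(attr))                # <class 'property'> -- nothing was executed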

View File

@@ -3,12 +3,10 @@ Used only for REPL Completion.
"""
import inspect
import os
import sys
from pathlib import Path
from jedi.parser_utils import get_cached_code_lines
from jedi._compatibility import unwrap
from jedi import settings
from jedi.cache import memoize_method
from jedi.inference import compiled
@@ -36,7 +34,7 @@ class MixedObject(ValueWrapper):
This combined logic makes it possible to provide more powerful REPL
completion. It allows side effects that are not noticeable with the default
parser structure to still be completeable.
parser structure to still be completable.
The biggest difference from CompiledValue to MixedObject is that we are
generally dealing with Python code and not with C code. This will generate
@@ -44,7 +42,7 @@ class MixedObject(ValueWrapper):
to modify the runtime.
"""
def __init__(self, compiled_value, tree_value):
super(MixedObject, self).__init__(tree_value)
super().__init__(tree_value)
self.compiled_value = compiled_value
self.access_handle = compiled_value.access_handle
@@ -71,12 +69,22 @@ class MixedObject(ValueWrapper):
else:
return self.compiled_value.get_safe_value(default)
@property
def array_type(self):
return self.compiled_value.array_type
def get_key_values(self):
return self.compiled_value.get_key_values()
def py__simple_getitem__(self, index):
python_object = self.compiled_value.access_handle.access._obj
if type(python_object) in ALLOWED_GETITEM_TYPES:
return self.compiled_value.py__simple_getitem__(index)
return self._wrapped_value.py__simple_getitem__(index)
def negate(self):
return self.compiled_value.negate()
def _as_context(self):
if self.parent_context is None:
return MixedModuleContext(self)
@@ -105,7 +113,7 @@ class MixedName(NameWrapper):
The ``CompiledName._compiled_value`` is our MixedObject.
"""
def __init__(self, wrapped_name, parent_tree_value):
super(MixedName, self).__init__(wrapped_name)
super().__init__(wrapped_name)
self._parent_tree_value = parent_tree_value
@property
@@ -131,12 +139,12 @@ class MixedName(NameWrapper):
class MixedObjectFilter(compiled.CompiledValueFilter):
def __init__(self, inference_state, compiled_value, tree_value):
super(MixedObjectFilter, self).__init__(inference_state, compiled_value)
super().__init__(inference_state, compiled_value)
self._tree_value = tree_value
def _create_name(self, name):
def _create_name(self, *args, **kwargs):
return MixedName(
super(MixedObjectFilter, self)._create_name(name),
super()._create_name(*args, **kwargs),
self._tree_value,
)
@@ -153,12 +161,11 @@ def _load_module(inference_state, path):
def _get_object_to_check(python_object):
"""Check if inspect.getfile has a chance to find the source."""
if sys.version_info[0] > 2:
try:
python_object = unwrap(python_object)
except ValueError:
# Can return a ValueError when it wraps around
pass
try:
python_object = inspect.unwrap(python_object)
except ValueError:
# Can return a ValueError when it wraps around
pass
if (inspect.ismodule(python_object)
or inspect.isclass(python_object)
@@ -180,11 +187,19 @@ def _find_syntax_node_name(inference_state, python_object):
try:
python_object = _get_object_to_check(python_object)
path = inspect.getsourcefile(python_object)
except TypeError:
except (OSError, TypeError):
# The type might not be known (e.g. class_with_dict.__weakref__)
return None
if path is None or not os.path.exists(path):
# The path might not exist or be e.g. <stdin>.
path = None if path is None else Path(path)
try:
if path is None or not path.exists():
# The path might not exist or be e.g. <stdin>.
return None
except OSError:
# Might raise an OSError on Windows:
#
# [WinError 123] The filename, directory name, or volume label
# syntax is incorrect: '<string>'
return None
file_io = FileIO(path)
@@ -252,7 +267,7 @@ def _find_syntax_node_name(inference_state, python_object):
@inference_state_function_cache()
def _create(inference_state, compiled_value, module_context):
# TODO accessing this is bad, but it probably doesn't matter that much,
# because we're working with interpreteters only here.
# because we're working with interpreters only here.
python_object = compiled_value.access_handle.access._obj
result = _find_syntax_node_name(inference_state, python_object)
if result is None:

View File

@@ -5,23 +5,37 @@ goals:
1. Making it safer - Segfaults and RuntimeErrors as well as stdout/stderr can
be ignored and dealt with.
2. Make it possible to handle different Python versions as well as virtualenvs.
The architecture here is briefly:
- For each Jedi `Environment` there is a corresponding subprocess which
operates within the target environment. If the subprocess dies it is replaced
at this level.
- `CompiledSubprocess` manages exactly one subprocess and handles communication
from the parent side.
- `Listener` runs within the subprocess, processing each request and yielding
results.
- `InterpreterEnvironment` provides an API which matches that of `Environment`,
but runs functionality inline rather than within a subprocess. It is thus
used both directly in places where a subprocess is unnecessary and/or
undesirable and also within subprocesses themselves.
- `InferenceStateSubprocess` (or `InferenceStateSameProcess`) provide high
level access to functionality within the subprocess from within the parent.
Each `InterpreterState` has an instance of one of these, provided by its
environment.
"""
import collections
import os
import sys
import queue
import subprocess
import socket
import errno
import traceback
import weakref
from functools import partial
from threading import Thread
try:
from queue import Queue, Empty
except ImportError:
from Queue import Queue, Empty # python 2.7
from typing import Dict, TYPE_CHECKING
from jedi._compatibility import queue, is_py3, force_unicode, \
pickle_dump, pickle_load, GeneralizedPopen, weakref
from jedi._compatibility import pickle_dump, pickle_load
from jedi import debug
from jedi.cache import memoize_method
from jedi.inference.compiled.subprocess import functions
@@ -29,13 +43,32 @@ from jedi.inference.compiled.access import DirectObjectAccess, AccessPath, \
SignatureParam
from jedi.api.exceptions import InternalError
if TYPE_CHECKING:
from jedi.inference import InferenceState
_MAIN_PATH = os.path.join(os.path.dirname(__file__), '__main__.py')
PICKLE_PROTOCOL = 4
def _enqueue_output(out, queue):
def _GeneralizedPopen(*args, **kwargs):
if os.name == 'nt':
try:
# Was introduced in Python 3.7.
CREATE_NO_WINDOW = subprocess.CREATE_NO_WINDOW
except AttributeError:
CREATE_NO_WINDOW = 0x08000000
kwargs['creationflags'] = CREATE_NO_WINDOW
# The child process doesn't need file descriptors except 0, 1, 2.
# This is unix only.
kwargs['close_fds'] = 'posix' in sys.builtin_module_names
return subprocess.Popen(*args, **kwargs)
def _enqueue_output(out, queue_):
for line in iter(out.readline, b''):
queue.put(line)
queue_.put(line)
def _add_stderr_to_debug(stderr_queue):
@@ -46,7 +79,7 @@ def _add_stderr_to_debug(stderr_queue):
line = stderr_queue.get_nowait()
line = line.decode('utf-8', 'replace')
debug.warning('stderr output: %s' % line.rstrip('\n'))
except Empty:
except queue.Empty:
break
@@ -70,11 +103,10 @@ def _cleanup_process(process, thread):
pass
class _InferenceStateProcess(object):
def __init__(self, inference_state):
class _InferenceStateProcess:
def __init__(self, inference_state: 'InferenceState') -> None:
self._inference_state_weakref = weakref.ref(inference_state)
self._inference_state_id = id(inference_state)
self._handles = {}
self._handles: Dict[int, AccessHandle] = {}
def get_or_create_access_handle(self, obj):
id_ = id(obj)
@@ -104,11 +136,49 @@ class InferenceStateSameProcess(_InferenceStateProcess):
class InferenceStateSubprocess(_InferenceStateProcess):
def __init__(self, inference_state, compiled_subprocess):
super(InferenceStateSubprocess, self).__init__(inference_state)
"""
API to functionality which will run in a subprocess.
This mediates the interaction between an `InferenceState` and the actual
execution of functionality running within a `CompiledSubprocess`. Available
functions are defined in `.functions`, though should be accessed via
attributes on this class of the same name.
This class is responsible for indicating that the `InferenceState` within
the subprocess can be removed once the corresponding instance in the parent
goes away.
"""
def __init__(
self,
inference_state: 'InferenceState',
compiled_subprocess: 'CompiledSubprocess',
) -> None:
super().__init__(inference_state)
self._used = False
self._compiled_subprocess = compiled_subprocess
# Opaque id we'll pass to the subprocess to identify the context (an
# `InferenceState`) which should be used for the request. This allows us
# to make subsequent requests which operate on results from previous
# ones, while keeping a single subprocess which can work with several
# contexts in the parent process. Once it is no longer needed (i.e. when
# this class goes away), we also use this id to indicate that the
# subprocess can discard the context.
#
# Note: this id is deliberately coupled to this class (and not to
# `InferenceState`) as this class manages access handle mappings which
# must correspond to those in the subprocess. This approach also avoids
# race conditions from successive `InferenceState`s with the same object
# id (as observed while adding support for Python 3.13).
#
# This value does not need to be the `id()` of this instance, we merely
# need to ensure that it enables the (visible) lifetime of the context
# within the subprocess to match that of this class. We therefore also
# depend on the semantics of `CompiledSubprocess.delete_inference_state`
# for correctness.
self._inference_state_id = id(self)
def __getattr__(self, name):
func = _get_function(name)
@@ -116,7 +186,7 @@ class InferenceStateSubprocess(_InferenceStateProcess):
self._used = True
result = self._compiled_subprocess.run(
self._inference_state_weakref(),
self._inference_state_id,
func,
args=args,
kwargs=kwargs,
@@ -151,22 +221,31 @@ class InferenceStateSubprocess(_InferenceStateProcess):
self._compiled_subprocess.delete_inference_state(self._inference_state_id)
class CompiledSubprocess(object):
is_crashed = False
# Start with 2, gets set after _get_info.
_pickle_protocol = 2
class CompiledSubprocess:
"""
A subprocess which runs inference within a target environment.
def __init__(self, executable):
This class manages the interface to a single instance of such a process as
well as the lifecycle of the process itself. See `.__main__` and `Listener`
for the implementation of the subprocess and details of the protocol.
A single live instance of this is maintained by `jedi.api.environment.Environment`,
so that typically a single subprocess is used at a time.
"""
is_crashed = False
def __init__(self, executable, env_vars=None):
self._executable = executable
self._inference_state_deletion_queue = queue.deque()
self._env_vars = env_vars
self._inference_state_deletion_queue = collections.deque()
self._cleanup_callable = lambda: None
def __repr__(self):
pid = os.getpid()
return '<%s _executable=%r, _pickle_protocol=%r, is_crashed=%r, pid=%r>' % (
return '<%s _executable=%r, is_crashed=%r, pid=%r>' % (
self.__class__.__name__,
self._executable,
self._pickle_protocol,
self.is_crashed,
pid,
)
@@ -181,26 +260,14 @@ class CompiledSubprocess(object):
os.path.dirname(os.path.dirname(parso_path)),
'.'.join(str(x) for x in sys.version_info[:3]),
)
# Use explicit envionment to ensure reliable results (#1540)
env = {}
if os.name == 'nt':
# if SYSTEMROOT (or case variant) exists in environment,
# ensure it goes to subprocess
for k, v in os.environ.items():
if 'SYSTEMROOT' == k.upper():
env.update({k: os.environ[k]})
break # don't risk multiple entries
process = GeneralizedPopen(
process = _GeneralizedPopen(
args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
# Use system default buffering on Python 2 to improve performance
# (this is already the case on Python 3).
bufsize=-1,
env=env
env=self._env_vars
)
self._stderr_queue = Queue()
self._stderr_queue = queue.Queue()
self._stderr_thread = t = Thread(
target=_enqueue_output,
args=(process.stderr, self._stderr_queue)
@@ -215,18 +282,18 @@ class CompiledSubprocess(object):
t)
return process
def run(self, inference_state, function, args=(), kwargs={}):
def run(self, inference_state_id, function, args=(), kwargs={}):
# Delete old inference_states.
while True:
try:
inference_state_id = self._inference_state_deletion_queue.pop()
delete_id = self._inference_state_deletion_queue.pop()
except IndexError:
break
else:
self._send(inference_state_id, None)
self._send(delete_id, None)
assert callable(function)
return self._send(id(inference_state), function, args, kwargs)
return self._send(inference_state_id, function, args, kwargs)
def get_sys_path(self):
return self._send(None, functions.get_sys_path, (), {})
@@ -239,20 +306,10 @@ class CompiledSubprocess(object):
if self.is_crashed:
raise InternalError("The subprocess %s has crashed." % self._executable)
if not is_py3:
# Python 2 compatibility
kwargs = {force_unicode(key): value for key, value in kwargs.items()}
data = inference_state_id, function, args, kwargs
try:
pickle_dump(data, self._get_process().stdin, self._pickle_protocol)
except (socket.error, IOError) as e:
# Once Python2 will be removed we can just use `BrokenPipeError`.
# Also, somehow in windows it returns EINVAL instead of EPIPE if
# the subprocess dies.
if e.errno not in (errno.EPIPE, errno.EINVAL):
# Not a broken pipe
raise
pickle_dump(data, self._get_process().stdin, PICKLE_PROTOCOL)
except BrokenPipeError:
self._kill()
raise InternalError("The subprocess %s was killed. Maybe out of memory?"
% self._executable)
@@ -284,22 +341,65 @@ class CompiledSubprocess(object):
def delete_inference_state(self, inference_state_id):
"""
Currently we are not deleting inference_state instantly. They only get
deleted once the subprocess is used again. It would probably a better
solution to move all of this into a thread. However, the memory usage
of a single inference_state shouldn't be that high.
Indicate that an inference state (in the subprocess) is no longer
needed.
The state corresponding to the given id will become inaccessible and the
id may safely be re-used to refer to a different context.
Note: it is not guaranteed that the corresponding state will actually be
deleted immediately.
"""
# With an argument - the inference_state gets deleted.
# Warning: if changing the semantics of context deletion see the comment
# in `InferenceStateSubprocess.__init__` regarding potential race
# conditions.
# Currently we are not deleting the related states instantly. They only
# get deleted once the subprocess is used again. It would probably be a
# better solution to move all of this into a thread. However, the memory
# usage of a single inference_state shouldn't be that high.
self._inference_state_deletion_queue.append(inference_state_id)
class Listener(object):
def __init__(self, pickle_protocol):
class Listener:
"""
Main loop for the subprocess which actually does the inference.
This class runs within the target environment. It listens to instructions
from the parent process, runs inference and returns the results.
The subprocess has a long lifetime and is expected to process several
requests, including for different `InferenceState` instances in the parent.
See `CompiledSubprocess` for the parent half of the system.
Communication is via pickled data sent serially over stdin and stdout.
Stderr is read only if the child process crashes.
The request protocol is a 4-tuple of:
* inference_state_id | None: an opaque identifier of the parent's
`InferenceState`. An `InferenceState` operating over an
`InterpreterEnvironment` is created within this process for each of
these, ensuring that each parent context has a corresponding context
here. This allows context to be persisted between requests. Unless
`None`, the local `InferenceState` will be passed to the given function
as the first positional argument.
* function | None: the function to run. This is expected to be a member of
`.functions`. `None` indicates that the corresponding inference state is
no longer needed and should be dropped.
* args: positional arguments to the `function`. If any of these are
`AccessHandle` instances they will be adapted to the local
`InferenceState` before being passed.
* kwargs: keyword arguments to the `function`. If any of these are
`AccessHandle` instances they will be adapted to the local
`InferenceState` before being passed.
The result protocol is a 3-tuple of either:
* (False, None, function result): if the function returns without error, or
* (True, traceback, exception): if the function raises an exception
"""
def __init__(self):
self._inference_states = {}
# TODO refactor so we don't need to process anymore just handle
# controlling.
self._process = _InferenceStateProcess(Listener)
self._pickle_protocol = pickle_protocol
def _get_inference_state(self, function, inference_state_id):
from jedi.inference import InferenceState
@@ -321,6 +421,9 @@ class Listener(object):
if inference_state_id is None:
return function(*args, **kwargs)
elif function is None:
# Warning: if changing the semantics of context deletion see the comment
# in `InferenceStateSubprocess.__init__` regarding potential race
# conditions.
del self._inference_states[inference_state_id]
else:
inference_state = self._get_inference_state(function, inference_state_id)
@@ -342,15 +445,8 @@ class Listener(object):
# because stdout is used for IPC.
sys.stdout = open(os.devnull, 'w')
stdin = sys.stdin
if sys.version_info[0] > 2:
stdout = stdout.buffer
stdin = stdin.buffer
# Python 2 opens streams in text mode on Windows. Set stdout and stdin
# to binary mode.
elif sys.platform == 'win32':
import msvcrt
msvcrt.setmode(stdout.fileno(), os.O_BINARY)
msvcrt.setmode(stdin.fileno(), os.O_BINARY)
stdout = stdout.buffer
stdin = stdin.buffer
while True:
try:
@@ -364,11 +460,16 @@ class Listener(object):
except Exception as e:
result = True, traceback.format_exc(), e
pickle_dump(result, stdout, self._pickle_protocol)
pickle_dump(result, stdout, PICKLE_PROTOCOL)
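To make the request/result protocol described in the Listener docstring concrete, here is a minimal sketch of the wire format alone: a pickled 4-tuple request answered by a pickled 3-tuple result. The function and id are made up, and the real implementation additionally passes the local InferenceState to the function:

    import io
    import pickle
    import traceback

    PICKLE_PROTOCOL = 4

    def get_sys_path():
        # Stand-in for a member of jedi.inference.compiled.subprocess.functions.
        return ["/usr/lib/python3.12"]

    # Parent side: (inference_state_id | None, function | None, args, kwargs).
    request = io.BytesIO()
    pickle.dump((1234, get_sys_path, (), {}), request, protocol=PICKLE_PROTOCOL)

    # Child side: unpickle, run, and answer with (is_exception, traceback, payload).
    request.seek(0)
    state_id, function, args, kwargs = pickle.load(request)
    try:
        result = (False, None, function(*args, **kwargs))
    except Exception as e:
        result = (True, traceback.format_exc(), e)

    response = io.BytesIO()
    pickle.dump(result, response, protocol=PICKLE_PROTOCOL)
    response.seek(0)
    print(pickle.load(response))   # (False, None, ['/usr/lib/python3.12'])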
class AccessHandle(object):
def __init__(self, subprocess, access, id_):
class AccessHandle:
def __init__(
self,
subprocess: _InferenceStateProcess,
access: DirectObjectAccess,
id_: int,
) -> None:
self.access = access
self._subprocess = subprocess
self.id = id_
@@ -393,9 +494,8 @@ class AccessHandle(object):
if name in ('id', 'access') or name.startswith('_'):
raise AttributeError("Something went wrong with unpickling")
# if not is_py3: print >> sys.stderr, name
# print('getattr', name, file=sys.stderr)
return partial(self._workaround, force_unicode(name))
return partial(self._workaround, name)
def _workaround(self, name, *args, **kwargs):
"""

View File

@@ -1,5 +1,11 @@
import os
import sys
from importlib.abc import MetaPathFinder
from importlib.machinery import PathFinder
# Remove the first entry, because it's simply a directory entry that equals
# this directory.
del sys.path[0]
def _get_paths():
@@ -11,45 +17,24 @@ def _get_paths():
return {'jedi': _jedi_path, 'parso': _parso_path}
# Remove the first entry, because it's simply a directory entry that equals
# this directory.
del sys.path[0]
class _ExactImporter(MetaPathFinder):
def __init__(self, path_dct):
self._path_dct = path_dct
if sys.version_info > (3, 4):
from importlib.machinery import PathFinder
def find_spec(self, fullname, path=None, target=None):
if path is None and fullname in self._path_dct:
p = self._path_dct[fullname]
spec = PathFinder.find_spec(fullname, path=[p], target=target)
return spec
return None
class _ExactImporter(object):
def __init__(self, path_dct):
self._path_dct = path_dct
def find_module(self, fullname, path=None):
if path is None and fullname in self._path_dct:
p = self._path_dct[fullname]
loader = PathFinder.find_module(fullname, path=[p])
return loader
return None
# Try to import jedi/parso.
sys.meta_path.insert(0, _ExactImporter(_get_paths()))
from jedi.inference.compiled import subprocess # NOQA
sys.meta_path.pop(0)
else:
import imp
def load(name):
paths = list(_get_paths().values())
fp, pathname, description = imp.find_module(name, paths)
return imp.load_module(name, fp, pathname, description)
load('parso')
load('jedi')
from jedi.inference.compiled import subprocess # NOQA
from jedi._compatibility import highest_pickle_protocol # noqa: E402
# Try to import jedi/parso.
sys.meta_path.insert(0, _ExactImporter(_get_paths()))
from jedi.inference.compiled import subprocess # noqa: E402
sys.meta_path.pop(0)
# Retrieve the pickle protocol.
host_sys_version = [int(x) for x in sys.argv[2].split('.')]
pickle_protocol = highest_pickle_protocol([sys.version_info, host_sys_version])
# And finally start the client.
subprocess.Listener(pickle_protocol=pickle_protocol).listen()
subprocess.Listener().listen()

View File

@@ -1,18 +1,20 @@
from __future__ import print_function
import sys
import os
import re
import inspect
import importlib
from pathlib import Path
from zipfile import ZipFile
from zipimport import zipimporter, ZipImportError
from importlib.machinery import all_suffixes
from jedi._compatibility import find_module, cast_path, force_unicode, \
all_suffixes, scandir
from jedi.inference.compiled import access
from jedi import debug
from jedi import parser_utils
from jedi.file_io import KnownContentFileIO, ZipFileIO
def get_sys_path():
return list(map(cast_path, sys.path))
return sys.path
def load_module(inference_state, **kwargs):
@@ -35,7 +37,7 @@ def get_module_info(inference_state, sys_path=None, full_name=None, **kwargs):
if sys_path is not None:
sys.path, temp = sys_path, sys.path
try:
return find_module(full_name=full_name, **kwargs)
return _find_module(full_name=full_name, **kwargs)
except ImportError:
return None, None
finally:
@@ -44,7 +46,7 @@ def get_module_info(inference_state, sys_path=None, full_name=None, **kwargs):
def get_builtin_module_names(inference_state):
return list(map(force_unicode, sys.builtin_module_names))
return sys.builtin_module_names
def _test_raise_error(inference_state, exception_type):
@@ -90,19 +92,25 @@ def _iter_module_names(inference_state, paths):
# Python modules/packages
for path in paths:
try:
dirs = scandir(path)
dir_entries = ((entry.name, entry.is_dir()) for entry in os.scandir(path))
except OSError:
# The file might not exist or reading it might lead to an error.
debug.warning("Not possible to list directory: %s", path)
continue
for dir_entry in dirs:
name = dir_entry.name
try:
zip_import_info = zipimporter(path)
# Unfortunately, there is no public way to access zipimporter's
# private _files member. We therefore have to use a
# custom function to iterate over the files.
dir_entries = _zip_list_subdirectory(
zip_import_info.archive, zip_import_info.prefix)
except ZipImportError:
# The file might not exist or reading it might lead to an error.
debug.warning("Not possible to list directory: %s", path)
continue
for name, is_dir in dir_entries:
# First Namespaces then modules/stubs
if dir_entry.is_dir():
# pycache is obviously not an interestin namespace. Also the
if is_dir:
# pycache is obviously not an interesting namespace. Also the
# name must be a valid identifier.
# TODO use str.isidentifier, once Python 2 is removed
if name != '__pycache__' and not re.search(r'\W|^\d', name):
if name != '__pycache__' and name.isidentifier():
yield name
else:
if name.endswith('.pyi'): # Stub files
@@ -113,3 +121,137 @@ def _iter_module_names(inference_state, paths):
if modname and '.' not in modname:
if modname != '__init__':
yield modname
def _find_module(string, path=None, full_name=None, is_global_search=True):
"""
Provides information about a module.
This function isolates the differences in importing libraries introduced with
python 3.3 on; it gets a module name and optionally a path. It will return a
tuple containin an open file for the module (if not builtin), the filename
or the name of the module if it is a builtin one and a boolean indicating
if the module is contained in a package.
"""
spec = None
loader = None
for finder in sys.meta_path:
if is_global_search and finder != importlib.machinery.PathFinder:
p = None
else:
p = path
try:
find_spec = finder.find_spec
except AttributeError:
# These are old-school classes that still have a different API, just
# ignore those.
continue
spec = find_spec(string, p)
if spec is not None:
if spec.origin == "frozen":
continue
loader = spec.loader
if loader is None and not spec.has_location:
# This is a namespace package.
full_name = string if not path else full_name
implicit_ns_info = ImplicitNSInfo(full_name, spec.submodule_search_locations._path)
return implicit_ns_info, True
break
return _find_module_py33(string, path, loader)
def _find_module_py33(string, path=None, loader=None, full_name=None, is_global_search=True):
if not loader:
spec = importlib.machinery.PathFinder.find_spec(string, path)
if spec is not None:
loader = spec.loader
if loader is None and path is None: # Fallback to find builtins
try:
spec = importlib.util.find_spec(string)
if spec is not None:
loader = spec.loader
except ValueError as e:
# See #491. Importlib might raise a ValueError, to avoid this, we
# just raise an ImportError to fix the issue.
raise ImportError("Originally " + repr(e))
if loader is None:
raise ImportError("Couldn't find a loader for {}".format(string))
return _from_loader(loader, string)
def _from_loader(loader, string):
try:
is_package_method = loader.is_package
except AttributeError:
is_package = False
else:
is_package = is_package_method(string)
try:
get_filename = loader.get_filename
except AttributeError:
return None, is_package
else:
module_path = get_filename(string)
# To avoid unicode and read bytes, "overwrite" loader.get_source if
# possible.
try:
f = type(loader).get_source
except AttributeError:
raise ImportError("get_source was not defined on loader")
if f is not importlib.machinery.SourceFileLoader.get_source:
# Unfortunately we are reading unicode here, not bytes.
# It seems hard to get bytes, because the zip importer
# logic just unpacks the zip file and returns a file descriptor
# that we cannot as easily access. Therefore we just read it as
# a string in the cases where get_source was overwritten.
code = loader.get_source(string)
else:
code = _get_source(loader, string)
if code is None:
return None, is_package
if isinstance(loader, zipimporter):
return ZipFileIO(module_path, code, Path(loader.archive)), is_package
return KnownContentFileIO(module_path, code), is_package
def _get_source(loader, fullname):
"""
This method is here as a replacement for SourceLoader.get_source. That
method returns unicode, but we prefer bytes.
"""
path = loader.get_filename(fullname)
try:
return loader.get_data(path)
except OSError:
raise ImportError('source not available through get_data()',
name=fullname)
def _zip_list_subdirectory(zip_path, zip_subdir_path):
zip_file = ZipFile(zip_path)
zip_subdir_path = Path(zip_subdir_path)
zip_content_file_paths = zip_file.namelist()
for raw_file_name in zip_content_file_paths:
file_path = Path(raw_file_name)
if file_path.parent == zip_subdir_path:
file_path = file_path.relative_to(zip_subdir_path)
yield file_path.name, raw_file_name.endswith("/")
class ImplicitNSInfo:
"""Stores information returned from an implicit namespace spec"""
def __init__(self, name, paths):
self.name = name
self.paths = paths
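The new _zip_list_subdirectory above works around zipimporter's lack of a public listing API by reading the archive's name list directly. A standalone sketch of the same idea (the example archive path is hypothetical):

from pathlib import Path
from zipfile import ZipFile

def list_zip_subdir(zip_path, subdir):
    # Yield (name, is_dir) for entries directly under `subdir` in the archive.
    subdir = Path(subdir)
    with ZipFile(zip_path) as zf:
        for raw_name in zf.namelist():
            p = Path(raw_name)
            if p.parent == subdir:
                yield p.name, raw_name.endswith('/')

# for name, is_dir in list_zip_subdir('plugins.zip', 'plugins'):
#     print(name, is_dir)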

View File

@@ -3,10 +3,12 @@ Imitate the parser representation.
"""
import re
from functools import partial
from inspect import Parameter
from pathlib import Path
from typing import Optional
from jedi import debug
from jedi.inference.utils import to_list
from jedi._compatibility import force_unicode, Parameter, cast_path
from jedi.cache import memoize_method
from jedi.inference.filters import AbstractFilter
from jedi.inference.names import AbstractNameDefinition, ValueNameMixin, \
@@ -20,7 +22,7 @@ from jedi.inference.signature import BuiltinSignature
from jedi.inference.context import CompiledContext, CompiledModuleContext
class CheckAttribute(object):
class CheckAttribute:
"""Raises :exc:`AttributeError` if the attribute X is not available."""
def __init__(self, check_name=None):
# Remove the py in front of e.g. py__call__.
@@ -29,7 +31,7 @@ class CheckAttribute(object):
def __call__(self, func):
self.func = func
if self.check_name is None:
self.check_name = force_unicode(func.__name__[2:])
self.check_name = func.__name__[2:]
return self
def __get__(self, instance, owner):
@@ -43,22 +45,21 @@ class CheckAttribute(object):
class CompiledValue(Value):
def __init__(self, inference_state, access_handle, parent_context=None):
super(CompiledValue, self).__init__(inference_state, parent_context)
super().__init__(inference_state, parent_context)
self.access_handle = access_handle
def py__call__(self, arguments):
return_annotation = self.access_handle.get_return_annotation()
if return_annotation is not None:
# TODO the return annotation may also be a string.
return create_from_access_path(
self.inference_state,
return_annotation
).execute_annotation()
try:
self.access_handle.getattr_paths(u'__call__')
self.access_handle.getattr_paths('__call__')
except AttributeError:
return super(CompiledValue, self).py__call__(arguments)
return super().py__call__(arguments)
else:
if self.access_handle.is_class():
from jedi.inference.value import CompiledInstance
@@ -161,11 +162,14 @@ class CompiledValue(Value):
def py__simple_getitem__(self, index):
with reraise_getitem_errors(IndexError, KeyError, TypeError):
try:
access = self.access_handle.py__simple_getitem__(index)
access = self.access_handle.py__simple_getitem__(
index,
safe=not self.inference_state.allow_unsafe_executions
)
except AttributeError:
return super(CompiledValue, self).py__simple_getitem__(index)
return super().py__simple_getitem__(index)
if access is None:
return NO_VALUES
return super().py__simple_getitem__(index)
return ValueSet([create_from_access_path(self.inference_state, access)])
@@ -174,20 +178,15 @@ class CompiledValue(Value):
if all_access_paths is None:
# This means basically that no __getitem__ has been defined on this
# object.
return super(CompiledValue, self).py__getitem__(index_value_set, contextualized_node)
return super().py__getitem__(index_value_set, contextualized_node)
return ValueSet(
create_from_access_path(self.inference_state, access)
for access in all_access_paths
)
def py__iter__(self, contextualized_node=None):
# Python iterators are a bit strange, because there's no need for
# the __iter__ function as long as __getitem__ is defined (it will
# just start with __getitem__(0). This is especially true for
# Python 2 strings, where `str.__iter__` is not even defined.
if not self.access_handle.has_iter():
for x in super(CompiledValue, self).py__iter__(contextualized_node):
yield x
yield from super().py__iter__(contextualized_node)
access_path_list = self.access_handle.py__iter__list()
if access_path_list is None:
@@ -222,10 +221,8 @@ class CompiledValue(Value):
continue
else:
bltn_obj = builtin_from_name(self.inference_state, name)
for result in self.inference_state.execute(bltn_obj, params):
yield result
for type_ in docstrings.infer_return_types(self):
yield type_
yield from self.inference_state.execute(bltn_obj, params)
yield from docstrings.infer_return_types(self)
def get_safe_value(self, default=_sentinel):
try:
@@ -264,7 +261,7 @@ class CompiledValue(Value):
v.with_generics(arguments)
for v in self.inference_state.typing_module.py__getattribute__(name)
]).execute_annotation()
return super(CompiledValue, self).execute_annotation()
return super().execute_annotation()
def negate(self):
return create_from_access_path(self.inference_state, self.access_handle.negate())
@@ -298,10 +295,7 @@ class CompiledModule(CompiledValue):
return CompiledModuleContext(self)
def py__path__(self):
paths = self.access_handle.py__path__()
if paths is None:
return None
return map(cast_path, paths)
return self.access_handle.py__path__()
def is_package(self):
return self.py__path__() is not None
@@ -314,20 +308,20 @@ class CompiledModule(CompiledValue):
return ()
return tuple(name.split('.'))
def py__file__(self):
return cast_path(self.access_handle.py__file__())
def py__file__(self) -> Optional[Path]:
return self.access_handle.py__file__() # type: ignore[no-any-return]
class CompiledName(AbstractNameDefinition):
def __init__(self, inference_state, parent_value, name):
def __init__(self, inference_state, parent_value, name, is_descriptor):
self._inference_state = inference_state
self.parent_context = parent_value.as_context()
self._parent_value = parent_value
self.string_name = name
self.is_descriptor = is_descriptor
def py__doc__(self):
value, = self.infer()
return value.py__doc__()
return self.infer_compiled_value().py__doc__()
def _get_qualified_names(self):
parent_qualified_names = self.parent_context.get_qualified_names()
@@ -351,16 +345,17 @@ class CompiledName(AbstractNameDefinition):
@property
def api_type(self):
api = self.infer()
# If we can't find the type, assume it is an instance variable
if not api:
if self.is_descriptor:
# In case of properties we want to avoid executions as much as
# possible. Since the api_type can be wrong for other reasons
# anyway, we just return instance here.
return "instance"
return next(iter(api)).api_type
return self.infer_compiled_value().api_type
@memoize_method
def infer(self):
return ValueSet([self.infer_compiled_value()])
@memoize_method
def infer_compiled_value(self):
return create_from_name(self._inference_state, self._parent_value, self.string_name)
@@ -445,9 +440,10 @@ class CompiledValueFilter(AbstractFilter):
def get(self, name):
access_handle = self.compiled_value.access_handle
safe = not self._inference_state.allow_unsafe_executions
return self._get(
name,
lambda name, unsafe: access_handle.is_allowed_getattr(name, unsafe),
lambda name: access_handle.is_allowed_getattr(name, safe=safe),
lambda name: name in access_handle.dir(),
check_has_attribute=True
)
@@ -456,39 +452,40 @@ class CompiledValueFilter(AbstractFilter):
"""
To remove quite a few access calls we introduced the callback here.
"""
# Always use unicode objects in Python 2 from here.
name = force_unicode(name)
if self._inference_state.allow_descriptor_getattr:
pass
has_attribute, is_descriptor = allowed_getattr_callback(
has_attribute, is_descriptor, property_return_annotation = allowed_getattr_callback(
name,
unsafe=self._inference_state.allow_descriptor_getattr
)
if property_return_annotation is not None:
values = create_from_access_path(
self._inference_state,
property_return_annotation
).execute_annotation()
if values:
return [CompiledValueName(v, name) for v in values]
if check_has_attribute and not has_attribute:
return []
if (is_descriptor or not has_attribute) \
and not self._inference_state.allow_descriptor_getattr:
and not self._inference_state.allow_unsafe_executions:
return [self._get_cached_name(name, is_empty=True)]
if self.is_instance and not in_dir_callback(name):
return []
return [self._get_cached_name(name)]
return [self._get_cached_name(name, is_descriptor=is_descriptor)]
@memoize_method
def _get_cached_name(self, name, is_empty=False):
def _get_cached_name(self, name, is_empty=False, *, is_descriptor=False):
if is_empty:
return EmptyCompiledName(self._inference_state, name)
else:
return self._create_name(name)
return self._create_name(name, is_descriptor=is_descriptor)
def values(self):
from jedi.inference.compiled import builtin_from_name
names = []
needs_type_completions, dir_infos = self.compiled_value.access_handle.get_dir_infos()
# We could use `unsafe` here as well, especially as a parameter to
# We could use `safe=False` here as well, especially as a parameter to
# get_dir_infos. But this would lead to a lot of property executions
# that are probably not wanted. The drawback for this is that we
# have a different name for `get` and `values`. For `get` we always
@@ -496,21 +493,22 @@ class CompiledValueFilter(AbstractFilter):
for name in dir_infos:
names += self._get(
name,
lambda name, unsafe: dir_infos[name],
lambda name: dir_infos[name],
lambda name: name in dir_infos,
)
# ``dir`` doesn't include the type names.
if not self.is_instance and needs_type_completions:
for filter in builtin_from_name(self._inference_state, u'type').get_filters():
for filter in builtin_from_name(self._inference_state, 'type').get_filters():
names += filter.values()
return names
def _create_name(self, name):
def _create_name(self, name, is_descriptor):
return CompiledName(
self._inference_state,
self.compiled_value,
name
name,
is_descriptor,
)
def __repr__(self):
@@ -518,11 +516,11 @@ class CompiledValueFilter(AbstractFilter):
docstr_defaults = {
'floating point number': u'float',
'character': u'str',
'integer': u'int',
'dictionary': u'dict',
'string': u'str',
'floating point number': 'float',
'character': 'str',
'integer': 'int',
'dictionary': 'dict',
'string': 'str',
}
@@ -534,7 +532,6 @@ def _parse_function_doc(doc):
TODO docstrings like utime(path, (atime, mtime)) and a(b [, b]) -> None
TODO docstrings like 'tuple of integers'
"""
doc = force_unicode(doc)
# parse round parentheses: def func(a, (b,c))
try:
count = 0
@@ -553,7 +550,7 @@ def _parse_function_doc(doc):
# UnboundLocalError for undefined end in last line
debug.dbg('no brackets found - no param')
end = 0
param_str = u''
param_str = ''
else:
# remove square brackets, that show an optional param ( = None)
def change_options(m):
@@ -571,9 +568,9 @@ def _parse_function_doc(doc):
param_str = param_str.replace('-', '_') # see: isinstance.__doc__
# parse return value
r = re.search(u'-[>-]* ', doc[end:end + 7])
r = re.search('-[>-]* ', doc[end:end + 7])
if r is None:
ret = u''
ret = ''
else:
index = end + r.end()
# get result type, which can contain newlines
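The docstring parsing touched above turns builtin-style docstrings such as 'pow(base, exp) -> number' into a parameter string and a return hint. A toy version of that idea, much simpler than _parse_function_doc itself:

import re

def parse_builtin_doc(doc):
    # Pull "name(params) -> ret" out of a builtin-style docstring, if present.
    match = re.match(r'\w+\((?P<params>[^)]*)\)\s*-+>\s*(?P<ret>\w+)', doc)
    if match is None:
        return '', ''
    return match.group('params'), match.group('ret')

# parse_builtin_doc('pow(base, exp) -> number') == ('base, exp', 'number')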

View File

@@ -1,5 +1,7 @@
from abc import abstractmethod
from contextlib import contextmanager
from pathlib import Path
from typing import Optional
from parso.tree import search_ancestor
from parso.python.tree import Name
@@ -13,7 +15,7 @@ from jedi import debug
from jedi import parser_utils
class AbstractContext(object):
class AbstractContext:
# Must be defined: inference_state and tree_node and parent_context as an attribute/property
def __init__(self, inference_state):
@@ -164,7 +166,7 @@ class ValueContext(AbstractContext):
Should be defined, otherwise the API returns empty types.
"""
def __init__(self, value):
super(ValueContext, self).__init__(value.inference_state)
super().__init__(value.inference_state)
self._value = value
@property
@@ -216,7 +218,7 @@ class ValueContext(AbstractContext):
return '%s(%s)' % (self.__class__.__name__, self._value)
class TreeContextMixin(object):
class TreeContextMixin:
def infer_node(self, node):
from jedi.inference.syntax_tree import infer_node
return infer_node(self, node)
@@ -255,8 +257,7 @@ class TreeContextMixin(object):
if scope_node.type in ('funcdef', 'lambdef', 'classdef'):
return self.create_value(scope_node).as_context()
elif scope_node.type in ('comp_for', 'sync_comp_for'):
parent_scope = parser_utils.get_parent_scope(scope_node)
parent_context = from_scope_node(parent_scope)
parent_context = from_scope_node(parent_scope(scope_node.parent))
if node.start_pos >= scope_node.children[-1].start_pos:
return parent_context
return CompForContext(parent_context, scope_node)
@@ -308,8 +309,8 @@ class FunctionContext(TreeContextMixin, ValueContext):
class ModuleContext(TreeContextMixin, ValueContext):
def py__file__(self):
return self._value.py__file__()
def py__file__(self) -> Optional[Path]:
return self._value.py__file__() # type: ignore[no-any-return]
def get_filters(self, until_position=None, origin_scope=None):
filters = self._value.get_filters(origin_scope)
@@ -323,11 +324,10 @@ class ModuleContext(TreeContextMixin, ValueContext):
),
self.get_global_filter(),
)
for f in filters: # Python 2...
yield f
yield from filters
def get_global_filter(self):
return GlobalNameFilter(self, self.tree_node)
return GlobalNameFilter(self)
@property
def string_names(self):
@@ -357,8 +357,8 @@ class NamespaceContext(TreeContextMixin, ValueContext):
def string_names(self):
return self._value.string_names
def py__file__(self):
return self._value.py__file__()
def py__file__(self) -> Optional[Path]:
return self._value.py__file__() # type: ignore[no-any-return]
class ClassContext(TreeContextMixin, ValueContext):
@@ -375,7 +375,7 @@ class ClassContext(TreeContextMixin, ValueContext):
class CompForContext(TreeContextMixin, AbstractContext):
def __init__(self, parent_context, comp_for):
super(CompForContext, self).__init__(parent_context.inference_state)
super().__init__(parent_context.inference_state)
self.tree_node = comp_for
self.parent_context = parent_context
@@ -407,8 +407,8 @@ class CompiledModuleContext(CompiledContext):
def string_names(self):
return self._value.string_names
def py__file__(self):
return self._value.py__file__()
def py__file__(self) -> Optional[Path]:
return self._value.py__file__() # type: ignore[no-any-return]
def _get_global_filters_for_name(context, name_or_none, position):
@@ -439,13 +439,12 @@ def get_global_filters(context, until_position, origin_scope):
For global name lookups. The filters will handle name resolution
themselves, but here we gather possible filters downwards.
>>> from jedi._compatibility import u, no_unicode_pprint
>>> from jedi import Script
>>> script = Script(u('''
>>> script = Script('''
... x = ['a', 'b', 'c']
... def func():
... y = None
... '''))
... ''')
>>> module_node = script._module_node
>>> scope = next(module_node.iter_funcdefs())
>>> scope
@@ -455,7 +454,7 @@ def get_global_filters(context, until_position, origin_scope):
First we get the names from the function scope.
>>> no_unicode_pprint(filters[0]) # doctest: +ELLIPSIS
>>> print(filters[0]) # doctest: +ELLIPSIS
MergedFilter(<ParserTreeFilter: ...>, <GlobalNameFilter: ...>)
>>> sorted(str(n) for n in filters[0].values()) # doctest: +NORMALIZE_WHITESPACE
['<TreeNameDefinition: string_name=func start_pos=(3, 4)>',
@@ -484,10 +483,10 @@ def get_global_filters(context, until_position, origin_scope):
from jedi.inference.value.function import BaseFunctionExecutionContext
while context is not None:
# Names in methods cannot be resolved within the class.
for filter in context.get_filters(
until_position=until_position,
origin_scope=origin_scope):
yield filter
yield from context.get_filters(
until_position=until_position,
origin_scope=origin_scope
)
if isinstance(context, (BaseFunctionExecutionContext, ModuleContext)):
# The position should be reset if the current scope is a function.
until_position = None
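Most hunks in this file (and throughout the change set) replace the old Python 2-compatible "for x in gen(): yield x" loops with "yield from". A tiny self-contained illustration of why the two are interchangeable here:

def _inner():
    yield 1
    yield 2

def old_style():
    for x in _inner():   # pre-'yield from' workaround
        yield x

def new_style():
    yield from _inner()  # same values; also forwards send()/throw()/return value

assert list(old_style()) == list(new_style()) == [1, 2]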

View File

@@ -0,0 +1,21 @@
from jedi.inference.value import ModuleValue
from jedi.inference.context import ModuleContext
class DocstringModule(ModuleValue):
def __init__(self, in_module_context, **kwargs):
super().__init__(**kwargs)
self._in_module_context = in_module_context
def _as_context(self):
return DocstringModuleContext(self, self._in_module_context)
class DocstringModuleContext(ModuleContext):
def __init__(self, module_value, in_module_context):
super().__init__(module_value)
self._in_module_context = in_module_context
def get_filters(self, origin_scope=None, until_position=None):
yield from super().get_filters(until_position=until_position)
yield from self._in_module_context.get_filters()

View File

@@ -17,13 +17,10 @@ annotations.
import re
import warnings
from textwrap import dedent
from parso import parse, ParserSyntaxError
from jedi._compatibility import u
from jedi import debug
from jedi.common import indent_block
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import iterator_to_value_set, ValueSet, \
NO_VALUES
@@ -51,7 +48,7 @@ def _get_numpy_doc_string_cls():
global _numpy_doc_string_cache
if isinstance(_numpy_doc_string_cache, (ImportError, SyntaxError)):
raise _numpy_doc_string_cache
from numpydoc.docscrape import NumpyDocString
from numpydoc.docscrape import NumpyDocString # type: ignore[import]
_numpy_doc_string_cache = NumpyDocString
return _numpy_doc_string_cache
@@ -96,8 +93,7 @@ def _search_return_in_numpydocstr(docstr):
# Return names are optional and if so the type is in the name
if not r_type:
r_type = r_name
for type_ in _expand_typestr(r_type):
yield type_
yield from _expand_typestr(r_type)
def _expand_typestr(type_str):
@@ -115,7 +111,7 @@ def _expand_typestr(type_str):
elif type_str.startswith('{'):
node = parse(type_str, version='3.7').children[0]
if node.type == 'atom':
for leaf in node.children[1].children:
for leaf in getattr(node.children[1], "children", []):
if leaf.type == 'number':
if '.' in leaf.value:
yield 'float'
@@ -184,55 +180,40 @@ def _strip_rst_role(type_str):
def _infer_for_statement_string(module_context, string):
code = dedent(u("""
def pseudo_docstring_stuff():
'''
Create a pseudo function for docstring statements.
Need this docstring so that if the below part is not valid Python this
is still a function.
'''
{}
"""))
if string is None:
return []
for element in re.findall(r'((?:\w+\.)*\w+)\.', string):
# Try to import module part in dotted name.
# (e.g., 'threading' in 'threading.Thread').
string = 'import %s\n' % element + string
potential_imports = re.findall(r'((?:\w+\.)*\w+)\.', string)
# Try to import module part in dotted name.
# (e.g., 'threading' in 'threading.Thread').
imports = "\n".join(f"import {p}" for p in potential_imports)
string = f'{imports}\n{string}'
# Take the default grammar here, if we load the Python 2.7 grammar here, it
# will be impossible to use `...` (Ellipsis) as a token. Docstring types
# don't need to conform with the current grammar.
debug.dbg('Parse docstring code %s', string, color='BLUE')
grammar = module_context.inference_state.latest_grammar
grammar = module_context.inference_state.grammar
try:
module = grammar.parse(code.format(indent_block(string)), error_recovery=False)
module = grammar.parse(string, error_recovery=False)
except ParserSyntaxError:
return []
try:
funcdef = next(module.iter_funcdefs())
# First pick suite, then simple_stmt and then the node,
# which is also not the last item, because there's a newline.
stmt = funcdef.children[-1].children[-1].children[-2]
# It's not the last item, because that's an end marker.
stmt = module.children[-2]
except (AttributeError, IndexError):
return []
if stmt.type not in ('name', 'atom', 'atom_expr'):
return []
from jedi.inference.value import FunctionValue
function_value = FunctionValue(
module_context.inference_state,
module_context,
funcdef
# Here we basically use a fake module that also uses the filters in
# the actual module.
from jedi.inference.docstring_utils import DocstringModule
m = DocstringModule(
in_module_context=module_context,
inference_state=module_context.inference_state,
module_node=module,
code_lines=[],
)
func_execution_context = function_value.as_context()
# Use the module of the param.
# TODO this module is not the module of the param in case of a function
# call. In that case it's the module of the function call.
# stuffed with content from a function call.
return list(_execute_types_in_stmt(func_execution_context, stmt))
return list(_execute_types_in_stmt(m.as_context(), stmt))
def _execute_types_in_stmt(module_context, stmt):
@@ -299,9 +280,7 @@ def infer_return_types(function_value):
if match:
yield _strip_rst_role(match.group(1))
# Check for numpy style return hint
for type_ in _search_return_in_numpydocstr(code):
yield type_
yield from _search_return_in_numpydocstr(code)
for type_str in search_return_in_docstr(function_value.py__doc__()):
for value in _infer_for_statement_string(function_value.get_root_context(), type_str):
yield value
yield from _infer_for_statement_string(function_value.get_root_context(), type_str)
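The rewritten _infer_for_statement_string above no longer wraps the docstring type in a fake function; it prefixes the dotted names with imports and parses the result as a tiny module whose filters are borrowed from the real one. A rough sketch of just the preprocessing step (the helper name is made up):

import re

def build_docstring_module_code(type_str):
    # For "threading.Thread", generate "import threading\nthreading.Thread".
    prefixes = re.findall(r'((?:\w+\.)*\w+)\.', type_str)
    imports = "\n".join(f"import {p}" for p in prefixes)
    return f"{imports}\n{type_str}"

# build_docstring_module_code("threading.Thread")
#   -> "import threading\nthreading.Thread"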

View File

@@ -66,11 +66,11 @@ def dynamic_param_lookup(function_value, param_index):
have to look for all calls to ``func`` to find out what ``foo`` possibly
is.
"""
funcdef = function_value.tree_node
if not settings.dynamic_params:
if not function_value.inference_state.do_dynamic_params_search:
return NO_VALUES
funcdef = function_value.tree_node
path = function_value.get_root_context().py__file__()
if path is not None and is_stdlib_path(path):
# We don't want to search for references in the stdlib. Usually people
@@ -215,12 +215,10 @@ def _check_name_for_execution(inference_state, context, compare_node, name, trai
for name, trailer in potential_nodes:
if value_node.start_pos < name.start_pos < value_node.end_pos:
random_context = execution_context.create_context(name)
iterator = _check_name_for_execution(
yield from _check_name_for_execution(
inference_state,
random_context,
compare_node,
name,
trailer
)
for arguments in iterator:
yield arguments
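Dynamic param lookup, gated above behind do_dynamic_params_search, is about inferring unannotated parameters from their call sites. A concrete example of the situation it handles:

def func(foo):
    return foo.upper()   # completing `foo.` requires knowing what callers pass

func("hello")            # call sites like this are what the search collects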

View File

@@ -3,23 +3,25 @@ Filters are objects that you can use to filter names in different scopes. They
are needed for name resolution.
"""
from abc import abstractmethod
from typing import List, MutableMapping, Type
import weakref
from parso.tree import search_ancestor
from parso.python.tree import Name, UsedNamesMapping
from jedi._compatibility import use_metaclass
from jedi.inference import flow_analysis
from jedi.inference.base_value import ValueSet, ValueWrapper, \
LazyValueWrapper
from jedi.parser_utils import get_cached_parent_scope
from jedi.parser_utils import get_cached_parent_scope, get_parso_cache_node
from jedi.inference.utils import to_list
from jedi.inference.names import TreeNameDefinition, ParamName, \
AnonymousParamName, AbstractNameDefinition
AnonymousParamName, AbstractNameDefinition, NameWrapper
_definition_name_cache: MutableMapping[UsedNamesMapping, List[Name]]
_definition_name_cache = weakref.WeakKeyDictionary()
class AbstractFilter(object):
class AbstractFilter:
_until_position = None
def _filter(self, names):
@@ -36,8 +38,8 @@ class AbstractFilter(object):
raise NotImplementedError
class FilterWrapper(object):
name_wrapper_class = None
class FilterWrapper:
name_wrapper_class: Type[NameWrapper]
def __init__(self, wrapped_filter):
self._wrapped_filter = wrapped_filter
@@ -52,11 +54,15 @@ class FilterWrapper(object):
return self.wrap_names(self._wrapped_filter.values())
def _get_definition_names(used_names, name_key):
def _get_definition_names(parso_cache_node, used_names, name_key):
if parso_cache_node is None:
names = used_names.get(name_key, ())
return tuple(name for name in names if name.is_definition(include_setitem=True))
try:
for_module = _definition_name_cache[used_names]
for_module = _definition_name_cache[parso_cache_node]
except KeyError:
for_module = _definition_name_cache[used_names] = {}
for_module = _definition_name_cache[parso_cache_node] = {}
try:
return for_module[name_key]
@@ -68,18 +74,40 @@ def _get_definition_names(used_names, name_key):
return result
class AbstractUsedNamesFilter(AbstractFilter):
class _AbstractUsedNamesFilter(AbstractFilter):
name_class = TreeNameDefinition
def __init__(self, parent_context, parser_scope):
self._parser_scope = parser_scope
self._module_node = self._parser_scope.get_root_node()
self._used_names = self._module_node.get_used_names()
def __init__(self, parent_context, node_context=None):
if node_context is None:
node_context = parent_context
self._node_context = node_context
self._parser_scope = node_context.tree_node
module_context = node_context.get_root_context()
# It is quite hacky that we have to use that. This is for caching
# certain things with a WeakKeyDictionary. However, parso intentionally
# uses slots (to save memory) and therefore we end up with having to
# have a weak reference to the object that caches the tree.
#
# Previously we have tried to solve this by using a weak reference onto
# used_names. However that also does not work, because it has a
# reference from the module, which itself is referenced by any node
# through parents.
path = module_context.py__file__()
if path is None:
# If the path is None, there is no guarantee that parso caches it.
self._parso_cache_node = None
else:
self._parso_cache_node = get_parso_cache_node(
module_context.inference_state.latest_grammar
if module_context.is_stub() else module_context.inference_state.grammar,
path
)
self._used_names = module_context.tree_node.get_used_names()
self.parent_context = parent_context
def get(self, name):
return self._convert_names(self._filter(
_get_definition_names(self._used_names, name),
_get_definition_names(self._parso_cache_node, self._used_names, name),
))
def _convert_names(self, names):
@@ -90,7 +118,7 @@ class AbstractUsedNamesFilter(AbstractFilter):
name
for name_key in self._used_names
for name in self._filter(
_get_definition_names(self._used_names, name_key),
_get_definition_names(self._parso_cache_node, self._used_names, name_key),
)
)
@@ -98,7 +126,7 @@ class AbstractUsedNamesFilter(AbstractFilter):
return '<%s: %s>' % (self.__class__.__name__, self.parent_context)
class ParserTreeFilter(AbstractUsedNamesFilter):
class ParserTreeFilter(_AbstractUsedNamesFilter):
def __init__(self, parent_context, node_context=None, until_position=None,
origin_scope=None):
"""
@@ -107,15 +135,12 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
value, but for some type inference it's important to have a local
value of the other classes.
"""
if node_context is None:
node_context = parent_context
super(ParserTreeFilter, self).__init__(parent_context, node_context.tree_node)
self._node_context = node_context
super().__init__(parent_context, node_context)
self._origin_scope = origin_scope
self._until_position = until_position
def _filter(self, names):
names = super(ParserTreeFilter, self)._filter(names)
names = super()._filter(names)
names = [n for n in names if self._is_name_reachable(n)]
return list(self._check_flows(names))
@@ -124,7 +149,7 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
if parent.type == 'trailer':
return False
base_node = parent if parent.type in ('classdef', 'funcdef') else name
return get_cached_parent_scope(self._used_names, base_node) == self._parser_scope
return get_cached_parent_scope(self._parso_cache_node, base_node) == self._parser_scope
def _check_flows(self, names):
for name in sorted(names, key=lambda name: name.start_pos, reverse=True):
@@ -143,7 +168,7 @@ class ParserTreeFilter(AbstractUsedNamesFilter):
class _FunctionExecutionFilter(ParserTreeFilter):
def __init__(self, parent_context, function_value, until_position, origin_scope):
super(_FunctionExecutionFilter, self).__init__(
super().__init__(
parent_context,
until_position=until_position,
origin_scope=origin_scope,
@@ -167,9 +192,9 @@ class _FunctionExecutionFilter(ParserTreeFilter):
class FunctionExecutionFilter(_FunctionExecutionFilter):
def __init__(self, *args, **kwargs):
self._arguments = kwargs.pop('arguments') # Python 2
super(FunctionExecutionFilter, self).__init__(*args, **kwargs)
def __init__(self, *args, arguments, **kwargs):
super().__init__(*args, **kwargs)
self._arguments = arguments
def _convert_param(self, param, name):
return ParamName(self._function_value, name, self._arguments)
@@ -180,7 +205,7 @@ class AnonymousFunctionExecutionFilter(_FunctionExecutionFilter):
return AnonymousParamName(self._function_value, name)
class GlobalNameFilter(AbstractUsedNamesFilter):
class GlobalNameFilter(_AbstractUsedNamesFilter):
def get(self, name):
try:
names = self._used_names[name]
@@ -230,7 +255,7 @@ class DictFilter(AbstractFilter):
return '<%s: for {%s}>' % (self.__class__.__name__, keys)
class MergedFilter(object):
class MergedFilter:
def __init__(self, *filters):
self._filters = filters
@@ -246,10 +271,10 @@ class MergedFilter(object):
class _BuiltinMappedMethod(ValueWrapper):
"""``Generator.__next__`` ``dict.values`` methods and so on."""
api_type = u'function'
api_type = 'function'
def __init__(self, value, method, builtin_func):
super(_BuiltinMappedMethod, self).__init__(builtin_func)
super().__init__(builtin_func)
self._value = value
self._method = method
@@ -264,14 +289,9 @@ class SpecialMethodFilter(DictFilter):
classes like Generator (for __next__, etc).
"""
class SpecialMethodName(AbstractNameDefinition):
api_type = u'function'
def __init__(self, parent_context, string_name, value, builtin_value):
callable_, python_version = value
if python_version is not None and \
python_version != parent_context.inference_state.environment.version_info.major:
raise KeyError
api_type = 'function'
def __init__(self, parent_context, string_name, callable_, builtin_value):
self.parent_context = parent_context
self.string_name = string_name
self._callable = callable_
@@ -293,7 +313,7 @@ class SpecialMethodFilter(DictFilter):
])
def __init__(self, value, dct, builtin_value):
super(SpecialMethodFilter, self).__init__(dct)
super().__init__(dct)
self.value = value
self._builtin_value = builtin_value
"""
@@ -309,7 +329,7 @@ class SpecialMethodFilter(DictFilter):
class _OverwriteMeta(type):
def __init__(cls, name, bases, dct):
super(_OverwriteMeta, cls).__init__(name, bases, dct)
super().__init__(name, bases, dct)
base_dct = {}
for base_cls in reversed(cls.__bases__):
@@ -326,28 +346,26 @@ class _OverwriteMeta(type):
cls.overwritten_methods = base_dct
class _AttributeOverwriteMixin(object):
class _AttributeOverwriteMixin:
def get_filters(self, *args, **kwargs):
yield SpecialMethodFilter(self, self.overwritten_methods, self._wrapped_value)
for filter in self._wrapped_value.get_filters(*args, **kwargs):
yield filter
yield from self._wrapped_value.get_filters(*args, **kwargs)
class LazyAttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin,
LazyValueWrapper)):
class LazyAttributeOverwrite(_AttributeOverwriteMixin, LazyValueWrapper,
metaclass=_OverwriteMeta):
def __init__(self, inference_state):
self.inference_state = inference_state
class AttributeOverwrite(use_metaclass(_OverwriteMeta, _AttributeOverwriteMixin,
ValueWrapper)):
class AttributeOverwrite(_AttributeOverwriteMixin, ValueWrapper,
metaclass=_OverwriteMeta):
pass
def publish_method(method_name, python_version_match=None):
def publish_method(method_name):
def decorator(func):
dct = func.__dict__.setdefault('registered_overwritten_methods', {})
dct[method_name] = func, python_version_match
dct[method_name] = func
return func
return decorator
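The long comment in _AbstractUsedNamesFilter.__init__ above explains why the definition-name cache is now keyed on parso's cache node instead of used_names. A minimal sketch of that caching shape, assuming a weak-referenceable key object (parso's slotted tree nodes would not qualify):

import weakref

class _CacheNode:
    # Stand-in for parso's cache node; the real object comes from
    # get_parso_cache_node() and lives as long as the parsed module.
    pass

_per_module_cache = weakref.WeakKeyDictionary()

def cached_definition_names(cache_node, used_names, name_key):
    # Per-module dict that vanishes when the cache node is garbage collected.
    per_module = _per_module_cache.setdefault(cache_node, {})
    try:
        return per_module[name_key]
    except KeyError:
        result = per_module[name_key] = tuple(used_names.get(name_key, ()))
        return result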

View File

@@ -1,12 +1,14 @@
from typing import Dict, Optional
from jedi.parser_utils import get_flow_branch_keyword, is_scope, get_parent_scope
from jedi.inference.recursion import execution_allowed
from jedi.inference.helpers import is_big_annoying_library
class Status(object):
lookup_table = {}
class Status:
lookup_table: Dict[Optional[bool], 'Status'] = {}
def __init__(self, value, name):
def __init__(self, value: Optional[bool], name: str) -> None:
self._value = value
self._name = name
Status.lookup_table[value] = self

View File

@@ -6,10 +6,10 @@ as annotations in future python versions.
"""
import re
from inspect import Parameter
from parso import ParserSyntaxError, parse
from jedi._compatibility import force_unicode, Parameter
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.base import DefineGenericBaseClass, GenericClass
@@ -53,8 +53,10 @@ def _infer_annotation_string(context, string, index=None):
value_set = context.infer_node(node)
if index is not None:
value_set = value_set.filter(
lambda value: value.array_type == u'tuple' # noqa
and len(list(value.py__iter__())) >= index
lambda value: (
value.array_type == 'tuple'
and len(list(value.py__iter__())) >= index
)
).py__simple_getitem__(index)
return value_set
@@ -62,7 +64,7 @@ def _infer_annotation_string(context, string, index=None):
def _get_forward_reference_node(context, string):
try:
new_node = context.inference_state.grammar.parse(
force_unicode(string),
string,
start_symbol='eval_input',
error_recovery=False
)
@@ -138,8 +140,7 @@ def _infer_param(function_value, param):
"""
annotation = param.annotation
if annotation is None:
# If no Python 3-style annotation, look for a Python 2-style comment
# annotation.
# If no Python 3-style annotation, look for a comment annotation.
# Identify parameters to function in the same sequence as they would
# appear in a type comment.
all_params = [child for child in param.parent.children
@@ -195,16 +196,47 @@ def py__annotations__(funcdef):
return dct
def resolve_forward_references(context, all_annotations):
def resolve(node):
if node is None or node.type != 'string':
return node
node = _get_forward_reference_node(
context,
context.inference_state.compiled_subprocess.safe_literal_eval(
node.value,
),
)
if node is None:
# There was a string, but it's not a valid annotation
return None
# The forward reference tree has an additional root node ('eval_input')
# that we don't want. Extract the node we do want, that is equivalent to
# the nodes returned by `py__annotations__` for a non-quoted node.
node = node.children[0]
return node
return {name: resolve(node) for name, node in all_annotations.items()}
@inference_state_method_cache()
def infer_return_types(function, arguments):
"""
Infers the type of a function's return value,
according to type annotations.
"""
all_annotations = py__annotations__(function.tree_node)
context = function.get_default_param_context()
all_annotations = resolve_forward_references(
context,
py__annotations__(function.tree_node),
)
annotation = all_annotations.get("return", None)
if annotation is None:
# If there is no Python 3-type annotation, look for a Python 2-type annotation
# If there is no Python 3-type annotation, look for an annotation
# comment.
node = function.tree_node
comment = parser_utils.get_following_comment_same_line(node)
if comment is None:
@@ -215,11 +247,10 @@ def infer_return_types(function, arguments):
return NO_VALUES
return _infer_annotation_string(
function.get_default_param_context(),
context,
match.group(1).strip()
).execute_annotation()
context = function.get_default_param_context()
unknown_type_vars = find_unknown_type_vars(context, annotation)
annotation_values = infer_annotation(context, annotation)
if not unknown_type_vars:
@@ -281,7 +312,8 @@ def infer_return_for_callable(arguments, param_values, result_values):
return ValueSet.from_sets(
v.define_generics(all_type_vars)
if isinstance(v, (DefineGenericBaseClass, TypeVar)) else ValueSet({v})
if isinstance(v, (DefineGenericBaseClass, TypeVar))
else ValueSet({v})
for v in result_values
).execute_annotation()
@@ -370,6 +402,10 @@ def find_type_from_comment_hint_for(context, node, name):
def find_type_from_comment_hint_with(context, node, name):
if len(node.children) > 4:
# In case there are multiple with_items, we do not want a type hint for
# now.
return []
assert len(node.children[1].children) == 3, \
"Can only be here when children[1] is 'foo() as f'"
varlist = node.children[1].children[2]
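resolve_forward_references above unquotes string annotations and re-parses them so later code can treat them like ordinary annotation nodes. A standalone approximation using parso directly (jedi itself routes the unquoting through safe_literal_eval in the compiled subprocess and uses its own grammar object):

from ast import literal_eval
import parso

def resolve_forward_reference(quoted_source):
    text = literal_eval(quoted_source)   # "'List[int]'" -> "List[int]"
    grammar = parso.load_grammar()
    # eval_input yields an expression tree; error recovery must be off for it.
    node = grammar.parse(text, start_symbol='eval_input', error_recovery=False)
    return node.children[0]              # unwrap the eval_input wrapper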

View File

@@ -25,8 +25,7 @@ class _BoundTypeVarName(AbstractNameDefinition):
# Replace any with the constraints if they are there.
from jedi.inference.gradual.typing import AnyClass
if isinstance(value, AnyClass):
for constraint in self._type_var.constraints:
yield constraint
yield from self._type_var.constraints
else:
yield value
return ValueSet(iter_())
@@ -38,7 +37,7 @@ class _BoundTypeVarName(AbstractNameDefinition):
return '<%s %s -> %s>' % (self.__class__.__name__, self.py__name__(), self._value_set)
class _TypeVarFilter(object):
class _TypeVarFilter:
"""
A filter for all given variables in a class.
@@ -70,11 +69,10 @@ class _TypeVarFilter(object):
class _AnnotatedClassContext(ClassContext):
def get_filters(self, *args, **kwargs):
filters = super(_AnnotatedClassContext, self).get_filters(
filters = super().get_filters(
*args, **kwargs
)
for f in filters:
yield f
yield from filters
# The type vars can only be looked up if it's a global search and
# not a direct lookup on the class.
@@ -148,6 +146,9 @@ class DefineGenericBaseClass(LazyValueWrapper):
) for class_set1, class_set2 in zip(given_params1, given_params2)
)
def get_signatures(self):
return []
def __repr__(self):
return '<%s: %s%s>' % (
self.__class__.__name__,
@@ -164,7 +165,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
my_foo_int_cls = Foo[int]
"""
def __init__(self, class_value, generics_manager):
super(GenericClass, self).__init__(generics_manager)
super().__init__(generics_manager)
self._class_value = class_value
def _get_wrapped_value(self):
@@ -186,7 +187,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
return _TypeVarFilter(self.get_generics(), self.list_type_vars())
def py__call__(self, arguments):
instance, = super(GenericClass, self).py__call__(arguments)
instance, = super().py__call__(arguments)
return ValueSet([_GenericInstanceWrapper(instance)])
def _as_context(self):
@@ -201,7 +202,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
return GenericClass(self._class_value, generics_manager)
def is_sub_class_of(self, class_value):
if super(GenericClass, self).is_sub_class_of(class_value):
if super().is_sub_class_of(class_value):
return True
return self._class_value.is_sub_class_of(class_value)
@@ -230,7 +231,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
else:
continue
if py_class.api_type != u'class':
if py_class.api_type != 'class':
# Functions & modules don't have an MRO and we're not
# expecting a Callable (those are handled separately within
# TypingClassValueWithIndex).
@@ -248,7 +249,7 @@ class GenericClass(DefineGenericBaseClass, ClassMixin):
return type_var_dict
class _LazyGenericBaseClass(object):
class _LazyGenericBaseClass:
def __init__(self, class_value, lazy_base_class, generics_manager):
self._class_value = class_value
self._lazy_base_class = lazy_base_class
@@ -309,7 +310,7 @@ class _GenericInstanceWrapper(ValueWrapper):
except IndexError:
pass
elif cls.py__name__() == 'Iterator':
return ValueSet([builtin_from_name(self.inference_state, u'None')])
return ValueSet([builtin_from_name(self.inference_state, 'None')])
return self._wrapped_value.py__stop_iteration_returns()
def get_type_hint(self, add_class_info=True):
@@ -326,10 +327,10 @@ class _PseudoTreeNameClass(Value):
this class. Essentially this class makes it possible to goto that `Tuple`
name, without affecting anything else negatively.
"""
api_type = u'class'
api_type = 'class'
def __init__(self, parent_context, tree_name):
super(_PseudoTreeNameClass, self).__init__(
super().__init__(
parent_context.inference_state,
parent_context
)
@@ -356,7 +357,7 @@ class _PseudoTreeNameClass(Value):
def py__class__(self):
# This might not be 100% correct, but it is good enough. The details of
# the typing library are not really an issue for Jedi.
return builtin_from_name(self.inference_state, u'type')
return builtin_from_name(self.inference_state, 'type')
@property
def name(self):
@@ -382,13 +383,16 @@ class BaseTypingValue(LazyValueWrapper):
def _get_wrapped_value(self):
return _PseudoTreeNameClass(self.parent_context, self._tree_name)
def get_signatures(self):
return self._wrapped_value.get_signatures()
def __repr__(self):
return '%s(%s)' % (self.__class__.__name__, self._tree_name.value)
class BaseTypingClassWithGenerics(DefineGenericBaseClass):
def __init__(self, parent_context, tree_name, generics_manager):
super(BaseTypingClassWithGenerics, self).__init__(generics_manager)
super().__init__(generics_manager)
self.inference_state = parent_context.inference_state
self.parent_context = parent_context
self._tree_name = tree_name
@@ -415,12 +419,15 @@ class BaseTypingInstance(LazyValueWrapper):
def get_annotated_class_object(self):
return self._class_value
def get_qualified_names(self):
return (self.py__name__(),)
@property
def name(self):
return ValueName(self, self._tree_name)
def _get_wrapped_value(self):
object_, = builtin_from_name(self.inference_state, u'object').execute_annotation()
object_, = builtin_from_name(self.inference_state, 'object').execute_annotation()
return object_
def __repr__(self):

View File

@@ -18,6 +18,7 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):
was_instance = stub_value.is_instance()
if was_instance:
arguments = getattr(stub_value, '_arguments', None)
stub_value = stub_value.py__class__()
qualified_names = stub_value.get_qualified_names()
@@ -30,11 +31,12 @@ def _stub_to_python_value_set(stub_value, ignore_compiled=False):
method_name = qualified_names[-1]
qualified_names = qualified_names[:-1]
was_instance = True
arguments = None
values = _infer_from_stub(stub_module_context, qualified_names, ignore_compiled)
if was_instance:
values = ValueSet.from_sets(
c.execute_with_values()
c.execute_with_values() if arguments is None else c.execute(arguments)
for c in values
if c.is_class()
)
@@ -133,8 +135,7 @@ def _python_to_stub_names(names, fallback_to_python=False):
if converted:
converted_names = converted.goto(name.get_public_name())
if converted_names:
for n in converted_names:
yield n
yield from converted_names
continue
if fallback_to_python:
# This is the part where if we haven't found anything, just return

View File

@@ -23,7 +23,7 @@ def _resolve_forward_references(context, value_set):
yield value
class _AbstractGenericManager(object):
class _AbstractGenericManager:
def get_index_and_execute(self, index):
try:
return self[index].execute_annotation()

View File

@@ -10,7 +10,7 @@ class StubModuleValue(ModuleValue):
_module_name_class = StubModuleName
def __init__(self, non_stub_value_set, *args, **kwargs):
super(StubModuleValue, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.non_stub_value_set = non_stub_value_set
def is_stub(self):
@@ -30,7 +30,7 @@ class StubModuleValue(ModuleValue):
pass
else:
names.update(method())
names.update(super(StubModuleValue, self).sub_modules_dict())
names.update(super().sub_modules_dict())
return names
def _get_stub_filters(self, origin_scope):
@@ -40,14 +40,11 @@ class StubModuleValue(ModuleValue):
)] + list(self.iter_star_filters())
def get_filters(self, origin_scope=None):
filters = super(StubModuleValue, self).get_filters(origin_scope)
filters = super().get_filters(origin_scope)
next(filters, None) # Ignore the first filter and replace it with our own
stub_filters = self._get_stub_filters(origin_scope=origin_scope)
for f in stub_filters:
yield f
for f in filters:
yield f
yield from stub_filters
yield from filters
def _as_context(self):
return StubModuleContext(self)
@@ -57,17 +54,16 @@ class StubModuleContext(ModuleContext):
def get_filters(self, until_position=None, origin_scope=None):
# Make sure to ignore the position, because positions are not relevant
# for stubs.
return super(StubModuleContext, self).get_filters(origin_scope=origin_scope)
return super().get_filters(origin_scope=origin_scope)
class TypingModuleWrapper(StubModuleValue):
def get_filters(self, *args, **kwargs):
filters = super(TypingModuleWrapper, self).get_filters(*args, **kwargs)
filters = super().get_filters(*args, **kwargs)
f = next(filters, None)
assert f is not None
yield TypingModuleFilterWrapper(f)
for f in filters:
yield f
yield from filters
def _as_context(self):
return TypingModuleContext(self)
@@ -75,22 +71,23 @@ class TypingModuleWrapper(StubModuleValue):
class TypingModuleContext(ModuleContext):
def get_filters(self, *args, **kwargs):
filters = super(TypingModuleContext, self).get_filters(*args, **kwargs)
filters = super().get_filters(*args, **kwargs)
yield TypingModuleFilterWrapper(next(filters, None))
for f in filters:
yield f
yield from filters
class StubFilter(ParserTreeFilter):
name_class = StubName
def _is_name_reachable(self, name):
if not super(StubFilter, self)._is_name_reachable(name):
if not super()._is_name_reachable(name):
return False
# Imports in stub files are only public if they have an "as"
# export.
definition = name.get_definition()
if definition is None:
return False
if definition.type in ('import_from', 'import_name'):
if name.parent.type not in ('import_as_name', 'dotted_as_name'):
return False
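The StubFilter change above enforces the PEP 484 stub convention: a name imported in a stub is only public when it is re-exported with an explicit "as". In .pyi terms:

#   # foo.pyi
#   from os import path            # private to the stub: `foo.path` is hidden
#   from os import path as path    # re-exported: `foo.path` is public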

View File

@@ -1,10 +1,9 @@
from jedi._compatibility import unicode, force_unicode
from jedi import debug
from jedi.inference.base_value import ValueSet, NO_VALUES, ValueWrapper
from jedi.inference.gradual.base import BaseTypingValue
class TypeVarClass(BaseTypingValue):
class TypeVarClass(ValueWrapper):
def py__call__(self, arguments):
unpacked = arguments.unpack()
@@ -18,9 +17,9 @@ class TypeVarClass(BaseTypingValue):
return ValueSet([TypeVar.create_cached(
self.inference_state,
self.parent_context,
self._tree_name,
var_name,
unpacked
tree_name=self.tree_node.name,
var_name=var_name,
unpacked_args=unpacked,
)])
def _find_string_name(self, lazy_value):
@@ -40,17 +39,14 @@ class TypeVarClass(BaseTypingValue):
return None
else:
safe_value = method(default=None)
if self.inference_state.environment.version_info.major == 2:
if isinstance(safe_value, bytes):
return force_unicode(safe_value)
if isinstance(safe_value, (str, unicode)):
if isinstance(safe_value, str):
return safe_value
return None
class TypeVar(BaseTypingValue):
def __init__(self, parent_context, tree_name, var_name, unpacked_args):
super(TypeVar, self).__init__(parent_context, tree_name)
super().__init__(parent_context, tree_name)
self._var_name = var_name
self._constraints_lazy_values = []
@@ -102,7 +98,7 @@ class TypeVar(BaseTypingValue):
else:
if found:
return found
return self._get_classes() or ValueSet({self})
return ValueSet({self})
def execute_annotation(self):
return self._get_classes().execute_annotation()
@@ -124,7 +120,7 @@ class TypeVar(BaseTypingValue):
class TypeWrapper(ValueWrapper):
def __init__(self, wrapped_value, original_value):
super(TypeWrapper, self).__init__(wrapped_value)
super().__init__(wrapped_value)
self._original_value = original_value
def execute_annotation(self):

View File

@@ -1,77 +1,79 @@
import os
import re
from functools import wraps
from collections import namedtuple
from typing import Dict, Mapping, Tuple
from pathlib import Path
from jedi import settings
from jedi.file_io import FileIO
from jedi._compatibility import FileNotFoundError, cast_path
from jedi.parser_utils import get_cached_code_lines
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.stub_value import TypingModuleWrapper, StubModuleValue
from jedi.inference.value import ModuleValue
_jedi_path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
TYPESHED_PATH = os.path.join(_jedi_path, 'third_party', 'typeshed')
DJANGO_INIT_PATH = os.path.join(_jedi_path, 'third_party', 'django-stubs',
'django-stubs', '__init__.pyi')
_jedi_path = Path(__file__).parent.parent.parent
TYPESHED_PATH = _jedi_path.joinpath('third_party', 'typeshed')
DJANGO_INIT_PATH = _jedi_path.joinpath('third_party', 'django-stubs',
'django-stubs', '__init__.pyi')
_IMPORT_MAP = dict(
_collections='collections',
_socket='socket',
)
PathInfo = namedtuple('PathInfo', 'path is_third_party')
def _merge_create_stub_map(directories):
def _merge_create_stub_map(path_infos):
map_ = {}
for directory in directories:
map_.update(_create_stub_map(directory))
for directory_path_info in path_infos:
map_.update(_create_stub_map(directory_path_info))
return map_
def _create_stub_map(directory):
def _create_stub_map(directory_path_info):
"""
Create a mapping of an importable name in Python to a stub file.
"""
def generate():
try:
listed = os.listdir(directory)
except (FileNotFoundError, OSError):
# OSError is Python 2
listed = os.listdir(directory_path_info.path)
except (FileNotFoundError, NotADirectoryError):
return
for entry in listed:
entry = cast_path(entry)
path = os.path.join(directory, entry)
path = os.path.join(directory_path_info.path, entry)
if os.path.isdir(path):
init = os.path.join(path, '__init__.pyi')
if os.path.isfile(init):
yield entry, init
yield entry, PathInfo(init, directory_path_info.is_third_party)
elif entry.endswith('.pyi') and os.path.isfile(path):
name = entry[:-4]
if name != '__init__':
yield name, path
yield name, PathInfo(path, directory_path_info.is_third_party)
# Create a dictionary from the tuple generator.
return dict(generate())
def _get_typeshed_directories(version_info):
check_version_list = ['2and3', str(version_info.major)]
check_version_list = ['2and3', '3']
for base in ['stdlib', 'third_party']:
base = os.path.join(TYPESHED_PATH, base)
base_list = os.listdir(base)
base_path = TYPESHED_PATH.joinpath(base)
base_list = os.listdir(base_path)
for base_list_entry in base_list:
match = re.match(r'(\d+)\.(\d+)$', base_list_entry)
if match is not None:
if int(match.group(1)) == version_info.major \
and int(match.group(2)) <= version_info.minor:
if match.group(1) == '3' and int(match.group(2)) <= version_info.minor:
check_version_list.append(base_list_entry)
for check_version in check_version_list:
yield os.path.join(base, check_version)
is_third_party = base != 'stdlib'
yield PathInfo(str(base_path.joinpath(check_version)), is_third_party)
_version_cache = {}
_version_cache: Dict[Tuple[int, int], Mapping[str, PathInfo]] = {}
def _cache_stub_file_map(version_info):
@@ -107,7 +109,7 @@ def import_module_decorator(func):
# ``os``.
python_value_set = ValueSet.from_sets(
func(inference_state, (n,), None, sys_path,)
for n in [u'posixpath', u'ntpath', u'macpath', u'os2emxpath']
for n in ['posixpath', 'ntpath', 'macpath', 'os2emxpath']
)
else:
python_value_set = ValueSet.from_sets(
@@ -116,7 +118,7 @@ def import_module_decorator(func):
)
inference_state.module_cache.add(import_names, python_value_set)
if not prefer_stubs:
if not prefer_stubs or import_names[0] in settings.auto_import_modules:
return python_value_set
stub = try_to_load_stub_cached(inference_state, import_names, python_value_set,
@@ -165,7 +167,6 @@ def _try_to_load_stub(inference_state, import_names, python_value_set,
if len(import_names) == 1:
# foo-stubs
for p in sys_path:
p = cast_path(p)
init = os.path.join(p, *import_names) + '-stubs' + os.path.sep + '__init__.pyi'
m = _try_to_load_stub_from_file(
inference_state,
@@ -175,11 +176,11 @@ def _try_to_load_stub(inference_state, import_names, python_value_set,
)
if m is not None:
return m
if import_names[0] == 'django':
if import_names[0] == 'django' and python_value_set:
return _try_to_load_stub_from_file(
inference_state,
python_value_set,
file_io=FileIO(DJANGO_INIT_PATH),
file_io=FileIO(str(DJANGO_INIT_PATH)),
import_names=import_names,
)
@@ -194,8 +195,8 @@ def _try_to_load_stub(inference_state, import_names, python_value_set,
file_paths = []
if c.is_namespace():
file_paths = [os.path.join(p, '__init__.pyi') for p in c.py__path__()]
elif file_path is not None and file_path.endswith('.py'):
file_paths = [file_path + 'i']
elif file_path is not None and file_path.suffix == '.py':
file_paths = [str(file_path) + 'i']
for file_path in file_paths:
m = _try_to_load_stub_from_file(
@@ -249,16 +250,20 @@ def _load_from_typeshed(inference_state, python_value_set, parent_module_value,
# Only if it's a package (= a folder) something can be
# imported.
return None
path = parent_module_value.py__path__()
map_ = _merge_create_stub_map(path)
paths = parent_module_value.py__path__()
# Once the initial package has been loaded, the sub packages will
# always be loaded, regardless if they are there or not. This makes
# sense, IMO, because stubs take preference, even if the original
# library doesn't provide a module (it could be dynamic). ~dave
map_ = _merge_create_stub_map([PathInfo(p, is_third_party=False) for p in paths])
if map_ is not None:
path = map_.get(import_name)
if path is not None:
path_info = map_.get(import_name)
if path_info is not None and (not path_info.is_third_party or python_value_set):
return _try_to_load_stub_from_file(
inference_state,
python_value_set,
file_io=FileIO(path),
file_io=FileIO(path_info.path),
import_names=import_names,
)
@@ -266,13 +271,13 @@ def _load_from_typeshed(inference_state, python_value_set, parent_module_value,
def _try_to_load_stub_from_file(inference_state, python_value_set, file_io, import_names):
try:
stub_module_node = parse_stub_module(inference_state, file_io)
except (OSError, IOError): # IOError is Python 2 only
except OSError:
# The file that you're looking for doesn't exist (anymore).
return None
else:
return create_stub_module(
inference_state, python_value_set, stub_module_node, file_io,
import_names
inference_state, inference_state.latest_grammar, python_value_set,
stub_module_node, file_io, import_names
)
@@ -286,7 +291,8 @@ def parse_stub_module(inference_state, file_io):
)
def create_stub_module(inference_state, python_value_set, stub_module_node, file_io, import_names):
def create_stub_module(inference_state, grammar, python_value_set,
stub_module_node, file_io, import_names):
if import_names == ('typing',):
module_cls = TypingModuleWrapper
else:
@@ -298,7 +304,7 @@ def create_stub_module(inference_state, python_value_set, stub_module_node, file
string_names=import_names,
# The code was loaded with latest_grammar, so use
# that.
code_lines=get_cached_code_lines(inference_state.latest_grammar, file_io.path),
code_lines=get_cached_code_lines(grammar, file_io.path),
is_package=file_name == '__init__.pyi',
)
return stub_module_value
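_create_stub_map above (together with PathInfo) builds the name-to-stub-file mapping that typeshed lookups use. A simplified standalone version, without the third-party flag or the PathInfo wrapper:

import os

def create_stub_map(directory):
    # Map importable names to .pyi paths: packages via __init__.pyi,
    # plain modules via <name>.pyi.
    mapping = {}
    try:
        entries = os.listdir(directory)
    except (FileNotFoundError, NotADirectoryError):
        return mapping
    for entry in entries:
        path = os.path.join(directory, entry)
        init = os.path.join(path, '__init__.pyi')
        if os.path.isdir(path) and os.path.isfile(init):
            mapping[entry] = init
        elif entry.endswith('.pyi') and entry != '__init__.pyi':
            mapping[entry[:-4]] = path
    return mapping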

View File

@@ -7,11 +7,10 @@ This file deals with all the typing.py cases.
"""
import itertools
from jedi._compatibility import unicode
from jedi import debug
from jedi.inference.compiled import builtin_from_name, create_simple_object
from jedi.inference.base_value import ValueSet, NO_VALUES, Value, \
LazyValueWrapper
LazyValueWrapper, ValueWrapper
from jedi.inference.lazy_value import LazyKnownValues
from jedi.inference.arguments import repack_with_argument_clinic
from jedi.inference.filters import FilterWrapper
@@ -33,7 +32,7 @@ _TYPE_ALIAS_TYPES = {
'DefaultDict': 'collections.defaultdict',
'Deque': 'collections.deque',
}
_PROXY_TYPES = 'Optional Union ClassVar'.split()
_PROXY_TYPES = 'Optional Union ClassVar Annotated'.split()
class TypingModuleName(NameWrapper):
@@ -64,37 +63,34 @@ class TypingModuleName(NameWrapper):
# have any effects there (because it's never executed).
return
elif name == 'TypeVar':
yield TypeVarClass.create_cached(
inference_state, self.parent_context, self.tree_name)
cls, = self._wrapped_name.infer()
yield TypeVarClass.create_cached(inference_state, cls)
elif name == 'Any':
yield AnyClass.create_cached(
inference_state, self.parent_context, self.tree_name)
elif name == 'TYPE_CHECKING':
# This is needed for e.g. imports that are only available for type
# checking or are in cycles. The user can then check this variable.
yield builtin_from_name(inference_state, u'True')
yield builtin_from_name(inference_state, 'True')
elif name == 'overload':
yield OverloadFunction.create_cached(
inference_state, self.parent_context, self.tree_name)
elif name == 'NewType':
yield NewTypeFunction.create_cached(
inference_state, self.parent_context, self.tree_name)
v, = self._wrapped_name.infer()
yield NewTypeFunction.create_cached(inference_state, v)
elif name == 'cast':
yield CastFunction.create_cached(
inference_state, self.parent_context, self.tree_name)
cast_fn, = self._wrapped_name.infer()
yield CastFunction.create_cached(inference_state, cast_fn)
elif name == 'TypedDict':
# TODO doesn't even exist in typeshed/typing.py, yet. But will be
# added soon.
yield TypedDictClass.create_cached(
inference_state, self.parent_context, self.tree_name)
elif name in ('no_type_check', 'no_type_check_decorator'):
# This is not necessary, as long as we are not doing type checking.
for c in self._wrapped_name.infer(): # Fuck my life Python 2
yield c
else:
# Everything else shouldn't be relevant for type checking.
for c in self._wrapped_name.infer(): # Fuck my life Python 2
yield c
# Not necessary, as long as we are not doing type checking:
# no_type_check & no_type_check_decorator
# Everything else shouldn't be relevant...
yield from self._wrapped_name.infer()
class TypingModuleFilterWrapper(FilterWrapper):
@@ -113,11 +109,11 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
# Optional is basically just saying it's either None or the actual
# type.
return self.gather_annotation_classes().execute_annotation() \
| ValueSet([builtin_from_name(self.inference_state, u'None')])
| ValueSet([builtin_from_name(self.inference_state, 'None')])
elif string_name == 'Type':
# The type is actually already given in the index_value
return self._generics_manager[0]
elif string_name == 'ClassVar':
elif string_name in ['ClassVar', 'Annotated']:
# For now don't do anything here, ClassVars are always used.
return self._generics_manager[0].execute_annotation()
@@ -156,7 +152,7 @@ class ProxyWithGenerics(BaseTypingClassWithGenerics):
# Optional[T] is equivalent to Union[T, None]. In Jedi unions
# are represented by members within a ValueSet, so we extract
# the T from the Optional[T] by removing the None value.
none = builtin_from_name(self.inference_state, u'None')
none = builtin_from_name(self.inference_state, 'None')
return annotation_generics[0].infer_type_vars(
value_set.filter(lambda x: x != none),
)
@@ -263,8 +259,6 @@ class TypeAlias(LazyValueWrapper):
def _get_wrapped_value(self):
module_name, class_name = self._actual.split('.')
if self.inference_state.environment.version_info.major == 2 and module_name == 'builtins':
module_name = '__builtin__'
# TODO use inference_state.import_module?
from jedi.inference.imports import Importer
@@ -280,6 +274,9 @@ class TypeAlias(LazyValueWrapper):
def gather_annotation_classes(self):
return ValueSet([self._get_wrapped_value()])
def get_signatures(self):
return []
class Callable(BaseTypingInstance):
def py__call__(self, arguments):
@@ -297,6 +294,9 @@ class Callable(BaseTypingInstance):
from jedi.inference.gradual.annotation import infer_return_for_callable
return infer_return_for_callable(arguments, param_values, result_values)
def py__get__(self, instance, class_value):
return ValueSet([self])
class Tuple(BaseTypingInstance):
def _is_homogenous(self):
@@ -400,7 +400,7 @@ class OverloadFunction(BaseTypingValue):
return func_value_set
class NewTypeFunction(BaseTypingValue):
class NewTypeFunction(ValueWrapper):
def py__call__(self, arguments):
ordered_args = arguments.unpack()
next(ordered_args, (None, None))
@@ -418,7 +418,7 @@ class NewTypeFunction(BaseTypingValue):
class NewType(Value):
def __init__(self, inference_state, parent_context, tree_node, type_value_set):
super(NewType, self).__init__(inference_state, parent_context)
super().__init__(inference_state, parent_context)
self._type_value_set = type_value_set
self.tree_node = tree_node
@@ -434,8 +434,11 @@ class NewType(Value):
from jedi.inference.compiled.value import CompiledValueName
return CompiledValueName(self, 'NewType')
def __repr__(self) -> str:
return '<NewType: %s>%s' % (self.tree_node, self._type_value_set)
class CastFunction(BaseTypingValue):
class CastFunction(ValueWrapper):
@repack_with_argument_clinic('type, object, /')
def py__call__(self, type_value_set, object_value_set):
return type_value_set.execute_annotation()
@@ -461,7 +464,7 @@ class TypedDict(LazyValueWrapper):
return ValueName(self, self.tree_node.name)
def py__simple_getitem__(self, index):
if isinstance(index, unicode):
if isinstance(index, str):
return ValueSet.from_sets(
name.infer()
for filter in self._definition_class.get_filters(is_instance=True)


@@ -1,29 +1,34 @@
import os
from pathlib import Path
from jedi.inference.gradual.typeshed import TYPESHED_PATH, create_stub_module
def load_proper_stub_module(inference_state, file_io, import_names, module_node):
def load_proper_stub_module(inference_state, grammar, file_io, import_names, module_node):
"""
This function is given a random .pyi file and should return the proper
module.
"""
path = file_io.path
assert path.endswith('.pyi')
if path.startswith(TYPESHED_PATH):
# /foo/stdlib/3/os/__init__.pyi -> stdlib/3/os/__init__
rest = path[len(TYPESHED_PATH) + 1: -4]
split_paths = tuple(rest.split(os.path.sep))
# Remove the stdlib/3 or third_party/3.5 part
import_names = split_paths[2:]
if import_names[-1] == '__init__':
path = Path(path)
assert path.suffix == '.pyi'
try:
relative_path = path.relative_to(TYPESHED_PATH)
except ValueError:
pass
else:
# /[...]/stdlib/3/os/__init__.pyi -> stdlib/3/os/__init__
rest = relative_path.with_suffix('')
# Remove the stdlib/3 or third_party/3.6 part
import_names = rest.parts[2:]
if rest.name == '__init__':
import_names = import_names[:-1]
if import_names is not None:
actual_value_set = inference_state.import_module(import_names, prefer_stubs=False)
stub = create_stub_module(
inference_state, actual_value_set, module_node, file_io, import_names
inference_state, grammar, actual_value_set,
module_node, file_io, import_names
)
inference_state.stub_module_cache[import_names] = stub
return stub
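
A minimal sketch of the import-name derivation above, assuming a hypothetical typeshed-relative path (the real code obtains it via path.relative_to(TYPESHED_PATH)):

    from pathlib import Path

    # Hypothetical result of path.relative_to(TYPESHED_PATH).
    relative_path = Path('stdlib/3/os/path.pyi')

    rest = relative_path.with_suffix('')   # stdlib/3/os/path
    import_names = rest.parts[2:]          # drop the 'stdlib/3' prefix
    if rest.name == '__init__':
        import_names = import_names[:-1]

    print(import_names)                    # ('os', 'path')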


@@ -7,18 +7,17 @@ from contextlib import contextmanager
from parso.python import tree
from jedi._compatibility import unicode
def is_stdlib_path(path):
# Python standard library paths look like this:
# /usr/lib/python3.5/...
# /usr/lib/python3.9/...
# TODO The implementation below is probably incorrect and not complete.
if 'dist-packages' in path or 'site-packages' in path:
parts = path.parts
if 'dist-packages' in parts or 'site-packages' in parts:
return False
base_path = os.path.join(sys.prefix, 'lib', 'python')
return bool(re.match(re.escape(base_path) + r'\d.\d', path))
return bool(re.match(re.escape(base_path) + r'\d.\d', str(path)))
def deep_ast_copy(obj):
@@ -122,11 +121,7 @@ def get_names_of_node(node):
def is_string(value):
if value.inference_state.environment.version_info.major == 2:
str_classes = (unicode, bytes)
else:
str_classes = (unicode,)
return value.is_compiled() and isinstance(value.get_safe_value(default=None), str_classes)
return value.is_compiled() and isinstance(value.get_safe_value(default=None), str)
def is_literal(value):
@@ -144,7 +139,7 @@ def get_int_or_none(value):
def get_str_or_none(value):
return _get_safe_value_or_none(value, (bytes, unicode))
return _get_safe_value_or_none(value, str)
def is_number(value):


@@ -5,18 +5,15 @@ not any actual importing done. This module is about finding modules in the
filesystem. This can be quite tricky sometimes, because Python imports are not
always that simple.
This module uses imp for python up to 3.2 and importlib for python 3.3 on; the
correct implementation is delegated to _compatibility.
This module also supports import autocompletion, which means to complete
statements like ``from datetim`` (cursor at the end would return ``datetime``).
"""
import os
from pathlib import Path
from parso.python import tree
from parso.tree import search_ancestor
from jedi._compatibility import ImplicitNSInfo, force_unicode, FileNotFoundError
from jedi import debug
from jedi import settings
from jedi.file_io import FolderIO
@@ -31,10 +28,11 @@ from jedi.inference.names import ImportName, SubModuleName
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.gradual.typeshed import import_module_decorator, \
create_stub_module, parse_stub_module
from jedi.inference.compiled.subprocess.functions import ImplicitNSInfo
from jedi.plugins import plugin_manager
class ModuleCache(object):
class ModuleCache:
def __init__(self):
self._name_cache = {}
@@ -152,7 +150,7 @@ def _level_to_base_import_path(project_path, directory, level):
return None, directory
class Importer(object):
class Importer:
def __init__(self, inference_state, import_path, module_context, level=0):
"""
An implementation similar to ``__import__``. Use `follow`
@@ -192,7 +190,7 @@ class Importer(object):
import_path = base + tuple(import_path)
else:
path = module_context.py__file__()
project_path = self._inference_state.project._path
project_path = self._inference_state.project.path
import_path = list(import_path)
if path is None:
# If no path is defined, our best guess is that the current
@@ -211,7 +209,7 @@ class Importer(object):
# somewhere out of the filesystem.
self._infer_possible = False
else:
self._fixed_sys_path = [force_unicode(base_directory)]
self._fixed_sys_path = [base_directory]
if base_import_path is None:
if import_path:
@@ -240,11 +238,49 @@ class Importer(object):
# inference we want to show the user as much as possible.
# See GH #1446.
self._inference_state.get_sys_path(add_init_paths=not is_completion)
+ sys_path.check_sys_path_modifications(self._module_context)
+ [
str(p) for p
in sys_path.check_sys_path_modifications(self._module_context)
]
)
def follow(self):
if not self.import_path or not self._infer_possible:
if not self.import_path:
if self._fixed_sys_path:
# This is a bit of a special case, that maybe should be
# revisited. If the project path is wrong or the user uses
# relative imports the wrong way, we might end up here, where
# the `fixed_sys_path == project.path` in that case we kind of
# use the project.path.parent directory as our path. This is
# usually not a problem, except if imports in other places are
# using the same names. Example:
#
# foo/ < #1
# - setup.py
# - foo/ < #2
# - __init__.py
# - foo.py < #3
#
# If the top foo is our project folder and somebody uses
# `from . import foo` in `setup.py`, it will resolve to foo #2,
# which means that the import for foo.foo is cached as
# `__init__.py` (#2) and not as `foo.py` (#3). This is usually
# not an issue, because this case is probably pretty rare, but
# might be an issue for some people.
#
# However for most normal cases where we work with different
# file names, this code path hits where we basically change the
# project path to an ancestor of project path.
from jedi.inference.value.namespace import ImplicitNamespaceValue
import_path = (os.path.basename(self._fixed_sys_path[0]),)
ns = ImplicitNamespaceValue(
self._inference_state,
string_names=import_path,
paths=self._fixed_sys_path,
)
return ValueSet({ns})
return NO_VALUES
if not self._infer_possible:
return NO_VALUES
# Check caches first
@@ -303,7 +339,7 @@ class Importer(object):
values = self.follow()
for value in values:
# Non-modules are not completable.
if value.api_type != 'module': # not a module
if value.api_type not in ('module', 'namespace'): # not a module
continue
if not value.is_compiled():
# sub_modules_dict is not implemented for compiled modules.
@@ -332,7 +368,7 @@ def import_module_by_names(inference_state, import_names, sys_path=None,
sys_path = inference_state.get_sys_path()
str_import_names = tuple(
force_unicode(i.value if isinstance(i, tree.Name) else i)
i.value if isinstance(i, tree.Name) else i
for i in import_names
)
value_set = [None]
@@ -386,20 +422,13 @@ def import_module(inference_state, import_names, parent_module_value, sys_path):
# The module might not be a package.
return NO_VALUES
for path in paths:
# At the moment we are only using one path. So this is
# not important to be correct.
if not isinstance(path, list):
path = [path]
file_io_or_ns, is_pkg = inference_state.compiled_subprocess.get_module_info(
string=import_names[-1],
path=path,
full_name=module_name,
is_global_search=False,
)
if is_pkg is not None:
break
else:
file_io_or_ns, is_pkg = inference_state.compiled_subprocess.get_module_info(
string=import_names[-1],
path=paths,
full_name=module_name,
is_global_search=False,
)
if is_pkg is None:
return NO_VALUES
if isinstance(file_io_or_ns, ImplicitNSInfo):
@@ -470,19 +499,19 @@ def load_module_from_path(inference_state, file_io, import_names=None, is_packag
here to ensure that a random path is still properly loaded into the Jedi
module structure.
"""
path = file_io.path
path = Path(file_io.path)
if import_names is None:
e_sys_path = inference_state.get_sys_path()
import_names, is_package = sys_path.transform_path_to_dotted(e_sys_path, path)
else:
assert isinstance(is_package, bool)
is_stub = file_io.path.endswith('.pyi')
is_stub = path.suffix == '.pyi'
if is_stub:
folder_io = file_io.get_parent_folder()
if folder_io.path.endswith('-stubs'):
folder_io = FolderIO(folder_io.path[:-6])
if file_io.path.endswith('__init__.pyi'):
if path.name == '__init__.pyi':
python_file_io = folder_io.get_file_io('__init__.py')
else:
python_file_io = folder_io.get_file_io(import_names[-1] + '.py')
@@ -497,8 +526,8 @@ def load_module_from_path(inference_state, file_io, import_names=None, is_packag
values = NO_VALUES
return create_stub_module(
inference_state, values, parse_stub_module(inference_state, file_io),
file_io, import_names
inference_state, inference_state.latest_grammar, values,
parse_stub_module(inference_state, file_io), file_io, import_names
)
else:
module = _load_python_module(
@@ -513,7 +542,7 @@ def load_module_from_path(inference_state, file_io, import_names=None, is_packag
def load_namespace_from_path(inference_state, folder_io):
import_names, is_package = sys_path.transform_path_to_dotted(
inference_state.get_sys_path(),
folder_io.path
Path(folder_io.path)
)
from jedi.inference.value.namespace import ImplicitNamespaceValue
return ImplicitNamespaceValue(inference_state, import_names, [folder_io.path])


@@ -2,7 +2,7 @@ from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.common import monkeypatch
class AbstractLazyValue(object):
class AbstractLazyValue:
def __init__(self, data, min=1, max=1):
self.data = data
self.min = min
@@ -29,7 +29,7 @@ class LazyKnownValues(AbstractLazyValue):
class LazyUnknownValue(AbstractLazyValue):
def __init__(self, min=1, max=1):
super(LazyUnknownValue, self).__init__(None, min, max)
super().__init__(None, min, max)
def infer(self):
return NO_VALUES
@@ -37,7 +37,7 @@ class LazyUnknownValue(AbstractLazyValue):
class LazyTreeValue(AbstractLazyValue):
def __init__(self, context, node, min=1, max=1):
super(LazyTreeValue, self).__init__(node, min, max)
super().__init__(node, min, max)
self.context = context
# We need to save the predefined names. It's an unfortunate side effect
# that needs to be tracked otherwise results will be wrong.


@@ -1,11 +1,13 @@
from abc import abstractmethod
from inspect import Parameter
from typing import Optional, Tuple
from parso.tree import search_ancestor
from jedi._compatibility import Parameter
from jedi.parser_utils import find_statement_documentation, clean_scope_docstring
from jedi.inference.utils import unite
from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.inference.cache import inference_state_method_cache
from jedi.inference import docstrings
from jedi.cache import memoize_method
from jedi.inference.helpers import deep_ast_copy, infer_call_of_leaf
@@ -23,9 +25,9 @@ def _merge_name_docs(names):
return doc
class AbstractNameDefinition(object):
start_pos = None
string_name = None
class AbstractNameDefinition:
start_pos: Optional[Tuple[int, int]] = None
string_name: str
parent_context = None
tree_name = None
is_value_name = True
@@ -123,7 +125,7 @@ class AbstractTreeName(AbstractNameDefinition):
else:
return None
return super(AbstractTreeName, self).get_qualified_names(include_module_names)
return super().get_qualified_names(include_module_names)
def _get_qualified_names(self):
parent_names = self.parent_context.get_qualified_names()
@@ -223,7 +225,7 @@ class AbstractTreeName(AbstractNameDefinition):
return self.tree_name.start_pos
class ValueNameMixin(object):
class ValueNameMixin:
def infer(self):
return ValueSet([self._value])
@@ -242,11 +244,11 @@ class ValueNameMixin(object):
def get_root_context(self):
if self.parent_context is None: # A module
return self._value.as_context()
return super(ValueNameMixin, self).get_root_context()
return super().get_root_context()
def get_defining_qualified_value(self):
context = self.parent_context
if context.is_module() or context.is_class():
if context is not None and (context.is_module() or context.is_class()):
return self.parent_context.get_value() # Might be None
return None
@@ -257,7 +259,7 @@ class ValueNameMixin(object):
class ValueName(ValueNameMixin, AbstractTreeName):
def __init__(self, value, tree_name):
super(ValueName, self).__init__(value.parent_context, tree_name)
super().__init__(value.parent_context, tree_name)
self._value = value
def goto(self):
@@ -330,9 +332,21 @@ class TreeNameDefinition(AbstractTreeName):
node = node.parent
return indexes
@property
def inference_state(self):
# Used by the cache function below
return self.parent_context.inference_state
@inference_state_method_cache(default='')
def py__doc__(self):
api_type = self.api_type
if api_type in ('function', 'class'):
if api_type in ('function', 'class', 'property'):
if self.parent_context.get_root_context().is_stub():
from jedi.inference.gradual.conversion import convert_names
names = convert_names([self], prefer_stub_to_compiled=False)
if self not in names:
return _merge_name_docs(names)
# Make sure the names are not TreeNameDefinitions anymore.
return clean_scope_docstring(self.tree_name.get_definition())
@@ -346,7 +360,7 @@ class TreeNameDefinition(AbstractTreeName):
return ''
class _ParamMixin(object):
class _ParamMixin:
def maybe_positional_argument(self, include_star=True):
options = [Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD]
if include_star:
@@ -372,7 +386,7 @@ class _ParamMixin(object):
class ParamNameInterface(_ParamMixin):
api_type = u'param'
api_type = 'param'
def get_kind(self):
raise NotImplementedError
@@ -400,6 +414,9 @@ class ParamNameInterface(_ParamMixin):
return 2
return 0
def infer_default(self):
return NO_VALUES
class BaseTreeParamName(ParamNameInterface, AbstractTreeName):
annotation_node = None
@@ -429,7 +446,7 @@ class BaseTreeParamName(ParamNameInterface, AbstractTreeName):
class _ActualTreeParamName(BaseTreeParamName):
def __init__(self, function_value, tree_name):
super(_ActualTreeParamName, self).__init__(
super().__init__(
function_value.get_default_param_context(), tree_name)
self.function_value = function_value
@@ -499,11 +516,11 @@ class _ActualTreeParamName(BaseTreeParamName):
class AnonymousParamName(_ActualTreeParamName):
@plugin_manager.decorate(name='goto_anonymous_param')
def goto(self):
return super(AnonymousParamName, self).goto()
return super().goto()
@plugin_manager.decorate(name='infer_anonymous_param')
def infer(self):
values = super(AnonymousParamName, self).infer()
values = super().infer()
if values:
return values
from jedi.inference.dynamic_params import dynamic_param_lookup
@@ -527,11 +544,11 @@ class AnonymousParamName(_ActualTreeParamName):
class ParamName(_ActualTreeParamName):
def __init__(self, function_value, tree_name, arguments):
super(ParamName, self).__init__(function_value, tree_name)
super().__init__(function_value, tree_name)
self.arguments = arguments
def infer(self):
values = super(ParamName, self).infer()
values = super().infer()
if values:
return values
@@ -604,7 +621,7 @@ class SubModuleName(ImportName):
_level = 1
class NameWrapper(object):
class NameWrapper:
def __init__(self, wrapped_name):
self._wrapped_name = wrapped_name
@@ -615,7 +632,7 @@ class NameWrapper(object):
return '%s(%s)' % (self.__class__.__name__, self._wrapped_name)
class StubNameMixin(object):
class StubNameMixin:
def py__doc__(self):
from jedi.inference.gradual.conversion import convert_names
# Stubs are not complicated and we can just follow simple statements
@@ -627,7 +644,7 @@ class StubNameMixin(object):
names = convert_names(names, prefer_stub_to_compiled=False)
if self in names:
return super(StubNameMixin, self).py__doc__()
return super().py__doc__()
else:
# We have signatures ourselves in stubs, so don't use signatures
# from the implementation.
@@ -637,7 +654,7 @@ class StubNameMixin(object):
# From here on down we make looking up the sys.version_info fast.
class StubName(StubNameMixin, TreeNameDefinition):
def infer(self):
inferred = super(StubName, self).infer()
inferred = super().infer()
if self.string_name == 'version_info' and self.get_root_context().py__name__() == 'sys':
from jedi.inference.gradual.stub_value import VersionInfo
return ValueSet(VersionInfo(c) for c in inferred)


@@ -1,4 +1,5 @@
from collections import defaultdict
from inspect import Parameter
from jedi import debug
from jedi.inference.utils import PushBackIterator
@@ -6,7 +7,6 @@ from jedi.inference import analysis
from jedi.inference.lazy_value import LazyKnownValue, \
LazyTreeValue, LazyUnknownValue
from jedi.inference.value import iterable
from jedi._compatibility import Parameter
from jedi.inference.names import ParamName
@@ -20,8 +20,7 @@ def _add_argument_issue(error_name, lazy_value, message):
class ExecutedParamName(ParamName):
def __init__(self, function_value, arguments, param_node, lazy_value, is_default=False):
super(ExecutedParamName, self).__init__(
function_value, param_node.name, arguments=arguments)
super().__init__(function_value, param_node.name, arguments=arguments)
self._lazy_value = lazy_value
self._is_default = is_default


@@ -12,7 +12,7 @@ count the function calls.
Settings
~~~~~~~~~~
Recursion settings are important if you don't want extremly
Recursion settings are important if you don't want extremely
recursive python code to go absolutely crazy.
The default values are based on experiments while completing the |jedi| library
@@ -50,7 +50,7 @@ A function may not be executed more than this number of times recursively.
"""
class RecursionDetector(object):
class RecursionDetector:
def __init__(self):
self.pushed_nodes = []
@@ -92,7 +92,7 @@ def execution_recursion_decorator(default=NO_VALUES):
return decorator
class ExecutionRecursionDetector(object):
class ExecutionRecursionDetector:
"""
Catches recursions of executions.
"""


@@ -3,14 +3,14 @@ import re
from parso import python_bytes_to_unicode
from jedi._compatibility import FileNotFoundError
from jedi.debug import dbg
from jedi.file_io import KnownContentFileIO
from jedi.inference.imports import SubModuleName, load_module_from_path
from jedi.file_io import KnownContentFileIO, FolderIO
from jedi.inference.names import SubModuleName
from jedi.inference.imports import load_module_from_path
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.gradual.conversion import convert_names
_IGNORE_FOLDERS = ('.tox', '.venv', 'venv', '__pycache__')
_IGNORE_FOLDERS = ('.tox', '.venv', '.mypy_cache', 'venv', '__pycache__')
_OPENED_FILE_LIMIT = 2000
"""
@@ -39,8 +39,7 @@ def _resolve_names(definition_names, avoid_names=()):
yield name
if name.api_type == 'module':
for n in _resolve_names(name.goto(), definition_names):
yield n
yield from _resolve_names(name.goto(), definition_names)
def _dictionarize(names):
@@ -91,8 +90,7 @@ def _add_names_in_same_context(context, string_name):
names = set(filter_.get(string_name))
if not names:
break
for name in names:
yield name
yield from names
ordered = sorted(names, key=lambda x: x.start_pos)
until_position = ordered[0].start_pos
@@ -110,8 +108,7 @@ def _find_global_variables(names, search_name):
for global_name in method().get(search_name):
yield global_name
c = module_context.create_context(global_name.tree_name)
for n in _add_names_in_same_context(c, global_name.string_name):
yield n
yield from _add_names_in_same_context(c, global_name.string_name)
def find_references(module_context, tree_name, only_in_module=False):
@@ -130,10 +127,10 @@ def find_references(module_context, tree_name, only_in_module=False):
module_contexts = [module_context]
if not only_in_module:
module_contexts.extend(
m for m in set(d.get_root_context() for d in found_names)
if m != module_context and m.tree_node is not None
)
for m in set(d.get_root_context() for d in found_names):
if m != module_context and m.tree_node is not None \
and inf.project.path in m.py__file__().parents:
module_contexts.append(m)
# For param no search for other modules is necessary.
if only_in_module or any(n.api_type == 'param' for n in found_names):
potential_modules = module_contexts
@@ -183,43 +180,58 @@ def _check_fs(inference_state, file_io, regex):
return m.as_context()
def gitignored_lines(folder_io, file_io):
ignored_paths = set()
ignored_names = set()
def gitignored_paths(folder_io, file_io):
ignored_paths_abs = set()
ignored_paths_rel = set()
for l in file_io.read().splitlines():
if not l or l.startswith(b'#'):
if not l or l.startswith(b'#') or l.startswith(b'!') or b'*' in l:
continue
p = l.decode('utf-8', 'ignore')
if p.startswith('/'):
name = p[1:]
if name.endswith(os.path.sep):
name = name[:-1]
ignored_paths.add(os.path.join(folder_io.path, name))
p = l.decode('utf-8', 'ignore').rstrip('/')
if '/' in p:
name = p.lstrip('/')
ignored_paths_abs.add(os.path.join(folder_io.path, name))
else:
ignored_names.add(p)
return ignored_paths, ignored_names
name = p
ignored_paths_rel.add((folder_io.path, name))
return ignored_paths_abs, ignored_paths_rel
def expand_relative_ignore_paths(folder_io, relative_paths):
curr_path = folder_io.path
return {os.path.join(curr_path, p[1]) for p in relative_paths if curr_path.startswith(p[0])}
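
A rough illustration of how gitignored_paths and expand_relative_ignore_paths split ignore entries; the sample .gitignore lines and folder path are made up:

    import os

    gitignore_lines = [b'# comment', b'build/', b'docs/_build/', b'*.pyc', b'!keep.py']
    folder = '/project'

    ignored_abs, ignored_rel = set(), set()
    for line in gitignore_lines:
        # Negations and glob patterns are skipped, just like above.
        if not line or line.startswith(b'#') or line.startswith(b'!') or b'*' in line:
            continue
        p = line.decode('utf-8', 'ignore').rstrip('/')
        if '/' in p:
            ignored_abs.add(os.path.join(folder, p.lstrip('/')))
        else:
            ignored_rel.add((folder, p))

    print(ignored_abs)  # on POSIX: {'/project/docs/_build'}
    print(ignored_rel)  # {('/project', 'build')}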
def recurse_find_python_folders_and_files(folder_io, except_paths=()):
except_paths = set(except_paths)
except_paths_relative = set()
for root_folder_io, folder_ios, file_ios in folder_io.walk():
# Delete folders that we don't want to iterate over.
for file_io in file_ios:
path = file_io.path
if path.endswith('.py') or path.endswith('.pyi'):
if path.suffix in ('.py', '.pyi'):
if path not in except_paths:
yield None, file_io
if path.endswith('.gitignore'):
ignored_paths, ignored_names = \
gitignored_lines(root_folder_io, file_io)
except_paths |= ignored_paths
if path.name == '.gitignore':
ignored_paths_abs, ignored_paths_rel = gitignored_paths(
root_folder_io, file_io
)
except_paths |= ignored_paths_abs
except_paths_relative |= ignored_paths_rel
except_paths_relative_expanded = expand_relative_ignore_paths(
root_folder_io, except_paths_relative
)
folder_ios[:] = [
folder_io
for folder_io in folder_ios
if folder_io.path not in except_paths
and folder_io.path not in except_paths_relative_expanded
and folder_io.get_base_name() not in _IGNORE_FOLDERS
]
for folder_io in folder_ios:
@@ -253,6 +265,11 @@ def _find_python_files_in_sys_path(inference_state, module_contexts):
folder_io = folder_io.get_parent_folder()
def _find_project_modules(inference_state, module_contexts):
except_ = [m.py__file__() for m in module_contexts]
yield from recurse_find_python_files(FolderIO(inference_state.project.path), except_)
def get_module_contexts_containing_name(inference_state, module_contexts, name,
limit_reduction=1):
"""
@@ -272,18 +289,21 @@ def get_module_contexts_containing_name(inference_state, module_contexts, name,
if len(name) <= 2:
return
file_io_iterator = _find_python_files_in_sys_path(inference_state, module_contexts)
for x in search_in_file_ios(inference_state, file_io_iterator, name,
limit_reduction=limit_reduction):
yield x # Python 2...
# Currently not used, because there's only `scope=project` and `scope=file`
# At the moment there is no such thing as `scope=sys.path`.
# file_io_iterator = _find_python_files_in_sys_path(inference_state, module_contexts)
file_io_iterator = _find_project_modules(inference_state, module_contexts)
yield from search_in_file_ios(inference_state, file_io_iterator, name,
limit_reduction=limit_reduction)
def search_in_file_ios(inference_state, file_io_iterator, name, limit_reduction=1):
def search_in_file_ios(inference_state, file_io_iterator, name,
limit_reduction=1, complete=False):
parse_limit = _PARSED_FILE_LIMIT / limit_reduction
open_limit = _OPENED_FILE_LIMIT / limit_reduction
file_io_count = 0
parsed_file_count = 0
regex = re.compile(r'\b' + re.escape(name) + r'\b')
regex = re.compile(r'\b' + re.escape(name) + (r'' if complete else r'\b'))
for file_io in file_io_iterator:
file_io_count += 1
m = _check_fs(inference_state, file_io, regex)
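
A quick check of the regex built above, with a made-up search name:

    import re

    name = 'foo'
    exact = re.compile(r'\b' + re.escape(name) + r'\b')   # default: whole words only
    prefix = re.compile(r'\b' + re.escape(name))          # complete=True: prefix matches too

    assert exact.search('x = foo()')
    assert not exact.search('foobar')
    assert prefix.search('foobar')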


@@ -1,10 +1,11 @@
from jedi._compatibility import Parameter
from inspect import Parameter
from jedi.cache import memoize_method
from jedi import debug
from jedi import parser_utils
class _SignatureMixin(object):
class _SignatureMixin:
def to_string(self):
def param_strings():
is_positional = False
@@ -67,7 +68,7 @@ class AbstractSignature(_SignatureMixin):
class TreeSignature(AbstractSignature):
def __init__(self, value, function_value=None, is_bound=False):
super(TreeSignature, self).__init__(value, is_bound)
super().__init__(value, is_bound)
self._function_value = function_value or value
def bind(self, value):
@@ -90,10 +91,12 @@ class TreeSignature(AbstractSignature):
@memoize_method
def get_param_names(self, resolve_stars=False):
params = super(TreeSignature, self).get_param_names(resolve_stars=False)
params = self._function_value.get_param_names()
if resolve_stars:
from jedi.inference.star_args import process_params
params = process_params(params)
if self.is_bound:
return params[1:]
return params
def matches_signature(self, arguments):
@@ -119,7 +122,7 @@ class TreeSignature(AbstractSignature):
class BuiltinSignature(AbstractSignature):
def __init__(self, value, return_string, function_value=None, is_bound=False):
super(BuiltinSignature, self).__init__(value, is_bound)
super().__init__(value, is_bound)
self._return_string = return_string
self.__function_value = function_value


@@ -10,8 +10,10 @@ This means for example in this case::
The signature here for bar should be `bar(b, c)` instead of bar(*args).
"""
from inspect import Parameter
from parso import tree
from jedi._compatibility import Parameter
from jedi.inference.utils import to_list
from jedi.inference.names import ParamNameWrapper
from jedi.inference.helpers import is_big_annoying_library
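
The docstring above refers to an argument-forwarding pattern roughly like this sketch (function names are illustrative, not the exact snippet from the module):

    def foo(a, b, c):
        ...

    def bar(*args):
        # Jedi's star-args processing should present bar's signature as
        # bar(b, c): the leading positional argument is already bound to a.
        return foo(1, *args)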
@@ -22,7 +24,11 @@ def _iter_nodes_for_param(param_name):
from jedi.inference.arguments import TreeArguments
execution_context = param_name.parent_context
function_node = execution_context.tree_node
# Walk up the parso tree to get the FunctionNode we want. We use the parso
# tree rather than going via the execution context so that we're agnostic of
# the specific scope we're evaluating within (i.e: module or function,
# etc.).
function_node = tree.search_ancestor(param_name.tree_name, 'funcdef', 'lambdef')
module_node = function_node.get_root_node()
start = function_node.children[-1].start_pos
end = function_node.children[-1].end_pos
@@ -32,8 +38,6 @@ def _iter_nodes_for_param(param_name):
argument = name.parent
if argument.type == 'argument' \
and argument.children[0] == '*' * param_name.star_count:
# No support for Python 2.7 here, but they are end-of-life
# anyway
trailer = search_ancestor(argument, 'trailer')
if trailer is not None: # Make sure we're in a function
context = execution_context.create_context(trailer)
@@ -98,8 +102,7 @@ def process_params(param_names, star_count=3): # default means both * and **
if is_big_annoying_library(param_names[0].parent_context):
# At first this feature can look innocent, but it does a lot of
# type inference in some cases, so we just ditch it.
for p in param_names:
yield p
yield from param_names
return
used_names = set()
@@ -210,7 +213,7 @@ def process_params(param_names, star_count=3): # default means both * and **
class ParamNameFixedKind(ParamNameWrapper):
def __init__(self, param_name, new_kind):
super(ParamNameFixedKind, self).__init__(param_name)
super().__init__(param_name)
self._new_kind = new_kind
def get_kind(self):


@@ -2,10 +2,10 @@
Functions inferring the syntax tree.
"""
import copy
import itertools
from parso.python import tree
from jedi._compatibility import force_unicode, unicode
from jedi import debug
from jedi import parser_utils
from jedi.inference.base_value import ValueSet, NO_VALUES, ContextualizedNode, \
@@ -35,6 +35,7 @@ operator_to_magic_method = {
'+': '__add__',
'-': '__sub__',
'*': '__mul__',
'@': '__matmul__',
'/': '__truediv__',
'//': '__floordiv__',
'%': '__mod__',
@@ -224,12 +225,10 @@ def _infer_node(context, element):
| context.infer_node(element.children[-1]))
elif typ == 'operator':
# Must be an ellipsis, other operators are not inferred.
# In Python 2 ellipsis is coded as three single dot tokens, not
# as one token 3 dot token.
if element.value not in ('.', '...'):
if element.value != '...':
origin = element.parent
raise AssertionError("unhandled operator %s in %s " % (repr(element.value), origin))
return ValueSet([compiled.builtin_from_name(inference_state, u'Ellipsis')])
return ValueSet([compiled.builtin_from_name(inference_state, 'Ellipsis')])
elif typ == 'dotted_name':
value_set = infer_atom(context, element.children[0])
for next_name in element.children[2::2]:
@@ -288,15 +287,12 @@ def infer_atom(context, atom):
"""
state = context.inference_state
if atom.type == 'name':
if atom.value in ('True', 'False', 'None'):
# Python 2...
return ValueSet([compiled.builtin_from_name(state, atom.value)])
# This is the first global lookup.
stmt = tree.search_ancestor(
atom, 'expr_stmt', 'lambdef'
) or atom
if stmt.type == 'lambdef':
stmt = tree.search_ancestor(atom, 'expr_stmt', 'lambdef', 'if_stmt') or atom
if stmt.type == 'if_stmt':
if not any(n.start_pos <= atom.start_pos < n.end_pos for n in stmt.get_test_nodes()):
stmt = atom
elif stmt.type == 'lambdef':
stmt = atom
position = stmt.start_pos
if _is_annotation_name(atom):
@@ -311,9 +307,6 @@ def infer_atom(context, atom):
# For False/True/None
if atom.value in ('False', 'True', 'None'):
return ValueSet([compiled.builtin_from_name(state, atom.value)])
elif atom.value == 'print':
# print e.g. could be inferred like this in Python 2.7
return NO_VALUES
elif atom.value == 'yield':
# Contrary to yield from, yield can just appear alone to return a
# value when used with `.send()`.
@@ -328,7 +321,7 @@ def infer_atom(context, atom):
value_set = infer_atom(context, atom.children[0])
for string in atom.children[1:]:
right = infer_atom(context, string)
value_set = _infer_comparison(context, value_set, u'+', right)
value_set = _infer_comparison(context, value_set, '+', right)
return value_set
elif atom.type == 'fstring':
return compiled.get_string_value_set(state)
@@ -336,8 +329,8 @@ def infer_atom(context, atom):
c = atom.children
# Parentheses without commas are not tuples.
if c[0] == '(' and not len(c) == 2 \
and not(c[1].type == 'testlist_comp'
and len(c[1].children) > 1):
and not (c[1].type == 'testlist_comp'
and len(c[1].children) > 1):
return context.infer_node(c[1])
try:
@@ -500,8 +493,10 @@ def infer_factor(value_set, operator):
elif operator == 'not':
b = value.py__bool__()
if b is None: # Uncertainty.
return
yield compiled.create_simple_object(value.inference_state, not b)
yield list(value.inference_state.builtins_module.py__getattribute__('bool')
.execute_annotation()).pop()
else:
yield compiled.create_simple_object(value.inference_state, not b)
else:
yield value
@@ -523,10 +518,20 @@ def _literals_to_types(inference_state, result):
def _infer_comparison(context, left_values, operator, right_values):
state = context.inference_state
if isinstance(operator, str):
operator_str = operator
else:
operator_str = str(operator.value)
if not left_values or not right_values:
# illegal slices e.g. cause left/right_result to be None
result = (left_values or NO_VALUES) | (right_values or NO_VALUES)
return _literals_to_types(state, result)
elif operator_str == "|" and all(
value.is_class() or value.is_compiled()
for value in itertools.chain(left_values, right_values)
):
# ^^^ A naive hack for PEP 604
return ValueSet.from_sets((left_values, right_values))
else:
# I don't think there's a reasonable chance that a string
# operation is still correct, once we pass something like six
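
A minimal example of the PEP 604 union syntax the branch above targets (names invented; the future import keeps it importable on pre-3.10 interpreters):

    from __future__ import annotations

    def parse_port(raw: str) -> int | None:
        # Under PEP 604, `int | None` means Optional[int]; the hack above merges
        # the value sets of both operands, so the return type is inferred as
        # {int, None} instead of attempting a real `__or__` call on the classes.
        return int(raw) if raw.isdigit() else None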
@@ -566,7 +571,7 @@ def _is_tuple(value):
def _bool_to_value(inference_state, bool_):
return compiled.builtin_from_name(inference_state, force_unicode(str(bool_)))
return compiled.builtin_from_name(inference_state, str(bool_))
def _get_tuple_ints(value):
@@ -589,10 +594,10 @@ def _get_tuple_ints(value):
def _infer_comparison_part(inference_state, context, left, operator, right):
l_is_num = is_number(left)
r_is_num = is_number(right)
if isinstance(operator, unicode):
if isinstance(operator, str):
str_operator = operator
else:
str_operator = force_unicode(str(operator.value))
str_operator = str(operator.value)
if str_operator == '*':
# for iterables, ignore * operations
@@ -642,7 +647,7 @@ def _infer_comparison_part(inference_state, context, left, operator, right):
_bool_to_value(inference_state, False)
])
elif str_operator in ('in', 'not in'):
return NO_VALUES
return inference_state.builtins_module.py__getattribute__('bool').execute_annotation()
def check(obj):
"""Checks if a Jedi object is either a float or an int."""
@@ -692,8 +697,15 @@ def tree_name_to_values(inference_state, context, tree_name):
if expr_stmt.type == "expr_stmt" and expr_stmt.children[1].type == "annassign":
correct_scope = parser_utils.get_parent_scope(name) == context.tree_node
ann_assign = expr_stmt.children[1]
if correct_scope:
found_annotation = True
if (
(ann_assign.children[1].type == 'name')
and (ann_assign.children[1].value == tree_name.value)
and context.parent_context
):
context = context.parent_context
value_set |= annotation.infer_annotation(
context, expr_stmt.children[1].children[1]
).execute_annotation()
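
Roughly the self-shadowing annotation pattern the new branch appears to handle, as a made-up example:

    from datetime import datetime

    class Event:
        # The attribute name shadows the imported class; by switching to the
        # parent (module) context, the annotation `datetime` resolves to the
        # import rather than to the attribute being defined.
        datetime: datetime = datetime(1970, 1, 1)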
@@ -746,7 +758,14 @@ def tree_name_to_values(inference_state, context, tree_name):
types = infer_expr_stmt(context, node, tree_name)
elif typ == 'with_stmt':
value_managers = context.infer_node(node.get_test_node_from_name(tree_name))
enter_methods = value_managers.py__getattribute__(u'__enter__')
if node.parent.type == 'async_stmt':
# In the case of `async with` statements, we need to
# first get the coroutine from the `__aenter__` method,
# then "unwrap" via the `__await__` method
enter_methods = value_managers.py__getattribute__('__aenter__')
coro = enter_methods.execute_with_values()
return coro.py__await__().py__stop_iteration_returns()
enter_methods = value_managers.py__getattribute__('__enter__')
return enter_methods.execute_with_values()
elif typ in ('import_from', 'import_name'):
types = imports.infer_import(context, tree_name)
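
A small, self-contained example of the `async with` case handled above (class and names invented):

    import asyncio

    class Session:
        async def __aenter__(self):
            return 'connection'      # what `as conn` should be inferred from

        async def __aexit__(self, *exc):
            return False

    async def main():
        # For `async with`, __aenter__ returns a coroutine, which is then
        # unwrapped via __await__ / its stop-iteration value, as above.
        async with Session() as conn:
            print(conn)

    asyncio.run(main())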
@@ -762,6 +781,8 @@ def tree_name_to_values(inference_state, context, tree_name):
types = NO_VALUES
elif typ == 'del_stmt':
types = NO_VALUES
elif typ == 'namedexpr_test':
types = infer_node(context, node)
else:
raise ValueError("Should not happen. type: %s" % typ)
return types
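
A short example of the assignment-expression (walrus) syntax the new namedexpr_test branch covers; the branch infers the expression node itself, so `match` below can be inferred from the right-hand side:

    import re

    if (match := re.match(r'\d+', '42 apples')) is not None:
        print(match.group())   # '42'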
@@ -860,8 +881,7 @@ def _infer_subscript_list(context, index):
return ValueSet([iterable.Slice(context, None, None, None)])
elif index.type == 'subscript' and not index.children[0] == '.':
# subscript basically implies a slice operation, except for Python 2's
# Ellipsis.
# subscript basically implies a slice operation
# e.g. array[:3]
result = []
for el in index.children:


@@ -1,11 +1,11 @@
import os
import re
from pathlib import Path
from importlib.machinery import all_suffixes
from jedi._compatibility import unicode, force_unicode, all_suffixes
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.base_value import ContextualizedNode
from jedi.inference.helpers import is_string, get_str_or_none
from jedi.common import traverse_parents
from jedi.parser_utils import get_cached_code_lines
from jedi.file_io import FileIO
from jedi import settings
@@ -14,8 +14,9 @@ from jedi import debug
_BUILDOUT_PATH_INSERTION_LIMIT = 10
def _abs_path(module_context, path):
if os.path.isabs(path):
def _abs_path(module_context, str_path: str):
path = Path(str_path)
if path.is_absolute():
return path
module_path = module_context.py__file__()
@@ -24,9 +25,8 @@ def _abs_path(module_context, path):
# system.
return None
base_dir = os.path.dirname(module_path)
path = force_unicode(path)
return os.path.abspath(os.path.join(base_dir, path))
base_dir = module_path.parent
return base_dir.joinpath(path).absolute()
def _paths_from_assignment(module_context, expr_stmt):
@@ -148,7 +148,7 @@ def discover_buildout_paths(inference_state, script_path):
def _get_paths_from_buildout_script(inference_state, buildout_script_path):
file_io = FileIO(buildout_script_path)
file_io = FileIO(str(buildout_script_path))
try:
module_node = inference_state.parse(
file_io=file_io,
@@ -166,36 +166,37 @@ def _get_paths_from_buildout_script(inference_state, buildout_script_path):
string_names=None,
code_lines=get_cached_code_lines(inference_state.grammar, buildout_script_path),
).as_context()
for path in check_sys_path_modifications(module_context):
yield path
yield from check_sys_path_modifications(module_context)
def _get_parent_dir_with_file(path, filename):
for parent in traverse_parents(path):
if os.path.isfile(os.path.join(parent, filename)):
return parent
def _get_parent_dir_with_file(path: Path, filename):
for parent in path.parents:
try:
if parent.joinpath(filename).is_file():
return parent
except OSError:
continue
return None
def _get_buildout_script_paths(search_path):
def _get_buildout_script_paths(search_path: Path):
"""
if there is a 'buildout.cfg' file in one of the parent directories of the
given module it will return a list of all files in the buildout bin
directory that look like python files.
:param search_path: absolute path to the module.
:type search_path: str
"""
project_root = _get_parent_dir_with_file(search_path, 'buildout.cfg')
if not project_root:
return
bin_path = os.path.join(project_root, 'bin')
if not os.path.exists(bin_path):
bin_path = project_root.joinpath('bin')
if not bin_path.exists():
return
for filename in os.listdir(bin_path):
try:
filepath = os.path.join(bin_path, filename)
filepath = bin_path.joinpath(filename)
with open(filepath, 'r') as f:
firstline = f.readline()
if firstline.startswith('#!') and 'python' in firstline:
@@ -203,14 +204,14 @@ def _get_buildout_script_paths(search_path):
except (UnicodeDecodeError, IOError) as e:
# Probably a binary file; permission error or race cond. because
# file got deleted. Ignore it.
debug.warning(unicode(e))
debug.warning(str(e))
continue
def remove_python_path_suffix(path):
for suffix in all_suffixes() + ['.pyi']:
if path.endswith(suffix):
path = path[:-len(suffix)]
if path.suffix == suffix:
path = path.with_name(path.stem)
break
return path
@@ -219,8 +220,7 @@ def transform_path_to_dotted(sys_path, module_path):
"""
Returns the dotted path inside a sys.path as a list of names. e.g.
>>> from os.path import abspath
>>> transform_path_to_dotted([abspath("/foo")], abspath('/foo/bar/baz.py'))
>>> transform_path_to_dotted([str(Path("/foo").absolute())], Path('/foo/bar/baz.py').absolute())
(('bar', 'baz'), False)
Returns (None, False) if the path doesn't really resolve to anything.
@@ -228,21 +228,22 @@ def transform_path_to_dotted(sys_path, module_path):
"""
# First remove the suffix.
module_path = remove_python_path_suffix(module_path)
if module_path.name.startswith('.'):
return None, False
# Once the suffix was removed we are using the files as we know them. This
# means that if someone uses an ending like .vim for a Python file, .vim
# will be part of the returned dotted part.
is_package = module_path.endswith(os.path.sep + '__init__')
is_package = module_path.name == '__init__'
if is_package:
# -1 to remove the separator
module_path = module_path[:-len('__init__') - 1]
module_path = module_path.parent
def iter_potential_solutions():
for p in sys_path:
if module_path.startswith(p):
if str(module_path).startswith(p):
# Strip the trailing slash/backslash
rest = module_path[len(p):]
rest = str(module_path)[len(p):]
# On Windows a path can also use a slash.
if rest.startswith(os.path.sep) or rest.startswith('/'):
# Remove a slash in cases it's still there.


@@ -1,12 +1,8 @@
""" A universal module with functions / classes without dependencies. """
import sys
import contextlib
import functools
import re
import os
from jedi._compatibility import reraise
_sep = os.path.sep
if os.path.altsep is not None:
@@ -36,7 +32,6 @@ class UncaughtAttributeError(Exception):
"""
Important, because `__getattr__` and `hasattr` catch AttributeErrors
implicitly. This is really evil (mainly because of `__getattr__`).
`hasattr` in Python 2 is even more evil, because it catches ALL exceptions.
Therefore this class originally had to be derived from `BaseException`
instead of `Exception`. But because I removed relevant `hasattr` from
the code base, we can now switch back to `Exception`.
@@ -65,21 +60,17 @@ def reraise_uncaught(func):
difficult. This decorator is to help us getting there by changing
`AttributeError` to `UncaughtAttributeError` to avoid unexpected catch.
This helps us noticing bugs earlier and facilitates debugging.
.. note:: Treating StopIteration here is easy.
Add that feature when needed.
"""
@functools.wraps(func)
def wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except AttributeError:
exc_info = sys.exc_info()
reraise(UncaughtAttributeError(exc_info[1]), exc_info[2])
except AttributeError as e:
raise UncaughtAttributeError(e) from e
return wrapper
class PushBackIterator(object):
class PushBackIterator:
def __init__(self, iterator):
self.pushes = []
self.iterator = iterator
@@ -91,25 +82,9 @@ class PushBackIterator(object):
def __iter__(self):
return self
def next(self):
""" Python 2 Compatibility """
return self.__next__()
def __next__(self):
if self.pushes:
self.current = self.pushes.pop()
else:
self.current = next(self.iterator)
return self.current
@contextlib.contextmanager
def ignored(*exceptions):
"""
Value manager that ignores all of the specified exceptions. This will
be in the standard library starting with Python 3.5.
"""
try:
yield
except exceptions:
pass


@@ -1,3 +1,6 @@
# Re-export symbols for wider use. We configure mypy and flake8 to be aware that
# this file does this.
from jedi.inference.value.module import ModuleValue
from jedi.inference.value.klass import ClassValue
from jedi.inference.value.function import FunctionValue, \


@@ -3,13 +3,32 @@ Decorators are not really values, however we need some wrappers to improve
docstrings and other things around decorators.
'''
from jedi.inference.base_value import ValueWrapper
from jedi.inference.base_value import ValueWrapper, ValueSet
class Decoratee(ValueWrapper):
def __init__(self, wrapped_value, original_value):
super(Decoratee, self).__init__(wrapped_value)
super().__init__(wrapped_value)
self._original_value = original_value
def py__doc__(self):
return self._original_value.py__doc__()
def py__get__(self, instance, class_value):
return ValueSet(
Decoratee(v, self._original_value)
for v in self._wrapped_value.py__get__(instance, class_value)
)
def get_signatures(self):
signatures = self._wrapped_value.get_signatures()
if signatures:
return signatures
# Fallback to signatures of the original function/class if the
# decorator has no signature or it is not inferrable.
#
# __get__ means that it's a descriptor. In that case we don't return
# signatures, because they are usually properties.
if not self._wrapped_value.py__getattribute__('__get__'):
return self._original_value.get_signatures()
return []
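
A hedged sketch of the behaviour the comments above describe; the decorator and property are invented, and whether the signature fallback actually fires depends on what can be inferred for the decorator's return value:

    def cached(func):
        results = {}

        def wrapper(*args):
            if args not in results:
                results[args] = func(*args)
            return results[args]
        return wrapper

    @cached
    def distance(x: float, y: float) -> float:
        return (x * x + y * y) ** 0.5

    class Box:
        @property
        def area(self):
            # property defines __get__, so per the check above no signature is
            # borrowed from the undecorated function for descriptors like this.
            return 42

    # Idea of the fallback: if the decorated value yields no usable signature,
    # call tips for distance(...) can still show (x: float, y: float).
    distance(3.0, 4.0)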


@@ -16,7 +16,7 @@ settings will stop this process.
It is important to note that:
1. Array modfications work only in the current module.
1. Array modifications work only in the current module.
2. Jedi only checks Array additions; ``list.pop``, etc are ignored.
"""
from jedi import debug
@@ -155,14 +155,12 @@ class _DynamicArrayAdditions(HelperValueMixin):
except StopIteration:
pass
else:
for lazy in lazy_value.infer().iterate():
yield lazy
yield from lazy_value.infer().iterate()
from jedi.inference.arguments import TreeArguments
if isinstance(arguments, TreeArguments):
additions = _internal_check_array_additions(arguments.context, self._instance)
for addition in additions:
yield addition
yield from additions
def iterate(self, contextualized_node=None, is_async=False):
return self.py__iter__(contextualized_node)
@@ -170,7 +168,7 @@ class _DynamicArrayAdditions(HelperValueMixin):
class _Modification(ValueWrapper):
def __init__(self, wrapped_value, assigned_values, contextualized_key):
super(_Modification, self).__init__(wrapped_value)
super().__init__(wrapped_value)
self._assigned_values = assigned_values
self._contextualized_key = contextualized_key
@@ -189,8 +187,7 @@ class _Modification(ValueWrapper):
class DictModification(_Modification):
def py__iter__(self, contextualized_node=None):
for lazy_context in self._wrapped_value.py__iter__(contextualized_node):
yield lazy_context
yield from self._wrapped_value.py__iter__(contextualized_node)
yield self._contextualized_key
def get_key_values(self):
@@ -199,6 +196,5 @@ class DictModification(_Modification):
class ListModification(_Modification):
def py__iter__(self, contextualized_node=None):
for lazy_context in self._wrapped_value.py__iter__(contextualized_node):
yield lazy_context
yield from self._wrapped_value.py__iter__(contextualized_node)
yield LazyKnownValues(self._assigned_values)


@@ -1,6 +1,5 @@
from parso.python import tree
from jedi._compatibility import use_metaclass
from jedi import debug
from jedi.inference.cache import inference_state_method_cache, CachedMetaClass
from jedi.inference import compiled
@@ -26,7 +25,7 @@ from jedi.inference.gradual.generics import TupleGenericManager
class LambdaName(AbstractNameDefinition):
string_name = '<lambda>'
api_type = u'function'
api_type = 'function'
def __init__(self, lambda_value):
self._lambda_value = lambda_value
@@ -54,14 +53,13 @@ class FunctionAndClassBase(TreeValue):
return None
class FunctionMixin(object):
api_type = u'function'
class FunctionMixin:
api_type = 'function'
def get_filters(self, origin_scope=None):
cls = self.py__class__()
for instance in cls.execute_with_values():
for filter in instance.get_filters(origin_scope=origin_scope):
yield filter
yield from instance.get_filters(origin_scope=origin_scope)
def py__get__(self, instance, class_value):
from jedi.inference.value.instance import BoundMethod
@@ -126,7 +124,7 @@ class FunctionMixin(object):
return [TreeSignature(f) for f in self.get_signature_functions()]
class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndClassBase)):
class FunctionValue(FunctionMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
@classmethod
def from_context(cls, context, tree_node):
def create(tree_node):
@@ -161,7 +159,7 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla
return function
def py__class__(self):
c, = values_from_qualified_names(self.inference_state, u'types', u'FunctionType')
c, = values_from_qualified_names(self.inference_state, 'types', 'FunctionType')
return c
def get_default_param_context(self):
@@ -173,7 +171,7 @@ class FunctionValue(use_metaclass(CachedMetaClass, FunctionMixin, FunctionAndCla
class FunctionNameInClass(NameWrapper):
def __init__(self, class_context, name):
super(FunctionNameInClass, self).__init__(name)
super().__init__(name)
self._class_context = class_context
def get_defining_qualified_value(self):
@@ -182,7 +180,7 @@ class FunctionNameInClass(NameWrapper):
class MethodValue(FunctionValue):
def __init__(self, inference_state, class_context, *args, **kwargs):
super(MethodValue, self).__init__(inference_state, *args, **kwargs)
super().__init__(inference_state, *args, **kwargs)
self.class_context = class_context
def get_default_param_context(self):
@@ -198,11 +196,11 @@ class MethodValue(FunctionValue):
@property
def name(self):
return FunctionNameInClass(self.class_context, super(MethodValue, self).name)
return FunctionNameInClass(self.class_context, super().name)
class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
def _infer_annotations(self):
def infer_annotations(self):
raise NotImplementedError
@inference_state_method_cache(default=NO_VALUES)
@@ -216,7 +214,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
value_set = NO_VALUES
returns = get_yield_exprs(self.inference_state, funcdef)
else:
value_set = self._infer_annotations()
value_set = self.infer_annotations()
if value_set:
# If there are annotations, prefer them over anything else.
# This will make it faster.
@@ -238,7 +236,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
try:
children = r.children
except AttributeError:
ctx = compiled.builtin_from_name(self.inference_state, u'None')
ctx = compiled.builtin_from_name(self.inference_state, 'None')
value_set |= ValueSet([ctx])
else:
value_set |= self.infer_node(children[1])
@@ -250,15 +248,14 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
def _get_yield_lazy_value(self, yield_expr):
if yield_expr.type == 'keyword':
# `yield` just yields None.
ctx = compiled.builtin_from_name(self.inference_state, u'None')
ctx = compiled.builtin_from_name(self.inference_state, 'None')
yield LazyKnownValue(ctx)
return
node = yield_expr.children[1]
if node.type == 'yield_arg': # It must be a yield from.
cn = ContextualizedNode(self, node.children[1])
for lazy_value in cn.infer().iterate(cn):
yield lazy_value
yield from cn.infer().iterate(cn)
else:
yield LazyTreeValue(self, node)
@@ -297,8 +294,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
if for_stmt is None:
# No for_stmt, just normal yields.
for yield_ in yields:
for result in self._get_yield_lazy_value(yield_):
yield result
yield from self._get_yield_lazy_value(yield_)
else:
input_node = for_stmt.get_testlist()
cn = ContextualizedNode(self, input_node)
@@ -308,8 +304,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
dct = {str(for_stmt.children[1].value): lazy_value.infer()}
with self.predefine_names(for_stmt, dct):
for yield_in_same_for_stmt in yields:
for result in self._get_yield_lazy_value(yield_in_same_for_stmt):
yield result
yield from self._get_yield_lazy_value(yield_in_same_for_stmt)
def merge_yield_values(self, is_async=False):
return ValueSet.from_sets(
@@ -330,8 +325,6 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
if is_coroutine:
if self.is_generator():
if inference_state.environment.version_info < (3, 6):
return NO_VALUES
async_generator_classes = inference_state.typing_module \
.py__getattribute__('AsyncGenerator')
@@ -339,13 +332,10 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
# The contravariant doesn't seem to be defined.
generics = (yield_values.py__class__(), NO_VALUES)
return ValueSet(
# In Python 3.6 AsyncGenerator is still a class.
GenericClass(c, TupleGenericManager(generics))
for c in async_generator_classes
).execute_annotation()
else:
if inference_state.environment.version_info < (3, 5):
return NO_VALUES
async_classes = inference_state.typing_module.py__getattribute__('Coroutine')
return_values = self.get_return_values()
# Only the first generic is relevant.
@@ -354,7 +344,8 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
GenericClass(c, TupleGenericManager(generics)) for c in async_classes
).execute_annotation()
else:
if self.is_generator():
# If there are annotations, prefer them over anything else.
if self.is_generator() and not self.infer_annotations():
return ValueSet([iterable.Generator(inference_state, self)])
else:
return self.get_return_values()
@@ -362,7 +353,7 @@ class BaseFunctionExecutionContext(ValueContext, TreeContextMixin):
class FunctionExecutionContext(BaseFunctionExecutionContext):
def __init__(self, function_value, arguments):
super(FunctionExecutionContext, self).__init__(function_value)
super().__init__(function_value)
self._arguments = arguments
def get_filters(self, until_position=None, origin_scope=None):
@@ -373,7 +364,7 @@ class FunctionExecutionContext(BaseFunctionExecutionContext):
arguments=self._arguments
)
def _infer_annotations(self):
def infer_annotations(self):
from jedi.inference.gradual.annotation import infer_return_types
return infer_return_types(self._value, self._arguments)
@@ -385,7 +376,7 @@ class FunctionExecutionContext(BaseFunctionExecutionContext):
class AnonymousFunctionExecution(BaseFunctionExecutionContext):
def _infer_annotations(self):
def infer_annotations(self):
# I don't think inferring anonymous executions is a big thing.
# Anonymous contexts are mostly there for the user to work in. ~ dave
return NO_VALUES
@@ -403,7 +394,7 @@ class AnonymousFunctionExecution(BaseFunctionExecutionContext):
class OverloadedFunctionValue(FunctionMixin, ValueWrapper):
def __init__(self, function, overloaded_functions):
super(OverloadedFunctionValue, self).__init__(function)
super().__init__(function)
self._overloaded_functions = overloaded_functions
def py__call__(self, arguments):

View File

@@ -1,6 +1,6 @@
from abc import abstractproperty
from parso.python.tree import search_ancestor
from parso.tree import search_ancestor
from jedi import debug
from jedi import settings
@@ -25,7 +25,7 @@ from jedi.parser_utils import function_is_staticmethod, function_is_classmethod
class InstanceExecutedParamName(ParamName):
def __init__(self, instance, function_value, tree_name):
super(InstanceExecutedParamName, self).__init__(
super().__init__(
function_value, tree_name, arguments=None)
self._instance = instance
@@ -38,7 +38,7 @@ class InstanceExecutedParamName(ParamName):
class AnonymousMethodExecutionFilter(AnonymousFunctionExecutionFilter):
def __init__(self, instance, *args, **kwargs):
super(AnonymousMethodExecutionFilter, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self._instance = instance
def _convert_param(self, param, name):
@@ -55,12 +55,12 @@ class AnonymousMethodExecutionFilter(AnonymousFunctionExecutionFilter):
self._function_value,
name
)
return super(AnonymousMethodExecutionFilter, self)._convert_param(param, name)
return super()._convert_param(param, name)
class AnonymousMethodExecutionContext(BaseFunctionExecutionContext):
def __init__(self, instance, value):
super(AnonymousMethodExecutionContext, self).__init__(value)
super().__init__(value)
self.instance = instance
def get_filters(self, until_position=None, origin_scope=None):
@@ -83,15 +83,15 @@ class AnonymousMethodExecutionContext(BaseFunctionExecutionContext):
class MethodExecutionContext(FunctionExecutionContext):
def __init__(self, instance, *args, **kwargs):
super(MethodExecutionContext, self).__init__(*args, **kwargs)
super().__init__(*args, **kwargs)
self.instance = instance
class AbstractInstanceValue(Value):
api_type = u'instance'
api_type = 'instance'
def __init__(self, inference_state, parent_context, class_value):
super(AbstractInstanceValue, self).__init__(inference_state, parent_context)
super().__init__(inference_state, parent_context)
# Generated instances are classes that are generated just by using self
# (without arguments).
self.class_value = class_value
@@ -121,7 +121,13 @@ class AbstractInstanceValue(Value):
return [s.bind(self) for s in call_funcs.get_signatures()]
def get_function_slot_names(self, name):
# Searches for Python functions in classes.
# Python classes don't look at the dictionary of the instance when
# looking up `__call__`. This is something that has to do with Python's
# internal slot system (note: not __slots__, but C slots).
for filter in self.get_filters(include_self_names=False):
names = filter.get(name)
if names:
return names
return []
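The comment above refers to CPython resolving special methods on the type, not on the instance dictionary. A quick standalone illustration of that rule:

class Greeter:
    def __call__(self):
        return 'class slot'

g = Greeter()
g.__call__ = lambda: 'instance dict'    # ignored by g()
assert g() == 'class slot'              # g() looks up __call__ on type(g)
assert g.__call__() == 'instance dict'  # ordinary attribute access still sees it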
def execute_function_slots(self, names, *inferred_args):
@@ -133,6 +139,27 @@ class AbstractInstanceValue(Value):
def get_type_hint(self, add_class_info=True):
return self.py__name__()
def py__getitem__(self, index_value_set, contextualized_node):
names = self.get_function_slot_names('__getitem__')
if not names:
return super().py__getitem__(
index_value_set,
contextualized_node,
)
args = ValuesArguments([index_value_set])
return ValueSet.from_sets(name.infer().execute(args) for name in names)
def py__iter__(self, contextualized_node=None):
iter_slot_names = self.get_function_slot_names('__iter__')
if not iter_slot_names:
return super().py__iter__(contextualized_node)
def iterate():
for generator in self.execute_function_slots(iter_slot_names):
yield from generator.py__next__(contextualized_node)
return iterate()
def __repr__(self):
return "<%s of %s>" % (self.__class__.__name__, self.class_value)
@@ -141,8 +168,7 @@ class CompiledInstance(AbstractInstanceValue):
# This is not really a compiled class, it's just an instance from a
# compiled class.
def __init__(self, inference_state, parent_context, class_value, arguments):
super(CompiledInstance, self).__init__(inference_state, parent_context,
class_value)
super().__init__(inference_state, parent_context, class_value)
self._arguments = arguments
def get_filters(self, origin_scope=None, include_self_names=True):
@@ -234,51 +260,25 @@ class _BaseTreeInstance(AbstractInstanceValue):
# other way around.
if is_big_annoying_library(self.parent_context):
return NO_VALUES
names = (self.get_function_slot_names(u'__getattr__')
or self.get_function_slot_names(u'__getattribute__'))
names = (self.get_function_slot_names('__getattr__')
or self.get_function_slot_names('__getattribute__'))
return self.execute_function_slots(names, name)
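The fallback above mirrors how __getattr__ is only consulted after normal attribute lookup fails. Standalone sketch:

class Proxy:
    def __init__(self, data):
        self._data = data
    def __getattr__(self, name):
        # called only when normal lookup fails
        return self._data[name]

p = Proxy({'answer': 42})
assert p.answer == 42             # resolved through __getattr__
assert p._data == {'answer': 42}  # normal lookup, __getattr__ not involved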
def py__getitem__(self, index_value_set, contextualized_node):
names = self.get_function_slot_names(u'__getitem__')
if not names:
return super(_BaseTreeInstance, self).py__getitem__(
index_value_set,
contextualized_node,
def py__next__(self, contextualized_node=None):
name = u'__next__'
next_slot_names = self.get_function_slot_names(name)
if next_slot_names:
yield LazyKnownValues(
self.execute_function_slots(next_slot_names)
)
args = ValuesArguments([index_value_set])
return ValueSet.from_sets(name.infer().execute(args) for name in names)
def py__iter__(self, contextualized_node=None):
iter_slot_names = self.get_function_slot_names(u'__iter__')
if not iter_slot_names:
return super(_BaseTreeInstance, self).py__iter__(contextualized_node)
def iterate():
for generator in self.execute_function_slots(iter_slot_names):
if generator.is_instance() and not generator.is_compiled():
# `__next__` logic.
if self.inference_state.environment.version_info.major == 2:
name = u'next'
else:
name = u'__next__'
next_slot_names = generator.get_function_slot_names(name)
if next_slot_names:
yield LazyKnownValues(
generator.execute_function_slots(next_slot_names)
)
else:
debug.warning('Instance has no __next__ function in %s.', generator)
else:
for lazy_value in generator.py__iter__():
yield lazy_value
return iterate()
else:
debug.warning('Instance has no __next__ function in %s.', self)
def py__call__(self, arguments):
names = self.get_function_slot_names(u'__call__')
names = self.get_function_slot_names('__call__')
if not names:
# Means the Instance is not callable.
return super(_BaseTreeInstance, self).py__call__(arguments)
return super().py__call__(arguments)
return ValueSet.from_sets(name.infer().execute(arguments) for name in names)
@@ -293,24 +293,14 @@ class _BaseTreeInstance(AbstractInstanceValue):
if result is not NotImplemented:
return result
names = self.get_function_slot_names(u'__get__')
names = self.get_function_slot_names('__get__')
if names:
if instance is None:
instance = compiled.builtin_from_name(self.inference_state, u'None')
instance = compiled.builtin_from_name(self.inference_state, 'None')
return self.execute_function_slots(names, instance, class_value)
else:
return ValueSet([self])
def get_function_slot_names(self, name):
# Python classes don't look at the dictionary of the instance when
# looking up `__call__`. This is something that has to do with Python's
# internal slot system (note: not __slots__, but C slots).
for filter in self.get_filters(include_self_names=False):
names = filter.get(name)
if names:
return names
return []
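The __get__ handling above follows the descriptor protocol. A minimal standalone descriptor for reference:

class Ten:
    def __get__(self, instance, owner=None):
        return 10

class Holder:
    x = Ten()

assert Holder().x == 10  # Ten.__get__(descriptor, instance, Holder)
assert Holder.x == 10    # accessed on the class: instance is None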
class TreeInstance(_BaseTreeInstance):
def __init__(self, inference_state, parent_context, class_value, arguments):
@@ -322,7 +312,7 @@ class TreeInstance(_BaseTreeInstance):
if settings.dynamic_array_additions:
arguments = get_dynamic_array_instance(self, arguments)
super(TreeInstance, self).__init__(inference_state, parent_context, class_value)
super().__init__(inference_state, parent_context, class_value)
self._arguments = arguments
self.tree_node = class_value.tree_node
@@ -396,7 +386,7 @@ class TreeInstance(_BaseTreeInstance):
else:
if key == index:
return lazy_context.infer()
return super(TreeInstance, self).py__simple_getitem__(index)
return super().py__simple_getitem__(index)
def __repr__(self):
return "<%s of %s(%s)>" % (self.__class__.__name__, self.class_value,
@@ -407,21 +397,10 @@ class AnonymousInstance(_BaseTreeInstance):
_arguments = None
class CompiledInstanceName(compiled.CompiledName):
def __init__(self, inference_state, instance, klass, name):
parent_value = klass.parent_context.get_value()
assert parent_value is not None, "How? Please reproduce and report"
super(CompiledInstanceName, self).__init__(
inference_state,
parent_value,
name.string_name
)
self._instance = instance
self._class_member_name = name
class CompiledInstanceName(NameWrapper):
@iterator_to_value_set
def infer(self):
for result_value in self._class_member_name.infer():
for result_value in self._wrapped_name.infer():
if result_value.api_type == 'function':
yield CompiledBoundMethod(result_value)
else:
@@ -440,16 +419,12 @@ class CompiledInstanceClassFilter(AbstractFilter):
return self._convert(self._class_filter.values())
def _convert(self, names):
klass = self._class_filter.compiled_value
return [
CompiledInstanceName(self._instance.inference_state, self._instance, klass, n)
for n in names
]
return [CompiledInstanceName(n) for n in names]
class BoundMethod(FunctionMixin, ValueWrapper):
def __init__(self, instance, class_context, function):
super(BoundMethod, self).__init__(function)
super().__init__(function)
self.instance = instance
self._class_context = class_context
@@ -460,11 +435,11 @@ class BoundMethod(FunctionMixin, ValueWrapper):
def name(self):
return FunctionNameInClass(
self._class_context,
super(BoundMethod, self).name
super().name
)
def py__class__(self):
c, = values_from_qualified_names(self.inference_state, u'types', u'MethodType')
c, = values_from_qualified_names(self.inference_state, 'types', 'MethodType')
return c
def _get_arguments(self, arguments):
@@ -492,7 +467,7 @@ class BoundMethod(FunctionMixin, ValueWrapper):
]
def get_signatures(self):
return [sig.bind(self) for sig in super(BoundMethod, self).get_signatures()]
return [sig.bind(self) for sig in super().get_signatures()]
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self._wrapped_value)
@@ -522,17 +497,28 @@ class SelfName(TreeNameDefinition):
def get_defining_qualified_value(self):
return self._instance
def infer(self):
stmt = search_ancestor(self.tree_name, 'expr_stmt')
if stmt is not None:
if stmt.children[1].type == "annassign":
from jedi.inference.gradual.annotation import infer_annotation
values = infer_annotation(
self.parent_context, stmt.children[1].children[1]
).execute_annotation()
if values:
return values
return super().infer()
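SelfName.infer above walks up to the enclosing expr_stmt and checks for an annassign child. A small sketch of that tree shape using parso directly (parso is already a jedi dependency; the exact tree layout shown here is an assumption about current parso versions):

import parso
from parso.tree import search_ancestor

module = parso.parse('self.timeout: float = 1.5\n')
leaf = module.get_first_leaf()               # the Name leaf for `self`
stmt = search_ancestor(leaf, 'expr_stmt')
assert stmt.children[1].type == 'annassign'  # ': float = 1.5'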
class LazyInstanceClassName(NameWrapper):
def __init__(self, instance, class_member_name):
super(LazyInstanceClassName, self).__init__(class_member_name)
super().__init__(class_member_name)
self._instance = instance
@iterator_to_value_set
def infer(self):
for result_value in self._wrapped_name.infer():
for c in result_value.py__get__(self._instance, self._instance.py__class__()):
yield c
yield from result_value.py__get__(self._instance, self._instance.py__class__())
def get_signatures(self):
return self.infer().get_signatures()
@@ -572,7 +558,7 @@ class SelfAttributeFilter(ClassFilter):
This class basically filters all the use cases where `self.*` was assigned.
"""
def __init__(self, instance, instance_class, node_context, origin_scope):
super(SelfAttributeFilter, self).__init__(
super().__init__(
class_value=instance_class,
node_context=node_context,
origin_scope=origin_scope,
@@ -616,10 +602,9 @@ class SelfAttributeFilter(ClassFilter):
class InstanceArguments(TreeArgumentsWrapper):
def __init__(self, instance, arguments):
super(InstanceArguments, self).__init__(arguments)
super().__init__(arguments)
self.instance = instance
def unpack(self, func=None):
yield None, LazyKnownValue(self.instance)
for values in self._wrapped_arguments.unpack(func):
yield values
yield from self._wrapped_arguments.unpack(func)

View File

@@ -2,9 +2,6 @@
Contains all classes and functions to deal with lists, dicts, generators and
iterators in general.
"""
import sys
from jedi._compatibility import force_unicode, is_py3
from jedi.inference import compiled
from jedi.inference import analysis
from jedi.inference.lazy_value import LazyKnownValue, LazyKnownValues, \
@@ -22,9 +19,12 @@ from jedi.inference.context import CompForContext
from jedi.inference.value.dynamic_arrays import check_array_additions
class IterableMixin(object):
class IterableMixin:
def py__next__(self, contextualized_node=None):
return self.py__iter__(contextualized_node)
def py__stop_iteration_returns(self):
return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
# At the moment, safe values are simple values like "foo", 1 and not
# lists/dicts. Therefore as a small speed optimization we can just do the
@@ -32,27 +32,19 @@ class IterableMixin(object):
# doing this in the end as well.
# This mostly speeds up patterns like `sys.version_info >= (3, 0)` in
# typeshed.
if sys.version_info[0] == 2:
# Python 2...........
def get_safe_value(self, default=sentinel):
if default is sentinel:
raise ValueError("There exists no safe value for value %s" % self)
return default
else:
get_safe_value = Value.get_safe_value
get_safe_value = Value.get_safe_value
class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
array_type = None
def _get_wrapped_value(self):
generator, = self.inference_state.typing_module \
.py__getattribute__('Generator') \
.execute_annotation()
return generator
instance, = self._get_cls().execute_annotation()
return instance
def is_instance(self):
return False
def _get_cls(self):
generator, = self.inference_state.typing_module.py__getattribute__('Generator')
return generator
def py__bool__(self):
return True
@@ -62,26 +54,34 @@ class GeneratorBase(LazyAttributeOverwrite, IterableMixin):
return ValueSet([self])
@publish_method('send')
@publish_method('next', python_version_match=2)
@publish_method('__next__', python_version_match=3)
def py__next__(self, arguments):
@publish_method('__next__')
def _next(self, arguments):
return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())
def py__stop_iteration_returns(self):
return ValueSet([compiled.builtin_from_name(self.inference_state, u'None')])
return ValueSet([compiled.builtin_from_name(self.inference_state, 'None')])
@property
def name(self):
return compiled.CompiledValueName(self, 'Generator')
def get_annotated_class_object(self):
from jedi.inference.gradual.generics import TupleGenericManager
gen_values = self.merge_types_of_iterate().py__class__()
gm = TupleGenericManager((gen_values, NO_VALUES, NO_VALUES))
return self._get_cls().with_generics(gm)
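get_annotated_class_object above builds typing.Generator[...] generics from the inferred yield type. For reference, the annotation it models looks like this in user code:

from typing import Generator

def countdown(n: int) -> Generator[int, None, None]:
    # yield type int, send type None, return type None
    while n > 0:
        yield n
        n -= 1

assert list(countdown(3)) == [3, 2, 1]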
class Generator(GeneratorBase):
"""Handling of `yield` functions."""
def __init__(self, inference_state, func_execution_context):
super(Generator, self).__init__(inference_state)
super().__init__(inference_state)
self._func_execution_context = func_execution_context
def py__iter__(self, contextualized_node=None):
iterators = self._func_execution_context.infer_annotations()
if iterators:
return iterators.iterate(contextualized_node)
return self._func_execution_context.get_yield_lazy_values()
def py__stop_iteration_returns(self):
@@ -127,7 +127,7 @@ def comprehension_from_atom(inference_state, value, atom):
)
class ComprehensionMixin(object):
class ComprehensionMixin:
@inference_state_method_cache()
def _get_comp_for_context(self, parent_context, comp_for):
return CompForContext(parent_context, comp_for)
@@ -153,8 +153,7 @@ class ComprehensionMixin(object):
)
with context.predefine_names(comp_for, dct):
try:
for result in self._nested(comp_fors[1:], context):
yield result
yield from self._nested(comp_fors[1:], context)
except IndexError:
iterated = context.infer_node(self._entry_node)
if self.array_type == 'dict':
@@ -166,8 +165,7 @@ class ComprehensionMixin(object):
@to_list
def _iterate(self):
comp_fors = tuple(get_sync_comp_fors(self._sync_comp_for_node))
for result in self._nested(comp_fors):
yield result
yield from self._nested(comp_fors)
def py__iter__(self, contextualized_node=None):
for set_ in self._iterate():
@@ -177,13 +175,13 @@ class ComprehensionMixin(object):
return "<%s of %s>" % (type(self).__name__, self._sync_comp_for_node)
class _DictMixin(object):
class _DictMixin:
def _get_generics(self):
return tuple(c_set.py__class__() for c_set in self.get_mapping_item_values())
class Sequence(LazyAttributeOverwrite, IterableMixin):
api_type = u'instance'
api_type = 'instance'
@property
def name(self):
@@ -222,14 +220,14 @@ class Sequence(LazyAttributeOverwrite, IterableMixin):
class _BaseComprehension(ComprehensionMixin):
def __init__(self, inference_state, defining_context, sync_comp_for_node, entry_node):
assert sync_comp_for_node.type == 'sync_comp_for'
super(_BaseComprehension, self).__init__(inference_state)
super().__init__(inference_state)
self._defining_context = defining_context
self._sync_comp_for_node = sync_comp_for_node
self._entry_node = entry_node
class ListComprehension(_BaseComprehension, Sequence):
array_type = u'list'
array_type = 'list'
def py__simple_getitem__(self, index):
if isinstance(index, slice):
@@ -242,14 +240,14 @@ class ListComprehension(_BaseComprehension, Sequence):
class SetComprehension(_BaseComprehension, Sequence):
array_type = u'set'
array_type = 'set'
class GeneratorComprehension(_BaseComprehension, GeneratorBase):
pass
class _DictKeyMixin(object):
class _DictKeyMixin:
# TODO merge with _DictMixin?
def get_mapping_item_values(self):
return self._dict_keys(), self._dict_values()
@@ -260,11 +258,11 @@ class _DictKeyMixin(object):
class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
array_type = u'dict'
array_type = 'dict'
def __init__(self, inference_state, defining_context, sync_comp_for_node, key_node, value_node):
assert sync_comp_for_node.type == 'sync_comp_for'
super(DictComprehension, self).__init__(inference_state)
super().__init__(inference_state)
self._defining_context = defining_context
self._sync_comp_for_node = sync_comp_for_node
self._entry_node = key_node
@@ -317,25 +315,25 @@ class DictComprehension(ComprehensionMixin, Sequence, _DictKeyMixin):
class SequenceLiteralValue(Sequence):
_TUPLE_LIKE = 'testlist_star_expr', 'testlist', 'subscriptlist'
mapping = {'(': u'tuple',
'[': u'list',
'{': u'set'}
mapping = {'(': 'tuple',
'[': 'list',
'{': 'set'}
def __init__(self, inference_state, defining_context, atom):
super(SequenceLiteralValue, self).__init__(inference_state)
super().__init__(inference_state)
self.atom = atom
self._defining_context = defining_context
if self.atom.type in self._TUPLE_LIKE:
self.array_type = u'tuple'
self.array_type = 'tuple'
else:
self.array_type = SequenceLiteralValue.mapping[atom.children[0]]
"""The builtin name of the array (list, set, tuple or dict)."""
def _get_generics(self):
if self.array_type == u'tuple':
if self.array_type == 'tuple':
return tuple(x.infer().py__class__() for x in self.py__iter__())
return super(SequenceLiteralValue, self)._get_generics()
return super()._get_generics()
def py__simple_getitem__(self, index):
"""Here the index is an int/str. Raises IndexError/KeyError."""
@@ -344,6 +342,8 @@ class SequenceLiteralValue(Sequence):
else:
with reraise_getitem_errors(TypeError, KeyError, IndexError):
node = self.get_tree_entries()[index]
if node == ':' or node.type == 'subscript':
return NO_VALUES
return self._defining_context.infer_node(node)
def py__iter__(self, contextualized_node=None):
@@ -358,8 +358,7 @@ class SequenceLiteralValue(Sequence):
yield LazyKnownValue(Slice(self._defining_context, None, None, None))
else:
yield LazyTreeValue(self._defining_context, node)
for addition in check_array_additions(self._defining_context, self):
yield addition
yield from check_array_additions(self._defining_context, self)
def py__len__(self):
# This function is not really used often. It's more of a try.
@@ -410,25 +409,17 @@ class SequenceLiteralValue(Sequence):
else:
return [array_node]
def exact_key_items(self):
"""
Returns a generator of tuples like dict.items(), where the key is
resolved (as a string) and the values are still lazy values.
"""
for key_node, value in self.get_tree_entries():
for key in self._defining_context.infer_node(key_node):
if is_string(key):
yield key.get_safe_value(), LazyTreeValue(self._defining_context, value)
def __repr__(self):
return "<%s of %s>" % (self.__class__.__name__, self.atom)
class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
array_type = u'dict'
array_type = 'dict'
def __init__(self, inference_state, defining_context, atom):
super(SequenceLiteralValue, self).__init__(inference_state)
# Intentionally don't call the super class. This is definitely a sign
# that the architecture is bad and we should refactor.
Sequence.__init__(self, inference_state)
self._defining_context = defining_context
self.atom = atom
@@ -437,7 +428,7 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
compiled_value_index = compiled.create_simple_object(self.inference_state, index)
for key, value in self.get_tree_entries():
for k in self._defining_context.infer_node(key):
for key_v in k.execute_operation(compiled_value_index, u'=='):
for key_v in k.execute_operation(compiled_value_index, '=='):
if key_v.get_safe_value():
return self._defining_context.infer_node(value)
raise SimpleGetItemNotFound('No key found in dictionary %s.' % self)
@@ -473,6 +464,16 @@ class DictLiteralValue(_DictMixin, SequenceLiteralValue, _DictKeyMixin):
return ValueSet([FakeList(self.inference_state, lazy_values)])
def exact_key_items(self):
"""
Returns a generator of tuples like dict.items(), where the key is
resolved (as a string) and the values are still lazy values.
"""
for key_node, value in self.get_tree_entries():
for key in self._defining_context.infer_node(key_node):
if is_string(key):
yield key.get_safe_value(), LazyTreeValue(self._defining_context, value)
def _dict_values(self):
return ValueSet.from_sets(
self._defining_context.infer_node(v)
@@ -491,7 +492,7 @@ class _FakeSequence(Sequence):
"""
type should be one of "tuple", "list"
"""
super(_FakeSequence, self).__init__(inference_state)
super().__init__(inference_state)
self._lazy_value_list = lazy_value_list
def py__simple_getitem__(self, index):
@@ -513,18 +514,18 @@ class _FakeSequence(Sequence):
class FakeTuple(_FakeSequence):
array_type = u'tuple'
array_type = 'tuple'
class FakeList(_FakeSequence):
array_type = u'tuple'
array_type = 'tuple'
class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
array_type = u'dict'
array_type = 'dict'
def __init__(self, inference_state, dct):
super(FakeDict, self).__init__(inference_state)
super().__init__(inference_state)
self._dct = dct
def py__iter__(self, contextualized_node=None):
@@ -532,21 +533,6 @@ class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
yield LazyKnownValue(compiled.create_simple_object(self.inference_state, key))
def py__simple_getitem__(self, index):
if is_py3 and self.inference_state.environment.version_info.major == 2:
# In Python 2 bytes and unicode compare.
if isinstance(index, bytes):
index_unicode = force_unicode(index)
try:
return self._dct[index_unicode].infer()
except KeyError:
pass
elif isinstance(index, str):
index_bytes = index.encode('utf-8')
try:
return self._dct[index_bytes].infer()
except KeyError:
pass
with reraise_getitem_errors(KeyError, TypeError):
lazy_value = self._dct[index]
return lazy_value.infer()
@@ -573,14 +559,13 @@ class FakeDict(_DictMixin, Sequence, _DictKeyMixin):
class MergedArray(Sequence):
def __init__(self, inference_state, arrays):
super(MergedArray, self).__init__(inference_state)
super().__init__(inference_state)
self.array_type = arrays[-1].array_type
self._arrays = arrays
def py__iter__(self, contextualized_node=None):
for array in self._arrays:
for lazy_value in array.py__iter__():
yield lazy_value
yield from array.py__iter__()
def py__simple_getitem__(self, index):
return ValueSet.from_sets(lazy_value.infer() for lazy_value in self.py__iter__())

View File

@@ -37,8 +37,8 @@ py__doc__() Returns the docstring for a value.
"""
from jedi import debug
from jedi._compatibility import use_metaclass
from jedi.parser_utils import get_cached_parent_scope, expr_is_dotted
from jedi.parser_utils import get_cached_parent_scope, expr_is_dotted, \
function_is_property
from jedi.inference.cache import inference_state_method_cache, CachedMetaClass, \
inference_state_method_generator_cache
from jedi.inference import compiled
@@ -56,7 +56,7 @@ from jedi.plugins import plugin_manager
class ClassName(TreeNameDefinition):
def __init__(self, class_value, tree_name, name_context, apply_decorators):
super(ClassName, self).__init__(name_context, tree_name)
super().__init__(name_context, tree_name)
self._apply_decorators = apply_decorators
self._class_value = class_value
@@ -69,16 +69,31 @@ class ClassName(TreeNameDefinition):
for result_value in inferred:
if self._apply_decorators:
for c in result_value.py__get__(instance=None, class_value=self._class_value):
yield c
yield from result_value.py__get__(instance=None, class_value=self._class_value)
else:
yield result_value
@property
def api_type(self):
type_ = super().api_type
if type_ == 'function':
definition = self.tree_name.get_definition()
if definition is None:
return type_
if function_is_property(definition):
# This essentially checks if there is an @property before
# the function. @property could be something different, but
# any programmer that redefines property as something that
# is not really a property anymore, should be shot. (i.e.
# this is a heuristic).
return 'property'
return type_
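The api_type heuristic above reports property-decorated functions as 'property'. The user code it targets looks like this standalone example:

class Circle:
    def __init__(self, radius):
        self.radius = radius

    @property
    def area(self):
        return 3.14159 * self.radius ** 2

assert isinstance(vars(Circle)['area'], property)
assert Circle(2).area > 12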
class ClassFilter(ParserTreeFilter):
def __init__(self, class_value, node_context=None, until_position=None,
origin_scope=None, is_instance=False):
super(ClassFilter, self).__init__(
super().__init__(
class_value.as_context(), node_context,
until_position=until_position,
origin_scope=origin_scope,
@@ -101,35 +116,20 @@ class ClassFilter(ParserTreeFilter):
while node is not None:
if node == self._parser_scope or node == self.parent_context:
return True
node = get_cached_parent_scope(self._used_names, node)
node = get_cached_parent_scope(self._parso_cache_node, node)
return False
def _access_possible(self, name):
# Filter for ClassVar variables
# TODO this is not properly done, yet. It just checks for the string
# ClassVar in the annotation, which can be quite imprecise. If we
# wanted to do this correctly, we would have to infer the ClassVar.
if not self._is_instance:
expr_stmt = name.get_definition()
if expr_stmt is not None and expr_stmt.type == 'expr_stmt':
annassign = expr_stmt.children[1]
if annassign.type == 'annassign':
# If there is an =, the variable is obviously also
# defined on the class.
if 'ClassVar' not in annassign.children[1].get_code() \
and '=' not in annassign.children:
return False
# Filter for name mangling of private variables like __foo
return not name.value.startswith('__') or name.value.endswith('__') \
or self._equals_origin_scope()
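The removed ClassVar filter approximated standard annotation semantics: annotation-only names are instance attributes unless they are marked ClassVar or given a value. Standalone illustration:

from typing import ClassVar

class Job:
    queue: ClassVar[str] = 'default'  # explicitly a class attribute
    retries: int = 3                  # has an '=', so it also exists on the class
    owner: str                        # annotation only: an instance attribute

assert hasattr(Job, 'queue') and hasattr(Job, 'retries')
assert not hasattr(Job, 'owner')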
def _filter(self, names):
names = super(ClassFilter, self)._filter(names)
names = super()._filter(names)
return [name for name in names if self._access_possible(name)]
class ClassMixin(object):
class ClassMixin:
def is_class(self):
return True
@@ -145,7 +145,7 @@ class ClassMixin(object):
return ValueSet([TreeInstance(self.inference_state, self.parent_context, self, arguments)])
def py__class__(self):
return compiled.builtin_from_name(self.inference_state, u'type')
return compiled.builtin_from_name(self.inference_state, 'type')
@property
def name(self):
@@ -192,13 +192,11 @@ class ClassMixin(object):
if include_metaclasses:
metaclasses = self.get_metaclasses()
if metaclasses:
for f in self.get_metaclass_filters(metaclasses, is_instance):
yield f # Python 2..
yield from self.get_metaclass_filters(metaclasses, is_instance)
for cls in self.py__mro__():
if cls.is_compiled():
for filter in cls.get_filters(is_instance=is_instance):
yield filter
yield from cls.get_filters(is_instance=is_instance)
else:
yield ClassFilter(
self, node_context=cls.as_context(),
@@ -207,7 +205,7 @@ class ClassMixin(object):
)
if not is_instance and include_type_when_class:
from jedi.inference.compiled import builtin_from_name
type_ = builtin_from_name(self.inference_state, u'type')
type_ = builtin_from_name(self.inference_state, 'type')
assert isinstance(type_, ClassValue)
if type_ != self:
# We are not using execute_with_values here, because the
@@ -321,8 +319,8 @@ class ClassMixin(object):
return ValueSet({self})
class ClassValue(use_metaclass(CachedMetaClass, ClassMixin, FunctionAndClassBase)):
api_type = u'class'
class ClassValue(ClassMixin, FunctionAndClassBase, metaclass=CachedMetaClass):
api_type = 'class'
@inference_state_method_cache()
def list_type_vars(self):

View File

@@ -1,4 +1,6 @@
import os
from pathlib import Path
from typing import Optional
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.names import AbstractNameDefinition, ModuleName
@@ -16,7 +18,7 @@ class _ModuleAttributeName(AbstractNameDefinition):
"""
For module attributes like __file__, __str__ and so on.
"""
api_type = u'instance'
api_type = 'instance'
def __init__(self, parent_module, string_name, string_value=None):
self.parent_context = parent_module
@@ -26,16 +28,13 @@ class _ModuleAttributeName(AbstractNameDefinition):
def infer(self):
if self._string_value is not None:
s = self._string_value
if self.parent_context.inference_state.environment.version_info.major == 2 \
and not isinstance(s, bytes):
s = s.encode('utf-8')
return ValueSet([
create_simple_object(self.parent_context.inference_state, s)
])
return compiled.get_string_value_set(self.parent_context.inference_state)
class SubModuleDictMixin(object):
class SubModuleDictMixin:
@inference_state_method_cache()
def sub_modules_dict(self):
"""
@@ -65,15 +64,14 @@ class ModuleMixin(SubModuleDictMixin):
parent_context=self.as_context(),
origin_scope=origin_scope
),
GlobalNameFilter(self.as_context(), self.tree_node),
GlobalNameFilter(self.as_context()),
)
yield DictFilter(self.sub_modules_dict())
yield DictFilter(self._module_attributes_dict())
for star_filter in self.iter_star_filters():
yield star_filter
yield from self.iter_star_filters()
def py__class__(self):
c, = values_from_qualified_names(self.inference_state, u'types', u'ModuleType')
c, = values_from_qualified_names(self.inference_state, 'types', 'ModuleType')
return c
def is_module(self):
@@ -82,7 +80,7 @@ class ModuleMixin(SubModuleDictMixin):
def is_stub(self):
return False
@property
@property # type: ignore[misc]
@inference_state_method_cache()
def name(self):
return self._module_name_class(self, self.string_names[-1])
@@ -92,9 +90,9 @@ class ModuleMixin(SubModuleDictMixin):
names = ['__package__', '__doc__', '__name__']
# All the additional module attributes are strings.
dct = dict((n, _ModuleAttributeName(self, n)) for n in names)
file = self.py__file__()
if file is not None:
dct['__file__'] = _ModuleAttributeName(self, '__file__', file)
path = self.py__file__()
if path is not None:
dct['__file__'] = _ModuleAttributeName(self, '__file__', str(path))
return dct
def iter_star_filters(self):
@@ -137,18 +135,18 @@ class ModuleMixin(SubModuleDictMixin):
class ModuleValue(ModuleMixin, TreeValue):
api_type = u'module'
api_type = 'module'
def __init__(self, inference_state, module_node, code_lines, file_io=None,
string_names=None, is_package=False):
super(ModuleValue, self).__init__(
super().__init__(
inference_state,
parent_context=None,
tree_node=module_node
)
self.file_io = file_io
if file_io is None:
self._path = None
self._path: Optional[Path] = None
else:
self._path = file_io.path
self.string_names = string_names # Optional[Tuple[str, ...]]
@@ -156,31 +154,34 @@ class ModuleValue(ModuleMixin, TreeValue):
self._is_package = is_package
def is_stub(self):
if self._path is not None and self._path.endswith('.pyi'):
if self._path is not None and self._path.suffix == '.pyi':
# Currently this is how we identify stubs when e.g. goto is
# used in them. This could be changed if stubs were identified
# sooner and used as StubModuleValue.
return True
return super(ModuleValue, self).is_stub()
return super().is_stub()
def py__name__(self):
if self.string_names is None:
return None
return '.'.join(self.string_names)
def py__file__(self):
def py__file__(self) -> Optional[Path]:
"""
In contrast to Python's __file__, this can be None.
"""
if self._path is None:
return None
return os.path.abspath(self._path)
return self._path.absolute()
def is_package(self):
return self._is_package
def py__package__(self):
if self.string_names is None:
return []
if self._is_package:
return self.string_names
return self.string_names[:-1]
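py__file__ now returns a pathlib.Path instead of a string. The equivalents used above, sketched standalone:

from pathlib import Path

p = Path('pkg/mod.pyi')
assert p.suffix == '.pyi'  # replaces path.endswith('.pyi')
print(p.absolute())        # replaces os.path.abspath(path); stays a Path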

View File

@@ -1,3 +1,6 @@
from pathlib import Path
from typing import Optional
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.filters import DictFilter
from jedi.inference.names import ValueNameMixin, AbstractNameDefinition
@@ -20,14 +23,11 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
"""
Provides support for implicit namespace packages
"""
# Is a module like every other module, because if you import an empty
# folder foobar it will be available as an object:
# <module 'foobar' (namespace)>.
api_type = u'module'
api_type = 'namespace'
parent_context = None
def __init__(self, inference_state, string_names, paths):
super(ImplicitNamespaceValue, self).__init__(inference_state, parent_context=None)
super().__init__(inference_state, parent_context=None)
self.inference_state = inference_state
self.string_names = string_names
self._paths = paths
@@ -38,13 +38,13 @@ class ImplicitNamespaceValue(Value, SubModuleDictMixin):
def get_qualified_names(self):
return ()
@property
@property # type: ignore[misc]
@inference_state_method_cache()
def name(self):
string_name = self.py__package__()[-1]
return ImplicitNSName(self, string_name)
def py__file__(self):
def py__file__(self) -> Optional[Path]:
return None
def py__package__(self):

View File

@@ -1,5 +1,6 @@
import re
import textwrap
from ast import literal_eval
from inspect import cleandoc
from weakref import WeakKeyDictionary
@@ -7,8 +8,6 @@ from parso.python import tree
from parso.cache import parser_cache
from parso import split_lines
from jedi._compatibility import literal_eval, force_unicode
_EXECUTE_NODES = {'funcdef', 'classdef', 'import_from', 'import_name', 'test',
'or_test', 'and_test', 'not_test', 'comparison', 'expr',
'xor_expr', 'and_expr', 'shift_expr', 'arith_expr',
@@ -91,7 +90,7 @@ def get_flow_branch_keyword(flow_node, node):
first_leaf = child.get_first_leaf()
if first_leaf in _FLOW_KEYWORDS:
keyword = first_leaf
return 0
return None
def clean_scope_docstring(scope_node):
@@ -102,10 +101,7 @@ def clean_scope_docstring(scope_node):
# leaves anymore that might be part of the docstring. A
# docstring can also look like this: ``'foo' 'bar'
# Returns a literal cleaned version of the ``Token``.
cleaned = cleandoc(safe_literal_eval(node.value))
# Since we want the docstr output to be always unicode, just
# force it.
return force_unicode(cleaned)
return cleandoc(safe_literal_eval(node.value))
return ''
@@ -117,10 +113,7 @@ def find_statement_documentation(tree_node):
if maybe_string.type == 'simple_stmt':
maybe_string = maybe_string.children[0]
if maybe_string.type == 'string':
cleaned = cleandoc(safe_literal_eval(maybe_string.value))
# Since we want the docstr output to be always unicode, just
# force it.
return force_unicode(cleaned)
return cleandoc(safe_literal_eval(maybe_string.value))
return ''
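The docstring helpers above literal-eval the raw string token and then run inspect.cleandoc on it. Standalone sketch of that pipeline:

from ast import literal_eval
from inspect import cleandoc

token = '"""\n    First line\n        indented detail\n    """'
assert cleandoc(literal_eval(token)) == 'First line\n    indented detail'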
@@ -131,15 +124,7 @@ def safe_literal_eval(value):
# manually, but that's right now not implemented.
return ''
try:
return literal_eval(value)
except SyntaxError:
# It's possible to create syntax errors with literals like rb'' in
# Python 2. This should not be possible and in that case just return an
# empty string.
# Before Python 3.3 there was a more strict definition in which order
# you could define literals.
return ''
return literal_eval(value)
def get_signature(funcdef, width=72, call_string=None,
@@ -231,11 +216,14 @@ def is_scope(node):
def _get_parent_scope_cache(func):
cache = WeakKeyDictionary()
def wrapper(used_names, node, include_flows=False):
def wrapper(parso_cache_node, node, include_flows=False):
if parso_cache_node is None:
return func(node, include_flows)
try:
for_module = cache[used_names]
for_module = cache[parso_cache_node]
except KeyError:
for_module = cache[used_names] = {}
for_module = cache[parso_cache_node] = {}
try:
return for_module[node]
@@ -254,7 +242,7 @@ def get_parent_scope(node, include_flows=False):
return None # It's a module already.
while True:
if is_scope(scope) or include_flows and isinstance(scope, tree.Flow):
if is_scope(scope):
if scope.type in ('classdef', 'funcdef', 'lambdef'):
index = scope.children.index(':')
if scope.children[index].start_pos >= node.start_pos:
@@ -266,6 +254,14 @@ def get_parent_scope(node, include_flows=False):
scope = scope.parent
continue
return scope
elif include_flows and isinstance(scope, tree.Flow):
# The cursor might be on `if foo`, so the parent scope will not be
# the if, but the parent of the if.
if not (scope.type == 'if_stmt'
and any(n.start_pos <= node.start_pos < n.end_pos
for n in scope.get_test_nodes())):
return scope
scope = scope.parent
@@ -277,7 +273,18 @@ def get_cached_code_lines(grammar, path):
Basically access the cached code lines in parso. This is not the nicest way
to do this, but we avoid splitting all the lines again.
"""
return parser_cache[grammar._hashed][path].lines
return get_parso_cache_node(grammar, path).lines
def get_parso_cache_node(grammar, path):
"""
This is of course not public. But as long as I control parso, this
shouldn't be a problem. ~ Dave
The reason for this is mostly caching. This is obviously also a sign of a
broken caching architecture.
"""
return parser_cache[grammar._hashed][path]
def cut_value_at_position(leaf, position):
@@ -313,7 +320,7 @@ def expr_is_dotted(node):
return node.type == 'name'
def _function_is_x_method(method_name):
def _function_is_x_method(decorator_checker):
def wrapper(function_node):
"""
This is a heuristic. It will not hold ALL the times, but it will be
@@ -323,11 +330,16 @@ def _function_is_x_method(method_name):
"""
for decorator in function_node.get_decorators():
dotted_name = decorator.children[1]
if dotted_name.get_code() == method_name:
if decorator_checker(dotted_name.get_code()):
return True
return False
return wrapper
function_is_staticmethod = _function_is_x_method('staticmethod')
function_is_classmethod = _function_is_x_method('classmethod')
function_is_staticmethod = _function_is_x_method(lambda m: m == "staticmethod")
function_is_classmethod = _function_is_x_method(lambda m: m == "classmethod")
function_is_property = _function_is_x_method(
lambda m: m == "property"
or m == "cached_property"
or (m.endswith(".setter"))
)
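function_is_property now takes a predicate instead of a fixed decorator name. The same factory pattern with plain strings (a hypothetical helper, not jedi API):

def make_decorator_check(predicate):
    def check(decorator_names):
        return any(predicate(name) for name in decorator_names)
    return check

is_property_like = make_decorator_check(
    lambda m: m in ('property', 'cached_property') or m.endswith('.setter')
)
assert is_property_like(['area.setter']) is True
assert is_property_like(['staticmethod']) is False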

View File

@@ -1,7 +1,7 @@
from functools import wraps
class _PluginManager(object):
class _PluginManager:
def __init__(self):
self._registered_plugins = []
self._cached_base_callbacks = {}

View File

@@ -1,7 +1,8 @@
"""
Module is used to infer Django model fields.
"""
from jedi._compatibility import Parameter
from inspect import Parameter
from jedi import debug
from jedi.inference.cache import inference_state_function_cache
from jedi.inference.base_value import ValueSet, iterator_to_value_set, ValueWrapper
@@ -113,7 +114,7 @@ def _infer_field(cls, field_name, is_instance):
class DjangoModelName(NameWrapper):
def __init__(self, cls, name, is_instance):
super(DjangoModelName, self).__init__(name)
super().__init__(name)
self._cls = cls
self._is_instance = is_instance
@@ -257,7 +258,7 @@ class GenericFieldWrapper(AttributeOverwrite, ClassMixin):
class DjangoModelSignature(AbstractSignature):
def __init__(self, value, field_names):
super(DjangoModelSignature, self).__init__(value)
super().__init__(value)
self._field_names = field_names
def get_param_names(self, resolve_stars=False):
@@ -266,7 +267,7 @@ class DjangoModelSignature(AbstractSignature):
class DjangoParamName(BaseTreeParamName):
def __init__(self, field_name):
super(DjangoParamName, self).__init__(field_name.parent_context, field_name.tree_name)
super().__init__(field_name.parent_context, field_name.tree_name)
self._field_name = field_name
def get_kind(self):
@@ -278,7 +279,7 @@ class DjangoParamName(BaseTreeParamName):
class QuerySetMethodWrapper(ValueWrapper):
def __init__(self, method, model_cls):
super(QuerySetMethodWrapper, self).__init__(method)
super().__init__(method)
self._model_cls = model_cls
def py__get__(self, instance, class_value):
@@ -288,7 +289,7 @@ class QuerySetMethodWrapper(ValueWrapper):
class QuerySetBoundMethodWrapper(ValueWrapper):
def __init__(self, method, model_cls):
super(QuerySetBoundMethodWrapper, self).__init__(method)
super().__init__(method)
self._model_cls = model_cls
def get_signatures(self):

View File

@@ -6,14 +6,14 @@ def import_module(callback):
def wrapper(inference_state, import_names, module_context, *args, **kwargs):
if len(import_names) == 3 and import_names[:2] == ('flask', 'ext'):
# New style.
ipath = (u'flask_' + import_names[2]),
ipath = ('flask_' + import_names[2]),
value_set = callback(inference_state, ipath, None, *args, **kwargs)
if value_set:
return value_set
value_set = callback(inference_state, (u'flaskext',), None, *args, **kwargs)
value_set = callback(inference_state, ('flaskext',), None, *args, **kwargs)
return callback(
inference_state,
(u'flaskext', import_names[2]),
('flaskext', import_names[2]),
next(iter(value_set)),
*args, **kwargs
)

View File

@@ -1,9 +1,13 @@
from parso.python.tree import search_ancestor
from jedi._compatibility import FileNotFoundError
import sys
from typing import List
from pathlib import Path
from parso.tree import search_ancestor
from jedi.inference.cache import inference_state_method_cache
from jedi.inference.imports import load_module_from_path
from jedi.inference.imports import goto_import, load_module_from_path
from jedi.inference.filters import ParserTreeFilter
from jedi.inference.base_value import NO_VALUES, ValueSet
from jedi.inference.helpers import infer_call_of_leaf
_PYTEST_FIXTURE_MODULES = [
('_pytest', 'monkeypatch'),
@@ -29,7 +33,15 @@ def execute(callback):
def infer_anonymous_param(func):
def get_returns(value):
if value.tree_node.annotation is not None:
return value.execute_with_values()
result = value.execute_with_values()
if any(v.name.get_qualified_names(include_module_names=True)
== ('typing', 'Generator')
for v in result):
return ValueSet.from_sets(
v.py__getattribute__('__next__').execute_annotation()
for v in result
)
return result
# In pytest we need to differentiate between generators and normal
# returns.
@@ -41,6 +53,9 @@ def infer_anonymous_param(func):
return function_context.get_return_values()
def wrapper(param_name):
# parameters with an annotation do not need special handling
if param_name.annotation_node:
return func(param_name)
is_pytest_param, param_name_is_function_name = \
_is_a_pytest_param_and_inherited(param_name)
if is_pytest_param:
@@ -118,6 +133,36 @@ def _is_pytest_func(func_name, decorator_nodes):
or any('fixture' in n.get_code() for n in decorator_nodes)
def _find_pytest_plugin_modules() -> List[List[str]]:
"""
Finds pytest plugin modules hooked by setuptools entry points
See https://docs.pytest.org/en/stable/how-to/writing_plugins.html#setuptools-entry-points
"""
if sys.version_info >= (3, 8):
from importlib.metadata import entry_points
if sys.version_info >= (3, 10):
pytest_entry_points = entry_points(group="pytest11")
else:
pytest_entry_points = entry_points().get("pytest11", ())
if sys.version_info >= (3, 9):
return [ep.module.split(".") for ep in pytest_entry_points]
else:
# Python 3.8 doesn't have `EntryPoint.module`. Implement equivalent
# to what Python 3.9 does (with additional None check to placate `mypy`)
matches = [
ep.pattern.match(ep.value)
for ep in pytest_entry_points
]
return [x.group('module').split(".") for x in matches if x]
else:
from pkg_resources import iter_entry_points
return [ep.module_name.split(".") for ep in iter_entry_points(group="pytest11")]
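A minimal standalone sketch of the modern branch above, assuming Python 3.10+ where entry_points() accepts group= (the printed result depends on which pytest plugins are installed):

from importlib.metadata import entry_points

plugin_modules = [ep.module.split('.') for ep in entry_points(group='pytest11')]
print(plugin_modules)  # e.g. [['pytest_cov', 'plugin']] when pytest-cov is present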
@inference_state_method_cache()
def _iter_pytest_modules(module_context, skip_own_module=False):
if not skip_own_module:
@@ -127,37 +172,97 @@ def _iter_pytest_modules(module_context, skip_own_module=False):
if file_io is not None:
folder = file_io.get_parent_folder()
sys_path = module_context.inference_state.get_sys_path()
# prevent an infinite loop when reaching the root of the current drive
last_folder = None
while any(folder.path.startswith(p) for p in sys_path):
file_io = folder.get_file_io('conftest.py')
if file_io.path != module_context.py__file__():
if Path(file_io.path) != module_context.py__file__():
try:
m = load_module_from_path(module_context.inference_state, file_io)
yield m.as_context()
conftest_module = m.as_context()
yield conftest_module
plugins_list = m.tree_node.get_used_names().get("pytest_plugins")
if plugins_list:
name = conftest_module.create_name(plugins_list[0])
yield from _load_pytest_plugins(module_context, name)
except FileNotFoundError:
pass
folder = folder.get_parent_folder()
for names in _PYTEST_FIXTURE_MODULES:
# prevent an infinite for loop if the same parent folder is returned twice
if last_folder is not None and folder.path == last_folder.path:
break
last_folder = folder # keep track of the last parent folder we saw
for names in _PYTEST_FIXTURE_MODULES + _find_pytest_plugin_modules():
for module_value in module_context.inference_state.import_module(names):
yield module_value.as_context()
def _load_pytest_plugins(module_context, name):
from jedi.inference.helpers import get_str_or_none
for inferred in name.infer():
for seq_value in inferred.py__iter__():
for value in seq_value.infer():
fq_name = get_str_or_none(value)
if fq_name:
names = fq_name.split(".")
for module_value in module_context.inference_state.import_module(names):
yield module_value.as_context()
class FixtureFilter(ParserTreeFilter):
def _filter(self, names):
for name in super(FixtureFilter, self)._filter(names):
funcdef = name.parent
if funcdef.type == 'funcdef':
# Class fixtures are not supported
decorated = funcdef.parent
if decorated.type == 'decorated' and self._is_fixture(decorated):
for name in super()._filter(names):
# look for fixture definitions of imported names
if name.parent.type == "import_from":
imported_names = goto_import(self.parent_context, name)
if any(
self._is_fixture(iname.parent_context, iname.tree_name)
for iname in imported_names
# discard imports of whole modules, which have no tree_name
if iname.tree_name
):
yield name
def _is_fixture(self, decorated):
for decorator in decorated.children:
elif self._is_fixture(self.parent_context, name):
yield name
def _is_fixture(self, context, name):
funcdef = name.parent
# Class fixtures are not supported
if funcdef.type != "funcdef":
return False
decorated = funcdef.parent
if decorated.type != "decorated":
return False
decorators = decorated.children[0]
if decorators.type == 'decorators':
decorators = decorators.children
else:
decorators = [decorators]
for decorator in decorators:
dotted_name = decorator.children[1]
# A heuristic, this makes it faster.
if 'fixture' in dotted_name.get_code():
for value in self.parent_context.infer_node(dotted_name):
if dotted_name.type == 'atom_expr':
# Since Python 3.9 a decorator does not have dotted names
# anymore.
last_trailer = dotted_name.children[-1]
last_leaf = last_trailer.get_last_leaf()
if last_leaf == ')':
values = infer_call_of_leaf(
context, last_leaf, cut_own_trailer=True
)
else:
values = context.infer_node(dotted_name)
else:
values = context.infer_node(dotted_name)
for value in values:
if value.name.get_qualified_names(include_module_names=True) \
== ('_pytest', 'fixtures', 'fixture'):
return True

View File

@@ -11,8 +11,8 @@ compiled module that returns the types for C-builtins.
"""
import parso
import os
from inspect import Parameter
from jedi._compatibility import force_unicode, Parameter
from jedi import debug
from jedi.inference.utils import safe_property
from jedi.inference.helpers import get_str_or_none
@@ -119,7 +119,9 @@ def execute(callback):
else:
return call()
if value.is_bound_method():
if value.is_bound_method() or value.is_instance():
# value can be an instance for example if it is a partial
# object.
return call()
# for now we just support builtin functions.
@@ -180,14 +182,9 @@ def argument_clinic(clinic_string, want_value=False, want_context=False,
@argument_clinic('iterator[, default], /', want_inference_state=True)
def builtins_next(iterators, defaults, inference_state):
if inference_state.environment.version_info.major == 2:
name = 'next'
else:
name = '__next__'
# TODO theoretically we have to check here if something is an iterator.
# That is probably done by checking if it's not a class.
return defaults | iterators.py__getattribute__(name).execute_with_values()
return defaults | iterators.py__getattribute__('__next__').execute_with_values()
@argument_clinic('iterator[, default], /')
@@ -206,7 +203,7 @@ def builtins_getattr(objects, names, defaults=None):
debug.warning('getattr called without str')
continue
else:
return value.py__getattribute__(force_unicode(string))
return value.py__getattribute__(string)
return NO_VALUES
@@ -257,15 +254,14 @@ def builtins_super(types, objects, context):
class ReversedObject(AttributeOverwrite):
def __init__(self, reversed_obj, iter_list):
super(ReversedObject, self).__init__(reversed_obj)
super().__init__(reversed_obj)
self._iter_list = iter_list
def py__iter__(self, contextualized_node):
def py__iter__(self, contextualized_node=None):
return self._iter_list
@publish_method('next', python_version_match=2)
@publish_method('__next__', python_version_match=3)
def py__next__(self, arguments):
@publish_method('__next__')
def _next(self, arguments):
return ValueSet.from_sets(
lazy_value.infer() for lazy_value in self._iter_list
)
@@ -327,7 +323,7 @@ def builtins_isinstance(objects, types, arguments, inference_state):
analysis.add(lazy_value.context, 'type-error-isinstance', node, message)
return ValueSet(
compiled.builtin_from_name(inference_state, force_unicode(str(b)))
compiled.builtin_from_name(inference_state, str(b))
for b in bool_results
)
@@ -344,7 +340,7 @@ def builtins_staticmethod(functions):
class ClassMethodObject(ValueWrapper):
def __init__(self, class_method_obj, function):
super(ClassMethodObject, self).__init__(class_method_obj)
super().__init__(class_method_obj)
self._function = function
def py__get__(self, instance, class_value):
@@ -356,7 +352,7 @@ class ClassMethodObject(ValueWrapper):
class ClassMethodGet(ValueWrapper):
def __init__(self, get_method, klass, function):
super(ClassMethodGet, self).__init__(get_method)
super().__init__(get_method)
self._class = klass
self._function = function
@@ -369,7 +365,7 @@ class ClassMethodGet(ValueWrapper):
class ClassMethodArguments(TreeArgumentsWrapper):
def __init__(self, klass, arguments):
super(ClassMethodArguments, self).__init__(arguments)
super().__init__(arguments)
self._class = klass
def unpack(self, func=None):
@@ -388,8 +384,10 @@ def builtins_classmethod(functions, value, arguments):
class PropertyObject(AttributeOverwrite, ValueWrapper):
api_type = 'property'
def __init__(self, property_obj, function):
super(PropertyObject, self).__init__(property_obj)
super().__init__(property_obj)
self._function = function
def py__get__(self, instance, class_value):
@@ -424,11 +422,11 @@ def collections_namedtuple(value, arguments, callback):
inference_state = value.inference_state
# Process arguments
name = u'jedi_unknown_namedtuple'
name = 'jedi_unknown_namedtuple'
for c in _follow_param(inference_state, arguments, 0):
x = get_str_or_none(c)
if x is not None:
name = force_unicode(x)
name = x
break
# TODO here we only use one of the types, we should use all.
@@ -438,10 +436,10 @@ def collections_namedtuple(value, arguments, callback):
_fields = list(param_values)[0]
string = get_str_or_none(_fields)
if string is not None:
fields = force_unicode(string).replace(',', ' ').split()
fields = string.replace(',', ' ').split()
elif isinstance(_fields, iterable.Sequence):
fields = [
force_unicode(get_str_or_none(v))
get_str_or_none(v)
for lazy_value in _fields.py__iter__()
for v in lazy_value.infer()
]
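The field handling above mirrors collections.namedtuple, which accepts either a sequence of names or one comma/space separated string:

from collections import namedtuple

Point = namedtuple('Point', 'x, y')
assert Point._fields == ('x', 'y')
assert 'x,  y'.replace(',', ' ').split() == ['x', 'y']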
@@ -454,7 +452,7 @@ def collections_namedtuple(value, arguments, callback):
typename=name,
field_names=tuple(fields),
num_fields=len(fields),
arg_list=repr(tuple(fields)).replace("u'", "").replace("'", "")[1:-1],
arg_list=repr(tuple(fields)).replace("'", "")[1:-1],
repr_fmt='',
field_defs='\n'.join(_NAMEDTUPLE_FIELD_TEMPLATE.format(index=index, name=name)
for index, name in enumerate(fields))
@@ -473,12 +471,11 @@ def collections_namedtuple(value, arguments, callback):
class PartialObject(ValueWrapper):
def __init__(self, actual_value, arguments, instance=None):
super(PartialObject, self).__init__(actual_value)
self._actual_value = actual_value
super().__init__(actual_value)
self._arguments = arguments
self._instance = instance
def _get_function(self, unpacked_arguments):
def _get_functions(self, unpacked_arguments):
key, lazy_value = next(unpacked_arguments, (None, None))
if key is not None or lazy_value is None:
debug.warning("Partial should have a proper function %s", self._arguments)
@@ -487,8 +484,8 @@ class PartialObject(ValueWrapper):
def get_signatures(self):
unpacked_arguments = self._arguments.unpack()
func = self._get_function(unpacked_arguments)
if func is None:
funcs = self._get_functions(unpacked_arguments)
if funcs is None:
return []
arg_count = 0
@@ -500,17 +497,30 @@ class PartialObject(ValueWrapper):
arg_count += 1
else:
keys.add(key)
return [PartialSignature(s, arg_count, keys) for s in func.get_signatures()]
return [PartialSignature(s, arg_count, keys) for s in funcs.get_signatures()]
def py__call__(self, arguments):
func = self._get_function(self._arguments.unpack())
if func is None:
funcs = self._get_functions(self._arguments.unpack())
if funcs is None:
return NO_VALUES
return func.execute(
return funcs.execute(
MergedPartialArguments(self._arguments, arguments, self._instance)
)
def py__doc__(self):
"""
In CPython partial does not replace the docstring. However, we still
imitate it here, because we want this docstring to be worth something
for the user.
"""
callables = self._get_functions(self._arguments.unpack())
if callables is None:
return ''
for callable_ in callables:
return callable_.py__doc__()
return ''
def py__get__(self, instance, class_value):
return ValueSet([self])
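PartialObject models functools.partial, where already-bound arguments disappear from the remaining signature. Standalone sketch:

from functools import partial

def connect(host, port, *, timeout=1.0):
    return (host, port, timeout)

local = partial(connect, 'localhost', timeout=5.0)
# 'localhost' and timeout are pre-bound; the remaining call only needs port.
assert local(8080) == ('localhost', 8080, 5.0)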
@@ -519,12 +529,12 @@ class PartialMethodObject(PartialObject):
def py__get__(self, instance, class_value):
if instance is None:
return ValueSet([self])
return ValueSet([PartialObject(self._actual_value, self._arguments, instance)])
return ValueSet([PartialObject(self._wrapped_value, self._arguments, instance)])
class PartialSignature(SignatureWrapper):
def __init__(self, wrapped_signature, skipped_arg_count, skipped_arg_set):
super(PartialSignature, self).__init__(wrapped_signature)
super().__init__(wrapped_signature)
self._skipped_arg_count = skipped_arg_count
self._skipped_arg_set = skipped_arg_set
@@ -617,7 +627,7 @@ class DataclassWrapper(ValueWrapper, ClassMixin):
class DataclassSignature(AbstractSignature):
def __init__(self, value, param_names):
-        super(DataclassSignature, self).__init__(value)
+        super().__init__(value)
self._param_names = param_names
def get_param_names(self, resolve_stars=False):
@@ -626,7 +636,7 @@ class DataclassSignature(AbstractSignature):
class DataclassParamName(BaseTreeParamName):
def __init__(self, parent_context, tree_name, annotation_node, default_node):
-        super(DataclassParamName, self).__init__(parent_context, tree_name)
+        super().__init__(parent_context, tree_name)
self.annotation_node = annotation_node
self.default_node = default_node
@@ -642,7 +652,7 @@ class DataclassParamName(BaseTreeParamName):
class ItemGetterCallable(ValueWrapper):
def __init__(self, instance, args_value_set):
-        super(ItemGetterCallable, self).__init__(instance)
+        super().__init__(instance)
self._args_value_set = args_value_set
@repack_with_argument_clinic('item, /')
@@ -680,7 +690,7 @@ class WrapsCallable(ValueWrapper):
class Wrapped(ValueWrapper, FunctionMixin):
def __init__(self, func, original_function):
-        super(Wrapped, self).__init__(func)
+        super().__init__(func)
self._original_function = original_function
@property
@@ -718,7 +728,7 @@ def _create_string_input_function(func):
@argument_clinic('*args, /', want_callback=True)
def _os_path_join(args_set, callback):
if len(args_set) == 1:
-        string = u''
+        string = ''
sequence, = args_set
is_first = True
for lazy_value in sequence.py__iter__():
@@ -730,7 +740,7 @@ def _os_path_join(args_set, callback):
break
if not is_first:
string += os.path.sep
-            string += force_unicode(s)
+            string += s
is_first = False
else:
return ValueSet([compiled.create_simple_object(sequence.inference_state, string)])
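The accumulation loop above imitates a plain os.path.join over string values Jedi has already inferred; roughly equivalent, with made-up parts for illustration:

import os

parts = ['usr', 'local', 'bin']
# Joining simple relative parts with os.path.sep matches os.path.join's result.
assert os.path.sep.join(parts) == os.path.join(*parts)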
@@ -793,6 +803,17 @@ _implemented = {
# For now this works at least better than Jedi trying to understand it.
'dataclass': _dataclass
},
+    # attrs exposes declaration interface roughly compatible with dataclasses
+    # via attrs.define, attrs.frozen and attrs.mutable
+    # https://www.attrs.org/en/stable/names.html
+    'attr': {
+        'define': _dataclass,
+        'frozen': _dataclass,
+    },
+    'attrs': {
+        'define': _dataclass,
+        'frozen': _dataclass,
+    },
'os.path': {
'dirname': _create_string_input_function(os.path.dirname),
'abspath': _create_string_input_function(os.path.abspath),
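The added entries route attrs' modern decorators through the same handler as dataclasses. A hypothetical class that should now get constructor signature help, assuming the attrs package is installed (not part of the diff):

import attrs

@attrs.define
class Point:
    x: int
    y: int = 0

# With 'define'/'frozen' mapped to _dataclass, Jedi can infer Point's
# __init__ parameters much as it does for @dataclasses.dataclass.
Point(x=1, y=2)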
@@ -809,7 +830,8 @@ def get_metaclass_filters(func):
and metaclass.get_root_context().py__name__() == 'enum':
filter_ = ParserTreeFilter(parent_context=cls.as_context())
return [DictFilter({
-                    name.string_name: EnumInstance(cls, name).name for name in filter_.values()
+                    name.string_name: EnumInstance(cls, name).name
+                    for name in filter_.values()
})]
return func(cls, metaclasses, is_instance)
return wrapper
@@ -827,6 +849,14 @@ class EnumInstance(LazyValueWrapper):
return ValueName(self, self._name.tree_name)
def _get_wrapped_value(self):
+        n = self._name.string_name
+        if n.startswith('__') and n.endswith('__') or self._name.api_type == 'function':
+            inferred = self._name.infer()
+            if inferred:
+                return next(iter(inferred))
+            o, = self.inference_state.builtins_module.py__getattribute__('object')
+            return o
value, = self._cls.execute_with_values()
return value
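The added branch keeps dunder attributes and methods of an enum member from being wrapped as if they were further enum values. For a member like the one below, describe should be inferred as an ordinary method and __class__ as a normal dunder (illustrative only):

import enum

class Color(enum.Enum):
    RED = 1

    def describe(self):
        return f'{self.name} = {self.value}'

# Color.RED.describe is a plain method and Color.RED.__class__ a dunder;
# neither should be completed as another enum member of Color.
print(Color.RED.describe())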


@@ -69,8 +69,11 @@ Adds an opening bracket after a function for completions.
# ----------------
if platform.system().lower() == 'windows':
-    _cache_directory = os.path.join(os.getenv('LOCALAPPDATA') or
-                                    os.path.expanduser('~'), 'Jedi', 'Jedi')
+    _cache_directory = os.path.join(
+        os.getenv('LOCALAPPDATA') or os.path.expanduser('~'),
+        'Jedi',
+        'Jedi',
+    )
elif platform.system().lower() == 'darwin':
_cache_directory = os.path.join('~', 'Library', 'Caches', 'Jedi')
else:
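The reformatting keeps the same Windows default location (%LOCALAPPDATA%\Jedi\Jedi, falling back to the home directory). Assuming the public jedi.settings.cache_directory attribute that is derived from this private value, the effective path can be inspected like this (illustrative):

import jedi

print(jedi.settings.cache_directory)  # e.g. ~/Library/Caches/Jedi on macOS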
@@ -98,7 +101,7 @@ parse the parts again that have changed, while reusing the rest of the syntax
tree.
"""
-_cropped_file_size = 10e6  # 1 Megabyte
+_cropped_file_size = int(10e6)  # 1 Megabyte
"""
Jedi gets extremely slow if the file size exceed a few thousand lines.
To avoid getting stuck completely Jedi crops the file at some point.
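A likely reason for the int() cast: 10e6 is a float literal, and a float cannot be used where an integer index or size is expected, for example as a slice bound. Illustrative only:

size = 10e6            # float: 10000000.0
source = 'x' * 100
try:
    source[:size]      # TypeError: slice indices must be integers or None ...
except TypeError:
    pass
source[:int(size)]     # fine once the limit is an int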
@@ -140,6 +143,15 @@ This improves autocompletion for libraries that use ``setattr`` or
``globals()`` modifications a lot.
"""
+allow_unsafe_interpreter_executions = True
+"""
+Controls whether descriptors are evaluated when using an Interpreter. This is
+something you might want to control when using Jedi from a Repl (e.g. IPython)
+
+Generally this setting allows Jedi to execute __getitem__ and descriptors like
+`property`.
+"""
# ----------------
# Caching Validity
# ----------------

Some files were not shown because too many files have changed in this diff.