Mirror of https://github.com/davidhalter/typeshed.git (synced 2025-12-09 05:24:52 +08:00)
.flake8
@@ -14,8 +14,6 @@ per-file-ignores =
    # Y026: Have implicit type aliases
    # Y053: have literals >50 characters long
    stubs/*_pb2.pyi: Y021, Y023, Y026, Y053
    # TODO: Remove bare "Incomplete"s from stubs (Y065)
    stubs/*.pyi: Y065

exclude = .venv*,.git
noqa_require_code = true
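Note on Y065: flake8-pyi's Y065 check flags bare `Incomplete` annotations (seen on both parameters and return types throughout this diff), and dropping the `stubs/*.pyi: Y065` exemption above is what drives the stub edits below, where those annotations are simply removed; in a stub, an unannotated parameter or a missing return annotation conveys the same "not yet typed" state. A minimal before/after sketch with a hypothetical stub function (stub-style, .pyi):

from _typeshed import Incomplete

# Before: both bare `Incomplete` annotations trigger Y065.
def frobnicate(widget: Incomplete) -> Incomplete: ...

# After: the annotations are dropped; the import stays only if `Incomplete`
# is still used in a non-bare position (e.g. inside `list[Incomplete]`).
def frobnicate_fixed(widget): ...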
@@ -75,9 +75,11 @@
    "stubs/psutil",
    "stubs/psycopg2",
    "stubs/pyasn1",
    "stubs/pycurl",
    "stubs/pyflakes",
    "stubs/Pygments",
    "stubs/PyMySQL",
    "stubs/python-crontab",
    "stubs/python-dateutil",
    "stubs/python-jose",
    "stubs/pywin32",
@@ -87,8 +89,10 @@
    "stubs/redis",
    "stubs/requests",
    "stubs/requests-oauthlib",
    "stubs/seaborn",
    "stubs/setuptools/setuptools",
    "stubs/stripe",
    "stubs/tensorflow",
    "stubs/tqdm",
    "stubs/ttkthemes",
    "stubs/vobject",
@@ -1,5 +1,4 @@
from _typeshed import Incomplete
from typing import Any

def BuildMessageAndEnumDescriptors(file_des: Incomplete, module: dict[str, Any]) -> None: ...
def BuildTopDescriptorsAndMessages(file_des: Incomplete, module_name: str, module: dict[str, Any]) -> None: ...
def BuildMessageAndEnumDescriptors(file_des, module: dict[str, Any]) -> None: ...
def BuildTopDescriptorsAndMessages(file_des, module_name: str, module: dict[str, Any]) -> None: ...

@@ -9,12 +9,10 @@ from google.protobuf import struct_pb2
class Any:
    type_url: str
    value: Incomplete
    def Pack(
        self, msg: Incomplete, type_url_prefix: str = "type.googleapis.com/", deterministic: Incomplete | None = None
    ) -> None: ...
    def Unpack(self, msg: Incomplete) -> bool: ...
    def Pack(self, msg, type_url_prefix: str = "type.googleapis.com/", deterministic: Incomplete | None = None) -> None: ...
    def Unpack(self, msg) -> bool: ...
    def TypeName(self) -> str: ...
    def Is(self, descriptor: Incomplete) -> bool: ...
    def Is(self, descriptor) -> bool: ...

class Timestamp:
    def ToJsonString(self) -> str: ...
@@ -282,8 +282,8 @@ class Warning(Exception): ...
class ISQLQuote:
    _wrapped: Any
    def __init__(self, wrapped: object, /, **kwargs) -> None: ...
    def getbinary(self) -> Incomplete: ...
    def getbuffer(self) -> Incomplete: ...
    def getbinary(self): ...
    def getbuffer(self): ...
    def getquoted(self) -> bytes: ...

class Decimal:
@@ -582,7 +582,7 @@ def Timestamp(
def TimestampFromPy(datetime: dt.datetime, /) -> _datetime: ...
def TimestampFromTicks(ticks: float, /) -> _datetime: ...
def _connect(*args, **kwargs): ...
def adapt(obj: object, protocol: Incomplete = ..., alternate: Incomplete = ..., /) -> Any: ...
def adapt(obj: object, protocol=..., alternate=..., /) -> Any: ...
def encrypt_password(
    password: str | bytes, user: str | bytes, scope: connection | cursor | None = None, algorithm: str | None = None
) -> str: ...

@@ -12,8 +12,8 @@ class AbstractConnectionPool:
    def __init__(self, minconn: ConvertibleToInt, maxconn: ConvertibleToInt, *args, **kwargs) -> None: ...
    # getconn, putconn and closeall are officially documented as methods of the
    # abstract base class, but in reality, they only exist on the children classes
    def getconn(self, key: Hashable | None = None) -> Incomplete: ...
    def putconn(self, conn: Incomplete, key: Hashable | None = None, close: bool = False) -> None: ...
    def getconn(self, key: Hashable | None = None): ...
    def putconn(self, conn, key: Hashable | None = None, close: bool = False) -> None: ...
    def closeall(self) -> None: ...

class SimpleConnectionPool(AbstractConnectionPool): ...
@@ -1,7 +1,7 @@
version = "24.0.*"
upstream_repository = "https://github.com/pyca/pyopenssl"
# Requires a version of cryptography with a `py.typed` file
requires = ["cryptography>=35.0.0"]
requires = ["cryptography>=35.0.0", "types-cffi"]
partial_stub = true

[tool.stubtest]

@@ -4,6 +4,7 @@ from _typeshed import Incomplete, ReadableBuffer
from collections.abc import Callable, MutableSequence, Sequence
from typing import Any, TypeVar

import _cffi_backend as cffi
from OpenSSL.crypto import X509, PKey, X509Name

OPENSSL_VERSION_NUMBER: int
@@ -164,9 +165,7 @@ class Connection:
    def server_random(self) -> bytes | None: ...
    def client_random(self) -> bytes | None: ...
    def master_key(self) -> bytes | None: ...
    def export_keying_material(
        self, label: Incomplete, olen: Incomplete, context: Incomplete = None
    ) -> Incomplete: ...  # TODO: type, see RFC-5705
    def export_keying_material(self, label: bytes, olen: int, context: bytes | None = None) -> cffi.buffer: ...
    def get_app_data(self) -> Any: ...
    def set_app_data(self, data: Any) -> None: ...
    def sock_shutdown(self, how: int, /) -> None: ...  # alias to `_socket.socket.shutdown`
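The `types-cffi` addition in METADATA.toml above pairs with the new `import _cffi_backend as cffi` line: `Connection.export_keying_material` now advertises concrete types (a `bytes` label, an `int` length, a `cffi.buffer` result) instead of `Incomplete`, so the cffi type stubs must be available when type checking. A rough usage sketch, assuming an already-established `OpenSSL.SSL.Connection`:

from OpenSSL.SSL import Connection

def exported_key(conn: Connection, length: int = 32) -> bytes:
    # With the updated stub the checker sees `_cffi_backend.buffer` here,
    # not `Incomplete`; bytes() copies it out via the buffer protocol.
    material = conn.export_keying_material(b"EXPERIMENTAL typeshed", length)
    return bytes(material)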
@@ -17,16 +17,16 @@ class error(Exception): ...
class Curl:
    USERPWD: int
    def close(self) -> None: ...
    def setopt(self, option: int, value: Incomplete) -> None: ...
    def setopt(self, option: int, value) -> None: ...
    def setopt_string(self, option: int, value: str) -> None: ...
    def perform(self) -> None: ...
    def perform_rb(self) -> bytes: ...
    def perform_rs(self) -> str: ...
    def getinfo(self, info: Incomplete) -> Incomplete: ...
    def getinfo_raw(self, info: Incomplete) -> Incomplete: ...
    def getinfo(self, info): ...
    def getinfo_raw(self, info): ...
    def reset(self) -> None: ...
    def unsetopt(self, option: int) -> Incomplete: ...
    def pause(self, bitmask: Incomplete) -> Incomplete: ...
    def unsetopt(self, option: int): ...
    def pause(self, bitmask): ...
    def errstr(self) -> str: ...
    def duphandle(self) -> Self: ...
    def errstr_raw(self) -> bytes: ...
@@ -37,20 +37,20 @@ class CurlMulti:
    def close(self) -> None: ...
    def add_handle(self, obj: Curl) -> None: ...
    def remove_handle(self, obj: Curl) -> None: ...
    def setopt(self, option: int, value: Incomplete) -> None: ...
    def setopt(self, option: int, value) -> None: ...
    def perform(self) -> tuple[Incomplete, int]: ...
    def fdset(self) -> tuple[list[Incomplete], list[Incomplete], list[Incomplete]]: ...
    def select(self, timeout: float) -> int: ...
    def info_read(self, max_objects: int = ...) -> tuple[int, list[Incomplete], list[Incomplete]]: ...
    def socket_action(self, sockfd: int, ev_bitmask: int) -> tuple[int, int]: ...
    def assign(self, sockfd: int, socket: Incomplete, /) -> Incomplete: ...
    def assign(self, sockfd: int, socket, /): ...
    def socket_all(self) -> tuple[int, int]: ...
    def timeout(self) -> int: ...

@final
class CurlShare:
    def close(self) -> None: ...
    def setopt(self, option: int, value: Incomplete) -> Incomplete: ...
    def setopt(self, option: int, value): ...

if sys.platform != "darwin":
    CURL_VERSION_HTTP3: int
@@ -24,7 +24,25 @@ class Splash(Target):
    script: Incomplete
    splash_requirements: Incomplete
    binaries: list[_TOCTuple]
    def __init__(self, image_file: StrPath, binaries: list[_TOCTuple], datas: list[_TOCTuple], **kwargs: Incomplete) -> None: ...
    def __init__(
        self,
        image_file: StrPath,
        binaries: list[_TOCTuple],
        datas: list[_TOCTuple],
        *,
        text_pos: tuple[int, int] = ...,
        text_size: int = 12,
        text_font: str = ...,
        text_color: str = "black",
        text_default: str = "Initializing",
        full_tk: bool = False,
        minify_script: bool = True,
        rundir: str = "__splash",
        name: str = ...,
        script_name: str = ...,
        max_img_size: tuple[int, int] | None = (760, 480),
        always_on_top: bool = True,
    ) -> None: ...
    def assemble(self) -> None: ...
    def test_tk_version(self) -> None: ...
    def generate_script(self) -> str: ...
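Expanding `**kwargs: Incomplete` into explicit keyword-only options means a type checker can now validate splash-screen settings by name and type. A small sketch of the difference (the `PyInstaller.building.splash` import path and the empty TOC lists are assumptions for illustration only):

from PyInstaller.building.splash import Splash

def make_splash(image_file: str) -> Splash:
    # Known options are checked; a typo such as `text_colour=...` or a wrong
    # type for `max_img_size` is now an error instead of vanishing into kwargs.
    return Splash(image_file, binaries=[], datas=[], text_color="navy", always_on_top=False)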
@@ -158,9 +158,9 @@ class CronItem:
    def run_pending(self, now: datetime | None = ...) -> int | str: ...
    def run(self) -> str: ...
    # TODO: use types from `croniter` module here:
    def schedule(self, date_from: datetime | None = ...) -> Incomplete: ...
    def schedule(self, date_from: datetime | None = ...): ...
    # TODO: use types from `cron_descriptor` here:
    def description(self, **kw: Incomplete) -> Incomplete: ...
    def description(self, **kw): ...
    @property
    def log(self) -> CronLog: ...
    @property
@@ -292,6 +292,6 @@ class OrderedVariableList(OrderedDict[Incomplete, Incomplete]):
    job: Incomplete
    def __init__(self, *args: Any, **kw: Any) -> None: ...
    @property
    def previous(self) -> Incomplete: ...
    def previous(self): ...
    def all(self) -> Self: ...
    def __getitem__(self, key: Incomplete) -> Incomplete: ...
    def __getitem__(self, key): ...
@@ -1,17 +1,21 @@
from _typeshed import Incomplete
from collections.abc import Iterator, Mapping as DictMixin
from typing import TypeVar

class LazyDict(DictMixin[str, Incomplete]):
    data: dict[str, Incomplete] | None
    def __getitem__(self, key: str) -> Incomplete: ...
_T = TypeVar("_T")
_VT = TypeVar("_VT")

class LazyDict(DictMixin[str, _VT]):
    data: dict[str, _VT] | None
    def __getitem__(self, key: str) -> _VT: ...
    def __contains__(self, key: object) -> bool: ...
    def __iter__(self) -> Iterator[str]: ...
    def __len__(self) -> int: ...

class LazyList(list[Incomplete]):
class LazyList(list[_T]):
    # does not return `Self` type:
    def __new__(cls, fill_iter: Incomplete | None = None) -> LazyList: ...  # noqa: Y034
    def __new__(cls, fill_iter: Incomplete | None = None) -> LazyList[_T]: ...

class LazySet(set[Incomplete]):
class LazySet(set[_T]):
    # does not return `Self` type:
    def __new__(cls, fill_iter: Incomplete | None = None) -> LazySet: ...  # noqa: Y034
    def __new__(cls, fill_iter: Incomplete | None = None) -> LazySet[_T]: ...
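Making these lazy containers generic over `_T`/`_VT` means subscripted uses keep a real element type instead of collapsing to `Incomplete`. A standalone sketch that mirrors the pattern (this is not the dateutil implementation, only an illustration of what the parameterized stubs enable):

from typing import TypeVar

_T = TypeVar("_T")

class LazyList(list[_T]):
    # Stand-in for the stubbed class: subscripting preserves the element type.
    pass

zones: LazyList[str] = LazyList(["UTC", "US/Eastern"])
first_zone = zones[0]  # inferred as str, where the old stub gave Incomplete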
@@ -250,7 +250,7 @@ class PipelineCommand:
    ) -> None: ...

class _ParseResponseCallback(Protocol):
    def __call__(self, connection: Connection, command: EncodableT, /, **kwargs: Incomplete) -> Any: ...
    def __call__(self, connection: Connection, command: EncodableT, /, **kwargs) -> Any: ...

class NodeCommands:
    parse_response: _ParseResponseCallback

@@ -52,7 +52,7 @@ class RedisClusterCommands(
    def cluster_replicas(self, node_id, target_nodes: Incomplete | None = None): ...
    def cluster_slots(self, target_nodes: Incomplete | None = None): ...
    def cluster_myshardid(self, target_nodes: Incomplete | None = None): ...
    def cluster_links(self, target_node: Incomplete): ...
    def cluster_links(self, target_node): ...
    def cluster_flushslots(self, target_nodes: Incomplete | None = None) -> NoReturn: ...
    def cluster_bumpepoch(self, target_nodes: Incomplete | None = None) -> NoReturn: ...
    read_from_replicas: bool

@@ -16,9 +16,7 @@ class _RefreshTokenResponseHook(Protocol):
    def __call__(self, response: requests.Response, /) -> requests.Response: ...

class _ProtectedRequestHook(Protocol):
    def __call__(
        self, url: Incomplete, headers: Incomplete, data: Incomplete, /
    ) -> tuple[Incomplete, Incomplete, Incomplete]: ...
    def __call__(self, url, headers, data, /) -> tuple[Incomplete, Incomplete, Incomplete]: ...

class _ComplianceHooks(TypedDict):
    access_token_response: set[_AccessTokenResponseHook]
@@ -23,10 +23,10 @@ class GroupBy:
        self,
        data: DataFrame,
        func: _AggFuncTypeFrame = ...,
        *args: Incomplete,
        *args,
        engine: str | None = None,
        engine_kwargs: dict[str, bool] | None = None,
        **kwargs: Incomplete,
        **kwargs,
    ) -> DataFrame: ...
    def apply(
        self, data: DataFrame, func: Callable[Concatenate[DataFrame, _P], DataFrame], *args: _P.args, **kwargs: _P.kwargs

@@ -53,7 +53,7 @@ class ThemeConfig(mpl.RcParams):
    THEME_GROUPS: list[str]
    def __init__(self) -> None: ...
    def reset(self) -> None: ...
    def update(self, other: SupportsKeysAndGetItem[Incomplete, Incomplete] | None = None, /, **kwds: Incomplete) -> None: ...  # type: ignore[override]
    def update(self, other: SupportsKeysAndGetItem[Incomplete, Incomplete] | None = None, /, **kwds) -> None: ...  # type: ignore[override]

class DisplayConfig(TypedDict):
    format: Literal["png", "svg"]

@@ -93,8 +93,8 @@ class PseudoAxis:
    def set_major_formatter(self, formatter: Formatter) -> None: ...
    def set_minor_locator(self, locator: Locator) -> None: ...
    def set_minor_formatter(self, formatter: Formatter) -> None: ...
    def set_units(self, units: Incomplete) -> None: ...
    def update_units(self, x: Incomplete) -> None: ...
    def convert_units(self, x: Incomplete) -> Incomplete: ...
    def set_units(self, units) -> None: ...
    def update_units(self, x) -> None: ...
    def convert_units(self, x): ...
    def get_scale(self) -> ScaleBase: ...
    def get_majorticklocs(self) -> NDArray[Incomplete]: ...

@@ -12,7 +12,7 @@ from pandas import DataFrame, Index, Series, Timedelta, Timestamp
class SupportsDataFrame(Protocol):
    # `__dataframe__` should return pandas.core.interchange.dataframe_protocol.DataFrame
    # but this class needs to be defined as a Protocol, not as an ABC.
    def __dataframe__(self, nan_as_null: bool = ..., allow_copy: bool = ...) -> Incomplete: ...
    def __dataframe__(self, nan_as_null: bool = ..., allow_copy: bool = ...): ...

ColumnName: TypeAlias = str | bytes | date | datetime | timedelta | bool | complex | Timestamp | Timedelta
Vector: TypeAlias = Series[Any] | Index[Any] | ndarray[Any, Any]

@@ -64,7 +64,7 @@ class _BaseGrid:
        path_effects: list[AbstractPathEffect] = ...,
        picker: bool | float | Callable[[Artist, MouseEvent], tuple[bool, dict[Any, Any]]] | None = ...,
        position: Bbox | tuple[float, float, float, float] = ...,
        prop_cycle: Incomplete = ...,  # TODO: use cycler.Cycler when cycler gets typed
        prop_cycle=...,  # TODO: use cycler.Cycler when cycler gets typed
        rasterization_zorder: float | None = ...,
        rasterized: bool = ...,
        sketch_params: float | None = ...,
@@ -1,4 +1,3 @@
from _typeshed import Incomplete
from collections.abc import Callable, Iterable
from typing import Any, Literal

@@ -71,9 +70,9 @@ def violinplot(
    log_scale: _LogScale | None = None,
    native_scale: bool = False,
    legend: _Legend = "auto",
    scale: Incomplete = ...,  # deprecated
    scale_hue: Incomplete = ...,  # deprecated
    bw: Incomplete = ...,  # deprecated
    scale=...,  # deprecated
    scale_hue=...,  # deprecated
    bw=...,  # deprecated
    inner_kws: dict[str, Any] | None = None,
    ax: Axes | None = None,
    **kwargs: Any,
@@ -106,7 +105,7 @@ def boxenplot(
    native_scale: bool = False,
    formatter: Callable[[Any], str] | None = None,
    legend: _Legend = "auto",
    scale: Incomplete = ...,  # deprecated
    scale=...,  # deprecated
    box_kws: dict[str, Any] | None = None,
    flier_kws: dict[str, Any] | None = None,
    line_kws: dict[str, Any] | None = None,
@@ -190,9 +189,9 @@ def barplot(
    legend: _Legend = "auto",
    capsize: float = 0,
    err_kws: dict[str, Any] | None = None,
    ci: Incomplete = ...,  # deprecated
    errcolor: Incomplete = ...,  # deprecated
    errwidth: Incomplete = ...,  # deprecated
    ci=...,  # deprecated
    errcolor=...,  # deprecated
    errwidth=...,  # deprecated
    ax: Axes | None = None,
    **kwargs: Any,
) -> Axes: ...
@@ -223,10 +222,10 @@ def pointplot(
    formatter: Callable[[Any], str] | None = None,
    legend: _Legend = "auto",
    err_kws: dict[str, Any] | None = None,
    ci: Incomplete = ...,  # deprecated
    errwidth: Incomplete = ...,  # deprecated
    join: Incomplete = ...,  # deprecated
    scale: Incomplete = ...,  # deprecated
    ci=...,  # deprecated
    errwidth=...,  # deprecated
    join=...,  # deprecated
    scale=...,  # deprecated
    ax: Axes | None = None,
    **kwargs: Any,
) -> Axes: ...
@@ -290,6 +289,6 @@ def catplot(
    sharey: bool = True,
    margin_titles: bool = False,
    facet_kws: dict[str, Any] | None = None,
    ci: Incomplete = ...,  # deprecated
    ci=...,  # deprecated
    **kwargs: Any,
) -> FacetGrid: ...
@@ -40,7 +40,7 @@ def heatmap(
    yticklabels: Literal["auto"] | bool | int | Sequence[str] = "auto",
    mask: NDArray[np.bool_] | DataFrame | None = None,
    ax: Axes | None = None,
    **kwargs: Incomplete,
    **kwargs,
) -> Axes: ...

class _DendrogramPlotter:
@@ -150,10 +150,8 @@ class ClusterGrid(Grid):
        col_linkage: NDArray[Incomplete] | None,
        tree_kws: dict[str, Incomplete] | None,
    ) -> None: ...
    def plot_colors(self, xind: _ArrayLikeInt_co, yind: _ArrayLikeInt_co, **kws: Incomplete) -> None: ...
    def plot_matrix(
        self, colorbar_kws: dict[str, Incomplete], xind: _ArrayLikeInt_co, yind: _ArrayLikeInt_co, **kws: Incomplete
    ) -> None: ...
    def plot_colors(self, xind: _ArrayLikeInt_co, yind: _ArrayLikeInt_co, **kws) -> None: ...
    def plot_matrix(self, colorbar_kws: dict[str, Incomplete], xind: _ArrayLikeInt_co, yind: _ArrayLikeInt_co, **kws) -> None: ...
    def plot(
        self,
        metric: str,
@@ -164,7 +162,7 @@ class ClusterGrid(Grid):
        row_linkage: NDArray[Incomplete] | None,
        col_linkage: NDArray[Incomplete] | None,
        tree_kws: dict[str, Incomplete] | None,
        **kws: Incomplete,
        **kws,
    ) -> Self: ...

def clustermap(
@@ -194,5 +192,5 @@ def clustermap(
    colors_ratio: float | tuple[float, float] = 0.03,
    cbar_pos: tuple[float, float, float, float] | None = (0.02, 0.8, 0.05, 0.18),
    tree_kws: dict[str, Incomplete] | None = None,
    **kwargs: Incomplete,
    **kwargs,
) -> ClusterGrid: ...

@@ -102,9 +102,7 @@ def get_data_home(data_home: str | None = None) -> str: ...
def load_dataset(name: str, cache: bool = True, data_home: str | None = None, **kws: Any) -> DataFrame: ...
def axis_ticklabels_overlap(labels: Iterable[Text]) -> bool: ...
def axes_ticklabels_overlap(ax: Axes) -> tuple[bool, bool]: ...
def locator_to_legend_entries(
    locator: Locator, limits: Iterable[float], dtype: Incomplete
) -> tuple[list[Incomplete], list[str]]: ...
def locator_to_legend_entries(locator: Locator, limits: Iterable[float], dtype) -> tuple[list[Incomplete], list[str]]: ...
@overload
def relative_luminance(color: ColorType) -> float: ...  # type: ignore[overload-overlap]
@overload
@@ -182,7 +182,7 @@ class RaggedTensor(metaclass=ABCMeta):
class Operation:
    def __init__(
        self,
        node_def: Incomplete,
        node_def,
        g: Graph,
        # isinstance is used so can not be Sequence/Iterable.
        inputs: list[Tensor] | None = None,
@@ -190,7 +190,7 @@ class Operation:
        control_inputs: Iterable[Tensor | Operation] | None = None,
        input_types: Iterable[DType] | None = None,
        original_op: Operation | None = None,
        op_def: Incomplete = None,
        op_def: Incomplete | None = None,
    ) -> None: ...
    @property
    def inputs(self) -> list[Tensor]: ...
@@ -301,7 +301,7 @@ class TypeSpec(ABC, Generic[_SpecProto]):
    def experimental_type_proto(cls) -> type[_SpecProto]: ...
    def is_compatible_with(self, spec_or_value: Self | TensorCompatible | SparseTensor | RaggedTensor) -> _bool: ...
    # Incomplete as tf.types is not yet covered.
    def is_subtype_of(self, other: Incomplete) -> _bool: ...
    def is_subtype_of(self, other) -> _bool: ...
    def most_specific_common_supertype(self, others: Sequence[Incomplete]) -> Self | None: ...
    def most_specific_compatible_type(self, other: Self) -> Self: ...
@@ -1,4 +1,3 @@
from _typeshed import Incomplete
from collections.abc import Callable, Iterable, Mapping, Sequence
from typing import Any, Literal
from typing_extensions import TypeAlias
@@ -148,7 +147,7 @@ class ReduceLROnPlateau(Callback):
        min_delta: float = 1e-4,
        cooldown: int = 0,
        min_lr: float = 0,
        **kwargs: Incomplete,
        **kwargs,
    ) -> None: ...
    def in_cooldown(self) -> bool: ...
@@ -4,6 +4,6 @@ import tensorflow as tf
from tensorflow.keras.layers.experimental.preprocessing import PreprocessingLayer

class _IndexLookup(PreprocessingLayer):
    def compute_output_signature(self, input_spec: Incomplete) -> tf.TensorSpec: ...
    def compute_output_signature(self, input_spec) -> tf.TensorSpec: ...
    def get_vocabulary(self, include_special_tokens: bool = True) -> list[Incomplete]: ...
    def vocabulary_size(self) -> int: ...
@@ -26,8 +26,8 @@ class Model(Layer[_InputT, _OutputT], tf.Module):
    def __new__(cls, *args: Any, **kwargs: Any) -> Model[_InputT, _OutputT]: ...
    def __init__(self, *args: Any, **kwargs: Any) -> None: ...
    def __setattr__(self, name: str, value: Any) -> None: ...
    def __reduce__(self) -> Incomplete: ...
    def __deepcopy__(self, memo: Incomplete) -> Incomplete: ...
    def __reduce__(self): ...
    def __deepcopy__(self, memo): ...
    def build(self, input_shape: ShapeLike) -> None: ...
    def __call__(self, inputs: _InputT, *, training: bool = False, mask: TensorCompatible | None = None) -> _OutputT: ...
    def call(self, inputs: _InputT, training: bool | None = None, mask: TensorCompatible | None = None) -> _OutputT: ...
@@ -61,7 +61,7 @@ class Model(Layer[_InputT, _OutputT], tf.Module):
    def jit_compile(self) -> bool: ...
    @property
    def distribute_reduction_method(self) -> Incomplete | Literal["auto"]: ...
    def train_step(self, data: TensorCompatible) -> Incomplete: ...
    def train_step(self, data: TensorCompatible): ...
    def compute_loss(
        self,
        x: TensorCompatible | None = None,
@@ -70,7 +70,7 @@ class Model(Layer[_InputT, _OutputT], tf.Module):
        sample_weight: Incomplete | None = None,
    ) -> tf.Tensor | None: ...
    def compute_metrics(
        self, x: TensorCompatible, y: TensorCompatible, y_pred: TensorCompatible, sample_weight: Incomplete
        self, x: TensorCompatible, y: TensorCompatible, y_pred: TensorCompatible, sample_weight
    ) -> dict[str, float]: ...
    def get_metrics_result(self) -> dict[str, float]: ...
    def make_train_function(self, force: bool = False) -> Callable[[tf.data.Iterator[Incomplete]], dict[str, float]]: ...
@@ -186,7 +186,7 @@ class Model(Layer[_InputT, _OutputT], tf.Module):
    def trainable_weights(self) -> list[Variable]: ...
    @property
    def non_trainable_weights(self) -> list[Variable]: ...
    def get_weights(self) -> Incomplete: ...
    def get_weights(self): ...
    def save(
        self, filepath: str | Path, overwrite: bool = True, save_format: Literal["keras", "tf", "h5"] | None = None, **kwargs: Any
    ) -> None: ...
@@ -1,4 +1,3 @@
from _typeshed import Incomplete
from enum import Enum
from typing_extensions import Self

@@ -24,7 +23,7 @@ class Fingerprint:
        version: Integer | None = None,
    ) -> None: ...
    @classmethod
    def from_proto(cls, proto: Incomplete) -> Self: ...
    def from_proto(cls, proto) -> Self: ...
    def singleprint(self) -> str: ...

class TrackableResource(CapturableResource):

@@ -24,6 +24,6 @@ class GenericFunction(Callable[_P, _R], metaclass=abc.ABCMeta):
    def get_concrete_function(
        self, *args: ContainerGeneric[tf.TypeSpec[Any]], **kwargs: ContainerGeneric[tf.TypeSpec[Any]]
    ) -> ConcreteFunction[_P, _R]: ...
    def experimental_get_compiler_ir(self, *args: Incomplete, **kwargs: Incomplete) -> Incomplete: ...
    def experimental_get_compiler_ir(self, *args, **kwargs): ...

def __getattr__(name: str) -> Incomplete: ...
@@ -80,12 +80,7 @@ class tqdm_discord(tqdm_auto[_T]):
        **kwargs,
    ) -> None: ...
    def display(
        self,
        msg: str | None = ...,
        pos: int | None = ...,
        close: bool = ...,
        bar_style: Incomplete = ...,
        check_delay: bool = ...,
        self, msg: str | None = ..., pos: int | None = ..., close: bool = ..., bar_style=..., check_delay: bool = ...
    ) -> None: ...
    def clear(self, *args, **kwargs) -> None: ...

@@ -84,7 +84,9 @@ class tqdm_slack(tqdm_auto[_T]):
        channel: int = ...,
        **kwargs,
    ) -> None: ...
    def display(self, *, msg: str | None = ..., pos: int | None = ..., close: bool = ..., bar_style: Incomplete = ..., check_delay: bool = ...) -> None: ...  # type: ignore[override]
    def display(  # type: ignore[override]
        self, *, msg: str | None = ..., pos: int | None = ..., close: bool = ..., bar_style=..., check_delay: bool = ...
    ) -> None: ...
    def clear(self, *args, **kwargs) -> None: ...

def tsrange(*args, **kwargs) -> tqdm_slack[int]: ...

@@ -89,7 +89,9 @@ class tqdm_telegram(tqdm_auto[_T]):
        chat_id: str = ...,
        **kwargs,
    ) -> None: ...
    def display(self, *, msg: str | None = ..., pos: int | None = ..., close: bool = ..., bar_style: Incomplete = ..., check_delay: bool = ...) -> None: ...  # type: ignore[override]
    def display(  # type: ignore[override]
        self, *, msg: str | None = ..., pos: int | None = ..., close: bool = ..., bar_style=..., check_delay: bool = ...
    ) -> None: ...
    def clear(self, *args, **kwargs) -> None: ...
    def close(self) -> None: ...
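A note on the reformatted `display` overrides above: when a one-line signature carrying a trailing `# type: ignore[override]` is wrapped onto several lines, the ignore comment has to move to the `def` line, because mypy attaches an ignore to the line on which it reports the error, and for a multi-line definition that is the first line of the `def`. The same pattern in miniature, with illustrative names:

class _Base:
    def display(self, msg: str | None = None) -> None: ...

class _Widget(_Base):
    def display(  # type: ignore[override]
        self, *, msg: str | None = None, check_delay: bool = False
    ) -> None: ...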
@@ -13,10 +13,10 @@ _T = TypeVar("_T")
class _ProgressColumn(ABC):
    max_refresh: float | None
    def __init__(self, table_column: Incomplete | None = ...) -> None: ...
    def get_table_column(self) -> Incomplete: ...
    def __call__(self, task: Incomplete) -> Incomplete: ...
    def get_table_column(self): ...
    def __call__(self, task): ...
    @abstractmethod
    def render(self, task: Incomplete) -> Incomplete: ...
    def render(self, task): ...

class FractionColumn(_ProgressColumn):
    unit_scale: bool