Remove SQLAlchemy stubs (#10389)

Co-authored-by: AlexWaygood <alex.waygood@gmail.com>
This commit is contained in:
Nikita Sobolev
2023-07-08 12:24:59 +03:00
committed by GitHub
parent 507e714b9e
commit e7ba0327e1
232 changed files with 2 additions and 17243 deletions

View File

@@ -35,7 +35,6 @@
"stubs/dateparser",
"stubs/docutils",
"stubs/Flask-Migrate",
"stubs/Flask-SQLAlchemy",
"stubs/fpdf2",
"stubs/html5lib",
"stubs/httplib2",
@@ -69,7 +68,6 @@
"stubs/redis",
"stubs/requests",
"stubs/setuptools",
"stubs/SQLAlchemy",
"stubs/stripe",
"stubs/tqdm",
"stubs/ttkthemes",

View File

@@ -1,6 +1,6 @@
version = "4.0.*"
# Requires a version of flask with a `py.typed` file
requires = ["Flask>=2.0.0", "types-Flask-SQLAlchemy"]
# Requires versions of flask and Flask-SQLAlchemy with `py.typed` files
requires = ["Flask>=2.0.0", "Flask-SQLAlchemy>=3.0.1"]
partial_stub = true
[tool.stubtest]

View File

@@ -1,2 +0,0 @@
# Needed due to dynamic attribute generation
flask_sqlalchemy.SQLAlchemy.__getattr__

View File

@@ -1,7 +0,0 @@
version = "2.5.*"
requires = ["types-SQLAlchemy"]
obsolete_since = "3.0.1" # Released on 2022-10-11
partial_stub = true
[tool.stubtest]
ignore_missing_stub = true

View File

@@ -1,97 +0,0 @@
from _typeshed import Incomplete
from collections.abc import Generator
from typing import Any, Generic, TypeVar
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session
from . import utils as utils
from .model import DefaultMeta as DefaultMeta, Model as Model
# Module-level signal objects sent around model commits; typed loosely (Any)
# in the stub — presumably blinker signals, confirm against the implementation.
models_committed: Any
before_models_committed: Any

class SignallingSession(Session):
    # Flask application this session is associated with.
    app: Any
    def __init__(self, db, autocommit: bool = False, autoflush: bool = True, **options) -> None: ...
    # Signature is deliberately narrower than SQLAlchemy's Session.get_bind,
    # hence the override suppression.
    def get_bind(self, mapper: Incomplete | None = None, clause: Incomplete | None = None): ... # type: ignore[override]

# Debug helper — presumably returns the queries recorded for the current
# app context; return type unknown in the stub.
def get_debug_queries(): ...
_T = TypeVar("_T")

class BaseQuery(Query[_T]):
    """Query subclass adding Flask helpers (404 shortcuts and pagination)."""

    # Like Query.get()/first(), but abort with a 404 instead of returning
    # None — presumably via Flask's abort(); confirm in the implementation.
    def get_or_404(self, ident, description: Incomplete | None = None): ...
    def first_or_404(self, description: Incomplete | None = None): ...
    # Slice the query into a single page wrapped in a Pagination object.
    def paginate(
        self,
        page: Incomplete | None = None,
        per_page: Incomplete | None = None,
        error_out: bool = True,
        max_per_page: Incomplete | None = None,
    ) -> Pagination[_T]: ...
class Pagination(Generic[_T]):
    """One page of results produced by BaseQuery.paginate()."""

    query: BaseQuery[_T] | None  # originating query; None if constructed without one
    page: int  # current page number
    per_page: int  # items requested per page
    total: int | None  # total number of matching rows, if known
    items: Any  # the records on this page
    def __init__(self, query: BaseQuery[_T] | None, page: int, per_page: int, total: int | None, items) -> None: ...
    @property
    def pages(self) -> int: ...
    def prev(self, error_out: bool = False) -> Pagination[_T]: ...
    @property
    def prev_num(self) -> int | None: ...
    @property
    def has_prev(self) -> bool: ...
    def next(self, error_out: bool = False) -> Pagination[_T]: ...
    @property
    def has_next(self) -> bool: ...
    @property
    def next_num(self) -> int | None: ...
    # Yields page numbers for a pager widget; None entries presumably mark
    # skipped ranges (ellipsis), per the int | None element type.
    def iter_pages(
        self, left_edge: int = 2, left_current: int = 2, right_current: int = 5, right_edge: int = 2
    ) -> Generator[int | None, None, None]: ...
# Presumably returns the extension state stored on the given Flask app;
# confirm against the implementation.
def get_state(app): ...

class SQLAlchemy:
    """Stub for the Flask-SQLAlchemy extension object."""

    Query: Any
    use_native_unicode: Any
    session: scoped_session
    Model: Model
    app: Any
    def __init__(
        self,
        app: Incomplete | None = None,
        use_native_unicode: bool = True,
        session_options: Incomplete | None = None,
        metadata: Incomplete | None = None,
        query_class=...,
        model_class=...,
        engine_options: Incomplete | None = None,
    ) -> None: ...
    @property
    def metadata(self): ...
    def create_scoped_session(self, options: Incomplete | None = None): ...
    def create_session(self, options): ...
    def make_declarative_base(self, model, metadata: Incomplete | None = None): ...
    # Two-phase init: may also be called later than __init__ (app factory pattern).
    def init_app(self, app): ...
    def apply_pool_defaults(self, app, options): ...
    def apply_driver_hacks(self, app, sa_url, options): ...
    @property
    def engine(self): ...
    def make_connector(self, app: Incomplete | None = None, bind: Incomplete | None = None): ...
    def get_engine(self, app: Incomplete | None = None, bind: Incomplete | None = None): ...
    def create_engine(self, sa_url, engine_opts): ...
    def get_app(self, reference_app: Incomplete | None = None): ...
    def get_tables_for_bind(self, bind: Incomplete | None = None): ...
    def get_binds(self, app: Incomplete | None = None): ...
    def create_all(self, bind: str = "__all__", app: Incomplete | None = None) -> None: ...
    def drop_all(self, bind: str = "__all__", app: Incomplete | None = None) -> None: ...
    def reflect(self, bind: str = "__all__", app: Incomplete | None = None) -> None: ...
    def __getattr__(self, name: str) -> Any: ... # dynamically exposes SQLAlchemy's classes as attributes
class FSADeprecationWarning(DeprecationWarning): ...

View File

@@ -1,25 +0,0 @@
from re import Pattern
from typing import Any
from sqlalchemy import Table
from sqlalchemy.ext.declarative import DeclarativeMeta
from sqlalchemy.orm import Query
# True if the class needs an auto-generated __tablename__.
def should_set_tablename(cls: type) -> bool: ...

camelcase_re: Pattern[str]

def camel_to_snake_case(name: str) -> str: ...

class NameMetaMixin(type):
    # Metaclass mixin — presumably derives __tablename__ from the class name
    # (see camel_to_snake_case above); confirm in the implementation.
    def __init__(cls, name: str, bases: tuple[type, ...], d: dict[str, Any]) -> None: ...
    def __table_cls__(cls, *args, **kwargs) -> Table | None: ...

class BindMetaMixin(type):
    # Metaclass mixin for bind handling; behavior not visible from the stub.
    def __init__(cls, name: str, bases: tuple[type, ...], d: dict[str, Any]) -> None: ...

class DefaultMeta(NameMetaMixin, BindMetaMixin, DeclarativeMeta): ...

class Model:
    # Default declarative base class; query attributes are populated by the
    # extension at runtime, hence the | None.
    query_class: type[Query[Any]] | None
    query: Query[Any] | None

View File

@@ -1,3 +0,0 @@
# Parse an "X.Y.Z"-style version string into an (int, int, int) tuple.
def parse_version(v: str) -> tuple[int, int, int]: ...
# Presumably compares the installed SQLAlchemy version against val using
# operator op; confirm against the implementation.
def sqlalchemy_version(op: str, val: str) -> bool: ...
# Emits a warning about a deprecated engine config key.
def engine_config_warning(config, version: str, deprecated_config_key: str, engine_option) -> None: ...

View File

@@ -1,85 +0,0 @@
# mypy plugin not supported in typeshed
sqlalchemy.ext.mypy.*
# test suites
sqlalchemy.testing.suite
# Leaked re-exports from the compat module
sqlalchemy.util.quote
# Expanding keyword arguments in stubs
sqlalchemy.ext.declarative.as_declarative
# not always present
sqlalchemy.engine.Engine.logging_name # initialized if not None
sqlalchemy.engine.base.Engine.logging_name # initialized if not None
sqlalchemy.sql.lambdas.PyWrapper.__clause_element__
sqlalchemy.testing.util.non_refcount_gc_collect
# potentially replaced at runtime
sqlalchemy.engine.Row.count
sqlalchemy.engine.Row.index
sqlalchemy.engine.row.Row.count
sqlalchemy.engine.row.Row.index
# abstract fields not present at runtime
sqlalchemy.engine.Transaction.connection
sqlalchemy.engine.Transaction.is_active
sqlalchemy.engine.base.Transaction.connection
sqlalchemy.engine.base.Transaction.is_active
# initialized to None during class construction, but overridden during __init__() or __new__()
sqlalchemy.engine.base.Connection.engine
sqlalchemy.engine.Connection.engine
sqlalchemy.orm.Mapper.single
sqlalchemy.orm.mapper.Mapper.single
# Uses @memoized_property at runtime, but we use @property for compatibility
sqlalchemy.engine.URL.normalized_query
sqlalchemy.engine.url.URL.normalized_query
# Uses @memoized_property, but that causes regr_test to raise 'Cannot determine type of "..." in base class "..." [misc]'
sqlalchemy.schema.SchemaItem.info
sqlalchemy.sql.elements.AnnotatedColumnElement.info
sqlalchemy.sql.elements.AnnotatedColumnElement.key
sqlalchemy.sql.elements.AnnotatedColumnElement.name
sqlalchemy.sql.elements.AnnotatedColumnElement.table
sqlalchemy.sql.schema.SchemaItem.info
# runtime has extra internal arguments that are inconsistent across micro versions
sqlalchemy.testing.engines.testing_engine
# __new__ signature conflicts with __init__ signature (which is more precise),
# so __new__ is deliberately omitted in the stub
sqlalchemy.sql.annotation.Annotated.__new__
# At runtime __new__ is defined, but we define __init__ in the stub
# because otherwise all subclasses would be identified by pyright
# as having conflicting __new__/__init__ methods
sqlalchemy.orm.unitofwork.PostSortRec.__new__
# KeyError/AttributeError on import due to dynamic initialization from a different module
sqlalchemy.testing.fixtures
sqlalchemy.testing.pickleable
sqlalchemy.testing.plugin.bootstrap
# method arguments starting with double underscores in the implementation trip up stubtest
sqlalchemy.testing.resolve_lambda
sqlalchemy.testing.util.resolve_lambda
sqlalchemy.orm.collections.MappedCollection.update
# stubtest thinks __slots__ are always members
# https://github.com/python/mypy/issues/13906
sqlalchemy.sql.elements.quoted_name.lower
sqlalchemy.sql.elements.quoted_name.upper
sqlalchemy.sql.expression.quoted_name.lower
sqlalchemy.sql.expression.quoted_name.upper
sqlalchemy.sql.quoted_name.lower
sqlalchemy.sql.quoted_name.upper
sqlalchemy.orm.ColumnProperty.Comparator.__clause_element__
sqlalchemy.orm.properties.ColumnProperty.Comparator.__clause_element__
# Same error as in stdlib due to it being re-exported
sqlalchemy.dialects.mysql.asyncmy.AsyncAdapt_asyncmy_dbapi.Binary
sqlalchemy.util.compat.StringIO.seek
sqlalchemy.util.compat.StringIO.truncate
sqlalchemy.util.StringIO.seek
sqlalchemy.util.StringIO.truncate
sqlalchemy.testing.mock.patch

View File

@@ -1,60 +0,0 @@
from __future__ import annotations
from typing_extensions import assert_type
from sqlalchemy.orm.strategy_options import (
Load,
contains_eager,
defaultload,
defer,
immediateload,
joinedload,
lazyload,
load_only,
loader_option,
noload,
raiseload,
selectin_polymorphic,
selectinload,
subqueryload,
undefer,
undefer_group,
with_expression,
)
def fn(loadopt: Load, *args: object) -> loader_option:
return loader_option()
# Test that each loader function, and the value returned by calling it, is an instance of "loader_option"
assert_type(contains_eager, loader_option)
assert_type(contains_eager(fn), loader_option)
assert_type(load_only, loader_option)
assert_type(load_only(fn), loader_option)
assert_type(joinedload, loader_option)
assert_type(joinedload(fn), loader_option)
assert_type(subqueryload, loader_option)
assert_type(subqueryload(fn), loader_option)
assert_type(selectinload, loader_option)
assert_type(selectinload(fn), loader_option)
assert_type(lazyload, loader_option)
assert_type(lazyload(fn), loader_option)
assert_type(immediateload, loader_option)
assert_type(immediateload(fn), loader_option)
assert_type(noload, loader_option)
assert_type(noload(fn), loader_option)
assert_type(raiseload, loader_option)
assert_type(raiseload(fn), loader_option)
assert_type(defaultload, loader_option)
assert_type(defaultload(fn), loader_option)
assert_type(defer, loader_option)
assert_type(defer(fn), loader_option)
assert_type(undefer, loader_option)
assert_type(undefer(fn), loader_option)
assert_type(undefer_group, loader_option)
assert_type(undefer_group(fn), loader_option)
assert_type(with_expression, loader_option)
assert_type(with_expression(fn), loader_option)
assert_type(selectin_polymorphic, loader_option)
assert_type(selectin_polymorphic(fn), loader_option)

View File

@@ -1,68 +0,0 @@
from __future__ import annotations
from _typeshed.dbapi import DBAPIConnection
from typing import cast
from sqlalchemy.engine.base import Engine
from sqlalchemy.engine.default import DefaultDialect
from sqlalchemy.engine.url import URL
from sqlalchemy.pool.base import Pool
from sqlalchemy.testing import config as ConfigModule
from sqlalchemy.testing.provision import (
configure_follower,
create_db,
drop_all_schema_objects_post_tables,
drop_all_schema_objects_pre_tables,
drop_db,
follower_url_from_main,
generate_driver_url,
get_temp_table_name,
post_configure_engine,
prepare_for_drop_tables,
register,
run_reap_dbs,
set_default_schema_on_connection,
stop_test_class_outside_fixtures,
temp_table_keyword_args,
update_db_opts,
)
from sqlalchemy.util import immutabledict
url = URL("", "", "", "", 0, "", immutabledict())
engine = Engine(Pool(lambda: cast(DBAPIConnection, object())), DefaultDialect(), "")
config = cast(ConfigModule.Config, object())
unused = None
class Foo:
pass
# Test that the decorator changes the first parameter to "cfg: str | URL | _ConfigProtocol"
@register.init
def no_args(__foo: Foo) -> None:
pass
no_args(cfg="")
no_args(cfg=url)
no_args(cfg=config)
# Test pre-decorated functions
generate_driver_url(url, "", "")
drop_all_schema_objects_pre_tables(url, unused)
drop_all_schema_objects_post_tables(url, unused)
create_db(url, engine, unused)
drop_db(url, engine, unused)
update_db_opts(url, unused)
post_configure_engine(url, unused, unused)
follower_url_from_main(url, "")
configure_follower(url, unused)
run_reap_dbs(url, unused)
temp_table_keyword_args(url, engine)
prepare_for_drop_tables(url, unused)
stop_test_class_outside_fixtures(url, unused, type)
get_temp_table_name(url, unused, "")
set_default_schema_on_connection(url, unused, unused)
set_default_schema_on_connection(ConfigModule, unused, unused)
set_default_schema_on_connection(config, unused, unused)

View File

@@ -1,9 +0,0 @@
version = "1.4.46"
extra_description = """\
The `sqlalchemy-stubs` package is an alternative to this package and also \
includes a mypy plugin for more precise types.\
"""
obsolete_since = "2.0.0" # Released on 2023-01-26
[tool.stubtest]
stubtest_requirements = ["pytest"]

View File

@@ -1,133 +0,0 @@
from .engine import (
create_engine as create_engine,
create_mock_engine as create_mock_engine,
engine_from_config as engine_from_config,
)
from .inspection import inspect as inspect
from .schema import (
BLANK_SCHEMA as BLANK_SCHEMA,
DDL as DDL,
CheckConstraint as CheckConstraint,
Column as Column,
ColumnDefault as ColumnDefault,
Computed as Computed,
Constraint as Constraint,
DefaultClause as DefaultClause,
FetchedValue as FetchedValue,
ForeignKey as ForeignKey,
ForeignKeyConstraint as ForeignKeyConstraint,
Identity as Identity,
Index as Index,
MetaData as MetaData,
PrimaryKeyConstraint as PrimaryKeyConstraint,
Sequence as Sequence,
Table as Table,
ThreadLocalMetaData as ThreadLocalMetaData,
UniqueConstraint as UniqueConstraint,
)
from .sql import (
LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT,
LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY,
LABEL_STYLE_NONE as LABEL_STYLE_NONE,
LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL,
alias as alias,
all_ as all_,
and_ as and_,
any_ as any_,
asc as asc,
between as between,
bindparam as bindparam,
case as case,
cast as cast,
collate as collate,
column as column,
delete as delete,
desc as desc,
distinct as distinct,
except_ as except_,
except_all as except_all,
exists as exists,
extract as extract,
false as false,
func as func,
funcfilter as funcfilter,
insert as insert,
intersect as intersect,
intersect_all as intersect_all,
join as join,
lambda_stmt as lambda_stmt,
lateral as lateral,
literal as literal,
literal_column as literal_column,
modifier as modifier,
not_ as not_,
null as null,
nulls_first as nulls_first,
nulls_last as nulls_last,
nullsfirst as nullsfirst,
nullslast as nullslast,
or_ as or_,
outerjoin as outerjoin,
outparam as outparam,
over as over,
select as select,
subquery as subquery,
table as table,
tablesample as tablesample,
text as text,
true as true,
tuple_ as tuple_,
type_coerce as type_coerce,
union as union,
union_all as union_all,
update as update,
values as values,
within_group as within_group,
)
from .sql.sqltypes import (
ARRAY as ARRAY,
BIGINT as BIGINT,
BINARY as BINARY,
BLOB as BLOB,
BOOLEAN as BOOLEAN,
CHAR as CHAR,
CLOB as CLOB,
DATE as DATE,
DATETIME as DATETIME,
DECIMAL as DECIMAL,
FLOAT as FLOAT,
INT as INT,
INTEGER as INTEGER,
JSON as JSON,
NCHAR as NCHAR,
NUMERIC as NUMERIC,
NVARCHAR as NVARCHAR,
REAL as REAL,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
VARBINARY as VARBINARY,
VARCHAR as VARCHAR,
BigInteger as BigInteger,
Boolean as Boolean,
Date as Date,
DateTime as DateTime,
Enum as Enum,
Float as Float,
Integer as Integer,
Interval as Interval,
LargeBinary as LargeBinary,
Numeric as Numeric,
PickleType as PickleType,
SmallInteger as SmallInteger,
String as String,
Text as Text,
Time as Time,
TupleType as TupleType,
TypeDecorator as TypeDecorator,
Unicode as Unicode,
UnicodeText as UnicodeText,
)
__version__: str

View File

@@ -1,19 +0,0 @@
from _typeshed import SupportsKeysAndGetItem
from collections.abc import Iterable
from typing import Generic, TypeVar, overload
from typing_extensions import final
_KT = TypeVar("_KT")
_KT2 = TypeVar("_KT2")
_VT = TypeVar("_VT")
_VT2 = TypeVar("_VT2")

@final
class immutabledict(dict[_KT, _VT], Generic[_KT, _VT]):
    """Stub for SQLAlchemy's ``immutabledict``; per the annotations,
    ``union``/``merge_with`` return new mappings rather than mutating."""

    @overload
    def union(self, __dict: dict[_KT2, _VT2]) -> immutabledict[_KT | _KT2, _VT | _VT2]: ...
    @overload
    def union(self, __dict: None = None, **kw: SupportsKeysAndGetItem[_KT2, _VT2]) -> immutabledict[_KT | _KT2, _VT | _VT2]: ...
    # Accepts mappings or iterables of pairs; None entries are allowed by the
    # annotation — presumably skipped, confirm in the implementation.
    def merge_with(
        self, *args: SupportsKeysAndGetItem[_KT | _KT2, _VT2] | Iterable[tuple[_KT2, _VT2]] | None
    ) -> immutabledict[_KT | _KT2, _VT | _VT2]: ...

View File

@@ -1 +0,0 @@
# Marker base class for the DBAPI connector mixins in this subpackage.
class Connector: ...

View File

@@ -1,17 +0,0 @@
from _typeshed import Incomplete
from . import Connector
class MxODBCConnector(Connector):
    # Capability flags declared by this connector.
    driver: str
    supports_sane_multi_rowcount: bool
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    supports_native_decimal: bool
    # Presumably returns the mxODBC DBAPI module; confirm in the implementation.
    @classmethod
    def dbapi(cls): ...
    def on_connect(self): ...
    # Translates a SQLAlchemy URL into DBAPI connect() arguments.
    def create_connect_args(self, url): ...
    # Truthy when the given error indicates a dropped connection.
    def is_disconnect(self, e, connection, cursor): ...
    def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def do_execute(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...

View File

@@ -1,22 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from . import Connector
class PyODBCConnector(Connector):
    # Capability flags declared by this connector.
    driver: str
    supports_sane_rowcount_returning: bool
    supports_sane_multi_rowcount: bool
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    supports_native_decimal: bool
    default_paramstyle: str
    use_setinputsizes: bool
    pyodbc_driver_name: Any
    def __init__(self, supports_unicode_binds: Incomplete | None = None, use_setinputsizes: bool = False, **kw) -> None: ...
    # Presumably returns the pyodbc DBAPI module; confirm in the implementation.
    @classmethod
    def dbapi(cls): ...
    # Translates a SQLAlchemy URL into DBAPI connect() arguments.
    def create_connect_args(self, url): ...
    # Truthy when the given error indicates a dropped connection.
    def is_disconnect(self, e, connection, cursor): ...
    def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ...
    def set_isolation_level(self, connection, level) -> None: ...

View File

@@ -1,15 +0,0 @@
# Result-processor stubs; signatures are intentionally loose (*args, **kwargs)
# because the runtime may substitute C-accelerated implementations.
class DecimalResultProcessor:
    def __init__(self, *args, **kwargs) -> None: ...
    def process(self, *args, **kwargs): ...

class UnicodeResultProcessor:
    def __init__(self, *args, **kwargs) -> None: ...
    def conditional_process(self, *args, **kwargs): ...
    def process(self, *args, **kwargs): ...

# Module-level converters from raw DBAPI values, named by conversion.
def int_to_boolean(*args, **kwargs): ...
def str_to_date(*args, **kwargs): ...
def str_to_datetime(*args, **kwargs): ...
def str_to_time(*args, **kwargs): ...
def to_float(*args, **kwargs): ...
def to_str(*args, **kwargs): ...

View File

@@ -1,23 +0,0 @@
from _typeshed import Incomplete
from collections.abc import Callable, Iterable, Iterator
from typing import Any, overload
class BaseRow:
    """Low-level row type: hashable, iterable, supports str/int/slice indexing."""

    # Dunder-prefixed parameter names mark these as positional-only.
    def __init__(
        self,
        __parent,
        __processors: Iterable[Callable[[Any], Any]] | None,
        __keymap: dict[Incomplete, Incomplete],
        __key_style: int,
        __row: Iterable[Any],
    ) -> None: ...
    def __reduce__(self) -> tuple[Incomplete, tuple[Incomplete, Incomplete]]: ...
    def __iter__(self) -> Iterator[Any]: ...
    def __len__(self) -> int: ...
    def __hash__(self) -> int: ...
    @overload
    def __getitem__(self, __key: str | int) -> tuple[Any, ...]: ...
    @overload
    def __getitem__(self, __key: slice) -> tuple[tuple[Any, ...]]: ...

# Pickle-support helper — presumably the reconstructor referenced by
# __reduce__; confirm in the implementation.
def safe_rowproxy_reconstructor(__cls, __state): ...

View File

@@ -1,18 +0,0 @@
from ..dialects.firebird import base as firebird_base
from ..dialects.mssql import base as mssql_base
from ..dialects.mysql import base as mysql_base
from ..dialects.oracle import base as oracle_base
from ..dialects.postgresql import base as postgresql_base
from ..dialects.sqlite import base as sqlite_base
from ..dialects.sybase import base as sybase_base
__all__ = ("firebird", "mssql", "mysql", "postgresql", "sqlite", "oracle", "sybase")

# Re-export each dialect's ``base`` module under its short name. Note the
# extra legacy ``postgres`` alias, deliberately absent from __all__.
firebird = firebird_base
mssql = mssql_base
mysql = mysql_base
oracle = oracle_base
postgresql = postgresql_base
postgres = postgresql_base
sqlite = sqlite_base
sybase = sybase_base

View File

@@ -1,16 +0,0 @@
from typing import Any
from . import (
firebird as firebird,
mssql as mssql,
mysql as mysql,
oracle as oracle,
postgresql as postgresql,
sqlite as sqlite,
sybase as sybase,
)
__all__ = ("firebird", "mssql", "mysql", "oracle", "postgresql", "sqlite", "sybase")
# Dialect registry/plugin tables; typed loosely (Any) in the stub.
registry: Any
plugins: Any

View File

@@ -1,34 +0,0 @@
from typing import Any
from .base import (
BIGINT as BIGINT,
BLOB as BLOB,
CHAR as CHAR,
DATE as DATE,
FLOAT as FLOAT,
NUMERIC as NUMERIC,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
VARCHAR as VARCHAR,
)
# Public names re-exported from .base plus the dialect entry point.
# Fix: the original tuple listed "FLOAT" three times; duplicates in __all__
# are redundant for `import *` and mislead tooling, so they are removed.
__all__ = (
    "SMALLINT",
    "BIGINT",
    "FLOAT",
    "DATE",
    "TIME",
    "TEXT",
    "NUMERIC",
    "TIMESTAMP",
    "VARCHAR",
    "CHAR",
    "BLOB",
    "dialect",
)
# Dialect class placeholder; assigned at runtime.
dialect: Any

View File

@@ -1,108 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import default
from ...sql import compiler, sqltypes
from ...sql.sqltypes import (
BIGINT as BIGINT,
BLOB as BLOB,
DATE as DATE,
FLOAT as FLOAT,
INTEGER as INTEGER,
NUMERIC as NUMERIC,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
Integer as Integer,
)
RESERVED_WORDS: Any

class _StringType(sqltypes.String):
    # Base for Firebird string types that carry a charset option.
    charset: Any
    def __init__(self, charset: Incomplete | None = None, **kw) -> None: ...

class VARCHAR(_StringType, sqltypes.VARCHAR):
    __visit_name__: str
    def __init__(self, length: Incomplete | None = None, **kwargs) -> None: ...

class CHAR(_StringType, sqltypes.CHAR):
    __visit_name__: str
    def __init__(self, length: Incomplete | None = None, **kwargs) -> None: ...

class _FBDateTime(sqltypes.DateTime):
    def bind_processor(self, dialect): ...

colspecs: Any
ischema_names: Any

class FBTypeCompiler(compiler.GenericTypeCompiler):
    # Firebird-specific renderings of generic/named types.
    def visit_boolean(self, type_, **kw): ...
    def visit_datetime(self, type_, **kw): ...
    def visit_TEXT(self, type_, **kw): ...
    def visit_BLOB(self, type_, **kw): ...
    def visit_CHAR(self, type_, **kw): ...
    def visit_VARCHAR(self, type_, **kw): ...

class FBCompiler(compiler.SQLCompiler):
    """SQL statement compiler for the Firebird dialect."""

    ansi_bind_rules: bool
    def visit_now_func(self, fn, **kw): ...
    def visit_startswith_op_binary(self, binary, operator, **kw): ...
    def visit_not_startswith_op_binary(self, binary, operator, **kw): ...
    def visit_mod_binary(self, binary, operator, **kw): ...
    # Narrower signature than the base class, hence the override suppression.
    def visit_alias(self, alias, asfrom: bool = False, **kwargs): ... # type: ignore[override]
    def visit_substring_func(self, func, **kw): ...
    def visit_length_func(self, function, **kw): ...
    visit_char_length_func: Any
    def function_argspec(self, func, **kw): ...
    def default_from(self): ...
    def visit_sequence(self, seq, **kw): ...
    def get_select_precolumns(self, select, **kw): ...
    def limit_clause(self, select, **kw): ...
    def returning_clause(self, stmt, returning_cols): ...

class FBDDLCompiler(compiler.DDLCompiler):
    def visit_create_sequence(self, create): ...
    def visit_drop_sequence(self, drop): ...
    def visit_computed_column(self, generated): ...

class FBIdentifierPreparer(compiler.IdentifierPreparer):
    reserved_words: Any
    illegal_initial_characters: Any
    def __init__(self, dialect) -> None: ...

class FBExecutionContext(default.DefaultExecutionContext):
    def fire_sequence(self, seq, type_): ...

class FBDialect(default.DefaultDialect):
    """Firebird dialect entry point; the flags below declare its capabilities."""

    name: str
    supports_statement_cache: bool
    max_identifier_length: int
    supports_sequences: bool
    sequences_optional: bool
    supports_default_values: bool
    postfetch_lastrowid: bool
    supports_native_boolean: bool
    requires_name_normalize: bool
    supports_empty_insert: bool
    statement_compiler: Any
    ddl_compiler: Any
    preparer: Any
    type_compiler: Any
    colspecs: Any
    ischema_names: Any
    construct_arguments: Any
    def __init__(self, *args, **kwargs) -> None: ...
    implicit_returning: Any
    def initialize(self, connection) -> None: ...
    # Reflection methods narrow the base-class signatures, hence the ignores.
    def has_table(self, connection, table_name, schema: Incomplete | None = None): ... # type: ignore[override]
    def has_sequence(self, connection, sequence_name, schema: Incomplete | None = None): ... # type: ignore[override]
    def get_table_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_view_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_view_definition(self, connection, view_name, schema: Incomplete | None = None, **kw): ...
    def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_column_sequence(self, connection, table_name, column_name, schema: Incomplete | None = None, **kw): ...
    def get_columns(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_indexes(self, connection, table_name, schema: Incomplete | None = None, **kw): ...

View File

@@ -1,10 +0,0 @@
from .kinterbasdb import FBDialect_kinterbasdb
class FBDialect_fdb(FBDialect_kinterbasdb):
    """Firebird dialect for the ``fdb`` driver; extends the kinterbasdb one."""

    supports_statement_cache: bool
    # Same option names as FBDialect_kinterbasdb.__init__ (see base class).
    def __init__(self, enable_rowcount: bool = True, retaining: bool = False, **kwargs) -> None: ...
    @classmethod
    def dbapi(cls): ...
    def create_connect_args(self, url): ...

# Entry point looked up by SQLAlchemy's dialect registry.
dialect = FBDialect_fdb

View File

@@ -1,39 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql.sqltypes import Float, Numeric
from .base import FBDialect, FBExecutionContext
class _kinterbasdb_numeric:
    # Mixin supplying the bind processor shared by the numeric types below.
    def bind_processor(self, dialect): ...

class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, Numeric): ...
class _FBFloat_kinterbasdb(_kinterbasdb_numeric, Float): ...

class FBExecutionContext_kinterbasdb(FBExecutionContext):
    @property
    def rowcount(self): ...

class FBDialect_kinterbasdb(FBDialect):
    """Firebird dialect for the ``kinterbasdb`` driver."""

    driver: str
    supports_statement_cache: bool
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    supports_native_decimal: bool
    colspecs: Any
    # Instance attributes mirroring the __init__ options below.
    enable_rowcount: Any
    type_conv: Any
    concurrency_level: Any
    retaining: Any
    def __init__(
        self, type_conv: int = 200, concurrency_level: int = 1, enable_rowcount: bool = True, retaining: bool = False, **kwargs
    ) -> None: ...
    @classmethod
    def dbapi(cls): ...
    def do_execute(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def do_rollback(self, dbapi_connection) -> None: ...
    def do_commit(self, dbapi_connection) -> None: ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...

# Entry point looked up by SQLAlchemy's dialect registry.
dialect = FBDialect_kinterbasdb

View File

@@ -1,76 +0,0 @@
from typing import Any
from .base import (
BIGINT as BIGINT,
BINARY as BINARY,
BIT as BIT,
CHAR as CHAR,
DATE as DATE,
DATETIME as DATETIME,
DATETIME2 as DATETIME2,
DATETIMEOFFSET as DATETIMEOFFSET,
DECIMAL as DECIMAL,
FLOAT as FLOAT,
IMAGE as IMAGE,
INTEGER as INTEGER,
JSON as JSON,
MONEY as MONEY,
NCHAR as NCHAR,
NTEXT as NTEXT,
NUMERIC as NUMERIC,
NVARCHAR as NVARCHAR,
REAL as REAL,
ROWVERSION as ROWVERSION,
SMALLDATETIME as SMALLDATETIME,
SMALLINT as SMALLINT,
SMALLMONEY as SMALLMONEY,
SQL_VARIANT as SQL_VARIANT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
TINYINT as TINYINT,
UNIQUEIDENTIFIER as UNIQUEIDENTIFIER,
VARBINARY as VARBINARY,
VARCHAR as VARCHAR,
XML as XML,
try_cast as try_cast,
)
__all__ = (
"JSON",
"INTEGER",
"BIGINT",
"SMALLINT",
"TINYINT",
"VARCHAR",
"NVARCHAR",
"CHAR",
"NCHAR",
"TEXT",
"NTEXT",
"DECIMAL",
"NUMERIC",
"FLOAT",
"DATETIME",
"DATETIME2",
"DATETIMEOFFSET",
"DATE",
"TIME",
"SMALLDATETIME",
"BINARY",
"VARBINARY",
"BIT",
"REAL",
"IMAGE",
"TIMESTAMP",
"ROWVERSION",
"MONEY",
"SMALLMONEY",
"UNIQUEIDENTIFIER",
"SQL_VARIANT",
"XML",
"dialect",
"try_cast",
)
dialect: Any

View File

@@ -1,323 +0,0 @@
from _typeshed import Incomplete
from typing import Any, overload
from typing_extensions import Literal
from ...engine import default
from ...sql import compiler, sqltypes
from ...sql.elements import Cast
from ...sql.sqltypes import (
BIGINT as BIGINT,
BINARY as BINARY,
CHAR as CHAR,
DATE as DATE,
DATETIME as DATETIME,
DECIMAL as DECIMAL,
FLOAT as FLOAT,
INTEGER as INTEGER,
NCHAR as NCHAR,
NUMERIC as NUMERIC,
NVARCHAR as NVARCHAR,
SMALLINT as SMALLINT,
TEXT as TEXT,
VARCHAR as VARCHAR,
)
from .json import JSON as JSON
MS_2017_VERSION: Any
MS_2016_VERSION: Any
MS_2014_VERSION: Any
MS_2012_VERSION: Any
MS_2008_VERSION: Any
MS_2005_VERSION: Any
MS_2000_VERSION: Any
RESERVED_WORDS: Any
class REAL(sqltypes.REAL):
__visit_name__: str
def __init__(self, **kw) -> None: ...
class TINYINT(sqltypes.Integer):
__visit_name__: str
class _MSDate(sqltypes.Date):
def bind_processor(self, dialect): ...
def result_processor(self, dialect, coltype): ...
class TIME(sqltypes.TIME):
precision: Any
def __init__(self, precision: Incomplete | None = None, **kwargs) -> None: ...
def bind_processor(self, dialect): ...
def result_processor(self, dialect, coltype): ...
_MSTime = TIME
class _BASETIMEIMPL(TIME):
__visit_name__: str
class _DateTimeBase:
def bind_processor(self, dialect): ...
class _MSDateTime(_DateTimeBase, sqltypes.DateTime): ...
class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime):
__visit_name__: str
class DATETIME2(_DateTimeBase, sqltypes.DateTime):
__visit_name__: str
precision: Any
def __init__(self, precision: Incomplete | None = None, **kw) -> None: ...
class DATETIMEOFFSET(_DateTimeBase, sqltypes.DateTime):
__visit_name__: str
precision: Any
def __init__(self, precision: Incomplete | None = None, **kw) -> None: ...
class _UnicodeLiteral:
def literal_processor(self, dialect): ...
class _MSUnicode(_UnicodeLiteral, sqltypes.Unicode): ...
class _MSUnicodeText(_UnicodeLiteral, sqltypes.UnicodeText): ...
class TIMESTAMP(sqltypes._Binary):
__visit_name__: str
length: Any
convert_int: Any
def __init__(self, convert_int: bool = False) -> None: ...
def result_processor(self, dialect, coltype): ...
class ROWVERSION(TIMESTAMP):
__visit_name__: str
class NTEXT(sqltypes.UnicodeText):
__visit_name__: str
class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary):
__visit_name__: str
filestream: bool
@overload
def __init__(self, length: Literal["max"] | None, filestream: Literal[True]) -> None: ...
@overload
def __init__(self, *, filestream: Literal[True]) -> None: ...
@overload
def __init__(self, length: Incomplete | None = None, filestream: Literal[False] = False) -> None: ...
class IMAGE(sqltypes.LargeBinary):
__visit_name__: str
class XML(sqltypes.Text):
__visit_name__: str
class BIT(sqltypes.Boolean):
__visit_name__: str
class MONEY(sqltypes.TypeEngine):
__visit_name__: str
class SMALLMONEY(sqltypes.TypeEngine):
__visit_name__: str
class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
__visit_name__: str
class SQL_VARIANT(sqltypes.TypeEngine):
__visit_name__: str
class TryCast(Cast):
__visit_name__: str
stringify_dialect: str
inherit_cache: bool
def __init__(self, *arg, **kw) -> None: ...
try_cast: Any
MSDateTime: Any
MSDate: Any
MSReal = REAL
MSTinyInteger = TINYINT
MSTime = TIME
MSSmallDateTime = SMALLDATETIME
MSDateTime2 = DATETIME2
MSDateTimeOffset = DATETIMEOFFSET
MSText = TEXT
MSNText = NTEXT
MSString = VARCHAR
MSNVarchar = NVARCHAR
MSChar = CHAR
MSNChar = NCHAR
MSBinary = BINARY
MSVarBinary = VARBINARY
MSImage = IMAGE
MSBit = BIT
MSMoney = MONEY
MSSmallMoney = SMALLMONEY
MSUniqueIdentifier = UNIQUEIDENTIFIER
MSVariant = SQL_VARIANT
ischema_names: Any
class MSTypeCompiler(compiler.GenericTypeCompiler):
    # Renders DDL type strings for the SQL Server dialect; one visit_*
    # method per generic or MSSQL-specific type.
    def visit_FLOAT(self, type_, **kw): ...
    def visit_TINYINT(self, type_, **kw): ...
    def visit_TIME(self, type_, **kw): ...
    def visit_TIMESTAMP(self, type_, **kw): ...
    def visit_ROWVERSION(self, type_, **kw): ...
    def visit_datetime(self, type_, **kw): ...
    def visit_DATETIMEOFFSET(self, type_, **kw): ...
    def visit_DATETIME2(self, type_, **kw): ...
    def visit_SMALLDATETIME(self, type_, **kw): ...
    def visit_unicode(self, type_, **kw): ...
    def visit_text(self, type_, **kw): ...
    def visit_unicode_text(self, type_, **kw): ...
    def visit_NTEXT(self, type_, **kw): ...
    def visit_TEXT(self, type_, **kw): ...
    def visit_VARCHAR(self, type_, **kw): ...
    def visit_CHAR(self, type_, **kw): ...
    def visit_NCHAR(self, type_, **kw): ...
    def visit_NVARCHAR(self, type_, **kw): ...
    def visit_date(self, type_, **kw): ...
    def visit__BASETIMEIMPL(self, type_, **kw): ...
    def visit_time(self, type_, **kw): ...
    def visit_large_binary(self, type_, **kw): ...
    def visit_IMAGE(self, type_, **kw): ...
    def visit_XML(self, type_, **kw): ...
    def visit_VARBINARY(self, type_, **kw): ...
    def visit_boolean(self, type_, **kw): ...
    def visit_BIT(self, type_, **kw): ...
    def visit_JSON(self, type_, **kw): ...
    def visit_MONEY(self, type_, **kw): ...
    def visit_SMALLMONEY(self, type_, **kw): ...
    def visit_UNIQUEIDENTIFIER(self, type_, **kw): ...
    def visit_SQL_VARIANT(self, type_, **kw): ...
class MSExecutionContext(default.DefaultExecutionContext):
    # Per-statement execution state for MSSQL, including IDENTITY/lastrowid
    # handling and sequence firing.
    def pre_exec(self) -> None: ...
    cursor_fetch_strategy: Any
    def post_exec(self) -> None: ...
    def get_lastrowid(self): ...
    @property
    def rowcount(self): ...
    def handle_dbapi_exception(self, e) -> None: ...
    def fire_sequence(self, seq, type_): ...
    def get_insert_default(self, column): ...
class MSSQLCompiler(compiler.SQLCompiler):
    # SQL statement compiler for SQL Server: TOP/OFFSET-FETCH pagination,
    # table hints, OUTPUT (returning) clauses and JSON operators.
    returning_precedes_values: bool
    extract_map: Any
    tablealiases: Any
    def __init__(self, *args, **kwargs) -> None: ...
    def visit_now_func(self, fn, **kw): ...
    def visit_current_date_func(self, fn, **kw): ...
    def visit_length_func(self, fn, **kw): ...
    def visit_char_length_func(self, fn, **kw): ...
    def visit_concat_op_binary(self, binary, operator, **kw): ...
    def visit_true(self, expr, **kw): ...
    def visit_false(self, expr, **kw): ...
    def visit_match_op_binary(self, binary, operator, **kw): ...
    def get_select_precolumns(self, select, **kw): ...
    def get_from_hint_text(self, table, text): ...
    def get_crud_hint_text(self, table, text): ...
    def fetch_clause(self, cs, **kwargs): ...
    def limit_clause(self, cs, **kwargs): ...
    def visit_try_cast(self, element, **kw): ...
    def translate_select_structure(self, select_stmt, **kwargs): ...
    # Signatures below add MSSQL-only keyword parameters, hence the
    # override ignores.
    def visit_table(self, table, mssql_aliased: bool = ..., iscrud: bool = ..., **kwargs): ... # type: ignore[override]
    def visit_alias(self, alias, **kw): ...
    def visit_column(self, column, add_to_result_map: Incomplete | None = ..., **kw): ... # type: ignore[override]
    def visit_extract(self, extract, **kw): ...
    def visit_savepoint(self, savepoint_stmt): ...
    def visit_rollback_to_savepoint(self, savepoint_stmt): ...
    def visit_binary(self, binary, **kwargs): ...
    def returning_clause(self, stmt, returning_cols): ...
    def get_cte_preamble(self, recursive): ...
    def label_select_column(self, select, column, asfrom): ...
    def for_update_clause(self, select, **kw): ...
    def order_by_clause(self, select, **kw): ...
    def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ...
    def delete_table_clause(self, delete_stmt, from_table, extra_froms): ...
    def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ...
    def visit_empty_set_expr(self, type_): ...
    def visit_is_distinct_from_binary(self, binary, operator, **kw): ...
    def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ...
    def visit_json_getitem_op_binary(self, binary, operator, **kw): ...
    def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ...
    def visit_sequence(self, seq, **kw): ...
class MSSQLStrictCompiler(MSSQLCompiler):
    # Stricter variant that renders literals inline where binds are not
    # permitted (ANSI bind rules).
    ansi_bind_rules: bool
    def visit_in_op_binary(self, binary, operator, **kw): ...
    def visit_not_in_op_binary(self, binary, operator, **kw): ...
    def render_literal_value(self, value, type_): ...
class MSDDLCompiler(compiler.DDLCompiler):
    # DDL compiler for SQL Server (CREATE/DROP statements, IDENTITY,
    # computed columns, sequences).
    def get_column_specification(self, column, **kwargs): ...
    # Adds an MSSQL-only keyword parameter, hence the override ignore.
    def visit_create_index(self, create, include_schema: bool = False): ... # type: ignore[override]
    def visit_drop_index(self, drop): ...
    def visit_primary_key_constraint(self, constraint): ...
    def visit_unique_constraint(self, constraint): ...
    def visit_computed_column(self, generated): ...
    def visit_create_sequence(self, create, **kw): ...
    def visit_identity_column(self, identity, **kw): ...
class MSIdentifierPreparer(compiler.IdentifierPreparer):
    # Identifier quoting for SQL Server (bracket quoting, reserved words).
    reserved_words: Any
    def __init__(self, dialect) -> None: ...
    def quote_schema(self, schema, force: Incomplete | None = None): ...
class MSDialect(default.DefaultDialect):
    # Base dialect for Microsoft SQL Server; DBAPI-specific subclasses
    # (pyodbc, pymssql, mxodbc) refine connection handling.
    name: str
    supports_statement_cache: bool
    supports_default_values: bool
    supports_empty_insert: bool
    use_scope_identity: bool
    max_identifier_length: int
    schema_name: str
    implicit_returning: bool
    full_returning: bool
    colspecs: Any
    engine_config_types: Any
    ischema_names: Any
    supports_sequences: bool
    sequences_optional: bool
    default_sequence_base: int
    supports_native_boolean: bool
    non_native_boolean_check_constraint: bool
    supports_unicode_binds: bool
    postfetch_lastrowid: bool
    legacy_schema_aliasing: bool
    server_version_info: Any
    statement_compiler: Any
    ddl_compiler: Any
    type_compiler: Any
    preparer: Any
    construct_arguments: Any
    query_timeout: Any
    deprecate_large_types: Any
    isolation_level: Any
    def __init__(
        self,
        query_timeout: Incomplete | None = None,
        use_scope_identity: bool = True,
        schema_name: str = "dbo",
        isolation_level: Incomplete | None = None,
        deprecate_large_types: Incomplete | None = None,
        json_serializer: Incomplete | None = None,
        json_deserializer: Incomplete | None = None,
        legacy_schema_aliasing: Incomplete | None = None,
        ignore_no_transaction_on_rollback: bool = False,
        **opts,
    ) -> None: ...
    def do_savepoint(self, connection, name) -> None: ...
    def do_release_savepoint(self, connection, name) -> None: ...
    def set_isolation_level(self, connection, level) -> None: ...
    def get_isolation_level(self, dbapi_connection): ...
    def initialize(self, connection) -> None: ...
    def on_connect(self): ...
    # NOTE(review): the reflection methods below take extra positional
    # dbname/owner parameters compared to the generic Dialect interface —
    # presumably added by a decorator in the implementation; confirm
    # against the runtime source.
    def has_table(self, connection, tablename, dbname, owner, schema): ...
    def has_sequence(self, connection, sequencename, dbname, owner, schema): ...
    def get_sequence_names(self, connection, dbname, owner, schema, **kw): ...
    def get_schema_names(self, connection, **kw): ...
    def get_table_names(self, connection, dbname, owner, schema, **kw): ...
    def get_view_names(self, connection, dbname, owner, schema, **kw): ...
    def get_indexes(self, connection, tablename, dbname, owner, schema, **kw): ...
    def get_view_definition(self, connection, viewname, dbname, owner, schema, **kw): ...
    def get_columns(self, connection, tablename, dbname, owner, schema, **kw): ...
    def get_pk_constraint(self, connection, tablename, dbname, owner, schema, **kw): ...
    def get_foreign_keys(self, connection, tablename, dbname, owner, schema, **kw): ...

View File

@@ -1,35 +0,0 @@
from typing import Any
from ...sql import expression
from ...sql.type_api import TypeDecorator
ischema: Any  # MetaData collection for the INFORMATION_SCHEMA reflection tables below
class CoerceUnicode(TypeDecorator):
    # TypeDecorator that coerces bound identifier values to unicode for
    # INFORMATION_SCHEMA queries.
    impl: Any
    cache_ok: bool
    def process_bind_param(self, value, dialect): ...
    def bind_expression(self, bindvalue): ...
class _cast_on_2005(expression.ColumnElement[Any]):
    # Column element wrapping a bind value; per its name, presumably cast
    # only on SQL Server 2005+ — confirm against the runtime source.
    bindvalue: Any
    def __init__(self, bindvalue) -> None: ...
# Table objects over INFORMATION_SCHEMA / system views used for reflection.
schemata: Any
tables: Any
columns: Any
mssql_temp_table_columns: Any
constraints: Any
column_constraints: Any
key_constraints: Any
ref_constraints: Any
views: Any
computed_columns: Any
sequences: Any
class IdentitySqlVariant(TypeDecorator):
    # TypeDecorator applying a column expression to SQL_VARIANT identity
    # reflection columns.
    impl: Any
    cache_ok: bool
    def column_expression(self, colexpr): ...
identity_columns: Any  # reflection Table for identity column metadata

View File

@@ -1,10 +0,0 @@
from ...sql.sqltypes import JSON as _JSON
class JSON(_JSON): ...  # MSSQL-flavored JSON type, stub subclass of the generic JSON
class _FormatTypeMixin:
    # Mixin supplying bind/literal processors that format JSON path keys.
    def bind_processor(self, dialect): ...
    def literal_processor(self, dialect): ...
class JSONIndexType(_FormatTypeMixin, _JSON.JSONIndexType): ...  # index access with MSSQL path formatting
class JSONPathType(_FormatTypeMixin, _JSON.JSONPathType): ...  # path access with MSSQL path formatting

View File

@@ -1,27 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...connectors.mxodbc import MxODBCConnector
from .base import VARBINARY, MSDialect, _MSDate, _MSTime
from .pyodbc import MSExecutionContext_pyodbc, _MSNumeric_pyodbc
class _MSNumeric_mxodbc(_MSNumeric_pyodbc): ...  # numeric handling shared with the pyodbc driver
class _MSDate_mxodbc(_MSDate):
    # Date type with an mxODBC-specific bind processor.
    def bind_processor(self, dialect): ...
class _MSTime_mxodbc(_MSTime):
    # Time type with an mxODBC-specific bind processor.
    def bind_processor(self, dialect): ...
class _VARBINARY_mxodbc(VARBINARY):
    # VARBINARY with an mxODBC-specific bind processor.
    def bind_processor(self, dialect): ...
class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc): ...  # inherits pyodbc execution behavior unchanged
class MSDialect_mxodbc(MxODBCConnector, MSDialect):
    # MSSQL dialect for the mxODBC DBAPI driver.
    supports_statement_cache: bool
    colspecs: Any
    description_encoding: Any
    def __init__(self, description_encoding: Incomplete | None = None, **params) -> None: ...
dialect = MSDialect_mxodbc  # module-level dialect entry point

View File

@@ -1,24 +0,0 @@
from typing import Any
from ...sql.sqltypes import Numeric
from .base import MSDialect, MSIdentifierPreparer
class _MSNumeric_pymssql(Numeric):
    # Numeric with a pymssql-specific result processor.
    def result_processor(self, dialect, type_): ...
class MSIdentifierPreparer_pymssql(MSIdentifierPreparer):
    # Identifier preparer specialized for pymssql.
    def __init__(self, dialect) -> None: ...
class MSDialect_pymssql(MSDialect):
    # MSSQL dialect for the pymssql DBAPI driver.
    supports_statement_cache: bool
    supports_native_decimal: bool
    driver: str
    preparer: Any
    colspecs: Any
    @classmethod
    def dbapi(cls): ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...
    def set_isolation_level(self, connection, level) -> None: ...
dialect = MSDialect_pymssql  # module-level dialect entry point

View File

@@ -1,45 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...connectors.pyodbc import PyODBCConnector
from ...sql.sqltypes import DateTime, Float, Numeric
from .base import BINARY, DATETIMEOFFSET, VARBINARY, MSDialect, MSExecutionContext
class _ms_numeric_pyodbc:
    # Mixin: bind processor working around pyodbc numeric binding quirks.
    def bind_processor(self, dialect): ...
class _MSNumeric_pyodbc(_ms_numeric_pyodbc, Numeric): ...  # Numeric + pyodbc numeric workaround
class _MSFloat_pyodbc(_ms_numeric_pyodbc, Float): ...  # Float + pyodbc numeric workaround
class _ms_binary_pyodbc:
    # Mixin: bind processor for binary values under pyodbc.
    def bind_processor(self, dialect): ...
class _ODBCDateTimeBindProcessor:
    # Mixin: datetime bind processor; has_tz toggles timezone handling.
    has_tz: bool
    def bind_processor(self, dialect): ...
class _ODBCDateTime(_ODBCDateTimeBindProcessor, DateTime): ...  # naive DATETIME under ODBC
class _ODBCDATETIMEOFFSET(_ODBCDateTimeBindProcessor, DATETIMEOFFSET):
    # Timezone-aware DATETIMEOFFSET under ODBC.
    has_tz: bool
class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY): ...  # VARBINARY + pyodbc binary workaround
class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY): ...  # BINARY + pyodbc binary workaround
class MSExecutionContext_pyodbc(MSExecutionContext):
    # Execution context hooks specific to the pyodbc driver.
    def pre_exec(self) -> None: ...
    def post_exec(self) -> None: ...
class MSDialect_pyodbc(PyODBCConnector, MSDialect):
    # MSSQL dialect for the pyodbc DBAPI driver; fast_executemany enables
    # pyodbc's bulk-insert mode.
    supports_statement_cache: bool
    supports_sane_rowcount_returning: bool
    colspecs: Any
    description_encoding: Any
    use_scope_identity: Any
    fast_executemany: Any
    def __init__(self, description_encoding: Incomplete | None = None, fast_executemany: bool = False, **params) -> None: ...
    def on_connect(self): ...
    def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def is_disconnect(self, e, connection, cursor): ...
dialect = MSDialect_pyodbc  # module-level dialect entry point

View File

@@ -1,85 +0,0 @@
from typing import Any
from .base import (
BIGINT as BIGINT,
BINARY as BINARY,
BIT as BIT,
BLOB as BLOB,
BOOLEAN as BOOLEAN,
CHAR as CHAR,
DATE as DATE,
DATETIME as DATETIME,
DECIMAL as DECIMAL,
DOUBLE as DOUBLE,
ENUM as ENUM,
FLOAT as FLOAT,
INTEGER as INTEGER,
JSON as JSON,
LONGBLOB as LONGBLOB,
LONGTEXT as LONGTEXT,
MEDIUMBLOB as MEDIUMBLOB,
MEDIUMINT as MEDIUMINT,
MEDIUMTEXT as MEDIUMTEXT,
NCHAR as NCHAR,
NUMERIC as NUMERIC,
NVARCHAR as NVARCHAR,
REAL as REAL,
SET as SET,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
TINYBLOB as TINYBLOB,
TINYINT as TINYINT,
TINYTEXT as TINYTEXT,
VARBINARY as VARBINARY,
VARCHAR as VARCHAR,
YEAR as YEAR,
)
from .dml import Insert as Insert, insert as insert
from .expression import match as match
__all__ = (
"BIGINT",
"BINARY",
"BIT",
"BLOB",
"BOOLEAN",
"CHAR",
"DATE",
"DATETIME",
"DECIMAL",
"DOUBLE",
"ENUM",
"DECIMAL",
"FLOAT",
"INTEGER",
"INTEGER",
"JSON",
"LONGBLOB",
"LONGTEXT",
"MEDIUMBLOB",
"MEDIUMINT",
"MEDIUMTEXT",
"NCHAR",
"NVARCHAR",
"NUMERIC",
"SET",
"SMALLINT",
"REAL",
"TEXT",
"TIME",
"TIMESTAMP",
"TINYBLOB",
"TINYINT",
"TINYTEXT",
"VARBINARY",
"VARCHAR",
"YEAR",
"dialect",
"insert",
"Insert",
"match",
)
dialect: Any

View File

@@ -1,74 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import AdaptedConnection
from .pymysql import MySQLDialect_pymysql
class AsyncAdapt_aiomysql_cursor:
    # Synchronous DBAPI-cursor facade over an aiomysql async cursor; the
    # await_ helper drives the underlying coroutines.
    server_side: bool
    await_: Any
    def __init__(self, adapt_connection) -> None: ...
    @property
    def description(self): ...
    @property
    def rowcount(self): ...
    @property
    def arraysize(self): ...
    @arraysize.setter
    def arraysize(self, value) -> None: ...
    @property
    def lastrowid(self): ...
    def close(self) -> None: ...
    def execute(self, operation, parameters: Incomplete | None = None): ...
    def executemany(self, operation, seq_of_parameters): ...
    def setinputsizes(self, *inputsizes) -> None: ...
    def __iter__(self): ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def fetchall(self): ...
class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor):
    # Server-side (streaming) variant of the adapted cursor.
    server_side: bool
    await_: Any
    def __init__(self, adapt_connection) -> None: ...
    def close(self) -> None: ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def fetchall(self): ...
class AsyncAdapt_aiomysql_connection(AdaptedConnection):
    # Synchronous DBAPI-connection facade over an aiomysql connection.
    await_: Any
    dbapi: Any
    def __init__(self, dbapi, connection) -> None: ...
    def ping(self, reconnect): ...
    def character_set_name(self): ...
    def autocommit(self, value) -> None: ...
    def cursor(self, server_side: bool = False): ...
    def rollback(self) -> None: ...
    def commit(self) -> None: ...
    def close(self) -> None: ...
class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection):
    # Fallback adapter used when no running event loop is available.
    await_: Any
class AsyncAdapt_aiomysql_dbapi:
    # DBAPI-module facade combining aiomysql with pymysql's constants.
    aiomysql: Any
    pymysql: Any
    paramstyle: str
    def __init__(self, aiomysql, pymysql) -> None: ...
    def connect(self, *arg, **kw): ...
class MySQLDialect_aiomysql(MySQLDialect_pymysql):
    # Async MySQL dialect built on aiomysql, reusing pymysql behavior.
    driver: str
    supports_statement_cache: bool
    supports_server_side_cursors: bool
    is_async: bool
    @classmethod
    def dbapi(cls): ...
    @classmethod
    def get_pool_class(cls, url): ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...
    def get_driver_connection(self, connection): ...
dialect = MySQLDialect_aiomysql  # module-level dialect entry point

View File

@@ -1,84 +0,0 @@
from _typeshed import Incomplete, ReadableBuffer
from collections.abc import Iterable
from typing import Any, SupportsBytes
from typing_extensions import SupportsIndex
from ...engine import AdaptedConnection
from .pymysql import MySQLDialect_pymysql
class AsyncAdapt_asyncmy_cursor:
    # Synchronous DBAPI-cursor facade over an asyncmy async cursor.
    server_side: bool
    await_: Any
    def __init__(self, adapt_connection) -> None: ...
    @property
    def description(self): ...
    @property
    def rowcount(self): ...
    @property
    def arraysize(self): ...
    @arraysize.setter
    def arraysize(self, value) -> None: ...
    @property
    def lastrowid(self): ...
    def close(self) -> None: ...
    def execute(self, operation, parameters: Incomplete | None = None): ...
    def executemany(self, operation, seq_of_parameters): ...
    def setinputsizes(self, *inputsizes) -> None: ...
    def __iter__(self): ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def fetchall(self): ...
class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor):
    # Server-side (streaming) variant of the adapted cursor.
    server_side: bool
    await_: Any
    def __init__(self, adapt_connection) -> None: ...
    def close(self) -> None: ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def fetchall(self): ...
class AsyncAdapt_asyncmy_connection(AdaptedConnection):
    # Synchronous DBAPI-connection facade over an asyncmy connection.
    await_: Any
    dbapi: Any
    def __init__(self, dbapi, connection) -> None: ...
    def ping(self, reconnect): ...
    def character_set_name(self): ...
    def autocommit(self, value) -> None: ...
    def cursor(self, server_side: bool = False): ...
    def rollback(self) -> None: ...
    def commit(self) -> None: ...
    def close(self) -> None: ...
class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection):
    # Fallback adapter used when no running event loop is available.
    await_: Any
# DBAPI Binary() constructor: coerces any bytes-convertible value to bytes.
def _Binary(x: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> bytes: ...
class AsyncAdapt_asyncmy_dbapi:
    # DBAPI-module facade over asyncmy, re-exporting DBAPI type singletons.
    asyncmy: Any
    pymysql: Any
    paramstyle: str
    STRING: Incomplete
    NUMBER: Incomplete
    BINARY: Incomplete
    DATETIME: Incomplete
    TIMESTAMP: Incomplete
    Binary = staticmethod(_Binary)
    def __init__(self, asyncmy: Any) -> None: ...
    def connect(self, *arg, **kw): ...
class MySQLDialect_asyncmy(MySQLDialect_pymysql):
    # Async MySQL dialect built on asyncmy, reusing pymysql behavior.
    driver: str
    supports_statement_cache: bool
    supports_server_side_cursors: bool
    is_async: bool
    @classmethod
    def dbapi(cls): ...
    @classmethod
    def get_pool_class(cls, url): ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...
    def get_driver_connection(self, connection): ...
dialect = MySQLDialect_asyncmy  # module-level dialect entry point

View File

@@ -1,241 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import default
from ...sql import compiler
from ...sql.sqltypes import BINARY as BINARY, BLOB as BLOB, BOOLEAN as BOOLEAN, DATE as DATE, VARBINARY as VARBINARY
from .enumerated import ENUM as ENUM, SET as SET
from .json import JSON as JSON
from .reserved_words import RESERVED_WORDS_MARIADB as RESERVED_WORDS_MARIADB, RESERVED_WORDS_MYSQL as RESERVED_WORDS_MYSQL
from .types import (
BIGINT as BIGINT,
BIT as BIT,
CHAR as CHAR,
DATETIME as DATETIME,
DECIMAL as DECIMAL,
DOUBLE as DOUBLE,
FLOAT as FLOAT,
INTEGER as INTEGER,
LONGBLOB as LONGBLOB,
LONGTEXT as LONGTEXT,
MEDIUMBLOB as MEDIUMBLOB,
MEDIUMINT as MEDIUMINT,
MEDIUMTEXT as MEDIUMTEXT,
NCHAR as NCHAR,
NUMERIC as NUMERIC,
NVARCHAR as NVARCHAR,
REAL as REAL,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
TINYBLOB as TINYBLOB,
TINYINT as TINYINT,
TINYTEXT as TINYTEXT,
VARCHAR as VARCHAR,
YEAR as YEAR,
)
# Compiled regexes used for statement classification (presumably
# autocommit detection and SET parsing — confirm against runtime source).
AUTOCOMMIT_RE: Any
SET_RE: Any
# Legacy "MS*" (MySQL) aliases retained for backwards compatibility with
# the uppercase type names imported above.
MSTime = TIME
MSSet = SET
MSEnum = ENUM
MSLongBlob = LONGBLOB
MSMediumBlob = MEDIUMBLOB
MSTinyBlob = TINYBLOB
MSBlob = BLOB
MSBinary = BINARY
MSVarBinary = VARBINARY
MSNChar = NCHAR
MSNVarChar = NVARCHAR
MSChar = CHAR
MSString = VARCHAR
MSLongText = LONGTEXT
MSMediumText = MEDIUMTEXT
MSTinyText = TINYTEXT
MSText = TEXT
MSYear = YEAR
MSTimeStamp = TIMESTAMP
MSBit = BIT
MSSmallInteger = SMALLINT
MSTinyInteger = TINYINT
MSMediumInteger = MEDIUMINT
MSBigInteger = BIGINT
MSNumeric = NUMERIC
MSDecimal = DECIMAL
MSDouble = DOUBLE
MSReal = REAL
MSFloat = FLOAT
MSInteger = INTEGER
# Generic-type to MySQL-type mapping and reflection name mapping.
colspecs: Any
ischema_names: Any
class MySQLExecutionContext(default.DefaultExecutionContext):
    # Per-statement execution state for MySQL/MariaDB.
    def should_autocommit_text(self, statement): ...
    def create_server_side_cursor(self): ...
    def fire_sequence(self, seq, type_): ...
class MySQLCompiler(compiler.SQLCompiler):
    # SQL statement compiler for MySQL/MariaDB: ON DUPLICATE KEY UPDATE,
    # MATCH ... AGAINST, JSON operators, LIMIT handling, regexp operators.
    render_table_with_column_in_update_from: bool
    extract_map: Any
    def default_from(self): ...
    def visit_random_func(self, fn, **kw): ...
    def visit_sequence(self, seq, **kw): ...
    def visit_sysdate_func(self, fn, **kw): ...
    def visit_json_getitem_op_binary(self, binary, operator, **kw): ...
    def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ...
    def visit_on_duplicate_key_update(self, on_duplicate, **kw): ...
    def visit_concat_op_binary(self, binary, operator, **kw): ...
    def visit_mysql_match(self, element, **kw): ...
    def visit_match_op_binary(self, binary, operator, **kw): ...
    def get_from_hint_text(self, table, text): ...
    def visit_typeclause(self, typeclause, type_: Incomplete | None = None, **kw): ...
    def visit_cast(self, cast, **kw): ...
    def render_literal_value(self, value, type_): ...
    def visit_true(self, element, **kw): ...
    def visit_false(self, element, **kw): ...
    def get_select_precolumns(self, select, **kw): ...
    def visit_join(self, join, asfrom: bool = False, from_linter: Incomplete | None = None, **kwargs): ...
    def for_update_clause(self, select, **kw): ...
    def limit_clause(self, select, **kw): ...
    def update_limit_clause(self, update_stmt): ...
    def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ...
    def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw) -> None: ...
    def delete_table_clause(self, delete_stmt, from_table, extra_froms): ...
    def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ...
    def visit_empty_set_expr(self, element_types): ...
    def visit_is_distinct_from_binary(self, binary, operator, **kw): ...
    def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ...
    def visit_regexp_match_op_binary(self, binary, operator, **kw): ...
    def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ...
    def visit_regexp_replace_op_binary(self, binary, operator, **kw): ...
class MySQLDDLCompiler(compiler.DDLCompiler):
    # DDL compiler for MySQL/MariaDB (table options, indexes, comments).
    def get_column_specification(self, column, **kw): ...
    def post_create_table(self, table): ...
    def visit_create_index(self, create, **kw): ...
    def visit_primary_key_constraint(self, constraint): ...
    def visit_drop_index(self, drop): ...
    def visit_drop_constraint(self, drop): ...
    def define_constraint_match(self, constraint): ...
    def visit_set_table_comment(self, create): ...
    def visit_drop_table_comment(self, create): ...
    def visit_set_column_comment(self, create): ...
class MySQLTypeCompiler(compiler.GenericTypeCompiler):
    # Renders DDL type strings for MySQL/MariaDB; one visit_* method per
    # generic or MySQL-specific type.
    def visit_NUMERIC(self, type_, **kw): ...
    def visit_DECIMAL(self, type_, **kw): ...
    def visit_DOUBLE(self, type_, **kw): ...
    def visit_REAL(self, type_, **kw): ...
    def visit_FLOAT(self, type_, **kw): ...
    def visit_INTEGER(self, type_, **kw): ...
    def visit_BIGINT(self, type_, **kw): ...
    def visit_MEDIUMINT(self, type_, **kw): ...
    def visit_TINYINT(self, type_, **kw): ...
    def visit_SMALLINT(self, type_, **kw): ...
    def visit_BIT(self, type_, **kw): ...
    def visit_DATETIME(self, type_, **kw): ...
    def visit_DATE(self, type_, **kw): ...
    def visit_TIME(self, type_, **kw): ...
    def visit_TIMESTAMP(self, type_, **kw): ...
    def visit_YEAR(self, type_, **kw): ...
    def visit_TEXT(self, type_, **kw): ...
    def visit_TINYTEXT(self, type_, **kw): ...
    def visit_MEDIUMTEXT(self, type_, **kw): ...
    def visit_LONGTEXT(self, type_, **kw): ...
    def visit_VARCHAR(self, type_, **kw): ...
    def visit_CHAR(self, type_, **kw): ...
    def visit_NVARCHAR(self, type_, **kw): ...
    def visit_NCHAR(self, type_, **kw): ...
    def visit_VARBINARY(self, type_, **kw): ...
    def visit_JSON(self, type_, **kw): ...
    def visit_large_binary(self, type_, **kw): ...
    def visit_enum(self, type_, **kw): ...
    def visit_BLOB(self, type_, **kw): ...
    def visit_TINYBLOB(self, type_, **kw): ...
    def visit_MEDIUMBLOB(self, type_, **kw): ...
    def visit_LONGBLOB(self, type_, **kw): ...
    def visit_ENUM(self, type_, **kw): ...
    def visit_SET(self, type_, **kw): ...
    def visit_BOOLEAN(self, type_, **kw): ...
class MySQLIdentifierPreparer(compiler.IdentifierPreparer):
    # Backtick identifier quoting; server_ansiquotes switches to ANSI mode.
    reserved_words: Any
    def __init__(self, dialect, server_ansiquotes: bool = False, **kw) -> None: ...
class MariaDBIdentifierPreparer(MySQLIdentifierPreparer):
    # Same as MySQL but with MariaDB's reserved-word list.
    reserved_words: Any
class MySQLDialect(default.DefaultDialect):
    # Base dialect for MySQL and MariaDB; DBAPI-specific subclasses
    # (mysqldb, pymysql, mysqlconnector, ...) refine connection handling.
    logger: Any
    name: str
    supports_statement_cache: bool
    supports_alter: bool
    supports_native_boolean: bool
    max_identifier_length: int
    max_index_name_length: int
    max_constraint_name_length: int
    supports_native_enum: bool
    supports_sequences: bool
    sequences_optional: bool
    supports_for_update_of: bool
    supports_default_values: bool
    supports_default_metavalue: bool
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    supports_multivalues_insert: bool
    supports_comments: bool
    inline_comments: bool
    default_paramstyle: str
    colspecs: Any
    cte_follows_insert: bool
    statement_compiler: Any
    ddl_compiler: Any
    type_compiler: Any
    ischema_names: Any
    preparer: Any
    is_mariadb: bool
    construct_arguments: Any
    isolation_level: Any
    def __init__(
        self,
        isolation_level: Incomplete | None = None,
        json_serializer: Incomplete | None = None,
        json_deserializer: Incomplete | None = None,
        is_mariadb: Incomplete | None = None,
        **kwargs,
    ) -> None: ...
    def on_connect(self): ...
    def set_isolation_level(self, connection, level) -> None: ...
    def get_isolation_level(self, connection): ...
    # XA two-phase transaction hooks.
    def do_begin_twophase(self, connection, xid) -> None: ...
    def do_prepare_twophase(self, connection, xid) -> None: ...
    def do_rollback_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_commit_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_recover_twophase(self, connection): ...
    def is_disconnect(self, e, connection, cursor): ...
    # Narrowed signatures relative to the generic Dialect, hence the
    # override ignores.
    def has_table(self, connection, table_name, schema: Incomplete | None = None): ... # type: ignore[override]
    def has_sequence(self, connection, sequence_name, schema: Incomplete | None = None): ... # type: ignore[override]
    def get_sequence_names(self, connection, schema: Incomplete | None = None, **kw): ...
    identifier_preparer: Any
    def initialize(self, connection) -> None: ...
    def get_schema_names(self, connection, **kw): ...
    def get_table_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_view_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_table_options(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_columns(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_check_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_table_comment(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_indexes(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_view_definition(self, connection, view_name, schema: Incomplete | None = None, **kw): ...
class _DecodingRow:
    # Row proxy that decodes values using the connection charset on access.
    rowproxy: Any
    charset: Any
    def __init__(self, rowproxy, charset) -> None: ...
    def __getitem__(self, index): ...
    def __getattr__(self, attr: str): ...

View File

@@ -1,21 +0,0 @@
from typing import Any
from .base import BIT
from .mysqldb import MySQLDialect_mysqldb
class _cymysqlBIT(BIT):
    # BIT with a cymysql-specific result processor.
    def result_processor(self, dialect, coltype): ...
class MySQLDialect_cymysql(MySQLDialect_mysqldb):
    # MySQL dialect for the cymysql DBAPI driver.
    driver: str
    supports_statement_cache: bool
    description_encoding: Any
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    supports_unicode_statements: bool
    colspecs: Any
    @classmethod
    def dbapi(cls): ...
    def is_disconnect(self, e, connection, cursor): ...
dialect = MySQLDialect_cymysql  # module-level dialect entry point

View File

@@ -1,23 +0,0 @@
from typing import Any
from ...sql.dml import Insert as StandardInsert
from ...sql.elements import ClauseElement
from ...util.langhelpers import memoized_property
class Insert(StandardInsert):
    # MySQL INSERT supporting ON DUPLICATE KEY UPDATE; `inserted` exposes
    # the VALUES() pseudo-table of the row being inserted.
    stringify_dialect: str
    inherit_cache: bool
    @property
    def inserted(self): ...
    @memoized_property
    def inserted_alias(self): ...
    def on_duplicate_key_update(self, *args, **kw) -> None: ...
insert: Any  # public constructor for the MySQL-specific Insert
class OnDuplicateClause(ClauseElement):
    # Clause element rendering the ON DUPLICATE KEY UPDATE portion.
    __visit_name__: str
    stringify_dialect: str
    inserted_alias: Any
    update: Any
    def __init__(self, inserted_alias, update) -> None: ...

View File

@@ -1,21 +0,0 @@
from typing import Any
from ...sql import sqltypes
from .types import _StringType
class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _StringType): # type: ignore[misc] # incompatible with base class
    # MySQL native ENUM column type.
    __visit_name__: str
    native_enum: bool
    def __init__(self, *enums, **kw) -> None: ...
    @classmethod
    def adapt_emulated_to_native(cls, impl, **kw): ...
class SET(_StringType):
    # MySQL SET column type; retrieve_as_bitwise controls integer-bitmask
    # retrieval of the member set.
    __visit_name__: str
    retrieve_as_bitwise: Any
    values: Any
    def __init__(self, *values, **kw) -> None: ...
    def column_expression(self, colexpr): ...
    def result_processor(self, dialect, coltype): ...
    def bind_processor(self, dialect): ...
    def adapt(self, impltype, **kw): ...

View File

@@ -1,13 +0,0 @@
from typing import Any
from ...sql import elements
from ...sql.base import Generative
class match(Generative, elements.BinaryExpression):
    # MATCH (cols) AGAINST (expr) full-text expression; the in_*/with_*
    # generative methods attach search modifiers.
    __visit_name__: str
    inherit_cache: bool
    def __init__(self, *cols, **kw) -> None: ...
    modifiers: Any
    def in_boolean_mode(self) -> None: ...
    def in_natural_language_mode(self) -> None: ...
    def with_query_expansion(self) -> None: ...

View File

@@ -1,10 +0,0 @@
from ...sql import sqltypes
class JSON(sqltypes.JSON): ...  # MySQL-flavored JSON type, stub subclass of the generic JSON
class _FormatTypeMixin:
    # Mixin supplying bind/literal processors that format JSON path keys.
    def bind_processor(self, dialect): ...
    def literal_processor(self, dialect): ...
class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType): ...  # index access with MySQL path formatting
class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType): ...  # path access with MySQL path formatting

View File

@@ -1,11 +0,0 @@
from typing import Any
from .base import MySQLDialect
class MariaDBDialect(MySQLDialect):
    # MariaDB specialization of the MySQL dialect.
    is_mariadb: bool
    supports_statement_cache: bool
    name: str
    preparer: Any
# Returns a MariaDB dialect class for the named driver — presumably by
# combining MariaDBDialect with the driver's MySQL dialect; confirm
# against the runtime source.
def loader(driver): ...

View File

@@ -1,36 +0,0 @@
from typing import Any
from .base import MySQLCompiler, MySQLDialect, MySQLExecutionContext
mariadb_cpy_minimum_version: Any  # minimum supported mariadb-connector version
class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext):
    # Cursor creation hooks for the mariadb-connector driver.
    def create_server_side_cursor(self): ...
    def create_default_cursor(self): ...
class MySQLCompiler_mariadbconnector(MySQLCompiler): ...  # no compiler changes beyond the base
class MySQLDialect_mariadbconnector(MySQLDialect):
    # MySQL/MariaDB dialect for the mariadb-connector (C) DBAPI driver.
    driver: str
    supports_statement_cache: bool
    supports_unicode_statements: bool
    encoding: str
    convert_unicode: bool
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    supports_native_decimal: bool
    default_paramstyle: str
    statement_compiler: Any
    supports_server_side_cursors: bool
    paramstyle: str
    def __init__(self, **kwargs) -> None: ...
    @classmethod
    def dbapi(cls): ...
    def is_disconnect(self, e, connection, cursor): ...
    def create_connect_args(self, url): ...
    # XA two-phase transaction hooks.
    def do_begin_twophase(self, connection, xid) -> None: ...
    def do_prepare_twophase(self, connection, xid) -> None: ...
    def do_rollback_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_commit_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
dialect = MySQLDialect_mariadbconnector  # module-level dialect entry point

View File

@@ -1,38 +0,0 @@
from typing import Any
from ...util.langhelpers import memoized_property
from .base import BIT, MySQLCompiler, MySQLDialect, MySQLIdentifierPreparer
class MySQLCompiler_mysqlconnector(MySQLCompiler):
    # Compiler tweaks for mysql-connector's escaping rules.
    def visit_mod_binary(self, binary, operator, **kw): ...
    def post_process_text(self, text): ...
    def escape_literal_column(self, text): ...
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): ...  # no quoting changes beyond the base
class _myconnpyBIT(BIT):
    # BIT whose result processing is disabled (the driver converts itself).
    def result_processor(self, dialect, coltype) -> None: ...
class MySQLDialect_mysqlconnector(MySQLDialect):
    # MySQL dialect for Oracle's mysql-connector-python DBAPI driver.
    driver: str
    supports_statement_cache: bool
    supports_unicode_binds: bool
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    supports_native_decimal: bool
    default_paramstyle: str
    statement_compiler: Any
    preparer: Any
    colspecs: Any
    def __init__(self, *arg, **kw) -> None: ...
    @property
    def description_encoding(self): ...
    @memoized_property
    def supports_unicode_statements(self): ...
    @classmethod
    def dbapi(cls): ...
    def do_ping(self, dbapi_connection): ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...
dialect = MySQLDialect_mysqlconnector  # module-level dialect entry point

View File

@@ -1,33 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...util.langhelpers import memoized_property
from .base import MySQLCompiler, MySQLDialect, MySQLExecutionContext
class MySQLExecutionContext_mysqldb(MySQLExecutionContext):
    # Execution context overriding rowcount retrieval for mysqlclient.
    @property
    def rowcount(self): ...
class MySQLCompiler_mysqldb(MySQLCompiler): ...  # no compiler changes beyond the base
class MySQLDialect_mysqldb(MySQLDialect):
    # MySQL dialect for the mysqlclient (MySQLdb) DBAPI driver; also the
    # base for pymysql and cymysql.
    driver: str
    supports_statement_cache: bool
    supports_unicode_statements: bool
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    supports_native_decimal: bool
    default_paramstyle: str
    statement_compiler: Any
    preparer: Any
    def __init__(self, **kwargs) -> None: ...
    @memoized_property
    def supports_server_side_cursors(self): ...
    @classmethod
    def dbapi(cls): ...
    def on_connect(self): ...
    def do_ping(self, dbapi_connection): ...
    def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def create_connect_args(self, url, _translate_args: Incomplete | None = None): ...
dialect = MySQLDialect_mysqldb  # module-level dialect entry point

View File

@@ -1,40 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from .base import BIT, MySQLDialect, MySQLExecutionContext
class _oursqlBIT(BIT):
    # BIT whose result processing is disabled (the driver converts itself).
    def result_processor(self, dialect, coltype) -> None: ...
class MySQLExecutionContext_oursql(MySQLExecutionContext):
    # Execution context for the oursql driver.
    @property
    def plain_query(self): ...
class MySQLDialect_oursql(MySQLDialect):
    # MySQL dialect for the (legacy) oursql DBAPI driver.
    driver: str
    supports_statement_cache: bool
    supports_unicode_binds: bool
    supports_unicode_statements: bool
    supports_native_decimal: bool
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    colspecs: Any
    @classmethod
    def dbapi(cls): ...
    def do_execute(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def do_begin(self, connection) -> None: ...
    # XA two-phase transaction hooks.
    def do_begin_twophase(self, connection, xid) -> None: ...
    def do_prepare_twophase(self, connection, xid) -> None: ...
    def do_rollback_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_commit_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    # Narrowed signature relative to the generic Dialect, hence the
    # override ignore.
    def has_table(self, connection, table_name, schema: Incomplete | None = None): ... # type: ignore[override]
    def get_table_options(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_columns(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_view_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_table_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_schema_names(self, connection, **kw): ...
    def initialize(self, connection): ...
    def is_disconnect(self, e, connection, cursor): ...
    def create_connect_args(self, url): ...
dialect = MySQLDialect_oursql

View File

@@ -1,20 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...util.langhelpers import memoized_property
from .mysqldb import MySQLDialect_mysqldb
class MySQLDialect_pymysql(MySQLDialect_mysqldb):
    # PyMySQL dialect stub; inherits most behavior from the mysqlclient dialect.
    driver: str
    supports_statement_cache: bool
    description_encoding: Any
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    @memoized_property
    def supports_server_side_cursors(self): ...
    @classmethod
    def dbapi(cls): ...
    def create_connect_args(self, url, _translate_args: Incomplete | None = None): ...
    def is_disconnect(self, e, connection, cursor): ...

# Name looked up by SQLAlchemy's dialect plugin system for this driver module.
dialect = MySQLDialect_pymysql

View File

@@ -1,20 +0,0 @@
from typing import Any
from ...connectors.pyodbc import PyODBCConnector
from .base import MySQLDialect, MySQLExecutionContext
from .types import TIME
class _pyodbcTIME(TIME):
    # TIME variant with a pyodbc-specific result processor.
    def result_processor(self, dialect, coltype): ...

class MySQLExecutionContext_pyodbc(MySQLExecutionContext):
    def get_lastrowid(self): ...

class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
    # MySQL-over-ODBC dialect stub; PyODBCConnector supplies connection logic.
    supports_statement_cache: bool
    colspecs: Any
    supports_unicode_statements: bool
    pyodbc_driver_name: str
    def on_connect(self): ...

# Name looked up by SQLAlchemy's dialect plugin system for this driver module.
dialect = MySQLDialect_pyodbc

View File

@@ -1,16 +0,0 @@
from typing import Any
class ReflectedState:
    # Mutable bag of raw facts about one table, filled in during reflection.
    columns: Any
    table_options: Any
    table_name: Any
    keys: Any
    fk_constraints: Any
    ck_constraints: Any

class MySQLTableDefinitionParser:
    # Parses SHOW CREATE TABLE output (the `show_create` text) into a
    # ReflectedState — presumably; confirm against the implementation.
    logger: Any
    dialect: Any
    preparer: Any
    def __init__(self, dialect, preparer) -> None: ...
    def parse(self, show_create, charset): ...

View File

@@ -1,4 +0,0 @@
from typing import Any
# Reserved-word collections for the MariaDB and MySQL keyword registries.
RESERVED_WORDS_MARIADB: Any
RESERVED_WORDS_MYSQL: Any

View File

@@ -1,158 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql import sqltypes
# --- Mixin bases shared by the concrete MySQL column types below. ---

class _NumericType:
    # Adds MySQL's UNSIGNED / ZEROFILL keyword arguments to numeric types.
    unsigned: Any
    zerofill: Any
    def __init__(self, unsigned: bool = False, zerofill: bool = False, **kw) -> None: ...

class _FloatType(_NumericType, sqltypes.Float):
    scale: Any
    def __init__(
        self, precision: Incomplete | None = None, scale: Incomplete | None = None, asdecimal: bool = True, **kw
    ) -> None: ...

class _IntegerType(_NumericType, sqltypes.Integer):
    display_width: Any
    def __init__(self, display_width: Incomplete | None = None, **kw) -> None: ...

class _StringType(sqltypes.String):
    # Adds MySQL charset/collation plus ASCII/BINARY/UNICODE/NATIONAL modifiers.
    charset: Any
    ascii: Any
    unicode: Any
    binary: Any
    national: Any
    def __init__(
        self,
        charset: Incomplete | None = None,
        collation: Incomplete | None = None,
        ascii: bool = False,
        binary: bool = False,
        unicode: bool = False,
        national: bool = False,
        **kw,
    ) -> None: ...

class _MatchType(sqltypes.Float, sqltypes.MatchType):  # type: ignore[misc] # incompatible with base class
    def __init__(self, **kw) -> None: ...
# Concrete MySQL numeric column types; each pairs a MySQL mixin with the
# corresponding generic SQL type and carries a visitor key in __visit_name__.

class NUMERIC(_NumericType, sqltypes.NUMERIC):
    __visit_name__: str
    def __init__(
        self, precision: Incomplete | None = None, scale: Incomplete | None = None, asdecimal: bool = True, **kw
    ) -> None: ...

class DECIMAL(_NumericType, sqltypes.DECIMAL):
    __visit_name__: str
    def __init__(
        self, precision: Incomplete | None = None, scale: Incomplete | None = None, asdecimal: bool = True, **kw
    ) -> None: ...

class DOUBLE(_FloatType):
    __visit_name__: str
    def __init__(
        self, precision: Incomplete | None = None, scale: Incomplete | None = None, asdecimal: bool = True, **kw
    ) -> None: ...

class REAL(_FloatType, sqltypes.REAL):
    __visit_name__: str
    def __init__(
        self, precision: Incomplete | None = None, scale: Incomplete | None = None, asdecimal: bool = True, **kw
    ) -> None: ...

class FLOAT(_FloatType, sqltypes.FLOAT):
    __visit_name__: str
    # Unlike the other float types above, asdecimal defaults to False here.
    def __init__(
        self, precision: Incomplete | None = None, scale: Incomplete | None = None, asdecimal: bool = False, **kw
    ) -> None: ...
    def bind_processor(self, dialect) -> None: ...

class INTEGER(_IntegerType, sqltypes.INTEGER):
    __visit_name__: str
    def __init__(self, display_width: Incomplete | None = None, **kw) -> None: ...

class BIGINT(_IntegerType, sqltypes.BIGINT):
    __visit_name__: str
    def __init__(self, display_width: Incomplete | None = None, **kw) -> None: ...

class MEDIUMINT(_IntegerType):
    __visit_name__: str
    def __init__(self, display_width: Incomplete | None = None, **kw) -> None: ...

class TINYINT(_IntegerType):
    __visit_name__: str
    def __init__(self, display_width: Incomplete | None = None, **kw) -> None: ...

class SMALLINT(_IntegerType, sqltypes.SMALLINT):
    __visit_name__: str
    def __init__(self, display_width: Incomplete | None = None, **kw) -> None: ...
class BIT(sqltypes.TypeEngine):
    __visit_name__: str
    length: Any
    def __init__(self, length: Incomplete | None = None) -> None: ...
    def result_processor(self, dialect, coltype): ...

class TIME(sqltypes.TIME):
    __visit_name__: str
    # fsp: fractional-seconds precision (per MySQL's temporal-type syntax).
    fsp: Any
    def __init__(self, timezone: bool = False, fsp: Incomplete | None = None) -> None: ...
    def result_processor(self, dialect, coltype): ...

class TIMESTAMP(sqltypes.TIMESTAMP):
    __visit_name__: str
    fsp: Any
    def __init__(self, timezone: bool = False, fsp: Incomplete | None = None) -> None: ...

class DATETIME(sqltypes.DATETIME):
    __visit_name__: str
    fsp: Any
    def __init__(self, timezone: bool = False, fsp: Incomplete | None = None) -> None: ...

class YEAR(sqltypes.TypeEngine):
    __visit_name__: str
    display_width: Any
    def __init__(self, display_width: Incomplete | None = None) -> None: ...
class TEXT(_StringType, sqltypes.TEXT):
    __visit_name__: str
    def __init__(self, length: Incomplete | None = None, **kw) -> None: ...

class TINYTEXT(_StringType):
    __visit_name__: str
    def __init__(self, **kwargs) -> None: ...

class MEDIUMTEXT(_StringType):
    __visit_name__: str
    def __init__(self, **kwargs) -> None: ...

class LONGTEXT(_StringType):
    __visit_name__: str
    def __init__(self, **kwargs) -> None: ...

class VARCHAR(_StringType, sqltypes.VARCHAR):
    __visit_name__: str
    def __init__(self, length: Incomplete | None = None, **kwargs) -> None: ...

class CHAR(_StringType, sqltypes.CHAR):
    __visit_name__: str
    def __init__(self, length: Incomplete | None = None, **kwargs) -> None: ...

class NVARCHAR(_StringType, sqltypes.NVARCHAR):
    __visit_name__: str
    def __init__(self, length: Incomplete | None = None, **kwargs) -> None: ...

class NCHAR(_StringType, sqltypes.NCHAR):
    __visit_name__: str
    def __init__(self, length: Incomplete | None = None, **kwargs) -> None: ...

# Binary large-object variants; no extra arguments beyond the generic _Binary.
class TINYBLOB(sqltypes._Binary):
    __visit_name__: str

class MEDIUMBLOB(sqltypes._Binary):
    __visit_name__: str

class LONGBLOB(sqltypes._Binary):
    __visit_name__: str

View File

@@ -1,52 +0,0 @@
from typing import Any
from .base import (
BFILE as BFILE,
BINARY_DOUBLE as BINARY_DOUBLE,
BINARY_FLOAT as BINARY_FLOAT,
BLOB as BLOB,
CHAR as CHAR,
CLOB as CLOB,
DATE as DATE,
DOUBLE_PRECISION as DOUBLE_PRECISION,
FLOAT as FLOAT,
INTERVAL as INTERVAL,
LONG as LONG,
NCHAR as NCHAR,
NCLOB as NCLOB,
NUMBER as NUMBER,
NVARCHAR as NVARCHAR,
NVARCHAR2 as NVARCHAR2,
RAW as RAW,
ROWID as ROWID,
TIMESTAMP as TIMESTAMP,
VARCHAR as VARCHAR,
VARCHAR2 as VARCHAR2,
)
# Explicit public API of the Oracle dialect package, mirroring the names
# re-exported from .base in this module's imports.
__all__ = (
    "VARCHAR",
    "NVARCHAR",
    "CHAR",
    "NCHAR",
    "DATE",
    "NUMBER",
    "BLOB",
    "BFILE",
    "CLOB",
    "NCLOB",
    "TIMESTAMP",
    "RAW",
    "FLOAT",
    "DOUBLE_PRECISION",
    "BINARY_DOUBLE",
    "BINARY_FLOAT",
    "LONG",
    "dialect",
    "INTERVAL",
    "VARCHAR2",
    "NVARCHAR2",
    "ROWID",
)
# Assigned at runtime to the default Oracle DBAPI dialect class.
dialect: Any

View File

@@ -1,222 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import default
from ...sql import ClauseElement, compiler, sqltypes
from ...sql.sqltypes import (
BLOB as BLOB,
CHAR as CHAR,
CLOB as CLOB,
FLOAT as FLOAT,
INTEGER as INTEGER,
NCHAR as NCHAR,
NVARCHAR as NVARCHAR,
TIMESTAMP as TIMESTAMP,
VARCHAR as VARCHAR,
)
# Keyword tables and Oracle-specific column types.
RESERVED_WORDS: Any
NO_ARG_FNS: Any

class RAW(sqltypes._Binary):
    __visit_name__: str

# Public alias for the same type.
OracleRaw = RAW

class NCLOB(sqltypes.Text):
    __visit_name__: str

class VARCHAR2(VARCHAR):
    __visit_name__: str

# The stub models NVARCHAR2 as a plain alias of NVARCHAR.
NVARCHAR2 = NVARCHAR

class NUMBER(sqltypes.Numeric, sqltypes.Integer):
    __visit_name__: str
    def __init__(
        self, precision: Incomplete | None = None, scale: Incomplete | None = None, asdecimal: Incomplete | None = None
    ) -> None: ...
    def adapt(self, impltype): ...

class DOUBLE_PRECISION(sqltypes.Float):
    __visit_name__: str

class BINARY_DOUBLE(sqltypes.Float):
    __visit_name__: str

class BINARY_FLOAT(sqltypes.Float):
    __visit_name__: str

class BFILE(sqltypes.LargeBinary):
    __visit_name__: str

class LONG(sqltypes.Text):
    __visit_name__: str

class DATE(sqltypes.DateTime):
    # Subclasses DateTime: Oracle's DATE carries a time component.
    __visit_name__: str

class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval):
    __visit_name__: str
    day_precision: Any
    second_precision: Any
    def __init__(self, day_precision: Incomplete | None = None, second_precision: Incomplete | None = None) -> None: ...
    def as_generic(self, allow_nulltype: bool = False): ...
    def coerce_compared_value(self, op, value): ...

class ROWID(sqltypes.TypeEngine):
    __visit_name__: str

class _OracleBoolean(sqltypes.Boolean):
    def get_dbapi_type(self, dbapi): ...

# Type-mapping tables consumed by the dialect.
colspecs: Any
ischema_names: Any
class OracleTypeCompiler(compiler.GenericTypeCompiler):
    # Renders Oracle type DDL; one visit_* hook per type name.
    def visit_datetime(self, type_, **kw): ...
    def visit_float(self, type_, **kw): ...
    def visit_unicode(self, type_, **kw): ...
    def visit_INTERVAL(self, type_, **kw): ...
    def visit_LONG(self, type_, **kw): ...
    def visit_TIMESTAMP(self, type_, **kw): ...
    def visit_DOUBLE_PRECISION(self, type_, **kw): ...
    def visit_BINARY_DOUBLE(self, type_, **kw): ...
    def visit_BINARY_FLOAT(self, type_, **kw): ...
    def visit_FLOAT(self, type_, **kw): ...
    def visit_NUMBER(self, type_, **kw): ...
    def visit_string(self, type_, **kw): ...
    def visit_VARCHAR2(self, type_, **kw): ...
    def visit_NVARCHAR2(self, type_, **kw): ...
    # Bound at class creation time (shares the NVARCHAR2 renderer upstream).
    visit_NVARCHAR: Any
    def visit_VARCHAR(self, type_, **kw): ...
    def visit_text(self, type_, **kw): ...
    def visit_unicode_text(self, type_, **kw): ...
    def visit_large_binary(self, type_, **kw): ...
    def visit_big_integer(self, type_, **kw): ...
    def visit_boolean(self, type_, **kw): ...
    def visit_RAW(self, type_, **kw): ...
    def visit_ROWID(self, type_, **kw): ...
class OracleCompiler(compiler.SQLCompiler):
    # Oracle SQL statement compiler stub.
    compound_keywords: Any
    def __init__(self, *args, **kwargs) -> None: ...
    def visit_mod_binary(self, binary, operator, **kw): ...
    def visit_now_func(self, fn, **kw): ...
    def visit_char_length_func(self, fn, **kw): ...
    def visit_match_op_binary(self, binary, operator, **kw): ...
    def visit_true(self, expr, **kw): ...
    def visit_false(self, expr, **kw): ...
    def get_cte_preamble(self, recursive): ...
    def get_select_hint_text(self, byfroms): ...
    def function_argspec(self, fn, **kw): ...
    def visit_function(self, func, **kw): ...
    def visit_table_valued_column(self, element, **kw): ...
    def default_from(self): ...
    def visit_join(self, join, from_linter: Incomplete | None = None, **kwargs): ...  # type: ignore[override]
    def visit_outer_join_column(self, vc, **kw): ...
    def visit_sequence(self, seq, **kw): ...
    def get_render_as_alias_suffix(self, alias_name_text): ...
    has_out_parameters: bool
    def returning_clause(self, stmt, returning_cols): ...
    def translate_select_structure(self, select_stmt, **kwargs): ...
    def limit_clause(self, select, **kw): ...
    def visit_empty_set_expr(self, type_): ...
    def for_update_clause(self, select, **kw): ...
    def visit_is_distinct_from_binary(self, binary, operator, **kw): ...
    def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ...
    def visit_regexp_match_op_binary(self, binary, operator, **kw): ...
    def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ...
    def visit_regexp_replace_op_binary(self, binary, operator, **kw): ...
class OracleDDLCompiler(compiler.DDLCompiler):
    # Oracle DDL compiler stub.
    def define_constraint_cascades(self, constraint): ...
    def visit_drop_table_comment(self, drop): ...
    def visit_create_index(self, create): ...
    def post_create_table(self, table): ...
    def get_identity_options(self, identity_options): ...
    def visit_computed_column(self, generated): ...
    def visit_identity_column(self, identity, **kw): ...

class OracleIdentifierPreparer(compiler.IdentifierPreparer):
    # Identifier quoting rules for Oracle.
    reserved_words: Any
    illegal_initial_characters: Any
    def format_savepoint(self, savepoint): ...

class OracleExecutionContext(default.DefaultExecutionContext):
    def fire_sequence(self, seq, type_): ...
class OracleDialect(default.DefaultDialect):
    # Core Oracle dialect stub: capability flags, type maps, and the
    # schema-reflection API (get_columns, get_indexes, ...).
    name: str
    supports_statement_cache: bool
    supports_alter: bool
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    max_identifier_length: int
    supports_simple_order_by_label: bool
    cte_follows_insert: bool
    supports_sequences: bool
    sequences_optional: bool
    postfetch_lastrowid: bool
    default_paramstyle: str
    colspecs: Any
    ischema_names: Any
    requires_name_normalize: bool
    supports_comments: bool
    supports_default_values: bool
    supports_default_metavalue: bool
    supports_empty_insert: bool
    supports_identity_columns: bool
    statement_compiler: Any
    ddl_compiler: Any
    type_compiler: Any
    preparer: Any
    reflection_options: Any
    construct_arguments: Any
    use_ansi: Any
    optimize_limits: Any
    exclude_tablespaces: Any
    def __init__(
        self,
        use_ansi: bool = True,
        optimize_limits: bool = False,
        use_binds_for_limits: Incomplete | None = None,
        use_nchar_for_unicode: bool = False,
        exclude_tablespaces=("SYSTEM", "SYSAUX"),
        **kwargs,
    ) -> None: ...
    implicit_returning: Any
    def initialize(self, connection) -> None: ...
    def do_release_savepoint(self, connection, name) -> None: ...
    def get_isolation_level(self, connection) -> None: ...
    def get_default_isolation_level(self, dbapi_conn): ...
    def set_isolation_level(self, connection, level) -> None: ...
    def has_table(self, connection, table_name, schema: Incomplete | None = None): ...  # type: ignore[override]
    def has_sequence(self, connection, sequence_name, schema: Incomplete | None = None): ...  # type: ignore[override]
    def get_schema_names(self, connection, **kw): ...
    def get_table_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_temp_table_names(self, connection, **kw): ...
    def get_view_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_sequence_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_table_options(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_columns(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_table_comment(
        self, connection, table_name, schema: Incomplete | None = None, resolve_synonyms: bool = False, dblink: str = "", **kw
    ): ...
    def get_indexes(
        self, connection, table_name, schema: Incomplete | None = None, resolve_synonyms: bool = False, dblink: str = "", **kw
    ): ...
    def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_view_definition(
        self, connection, view_name, schema: Incomplete | None = None, resolve_synonyms: bool = False, dblink: str = "", **kw
    ): ...
    def get_check_constraints(
        self, connection, table_name, schema: Incomplete | None = None, include_all: bool = False, **kw
    ): ...

class _OuterJoinColumn(ClauseElement):
    # Internal column-wrapper element for outer joins; presumably used for
    # Oracle's legacy (+) join syntax when use_ansi is off — confirm upstream.
    __visit_name__: str
    column: Any
    def __init__(self, column) -> None: ...

View File

@@ -1,127 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql import sqltypes
from . import base as oracle
from .base import OracleCompiler, OracleDialect, OracleExecutionContext
# cx_Oracle-specific type adapters; most only override get_dbapi_type()
# and/or the bind/result processors.

class _OracleInteger(sqltypes.Integer):
    def get_dbapi_type(self, dbapi): ...

class _OracleNumeric(sqltypes.Numeric):
    is_number: bool
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype) -> None: ...

class _OracleBinaryFloat(_OracleNumeric):
    def get_dbapi_type(self, dbapi): ...

class _OracleBINARY_FLOAT(_OracleBinaryFloat, oracle.BINARY_FLOAT): ...
class _OracleBINARY_DOUBLE(_OracleBinaryFloat, oracle.BINARY_DOUBLE): ...

class _OracleNUMBER(_OracleNumeric):
    is_number: bool

class _OracleDate(sqltypes.Date):
    def bind_processor(self, dialect) -> None: ...
    def result_processor(self, dialect, coltype): ...

class _OracleChar(sqltypes.CHAR):
    def get_dbapi_type(self, dbapi): ...

class _OracleNChar(sqltypes.NCHAR):
    def get_dbapi_type(self, dbapi): ...

class _OracleUnicodeStringNCHAR(oracle.NVARCHAR2):
    def get_dbapi_type(self, dbapi): ...

class _OracleUnicodeStringCHAR(sqltypes.Unicode):
    def get_dbapi_type(self, dbapi): ...

class _OracleUnicodeTextNCLOB(oracle.NCLOB):
    def get_dbapi_type(self, dbapi): ...

class _OracleUnicodeTextCLOB(sqltypes.UnicodeText):
    def get_dbapi_type(self, dbapi): ...

class _OracleText(sqltypes.Text):
    def get_dbapi_type(self, dbapi): ...

class _OracleLong(oracle.LONG):
    def get_dbapi_type(self, dbapi): ...

class _OracleString(sqltypes.String): ...

class _OracleEnum(sqltypes.Enum):
    def bind_processor(self, dialect): ...

class _OracleBinary(sqltypes.LargeBinary):
    def get_dbapi_type(self, dbapi): ...
    def bind_processor(self, dialect) -> None: ...
    def result_processor(self, dialect, coltype): ...

class _OracleInterval(oracle.INTERVAL):
    def get_dbapi_type(self, dbapi): ...

class _OracleRaw(oracle.RAW): ...

class _OracleRowid(oracle.ROWID):
    def get_dbapi_type(self, dbapi): ...

class OracleCompiler_cx_oracle(OracleCompiler):
    def bindparam_string(self, name, **kw): ...

class OracleExecutionContext_cx_oracle(OracleExecutionContext):
    # OUT-parameter bookkeeping (see get_out_parameter_values).
    out_parameters: Any
    include_set_input_sizes: Any
    def pre_exec(self) -> None: ...
    cursor_fetch_strategy: Any
    def post_exec(self) -> None: ...
    def create_cursor(self): ...
    def get_out_parameter_values(self, out_param_names): ...
class OracleDialect_cx_oracle(OracleDialect):
    # Dialect stub for the cx_Oracle DBAPI driver, including two-phase
    # transaction hooks and LOB/decimal coercion options.
    supports_statement_cache: bool
    statement_compiler: Any
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    use_setinputsizes: bool
    driver: str
    colspecs: Any
    execute_sequence_format: Any
    arraysize: Any
    encoding_errors: Any
    auto_convert_lobs: Any
    coerce_to_unicode: Any
    coerce_to_decimal: Any
    cx_oracle_ver: Any
    def __init__(
        self,
        auto_convert_lobs: bool = True,
        coerce_to_unicode: bool = True,
        coerce_to_decimal: bool = True,
        arraysize: int = 50,
        encoding_errors: Incomplete | None = None,
        threaded: Incomplete | None = None,
        **kwargs,
    ): ...
    @classmethod
    def dbapi(cls): ...
    def initialize(self, connection) -> None: ...
    def get_isolation_level(self, connection): ...
    def set_isolation_level(self, connection, level) -> None: ...
    def on_connect(self): ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...
    def create_xid(self): ...
    def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def do_begin_twophase(self, connection, xid) -> None: ...
    def do_prepare_twophase(self, connection, xid) -> None: ...
    def do_rollback_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_commit_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ...
    def do_recover_twophase(self, connection) -> None: ...

# Name looked up by SQLAlchemy's dialect plugin system for this driver module.
dialect = OracleDialect_cx_oracle

View File

@@ -1,100 +0,0 @@
import typing
from .array import ARRAY as ARRAY, All as All, Any as Any, array as array
from .base import (
BIGINT as BIGINT,
BIT as BIT,
BOOLEAN as BOOLEAN,
BYTEA as BYTEA,
CHAR as CHAR,
CIDR as CIDR,
DATE as DATE,
DOUBLE_PRECISION as DOUBLE_PRECISION,
ENUM as ENUM,
FLOAT as FLOAT,
INET as INET,
INTEGER as INTEGER,
INTERVAL as INTERVAL,
MACADDR as MACADDR,
MACADDR8 as MACADDR8,
MONEY as MONEY,
NUMERIC as NUMERIC,
OID as OID,
REAL as REAL,
REGCLASS as REGCLASS,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
TSVECTOR as TSVECTOR,
UUID as UUID,
VARCHAR as VARCHAR,
CreateEnumType as CreateEnumType,
DropEnumType as DropEnumType,
)
from .dml import Insert as Insert, insert as insert
from .ext import ExcludeConstraint as ExcludeConstraint, aggregate_order_by as aggregate_order_by, array_agg as array_agg
from .hstore import HSTORE as HSTORE, hstore as hstore
from .json import JSON as JSON, JSONB as JSONB
from .ranges import (
DATERANGE as DATERANGE,
INT4RANGE as INT4RANGE,
INT8RANGE as INT8RANGE,
NUMRANGE as NUMRANGE,
TSRANGE as TSRANGE,
TSTZRANGE as TSTZRANGE,
)
# Explicit public API of the PostgreSQL dialect package, mirroring the names
# re-exported in this module's imports.
__all__ = (
    "INTEGER",
    "BIGINT",
    "SMALLINT",
    "VARCHAR",
    "CHAR",
    "TEXT",
    "NUMERIC",
    "FLOAT",
    "REAL",
    "INET",
    "CIDR",
    "UUID",
    "BIT",
    "MACADDR",
    "MACADDR8",
    "MONEY",
    "OID",
    "REGCLASS",
    "DOUBLE_PRECISION",
    "TIMESTAMP",
    "TIME",
    "DATE",
    "BYTEA",
    "BOOLEAN",
    "INTERVAL",
    "ARRAY",
    "ENUM",
    "dialect",
    "array",
    "HSTORE",
    "hstore",
    "INT4RANGE",
    "INT8RANGE",
    "NUMRANGE",
    "DATERANGE",
    "TSVECTOR",
    "TSRANGE",
    "TSTZRANGE",
    "JSON",
    "JSONB",
    "Any",
    "All",
    "DropEnumType",
    "CreateEnumType",
    "ExcludeConstraint",
    "aggregate_order_by",
    "array_agg",
    "insert",
    "Insert",
)
# Assigned at runtime to the default PostgreSQL DBAPI dialect class.
dialect: typing.Any

View File

@@ -1,41 +0,0 @@
from _typeshed import Incomplete
from typing import Any as _Any
from ...sql import expression, sqltypes
# Comparison helpers for array expressions; names shadow typing.Any / All
# intentionally (the module imports typing's Any as _Any).
def Any(other, arrexpr, operator=...): ...
def All(other, arrexpr, operator=...): ...

class array(expression.ClauseList, expression.ColumnElement[_Any]):
    # Column-expression form of a PostgreSQL array literal.
    __visit_name__: str
    stringify_dialect: str
    inherit_cache: bool
    type: _Any
    def __init__(self, clauses, **kw) -> None: ...
    def self_group(self, against: Incomplete | None = None): ...

# Array comparison operator objects (custom operators at runtime).
CONTAINS: _Any
CONTAINED_BY: _Any
OVERLAP: _Any
class ARRAY(sqltypes.ARRAY):
    # PostgreSQL ARRAY type adding contains/contained_by/overlap comparisons.
    class Comparator(sqltypes.ARRAY.Comparator[_Any]):
        def contains(self, other, **kwargs): ...
        def contained_by(self, other): ...
        def overlap(self, other): ...
    comparator_factory: _Any
    item_type: _Any
    as_tuple: _Any
    dimensions: _Any
    zero_indexes: _Any
    def __init__(
        self, item_type, as_tuple: bool = False, dimensions: Incomplete | None = None, zero_indexes: bool = False
    ) -> None: ...
    @property
    def hashable(self): ...
    @property
    def python_type(self): ...
    def compare_values(self, x, y): ...
    def bind_expression(self, bindvalue): ...
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

View File

@@ -1,206 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import AdaptedConnection
from ...sql import sqltypes
from . import json
from .base import ENUM, INTERVAL, OID, REGCLASS, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer
# Per-type adapters that report the asyncpg-specific DBAPI type code
# and, where present, override bind/result processing.

class AsyncpgTime(sqltypes.Time):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgDate(sqltypes.Date):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgDateTime(sqltypes.DateTime):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgBoolean(sqltypes.Boolean):
    def get_dbapi_type(self, dbapi): ...

class AsyncPgInterval(INTERVAL):
    def get_dbapi_type(self, dbapi): ...
    @classmethod
    def adapt_emulated_to_native(cls, interval, **kw): ...

class AsyncPgEnum(ENUM):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgInteger(sqltypes.Integer):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgBigInteger(sqltypes.BigInteger):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgJSON(json.JSON):
    def get_dbapi_type(self, dbapi): ...
    def result_processor(self, dialect, coltype) -> None: ...

class AsyncpgJSONB(json.JSONB):
    def get_dbapi_type(self, dbapi): ...
    def result_processor(self, dialect, coltype) -> None: ...

class AsyncpgJSONIndexType(sqltypes.JSON.JSONIndexType):
    def get_dbapi_type(self, dbapi) -> None: ...

class AsyncpgJSONIntIndexType(sqltypes.JSON.JSONIntIndexType):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgJSONStrIndexType(sqltypes.JSON.JSONStrIndexType):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgJSONPathType(json.JSONPathType):
    def bind_processor(self, dialect): ...

class AsyncpgUUID(UUID):
    def get_dbapi_type(self, dbapi): ...
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class AsyncpgNumeric(sqltypes.Numeric):
    def get_dbapi_type(self, dbapi): ...
    def bind_processor(self, dialect) -> None: ...
    def result_processor(self, dialect, coltype): ...

class AsyncpgFloat(AsyncpgNumeric):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgREGCLASS(REGCLASS):
    def get_dbapi_type(self, dbapi): ...

class AsyncpgOID(OID):
    def get_dbapi_type(self, dbapi): ...

class PGExecutionContext_asyncpg(PGExecutionContext):
    def handle_dbapi_exception(self, e) -> None: ...
    exclude_set_input_sizes: Any
    def pre_exec(self) -> None: ...
    def create_server_side_cursor(self): ...

class PGCompiler_asyncpg(PGCompiler): ...
class PGIdentifierPreparer_asyncpg(PGIdentifierPreparer): ...
class AsyncAdapt_asyncpg_cursor:
    # Synchronous, PEP 249-style cursor facade over an asyncpg connection.
    server_side: bool
    description: Any
    arraysize: int
    rowcount: int
    def __init__(self, adapt_connection) -> None: ...
    def close(self) -> None: ...
    def execute(self, operation, parameters: Incomplete | None = None) -> None: ...
    def executemany(self, operation, seq_of_parameters): ...
    def setinputsizes(self, *inputsizes) -> None: ...
    def __iter__(self): ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def fetchall(self): ...

class AsyncAdapt_asyncpg_ss_cursor(AsyncAdapt_asyncpg_cursor):
    # Server-side (streaming) variant; also exposes async iteration.
    server_side: bool
    def __init__(self, adapt_connection) -> None: ...
    def close(self) -> None: ...
    def __aiter__(self): ...
    async def __anext__(self) -> None: ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def fetchall(self): ...
    def executemany(self, operation, seq_of_parameters) -> None: ...
class AsyncAdapt_asyncpg_connection(AdaptedConnection):
    # Synchronous, PEP 249-style connection facade over an asyncpg connection.
    await_: Any
    dbapi: Any
    isolation_level: str
    readonly: bool
    deferrable: bool
    def __init__(self, dbapi, connection, prepared_statement_cache_size: int = 100) -> None: ...
    @property
    def autocommit(self): ...
    @autocommit.setter
    def autocommit(self, value) -> None: ...
    def set_isolation_level(self, level) -> None: ...
    def cursor(self, server_side: bool = False): ...
    def rollback(self) -> None: ...
    def commit(self) -> None: ...
    def close(self) -> None: ...
    def terminate(self) -> None: ...

class AsyncAdaptFallback_asyncpg_connection(AsyncAdapt_asyncpg_connection):
    # Variant with a different await_ strategy; only the attribute differs here.
    await_: Any

class AsyncAdapt_asyncpg_dbapi:
    # PEP 249-shaped module facade: connect(), the standard exception
    # hierarchy, and DBAPI type-code constants.
    asyncpg: Any
    paramstyle: str
    def __init__(self, asyncpg) -> None: ...
    def connect(self, *arg, **kw): ...
    class Error(Exception): ...
    class Warning(Exception): ...
    class InterfaceError(Error): ...
    class DatabaseError(Error): ...
    class InternalError(DatabaseError): ...
    class OperationalError(DatabaseError): ...
    class ProgrammingError(DatabaseError): ...
    class IntegrityError(DatabaseError): ...
    class DataError(DatabaseError): ...
    class NotSupportedError(DatabaseError): ...
    class InternalServerError(InternalError): ...
    class InvalidCachedStatementError(NotSupportedError):
        def __init__(self, message) -> None: ...
    def Binary(self, value): ...
    # DBAPI type-code constants.
    STRING: Any
    TIMESTAMP: Any
    TIMESTAMP_W_TZ: Any
    TIME: Any
    TIME_W_TZ: Incomplete
    DATE: Any
    INTERVAL: Any
    NUMBER: Any
    FLOAT: Any
    BOOLEAN: Any
    INTEGER: Any
    BIGINTEGER: Any
    BYTES: Any
    DECIMAL: Any
    JSON: Any
    JSONB: Any
    ENUM: Any
    UUID: Any
    BYTEA: Any
    DATETIME: Any
    BINARY: Any
class PGDialect_asyncpg(PGDialect):
    # PostgreSQL dialect stub for the asyncpg asyncio driver.
    driver: str
    supports_statement_cache: bool
    supports_unicode_statements: bool
    supports_server_side_cursors: bool
    supports_unicode_binds: bool
    has_terminate: bool
    default_paramstyle: str
    supports_sane_multi_rowcount: bool
    statement_compiler: Any
    preparer: Any
    use_setinputsizes: bool
    use_native_uuid: bool
    colspecs: Any
    is_async: bool
    @classmethod
    def dbapi(cls): ...
    def set_isolation_level(self, connection, level) -> None: ...
    def set_readonly(self, connection, value) -> None: ...
    def get_readonly(self, connection): ...
    def set_deferrable(self, connection, value) -> None: ...
    def get_deferrable(self, connection): ...
    def create_connect_args(self, url): ...
    @classmethod
    def get_pool_class(cls, url): ...
    def is_disconnect(self, e, connection, cursor): ...
    def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ...
    async def setup_asyncpg_json_codec(self, conn): ...
    async def setup_asyncpg_jsonb_codec(self, conn): ...
    def on_connect(self): ...
    def get_driver_connection(self, connection): ...

# Name looked up by SQLAlchemy's dialect plugin system for this driver module.
dialect = PGDialect_asyncpg

View File

@@ -1,314 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import characteristics, default, reflection
from ...schema import _CreateDropBase
from ...sql import compiler, elements, sqltypes
from ...sql.ddl import DDLBase
from ...sql.sqltypes import (
BIGINT as BIGINT,
BOOLEAN as BOOLEAN,
CHAR as CHAR,
DATE as DATE,
FLOAT as FLOAT,
INTEGER as INTEGER,
NUMERIC as NUMERIC,
REAL as REAL,
SMALLINT as SMALLINT,
TEXT as TEXT,
VARCHAR as VARCHAR,
)
# Module-level lookup tables / regexes used by the PostgreSQL dialect.
IDX_USING: Any
AUTOCOMMIT_REGEXP: Any
RESERVED_WORDS: Any
# PostgreSQL-specific column types; the PG*-prefixed assignments are
# backwards-compatible aliases for older public names.

class BYTEA(sqltypes.LargeBinary):
    __visit_name__: str

class DOUBLE_PRECISION(sqltypes.Float):
    __visit_name__: str

class INET(sqltypes.TypeEngine):
    __visit_name__: str

PGInet = INET

class CIDR(sqltypes.TypeEngine):
    __visit_name__: str

PGCidr = CIDR

class MACADDR(sqltypes.TypeEngine):
    __visit_name__: str

PGMacAddr = MACADDR

class MACADDR8(sqltypes.TypeEngine):
    __visit_name__: str

PGMacAddr8 = MACADDR8

class MONEY(sqltypes.TypeEngine):
    __visit_name__: str

class OID(sqltypes.TypeEngine):
    __visit_name__: str

class REGCLASS(sqltypes.TypeEngine):
    __visit_name__: str

class TIMESTAMP(sqltypes.TIMESTAMP):
    precision: Any
    def __init__(self, timezone: bool = False, precision: Incomplete | None = None) -> None: ...

class TIME(sqltypes.TIME):
    precision: Any
    def __init__(self, timezone: bool = False, precision: Incomplete | None = None) -> None: ...

class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval):
    __visit_name__: str
    native: bool
    precision: Any
    fields: Any
    def __init__(self, precision: Incomplete | None = None, fields: Incomplete | None = None) -> None: ...
    @classmethod
    def adapt_emulated_to_native(cls, interval, **kw): ...
    def as_generic(self, allow_nulltype: bool = False): ...
    @property
    def python_type(self): ...
    def coerce_compared_value(self, op, value): ...

PGInterval = INTERVAL

class BIT(sqltypes.TypeEngine):
    __visit_name__: str
    length: Any
    varying: Any
    def __init__(self, length: Incomplete | None = None, varying: bool = False) -> None: ...

PGBit = BIT

class UUID(sqltypes.TypeEngine):
    __visit_name__: str
    # as_uuid presumably toggles uuid.UUID vs. string handling — confirm upstream.
    as_uuid: Any
    def __init__(self, as_uuid: bool = False) -> None: ...
    def coerce_compared_value(self, op, value): ...
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

PGUuid = UUID

class TSVECTOR(sqltypes.TypeEngine):
    __visit_name__: str
class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum):  # type: ignore[misc] # base classes incompatible
    # Native PostgreSQL enum type with explicit create()/drop() of the
    # underlying database type.
    native_enum: bool
    create_type: Any
    def __init__(self, *enums, **kw) -> None: ...
    @classmethod
    def adapt_emulated_to_native(cls, impl, **kw): ...
    def create(self, bind: Incomplete | None = None, checkfirst: bool = True) -> None: ...
    def drop(self, bind: Incomplete | None = None, checkfirst: bool = True) -> None: ...
    class EnumGenerator(DDLBase):
        # DDL visitor presumably backing ENUM.create() — confirm nesting upstream.
        checkfirst: Any
        def __init__(self, dialect, connection, checkfirst: bool = False, **kwargs) -> None: ...
        def visit_enum(self, enum) -> None: ...
    class EnumDropper(DDLBase):
        # DDL visitor presumably backing ENUM.drop().
        checkfirst: Any
        def __init__(self, dialect, connection, checkfirst: bool = False, **kwargs) -> None: ...
        def visit_enum(self, enum) -> None: ...
class _ColonCast(elements.Cast):
__visit_name__: str
type: Any
clause: Any
typeclause: Any
def __init__(self, expression, type_) -> None: ...
colspecs: Any
ischema_names: Any
class PGCompiler(compiler.SQLCompiler):
    # SQL statement compiler for the PostgreSQL dialect; the ``visit_*``
    # methods render dialect-specific constructs (JSON path operators,
    # ILIKE, regexp operators, ON CONFLICT clauses, ...).
    def visit_colon_cast(self, element, **kw): ...
    def visit_array(self, element, **kw): ...
    def visit_slice(self, element, **kw): ...
    def visit_json_getitem_op_binary(self, binary, operator, _cast_applied: bool = False, **kw): ...
    def visit_json_path_getitem_op_binary(self, binary, operator, _cast_applied: bool = False, **kw): ...
    def visit_getitem_binary(self, binary, operator, **kw): ...
    def visit_aggregate_order_by(self, element, **kw): ...
    def visit_match_op_binary(self, binary, operator, **kw): ...
    def visit_ilike_op_binary(self, binary, operator, **kw): ...
    def visit_not_ilike_op_binary(self, binary, operator, **kw): ...
    def visit_regexp_match_op_binary(self, binary, operator, **kw): ...
    def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ...
    def visit_regexp_replace_op_binary(self, binary, operator, **kw): ...
    def visit_empty_set_expr(self, element_types): ...
    def render_literal_value(self, value, type_): ...
    def visit_sequence(self, seq, **kw): ...
    def limit_clause(self, select, **kw): ...
    def format_from_hint_text(self, sqltext, table, hint, iscrud): ...
    def get_select_precolumns(self, select, **kw): ...
    def for_update_clause(self, select, **kw): ...
    def returning_clause(self, stmt, returning_cols): ...
    def visit_substring_func(self, func, **kw): ...
    def visit_on_conflict_do_nothing(self, on_conflict, **kw): ...
    def visit_on_conflict_do_update(self, on_conflict, **kw): ...
    def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ...
    def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ...
    def fetch_clause(self, select, **kw): ...
class PGDDLCompiler(compiler.DDLCompiler):
    # DDL compiler for PostgreSQL: CREATE/DROP TYPE for enums, EXCLUDE
    # constraints, computed columns, sequences, indexes.
    def get_column_specification(self, column, **kwargs): ...
    def visit_check_constraint(self, constraint): ...
    def visit_foreign_key_constraint(self, constraint) -> str: ...  # type: ignore[override] # Different params
    def visit_drop_table_comment(self, drop): ...
    def visit_create_enum_type(self, create): ...
    def visit_drop_enum_type(self, drop): ...
    def visit_create_index(self, create): ...
    def visit_drop_index(self, drop): ...
    def visit_exclude_constraint(self, constraint, **kw): ...
    def post_create_table(self, table): ...
    def visit_computed_column(self, generated): ...
    def visit_create_sequence(self, create, **kw): ...
class PGTypeCompiler(compiler.GenericTypeCompiler):
    # Renders PostgreSQL type names (one ``visit_*`` per SQL type).
    def visit_TSVECTOR(self, type_, **kw): ...
    def visit_INET(self, type_, **kw): ...
    def visit_CIDR(self, type_, **kw): ...
    def visit_MACADDR(self, type_, **kw): ...
    def visit_MACADDR8(self, type_, **kw): ...
    def visit_MONEY(self, type_, **kw): ...
    def visit_OID(self, type_, **kw): ...
    def visit_REGCLASS(self, type_, **kw): ...
    def visit_FLOAT(self, type_, **kw): ...
    def visit_DOUBLE_PRECISION(self, type_, **kw): ...
    def visit_BIGINT(self, type_, **kw): ...
    def visit_HSTORE(self, type_, **kw): ...
    def visit_JSON(self, type_, **kw): ...
    def visit_JSONB(self, type_, **kw): ...
    def visit_INT4RANGE(self, type_, **kw): ...
    def visit_INT8RANGE(self, type_, **kw): ...
    def visit_NUMRANGE(self, type_, **kw): ...
    def visit_DATERANGE(self, type_, **kw): ...
    def visit_TSRANGE(self, type_, **kw): ...
    def visit_TSTZRANGE(self, type_, **kw): ...
    def visit_datetime(self, type_, **kw): ...
    def visit_enum(self, type_, **kw): ...
    # Only visitor taking an extra keyword; presumably used when quoting the
    # enum's type name -- confirm against the implementation.
    def visit_ENUM(self, type_, identifier_preparer: Incomplete | None = None, **kw): ...
    def visit_TIMESTAMP(self, type_, **kw): ...
    def visit_TIME(self, type_, **kw): ...
    def visit_INTERVAL(self, type_, **kw): ...
    def visit_BIT(self, type_, **kw): ...
    def visit_UUID(self, type_, **kw): ...
    def visit_large_binary(self, type_, **kw): ...
    def visit_BYTEA(self, type_, **kw): ...
    def visit_ARRAY(self, type_, **kw): ...
class PGIdentifierPreparer(compiler.IdentifierPreparer):
    # Identifier quoting with the PostgreSQL reserved-word list.
    reserved_words: Any
    def format_type(self, type_, use_schema: bool = True): ...

class PGInspector(reflection.Inspector):
    # PostgreSQL-specific reflection extensions.
    def get_table_oid(self, table_name, schema: Incomplete | None = None): ...
    def get_enums(self, schema: Incomplete | None = None): ...
    def get_foreign_table_names(self, schema: Incomplete | None = None): ...
    def get_view_names(self, schema: Incomplete | None = None, include=("plain", "materialized")): ...

class CreateEnumType(_CreateDropBase):
    # DDL element for CREATE TYPE ... AS ENUM.
    __visit_name__: str

class DropEnumType(_CreateDropBase):
    # DDL element for DROP TYPE.
    __visit_name__: str

class PGExecutionContext(default.DefaultExecutionContext):
    def fire_sequence(self, seq, type_): ...
    def get_insert_default(self, column): ...
    def should_autocommit_text(self, statement): ...

class PGReadOnlyConnectionCharacteristic(characteristics.ConnectionCharacteristic):
    # Per-connection READ ONLY characteristic.
    transactional: bool
    def reset_characteristic(self, dialect, dbapi_conn) -> None: ...
    def set_characteristic(self, dialect, dbapi_conn, value) -> None: ...
    def get_characteristic(self, dialect, dbapi_conn): ...

class PGDeferrableConnectionCharacteristic(characteristics.ConnectionCharacteristic):
    # Per-connection DEFERRABLE characteristic.
    transactional: bool
    def reset_characteristic(self, dialect, dbapi_conn) -> None: ...
    def set_characteristic(self, dialect, dbapi_conn, value) -> None: ...
    def get_characteristic(self, dialect, dbapi_conn): ...
class PGDialect(default.DefaultDialect):
    # Base PostgreSQL dialect; driver-specific dialects (pg8000, psycopg2, ...)
    # subclass this.
    name: str
    supports_statement_cache: bool
    supports_alter: bool
    max_identifier_length: int
    supports_sane_rowcount: bool
    supports_native_enum: bool
    supports_native_boolean: bool
    supports_smallserial: bool
    supports_sequences: bool
    sequences_optional: bool
    preexecute_autoincrement_sequences: bool
    postfetch_lastrowid: bool
    supports_comments: bool
    supports_default_values: bool
    supports_default_metavalue: bool
    supports_empty_insert: bool
    supports_multivalues_insert: bool
    supports_identity_columns: bool
    default_paramstyle: str
    ischema_names: Any
    colspecs: Any
    statement_compiler: Any
    ddl_compiler: Any
    type_compiler: Any
    preparer: Any
    inspector: Any
    isolation_level: Any
    implicit_returning: bool
    full_returning: bool
    connection_characteristics: Any
    construct_arguments: Any
    reflection_options: Any
    def __init__(
        self,
        isolation_level: Incomplete | None = None,
        json_serializer: Incomplete | None = None,
        json_deserializer: Incomplete | None = None,
        **kwargs,
    ) -> None: ...
    def initialize(self, connection) -> None: ...
    def on_connect(self): ...
    # Isolation / readonly / deferrable connection controls.
    def set_isolation_level(self, connection, level) -> None: ...
    def get_isolation_level(self, connection): ...
    def set_readonly(self, connection, value) -> None: ...
    def get_readonly(self, connection) -> None: ...
    def set_deferrable(self, connection, value) -> None: ...
    def get_deferrable(self, connection) -> None: ...
    # Two-phase commit protocol hooks.
    def do_begin_twophase(self, connection, xid) -> None: ...
    def do_prepare_twophase(self, connection, xid) -> None: ...
    def do_rollback_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_commit_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_recover_twophase(self, connection): ...
    # Reflection API.
    def has_schema(self, connection, schema): ...
    def has_table(self, connection, table_name, schema: Incomplete | None = None): ...  # type: ignore[override]
    def has_sequence(self, connection, sequence_name, schema: Incomplete | None = None): ...  # type: ignore[override]
    def has_type(self, connection, type_name, schema: Incomplete | None = None): ...
    def get_table_oid(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_schema_names(self, connection, **kw): ...
    def get_table_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_view_names(self, connection, schema: Incomplete | None = None, include=("plain", "materialized"), **kw): ...
    def get_sequence_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_view_definition(self, connection, view_name, schema: Incomplete | None = None, **kw): ...
    def get_columns(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_foreign_keys(
        self, connection, table_name, schema: Incomplete | None = None, postgresql_ignore_search_path: bool = False, **kw
    ): ...
    # NOTE(review): unlike the sibling methods, ``schema`` has no default
    # here -- confirm whether that mirrors the runtime signature.
    def get_indexes(self, connection, table_name, schema, **kw): ...
    def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_table_comment(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_check_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw): ...

View File

@@ -1,56 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql.dml import Insert as StandardInsert
from ...sql.elements import ClauseElement
from ...util.langhelpers import memoized_property
# Stubs for PostgreSQL ``INSERT ... ON CONFLICT`` support (``postgresql.dml``).

class Insert(StandardInsert):
    stringify_dialect: str
    inherit_cache: bool
    @memoized_property
    def excluded(self): ...
    # NOTE(review): these methods are annotated ``-> None`` although
    # SQLAlchemy's generative API usually returns the statement for
    # chaining -- confirm against the runtime before relying on this.
    def on_conflict_do_update(
        self,
        constraint: Incomplete | None = None,
        index_elements: Incomplete | None = None,
        index_where: Incomplete | None = None,
        set_: Incomplete | None = None,
        where: Incomplete | None = None,
    ) -> None: ...
    def on_conflict_do_nothing(
        self,
        constraint: Incomplete | None = None,
        index_elements: Incomplete | None = None,
        index_where: Incomplete | None = None,
    ) -> None: ...

# Public constructor for the PostgreSQL-specific Insert construct.
insert: Any

class OnConflictClause(ClauseElement):
    # Common base for the two ON CONFLICT actions below.
    stringify_dialect: str
    constraint_target: Any
    inferred_target_elements: Any
    inferred_target_whereclause: Any
    def __init__(
        self,
        constraint: Incomplete | None = None,
        index_elements: Incomplete | None = None,
        index_where: Incomplete | None = None,
    ) -> None: ...

class OnConflictDoNothing(OnConflictClause):
    __visit_name__: str

class OnConflictDoUpdate(OnConflictClause):
    __visit_name__: str
    update_values_to_set: Any
    update_whereclause: Any
    def __init__(
        self,
        constraint: Incomplete | None = None,
        index_elements: Incomplete | None = None,
        index_where: Incomplete | None = None,
        set_: Incomplete | None = None,
        where: Incomplete | None = None,
    ) -> None: ...

View File

@@ -1,27 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql import expression
from ...sql.schema import ColumnCollectionConstraint
# Stubs for PostgreSQL extension constructs (``postgresql.ext``).

class aggregate_order_by(expression.ColumnElement[Any]):
    # Wraps an aggregate target with an ORDER BY, e.g. array_agg(x ORDER BY y).
    __visit_name__: str
    stringify_dialect: str
    target: Any
    type: Any
    order_by: Any
    def __init__(self, target, *order_by) -> None: ...
    def self_group(self, against: Incomplete | None = None): ...
    def get_children(self, **kwargs): ...

class ExcludeConstraint(ColumnCollectionConstraint):
    # Table-level EXCLUDE USING constraint.
    __visit_name__: str
    where: Any
    inherit_cache: bool
    create_drop_stringify_dialect: str
    operators: Any
    using: Any
    ops: Any
    def __init__(self, *elements, **kw) -> None: ...

def array_agg(*arg, **kw): ...

View File

@@ -1,67 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql import functions as sqlfunc, sqltypes
# Stubs for the PostgreSQL HSTORE type and its SQL helper functions.

class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine):
    __visit_name__: str
    hashable: bool
    text_type: Any
    def __init__(self, text_type: Incomplete | None = None) -> None: ...

    class Comparator(sqltypes.Indexable.Comparator[Any], sqltypes.Concatenable.Comparator[Any]):
        # Column-expression operators specific to HSTORE values.
        def has_key(self, other): ...
        def has_all(self, other): ...
        def has_any(self, other): ...
        def contains(self, other, **kwargs): ...
        def contained_by(self, other): ...
        def defined(self, key): ...
        def delete(self, key): ...
        def slice(self, array): ...
        def keys(self): ...
        def vals(self): ...
        def array(self): ...
        def matrix(self): ...

    comparator_factory: Any
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class hstore(sqlfunc.GenericFunction):
    # SQL ``hstore()`` constructor function.
    type: Any
    name: str
    inherit_cache: bool

# Internal GenericFunction wrappers backing the Comparator methods above.
class _HStoreDefinedFunction(sqlfunc.GenericFunction):
    type: Any
    name: str
    inherit_cache: bool

class _HStoreDeleteFunction(sqlfunc.GenericFunction):
    type: Any
    name: str
    inherit_cache: bool

class _HStoreSliceFunction(sqlfunc.GenericFunction):
    type: Any
    name: str
    inherit_cache: bool

class _HStoreKeysFunction(sqlfunc.GenericFunction):
    type: Any
    name: str
    inherit_cache: bool

class _HStoreValsFunction(sqlfunc.GenericFunction):
    type: Any
    name: str
    inherit_cache: bool

class _HStoreArrayFunction(sqlfunc.GenericFunction):
    type: Any
    name: str
    inherit_cache: bool

class _HStoreMatrixFunction(sqlfunc.GenericFunction):
    type: Any
    name: str
    inherit_cache: bool

View File

@@ -1,28 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql import sqltypes
# Stubs for PostgreSQL JSON / JSONB types.

class JSONPathType(sqltypes.JSON.JSONPathType):
    def bind_processor(self, dialect): ...
    def literal_processor(self, dialect): ...

class JSON(sqltypes.JSON):
    astext_type: Any
    def __init__(self, none_as_null: bool = False, astext_type: Incomplete | None = None) -> None: ...

    class Comparator(sqltypes.JSON.Comparator):
        # Adds the ``->>`` text-extraction accessor.
        @property
        def astext(self): ...

    comparator_factory: Any

class JSONB(JSON):
    __visit_name__: str

    class Comparator(JSON.Comparator):
        # JSONB-only containment/existence operators.
        def has_key(self, other): ...
        def has_all(self, other): ...
        def has_any(self, other): ...
        def contains(self, other, **kwargs): ...
        def contained_by(self, other): ...

    comparator_factory: Any

View File

@@ -1,134 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql import sqltypes
from .array import ARRAY as PGARRAY
from .base import ENUM, INTERVAL, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer
from .json import JSON, JSONB, JSONPathType
# Stubs for the pg8000 driver dialect. The leading underscore classes are
# driver-specific overrides of the generic PostgreSQL types.

class _PGNumeric(sqltypes.Numeric):
    def result_processor(self, dialect, coltype): ...

class _PGNumericNoBind(_PGNumeric):
    def bind_processor(self, dialect) -> None: ...

class _PGJSON(JSON):
    def result_processor(self, dialect, coltype) -> None: ...
    def get_dbapi_type(self, dbapi): ...

class _PGJSONB(JSONB):
    def result_processor(self, dialect, coltype) -> None: ...
    def get_dbapi_type(self, dbapi): ...

class _PGJSONIndexType(sqltypes.JSON.JSONIndexType):
    def get_dbapi_type(self, dbapi) -> None: ...

class _PGJSONIntIndexType(sqltypes.JSON.JSONIntIndexType):
    def get_dbapi_type(self, dbapi): ...

class _PGJSONStrIndexType(sqltypes.JSON.JSONStrIndexType):
    def get_dbapi_type(self, dbapi): ...

class _PGJSONPathType(JSONPathType):
    def get_dbapi_type(self, dbapi): ...

class _PGUUID(UUID):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class _PGEnum(ENUM):
    def get_dbapi_type(self, dbapi): ...

class _PGInterval(INTERVAL):
    def get_dbapi_type(self, dbapi): ...
    @classmethod
    def adapt_emulated_to_native(cls, interval, **kw): ...

class _PGTimeStamp(sqltypes.DateTime):
    def get_dbapi_type(self, dbapi): ...

class _PGTime(sqltypes.Time):
    def get_dbapi_type(self, dbapi): ...

class _PGInteger(sqltypes.Integer):
    def get_dbapi_type(self, dbapi): ...

class _PGSmallInteger(sqltypes.SmallInteger):
    def get_dbapi_type(self, dbapi): ...

class _PGNullType(sqltypes.NullType):
    def get_dbapi_type(self, dbapi): ...

class _PGBigInteger(sqltypes.BigInteger):
    def get_dbapi_type(self, dbapi): ...

class _PGBoolean(sqltypes.Boolean):
    def get_dbapi_type(self, dbapi): ...

class _PGARRAY(PGARRAY):
    def bind_expression(self, bindvalue): ...

class PGExecutionContext_pg8000(PGExecutionContext):
    def create_server_side_cursor(self): ...
    def pre_exec(self) -> None: ...

class ServerSideCursor:
    # DBAPI-cursor-shaped wrapper used for server-side cursor support.
    server_side: bool
    ident: Any
    cursor: Any
    def __init__(self, cursor, ident) -> None: ...
    @property
    def connection(self): ...
    @property
    def rowcount(self): ...
    @property
    def description(self): ...
    def execute(self, operation, args=(), stream: Incomplete | None = None): ...
    def executemany(self, operation, param_sets): ...
    def fetchone(self): ...
    def fetchmany(self, num: Incomplete | None = None): ...
    def fetchall(self): ...
    def close(self) -> None: ...
    def setinputsizes(self, *sizes) -> None: ...
    def setoutputsize(self, size, column: Incomplete | None = None) -> None: ...

class PGCompiler_pg8000(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw): ...

class PGIdentifierPreparer_pg8000(PGIdentifierPreparer):
    def __init__(self, *args, **kwargs) -> None: ...

class PGDialect_pg8000(PGDialect):
    driver: str
    supports_statement_cache: bool
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    default_paramstyle: str
    supports_sane_multi_rowcount: bool
    statement_compiler: Any
    preparer: Any
    supports_server_side_cursors: bool
    use_setinputsizes: bool
    description_encoding: Any
    colspecs: Any
    client_encoding: Any
    def __init__(self, client_encoding: Incomplete | None = None, **kwargs) -> None: ...
    @classmethod
    def dbapi(cls): ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...
    def set_isolation_level(self, connection, level) -> None: ...
    def set_readonly(self, connection, value) -> None: ...
    def get_readonly(self, connection): ...
    def set_deferrable(self, connection, value) -> None: ...
    def get_deferrable(self, connection): ...
    def set_client_encoding(self, connection, client_encoding) -> None: ...
    def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ...
    def do_begin_twophase(self, connection, xid) -> None: ...
    def do_prepare_twophase(self, connection, xid) -> None: ...
    def do_rollback_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_commit_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_recover_twophase(self, connection): ...
    def on_connect(self): ...

# Entry point alias expected by SQLAlchemy's dialect loader.
dialect = PGDialect_pg8000

View File

@@ -1,95 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql import sqltypes
from .array import ARRAY as PGARRAY
from .base import ENUM, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer
from .hstore import HSTORE
from .json import JSON, JSONB
# Stubs for the psycopg2 driver dialect.

logger: Any

class _PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect) -> None: ...
    def result_processor(self, dialect, coltype): ...

class _PGEnum(ENUM):
    def result_processor(self, dialect, coltype): ...

class _PGHStore(HSTORE):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class _PGARRAY(PGARRAY):
    def bind_expression(self, bindvalue): ...

class _PGJSON(JSON):
    def result_processor(self, dialect, coltype) -> None: ...

class _PGJSONB(JSONB):
    def result_processor(self, dialect, coltype) -> None: ...

class _PGUUID(UUID):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class PGExecutionContext_psycopg2(PGExecutionContext):
    def create_server_side_cursor(self): ...
    cursor_fetch_strategy: Any
    def post_exec(self) -> None: ...

class PGCompiler_psycopg2(PGCompiler): ...
class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer): ...

# Symbolic constants for the dialect's ``executemany_mode`` option.
EXECUTEMANY_PLAIN: Any
EXECUTEMANY_BATCH: Any
EXECUTEMANY_VALUES: Any
EXECUTEMANY_VALUES_PLUS_BATCH: Any

class PGDialect_psycopg2(PGDialect):
    driver: str
    supports_statement_cache: bool
    supports_unicode_statements: bool
    supports_server_side_cursors: bool
    default_paramstyle: str
    supports_sane_multi_rowcount: bool
    statement_compiler: Any
    preparer: Any
    psycopg2_version: Any
    engine_config_types: Any
    colspecs: Any
    use_native_unicode: Any
    use_native_hstore: Any
    use_native_uuid: Any
    supports_unicode_binds: Any
    client_encoding: Any
    executemany_mode: Any
    insert_executemany_returning: bool
    executemany_batch_page_size: Any
    executemany_values_page_size: Any
    def __init__(
        self,
        use_native_unicode: bool = True,
        client_encoding: Incomplete | None = None,
        use_native_hstore: bool = True,
        use_native_uuid: bool = True,
        executemany_mode: str = "values_only",
        executemany_batch_page_size: int = 100,
        executemany_values_page_size: int = 1000,
        **kwargs,
    ) -> None: ...
    def initialize(self, connection) -> None: ...
    @classmethod
    def dbapi(cls): ...
    def set_isolation_level(self, connection, level) -> None: ...
    def set_readonly(self, connection, value) -> None: ...
    def get_readonly(self, connection): ...
    def set_deferrable(self, connection, value) -> None: ...
    def get_deferrable(self, connection): ...
    def do_ping(self, dbapi_connection): ...
    def on_connect(self): ...
    def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...

# Entry point alias expected by SQLAlchemy's dialect loader.
dialect = PGDialect_psycopg2

View File

@@ -1,13 +0,0 @@
from typing import Any
from .psycopg2 import PGDialect_psycopg2
# Stubs for the psycopg2cffi driver dialect (thin variant of psycopg2).

class PGDialect_psycopg2cffi(PGDialect_psycopg2):
    driver: str
    supports_unicode_statements: bool
    supports_statement_cache: bool
    FEATURE_VERSION_MAP: Any
    @classmethod
    def dbapi(cls): ...

# Entry point alias expected by SQLAlchemy's dialect loader.
dialect = PGDialect_psycopg2cffi

View File

@@ -1,52 +0,0 @@
from typing import Any
from ...sql.sqltypes import Numeric
from .base import UUID, PGCompiler, PGDialect, PGIdentifierPreparer
from .hstore import HSTORE
from .json import JSON, JSONB
# Stubs for the PyGreSQL driver dialect.

class _PGNumeric(Numeric):
    def bind_processor(self, dialect) -> None: ...
    def result_processor(self, dialect, coltype): ...

class _PGHStore(HSTORE):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class _PGJSON(JSON):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class _PGJSONB(JSONB):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class _PGUUID(UUID):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class _PGCompiler(PGCompiler):
    def visit_mod_binary(self, binary, operator, **kw): ...
    def post_process_text(self, text): ...

class _PGIdentifierPreparer(PGIdentifierPreparer): ...

class PGDialect_pygresql(PGDialect):
    driver: str
    supports_statement_cache: bool
    statement_compiler: Any
    preparer: Any
    @classmethod
    def dbapi(cls): ...
    # Attributes below are set during initialization/connection.
    colspecs: Any
    dbapi_version: Any
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    has_native_hstore: Any
    has_native_json: Any
    has_native_uuid: Any
    def __init__(self, **kwargs) -> None: ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...

# Entry point alias expected by SQLAlchemy's dialect loader.
dialect = PGDialect_pygresql

View File

@@ -1,30 +0,0 @@
from typing import Any
from ...sql import sqltypes
from ...util.langhelpers import memoized_property
from .base import PGDialect, PGExecutionContext
# Stubs for the py-postgresql driver dialect.

class PGNumeric(sqltypes.Numeric):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class PGExecutionContext_pypostgresql(PGExecutionContext): ...

class PGDialect_pypostgresql(PGDialect):
    driver: str
    supports_statement_cache: bool
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    description_encoding: Any
    default_paramstyle: str
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    colspecs: Any
    @classmethod
    def dbapi(cls): ...
    @memoized_property
    def dbapi_exception_translation_map(self): ...
    def create_connect_args(self, url): ...
    def is_disconnect(self, e, connection, cursor): ...

# Entry point alias expected by SQLAlchemy's dialect loader.
dialect = PGDialect_pypostgresql

View File

@@ -1,36 +0,0 @@
from typing import Any
from ...sql import sqltypes
# Stubs for PostgreSQL range types (``postgresql.ranges``).

class RangeOperators:
    # Mixin providing range comparison operators; the concrete *RANGE types
    # below combine it with a TypeEngine marker class.
    class comparator_factory(sqltypes.Concatenable.Comparator[Any]):
        def __ne__(self, other): ...
        def contains(self, other, **kw): ...
        def contained_by(self, other): ...
        def overlaps(self, other): ...
        def strictly_left_of(self, other): ...
        # ``<<`` / ``>>`` are aliases of the two methods they follow.
        __lshift__: Any
        def strictly_right_of(self, other): ...
        __rshift__: Any
        def not_extend_right_of(self, other): ...
        def not_extend_left_of(self, other): ...
        def adjacent_to(self, other): ...
        def __add__(self, other): ...

class INT4RANGE(RangeOperators, sqltypes.TypeEngine):
    __visit_name__: str

class INT8RANGE(RangeOperators, sqltypes.TypeEngine):
    __visit_name__: str

class NUMRANGE(RangeOperators, sqltypes.TypeEngine):
    __visit_name__: str

class DATERANGE(RangeOperators, sqltypes.TypeEngine):
    __visit_name__: str

class TSRANGE(RangeOperators, sqltypes.TypeEngine):
    __visit_name__: str

class TSTZRANGE(RangeOperators, sqltypes.TypeEngine):
    __visit_name__: str

View File

@@ -1,45 +0,0 @@
from typing import Any
from .base import (
BLOB as BLOB,
BOOLEAN as BOOLEAN,
CHAR as CHAR,
DATE as DATE,
DATETIME as DATETIME,
DECIMAL as DECIMAL,
FLOAT as FLOAT,
INTEGER as INTEGER,
JSON as JSON,
NUMERIC as NUMERIC,
REAL as REAL,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
VARCHAR as VARCHAR,
)
from .dml import Insert as Insert, insert as insert
# Public API of the sqlite dialect package: re-exported types plus the
# dialect entry point.
__all__ = (
    "BLOB",
    "BOOLEAN",
    "CHAR",
    "DATE",
    "DATETIME",
    "DECIMAL",
    "FLOAT",
    "INTEGER",
    "JSON",
    "NUMERIC",
    "SMALLINT",
    "TEXT",
    "TIME",
    "TIMESTAMP",
    "VARCHAR",
    "REAL",
    "Insert",
    "insert",
    "dialect",
)

# Default dialect class for sqlite; assigned in the implementation module.
dialect: Any

View File

@@ -1,73 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import AdaptedConnection
from .base import SQLiteExecutionContext
from .pysqlite import SQLiteDialect_pysqlite
# Stubs for the aiosqlite async driver dialect. The AsyncAdapt_* classes
# present a synchronous DBAPI facade over the async driver.

class AsyncAdapt_aiosqlite_cursor:
    server_side: bool
    await_: Any
    arraysize: int
    rowcount: int
    description: Any
    def __init__(self, adapt_connection) -> None: ...
    def close(self) -> None: ...
    lastrowid: int
    def execute(self, operation, parameters: Incomplete | None = None) -> None: ...
    def executemany(self, operation, seq_of_parameters) -> None: ...
    def setinputsizes(self, *inputsizes) -> None: ...
    def __iter__(self): ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def fetchall(self): ...

class AsyncAdapt_aiosqlite_ss_cursor(AsyncAdapt_aiosqlite_cursor):
    # Server-side-cursor variant: overrides fetch/close behavior.
    server_side: bool
    def __init__(self, *arg, **kw) -> None: ...
    def close(self) -> None: ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def fetchall(self): ...

class AsyncAdapt_aiosqlite_connection(AdaptedConnection):
    await_: Any
    dbapi: Any
    def __init__(self, dbapi, connection) -> None: ...
    @property
    def isolation_level(self): ...
    @isolation_level.setter
    def isolation_level(self, value) -> None: ...
    def create_function(self, *args, **kw) -> None: ...
    def cursor(self, server_side: bool = False): ...
    def execute(self, *args, **kw): ...
    def rollback(self) -> None: ...
    def commit(self) -> None: ...
    def close(self) -> None: ...

class AsyncAdaptFallback_aiosqlite_connection(AsyncAdapt_aiosqlite_connection):
    await_: Any

class AsyncAdapt_aiosqlite_dbapi:
    # Module-level DBAPI facade wrapping both aiosqlite and sqlite3.
    aiosqlite: Any
    sqlite: Any
    paramstyle: str
    def __init__(self, aiosqlite, sqlite) -> None: ...
    def connect(self, *arg, **kw): ...

class SQLiteExecutionContext_aiosqlite(SQLiteExecutionContext):
    def create_server_side_cursor(self): ...

class SQLiteDialect_aiosqlite(SQLiteDialect_pysqlite):
    driver: str
    supports_statement_cache: bool
    is_async: bool
    supports_server_side_cursors: bool
    @classmethod
    def dbapi(cls): ...
    @classmethod
    def get_pool_class(cls, url): ...
    def is_disconnect(self, e, connection, cursor): ...
    def get_driver_connection(self, connection): ...

# Entry point alias expected by SQLAlchemy's dialect loader.
dialect = SQLiteDialect_aiosqlite

View File

@@ -1,141 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import default
from ...sql import compiler, sqltypes
from ...sql.sqltypes import (
BLOB as BLOB,
BOOLEAN as BOOLEAN,
CHAR as CHAR,
DECIMAL as DECIMAL,
FLOAT as FLOAT,
INTEGER as INTEGER,
NUMERIC as NUMERIC,
REAL as REAL,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIMESTAMP as TIMESTAMP,
VARCHAR as VARCHAR,
)
from .json import JSON as JSON
# Stubs for the core SQLite dialect (``dialects.sqlite.base``).

class _SQliteJson(JSON):  # name (including the "_SQlite" casing) mirrors upstream
    def result_processor(self, dialect, coltype): ...

class _DateTimeMixin:
    # Shared storage-format/regexp handling for the string-backed
    # DATETIME/DATE/TIME types below.
    def __init__(self, storage_format: Incomplete | None = None, regexp: Incomplete | None = None, **kw) -> None: ...
    @property
    def format_is_text_affinity(self): ...
    def adapt(self, cls, **kw): ...
    def literal_processor(self, dialect): ...

class DATETIME(_DateTimeMixin, sqltypes.DateTime):
    def __init__(self, *args, **kwargs) -> None: ...
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class DATE(_DateTimeMixin, sqltypes.Date):
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

class TIME(_DateTimeMixin, sqltypes.Time):
    def __init__(self, *args, **kwargs) -> None: ...
    def bind_processor(self, dialect): ...
    def result_processor(self, dialect, coltype): ...

# Dialect-level type maps; contents defined in the implementation module.
colspecs: Any
ischema_names: Any

class SQLiteCompiler(compiler.SQLCompiler):
    # SQL statement compiler for SQLite.
    extract_map: Any
    def visit_now_func(self, fn, **kw): ...
    def visit_localtimestamp_func(self, func, **kw): ...
    def visit_true(self, expr, **kw): ...
    def visit_false(self, expr, **kw): ...
    def visit_char_length_func(self, fn, **kw): ...
    def visit_cast(self, cast, **kwargs): ...
    def visit_extract(self, extract, **kw): ...
    def limit_clause(self, select, **kw): ...
    def for_update_clause(self, select, **kw): ...
    def visit_is_distinct_from_binary(self, binary, operator, **kw): ...
    def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ...
    def visit_json_getitem_op_binary(self, binary, operator, **kw): ...
    def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ...
    def visit_empty_set_op_expr(self, type_, expand_op): ...
    def visit_empty_set_expr(self, element_types): ...
    def visit_regexp_match_op_binary(self, binary, operator, **kw): ...
    def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ...
    def visit_on_conflict_do_nothing(self, on_conflict, **kw): ...
    def visit_on_conflict_do_update(self, on_conflict, **kw): ...

class SQLiteDDLCompiler(compiler.DDLCompiler):
    def get_column_specification(self, column, **kwargs): ...
    def visit_primary_key_constraint(self, constraint): ...
    def visit_unique_constraint(self, constraint): ...
    def visit_check_constraint(self, constraint): ...
    def visit_column_check_constraint(self, constraint): ...
    def visit_foreign_key_constraint(self, constraint): ...
    def define_constraint_remote_table(self, constraint, table, preparer): ...
    def visit_create_index(self, create, include_schema: bool = False, include_table_schema: bool = True): ...  # type: ignore[override]
    def post_create_table(self, table): ...

class SQLiteTypeCompiler(compiler.GenericTypeCompiler):
    def visit_large_binary(self, type_, **kw): ...
    def visit_DATETIME(self, type_, **kw): ...
    def visit_DATE(self, type_, **kw): ...
    def visit_TIME(self, type_, **kw): ...
    def visit_JSON(self, type_, **kw): ...

class SQLiteIdentifierPreparer(compiler.IdentifierPreparer):
    reserved_words: Any

class SQLiteExecutionContext(default.DefaultExecutionContext): ...

class SQLiteDialect(default.DefaultDialect):
    # Base SQLite dialect; driver dialects (pysqlite, aiosqlite, ...) subclass it.
    name: str
    supports_alter: bool
    supports_unicode_statements: bool
    supports_unicode_binds: bool
    supports_default_values: bool
    supports_default_metavalue: bool
    supports_empty_insert: bool
    supports_cast: bool
    supports_multivalues_insert: bool
    tuple_in_values: bool
    supports_statement_cache: bool
    default_paramstyle: str
    statement_compiler: Any
    ddl_compiler: Any
    type_compiler: Any
    preparer: Any
    ischema_names: Any
    colspecs: Any
    isolation_level: Any
    construct_arguments: Any
    native_datetime: Any
    def __init__(
        self,
        isolation_level: Incomplete | None = None,
        native_datetime: bool = False,
        json_serializer: Incomplete | None = None,
        json_deserializer: Incomplete | None = None,
        _json_serializer: Incomplete | None = None,
        _json_deserializer: Incomplete | None = None,
        **kwargs,
    ) -> None: ...
    def set_isolation_level(self, connection, level) -> None: ...
    def get_isolation_level(self, connection): ...
    def on_connect(self): ...
    # Reflection API.
    def get_schema_names(self, connection, **kw): ...
    def get_table_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_temp_table_names(self, connection, **kw): ...
    def get_temp_view_names(self, connection, **kw): ...
    def has_table(self, connection, table_name, schema: Incomplete | None = None): ...  # type: ignore[override]
    def get_view_names(self, connection, schema: Incomplete | None = None, **kw): ...
    def get_view_definition(self, connection, view_name, schema: Incomplete | None = None, **kw): ...
    def get_columns(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_check_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
    def get_indexes(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
View File

@@ -1,44 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql.dml import Insert as StandardInsert
from ...sql.elements import ClauseElement
from ...util.langhelpers import memoized_property
class Insert(StandardInsert):
stringify_dialect: str
inherit_cache: bool
@memoized_property
def excluded(self): ...
def on_conflict_do_update(
self,
index_elements: Incomplete | None = None,
index_where: Incomplete | None = None,
set_: Incomplete | None = None,
where: Incomplete | None = None,
) -> None: ...
def on_conflict_do_nothing(self, index_elements: Incomplete | None = None, index_where: Incomplete | None = None) -> None: ...
insert: Any
class OnConflictClause(ClauseElement):
stringify_dialect: str
constraint_target: Any
inferred_target_elements: Any
inferred_target_whereclause: Any
def __init__(self, index_elements: Incomplete | None = None, index_where: Incomplete | None = None) -> None: ...
class OnConflictDoNothing(OnConflictClause):
__visit_name__: str
class OnConflictDoUpdate(OnConflictClause):
__visit_name__: str
update_values_to_set: Any
update_whereclause: Any
def __init__(
self,
index_elements: Incomplete | None = None,
index_where: Incomplete | None = None,
set_: Incomplete | None = None,
where: Incomplete | None = None,
) -> None: ...

View File

@@ -1,10 +0,0 @@
from ...sql.sqltypes import JSON as _JSON
class JSON(_JSON): ...
class _FormatTypeMixin:
def bind_processor(self, dialect): ...
def literal_processor(self, dialect): ...
class JSONIndexType(_FormatTypeMixin, _JSON.JSONIndexType): ...
class JSONPathType(_FormatTypeMixin, _JSON.JSONPathType): ...

View File

@@ -1,16 +0,0 @@
from typing import Any
from .pysqlite import SQLiteDialect_pysqlite
class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite):
driver: str
supports_statement_cache: bool
pragmas: Any
@classmethod
def dbapi(cls): ...
@classmethod
def get_pool_class(cls, url): ...
def on_connect_url(self, url): ...
def create_connect_args(self, url): ...
dialect = SQLiteDialect_pysqlcipher

View File

@@ -1,28 +0,0 @@
from typing import Any
from .base import DATE, DATETIME, SQLiteDialect
class _SQLite_pysqliteTimeStamp(DATETIME):
def bind_processor(self, dialect): ...
def result_processor(self, dialect, coltype): ...
class _SQLite_pysqliteDate(DATE):
def bind_processor(self, dialect): ...
def result_processor(self, dialect, coltype): ...
class SQLiteDialect_pysqlite(SQLiteDialect):
default_paramstyle: str
supports_statement_cache: bool
colspecs: Any
description_encoding: Any
driver: str
@classmethod
def dbapi(cls): ...
@classmethod
def get_pool_class(cls, url): ...
def set_isolation_level(self, connection, level): ...
def on_connect(self): ...
def create_connect_args(self, url): ...
def is_disconnect(self, e, connection, cursor): ...
dialect = SQLiteDialect_pysqlite

View File

@@ -1,58 +0,0 @@
from typing import Any
from .base import (
BIGINT as BIGINT,
BINARY as BINARY,
BIT as BIT,
CHAR as CHAR,
DATE as DATE,
DATETIME as DATETIME,
FLOAT as FLOAT,
IMAGE as IMAGE,
INT as INT,
INTEGER as INTEGER,
MONEY as MONEY,
NCHAR as NCHAR,
NUMERIC as NUMERIC,
NVARCHAR as NVARCHAR,
SMALLINT as SMALLINT,
SMALLMONEY as SMALLMONEY,
TEXT as TEXT,
TIME as TIME,
TINYINT as TINYINT,
UNICHAR as UNICHAR,
UNITEXT as UNITEXT,
UNIVARCHAR as UNIVARCHAR,
VARBINARY as VARBINARY,
VARCHAR as VARCHAR,
)
__all__ = (
"CHAR",
"VARCHAR",
"TIME",
"NCHAR",
"NVARCHAR",
"TEXT",
"DATE",
"DATETIME",
"FLOAT",
"NUMERIC",
"BIGINT",
"INT",
"INTEGER",
"SMALLINT",
"BINARY",
"VARBINARY",
"UNITEXT",
"UNICHAR",
"UNIVARCHAR",
"IMAGE",
"BIT",
"MONEY",
"SMALLMONEY",
"TINYINT",
"dialect",
)
dialect: Any

View File

@@ -1,135 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...engine import default, reflection
from ...sql import compiler, sqltypes
from ...sql.sqltypes import (
BIGINT as BIGINT,
BINARY as BINARY,
CHAR as CHAR,
DATE as DATE,
DATETIME as DATETIME,
DECIMAL as DECIMAL,
FLOAT as FLOAT,
INT as INT,
INTEGER as INTEGER,
NCHAR as NCHAR,
NUMERIC as NUMERIC,
NVARCHAR as NVARCHAR,
REAL as REAL,
SMALLINT as SMALLINT,
TEXT as TEXT,
TIME as TIME,
TIMESTAMP as TIMESTAMP,
VARBINARY as VARBINARY,
VARCHAR as VARCHAR,
Unicode as Unicode,
)
RESERVED_WORDS: Any
class _SybaseUnitypeMixin:
def result_processor(self, dialect, coltype): ...
class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
__visit_name__: str
class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode):
__visit_name__: str
class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText):
__visit_name__: str
class TINYINT(sqltypes.Integer):
__visit_name__: str
class BIT(sqltypes.TypeEngine):
__visit_name__: str
class MONEY(sqltypes.TypeEngine):
__visit_name__: str
class SMALLMONEY(sqltypes.TypeEngine):
__visit_name__: str
class UNIQUEIDENTIFIER(sqltypes.TypeEngine):
__visit_name__: str
class IMAGE(sqltypes.LargeBinary):
__visit_name__: str
class SybaseTypeCompiler(compiler.GenericTypeCompiler):
def visit_large_binary(self, type_, **kw): ...
def visit_boolean(self, type_, **kw): ...
def visit_unicode(self, type_, **kw): ...
def visit_UNICHAR(self, type_, **kw): ...
def visit_UNIVARCHAR(self, type_, **kw): ...
def visit_UNITEXT(self, type_, **kw): ...
def visit_TINYINT(self, type_, **kw): ...
def visit_IMAGE(self, type_, **kw): ...
def visit_BIT(self, type_, **kw): ...
def visit_MONEY(self, type_, **kw): ...
def visit_SMALLMONEY(self, type_, **kw): ...
def visit_UNIQUEIDENTIFIER(self, type_, **kw): ...
ischema_names: Any
class SybaseInspector(reflection.Inspector):
def __init__(self, conn) -> None: ...
def get_table_id(self, table_name, schema: Incomplete | None = None): ...
class SybaseExecutionContext(default.DefaultExecutionContext):
def set_ddl_autocommit(self, connection, value) -> None: ...
def pre_exec(self) -> None: ...
def post_exec(self) -> None: ...
def get_lastrowid(self): ...
class SybaseSQLCompiler(compiler.SQLCompiler):
ansi_bind_rules: bool
extract_map: Any
def get_from_hint_text(self, table, text): ...
def limit_clause(self, select, **kw): ...
def visit_extract(self, extract, **kw): ...
def visit_now_func(self, fn, **kw): ...
def for_update_clause(self, select): ...
def order_by_clause(self, select, **kw): ...
def delete_table_clause(self, delete_stmt, from_table, extra_froms): ...
def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ...
class SybaseDDLCompiler(compiler.DDLCompiler):
def get_column_specification(self, column, **kwargs): ...
def visit_drop_index(self, drop): ...
class SybaseIdentifierPreparer(compiler.IdentifierPreparer):
reserved_words: Any
class SybaseDialect(default.DefaultDialect):
name: str
supports_unicode_statements: bool
supports_sane_rowcount: bool
supports_sane_multi_rowcount: bool
supports_statement_cache: bool
supports_native_boolean: bool
supports_unicode_binds: bool
postfetch_lastrowid: bool
colspecs: Any
ischema_names: Any
type_compiler: Any
statement_compiler: Any
ddl_compiler: Any
preparer: Any
inspector: Any
construct_arguments: Any
def __init__(self, *args, **kwargs) -> None: ...
max_identifier_length: int
def initialize(self, connection) -> None: ...
def get_table_id(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
def get_columns(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
def get_indexes(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = None, **kw): ...
def get_schema_names(self, connection, **kw): ...
def get_table_names(self, connection, schema: Incomplete | None = None, **kw): ...
def get_view_definition(self, connection, view_name, schema: Incomplete | None = None, **kw): ...
def get_view_names(self, connection, schema: Incomplete | None = None, **kw): ...
def has_table(self, connection, table_name, schema: Incomplete | None = None): ... # type: ignore[override]

View File

@@ -1,9 +0,0 @@
from ...connectors.mxodbc import MxODBCConnector
from .base import SybaseDialect, SybaseExecutionContext
class SybaseExecutionContext_mxodbc(SybaseExecutionContext): ...
class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect):
supports_statement_cache: bool
dialect = SybaseDialect_mxodbc

View File

@@ -1,19 +0,0 @@
from typing import Any
from ...connectors.pyodbc import PyODBCConnector
from ...sql import sqltypes
from .base import SybaseDialect, SybaseExecutionContext
class _SybNumeric_pyodbc(sqltypes.Numeric):
def bind_processor(self, dialect): ...
class SybaseExecutionContext_pyodbc(SybaseExecutionContext):
def set_ddl_autocommit(self, connection, value) -> None: ...
class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect):
supports_statement_cache: bool
colspecs: Any
@classmethod
def dbapi(cls): ...
dialect = SybaseDialect_pyodbc

View File

@@ -1,28 +0,0 @@
from _typeshed import Incomplete
from typing import Any
from ...sql import sqltypes
from .base import SybaseDialect, SybaseExecutionContext, SybaseSQLCompiler
class _SybNumeric(sqltypes.Numeric):
def result_processor(self, dialect, type_): ...
class SybaseExecutionContext_pysybase(SybaseExecutionContext):
def set_ddl_autocommit(self, dbapi_connection, value) -> None: ...
def pre_exec(self) -> None: ...
class SybaseSQLCompiler_pysybase(SybaseSQLCompiler):
def bindparam_string(self, name, **kw): ...
class SybaseDialect_pysybase(SybaseDialect):
driver: str
statement_compiler: Any
supports_statement_cache: bool
colspecs: Any
@classmethod
def dbapi(cls): ...
def create_connect_args(self, url): ...
def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
def is_disconnect(self, e, connection, cursor): ...
dialect = SybaseDialect_pysybase

View File

@@ -1,46 +0,0 @@
from ..sql import ddl as ddl
from . import events as events, util as util
from .base import (
Connection as Connection,
Engine as Engine,
NestedTransaction as NestedTransaction,
RootTransaction as RootTransaction,
Transaction as Transaction,
TwoPhaseTransaction as TwoPhaseTransaction,
)
from .create import create_engine as create_engine, engine_from_config as engine_from_config
from .cursor import (
BaseCursorResult as BaseCursorResult,
BufferedColumnResultProxy as BufferedColumnResultProxy,
BufferedColumnRow as BufferedColumnRow,
BufferedRowResultProxy as BufferedRowResultProxy,
CursorResult as CursorResult,
FullyBufferedResultProxy as FullyBufferedResultProxy,
LegacyCursorResult as LegacyCursorResult,
ResultProxy as ResultProxy,
)
from .interfaces import (
AdaptedConnection as AdaptedConnection,
Compiled as Compiled,
Connectable as Connectable,
CreateEnginePlugin as CreateEnginePlugin,
Dialect as Dialect,
ExceptionContext as ExceptionContext,
ExecutionContext as ExecutionContext,
TypeCompiler as TypeCompiler,
)
from .mock import create_mock_engine as create_mock_engine
from .reflection import Inspector as Inspector
from .result import (
ChunkedIteratorResult as ChunkedIteratorResult,
FrozenResult as FrozenResult,
IteratorResult as IteratorResult,
MappingResult as MappingResult,
MergedResult as MergedResult,
Result as Result,
ScalarResult as ScalarResult,
result_tuple as result_tuple,
)
from .row import BaseRow as BaseRow, LegacyRow as LegacyRow, Row as Row, RowMapping as RowMapping
from .url import URL as URL, make_url as make_url
from .util import connection_memoize as connection_memoize

View File

@@ -1,215 +0,0 @@
from _typeshed import Incomplete
from _typeshed.dbapi import DBAPIConnection
from abc import abstractmethod
from collections.abc import Callable, Mapping
from types import TracebackType
from typing import Any, TypeVar, overload
from typing_extensions import Concatenate, ParamSpec, Self, TypeAlias
from ..log import Identified, _EchoFlag, echo_property
from ..pool import Pool
from ..sql.compiler import Compiled
from ..sql.ddl import DDLElement
from ..sql.elements import ClauseElement
from ..sql.functions import FunctionElement
from ..sql.schema import DefaultGenerator
from .cursor import CursorResult
from .interfaces import Connectable as Connectable, Dialect, ExceptionContext
from .url import URL
from .util import TransactionalContext
_T = TypeVar("_T")
_P = ParamSpec("_P")
_Executable: TypeAlias = ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled
class Connection(Connectable):
engine: Engine
dialect: Dialect
should_close_with_result: bool
dispatch: Any
def __init__(
self,
engine: Engine,
connection: DBAPIConnection | None = None,
close_with_result: bool = False,
_branch_from: Incomplete | None = None,
_execution_options: Incomplete | None = None,
_dispatch: Incomplete | None = None,
_has_events: Incomplete | None = None,
_allow_revalidate: bool = True,
) -> None: ...
def schema_for_object(self, obj) -> str | None: ...
def __enter__(self) -> Self: ...
def __exit__(
self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
) -> None: ...
def execution_options(self, **opt): ...
def get_execution_options(self): ...
@property
def closed(self) -> bool: ...
@property
def invalidated(self) -> bool: ...
@property
def connection(self) -> DBAPIConnection: ...
def get_isolation_level(self): ...
@property
def default_isolation_level(self): ...
@property
def info(self): ...
def connect(self, close_with_result: bool = False): ... # type: ignore[override]
def invalidate(self, exception: Exception | None = None) -> None: ...
def detach(self) -> None: ...
def begin(self) -> Transaction: ...
def begin_nested(self) -> Transaction | None: ...
def begin_twophase(self, xid: Incomplete | None = None) -> TwoPhaseTransaction: ...
def recover_twophase(self): ...
def rollback_prepared(self, xid, recover: bool = False) -> None: ...
def commit_prepared(self, xid, recover: bool = False) -> None: ...
def in_transaction(self) -> bool: ...
def in_nested_transaction(self) -> bool: ...
def get_transaction(self) -> Transaction | None: ...
def get_nested_transaction(self) -> Transaction | None: ...
def close(self) -> None: ...
@overload
def scalar(self, object_: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> Any: ...
@overload
def scalar(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ...
def scalars(self, object_, *multiparams, **params): ...
@overload # type: ignore[override]
def execute(self, statement: _Executable, *multiparams: Mapping[str, Any], **params) -> CursorResult: ...
@overload
def execute(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params) -> CursorResult: ...
def exec_driver_sql(
self, statement: str, parameters: Incomplete | None = None, execution_options: Incomplete | None = None
): ...
def transaction(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ...
def run_callable(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ...
class ExceptionContextImpl(ExceptionContext):
engine: Any
connection: Any
sqlalchemy_exception: Any
original_exception: Any
execution_context: Any
statement: Any
parameters: Any
is_disconnect: Any
invalidate_pool_on_disconnect: Any
def __init__(
self,
exception,
sqlalchemy_exception,
engine,
connection,
cursor,
statement,
parameters,
context,
is_disconnect,
invalidate_pool_on_disconnect,
) -> None: ...
class Transaction(TransactionalContext):
def __init__(self, connection: Connection) -> None: ...
@property
def is_valid(self) -> bool: ...
def close(self) -> None: ...
def rollback(self) -> None: ...
def commit(self) -> None: ...
# The following field are technically not defined on Transaction, but on
# all sub-classes.
@property
@abstractmethod
def connection(self) -> Connection: ...
@property
@abstractmethod
def is_active(self) -> bool: ...
class MarkerTransaction(Transaction):
connection: Connection
@property
def is_active(self) -> bool: ...
class RootTransaction(Transaction):
connection: Connection
is_active: bool
class NestedTransaction(Transaction):
connection: Connection
is_active: bool
class TwoPhaseTransaction(RootTransaction):
xid: Any
def __init__(self, connection: Connection, xid) -> None: ...
def prepare(self) -> None: ...
class Engine(Connectable, Identified):
pool: Pool
url: URL
dialect: Dialect
logging_name: str # only exists if not None during initialization
echo: echo_property
hide_parameters: bool
def __init__(
self,
pool: Pool,
dialect: Dialect,
url: str | URL,
logging_name: str | None = None,
echo: _EchoFlag = None,
query_cache_size: int = 500,
execution_options: Mapping[str, Any] | None = None,
hide_parameters: bool = False,
) -> None: ...
@property
def engine(self) -> Engine: ...
def clear_compiled_cache(self) -> None: ...
def update_execution_options(self, **opt) -> None: ...
def execution_options(self, **opt): ...
def get_execution_options(self): ...
@property
def name(self) -> str: ...
@property
def driver(self): ...
def dispose(self, close: bool = True) -> None: ...
class _trans_ctx:
conn: Connection
transaction: Transaction
close_with_result: bool
def __init__(self, conn: Connection, transaction: Transaction, close_with_result: bool) -> None: ...
def __enter__(self) -> Connection: ...
def __exit__(
self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
) -> None: ...
def begin(self, close_with_result: bool = False) -> _trans_ctx: ...
def transaction(
self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs
) -> _T | None: ...
def run_callable(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ...
@overload # type: ignore[override]
def execute(self, statement: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> CursorResult: ...
@overload
def execute(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ...
@overload # type: ignore[override]
def scalar(self, statement: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> Any: ...
@overload
def scalar(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ...
def connect(self, close_with_result: bool = False) -> Connection: ... # type: ignore[override]
def table_names(self, schema: Incomplete | None = None, connection: Connection | None = None): ...
def has_table(self, table_name: str, schema: Incomplete | None = None) -> bool: ...
def raw_connection(self, _connection: Connection | None = None) -> DBAPIConnection: ...
class OptionEngineMixin:
url: URL
dialect: Dialect
logging_name: str
echo: bool
hide_parameters: bool
dispatch: Any
def __init__(self, proxied, execution_options) -> None: ...
pool: Pool
class OptionEngine(OptionEngineMixin, Engine): ... # type: ignore[misc]

View File

@@ -1,16 +0,0 @@
from abc import ABC, ABCMeta, abstractmethod
class ConnectionCharacteristic(ABC, metaclass=ABCMeta):
transactional: bool
@abstractmethod
def reset_characteristic(self, dialect, dbapi_conn): ...
@abstractmethod
def set_characteristic(self, dialect, dbapi_conn, value): ...
@abstractmethod
def get_characteristic(self, dialect, dbapi_conn): ...
class IsolationLevelCharacteristic(ConnectionCharacteristic):
transactional: bool
def reset_characteristic(self, dialect, dbapi_conn) -> None: ...
def set_characteristic(self, dialect, dbapi_conn, value) -> None: ...
def get_characteristic(self, dialect, dbapi_conn): ...

View File

@@ -1,22 +0,0 @@
from _typeshed import Incomplete
from collections.abc import Mapping
from typing import Any, overload
from typing_extensions import Literal
from ..future.engine import Engine as FutureEngine
from .base import Engine
from .mock import MockConnection
from .url import URL
# Further kwargs are forwarded to the engine, dialect, or pool.
@overload
def create_engine(url: URL | str, *, strategy: Literal["mock"], **kwargs) -> MockConnection: ... # type: ignore[misc]
@overload
def create_engine(
url: URL | str, *, module: Incomplete | None = ..., enable_from_linting: bool = ..., future: Literal[True], **kwargs
) -> FutureEngine: ...
@overload
def create_engine(
url: URL | str, *, module: Incomplete | None = ..., enable_from_linting: bool = ..., future: Literal[False] = False, **kwargs
) -> Engine: ...
def engine_from_config(configuration: Mapping[str, Any], prefix: str = "sqlalchemy.", **kwargs) -> Engine: ...

View File

@@ -1,133 +0,0 @@
from _typeshed import Incomplete
from abc import ABCMeta
from typing import Any
from ..sql.compiler import RM_NAME as RM_NAME, RM_OBJECTS as RM_OBJECTS, RM_RENDERED_NAME as RM_RENDERED_NAME, RM_TYPE as RM_TYPE
from ..util.langhelpers import memoized_property
from .result import Result, ResultMetaData
from .row import LegacyRow
MD_INDEX: int
MD_RESULT_MAP_INDEX: int
MD_OBJECTS: int
MD_LOOKUP_KEY: int
MD_RENDERED_NAME: int
MD_PROCESSOR: int
MD_UNTRANSLATED: int
class CursorResultMetaData(ResultMetaData):
returns_rows: bool
case_sensitive: Any
def __init__(self, parent, cursor_description) -> None: ...
class LegacyCursorResultMetaData(CursorResultMetaData): ...
class ResultFetchStrategy:
alternate_cursor_description: Any
def soft_close(self, result, dbapi_cursor) -> None: ...
def hard_close(self, result, dbapi_cursor) -> None: ...
def yield_per(self, result, dbapi_cursor, num) -> None: ...
def fetchone(self, result, dbapi_cursor, hard_close: bool = False) -> None: ...
def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = None) -> None: ...
def fetchall(self, result) -> None: ...
def handle_exception(self, result, dbapi_cursor, err) -> None: ...
class NoCursorFetchStrategy(ResultFetchStrategy):
def soft_close(self, result, dbapi_cursor) -> None: ...
def hard_close(self, result, dbapi_cursor) -> None: ...
def fetchone(self, result, dbapi_cursor, hard_close: bool = False): ...
def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = None): ...
def fetchall(self, result, dbapi_cursor): ...
class NoCursorDQLFetchStrategy(NoCursorFetchStrategy): ...
class NoCursorDMLFetchStrategy(NoCursorFetchStrategy): ...
class CursorFetchStrategy(ResultFetchStrategy):
def soft_close(self, result, dbapi_cursor) -> None: ...
def hard_close(self, result, dbapi_cursor) -> None: ...
def handle_exception(self, result, dbapi_cursor, err) -> None: ...
def yield_per(self, result, dbapi_cursor, num) -> None: ...
def fetchone(self, result, dbapi_cursor, hard_close: bool = False): ...
def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = None): ...
def fetchall(self, result, dbapi_cursor): ...
class BufferedRowCursorFetchStrategy(CursorFetchStrategy):
def __init__(
self, dbapi_cursor, execution_options, growth_factor: int = 5, initial_buffer: Incomplete | None = None
) -> None: ...
@classmethod
def create(cls, result): ...
def yield_per(self, result, dbapi_cursor, num) -> None: ...
def soft_close(self, result, dbapi_cursor) -> None: ...
def hard_close(self, result, dbapi_cursor) -> None: ...
def fetchone(self, result, dbapi_cursor, hard_close: bool = False): ...
def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = None): ...
def fetchall(self, result, dbapi_cursor): ...
class FullyBufferedCursorFetchStrategy(CursorFetchStrategy):
alternate_cursor_description: Any
def __init__(
self, dbapi_cursor, alternate_description: Incomplete | None = None, initial_buffer: Incomplete | None = None
) -> None: ...
def yield_per(self, result, dbapi_cursor, num) -> None: ...
def soft_close(self, result, dbapi_cursor) -> None: ...
def hard_close(self, result, dbapi_cursor) -> None: ...
def fetchone(self, result, dbapi_cursor, hard_close: bool = False): ...
def fetchmany(self, result, dbapi_cursor, size: Incomplete | None = None): ...
def fetchall(self, result, dbapi_cursor): ...
class _NoResultMetaData(ResultMetaData):
returns_rows: bool
@property
def keys(self) -> None: ...
class _LegacyNoResultMetaData(_NoResultMetaData):
@property
def keys(self): ...
class BaseCursorResult:
out_parameters: Any
closed: bool
context: Any
dialect: Any
cursor: Any
cursor_strategy: Any
connection: Any
def __init__(self, context, cursor_strategy, cursor_description): ...
@property
def inserted_primary_key_rows(self): ...
@property
def inserted_primary_key(self): ...
def last_updated_params(self): ...
def last_inserted_params(self): ...
@property
def returned_defaults_rows(self): ...
@property
def returned_defaults(self): ...
def lastrow_has_defaults(self): ...
def postfetch_cols(self): ...
def prefetch_cols(self): ...
def supports_sane_rowcount(self): ...
def supports_sane_multi_rowcount(self): ...
@memoized_property
def rowcount(self): ...
@property
def lastrowid(self): ...
@property
def returns_rows(self): ...
@property
def is_insert(self): ...
class CursorResult(BaseCursorResult, Result):
def merge(self, *others): ...
def close(self) -> None: ...
class LegacyCursorResult(CursorResult):
def close(self) -> None: ...
ResultProxy = LegacyCursorResult
class BufferedRowResultProxy(ResultProxy): ...
class FullyBufferedResultProxy(ResultProxy): ...
class BufferedColumnRow(LegacyRow, metaclass=ABCMeta): ...
class BufferedColumnResultProxy(ResultProxy): ...

View File

@@ -1,222 +0,0 @@
from _typeshed import Incomplete
from typing import Any, ClassVar
from ..sql import sqltypes
from ..util.langhelpers import memoized_property
from . import interfaces
AUTOCOMMIT_REGEXP: Any
SERVER_SIDE_CURSOR_RE: Any
CACHE_HIT: Any
CACHE_MISS: Any
CACHING_DISABLED: Any
NO_CACHE_KEY: Any
NO_DIALECT_SUPPORT: Any
class DefaultDialect(interfaces.Dialect): # type: ignore[misc]
execution_ctx_cls: ClassVar[type[interfaces.ExecutionContext]]
statement_compiler: Any
ddl_compiler: Any
type_compiler: Any
preparer: Any
supports_alter: bool
supports_comments: bool
inline_comments: bool
use_setinputsizes: bool
supports_statement_cache: bool
default_sequence_base: int
execute_sequence_format: Any
supports_schemas: bool
supports_views: bool
supports_sequences: bool
sequences_optional: bool
preexecute_autoincrement_sequences: bool
supports_identity_columns: bool
postfetch_lastrowid: bool
implicit_returning: bool
full_returning: bool
insert_executemany_returning: bool
cte_follows_insert: bool
supports_native_enum: bool
supports_native_boolean: bool
non_native_boolean_check_constraint: bool
supports_simple_order_by_label: bool
tuple_in_values: bool
connection_characteristics: Any
engine_config_types: Any
supports_native_decimal: bool
supports_unicode_statements: bool
supports_unicode_binds: bool
returns_unicode_strings: Any
description_encoding: Any
name: str
max_identifier_length: int
isolation_level: Any
max_index_name_length: Any
max_constraint_name_length: Any
supports_sane_rowcount: bool
supports_sane_multi_rowcount: bool
colspecs: Any
default_paramstyle: str
supports_default_values: bool
supports_default_metavalue: bool
supports_empty_insert: bool
supports_multivalues_insert: bool
supports_is_distinct_from: bool
supports_server_side_cursors: bool
server_side_cursors: bool
supports_for_update_of: bool
server_version_info: Any
default_schema_name: Any
construct_arguments: Any
requires_name_normalize: bool
reflection_options: Any
dbapi_exception_translation_map: Any
is_async: bool
CACHE_HIT: Any
CACHE_MISS: Any
CACHING_DISABLED: Any
NO_CACHE_KEY: Any
NO_DIALECT_SUPPORT: Any
has_terminate: bool
convert_unicode: Any
encoding: Any
positional: bool
dbapi: Any
paramstyle: Any
identifier_preparer: Any
case_sensitive: Any
label_length: Any
compiler_linting: Any
def __init__(
self,
convert_unicode: bool = False,
encoding: str = "utf-8",
paramstyle: Incomplete | None = None,
dbapi: Incomplete | None = None,
implicit_returning: Incomplete | None = None,
case_sensitive: bool = True,
supports_native_boolean: Incomplete | None = None,
max_identifier_length: Incomplete | None = None,
label_length: Incomplete | None = None,
compiler_linting=0,
server_side_cursors: bool = False,
**kwargs,
) -> None: ...
@property
def dialect_description(self): ...
@property
def supports_sane_rowcount_returning(self): ...
@classmethod
def get_pool_class(cls, url): ...
def get_dialect_pool_class(self, url): ...
@classmethod
def load_provisioning(cls) -> None: ...
default_isolation_level: Any
def initialize(self, connection) -> None: ...
def on_connect(self) -> None: ...
def get_default_isolation_level(self, dbapi_conn): ...
def type_descriptor(self, typeobj): ...
def has_index(self, connection, table_name, index_name, schema: Incomplete | None = None): ...
def validate_identifier(self, ident) -> None: ...
def connect(self, *cargs, **cparams): ...
def create_connect_args(self, url): ...
def set_engine_execution_options(self, engine, opts) -> None: ...
def set_connection_execution_options(self, connection, opts) -> None: ...
def do_begin(self, dbapi_connection) -> None: ...
def do_rollback(self, dbapi_connection) -> None: ...
def do_commit(self, dbapi_connection) -> None: ...
def do_close(self, dbapi_connection) -> None: ...
def do_ping(self, dbapi_connection): ...
def create_xid(self): ...
def do_savepoint(self, connection, name) -> None: ...
def do_rollback_to_savepoint(self, connection, name) -> None: ...
def do_release_savepoint(self, connection, name) -> None: ...
def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
def do_execute(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
def do_execute_no_params(self, cursor, statement, context: Incomplete | None = None) -> None: ... # type: ignore[override]
def is_disconnect(self, e, connection, cursor): ...
def reset_isolation_level(self, dbapi_conn) -> None: ...
def normalize_name(self, name): ...
def denormalize_name(self, name): ...
def get_driver_connection(self, connection): ...
class _RendersLiteral:
def literal_processor(self, dialect): ...
class _StrDateTime(_RendersLiteral, sqltypes.DateTime): ...
class _StrDate(_RendersLiteral, sqltypes.Date): ...
class _StrTime(_RendersLiteral, sqltypes.Time): ...
class StrCompileDialect(DefaultDialect): # type: ignore[misc]
statement_compiler: Any
ddl_compiler: Any
type_compiler: Any
preparer: Any
supports_statement_cache: bool
supports_identity_columns: bool
supports_sequences: bool
sequences_optional: bool
preexecute_autoincrement_sequences: bool
implicit_returning: bool
supports_native_boolean: bool
supports_multivalues_insert: bool
supports_simple_order_by_label: bool
colspecs: Any
class DefaultExecutionContext(interfaces.ExecutionContext):
    """Default ``ExecutionContext`` implementation.

    Holds per-execution state (statement kind, compiled object, parameter
    and caching info) for a single cursor execution.
    """
    # Flags identifying the kind of statement being executed.
    isinsert: bool
    isupdate: bool
    isdelete: bool
    is_crud: bool
    is_text: bool
    isddl: bool
    executemany: bool
    # Compiled statement / raw statement and derived execution state.
    compiled: Any
    statement: Any
    result_column_struct: Any
    returned_default_rows: Any
    execution_options: Any
    include_set_input_sizes: Any
    exclude_set_input_sizes: Any
    cursor_fetch_strategy: Any
    # Statement-cache bookkeeping.
    cache_stats: Any
    invoked_statement: Any
    cache_hit: Any
    @memoized_property
    def identifier_preparer(self): ...
    @memoized_property
    def engine(self): ...
    @memoized_property
    def postfetch_cols(self): ...
    @memoized_property
    def prefetch_cols(self): ...
    @memoized_property
    def returning_cols(self) -> None: ...
    @memoized_property
    def no_parameters(self): ...
    @memoized_property
    def should_autocommit(self): ...
    @property
    def connection(self): ...
    def should_autocommit_text(self, statement): ...
    def create_cursor(self): ...
    def create_default_cursor(self): ...
    def create_server_side_cursor(self) -> None: ...
    def pre_exec(self) -> None: ...
    def get_out_parameter_values(self, names) -> None: ...
    def post_exec(self) -> None: ...
    def get_result_processor(self, type_, colname, coltype): ...
    def get_lastrowid(self): ...
    def handle_dbapi_exception(self, e) -> None: ...
    @property
    def rowcount(self): ...
    def supports_sane_rowcount(self): ...
    def supports_sane_multi_rowcount(self): ...
    @memoized_property
    def inserted_primary_key_rows(self): ...
    # Parameters of the statement currently being executed.
    current_parameters: Any
    def get_current_parameters(self, isolate_multiinsert_groups: bool = True): ...
    def get_insert_default(self, column): ...
    def get_update_default(self, column): ...

View File

@@ -1,29 +0,0 @@
from .. import event as event
class ConnectionEvents(event.Events):
    """Event hooks for Connection/Engine objects.

    Each method declares the listener signature for the event of the same
    name; stubs carry signatures only, no behavior.
    """
    def before_execute(self, conn, clauseelement, multiparams, params, execution_options) -> None: ...
    def after_execute(self, conn, clauseelement, multiparams, params, execution_options, result) -> None: ...
    def before_cursor_execute(self, conn, cursor, statement, parameters, context, executemany) -> None: ...
    def after_cursor_execute(self, conn, cursor, statement, parameters, context, executemany) -> None: ...
    def handle_error(self, exception_context) -> None: ...
    def engine_connect(self, conn, branch) -> None: ...
    def set_connection_execution_options(self, conn, opts) -> None: ...
    def set_engine_execution_options(self, engine, opts) -> None: ...
    def engine_disposed(self, engine) -> None: ...
    # Transaction-lifecycle events.
    def begin(self, conn) -> None: ...
    def rollback(self, conn) -> None: ...
    def commit(self, conn) -> None: ...
    def savepoint(self, conn, name) -> None: ...
    def rollback_savepoint(self, conn, name, context) -> None: ...
    def release_savepoint(self, conn, name, context) -> None: ...
    # Two-phase-commit events.
    def begin_twophase(self, conn, xid) -> None: ...
    def prepare_twophase(self, conn, xid) -> None: ...
    def rollback_twophase(self, conn, xid, is_prepared) -> None: ...
    def commit_twophase(self, conn, xid, is_prepared) -> None: ...
class DialectEvents(event.Events):
    """Event hooks for Dialect-level (DBAPI) execution steps."""
    def do_connect(self, dialect, conn_rec, cargs, cparams) -> None: ...
    def do_executemany(self, cursor, statement, parameters, context) -> None: ...
    def do_execute_no_params(self, cursor, statement, context) -> None: ...
    def do_execute(self, cursor, statement, parameters, context) -> None: ...
    def do_setinputsizes(self, inputsizes, cursor, statement, parameters, context) -> None: ...

View File

@@ -1,170 +0,0 @@
from _typeshed import Incomplete
from _typeshed.dbapi import DBAPIConnection, DBAPICursor
from abc import abstractmethod
from collections.abc import Callable, Collection, Mapping
from typing import Any, ClassVar, overload
from ..exc import StatementError
from ..sql.compiler import Compiled as Compiled, IdentifierPreparer, TypeCompiler as TypeCompiler
from ..sql.ddl import DDLElement
from ..sql.elements import ClauseElement
from ..sql.functions import FunctionElement
from ..sql.schema import DefaultGenerator
from .base import Connection, Engine
from .cursor import CursorResult
from .url import URL
class Dialect:
    """Interface for a database/DBAPI-specific behavior implementation.

    Concrete dialects supply SQL compilation rules, schema reflection and
    DBAPI interaction for one backend.
    """
    # Sub-classes are required to have the following attributes:
    name: str
    driver: str
    positional: bool
    paramstyle: str
    encoding: str
    statement_compiler: Compiled
    ddl_compiler: Compiled
    server_version_info: tuple[Any, ...]
    # Only available on supporting dialects:
    # default_schema_name: str
    execution_ctx_cls: ClassVar[type[ExecutionContext]]
    execute_sequence_format: type[tuple[Any] | list[Any]]
    preparer: IdentifierPreparer
    supports_alter: bool
    max_identifier_length: int
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    preexecute_autoincrement_sequences: bool
    implicit_returning: bool
    colspecs: dict[Any, Any]
    supports_default_values: bool
    supports_sequences: bool
    sequences_optional: bool
    supports_native_enum: bool
    supports_native_boolean: bool
    dbapi_exception_translation_map: dict[Any, Any]
    supports_statement_cache: bool
    dispatch: Incomplete
    @abstractmethod
    def create_connect_args(self, url: URL) -> None: ...
    def initialize(self, connection) -> None: ...
    def on_connect_url(self, url) -> Callable[[DBAPIConnection], object] | None: ...
    def on_connect(self) -> Callable[[DBAPIConnection], object] | None: ...
    # The following methods all raise NotImplementedError, but not all
    # dialects implement all methods, which is why they can't be marked
    # as abstract.
    @classmethod
    def type_descriptor(cls, typeobj) -> None: ...
    # Schema reflection hooks.
    def get_columns(self, connection, table_name, schema: Incomplete | None = None, **kw) -> None: ...
    def get_pk_constraint(self, connection, table_name, schema: Incomplete | None = None, **kw) -> None: ...
    def get_foreign_keys(self, connection, table_name, schema: Incomplete | None = None, **kw) -> None: ...
    def get_table_names(self, connection, schema: Incomplete | None = None, **kw) -> None: ...
    def get_temp_table_names(self, connection, schema: Incomplete | None = None, **kw) -> None: ...
    def get_view_names(self, connection, schema: Incomplete | None = None, **kw) -> None: ...
    def get_sequence_names(self, connection, schema: Incomplete | None = None, **kw) -> None: ...
    def get_temp_view_names(self, connection, schema: Incomplete | None = None, **kw) -> None: ...
    def get_view_definition(self, connection, view_name, schema: Incomplete | None = None, **kw) -> None: ...
    def get_indexes(self, connection, table_name, schema: Incomplete | None = None, **kw) -> None: ...
    def get_unique_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw) -> None: ...
    def get_check_constraints(self, connection, table_name, schema: Incomplete | None = None, **kw) -> None: ...
    def get_table_comment(self, connection, table_name, schema: Incomplete | None = None, **kw) -> None: ...
    def normalize_name(self, name) -> None: ...
    def denormalize_name(self, name) -> None: ...
    def has_table(self, connection, table_name, schema: Incomplete | None = None, **kw) -> None: ...
    def has_index(self, connection, table_name, index_name, schema: Incomplete | None = None) -> None: ...
    def has_sequence(self, connection, sequence_name, schema: Incomplete | None = None, **kw) -> None: ...
    # DBAPI-level transaction and execution hooks.
    def do_begin(self, dbapi_connection) -> None: ...
    def do_rollback(self, dbapi_connection) -> None: ...
    def do_commit(self, dbapi_connection) -> None: ...
    def do_terminate(self, dbapi_connection) -> None: ...
    def do_close(self, dbapi_connection) -> None: ...
    def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ...
    def create_xid(self) -> None: ...
    def do_savepoint(self, connection, name) -> None: ...
    def do_rollback_to_savepoint(self, connection, name) -> None: ...
    def do_release_savepoint(self, connection, name) -> None: ...
    def do_begin_twophase(self, connection, xid) -> None: ...
    def do_prepare_twophase(self, connection, xid) -> None: ...
    def do_rollback_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_commit_twophase(self, connection, xid, is_prepared: bool = True, recover: bool = False) -> None: ...
    def do_recover_twophase(self, connection) -> None: ...
    def do_executemany(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def do_execute(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def do_execute_no_params(self, cursor, statement, parameters, context: Incomplete | None = None) -> None: ...
    def is_disconnect(self, e, connection, cursor) -> None: ...
    def connect(self, *cargs, **cparams) -> DBAPIConnection: ...
    def reset_isolation_level(self, dbapi_conn) -> None: ...
    def set_isolation_level(self, dbapi_conn, level) -> None: ...
    def get_isolation_level(self, dbapi_conn) -> None: ...
    def get_default_isolation_level(self, dbapi_conn) -> None: ...
    @classmethod
    def get_dialect_cls(cls, url): ...
    @classmethod
    def load_provisioning(cls) -> None: ...
    @classmethod
    def engine_created(cls, engine) -> None: ...
    def get_driver_connection(self, connection) -> None: ...
class CreateEnginePlugin:
    """Hook object allowing third parties to customize create_engine().

    Instantiated with the URL and keyword arguments; the hook methods may
    inspect/adjust dialect and pool arguments before the engine is built.
    """
    url: URL
    def __init__(self, url: URL, kwargs) -> None: ...
    def update_url(self, url) -> None: ...
    def handle_dialect_kwargs(self, dialect_cls, dialect_args) -> None: ...
    def handle_pool_kwargs(self, pool_cls, pool_args) -> None: ...
    def engine_created(self, engine) -> None: ...
class ExecutionContext:
    """Interface for per-execution state between a Dialect and a cursor."""
    def create_cursor(self) -> None: ...
    def pre_exec(self) -> None: ...
    def get_out_parameter_values(self, out_param_names) -> None: ...
    def post_exec(self) -> None: ...
    def handle_dbapi_exception(self, e) -> None: ...
    def should_autocommit_text(self, statement) -> None: ...
    def lastrow_has_defaults(self) -> None: ...
    def get_rowcount(self) -> None: ...
class Connectable:
    """Abstract base for objects that can execute SQL (Engine, Connection).

    ``execute``/``scalar`` are overloaded: compiled/ClauseElement objects
    take mapping parameters only; raw strings also accept positional ones.
    """
    dispatch: Incomplete
    @abstractmethod
    def connect(self, **kwargs) -> Connection: ...
    @property
    def engine(self) -> Engine | None: ...
    @abstractmethod
    @overload
    def execute(
        self,
        object_: ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled,
        *multiparams: Mapping[str, Any],
        **params: Any,
    ) -> CursorResult: ...
    @abstractmethod
    @overload
    def execute(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ...
    @abstractmethod
    @overload
    def scalar(
        self,
        object_: ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled,
        *multiparams: Mapping[str, Any],
        **params: Any,
    ) -> Any: ...
    @abstractmethod
    @overload
    def scalar(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ...
class ExceptionContext:
    """State passed to handle_error listeners when a DBAPI error occurs.

    All attributes may be None when the corresponding piece of context was
    not available at the point of failure.
    """
    connection: Connection | None
    engine: Engine | None
    cursor: DBAPICursor | None
    statement: str | None
    parameters: Collection[Any] | None
    original_exception: BaseException | None
    sqlalchemy_exception: StatementError | None
    chained_exception: BaseException | None
    execution_context: ExecutionContext | None
    is_disconnect: bool | None
    # Listeners may set this to control pool invalidation on disconnect.
    invalidate_pool_on_disconnect: bool
class AdaptedConnection:
    """Wrapper interface around an underlying driver connection.

    NOTE(review): ``run_async`` suggests this is the adapter used for
    asyncio drivers — confirm against sqlalchemy.engine.interfaces.
    """
    @property
    def driver_connection(self): ...
    def run_async(self, fn): ...

View File

@@ -1,32 +0,0 @@
from abc import abstractmethod
from collections.abc import Mapping
from typing import Any, overload
from typing_extensions import Self
from .base import _Executable
from .cursor import CursorResult
from .interfaces import Connectable, Dialect
from .url import URL
class MockConnection(Connectable):
    """Connectable that forwards executions to a user-supplied callable.

    Built by ``create_mock_engine``; acts as its own ``engine``.
    """
    def __init__(self, dialect: Dialect, execute) -> None: ...
    @property
    def engine(self) -> Self: ... # type: ignore[override]
    @property
    def dialect(self) -> Dialect: ...
    @property
    def name(self) -> str: ...
    def schema_for_object(self, obj): ...
    def connect(self, **kwargs): ...
    def execution_options(self, **kw): ...
    def compiler(self, statement, parameters, **kwargs): ...
    def create(self, entity, **kwargs) -> None: ...
    def drop(self, entity, **kwargs) -> None: ...
    @abstractmethod
    @overload
    def execute(self, object_: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> CursorResult: ...
    @abstractmethod
    @overload
    def execute(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ...
def create_mock_engine(url: URL | str, executor, **kw) -> MockConnection: ...

View File

@@ -1,32 +0,0 @@
from _typeshed import Incomplete
def cache(fn, self, con, *args, **kw): ...
class Inspector:
    """Database schema inspection facade over an Engine or Connection."""
    def __init__(self, bind): ...
    @classmethod
    def from_engine(cls, bind): ...
    @property
    def default_schema_name(self): ...
    def get_schema_names(self): ...
    def get_table_names(self, schema: Incomplete | None = None): ...
    def has_table(self, table_name, schema: Incomplete | None = None): ...
    def has_sequence(self, sequence_name, schema: Incomplete | None = None): ...
    def get_sorted_table_and_fkc_names(self, schema: Incomplete | None = None): ...
    def get_temp_table_names(self): ...
    def get_temp_view_names(self): ...
    def get_table_options(self, table_name, schema: Incomplete | None = None, **kw): ...
    def get_view_names(self, schema: Incomplete | None = None): ...
    def get_sequence_names(self, schema: Incomplete | None = None): ...
    def get_view_definition(self, view_name, schema: Incomplete | None = None): ...
    def get_columns(self, table_name, schema: Incomplete | None = None, **kw): ...
    def get_pk_constraint(self, table_name, schema: Incomplete | None = None, **kw): ...
    def get_foreign_keys(self, table_name, schema: Incomplete | None = None, **kw): ...
    def get_indexes(self, table_name, schema: Incomplete | None = None, **kw): ...
    def get_unique_constraints(self, table_name, schema: Incomplete | None = None, **kw): ...
    def get_table_comment(self, table_name, schema: Incomplete | None = None, **kw): ...
    def get_check_constraints(self, table_name, schema: Incomplete | None = None, **kw): ...
    # Deprecated spelling of reflect_table.
    def reflecttable(self, *args, **kwargs): ...
    def reflect_table(
        self, table, include_columns, exclude_columns=(), resolve_fks: bool = True, _extend_on: Incomplete | None = None
    ) -> None: ...

View File

@@ -1,134 +0,0 @@
from _typeshed import Incomplete
from collections.abc import Generator, KeysView
from typing import Any
from typing_extensions import Self
from ..sql.base import InPlaceGenerative
from .row import Row
class ResultMetaData:
    """Base for objects describing the columns/keys of a result set."""
    @property
    def keys(self): ...
class RMKeyView(KeysView[Any]):
    """Dict-keys-style view over a result's column keys."""
    def __init__(self, parent) -> None: ...
    def __len__(self) -> int: ...
    def __iter__(self): ...
    def __contains__(self, item): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
class SimpleResultMetaData(ResultMetaData):
    """In-memory ResultMetaData built from an explicit list of keys."""
    def __init__(
        self,
        keys,
        extra: Incomplete | None = None,
        _processors: Incomplete | None = None,
        _tuplefilter: Incomplete | None = None,
        _translated_indexes: Incomplete | None = None,
        _unique_filters: Incomplete | None = None,
    ) -> None: ...
# Factory producing a row constructor for the given field names —
# NOTE(review): inferred from the name; confirm against
# sqlalchemy.engine.result.result_tuple.
def result_tuple(fields, extra: Incomplete | None = None): ...

class ResultInternal(InPlaceGenerative): ...

class _WithKeys:
    # Mixin adding a keys() accessor to result classes.
    def keys(self): ...
class Result(_WithKeys, ResultInternal):
    """Cursor-like object representing database results row by row.

    Supports iteration, fetch* methods, scalar/mapping facades and
    freezing/merging of result sets.
    """
    def __init__(self, cursor_metadata) -> None: ...
    def close(self) -> None: ...
    @property
    def closed(self): ...
    def yield_per(self, num: int) -> Self: ...
    def unique(self, strategy: Incomplete | None = None) -> Self: ...
    def columns(self, *col_expressions): ...
    # Facades over the same underlying result.
    def scalars(self, index: int = 0) -> ScalarResult: ...
    def mappings(self) -> MappingResult: ...
    def __iter__(self): ...
    def __next__(self): ...
    def partitions(self, size: int | None = None) -> Generator[list[Row], None, None]: ...
    def fetchall(self) -> list[Row]: ...
    def fetchone(self) -> Row | None: ...
    def fetchmany(self, size: int | None = None) -> list[Row]: ...
    def all(self) -> list[Row]: ...
    def first(self) -> Row | None: ...
    def one_or_none(self) -> Row | None: ...
    def scalar_one(self) -> Any: ...
    def scalar_one_or_none(self) -> Any | None: ...
    def one(self) -> Row: ...
    def scalar(self) -> Any | None: ...
    def freeze(self) -> FrozenResult: ...
    def merge(self, *others) -> MergedResult: ...
class FilterResult(ResultInternal):
    """Base for result wrappers that filter rows of an underlying Result."""
    def yield_per(self: Self, num) -> Self: ...
    @property
    def closed(self): ...
    def close(self) -> None: ...
class ScalarResult(FilterResult):
    """Result facade yielding single column values instead of Row objects."""
    def __init__(self, real_result, index) -> None: ...
    def unique(self, strategy: Incomplete | None = None): ...
    def partitions(self, size: Incomplete | None = None) -> None: ...
    def fetchall(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def all(self): ...
    def __iter__(self): ...
    def __next__(self): ...
    def first(self): ...
    def one_or_none(self): ...
    def one(self): ...
class MappingResult(_WithKeys, FilterResult):
    """Result facade yielding dict-like RowMapping objects per row."""
    def __init__(self, result) -> None: ...
    def unique(self, strategy: Incomplete | None = None): ...
    def columns(self, *col_expressions): ...
    def partitions(self, size: Incomplete | None = None) -> None: ...
    def fetchall(self): ...
    def fetchone(self): ...
    def fetchmany(self, size: Incomplete | None = None): ...
    def all(self): ...
    def __iter__(self): ...
    def __next__(self): ...
    def first(self): ...
    def one_or_none(self): ...
    def one(self): ...
class FrozenResult:
    """Fully-buffered copy of a Result; calling it yields a fresh Result."""
    metadata: Any
    data: Any
    def __init__(self, result) -> None: ...
    def rewrite_rows(self): ...
    def with_new_rows(self, tuple_data): ...
    def __call__(self): ...
class IteratorResult(Result):
    """Result sourced from a plain Python iterator of rows."""
    iterator: Any
    raw: Any
    def __init__(
        self, cursor_metadata, iterator, raw: Incomplete | None = None, _source_supports_scalars: bool = False
    ) -> None: ...
    @property
    def closed(self): ...
def null_result() -> IteratorResult: ...
class ChunkedIteratorResult(IteratorResult):
    """IteratorResult fed by a chunk-producing callable (supports yield_per)."""
    chunks: Any
    raw: Any
    iterator: Any
    dynamic_yield_per: Any
    def __init__(
        self,
        cursor_metadata,
        chunks,
        source_supports_scalars: bool = False,
        raw: Incomplete | None = None,
        dynamic_yield_per: bool = False,
    ) -> None: ...
class MergedResult(IteratorResult):
    """Result produced by merging several Result objects (Result.merge)."""
    closed: bool
    def __init__(self, cursor_metadata, results) -> None: ...

View File

@@ -1,63 +0,0 @@
from abc import ABCMeta
from collections.abc import ItemsView, Iterator, KeysView, Mapping, Sequence, ValuesView
from typing import Any, Generic, TypeVar
from ..cresultproxy import BaseRow as BaseRow
_VT_co = TypeVar("_VT_co", covariant=True)

MD_INDEX: int

# Module-level callable — presumably the unpickling reconstructor for Row
# objects; NOTE(review): confirm against sqlalchemy.engine.row.
def rowproxy_reconstructor(cls, state): ...

# Key-style flags (integer index vs. column objects) whose concrete values
# are assigned by the runtime implementation.
KEY_INTEGER_ONLY: int
KEY_OBJECTS_ONLY: int
KEY_OBJECTS_BUT_WARN: int
KEY_OBJECTS_NO_WARN: int
class Row(BaseRow, Sequence[Any], metaclass=ABCMeta):
    """Single result row behaving like an immutable sequence.

    Public accessors use a leading underscore (``_mapping``, ``_fields``,
    ``_asdict``) to avoid clashing with column names.
    """
    # The count and index methods are inherited from Sequence.
    # If the result set contains columns with the same names, these
    # fields contains their respective values, instead. We don't reflect
    # this in the stubs.
    __hash__ = BaseRow.__hash__ # type: ignore[assignment]
    def __lt__(self, other: Row | tuple[Any, ...]) -> bool: ...
    def __le__(self, other: Row | tuple[Any, ...]) -> bool: ...
    def __ge__(self, other: Row | tuple[Any, ...]) -> bool: ...
    def __gt__(self, other: Row | tuple[Any, ...]) -> bool: ...
    def __eq__(self, other: object) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def keys(self) -> list[str]: ...
    def __contains__(self, key): ...
    # The following methods are public, but have a leading underscore
    # to prevent conflicts with column names.
    @property
    def _mapping(self) -> RowMapping: ...
    @property
    def _fields(self) -> tuple[str, ...]: ...
    def _asdict(self) -> dict[str, Any]: ...
class LegacyRow(Row, metaclass=ABCMeta):
    """1.x-style Row exposing dict-like accessors without underscores."""
    def has_key(self, key: str) -> bool: ...
    def items(self) -> list[tuple[str, Any]]: ...
    def iterkeys(self) -> Iterator[str]: ...
    def itervalues(self) -> Iterator[Any]: ...
    def values(self) -> list[Any]: ...
# Legacy aliases kept for backwards compatibility with pre-1.4 names.
BaseRowProxy = BaseRow
RowProxy = Row
class ROMappingView(KeysView[str], ValuesView[_VT_co], ItemsView[str, _VT_co], Generic[_VT_co]): # type: ignore[misc]
    """Read-only view returned by RowMapping.items()/values()."""
    def __init__(self, mapping: RowMapping, items: list[_VT_co]) -> None: ...
    def __len__(self) -> int: ...
    def __iter__(self) -> Iterator[_VT_co]: ... # type: ignore[override]
    def __eq__(self, other: ROMappingView[_VT_co]) -> bool: ... # type: ignore[override]
    def __ne__(self, other: ROMappingView[_VT_co]) -> bool: ... # type: ignore[override]
class RowMapping(BaseRow, Mapping[str, Row]):
    """Mapping view of a Row keyed by column name (Row._mapping)."""
    __getitem__: Any
    def __iter__(self) -> Iterator[str]: ...
    def __len__(self) -> int: ...
    def items(self) -> ROMappingView[tuple[str, Any]]: ... # type: ignore[override]
    def keys(self) -> list[str]: ... # type: ignore[override]
    def values(self) -> ROMappingView[Any]: ... # type: ignore[override]

View File

@@ -1,4 +0,0 @@
from typing import Any
class MockEngineStrategy:
    # Reference to the MockConnection class; typed Any in this stub.
    MockConnection: Any

View File

@@ -1,65 +0,0 @@
from _typeshed import SupportsItems, Unused
from collections.abc import Iterable, Mapping, Sequence
from typing import Any, NamedTuple
from typing_extensions import Self, TypeAlias
from ..util import immutabledict
from .interfaces import Dialect
# object that produces a password when called with str()
_PasswordObject: TypeAlias = object
# stub-only helper class
class _URLTuple(NamedTuple):
    """Field layout shared by URL: drivername://user:pass@host:port/db?query."""
    drivername: str
    username: str | None
    password: str | _PasswordObject | None
    host: str | None
    port: int | None
    database: str | None
    query: immutabledict[str, str | tuple[str, ...]]

# Accepted shapes for the `query` argument of URL.create()/URL.set().
_Query: TypeAlias = Mapping[str, str | Sequence[str]] | Sequence[tuple[str, str | Sequence[str]]]
class URL(_URLTuple):
    """Immutable database connection URL.

    Instances are created via ``URL.create`` or ``make_url``; the mutator
    methods (``set``, ``update_query_*``) return new URL objects.
    """
    def __new__(self, *arg, **kw) -> Self | URL: ...
    @classmethod
    def create(
        cls,
        drivername: str,
        username: str | None = None,
        password: str | _PasswordObject | None = None,
        host: str | None = None,
        port: int | None = None,
        database: str | None = None,
        query: _Query | None = ...,
    ) -> URL: ...
    def set(
        self,
        drivername: str | None = None,
        username: str | None = None,
        password: str | _PasswordObject | None = None,
        host: str | None = None,
        port: int | None = None,
        database: str | None = None,
        query: _Query | None = None,
    ) -> Self: ...
    def update_query_string(self, query_string: str, append: bool = False) -> Self: ...
    def update_query_pairs(self, key_value_pairs: Iterable[tuple[str, str]], append: bool = False) -> Self: ...
    def update_query_dict(self, query_parameters: SupportsItems[str, str | Sequence[str]], append: bool = False) -> Self: ...
    def difference_update_query(self, names: Iterable[str]) -> URL: ...
    @property
    def normalized_query(self) -> immutabledict[str, tuple[str, ...]]: ...
    def __to_string__(self, hide_password: bool = True) -> str: ...
    def render_as_string(self, hide_password: bool = True) -> str: ...
    def __copy__(self) -> Self: ...
    def __deepcopy__(self, memo: Unused) -> Self: ...
    def __hash__(self) -> int: ...
    def __eq__(self, other: object) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def get_backend_name(self) -> str: ...
    def get_driver_name(self) -> str: ...
    def get_dialect(self) -> type[Dialect]: ...
    def translate_connect_args(self, names: list[str] | None = None, **kw: str) -> dict[str, Any]: ...
def make_url(name_or_url: str | URL) -> URL: ...

View File

@@ -1,12 +0,0 @@
from collections.abc import Callable
from types import TracebackType
from typing import Any
from typing_extensions import Self
def connection_memoize(key: str) -> Callable[..., Any]: ...
class TransactionalContext:
    """Mixin giving transaction objects context-manager (with-block) support."""
    def __enter__(self) -> Self: ...
    def __exit__(
        self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
    ) -> None: ...

View File

@@ -1,10 +0,0 @@
from .api import (
CANCEL as CANCEL,
NO_RETVAL as NO_RETVAL,
contains as contains,
listen as listen,
listens_for as listens_for,
remove as remove,
)
from .attr import RefCollection as RefCollection
from .base import Events as Events, dispatcher as dispatcher

Some files were not shown because too many files have changed in this diff Show More