Bump networkx to 3.2.1 (#11336)

Avasam, 2024-01-31 01:30:20 -05:00, committed by GitHub
parent 9877ed8092, commit faf9d77d79
232 changed files with 1249 additions and 73 deletions

View File

@@ -0,0 +1,23 @@
# overloaded class-decorators are not properly supported in type-checkers:
# - mypy: https://github.com/python/mypy/issues/16840
# - pyright: https://github.com/microsoft/pyright/issues/7167
networkx\.(algorithms\.)?(boundary\.)?edge_boundary
networkx\.(algorithms\.)?(bridges\.)?local_bridges
networkx\.(algorithms\.)?(clique\.)?node_clique_number
networkx\.(algorithms\.)?(shortest_paths\.)?(generic\.)?shortest_path
networkx\.(convert_matrix\.)?from_numpy_array
networkx\.(convert_matrix\.)?from_pandas_adjacency
networkx\.(convert_matrix\.)?from_pandas_edgelist
networkx\.(generators\.)?(random_clustered\.)?random_clustered_graph
networkx\.(relabel\.)?relabel_nodes
# Stubtest doesn't understand aliases of class-decorated methods (possibly https://github.com/python/mypy/issues/6700 )
networkx\.(algorithms\.)?(centrality\.)?(current_flow_closeness\.)?information_centrality
networkx\.(generators\.)?(random_graphs\.)?binomial_graph
networkx\.(generators\.)?(random_graphs\.)?erdos_renyi_graph
# Stubtest says: "runtime argument "backend" has a default value of type None, which is
# incompatible with stub argument type builtins.str. This is often caused by overloads
# failing to account for explicitly passing in the default value."
# Which is true, but fixing it would require some way of concatenating `backend` onto ParamSpec.kwargs
networkx\.(utils\.backends\.)?_dispatch\.__call__
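
All of the entries above trace back to `networkx.utils.backends._dispatch` being declared in the stubs as a class-based decorator with an overloaded `__call__`. The following is a minimal sketch of that pattern, not the actual `networkx/utils/backends.pyi`; the names `dispatch_sketch` and `shortest_path_sketch` are illustrative only. It shows why the second overload cannot give `backend` a `None` default: PEP 612 offers no way to append a keyword-only parameter onto `ParamSpec.kwargs`, which is exactly the stubtest complaint quoted above.

from collections.abc import Callable
from typing import Any, Generic, ParamSpec, TypeVar, overload

_P = ParamSpec("_P")
_R = TypeVar("_R")

class dispatch_sketch(Generic[_P, _R]):
    """Class-based decorator: the instance is called in place of the wrapped function."""

    def __init__(self, func: Callable[_P, _R]) -> None:
        self._func = func

    # Overload 1: no backend requested, so the call keeps the wrapped function's signature.
    @overload
    def __call__(self, *args: _P.args, **kwargs: _P.kwargs) -> _R: ...
    # Overload 2: an explicit backend plus backend-specific keyword arguments. There is
    # no spelling for "_P.kwargs plus a keyword-only `backend`", so this overload falls
    # back to Any and requires `backend` to be passed explicitly (no None default).
    @overload
    def __call__(self, *args: Any, backend: str, **backend_kwargs: Any) -> _R: ...
    def __call__(self, *args: Any, backend: str | None = None, **kwargs: Any) -> _R:
        return self._func(*args, **kwargs)

# Functions decorated this way become instances with an overloaded __call__, which
# mypy (python/mypy#16840), pyright (microsoft/pyright#7167) and stubtest all handle
# incompletely; hence the allowlist entries above.
@dispatch_sketch
def shortest_path_sketch(G: dict[int, list[int]], source: int, target: int) -> list[int]:
    return [source, target]  # placeholder body for the sketch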

View File

@@ -0,0 +1,23 @@
from typing_extensions import assert_type

from networkx.utils.backends import _dispatch


@_dispatch
def some_method(int_p: int, str_p: str) -> float:
    return 0.0


# Wrong param / order
some_method("", 0)  # type: ignore

# backend is kw-only
some_method(0, "", None)  # type: ignore

# No backend means no **backend_kwargs allowed
some_method(0, "", backend_specific_kwarg="")  # type: ignore
some_method(0, "", backend=None, backend_specific_kwarg="")  # type: ignore

# Correct usage
assert_type(some_method(0, ""), float)
# type system doesn't allow this yet (see comment in networkx/utils/backends.pyi)
# assert_type(some_method(0, "", backend=None), float)
assert_type(some_method(0, "", backend="custom backend", backend_specific_kwarg=""), float)

View File

@@ -1,4 +1,4 @@
version = "3.1"
version = "3.2.1"
upstream_repository = "https://github.com/networkx/networkx"
requires = ["numpy"]
partial_stub = true

View File

@@ -6,9 +6,11 @@ from networkx.convert_matrix import *
from networkx.drawing import *
from networkx.exception import *
from networkx.generators import *
from networkx.lazy_imports import _lazy_import as _lazy_import
from networkx.linalg import *
from networkx.readwrite import *
from networkx.relabel import *
from networkx.utils.backends import _dispatch as _dispatch
from . import (
algorithms as algorithms,

View File

@@ -1,4 +1,10 @@
from networkx.utils.backends import _dispatch
@_dispatch
def maximum_independent_set(G): ...
@_dispatch
def max_clique(G): ...
@_dispatch
def clique_removal(G): ...
@_dispatch
def large_clique_size(G): ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def average_clustering(G, trials: int = 1000, seed: Incomplete | None = None): ...

View File

@@ -1,5 +1,10 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def local_node_connectivity(G, source, target, cutoff: Incomplete | None = None): ...
@_dispatch
def node_connectivity(G, s: Incomplete | None = None, t: Incomplete | None = None): ...
@_dispatch
def all_pairs_node_connectivity(G, nbunch: Incomplete | None = None, cutoff: Incomplete | None = None): ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def diameter(G, seed: Incomplete | None = None): ...

View File

@@ -1,4 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def min_weighted_dominating_set(G, weight: Incomplete | None = None): ...
@_dispatch
def min_edge_dominating_set(G): ...

View File

@@ -1 +1,4 @@
from networkx.utils.backends import _dispatch
@_dispatch
def k_components(G, min_density: float = 0.95): ...

View File

@@ -1 +1,4 @@
from networkx.utils.backends import _dispatch
@_dispatch
def min_maximal_matching(G): ...

View File

@@ -1,4 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def randomized_partitioning(G, seed: Incomplete | None = None, p: float = 0.5, weight: Incomplete | None = None): ...
@_dispatch
def one_exchange(G, initial_cut: Incomplete | None = None, seed: Incomplete | None = None, weight: Incomplete | None = None): ...

View File

@@ -1 +1,4 @@
from networkx.utils.backends import _dispatch
@_dispatch
def ramsey_R2(G): ...

View File

@@ -1,4 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def metric_closure(G, weight: str = "weight"): ...
@_dispatch
def steiner_tree(G, terminal_nodes, weight: str = "weight", method: Incomplete | None = None): ...

View File

@@ -1,11 +1,18 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def christofides(G, weight: str = "weight", tree: Incomplete | None = None): ...
@_dispatch
def traveling_salesman_problem(
G, weight: str = "weight", nodes: Incomplete | None = None, cycle: bool = True, method: Incomplete | None = None
): ...
@_dispatch
def asadpour_atsp(G, weight: str = "weight", seed: Incomplete | None = None, source: Incomplete | None = None): ...
@_dispatch
def greedy_tsp(G, weight: str = "weight", source: Incomplete | None = None): ...
@_dispatch
def simulated_annealing_tsp(
G,
init_cycle,
@@ -19,6 +26,7 @@ def simulated_annealing_tsp(
alpha: float = 0.01,
seed: Incomplete | None = None,
): ...
@_dispatch
def threshold_accepting_tsp(
G,
init_cycle,

View File

@@ -1,8 +1,12 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
__all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
@_dispatch
def treewidth_min_degree(G): ...
@_dispatch
def treewidth_min_fill_in(G): ...
class MinDegreeHeuristic:

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def min_weighted_vertex_cover(G, weight: Incomplete | None = None): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def average_degree_connectivity(
G, source: str = "in+out", target: str = "in+out", nodes: Incomplete | None = None, weight: Incomplete | None = None
): ...

View File

@@ -1,10 +1,16 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def degree_assortativity_coefficient(
G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None
): ...
@_dispatch
def degree_pearson_correlation_coefficient(
G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None
): ...
@_dispatch
def attribute_assortativity_coefficient(G, attribute, nodes: Incomplete | None = None): ...
@_dispatch
def numeric_assortativity_coefficient(G, attribute, nodes: Incomplete | None = None): ...

View File

@@ -1,12 +1,18 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def attribute_mixing_dict(G, attribute, nodes: Incomplete | None = None, normalized: bool = False): ...
@_dispatch
def attribute_mixing_matrix(
G, attribute, nodes: Incomplete | None = None, mapping: Incomplete | None = None, normalized: bool = True
): ...
@_dispatch
def degree_mixing_dict(
G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None, normalized: bool = False
): ...
@_dispatch
def degree_mixing_matrix(
G,
x: str = "out",
@@ -16,4 +22,5 @@ def degree_mixing_matrix(
normalized: bool = True,
mapping: Incomplete | None = None,
): ...
@_dispatch
def mixing_dict(xy, normalized: bool = False): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def average_neighbor_degree(
G, source: str = "out", target: str = "out", nodes: Incomplete | None = None, weight: Incomplete | None = None
): ...

View File

@@ -1,7 +1,11 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def node_attribute_xy(G, attribute, nodes: Incomplete | None = None) -> Generator[Incomplete, None, None]: ...
@_dispatch
def node_degree_xy(
G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None
) -> Generator[Incomplete, None, None]: ...

View File

@@ -1,2 +1,6 @@
from networkx.utils.backends import _dispatch
@_dispatch
def find_asteroidal_triple(G): ...
@_dispatch
def is_at_free(G): ...

View File

@@ -1,8 +1,16 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def color(G): ...
@_dispatch
def is_bipartite(G): ...
@_dispatch
def is_bipartite_node_set(G, nodes): ...
@_dispatch
def sets(G, top_nodes: Incomplete | None = None): ...
@_dispatch
def density(B, nodes): ...
@_dispatch
def degrees(B, nodes, weight: Incomplete | None = None): ...

View File

@@ -1,3 +1,8 @@
from networkx.utils.backends import _dispatch
@_dispatch
def degree_centrality(G, nodes): ...
@_dispatch
def betweenness_centrality(G, nodes): ...
@_dispatch
def closeness_centrality(G, nodes, normalized: bool = True): ...

View File

@@ -1,8 +1,13 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def latapy_clustering(G, nodes: Incomplete | None = None, mode: str = "dot"): ...
clustering = latapy_clustering
@_dispatch
def average_clustering(G, nodes: Incomplete | None = None, mode: str = "dot"): ...
@_dispatch
def robins_alexander_clustering(G): ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def min_edge_cover(G, matching_algorithm: Incomplete | None = None): ...

View File

@@ -1,8 +1,13 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def write_edgelist(G, path, comments: str = "#", delimiter: str = " ", data: bool = True, encoding: str = "utf-8") -> None: ...
@_dispatch
def generate_edgelist(G, delimiter: str = " ", data: bool = True) -> Generator[Incomplete, None, None]: ...
@_dispatch
def parse_edgelist(
lines,
comments: str = "#",
@@ -11,6 +16,7 @@ def parse_edgelist(
nodetype: Incomplete | None = None,
data: bool = True,
): ...
@_dispatch
def read_edgelist(
path,
comments: str = "#",

View File

@@ -1,10 +1,20 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def complete_bipartite_graph(n1, n2, create_using: Incomplete | None = None): ...
@_dispatch
def configuration_model(aseq, bseq, create_using: Incomplete | None = None, seed: Incomplete | None = None): ...
@_dispatch
def havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ...
@_dispatch
def reverse_havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ...
@_dispatch
def alternating_havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ...
@_dispatch
def preferential_attachment_graph(aseq, p, create_using: Incomplete | None = None, seed: Incomplete | None = None): ...
@_dispatch
def random_graph(n, m, p, seed: Incomplete | None = None, directed: bool = False): ...
@_dispatch
def gnmk_random_graph(n, m, k, seed: Incomplete | None = None, directed: bool = False): ...

View File

@@ -1,9 +1,15 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def hopcroft_karp_matching(G, top_nodes: Incomplete | None = None): ...
@_dispatch
def eppstein_matching(G, top_nodes: Incomplete | None = None): ...
@_dispatch
def to_vertex_cover(G, matching, top_nodes: Incomplete | None = None): ...
maximum_matching = hopcroft_karp_matching
@_dispatch
def minimum_weight_full_matching(G, top_nodes: Incomplete | None = None, weight: str = "weight"): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def biadjacency_matrix(
G,
row_order,
@@ -8,4 +11,5 @@ def biadjacency_matrix(
weight: str = "weight",
format: str = "csr",
): ...
@_dispatch
def from_biadjacency_matrix(A, create_using: Incomplete | None = None, edge_attribute: str = "weight"): ...

View File

@@ -1,7 +1,14 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def projected_graph(B, nodes, multigraph: bool = False): ...
@_dispatch
def weighted_projected_graph(B, nodes, ratio: bool = False): ...
@_dispatch
def collaboration_weighted_projected_graph(B, nodes): ...
@_dispatch
def overlap_weighted_projected_graph(B, nodes, jaccard: bool = True): ...
@_dispatch
def generic_weighted_projected_graph(B, nodes, weight_function: Incomplete | None = None): ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def node_redundancy(G, nodes: Incomplete | None = None): ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def spectral_bipartivity(G, nodes: Incomplete | None = None, weight: str = "weight"): ...

View File

@@ -3,6 +3,7 @@ from collections.abc import Generator, Iterable
from typing import Literal, TypeVar, overload
from networkx.classes.graph import Graph, _Node
from networkx.utils.backends import _dispatch
_U = TypeVar("_U")
@@ -110,4 +111,5 @@ def edge_boundary(
keys: Literal[True],
default: _U | None = None,
) -> Generator[tuple[_Node, _Node, int, dict[str, _U]], None, None]: ...
@_dispatch
def node_boundary(G: Graph[_Node], nbunch1: Iterable[_Node], nbunch2: Iterable[_Node] | None = None) -> set[_Node]: ...

View File

@@ -3,8 +3,11 @@ from collections.abc import Callable, Generator
from typing import Literal, overload
from networkx.classes.graph import Graph, _Node
from networkx.utils.backends import _dispatch
@_dispatch
def bridges(G: Graph[_Node], root: _Node | None = None) -> Generator[_Node, None, None]: ...
@_dispatch
def has_bridges(G: Graph[_Node], root: Incomplete | None = None) -> bool: ...
@overload
def local_bridges(

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def betweenness_centrality(
G,
k: Incomplete | None = None,
@@ -8,6 +11,7 @@ def betweenness_centrality(
endpoints: bool = False,
seed: Incomplete | None = None,
): ...
@_dispatch
def edge_betweenness_centrality(
G, k: Incomplete | None = None, normalized: bool = True, weight: Incomplete | None = None, seed: Incomplete | None = None
): ...

View File

@@ -1,4 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def betweenness_centrality_subset(G, sources, targets, normalized: bool = False, weight: Incomplete | None = None): ...
@_dispatch
def edge_betweenness_centrality_subset(G, sources, targets, normalized: bool = False, weight: Incomplete | None = None): ...

View File

@@ -1,6 +1,10 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def closeness_centrality(G, u: Incomplete | None = None, distance: Incomplete | None = None, wf_improved: bool = True): ...
@_dispatch
def incremental_closeness_centrality(
G, edge, prev_cc: Incomplete | None = None, insertion: bool = True, wf_improved: bool = True
): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def approximate_current_flow_betweenness_centrality(
G,
normalized: bool = True,
@@ -10,9 +13,11 @@ def approximate_current_flow_betweenness_centrality(
kmax: int = 10000,
seed: Incomplete | None = None,
): ...
@_dispatch
def current_flow_betweenness_centrality(
G, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "full"
): ...
@_dispatch
def edge_current_flow_betweenness_centrality(
G, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "full"
): ...

View File

@@ -1,8 +1,12 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def current_flow_betweenness_centrality_subset(
G, sources, targets, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "lu"
): ...
@_dispatch
def edge_current_flow_betweenness_centrality_subset(
G, sources, targets, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "lu"
): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def current_flow_closeness_centrality(G, weight: Incomplete | None = None, dtype=..., solver: str = "lu"): ...
information_centrality = current_flow_closeness_centrality

View File

@@ -1,3 +1,8 @@
from networkx.utils.backends import _dispatch
@_dispatch
def degree_centrality(G): ...
@_dispatch
def in_degree_centrality(G): ...
@_dispatch
def out_degree_centrality(G): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def dispersion(
G,
u: Incomplete | None = None,

View File

@@ -1,6 +1,10 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def eigenvector_centrality(
G, max_iter: int = 100, tol: float = 1e-06, nstart: Incomplete | None = None, weight: Incomplete | None = None
): ...
@_dispatch
def eigenvector_centrality_numpy(G, weight: Incomplete | None = None, max_iter: int = 50, tol: float = 0): ...

View File

@@ -1,6 +1,9 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def flow_matrix_row(G, weight: Incomplete | None = None, dtype=..., solver: str = "lu") -> Generator[Incomplete, None, None]: ...
class InverseLaplacian:

View File

@@ -1,6 +1,10 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def group_betweenness_centrality(G, C, normalized: bool = True, weight: Incomplete | None = None, endpoints: bool = False): ...
@_dispatch
def prominent_group(
G,
k,
@@ -10,7 +14,11 @@ def prominent_group(
normalized: bool = True,
greedy: bool = False,
): ...
@_dispatch
def group_closeness_centrality(G, S, weight: Incomplete | None = None): ...
@_dispatch
def group_degree_centrality(G, S): ...
@_dispatch
def group_in_degree_centrality(G, S): ...
@_dispatch
def group_out_degree_centrality(G, S): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def harmonic_centrality(
G, nbunch: Incomplete | None = None, distance: Incomplete | None = None, sources: Incomplete | None = None
): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def katz_centrality(
G,
alpha: float = 0.1,
@@ -10,6 +13,7 @@ def katz_centrality(
normalized: bool = True,
weight: Incomplete | None = None,
): ...
@_dispatch
def katz_centrality_numpy(
G, alpha: float = 0.1, beta: float = 1.0, normalized: bool = True, weight: Incomplete | None = None
): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def laplacian_centrality(
G,
normalized: bool = True,

View File

@@ -1,11 +1,15 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
__all__ = ["load_centrality", "edge_load_centrality"]
@_dispatch
def newman_betweenness_centrality(
G, v: Incomplete | None = None, cutoff: Incomplete | None = None, normalized: bool = True, weight: Incomplete | None = None
): ...
load_centrality = newman_betweenness_centrality
@_dispatch
def edge_load_centrality(G, cutoff: bool = False): ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def percolation_centrality(
G, attribute: str = "percolation", states: Incomplete | None = None, weight: Incomplete | None = None
): ...

View File

@@ -1,6 +1,10 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def global_reaching_centrality(G, weight: Incomplete | None = None, normalized: bool = True): ...
@_dispatch
def local_reaching_centrality(
G, v, paths: Incomplete | None = None, weight: Incomplete | None = None, normalized: bool = True
): ...

View File

@@ -1 +1,4 @@
from networkx.utils.backends import _dispatch
@_dispatch
def second_order_centrality(G): ...

View File

@@ -1,4 +1,10 @@
from networkx.utils.backends import _dispatch
@_dispatch
def subgraph_centrality_exp(G): ...
@_dispatch
def subgraph_centrality(G): ...
@_dispatch
def communicability_betweenness_centrality(G): ...
@_dispatch
def estrada_index(G): ...

View File

@@ -1,3 +1,8 @@
from networkx.utils.backends import _dispatch
@_dispatch
def trophic_levels(G, weight: str = "weight"): ...
@_dispatch
def trophic_differences(G, weight: str = "weight"): ...
@_dispatch
def trophic_incoherence_parameter(G, weight: str = "weight", cannibalism: bool = False): ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def voterank(G, number_of_nodes: Incomplete | None = None): ...

View File

@@ -1,5 +1,7 @@
from collections.abc import Generator
from networkx.classes.graph import Graph, _Node
from networkx.utils.backends import _dispatch
@_dispatch
def chain_decomposition(G: Graph[_Node], root: _Node | None = None) -> Generator[list[tuple[_Node, _Node]], None, None]: ...

View File

@@ -3,10 +3,15 @@ from collections.abc import Generator, Hashable
from networkx.classes.graph import Graph, _Node
from networkx.exception import NetworkXException
from networkx.utils.backends import _dispatch
class NetworkXTreewidthBoundExceeded(NetworkXException): ...
@_dispatch
def is_chordal(G: Graph[Hashable]) -> bool: ...
@_dispatch
def find_induced_nodes(G: Graph[_Node], s: _Node, t: _Node, treewidth_bound: float = sys.maxsize) -> set[_Node]: ...
@_dispatch
def chordal_graph_cliques(G: Graph[_Node]) -> Generator[frozenset[_Node], None, None]: ...
@_dispatch
def chordal_graph_treewidth(G: Graph[Hashable]) -> int: ...

View File

@@ -1,18 +1,22 @@
from _typeshed import SupportsGetItem, Unused
-from collections.abc import Container, Generator, Iterable, Iterator, Sized
+from collections.abc import Generator, Iterable, Iterator, Sized
from typing import overload
from networkx.classes.graph import Graph, _Node
from networkx.utils.backends import _dispatch
@_dispatch
def enumerate_all_cliques(G: Graph[_Node]) -> Generator[list[_Node], None, None]: ...
@_dispatch
def find_cliques(G: Graph[_Node], nodes: SupportsGetItem[slice, _Node] | None = None) -> Generator[list[_Node], None, None]: ...
@_dispatch
def find_cliques_recursive(G: Graph[_Node], nodes: SupportsGetItem[slice, _Node] | None = None) -> Iterator[list[_Node]]: ...
@_dispatch
def make_max_clique_graph(G: Graph[_Node], create_using: type[Graph[_Node]] | None = None) -> Graph[_Node]: ...
@_dispatch
def make_clique_bipartite(
G: Graph[_Node], fpos: Unused = None, create_using: type[Graph[_Node]] | None = None, name: Unused = None
) -> Graph[_Node]: ...
def graph_clique_number(G: Graph[_Node], cliques: Iterable[_Node] | None = None) -> int: ...
def graph_number_of_cliques(G: Graph[_Node], cliques: Sized | None = None) -> int: ...
@overload
def node_clique_number( # type: ignore[misc] # Incompatible return types
G: Graph[_Node],
@@ -24,17 +28,3 @@ def node_clique_number( # type: ignore[misc] # Incompatible return types
def node_clique_number(
G: Graph[_Node], nodes: _Node, cliques: Iterable[Sized] | None = None, separate_nodes: Unused = False
) -> int: ...
-@overload
-def number_of_cliques( # type: ignore[misc] # Incompatible return types
-G: Graph[_Node], nodes: list[_Node] | None = None, cliques: Iterable[Container[_Node]] | None = None
-) -> dict[_Node, int]: ...
-@overload
-def number_of_cliques(G: Graph[_Node], nodes: _Node, cliques: Iterable[Container[_Node]] | None = None) -> int: ...
-@overload
-def cliques_containing_node( # type: ignore[misc] # Incompatible return types
-G: Graph[_Node], nodes: list[_Node] | None = None, cliques: Iterable[Container[_Node]] | None = None
-) -> dict[_Node, list[_Node]]: ...
-@overload
-def cliques_containing_node(
-G: Graph[_Node], nodes: _Node, cliques: Iterable[Container[_Node]] | None = None
-) -> Generator[list[_Node], None, None]: ...

View File

@@ -1,8 +1,16 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def triangles(G, nodes: Incomplete | None = None): ...
@_dispatch
def average_clustering(G, nodes: Incomplete | None = None, weight: Incomplete | None = None, count_zeros: bool = True): ...
@_dispatch
def clustering(G, nodes: Incomplete | None = None, weight: Incomplete | None = None): ...
@_dispatch
def transitivity(G): ...
@_dispatch
def square_clustering(G, nodes: Incomplete | None = None): ...
@_dispatch
def generalized_degree(G, nodes: Incomplete | None = None): ...

View File

@@ -1 +1,4 @@
from networkx.utils.backends import _dispatch
@_dispatch
def equitable_color(G, num_colors): ...

View File

@@ -1,6 +1,8 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
__all__ = [
"greedy_color",
"strategy_connected_sequential",
@@ -13,14 +15,23 @@ __all__ = [
"strategy_smallest_last",
]
@_dispatch
def strategy_largest_first(G, colors): ...
@_dispatch
def strategy_random_sequential(G, colors, seed: Incomplete | None = None): ...
@_dispatch
def strategy_smallest_last(G, colors): ...
@_dispatch
def strategy_independent_set(G, colors) -> Generator[Incomplete, Incomplete, None]: ...
@_dispatch
def strategy_connected_sequential_bfs(G, colors): ...
@_dispatch
def strategy_connected_sequential_dfs(G, colors): ...
@_dispatch
def strategy_connected_sequential(G, colors, traversal: str = "bfs") -> Generator[Incomplete, None, None]: ...
@_dispatch
def strategy_saturation_largest_first(G, colors) -> Generator[Incomplete, None, Incomplete]: ...
@_dispatch
def greedy_color(G, strategy: str = "largest_first", interchange: bool = False): ...
class _Node:

View File

@@ -1,2 +1,6 @@
from networkx.utils.backends import _dispatch
@_dispatch
def communicability(G): ...
@_dispatch
def communicability_exp(G): ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def asyn_fluidc(G, k, max_iter: int = 100, seed: Incomplete | None = None): ...

View File

@@ -1,4 +1,7 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def girvan_newman(G, most_valuable_edge: Incomplete | None = None) -> Generator[Incomplete, None, Incomplete]: ...

View File

@@ -1 +1,4 @@
from networkx.utils.backends import _dispatch
@_dispatch
def is_partition(G, communities): ...

View File

@@ -1,4 +1,7 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def k_clique_communities(G, k, cliques: Incomplete | None = None) -> Generator[Incomplete, None, None]: ...

View File

@@ -1,5 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def kernighan_lin_bisection(
G, partition: Incomplete | None = None, max_iter: int = 10, weight: str = "weight", seed: Incomplete | None = None
): ...

View File

@@ -1,7 +1,11 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def asyn_lpa_communities(
G, weight: Incomplete | None = None, seed: Incomplete | None = None
) -> Generator[Incomplete, Incomplete, None]: ...
@_dispatch
def label_propagation_communities(G): ...

View File

@@ -1,9 +1,13 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def louvain_communities(
G, weight: str = "weight", resolution: float = 1, threshold: float = 1e-07, seed: Incomplete | None = None
): ...
@_dispatch
def louvain_partitions(
G, weight: str = "weight", resolution: float = 1, threshold: float = 1e-07, seed: Incomplete | None = None
) -> Generator[Incomplete, None, None]: ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def lukes_partitioning(G, max_size, node_weight: Incomplete | None = None, edge_weight: Incomplete | None = None): ...

View File

@@ -1,6 +1,10 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def greedy_modularity_communities(
G, weight: Incomplete | None = None, resolution: float = 1, cutoff: int = 1, best_n: Incomplete | None = None
): ...
@_dispatch
def naive_greedy_modularity_communities(G, resolution: float = 1, weight: Incomplete | None = None): ...

View File

@@ -1,9 +1,12 @@
from networkx.exception import NetworkXError
from networkx.utils.backends import _dispatch
__all__ = ["modularity", "partition_quality"]
class NotAPartition(NetworkXError):
    def __init__(self, G, collection) -> None: ...
@_dispatch
def modularity(G, communities, weight: str = "weight", resolution: float = 1): ...
@_dispatch
def partition_quality(G, partition): ...

View File

@@ -1,6 +1,11 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def attracting_components(G) -> Generator[Incomplete, None, None]: ...
@_dispatch
def number_attracting_components(G): ...
@_dispatch
def is_attracting_component(G): ...

View File

@@ -1,7 +1,13 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def is_biconnected(G): ...
@_dispatch
def biconnected_component_edges(G) -> Generator[Incomplete, Incomplete, None]: ...
@_dispatch
def biconnected_components(G) -> Generator[Incomplete, None, None]: ...
@_dispatch
def articulation_points(G) -> Generator[Incomplete, None, None]: ...

View File

@@ -1,7 +1,13 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def connected_components(G) -> Generator[Incomplete, None, None]: ...
@_dispatch
def number_connected_components(G): ...
@_dispatch
def is_connected(G): ...
@_dispatch
def node_connected_component(G, n): ...

View File

@@ -1,3 +1,6 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def is_semiconnected(G, topo_order: Incomplete | None = None): ...

View File

@@ -2,10 +2,17 @@ from collections.abc import Generator, Hashable, Iterable
from networkx.classes.digraph import DiGraph
from networkx.classes.graph import Graph, _Node
from networkx.utils.backends import _dispatch
@_dispatch
def strongly_connected_components(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ...
@_dispatch
def kosaraju_strongly_connected_components(G: Graph[_Node], source: _Node | None = None) -> Generator[set[_Node], None, None]: ...
@_dispatch
def strongly_connected_components_recursive(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ...
@_dispatch
def number_strongly_connected_components(G: Graph[Hashable]) -> int: ...
@_dispatch
def is_strongly_connected(G: Graph[Hashable]) -> bool: ...
@_dispatch
def condensation(G: DiGraph[_Node], scc: Iterable[Iterable[_Node]] | None = None) -> DiGraph[int]: ...

View File

@@ -1,7 +1,11 @@
from collections.abc import Generator, Hashable
from networkx.classes.graph import Graph, _Node
from networkx.utils.backends import _dispatch
@_dispatch
def weakly_connected_components(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ...
@_dispatch
def number_weakly_connected_components(G: Graph[Hashable]) -> int: ...
@_dispatch
def is_weakly_connected(G: Graph[Hashable]) -> bool: ...

View File

@@ -1,6 +1,7 @@
from _typeshed import Incomplete
from networkx.algorithms.flow import edmonds_karp
from networkx.utils.backends import _dispatch
__all__ = [
"average_node_connectivity",
@@ -13,6 +14,7 @@ __all__ = [
default_flow_func = edmonds_karp
@_dispatch
def local_node_connectivity(
G,
s,
@@ -22,9 +24,13 @@ def local_node_connectivity(
residual: Incomplete | None = None,
cutoff: Incomplete | None = None,
): ...
@_dispatch
def node_connectivity(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ...
@_dispatch
def average_node_connectivity(G, flow_func: Incomplete | None = None): ...
@_dispatch
def all_pairs_node_connectivity(G, nbunch: Incomplete | None = None, flow_func: Incomplete | None = None): ...
@_dispatch
def local_edge_connectivity(
G,
s,
@@ -34,6 +40,7 @@ def local_edge_connectivity(
residual: Incomplete | None = None,
cutoff: Incomplete | None = None,
): ...
@_dispatch
def edge_connectivity(
G,
s: Incomplete | None = None,

View File

@@ -1,16 +1,21 @@
from _typeshed import Incomplete
from networkx.algorithms.flow import edmonds_karp
from networkx.utils.backends import _dispatch
__all__ = ["minimum_st_node_cut", "minimum_node_cut", "minimum_st_edge_cut", "minimum_edge_cut"]
default_flow_func = edmonds_karp
@_dispatch
def minimum_st_edge_cut(
G, s, t, flow_func: Incomplete | None = None, auxiliary: Incomplete | None = None, residual: Incomplete | None = None
): ...
@_dispatch
def minimum_st_node_cut(
G, s, t, flow_func: Incomplete | None = None, auxiliary: Incomplete | None = None, residual: Incomplete | None = None
): ...
@_dispatch
def minimum_node_cut(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ...
@_dispatch
def minimum_edge_cut(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ...

View File

@@ -2,11 +2,13 @@ from _typeshed import Incomplete
from collections.abc import Generator
from networkx.algorithms.flow import edmonds_karp
from networkx.utils.backends import _dispatch
__all__ = ["edge_disjoint_paths", "node_disjoint_paths"]
default_flow_func = edmonds_karp
@_dispatch
def edge_disjoint_paths(
G,
s,
@@ -16,6 +18,7 @@ def edge_disjoint_paths(
auxiliary: Incomplete | None = None,
residual: Incomplete | None = None,
) -> Generator[Incomplete, None, None]: ...
@_dispatch
def node_disjoint_paths(
G,
s,

View File

@@ -1,9 +1,13 @@
from collections.abc import Generator, Hashable
from networkx.classes.graph import Graph, _Node
from networkx.utils.backends import _dispatch
@_dispatch
def is_k_edge_connected(G: Graph[Hashable], k: int): ...
@_dispatch
def is_locally_k_edge_connected(G, s, t, k): ...
@_dispatch
def k_edge_augmentation(
G: Graph[_Node],
k: int,

View File

@@ -1,8 +1,13 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def k_edge_components(G, k): ...
@_dispatch
def k_edge_subgraphs(G, k): ...
@_dispatch
def bridge_components(G) -> Generator[Incomplete, Incomplete, None]: ...
class EdgeComponentAuxGraph:

View File

@@ -1,9 +1,11 @@
from _typeshed import Incomplete
from networkx.algorithms.flow import edmonds_karp
from networkx.utils.backends import _dispatch
__all__ = ["k_components"]
default_flow_func = edmonds_karp
@_dispatch
def k_components(G, flow_func: Incomplete | None = None): ...

View File

@@ -2,9 +2,11 @@ from _typeshed import Incomplete
from collections.abc import Generator
from networkx.algorithms.flow import edmonds_karp
from networkx.utils.backends import _dispatch
__all__ = ["all_node_cuts"]
default_flow_func = edmonds_karp
@_dispatch
def all_node_cuts(G, k: Incomplete | None = None, flow_func: Incomplete | None = None) -> Generator[Incomplete, None, None]: ...

View File

@@ -1 +1,4 @@
from networkx.utils.backends import _dispatch
@_dispatch
def stoer_wagner(G, weight: str = "weight", heap=...): ...

View File

@@ -1,2 +1,6 @@
from networkx.utils.backends import _dispatch
@_dispatch
def build_auxiliary_node_connectivity(G): ...
@_dispatch
def build_auxiliary_edge_connectivity(G): ...

View File

@@ -1,9 +1,18 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def core_number(G): ...
@_dispatch
def k_core(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ...
@_dispatch
def k_shell(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ...
@_dispatch
def k_crust(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ...
@_dispatch
def k_corona(G, k, core_number: Incomplete | None = None): ...
@_dispatch
def k_truss(G, k): ...
@_dispatch
def onion_layers(G): ...

View File

@@ -1,4 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def min_edge_cover(G, matching_algorithm: Incomplete | None = None): ...
@_dispatch
def is_edge_cover(G, cover): ...

View File

@@ -1,10 +1,20 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def cut_size(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
@_dispatch
def volume(G, S, weight: Incomplete | None = None): ...
@_dispatch
def normalized_cut_size(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
@_dispatch
def conductance(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
@_dispatch
def edge_expansion(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
@_dispatch
def mixing_expansion(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
@_dispatch
def node_expansion(G, S): ...
@_dispatch
def boundary_expansion(G, S): ...

View File

@@ -1,7 +1,11 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.utils.backends import _dispatch
@_dispatch
def cycle_basis(G, root: Incomplete | None = None): ...
@_dispatch
def simple_cycles(G, length_bound: Incomplete | None = None) -> Generator[Incomplete, Incomplete, None]: ...
class _NeighborhoodCache(dict[Incomplete, Incomplete]):
@@ -9,7 +13,11 @@ class _NeighborhoodCache(dict[Incomplete, Incomplete]):
    def __init__(self, G) -> None: ...
    def __missing__(self, v): ...
@_dispatch
def chordless_cycles(G, length_bound: Incomplete | None = None) -> Generator[Incomplete, Incomplete, None]: ...
@_dispatch
def recursive_simple_cycles(G): ...
@_dispatch
def find_cycle(G, source: Incomplete | None = None, orientation: Incomplete | None = None): ...
@_dispatch
def minimum_cycle_basis(G, weight: Incomplete | None = None): ...

View File

@@ -1,3 +1,8 @@
from networkx.utils.backends import _dispatch
@_dispatch
def d_separated(G, x, y, z): ...
@_dispatch
def minimal_d_separator(G, u, v): ...
@_dispatch
def is_minimal_d_separator(G, u, v, z): ...

View File

@@ -2,21 +2,35 @@ from _typeshed import SupportsRichComparison
from collections.abc import Callable, Generator, Iterable, Reversible
from networkx.classes.graph import Graph, _Node
from networkx.utils.backends import _dispatch
@_dispatch
def descendants(G: Graph[_Node], source: _Node) -> set[_Node]: ...
@_dispatch
def ancestors(G: Graph[_Node], source: _Node) -> set[_Node]: ...
@_dispatch
def is_directed_acyclic_graph(G: Graph[_Node]) -> bool: ...
@_dispatch
def topological_sort(G: Graph[_Node]) -> Generator[_Node, None, None]: ...
@_dispatch
def lexicographical_topological_sort(
G: Graph[_Node], key: Callable[[_Node], SupportsRichComparison] | None = None
) -> Generator[_Node, None, None]: ...
@_dispatch
def all_topological_sorts(G: Graph[_Node]) -> Generator[list[_Node], None, None]: ...
@_dispatch
def is_aperiodic(G: Graph[_Node]) -> bool: ...
@_dispatch
def transitive_closure(G: Graph[_Node], reflexive: bool = False) -> Graph[_Node]: ...
@_dispatch
def transitive_reduction(G: Graph[_Node]) -> Graph[_Node]: ...
@_dispatch
def antichains(G: Graph[_Node], topo_order: Reversible[_Node] | None = None) -> Generator[list[_Node], None, None]: ...
@_dispatch
def dag_longest_path(
G: Graph[_Node], weight: str = "weight", default_weight: int = 1, topo_order: Iterable[_Node] | None = None
) -> list[_Node]: ...
@_dispatch
def dag_longest_path_length(G: Graph[_Node], weight: str = "weight", default_weight: int = 1) -> int: ...
@_dispatch
def dag_to_branching(G: Graph[_Node]) -> Graph[_Node]: ...

View File

@@ -1,9 +1,18 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def eccentricity(G, v: Incomplete | None = None, sp: Incomplete | None = None, weight: Incomplete | None = None): ...
@_dispatch
def diameter(G, e: Incomplete | None = None, usebounds: bool = False, weight: Incomplete | None = None): ...
@_dispatch
def periphery(G, e: Incomplete | None = None, usebounds: bool = False, weight: Incomplete | None = None): ...
@_dispatch
def radius(G, e: Incomplete | None = None, usebounds: bool = False, weight: Incomplete | None = None): ...
@_dispatch
def center(G, e: Incomplete | None = None, usebounds: bool = False, weight: Incomplete | None = None): ...
@_dispatch
def barycenter(G, weight: Incomplete | None = None, attr: Incomplete | None = None, sp: Incomplete | None = None): ...
@_dispatch
def resistance_distance(G, nodeA, nodeB, weight: Incomplete | None = None, invert_weight: bool = True): ...

View File

@@ -1,4 +1,10 @@
from networkx.utils.backends import _dispatch
@_dispatch
def is_distance_regular(G): ...
@_dispatch
def global_parameters(b, c): ...
@_dispatch
def intersection_array(G): ...
@_dispatch
def is_strongly_regular(G): ...

View File

@@ -1,2 +1,6 @@
from networkx.utils.backends import _dispatch
@_dispatch
def immediate_dominators(G, start): ...
@_dispatch
def dominance_frontiers(G, start): ...

View File

@@ -1,4 +1,8 @@
from _typeshed import Incomplete
from networkx.utils.backends import _dispatch
@_dispatch
def dominating_set(G, start_with: Incomplete | None = None): ...
@_dispatch
def is_dominating_set(G, nbunch): ...

View File

@@ -1,3 +1,8 @@
from networkx.utils.backends import _dispatch
@_dispatch
def efficiency(G, u, v): ...
@_dispatch
def global_efficiency(G): ...
@_dispatch
def local_efficiency(G): ...

Some files were not shown because too many files have changed in this diff.