Add networkx stubs (#10544)

Co-authored-by: Avasam <samuel.06@hotmail.com>
Co-authored-by: Audrey Dutcher <audrey@rhelmot.io>
Authored by Audrey Dutcher on 2023-12-14 19:52:04 -07:00, committed by GitHub
parent b6740d0bf4
commit 658dd55c41
276 changed files with 4237 additions and 0 deletions

View File

@@ -49,6 +49,7 @@
"stubs/mysqlclient",
"stubs/oauthlib",
"stubs/openpyxl",
"stubs/networkx",
"stubs/passlib",
"stubs/peewee",
"stubs/pexpect",

View File

@@ -0,0 +1,9 @@
version = "3.1"
upstream_repository = "https://github.com/networkx/networkx"
requires = ["numpy"]
partial_stub = true

[tool.stubtest]
ignore_missing_stub = true
# stub_uploader won't allow pandas-stubs in the requires field https://github.com/typeshed-internal/stub_uploader/issues/90
stubtest_requirements = ["pandas"]

View File

@@ -0,0 +1,24 @@
from networkx.algorithms import *
from networkx.classes import *
from networkx.classes import filters as filters
from networkx.convert import *
from networkx.convert_matrix import *
from networkx.drawing import *
from networkx.exception import *
from networkx.generators import *
from networkx.linalg import *
from networkx.readwrite import *
from networkx.relabel import *
from . import (
    algorithms as algorithms,
    classes as classes,
    convert as convert,
    convert_matrix as convert_matrix,
    drawing as drawing,
    generators as generators,
    linalg as linalg,
    readwrite as readwrite,
    relabel as relabel,
    utils as utils,
)
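A minimal sketch (not part of the diff) of what these re-exports give a type checker, assuming networkx 3.1 at runtime; subscripting Graph with a node type exists only in the stubs, so the annotation is deferred via the future import:

    from __future__ import annotations

    import networkx as nx

    G: nx.Graph[int] = nx.Graph()  # _Node parametrization is stubs-only
    G.add_edge(1, 2)
    boundary: set[int] = nx.node_boundary(G, {1})  # re-exported via networkx.algorithms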

View File

@@ -0,0 +1,135 @@
from networkx.algorithms import (
    approximation as approximation,
    assortativity as assortativity,
    bipartite as bipartite,
    centrality as centrality,
    chordal as chordal,
    clique as clique,
    cluster as cluster,
    coloring as coloring,
    community as community,
    components as components,
    connectivity as connectivity,
    flow as flow,
    isomorphism as isomorphism,
    link_analysis as link_analysis,
    lowest_common_ancestors as lowest_common_ancestors,
    node_classification as node_classification,
    operators as operators,
    shortest_paths as shortest_paths,
    tournament as tournament,
    traversal as traversal,
    tree as tree,
)
from networkx.algorithms.assortativity import *
from networkx.algorithms.asteroidal import *
from networkx.algorithms.bipartite import (
    complete_bipartite_graph as complete_bipartite_graph,
    is_bipartite as is_bipartite,
    projected_graph as projected_graph,
)
from networkx.algorithms.boundary import *
from networkx.algorithms.bridges import *
from networkx.algorithms.centrality import *
from networkx.algorithms.chains import *
from networkx.algorithms.chordal import *
from networkx.algorithms.clique import *
from networkx.algorithms.cluster import *
from networkx.algorithms.coloring import *
from networkx.algorithms.communicability_alg import *
from networkx.algorithms.components import *
from networkx.algorithms.connectivity import (
    all_node_cuts as all_node_cuts,
    all_pairs_node_connectivity as all_pairs_node_connectivity,
    average_node_connectivity as average_node_connectivity,
    edge_connectivity as edge_connectivity,
    edge_disjoint_paths as edge_disjoint_paths,
    is_k_edge_connected as is_k_edge_connected,
    k_components as k_components,
    k_edge_augmentation as k_edge_augmentation,
    k_edge_components as k_edge_components,
    k_edge_subgraphs as k_edge_subgraphs,
    minimum_edge_cut as minimum_edge_cut,
    minimum_node_cut as minimum_node_cut,
    node_connectivity as node_connectivity,
    node_disjoint_paths as node_disjoint_paths,
    stoer_wagner as stoer_wagner,
)
from networkx.algorithms.core import *
from networkx.algorithms.covering import *
from networkx.algorithms.cuts import *
from networkx.algorithms.cycles import *
from networkx.algorithms.d_separation import *
from networkx.algorithms.dag import *
from networkx.algorithms.distance_measures import *
from networkx.algorithms.distance_regular import *
from networkx.algorithms.dominance import *
from networkx.algorithms.dominating import *
from networkx.algorithms.efficiency_measures import *
from networkx.algorithms.euler import *
from networkx.algorithms.flow import (
    capacity_scaling as capacity_scaling,
    cost_of_flow as cost_of_flow,
    gomory_hu_tree as gomory_hu_tree,
    max_flow_min_cost as max_flow_min_cost,
    maximum_flow as maximum_flow,
    maximum_flow_value as maximum_flow_value,
    min_cost_flow as min_cost_flow,
    min_cost_flow_cost as min_cost_flow_cost,
    minimum_cut as minimum_cut,
    minimum_cut_value as minimum_cut_value,
    network_simplex as network_simplex,
)
from networkx.algorithms.graph_hashing import *
from networkx.algorithms.graphical import *
from networkx.algorithms.hierarchy import *
from networkx.algorithms.hybrid import *
from networkx.algorithms.isolate import *
from networkx.algorithms.isomorphism import (
    could_be_isomorphic as could_be_isomorphic,
    fast_could_be_isomorphic as fast_could_be_isomorphic,
    faster_could_be_isomorphic as faster_could_be_isomorphic,
    is_isomorphic as is_isomorphic,
)
from networkx.algorithms.isomorphism.vf2pp import *
from networkx.algorithms.link_analysis import *
from networkx.algorithms.link_prediction import *
from networkx.algorithms.lowest_common_ancestors import *
from networkx.algorithms.matching import *
from networkx.algorithms.minors import *
from networkx.algorithms.mis import *
from networkx.algorithms.moral import *
from networkx.algorithms.non_randomness import *
from networkx.algorithms.operators import *
from networkx.algorithms.planar_drawing import *
from networkx.algorithms.planarity import *
from networkx.algorithms.polynomials import *
from networkx.algorithms.reciprocity import *
from networkx.algorithms.regular import *
from networkx.algorithms.richclub import *
from networkx.algorithms.shortest_paths import *
from networkx.algorithms.similarity import *
from networkx.algorithms.simple_paths import *
from networkx.algorithms.smallworld import *
from networkx.algorithms.smetric import *
from networkx.algorithms.sparsifiers import *
from networkx.algorithms.structuralholes import *
from networkx.algorithms.summarization import *
from networkx.algorithms.swap import *
from networkx.algorithms.traversal import *
from networkx.algorithms.tree.branchings import (
    ArborescenceIterator as ArborescenceIterator,
    maximum_branching as maximum_branching,
    maximum_spanning_arborescence as maximum_spanning_arborescence,
    minimum_branching as minimum_branching,
    minimum_spanning_arborescence as minimum_spanning_arborescence,
)
from networkx.algorithms.tree.coding import *
from networkx.algorithms.tree.decomposition import *
from networkx.algorithms.tree.mst import *
from networkx.algorithms.tree.operations import *
from networkx.algorithms.tree.recognition import *
from networkx.algorithms.triads import *
from networkx.algorithms.vitality import *
from networkx.algorithms.voronoi import *
from networkx.algorithms.wiener import *

View File

@@ -0,0 +1,13 @@
from networkx.algorithms.approximation.clique import *
from networkx.algorithms.approximation.clustering_coefficient import *
from networkx.algorithms.approximation.connectivity import *
from networkx.algorithms.approximation.distance_measures import *
from networkx.algorithms.approximation.dominating_set import *
from networkx.algorithms.approximation.kcomponents import *
from networkx.algorithms.approximation.matching import *
from networkx.algorithms.approximation.maxcut import *
from networkx.algorithms.approximation.ramsey import *
from networkx.algorithms.approximation.steinertree import *
from networkx.algorithms.approximation.traveling_salesman import *
from networkx.algorithms.approximation.treewidth import *
from networkx.algorithms.approximation.vertex_cover import *

View File

@@ -0,0 +1,4 @@
def maximum_independent_set(G): ...
def max_clique(G): ...
def clique_removal(G): ...
def large_clique_size(G): ...

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def average_clustering(G, trials: int = 1000, seed: Incomplete | None = None): ...

View File

@@ -0,0 +1,5 @@
from _typeshed import Incomplete
def local_node_connectivity(G, source, target, cutoff: Incomplete | None = None): ...
def node_connectivity(G, s: Incomplete | None = None, t: Incomplete | None = None): ...
def all_pairs_node_connectivity(G, nbunch: Incomplete | None = None, cutoff: Incomplete | None = None): ...

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def diameter(G, seed: Incomplete | None = None): ...

View File

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
def min_weighted_dominating_set(G, weight: Incomplete | None = None): ...
def min_edge_dominating_set(G): ...

View File

@@ -0,0 +1 @@
def k_components(G, min_density: float = 0.95): ...

View File

@@ -0,0 +1 @@
def min_maximal_matching(G): ...

View File

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
def randomized_partitioning(G, seed: Incomplete | None = None, p: float = 0.5, weight: Incomplete | None = None): ...
def one_exchange(G, initial_cut: Incomplete | None = None, seed: Incomplete | None = None, weight: Incomplete | None = None): ...

View File

@@ -0,0 +1 @@
def ramsey_R2(G): ...

View File

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
def metric_closure(G, weight: str = "weight"): ...
def steiner_tree(G, terminal_nodes, weight: str = "weight", method: Incomplete | None = None): ...

View File

@@ -0,0 +1,33 @@
from _typeshed import Incomplete
def christofides(G, weight: str = "weight", tree: Incomplete | None = None): ...
def traveling_salesman_problem(
    G, weight: str = "weight", nodes: Incomplete | None = None, cycle: bool = True, method: Incomplete | None = None
): ...
def asadpour_atsp(G, weight: str = "weight", seed: Incomplete | None = None, source: Incomplete | None = None): ...
def greedy_tsp(G, weight: str = "weight", source: Incomplete | None = None): ...
def simulated_annealing_tsp(
    G,
    init_cycle,
    weight: str = "weight",
    source: Incomplete | None = None,
    # docstring says int, but it can be a float and does become a float mid-equation if alpha is also a float
    temp: float = 100,
    move: str = "1-1",
    max_iterations: int = 10,
    N_inner: int = 100,
    alpha: float = 0.01,
    seed: Incomplete | None = None,
): ...
def threshold_accepting_tsp(
    G,
    init_cycle,
    weight: str = "weight",
    source: Incomplete | None = None,
    threshold: float = 1,
    move: str = "1-1",
    max_iterations: int = 10,
    N_inner: int = 100,
    alpha: float = 0.1,
    seed: Incomplete | None = None,
): ...
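A hedged usage sketch for these signatures, assuming networkx 3.1 at runtime; the complete graph with |u - v| weights is toy data chosen to satisfy the metric requirement of christofides:

    import networkx as nx
    from networkx.algorithms import approximation as approx

    G = nx.complete_graph(5)
    for u, v in G.edges:
        G.edges[u, v]["weight"] = abs(u - v)  # absolute difference is a metric

    tour = approx.christofides(G)  # closed tour as a list of nodes
    cost = nx.path_weight(G, tour, weight="weight")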

View File

@@ -0,0 +1,11 @@
from _typeshed import Incomplete

__all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]

def treewidth_min_degree(G): ...
def treewidth_min_fill_in(G): ...

class MinDegreeHeuristic:
    count: Incomplete
    def __init__(self, graph) -> None: ...
    def best_node(self, graph): ...
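Both heuristics share the same return shape; a short sketch, assuming networkx 3.1:

    import networkx as nx
    from networkx.algorithms.approximation import treewidth_min_degree

    width, decomposition = treewidth_min_degree(nx.cycle_graph(5))
    # A cycle has treewidth 2; `decomposition` is a Graph whose nodes are frozenset bags.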

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def min_weighted_vertex_cover(G, weight: Incomplete | None = None): ...

View File

@@ -0,0 +1,5 @@
from networkx.algorithms.assortativity.connectivity import *
from networkx.algorithms.assortativity.correlation import *
from networkx.algorithms.assortativity.mixing import *
from networkx.algorithms.assortativity.neighbor_degree import *
from networkx.algorithms.assortativity.pairs import *

View File

@@ -0,0 +1,5 @@
from _typeshed import Incomplete
def average_degree_connectivity(
    G, source: str = "in+out", target: str = "in+out", nodes: Incomplete | None = None, weight: Incomplete | None = None
): ...

View File

@@ -0,0 +1,10 @@
from _typeshed import Incomplete
def degree_assortativity_coefficient(
    G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None
): ...
def degree_pearson_correlation_coefficient(
    G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None
): ...
def attribute_assortativity_coefficient(G, attribute, nodes: Incomplete | None = None): ...
def numeric_assortativity_coefficient(G, attribute, nodes: Incomplete | None = None): ...

View File

@@ -0,0 +1,19 @@
from _typeshed import Incomplete
def attribute_mixing_dict(G, attribute, nodes: Incomplete | None = None, normalized: bool = False): ...
def attribute_mixing_matrix(
    G, attribute, nodes: Incomplete | None = None, mapping: Incomplete | None = None, normalized: bool = True
): ...
def degree_mixing_dict(
    G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None, normalized: bool = False
): ...
def degree_mixing_matrix(
    G,
    x: str = "out",
    y: str = "in",
    weight: Incomplete | None = None,
    nodes: Incomplete | None = None,
    normalized: bool = True,
    mapping: Incomplete | None = None,
): ...
def mixing_dict(xy, normalized: bool = False): ...

View File

@@ -0,0 +1,5 @@
from _typeshed import Incomplete
def average_neighbor_degree(
    G, source: str = "out", target: str = "out", nodes: Incomplete | None = None, weight: Incomplete | None = None
): ...

View File

@@ -0,0 +1,7 @@
from _typeshed import Incomplete
from collections.abc import Generator
def node_attribute_xy(G, attribute, nodes: Incomplete | None = None) -> Generator[Incomplete, None, None]: ...
def node_degree_xy(
    G, x: str = "out", y: str = "in", weight: Incomplete | None = None, nodes: Incomplete | None = None
) -> Generator[Incomplete, None, None]: ...

View File

@@ -0,0 +1,2 @@
def find_asteroidal_triple(G): ...
def is_at_free(G): ...

View File

@@ -0,0 +1,11 @@
from networkx.algorithms.bipartite.basic import *
from networkx.algorithms.bipartite.centrality import *
from networkx.algorithms.bipartite.cluster import *
from networkx.algorithms.bipartite.covering import *
from networkx.algorithms.bipartite.edgelist import *
from networkx.algorithms.bipartite.generators import *
from networkx.algorithms.bipartite.matching import *
from networkx.algorithms.bipartite.matrix import *
from networkx.algorithms.bipartite.projection import *
from networkx.algorithms.bipartite.redundancy import *
from networkx.algorithms.bipartite.spectral import *

View File

@@ -0,0 +1,8 @@
from _typeshed import Incomplete
def color(G): ...
def is_bipartite(G): ...
def is_bipartite_node_set(G, nodes): ...
def sets(G, top_nodes: Incomplete | None = None): ...
def density(B, nodes): ...
def degrees(B, nodes, weight: Incomplete | None = None): ...

View File

@@ -0,0 +1,3 @@
def degree_centrality(G, nodes): ...
def betweenness_centrality(G, nodes): ...
def closeness_centrality(G, nodes, normalized: bool = True): ...

View File

@@ -0,0 +1,8 @@
from _typeshed import Incomplete
def latapy_clustering(G, nodes: Incomplete | None = None, mode: str = "dot"): ...
clustering = latapy_clustering
def average_clustering(G, nodes: Incomplete | None = None, mode: str = "dot"): ...
def robins_alexander_clustering(G): ...

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def min_edge_cover(G, matching_algorithm: Incomplete | None = None): ...

View File

@@ -0,0 +1,23 @@
from _typeshed import Incomplete
from collections.abc import Generator
def write_edgelist(G, path, comments: str = "#", delimiter: str = " ", data: bool = True, encoding: str = "utf-8") -> None: ...
def generate_edgelist(G, delimiter: str = " ", data: bool = True) -> Generator[Incomplete, None, None]: ...
def parse_edgelist(
    lines,
    comments: str = "#",
    delimiter: Incomplete | None = None,
    create_using: Incomplete | None = None,
    nodetype: Incomplete | None = None,
    data: bool = True,
): ...
def read_edgelist(
    path,
    comments: str = "#",
    delimiter: Incomplete | None = None,
    create_using: Incomplete | None = None,
    nodetype: Incomplete | None = None,
    data: bool = True,
    edgetype: Incomplete | None = None,
    encoding: str = "utf-8",
): ...

View File

@@ -0,0 +1,10 @@
from _typeshed import Incomplete
def complete_bipartite_graph(n1, n2, create_using: Incomplete | None = None): ...
def configuration_model(aseq, bseq, create_using: Incomplete | None = None, seed: Incomplete | None = None): ...
def havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ...
def reverse_havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ...
def alternating_havel_hakimi_graph(aseq, bseq, create_using: Incomplete | None = None): ...
def preferential_attachment_graph(aseq, p, create_using: Incomplete | None = None, seed: Incomplete | None = None): ...
def random_graph(n, m, p, seed: Incomplete | None = None, directed: bool = False): ...
def gnmk_random_graph(n, m, k, seed: Incomplete | None = None, directed: bool = False): ...
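A sketch of the shared generator contract, assuming networkx 3.1: each generator here labels nodes with a bipartite attribute of 0 or 1, which downstream bipartite functions use to recover the two sets:

    from networkx.algorithms import bipartite

    B = bipartite.random_graph(5, 7, 0.4, seed=42)
    top = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0}
    bottom = set(B) - top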

View File

@@ -0,0 +1,9 @@
from _typeshed import Incomplete
def hopcroft_karp_matching(G, top_nodes: Incomplete | None = None): ...
def eppstein_matching(G, top_nodes: Incomplete | None = None): ...
def to_vertex_cover(G, matching, top_nodes: Incomplete | None = None): ...
maximum_matching = hopcroft_karp_matching
def minimum_weight_full_matching(G, top_nodes: Incomplete | None = None, weight: str = "weight"): ...
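A usage sketch, assuming networkx 3.1; top_nodes is only required when the bipartition is ambiguous (e.g. disconnected graphs):

    import networkx as nx
    from networkx.algorithms import bipartite

    B = nx.complete_bipartite_graph(3, 3)  # nodes 0-2 versus 3-5
    matching = bipartite.hopcroft_karp_matching(B, top_nodes={0, 1, 2})
    # `matching` maps each matched vertex to its partner, in both directions.
    cover = bipartite.to_vertex_cover(B, matching, top_nodes={0, 1, 2})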

View File

@@ -0,0 +1,11 @@
from _typeshed import Incomplete
def biadjacency_matrix(
    G,
    row_order,
    column_order: Incomplete | None = None,
    dtype: Incomplete | None = None,
    weight: str = "weight",
    format: str = "csr",
): ...
def from_biadjacency_matrix(A, create_using: Incomplete | None = None, edge_attribute: str = "weight"): ...

View File

@@ -0,0 +1,7 @@
from _typeshed import Incomplete
def projected_graph(B, nodes, multigraph: bool = False): ...
def weighted_projected_graph(B, nodes, ratio: bool = False): ...
def collaboration_weighted_projected_graph(B, nodes): ...
def overlap_weighted_projected_graph(B, nodes, jaccard: bool = True): ...
def generic_weighted_projected_graph(B, nodes, weight_function: Incomplete | None = None): ...

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def node_redundancy(G, nodes: Incomplete | None = None): ...

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def spectral_bipartivity(G, nodes: Incomplete | None = None, weight: str = "weight"): ...

View File

@@ -0,0 +1,114 @@
from _typeshed import Incomplete
from collections.abc import Generator, Iterable
from typing import TypeVar, overload
from typing_extensions import Literal
from networkx.classes.graph import Graph, _Node

_U = TypeVar("_U")

@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None = None,
    data: Literal[False] = False,
    keys: Literal[False] = False,
    default=None,
) -> Generator[tuple[_Node, _Node], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None,
    data: Literal[True],
    keys: Literal[False] = False,
    default=None,
) -> Generator[tuple[_Node, _Node, dict[str, Incomplete]], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None = None,
    *,
    data: Literal[True],
    keys: Literal[False] = False,
    default=None,
) -> Generator[tuple[_Node, _Node, dict[str, Incomplete]], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None,
    data: str,
    keys: Literal[False] = False,
    default: _U | None = None,
) -> Generator[tuple[_Node, _Node, dict[str, _U]], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None = None,
    *,
    data: str,
    keys: Literal[False] = False,
    default: _U | None = None,
) -> Generator[tuple[_Node, _Node, dict[str, _U]], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None,
    data: Literal[False],
    keys: Literal[True],
    default=None,
) -> Generator[tuple[_Node, _Node, int], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None = None,
    data: Literal[False] = False,
    *,
    keys: Literal[True],
    default=None,
) -> Generator[tuple[_Node, _Node, int], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None,
    data: Literal[True],
    keys: Literal[True],
    default=None,
) -> Generator[tuple[_Node, _Node, int, dict[str, Incomplete]], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None = None,
    *,
    data: Literal[True],
    keys: Literal[True],
    default=None,
) -> Generator[tuple[_Node, _Node, int, dict[str, Incomplete]], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None,
    data: str,
    keys: Literal[True],
    default: _U | None = None,
) -> Generator[tuple[_Node, _Node, int, dict[str, _U]], None, None]: ...
@overload
def edge_boundary(
    G: Graph[_Node],
    nbunch1: Iterable[_Node],
    nbunch2: Iterable[_Node] | None = None,
    *,
    data: str,
    keys: Literal[True],
    default: _U | None = None,
) -> Generator[tuple[_Node, _Node, int, dict[str, _U]], None, None]: ...
def node_boundary(G: Graph[_Node], nbunch1: Iterable[_Node], nbunch2: Iterable[_Node] | None = None) -> set[_Node]: ...
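A sketch of how a type checker resolves these overloads (behavior per networkx 3.1; all of these are lazy generators):

    import networkx as nx

    G = nx.Graph([(0, 1), (1, 2), (2, 3)])
    pairs = nx.edge_boundary(G, {0, 1})                  # (u, v) tuples
    with_data = nx.edge_boundary(G, {0, 1}, data=True)   # plus the attribute dict
    weights = nx.edge_boundary(G, {0, 1}, data="weight", default=1.0)

    M = nx.MultiGraph(G)
    keyed = nx.edge_boundary(M, {0, 1}, keys=True)       # plus the multigraph edge key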

View File

@@ -0,0 +1,17 @@
from _typeshed import Incomplete
from collections.abc import Callable, Generator
from typing import overload
from typing_extensions import Literal
from networkx.classes.graph import Graph, _Node
def bridges(G: Graph[_Node], root: _Node | None = None) -> Generator[tuple[_Node, _Node], None, None]: ...
def has_bridges(G: Graph[_Node], root: Incomplete | None = None) -> bool: ...
@overload
def local_bridges(
    G: Graph[_Node], with_span: Literal[False], weight: str | Callable[[_Node], float] | None = None
) -> Generator[tuple[_Node, _Node], None, None]: ...
@overload
def local_bridges(
    G: Graph[_Node], with_span: Literal[True] = True, weight: str | Callable[[_Node], float] | None = None
) -> Generator[tuple[_Node, _Node, int], None, None]: ...
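A sketch of the two call shapes, assuming networkx 3.1; in a 6-cycle every edge is a local bridge with span 5:

    import networkx as nx

    G = nx.cycle_graph(6)
    for u, v, span in nx.local_bridges(G):  # with_span defaults to True
        print(u, v, span)
    for u, v in nx.local_bridges(G, with_span=False):  # endpoint pairs only
        print(u, v)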

View File

@@ -0,0 +1,20 @@
from .betweenness import *
from .betweenness_subset import *
from .closeness import *
from .current_flow_betweenness import *
from .current_flow_betweenness_subset import *
from .current_flow_closeness import *
from .degree_alg import *
from .dispersion import *
from .eigenvector import *
from .group import *
from .harmonic import *
from .katz import *
from .laplacian import *
from .load import *
from .percolation import *
from .reaching import *
from .second_order import *
from .subgraph_alg import *
from .trophic import *
from .voterank_alg import *

View File

@@ -0,0 +1,13 @@
from _typeshed import Incomplete
def betweenness_centrality(
    G,
    k: Incomplete | None = None,
    normalized: bool = True,
    weight: Incomplete | None = None,
    endpoints: bool = False,
    seed: Incomplete | None = None,
): ...
def edge_betweenness_centrality(
    G, k: Incomplete | None = None, normalized: bool = True, weight: Incomplete | None = None, seed: Incomplete | None = None
): ...

View File

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
def betweenness_centrality_subset(G, sources, targets, normalized: bool = False, weight: Incomplete | None = None): ...
def edge_betweenness_centrality_subset(G, sources, targets, normalized: bool = False, weight: Incomplete | None = None): ...

View File

@@ -0,0 +1,6 @@
from _typeshed import Incomplete
def closeness_centrality(G, u: Incomplete | None = None, distance: Incomplete | None = None, wf_improved: bool = True): ...
def incremental_closeness_centrality(
    G, edge, prev_cc: Incomplete | None = None, insertion: bool = True, wf_improved: bool = True
): ...

View File

@@ -0,0 +1,18 @@
from _typeshed import Incomplete
def approximate_current_flow_betweenness_centrality(
    G,
    normalized: bool = True,
    weight: Incomplete | None = None,
    dtype=...,
    solver: str = "full",
    epsilon: float = 0.5,
    kmax: int = 10000,
    seed: Incomplete | None = None,
): ...
def current_flow_betweenness_centrality(
    G, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "full"
): ...
def edge_current_flow_betweenness_centrality(
    G, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "full"
): ...

View File

@@ -0,0 +1,8 @@
from _typeshed import Incomplete
def current_flow_betweenness_centrality_subset(
    G, sources, targets, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "lu"
): ...
def edge_current_flow_betweenness_centrality_subset(
    G, sources, targets, normalized: bool = True, weight: Incomplete | None = None, dtype=..., solver: str = "lu"
): ...

View File

@@ -0,0 +1,5 @@
from _typeshed import Incomplete
def current_flow_closeness_centrality(G, weight: Incomplete | None = None, dtype=..., solver: str = "lu"): ...
information_centrality = current_flow_closeness_centrality

View File

@@ -0,0 +1,3 @@
def degree_centrality(G): ...
def in_degree_centrality(G): ...
def out_degree_centrality(G): ...

View File

@@ -0,0 +1,11 @@
from _typeshed import Incomplete
def dispersion(
    G,
    u: Incomplete | None = None,
    v: Incomplete | None = None,
    normalized: bool = True,
    alpha: float = 1.0,
    b: float = 0.0,
    c: float = 0.0,
): ...

View File

@@ -0,0 +1,6 @@
from _typeshed import Incomplete
def eigenvector_centrality(
    G, max_iter: int = 100, tol: float = 1e-06, nstart: Incomplete | None = None, weight: Incomplete | None = None
): ...
def eigenvector_centrality_numpy(G, weight: Incomplete | None = None, max_iter: int = 50, tol: float = 0): ...

View File

@@ -0,0 +1,36 @@
from _typeshed import Incomplete
from collections.abc import Generator
def flow_matrix_row(G, weight: Incomplete | None = None, dtype=..., solver: str = "lu") -> Generator[Incomplete, None, None]: ...

class InverseLaplacian:
    dtype: Incomplete
    n: Incomplete
    w: Incomplete
    C: Incomplete
    L1: Incomplete
    def __init__(self, L, width: Incomplete | None = None, dtype: Incomplete | None = None) -> None: ...
    def init_solver(self, L) -> None: ...
    def solve(self, r) -> None: ...
    def solve_inverse(self, r) -> None: ...
    def get_rows(self, r1, r2): ...
    def get_row(self, r): ...
    def width(self, L): ...

class FullInverseLaplacian(InverseLaplacian):
    IL: Incomplete
    def init_solver(self, L) -> None: ...
    def solve(self, rhs): ...
    def solve_inverse(self, r): ...

class SuperLUInverseLaplacian(InverseLaplacian):
    lusolve: Incomplete
    def init_solver(self, L) -> None: ...
    def solve_inverse(self, r): ...
    def solve(self, rhs): ...

class CGInverseLaplacian(InverseLaplacian):
    M: Incomplete
    def init_solver(self, L) -> None: ...
    def solve(self, rhs): ...
    def solve_inverse(self, r): ...

View File

@@ -0,0 +1,16 @@
from _typeshed import Incomplete
def group_betweenness_centrality(G, C, normalized: bool = True, weight: Incomplete | None = None, endpoints: bool = False): ...
def prominent_group(
    G,
    k,
    weight: Incomplete | None = None,
    C: Incomplete | None = None,
    endpoints: bool = False,
    normalized: bool = True,
    greedy: bool = False,
): ...
def group_closeness_centrality(G, S, weight: Incomplete | None = None): ...
def group_degree_centrality(G, S): ...
def group_in_degree_centrality(G, S): ...
def group_out_degree_centrality(G, S): ...

View File

@@ -0,0 +1,5 @@
from _typeshed import Incomplete
def harmonic_centrality(
    G, nbunch: Incomplete | None = None, distance: Incomplete | None = None, sources: Incomplete | None = None
): ...

View File

@@ -0,0 +1,15 @@
from _typeshed import Incomplete
def katz_centrality(
    G,
    alpha: float = 0.1,
    beta: float = 1.0,
    max_iter: int = 1000,
    tol: float = 1e-06,
    nstart: Incomplete | None = None,
    normalized: bool = True,
    weight: Incomplete | None = None,
): ...
def katz_centrality_numpy(
    G, alpha: float = 0.1, beta: float = 1.0, normalized: bool = True, weight: Incomplete | None = None
): ...

View File

@@ -0,0 +1,10 @@
from _typeshed import Incomplete
def laplacian_centrality(
    G,
    normalized: bool = True,
    nodelist: Incomplete | None = None,
    weight: str = "weight",
    walk_type: Incomplete | None = None,
    alpha: float = 0.95,
): ...

View File

@@ -0,0 +1,11 @@
from _typeshed import Incomplete

__all__ = ["load_centrality", "edge_load_centrality"]

def newman_betweenness_centrality(
    G, v: Incomplete | None = None, cutoff: Incomplete | None = None, normalized: bool = True, weight: Incomplete | None = None
): ...
load_centrality = newman_betweenness_centrality
def edge_load_centrality(G, cutoff: bool = False): ...

View File

@@ -0,0 +1,5 @@
from _typeshed import Incomplete
def percolation_centrality(
    G, attribute: str = "percolation", states: Incomplete | None = None, weight: Incomplete | None = None
): ...

View File

@@ -0,0 +1,6 @@
from _typeshed import Incomplete
def global_reaching_centrality(G, weight: Incomplete | None = None, normalized: bool = True): ...
def local_reaching_centrality(
    G, v, paths: Incomplete | None = None, weight: Incomplete | None = None, normalized: bool = True
): ...

View File

@@ -0,0 +1 @@
def second_order_centrality(G): ...

View File

@@ -0,0 +1,4 @@
def subgraph_centrality_exp(G): ...
def subgraph_centrality(G): ...
def communicability_betweenness_centrality(G): ...
def estrada_index(G): ...

View File

@@ -0,0 +1,3 @@
def trophic_levels(G, weight: str = "weight"): ...
def trophic_differences(G, weight: str = "weight"): ...
def trophic_incoherence_parameter(G, weight: str = "weight", cannibalism: bool = False): ...

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def voterank(G, number_of_nodes: Incomplete | None = None): ...

View File

@@ -0,0 +1,5 @@
from collections.abc import Generator
from networkx.classes.graph import Graph, _Node
def chain_decomposition(G: Graph[_Node], root: _Node | None = None) -> Generator[list[tuple[_Node, _Node]], None, None]: ...

View File

@@ -0,0 +1,12 @@
import sys
from collections.abc import Generator, Hashable
from networkx.classes.graph import Graph, _Node
from networkx.exception import NetworkXException

class NetworkXTreewidthBoundExceeded(NetworkXException): ...

def is_chordal(G: Graph[Hashable]) -> bool: ...
def find_induced_nodes(G: Graph[_Node], s: _Node, t: _Node, treewidth_bound: float = sys.maxsize) -> set[_Node]: ...
def chordal_graph_cliques(G: Graph[_Node]) -> Generator[frozenset[_Node], None, None]: ...
def chordal_graph_treewidth(G: Graph[Hashable]) -> int: ...

View File

@@ -0,0 +1,40 @@
from _typeshed import SupportsGetItem, Unused
from collections.abc import Container, Generator, Iterable, Iterator, Sized
from typing import overload
from networkx.classes.graph import Graph, _Node
def enumerate_all_cliques(G: Graph[_Node]) -> Generator[list[_Node], None, None]: ...
def find_cliques(G: Graph[_Node], nodes: SupportsGetItem[slice, _Node] | None = None) -> Generator[list[_Node], None, None]: ...
def find_cliques_recursive(G: Graph[_Node], nodes: SupportsGetItem[slice, _Node] | None = None) -> Iterator[list[_Node]]: ...
def make_max_clique_graph(G: Graph[_Node], create_using: type[Graph[_Node]] | None = None) -> Graph[_Node]: ...
def make_clique_bipartite(
    G: Graph[_Node], fpos: Unused = None, create_using: type[Graph[_Node]] | None = None, name: Unused = None
) -> Graph[_Node]: ...
def graph_clique_number(G: Graph[_Node], cliques: Iterable[_Node] | None = None) -> int: ...
def graph_number_of_cliques(G: Graph[_Node], cliques: Sized | None = None) -> int: ...
@overload
def node_clique_number(  # type: ignore[misc] # Incompatible return types
    G: Graph[_Node],
    nodes: Iterable[_Node] | None = None,
    cliques: Iterable[Iterable[_Node]] | None = None,
    separate_nodes: Unused = False,
) -> dict[_Node, int]: ...
@overload
def node_clique_number(
    G: Graph[_Node], nodes: _Node, cliques: Iterable[Sized] | None = None, separate_nodes: Unused = False
) -> int: ...
@overload
def number_of_cliques(  # type: ignore[misc] # Incompatible return types
    G: Graph[_Node], nodes: list[_Node] | None = None, cliques: Iterable[Container[_Node]] | None = None
) -> dict[_Node, int]: ...
@overload
def number_of_cliques(G: Graph[_Node], nodes: _Node, cliques: Iterable[Container[_Node]] | None = None) -> int: ...
@overload
def cliques_containing_node(  # type: ignore[misc] # Incompatible return types
    G: Graph[_Node], nodes: list[_Node] | None = None, cliques: Iterable[Container[_Node]] | None = None
) -> dict[_Node, list[_Node]]: ...
@overload
def cliques_containing_node(
    G: Graph[_Node], nodes: _Node, cliques: Iterable[Container[_Node]] | None = None
) -> Generator[list[_Node], None, None]: ...
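The overloads split on the nodes argument: omitting it yields a per-node dict, while a single node yields a scalar. A sketch, assuming networkx 3.1:

    import networkx as nx

    G = nx.complete_graph(4)
    sizes = nx.node_clique_number(G)               # dict[node, int]
    size_of_0 = nx.node_clique_number(G, nodes=0)  # int
    maximal = list(nx.find_cliques(G))             # [[0, 1, 2, 3]]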

View File

@@ -0,0 +1,8 @@
from _typeshed import Incomplete
def triangles(G, nodes: Incomplete | None = None): ...
def average_clustering(G, nodes: Incomplete | None = None, weight: Incomplete | None = None, count_zeros: bool = True): ...
def clustering(G, nodes: Incomplete | None = None, weight: Incomplete | None = None): ...
def transitivity(G): ...
def square_clustering(G, nodes: Incomplete | None = None): ...
def generalized_degree(G, nodes: Incomplete | None = None): ...

View File

@@ -0,0 +1,4 @@
from networkx.algorithms.coloring.equitable_coloring import equitable_color as equitable_color
from networkx.algorithms.coloring.greedy_coloring import *

__all__ = ["greedy_color", "equitable_color"]

View File

@@ -0,0 +1 @@
def equitable_color(G, num_colors): ...

View File

@@ -0,0 +1,43 @@
from _typeshed import Incomplete
from collections.abc import Generator

__all__ = [
    "greedy_color",
    "strategy_connected_sequential",
    "strategy_connected_sequential_bfs",
    "strategy_connected_sequential_dfs",
    "strategy_independent_set",
    "strategy_largest_first",
    "strategy_random_sequential",
    "strategy_saturation_largest_first",
    "strategy_smallest_last",
]

def strategy_largest_first(G, colors): ...
def strategy_random_sequential(G, colors, seed: Incomplete | None = None): ...
def strategy_smallest_last(G, colors): ...
def strategy_independent_set(G, colors) -> Generator[Incomplete, Incomplete, None]: ...
def strategy_connected_sequential_bfs(G, colors): ...
def strategy_connected_sequential_dfs(G, colors): ...
def strategy_connected_sequential(G, colors, traversal: str = "bfs") -> Generator[Incomplete, None, None]: ...
def strategy_saturation_largest_first(G, colors) -> Generator[Incomplete, None, Incomplete]: ...
def greedy_color(G, strategy: str = "largest_first", interchange: bool = False): ...

class _Node:
    node_id: Incomplete
    color: int
    adj_list: Incomplete
    adj_color: Incomplete
    def __init__(self, node_id, n) -> None: ...
    def assign_color(self, adj_entry, color) -> None: ...
    def clear_color(self, adj_entry, color) -> None: ...
    def iter_neighbors(self) -> Generator[Incomplete, None, None]: ...
    def iter_neighbors_color(self, color) -> Generator[Incomplete, None, None]: ...

class _AdjEntry:
    node_id: Incomplete
    next: Incomplete
    mate: Incomplete
    col_next: Incomplete
    col_prev: Incomplete
    def __init__(self, node_id) -> None: ...
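A usage sketch, assuming networkx 3.1; the strategy names accepted by greedy_color correspond to the strategy_* functions above:

    import networkx as nx

    G = nx.petersen_graph()
    colors = nx.greedy_color(G, strategy="largest_first")  # dict: node -> color index
    colors_used = max(colors.values()) + 1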

View File

@@ -0,0 +1,2 @@
def communicability(G): ...
def communicability_exp(G): ...

View File

@@ -0,0 +1,10 @@
from networkx.algorithms.community.asyn_fluid import *
from networkx.algorithms.community.centrality import *
from networkx.algorithms.community.community_utils import *
from networkx.algorithms.community.kclique import *
from networkx.algorithms.community.kernighan_lin import *
from networkx.algorithms.community.label_propagation import *
from networkx.algorithms.community.louvain import *
from networkx.algorithms.community.lukes import *
from networkx.algorithms.community.modularity_max import *
from networkx.algorithms.community.quality import *

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def asyn_fluidc(G, k, max_iter: int = 100, seed: Incomplete | None = None): ...

View File

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
from collections.abc import Generator
def girvan_newman(G, most_valuable_edge: Incomplete | None = None) -> Generator[Incomplete, None, Incomplete]: ...

View File

@@ -0,0 +1 @@
def is_partition(G, communities): ...

View File

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
from collections.abc import Generator
def k_clique_communities(G, k, cliques: Incomplete | None = None) -> Generator[Incomplete, None, None]: ...

View File

@@ -0,0 +1,5 @@
from _typeshed import Incomplete
def kernighan_lin_bisection(
    G, partition: Incomplete | None = None, max_iter: int = 10, weight: str = "weight", seed: Incomplete | None = None
): ...

View File

@@ -0,0 +1,7 @@
from _typeshed import Incomplete
from collections.abc import Generator
def asyn_lpa_communities(
    G, weight: Incomplete | None = None, seed: Incomplete | None = None
) -> Generator[Incomplete, Incomplete, None]: ...
def label_propagation_communities(G): ...

View File

@@ -0,0 +1,9 @@
from _typeshed import Incomplete
from collections.abc import Generator
def louvain_communities(
    G, weight: str = "weight", resolution: float = 1, threshold: float = 1e-07, seed: Incomplete | None = None
): ...
def louvain_partitions(
    G, weight: str = "weight", resolution: float = 1, threshold: float = 1e-07, seed: Incomplete | None = None
) -> Generator[Incomplete, None, None]: ...
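A sketch, assuming networkx 3.1: louvain_communities returns a single partition as a list of node sets, while louvain_partitions yields one partition per aggregation level:

    import networkx as nx

    G = nx.karate_club_graph()
    partition = nx.community.louvain_communities(G, seed=1)    # list of sets
    levels = list(nx.community.louvain_partitions(G, seed=1))  # one partition per level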

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def lukes_partitioning(G, max_size, node_weight: Incomplete | None = None, edge_weight: Incomplete | None = None): ...

View File

@@ -0,0 +1,6 @@
from _typeshed import Incomplete
def greedy_modularity_communities(
    G, weight: Incomplete | None = None, resolution: float = 1, cutoff: int = 1, best_n: Incomplete | None = None
): ...
def naive_greedy_modularity_communities(G, resolution: float = 1, weight: Incomplete | None = None): ...

View File

@@ -0,0 +1,9 @@
from networkx.exception import NetworkXError

__all__ = ["modularity", "partition_quality"]

class NotAPartition(NetworkXError):
    def __init__(self, G, collection) -> None: ...

def modularity(G, communities, weight: str = "weight", resolution: float = 1): ...
def partition_quality(G, partition): ...

View File

@@ -0,0 +1,6 @@
from .attracting import *
from .biconnected import *
from .connected import *
from .semiconnected import *
from .strongly_connected import *
from .weakly_connected import *

View File

@@ -0,0 +1,6 @@
from _typeshed import Incomplete
from collections.abc import Generator
def attracting_components(G) -> Generator[Incomplete, None, None]: ...
def number_attracting_components(G): ...
def is_attracting_component(G): ...

View File

@@ -0,0 +1,7 @@
from _typeshed import Incomplete
from collections.abc import Generator
def is_biconnected(G): ...
def biconnected_component_edges(G) -> Generator[Incomplete, Incomplete, None]: ...
def biconnected_components(G) -> Generator[Incomplete, None, None]: ...
def articulation_points(G) -> Generator[Incomplete, None, None]: ...

View File

@@ -0,0 +1,7 @@
from _typeshed import Incomplete
from collections.abc import Generator
def connected_components(G) -> Generator[Incomplete, None, None]: ...
def number_connected_components(G): ...
def is_connected(G): ...
def node_connected_component(G, n): ...

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def is_semiconnected(G, topo_order: Incomplete | None = None): ...

View File

@@ -0,0 +1,11 @@
from collections.abc import Generator, Hashable, Iterable
from networkx.classes.digraph import DiGraph
from networkx.classes.graph import Graph, _Node
def strongly_connected_components(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ...
def kosaraju_strongly_connected_components(G: Graph[_Node], source: _Node | None = None) -> Generator[set[_Node], None, None]: ...
def strongly_connected_components_recursive(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ...
def number_strongly_connected_components(G: Graph[Hashable]) -> int: ...
def is_strongly_connected(G: Graph[Hashable]) -> bool: ...
def condensation(G: DiGraph[_Node], scc: Iterable[Iterable[_Node]] | None = None) -> DiGraph[int]: ...
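A sketch of condensation's contract, assuming networkx 3.1: each strongly connected component collapses to one integer node, with the originals recorded in a members node attribute and a node-to-component mapping graph attribute:

    import networkx as nx

    D = nx.DiGraph([(1, 2), (2, 1), (2, 3)])
    sccs = list(nx.strongly_connected_components(D))  # the sets {1, 2} and {3}
    C = nx.condensation(D)  # a DAG over component ids
    members = C.nodes[C.graph["mapping"][1]]["members"]  # {1, 2}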

View File

@@ -0,0 +1,7 @@
from collections.abc import Generator, Hashable
from networkx.classes.graph import Graph, _Node
def weakly_connected_components(G: Graph[_Node]) -> Generator[set[_Node], None, None]: ...
def number_weakly_connected_components(G: Graph[Hashable]) -> int: ...
def is_weakly_connected(G: Graph[Hashable]) -> bool: ...

View File

@@ -0,0 +1,9 @@
from .connectivity import *
from .cuts import *
from .disjoint_paths import *
from .edge_augmentation import *
from .edge_kcomponents import *
from .kcomponents import *
from .kcutsets import *
from .stoerwagner import *
from .utils import *

View File

@@ -0,0 +1,43 @@
from _typeshed import Incomplete
from networkx.algorithms.flow import edmonds_karp

__all__ = [
    "average_node_connectivity",
    "local_node_connectivity",
    "node_connectivity",
    "local_edge_connectivity",
    "edge_connectivity",
    "all_pairs_node_connectivity",
]

default_flow_func = edmonds_karp

def local_node_connectivity(
    G,
    s,
    t,
    flow_func: Incomplete | None = None,
    auxiliary: Incomplete | None = None,
    residual: Incomplete | None = None,
    cutoff: Incomplete | None = None,
): ...
def node_connectivity(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ...
def average_node_connectivity(G, flow_func: Incomplete | None = None): ...
def all_pairs_node_connectivity(G, nbunch: Incomplete | None = None, flow_func: Incomplete | None = None): ...
def local_edge_connectivity(
    G,
    s,
    t,
    flow_func: Incomplete | None = None,
    auxiliary: Incomplete | None = None,
    residual: Incomplete | None = None,
    cutoff: Incomplete | None = None,
): ...
def edge_connectivity(
    G,
    s: Incomplete | None = None,
    t: Incomplete | None = None,
    flow_func: Incomplete | None = None,
    cutoff: Incomplete | None = None,
): ...

View File

@@ -0,0 +1,16 @@
from _typeshed import Incomplete
from networkx.algorithms.flow import edmonds_karp

__all__ = ["minimum_st_node_cut", "minimum_node_cut", "minimum_st_edge_cut", "minimum_edge_cut"]

default_flow_func = edmonds_karp

def minimum_st_edge_cut(
    G, s, t, flow_func: Incomplete | None = None, auxiliary: Incomplete | None = None, residual: Incomplete | None = None
): ...
def minimum_st_node_cut(
    G, s, t, flow_func: Incomplete | None = None, auxiliary: Incomplete | None = None, residual: Incomplete | None = None
): ...
def minimum_node_cut(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ...
def minimum_edge_cut(G, s: Incomplete | None = None, t: Incomplete | None = None, flow_func: Incomplete | None = None): ...

View File

@@ -0,0 +1,27 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.algorithms.flow import edmonds_karp

__all__ = ["edge_disjoint_paths", "node_disjoint_paths"]

default_flow_func = edmonds_karp

def edge_disjoint_paths(
    G,
    s,
    t,
    flow_func: Incomplete | None = None,
    cutoff: Incomplete | None = None,
    auxiliary: Incomplete | None = None,
    residual: Incomplete | None = None,
) -> Generator[Incomplete, None, None]: ...
def node_disjoint_paths(
    G,
    s,
    t,
    flow_func: Incomplete | None = None,
    cutoff: Incomplete | None = None,
    auxiliary: Incomplete | None = None,
    residual: Incomplete | None = None,
) -> Generator[Incomplete, None, None]: ...

View File

@@ -0,0 +1,13 @@
from collections.abc import Generator, Hashable
from networkx.classes.graph import Graph, _Node
def is_k_edge_connected(G: Graph[Hashable], k: int): ...
def is_locally_k_edge_connected(G, s, t, k): ...
def k_edge_augmentation(
    G: Graph[_Node],
    k: int,
    avail: tuple[_Node, _Node] | tuple[_Node, _Node, dict[str, int]] | None = None,
    weight: str | None = None,
    partial: bool = False,
) -> Generator[tuple[_Node, _Node], None, None]: ...
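A usage sketch, assuming networkx 3.1: the generator yields exactly the edges whose addition makes G k-edge-connected:

    import networkx as nx

    G = nx.path_graph(4)  # only 1-edge-connected
    extra = list(nx.k_edge_augmentation(G, k=2))
    G.add_edges_from(extra)
    assert nx.edge_connectivity(G) >= 2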

View File

@@ -0,0 +1,14 @@
from _typeshed import Incomplete
from collections.abc import Generator
def k_edge_components(G, k): ...
def k_edge_subgraphs(G, k): ...
def bridge_components(G) -> Generator[Incomplete, Incomplete, None]: ...

class EdgeComponentAuxGraph:
    A: Incomplete
    H: Incomplete
    @classmethod
    def construct(cls, G): ...
    def k_edge_components(self, k) -> Generator[Incomplete, Incomplete, None]: ...
    def k_edge_subgraphs(self, k) -> Generator[Incomplete, Incomplete, None]: ...

View File

@@ -0,0 +1,9 @@
from _typeshed import Incomplete
from networkx.algorithms.flow import edmonds_karp

__all__ = ["k_components"]

default_flow_func = edmonds_karp

def k_components(G, flow_func: Incomplete | None = None): ...

View File

@@ -0,0 +1,10 @@
from _typeshed import Incomplete
from collections.abc import Generator
from networkx.algorithms.flow import edmonds_karp

__all__ = ["all_node_cuts"]

default_flow_func = edmonds_karp

def all_node_cuts(G, k: Incomplete | None = None, flow_func: Incomplete | None = None) -> Generator[Incomplete, None, None]: ...

View File

@@ -0,0 +1 @@
def stoer_wagner(G, weight: str = "weight", heap=...): ...

View File

@@ -0,0 +1,2 @@
def build_auxiliary_node_connectivity(G): ...
def build_auxiliary_edge_connectivity(G): ...

View File

@@ -0,0 +1,9 @@
from _typeshed import Incomplete
def core_number(G): ...
def k_core(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ...
def k_shell(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ...
def k_crust(G, k: Incomplete | None = None, core_number: Incomplete | None = None): ...
def k_corona(G, k, core_number: Incomplete | None = None): ...
def k_truss(G, k): ...
def onion_layers(G): ...
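A sketch, assuming networkx 3.1; a triangle with a pendant node makes the core structure visible:

    import networkx as nx

    G = nx.Graph([(0, 1), (1, 2), (2, 0), (2, 3)])
    cores = nx.core_number(G)  # {0: 2, 1: 2, 2: 2, 3: 1}
    K2 = nx.k_core(G, k=2)     # the 0-1-2 triangle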

View File

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
def min_edge_cover(G, matching_algorithm: Incomplete | None = None): ...
def is_edge_cover(G, cover): ...

View File

@@ -0,0 +1,10 @@
from _typeshed import Incomplete
def cut_size(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
def volume(G, S, weight: Incomplete | None = None): ...
def normalized_cut_size(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
def conductance(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
def edge_expansion(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
def mixing_expansion(G, S, T: Incomplete | None = None, weight: Incomplete | None = None): ...
def node_expansion(G, S): ...
def boundary_expansion(G, S): ...
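A sketch, assuming networkx 3.1; a barbell graph (two triangles joined by a single edge) gives an obvious sparse cut:

    import networkx as nx

    G = nx.barbell_graph(3, 0)
    S = {0, 1, 2}
    assert nx.cut_size(G, S) == 1  # the single joining edge
    ratio = nx.conductance(G, S)   # cut / min(volume(S), volume(T)) = 1/7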

Some files were not shown because too many files have changed in this diff.