CtrlK
BlogDocsLog inGet started
Tessl Logo

tessl/pypi-rustworkx

A high-performance graph library for Python implemented in Rust, providing fast graph algorithms and data structures for computational tasks

Pending
Overview
Eval results
Files

docs/centrality.md

Centrality Measures

Comprehensive collection of node and edge centrality algorithms for identifying important vertices and edges in networks. rustworkx provides efficient implementations of major centrality measures with parallel processing support for large graphs.

Capabilities

Betweenness Centrality

Measures the extent to which a node lies on paths between other nodes, identifying nodes that serve as bridges in the network.

def betweenness_centrality(graph, normalized: bool = True, endpoints: bool = False, parallel_threshold: int = 50) -> "CentralityMapping":
    """
    Compute betweenness centrality for all nodes.
    
    Betweenness centrality measures the fraction of all-pairs shortest
    paths that pass through each node.
    
    Parameters:
    - graph: Input graph (PyGraph or PyDiGraph)
    - normalized (bool): Normalize by number of node pairs
    - endpoints (bool): Include endpoints in path length calculation
    - parallel_threshold (int): Node count for parallel execution
    
    Returns:
    CentralityMapping: Dict-like mapping of node indices to betweenness
    centrality scores (same return type as the other centrality
    functions documented here)
    """

def edge_betweenness_centrality(graph, normalized: bool = True, parallel_threshold: int = 50):
    """
    Compute betweenness centrality for every edge in the graph.
    
    An edge's betweenness is the fraction of shortest paths, taken over
    all node pairs, that travel along that edge; high values flag
    bridge-like connections.
    
    Parameters:
    - graph: Input graph
    - normalized (bool): Normalize by number of node pairs
    - parallel_threshold (int): Node count for parallel execution
    
    Returns:
    EdgeCentralityMapping: Mapping of edge indices to centrality scores
    """

Closeness Centrality

Measures how close a node is to all other nodes in the graph based on shortest path distances.

def closeness_centrality(graph, wf_improved: bool = True, parallel_threshold: int = 50) -> "CentralityMapping":
    """
    Compute closeness centrality for all nodes.
    
    Closeness centrality is the reciprocal of the average shortest
    path distance to all other reachable nodes.
    
    Parameters:
    - graph: Input graph (PyGraph or PyDiGraph)
    - wf_improved (bool): Use Wasserman-Faust improved formula for disconnected graphs
    - parallel_threshold (int): Node count for parallel execution
    
    Returns:
    CentralityMapping: Dict-like mapping of node indices to closeness
    centrality scores (same return type as the other centrality
    functions documented here)
    """

def newman_weighted_closeness_centrality(graph, weight_fn = None, wf_improved: bool = True, default_weight: float = 1.0, parallel_threshold: int = 50):
    """
    Compute Newman-style closeness centrality on a weighted graph.
    
    Like closeness centrality, but edge weights are treated as
    connection strengths, with larger values meaning stronger ties.
    
    Parameters:
    - graph: Input graph (PyGraph or PyDiGraph)
    - weight_fn (callable, optional): Extracts a connection strength from edge data
    - wf_improved (bool): Use improved formula for disconnected components
    - default_weight (float): Default edge weight
    - parallel_threshold (int): Node count for parallel execution
    
    Returns:
    CentralityMapping: Mapping of node indices to weighted closeness scores
    """

Degree Centrality

Measures the fraction of nodes that a particular node is connected to, normalized by the maximum possible degree.

def degree_centrality(graph):
    """
    Compute degree centrality for every node.
    
    Each node's score is the fraction of the other nodes it is directly
    connected to, giving a simple measure of local connectivity.
    
    Parameters:
    - graph: Input graph (PyGraph or PyDiGraph)
    
    Returns:
    CentralityMapping: Mapping of node indices to degree centrality scores
    """

Eigenvector Centrality

Measures the influence of a node in the network based on the principle that connections to high-scoring nodes contribute more to the score.

def eigenvector_centrality(graph, weight_fn = None, default_weight: float = 1.0, max_iter: int = 100, tol: float = 1e-6):
    """
    Compute eigenvector centrality via the power iteration method.
    
    A node scores highly when its neighbours themselves score highly,
    so the measure captures recursive importance rather than raw degree.
    
    Parameters:
    - graph: Input graph (PyGraph or PyDiGraph)
    - weight_fn (callable, optional): Function to extract edge weight
    - default_weight (float): Default edge weight
    - max_iter (int): Maximum power-iteration steps before giving up
    - tol (float): Convergence tolerance
    
    Returns:
    CentralityMapping: Mapping of node indices to eigenvector centrality scores
    
    Note: Convergence is not guaranteed; FailedToConverge may be raised.
    """

Katz Centrality

Generalization of eigenvector centrality that includes a constant bias term, ensuring all nodes have some base importance.

def katz_centrality(graph, alpha: float = 0.1, beta = 1.0, weight_fn = None, default_weight: float = 1.0, max_iter: int = 100, tol: float = 1e-6):
    """
    Compute Katz centrality with an attenuation factor and bias term.
    
    Katz centrality generalizes eigenvector centrality: the bias term
    beta gives every node a base level of importance, while alpha
    controls how much influence propagates from neighbours.
    
    Parameters:
    - graph: Input graph (PyGraph or PyDiGraph)
    - alpha (float): Attenuation factor controlling recursive influence
    - beta (float or dict): Immediate neighborhood weights (bias term)
    - weight_fn (callable, optional): Function to extract edge weight
    - default_weight (float): Default edge weight
    - max_iter (int): Maximum iterations for convergence
    - tol (float): Convergence tolerance
    
    Returns:
    CentralityMapping: Mapping of node indices to Katz centrality scores
    
    Note: If beta is a dict, it must contain every node index as a key.
    """

Usage Examples

Basic Centrality Analysis

import rustworkx as rx

# Build a small 5-node sample network
graph = rx.PyGraph()
nodes = graph.add_nodes_from(['A', 'B', 'C', 'D', 'E'])
edge_pairs = [(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)]  # A-B, B-C, B-D, C-E, D-E
graph.add_edges_from([(nodes[a], nodes[b], 1) for a, b in edge_pairs])

# Compute the four basic centrality measures
betweenness = rx.betweenness_centrality(graph)
closeness = rx.closeness_centrality(graph)
degree = rx.degree_centrality(graph)
eigenvector = rx.eigenvector_centrality(graph)

# Report each measure alongside its label
for label, scores in [
    ("Betweenness centrality:", betweenness),
    ("Closeness centrality:", closeness),
    ("Degree centrality:", degree),
    ("Eigenvector centrality:", eigenvector),
]:
    print(label, scores)

Weighted Centrality Analysis

# Weighted graph analysis: edge weights represent connection strength
weighted_graph = rx.PyGraph()
# Use a distinct name here so we don't clobber the 5-node `nodes` list
# from the basic example, which the Katz example below still indexes
# up to nodes[4] (reusing `nodes` would make that raise IndexError).
w_nodes = weighted_graph.add_nodes_from(['Hub', 'A', 'B', 'C'])
weighted_graph.add_edges_from([
    (w_nodes[0], w_nodes[1], 0.8),  # Strong connection
    (w_nodes[0], w_nodes[2], 0.9),  # Very strong
    (w_nodes[0], w_nodes[3], 0.3),  # Weak connection
    (w_nodes[1], w_nodes[2], 0.1),  # Very weak
])

# Weighted closeness using Newman's method
weighted_closeness = rx.newman_weighted_closeness_centrality(
    weighted_graph,
    weight_fn=lambda x: x  # Use edge weight as connection strength
)

# Weighted eigenvector centrality
weighted_eigenvector = rx.eigenvector_centrality(
    weighted_graph,
    weight_fn=lambda x: x
)

print("Weighted closeness:", weighted_closeness)
print("Weighted eigenvector:", weighted_eigenvector)

Edge Centrality Analysis

# Identify critical (bridge-like) edges in the network
edge_centrality = rx.edge_betweenness_centrality(graph)

# Walk the edge list so each edge index can be shown with its endpoints
for i, (source, target) in enumerate(graph.edge_list()):
    if i not in edge_centrality:
        continue
    print(f"Edge {source}-{target}: {edge_centrality[i]:.3f}")

Katz Centrality with Custom Parameters

# Give each node its own base importance (the Katz beta bias term)
base_importance = [
    (nodes[0], 2.0),  # A has higher base importance
    (nodes[1], 1.5),  # B has moderate importance
    (nodes[2], 1.0),  # C has standard importance
    (nodes[3], 1.0),  # D has standard importance
    (nodes[4], 0.5),  # E has lower importance
]
node_importance = dict(base_importance)

katz_scores = rx.katz_centrality(
    graph,
    alpha=0.2,  # Higher attenuation factor
    beta=node_importance,  # Custom bias terms
    weight_fn=lambda x: 1.0
)

print("Katz centrality with custom bias:", katz_scores)

Centrality-Based Node Ranking

# Rank nodes by different centrality measures
import operator

def rank_nodes_by_centrality(centrality_scores, node_names):
    """Return (name, score) pairs sorted by descending centrality score."""
    ranked = sorted(
        centrality_scores.items(),
        key=lambda item: item[1],
        reverse=True,
    )
    return [(node_names[idx], score) for idx, score in ranked]

node_names = dict(enumerate(['A', 'B', 'C', 'D', 'E']))

# Print one ranking section per centrality measure
for heading, scores in (
    ("Ranking by betweenness:", betweenness),
    ("Ranking by degree:", degree),
):
    print(heading)
    for name, score in rank_nodes_by_centrality(scores, node_names):
        print(f"  {name}: {score:.3f}")

Parallel Processing for Large Graphs

import os

# Generate a 1000-node random graph to exercise the parallel code paths
large_graph = rx.generators.erdos_renyi_gnp_random_graph(1000, 0.01)

# Lower the threshold so the parallel implementation kicks in
centrality_parallel = rx.betweenness_centrality(
    large_graph,
    parallel_threshold=100  # Enable parallel processing
)

# Cap the worker pool before the next call
# NOTE(review): thread-pool env vars are typically read when the pool is
# first created — confirm this still takes effect after the call above.
os.environ['RAYON_NUM_THREADS'] = '4'  # Limit to 4 threads

centrality_limited = rx.closeness_centrality(
    large_graph,
    parallel_threshold=100
)

Handling Disconnected Graphs

# Build a graph with two disconnected components
disconnected = rx.PyGraph()

# Component 1: a three-node path A1-B1-C1
comp1_nodes = disconnected.add_nodes_from(['A1', 'B1', 'C1'])
disconnected.add_edges_from([
    (comp1_nodes[0], comp1_nodes[1], 1),
    (comp1_nodes[1], comp1_nodes[2], 1),
])

# Component 2: an isolated pair A2-B2
comp2_nodes = disconnected.add_nodes_from(['A2', 'B2'])
disconnected.add_edges_from([(comp2_nodes[0], comp2_nodes[1], 1)])

# Run both formulas so their handling of disconnection can be compared
by_formula = {}
for improved in (True, False):
    by_formula[improved] = rx.closeness_centrality(
        disconnected,
        wf_improved=improved,
    )

closeness_improved = by_formula[True]   # Wasserman-Faust improved formula
closeness_standard = by_formula[False]  # Standard formula

print("Improved formula:", closeness_improved)
print("Standard formula:", closeness_standard)

Install with Tessl CLI

npx tessl i tessl/pypi-rustworkx

docs

advanced-algorithms.md

analysis.md

centrality.md

core-classes.md

generators.md

index.md

layouts.md

shortest-paths.md

traversal.md

visualization.md

tile.json