2025-07-01
@@ -0,0 +1,37 @@
NetworkX is distributed with the 3-clause BSD license.

::

   Copyright (C) 2004-2024, NetworkX Developers
   Aric Hagberg <hagberg@lanl.gov>
   Dan Schult <dschult@colgate.edu>
   Pieter Swart <swart@lanl.gov>
   All rights reserved.

   Redistribution and use in source and binary forms, with or without
   modification, are permitted provided that the following conditions are
   met:

     * Redistributions of source code must retain the above copyright
       notice, this list of conditions and the following disclaimer.

     * Redistributions in binary form must reproduce the above
       copyright notice, this list of conditions and the following
       disclaimer in the documentation and/or other materials provided
       with the distribution.

     * Neither the name of the NetworkX Developers nor the names of its
       contributors may be used to endorse or promote products derived
       from this software without specific prior written permission.

   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
   "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,165 @@
Metadata-Version: 2.1
Name: networkx
Version: 3.4.2
Summary: Python package for creating and manipulating graphs and networks
Author-email: Aric Hagberg <hagberg@lanl.gov>
Maintainer-email: NetworkX Developers <networkx-discuss@googlegroups.com>
Project-URL: Homepage, https://networkx.org/
Project-URL: Bug Tracker, https://github.com/networkx/networkx/issues
Project-URL: Documentation, https://networkx.org/documentation/stable/
Project-URL: Source Code, https://github.com/networkx/networkx
Keywords: Networks,Graph Theory,Mathematics,network,graph,discrete mathematics,math
Platform: Linux
Platform: Mac OSX
Platform: Windows
Platform: Unix
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Science/Research
Classifier: License :: OSI Approved :: BSD License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: Scientific/Engineering :: Bio-Informatics
Classifier: Topic :: Scientific/Engineering :: Information Analysis
Classifier: Topic :: Scientific/Engineering :: Mathematics
Classifier: Topic :: Scientific/Engineering :: Physics
Requires-Python: >=3.10
Description-Content-Type: text/x-rst
License-File: LICENSE.txt
Provides-Extra: default
Requires-Dist: numpy >=1.24 ; extra == 'default'
Requires-Dist: scipy !=1.11.0,!=1.11.1,>=1.10 ; extra == 'default'
Requires-Dist: matplotlib >=3.7 ; extra == 'default'
Requires-Dist: pandas >=2.0 ; extra == 'default'
Provides-Extra: developer
Requires-Dist: changelist ==0.5 ; extra == 'developer'
Requires-Dist: pre-commit >=3.2 ; extra == 'developer'
Requires-Dist: mypy >=1.1 ; extra == 'developer'
Requires-Dist: rtoml ; extra == 'developer'
Provides-Extra: doc
Requires-Dist: sphinx >=7.3 ; extra == 'doc'
Requires-Dist: pydata-sphinx-theme >=0.15 ; extra == 'doc'
Requires-Dist: sphinx-gallery >=0.16 ; extra == 'doc'
Requires-Dist: numpydoc >=1.8.0 ; extra == 'doc'
Requires-Dist: pillow >=9.4 ; extra == 'doc'
Requires-Dist: texext >=0.6.7 ; extra == 'doc'
Requires-Dist: myst-nb >=1.1 ; extra == 'doc'
Requires-Dist: intersphinx-registry ; extra == 'doc'
Provides-Extra: example
Requires-Dist: osmnx >=1.9 ; extra == 'example'
Requires-Dist: momepy >=0.7.2 ; extra == 'example'
Requires-Dist: contextily >=1.6 ; extra == 'example'
Requires-Dist: seaborn >=0.13 ; extra == 'example'
Requires-Dist: cairocffi >=1.7 ; extra == 'example'
Requires-Dist: igraph >=0.11 ; extra == 'example'
Requires-Dist: scikit-learn >=1.5 ; extra == 'example'
Provides-Extra: extra
Requires-Dist: lxml >=4.6 ; extra == 'extra'
Requires-Dist: pygraphviz >=1.14 ; extra == 'extra'
Requires-Dist: pydot >=3.0.1 ; extra == 'extra'
Requires-Dist: sympy >=1.10 ; extra == 'extra'
Provides-Extra: test
Requires-Dist: pytest >=7.2 ; extra == 'test'
Requires-Dist: pytest-cov >=4.0 ; extra == 'test'

NetworkX
========

.. image:: https://github.com/networkx/networkx/workflows/test/badge.svg?branch=main
   :target: https://github.com/networkx/networkx/actions?query=workflow%3Atest

.. image:: https://codecov.io/gh/networkx/networkx/branch/main/graph/badge.svg?
   :target: https://app.codecov.io/gh/networkx/networkx/branch/main

.. image:: https://img.shields.io/pypi/v/networkx.svg?
   :target: https://pypi.python.org/pypi/networkx

.. image:: https://img.shields.io/pypi/l/networkx.svg?
   :target: https://github.com/networkx/networkx/blob/main/LICENSE.txt

.. image:: https://img.shields.io/pypi/pyversions/networkx.svg?
   :target: https://pypi.python.org/pypi/networkx

.. image:: https://img.shields.io/github/labels/networkx/networkx/good%20first%20issue?color=green&label=contribute
   :target: https://github.com/networkx/networkx/contribute

NetworkX is a Python package for the creation, manipulation,
and study of the structure, dynamics, and functions
of complex networks.

- **Website (including documentation):** https://networkx.org
- **Mailing list:** https://groups.google.com/forum/#!forum/networkx-discuss
- **Source:** https://github.com/networkx/networkx
- **Bug reports:** https://github.com/networkx/networkx/issues
- **Report a security vulnerability:** https://tidelift.com/security
- **Tutorial:** https://networkx.org/documentation/latest/tutorial.html
- **GitHub Discussions:** https://github.com/networkx/networkx/discussions
- **Discord (Scientific Python) invite link:** https://discord.com/invite/vur45CbwMz
- **NetworkX meetings calendar (open to all):** https://scientific-python.org/calendars/networkx.ics

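As a minimal sketch of that idea, a graph can be built edge by edge and its basic
structure inspected directly (``connected_components`` and the other query helpers
live in the top-level ``networkx`` namespace):

.. code:: pycon

    >>> import networkx as nx
    >>> G = nx.Graph()
    >>> G.add_edges_from([(1, 2), (2, 3), (4, 5)])
    >>> sorted(G.nodes)
    [1, 2, 3, 4, 5]
    >>> G.number_of_edges()
    3
    >>> # two connected components: {1, 2, 3} and {4, 5}
    >>> [len(c) for c in nx.connected_components(G)]
    [3, 2]
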
Simple example
--------------

Find the shortest path between two nodes in an undirected graph:

.. code:: pycon

    >>> import networkx as nx
    >>> G = nx.Graph()
    >>> G.add_edge("A", "B", weight=4)
    >>> G.add_edge("B", "D", weight=2)
    >>> G.add_edge("A", "C", weight=3)
    >>> G.add_edge("C", "D", weight=4)
    >>> nx.shortest_path(G, "A", "D", weight="weight")
    ['A', 'B', 'D']

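The total weight of that path can be read off the same graph in one call
(here 4 + 2 = 6):

.. code:: pycon

    >>> nx.shortest_path_length(G, "A", "D", weight="weight")
    6
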
Install
-------

Install the latest released version of NetworkX:

.. code:: shell

    $ pip install networkx

Install with all optional dependencies:

.. code:: shell

    $ pip install networkx[default]

For additional details,
please see the `installation guide <https://networkx.org/documentation/stable/install.html>`_.

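The other extras declared in the package metadata (such as ``extra`` and ``test``)
can be combined in a single install if needed, for example:

.. code:: shell

    $ pip install "networkx[default,extra]"
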
Bugs
----

Please report any bugs that you find `here <https://github.com/networkx/networkx/issues>`_.
Or, even better, fork the repository on `GitHub <https://github.com/networkx/networkx>`_
and create a pull request (PR). We welcome all changes, big or small, and we
will help you make the PR if you are new to `git` (just ask on the issue and/or
see the `contributor guide <https://networkx.org/documentation/latest/developer/contribute.html>`_).

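One rough sketch of that workflow, assuming a fork under your own GitHub account
(``<your-username>`` is a placeholder), looks like:

.. code:: shell

    $ git clone https://github.com/<your-username>/networkx.git
    $ cd networkx
    $ git checkout -b fix-some-bug
    # make changes and commit them, then publish the branch
    $ git push origin fix-some-bug

and then open a pull request against ``networkx/networkx`` on GitHub.
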
License
-------

Released under the `3-Clause BSD license <https://github.com/networkx/networkx/blob/main/LICENSE.txt>`_::

   Copyright (C) 2004-2024 NetworkX Developers
   Aric Hagberg <hagberg@lanl.gov>
   Dan Schult <dschult@colgate.edu>
   Pieter Swart <swart@lanl.gov>

@@ -0,0 +1,582 @@
|
||||
networkx/__init__.py,sha256=vV-bYyml9JK5OV8Ic_dctL5ZGR5NqwF4fzd4msR2b9U,1274
|
||||
networkx/conftest.py,sha256=0wpXc9prGYLSw5gG-VvummltLxH5RtKeGwrxFsChe4E,8883
|
||||
networkx/convert.py,sha256=yB_MTl3GEvNb3CgDcBiCrhIN4LlV5N_BN9A0ykhBr7E,16025
|
||||
networkx/convert_matrix.py,sha256=7kc66-0XFGQUox3fVZuapUV4qCprg92ECe9BMjfSpCE,45383
|
||||
networkx/exception.py,sha256=hC8efPfIzOFo0jiWiQbTPaNKuNTuUwhp9RPw--pdv4U,3787
|
||||
networkx/lazy_imports.py,sha256=tYxP13tZ3p8-Qh--Mey4ZXZqQhWgQAbI7xYBZRrBzw0,5764
|
||||
networkx/relabel.py,sha256=0HptAQOBToKhLZzxscd6FQpzVCNMlYmiHjHul69ct8o,10300
|
||||
networkx/algorithms/__init__.py,sha256=oij1HDNcE7GhTPAtuHYT8eGZdH4K_vYaha51X5XoUCY,6559
|
||||
networkx/algorithms/asteroidal.py,sha256=jbN_MmETkCGpSvUWW6W8_Qqa3Syay2BwkX9odcyQFfk,5865
|
||||
networkx/algorithms/boundary.py,sha256=q3JtWssmn9yCB2mBdkjKZjkaxmBhkG9_dJOzmuJiQos,5339
|
||||
networkx/algorithms/bridges.py,sha256=CsxueHDOB9aFM5D8GP83u1ZKGzxF193XBpvmMReAcQk,6066
|
||||
networkx/algorithms/broadcasting.py,sha256=eqqZJ7oDQVCl7P3-PLm-gthzSc-kWnF2D1Yv42GXoGk,4890
|
||||
networkx/algorithms/chains.py,sha256=PPiSq5-GsT1Lsf8fwtGwGDVf1hhv5ZLariWtfzkBbAw,6968
|
||||
networkx/algorithms/chordal.py,sha256=L-ILWdVLWE44OkWmEO_4bSo4z6Ro-_zLglfLfTrwdqQ,13411
|
||||
networkx/algorithms/clique.py,sha256=LrmXvK6KVcjDyUrF5S6JTC2PQ1kTf26Yeb0TjqNy_WA,25872
|
||||
networkx/algorithms/cluster.py,sha256=x7dIotmBaBU3yaIzphjAyA2B-FHS_iiQ5nF-FeinQlU,20359
|
||||
networkx/algorithms/communicability_alg.py,sha256=0tZvZKY-_GUUB7GsRILxabS2jEpI51Udg5ADI9ADGZw,4545
|
||||
networkx/algorithms/core.py,sha256=2QQYUPoMs9F1rgGUlYgIAj6ETy4VefQWG1rl0RMkf9o,19184
|
||||
networkx/algorithms/covering.py,sha256=abt1bRBmiPi1J950uUYfTk4YS4pVhz1zanY01vxqNLg,5294
|
||||
networkx/algorithms/cuts.py,sha256=-J5j6Yi2CrlFsrX4bK-5kFztD6i4X6gihXwxmFC1zYQ,9990
|
||||
networkx/algorithms/cycles.py,sha256=erkLvKZkYfGDwya6Pn_o8cR5CnEnYeJ30Yi4kGr5xvk,43237
|
||||
networkx/algorithms/d_separation.py,sha256=3O_5RIWziPQ5xwRn-yAjH28xrkSaVIVbCFpw7K2Pa2A,27283
|
||||
networkx/algorithms/dag.py,sha256=y2HhZm0-olRZabgo9xczjsWf8ObSeG--VJl3PIEh9cE,45070
|
||||
networkx/algorithms/distance_measures.py,sha256=eauckS80lzTT_0CpZZh1JR7tLCzeiGCviaARDK1MN8k,34195
|
||||
networkx/algorithms/distance_regular.py,sha256=-1QCGLy7OPoNuV2bYJDY4jVot-0LGMobBQ0DubjbhGI,7053
|
||||
networkx/algorithms/dominance.py,sha256=T_z37jx_WSbY_HMfYgqZL6fT-p6PMAlZjSwEVsaLfLE,3450
|
||||
networkx/algorithms/dominating.py,sha256=d4CkSt_hmcwldF5FaOiazZpThYhxAuasRhJgGdExGjc,2669
|
||||
networkx/algorithms/efficiency_measures.py,sha256=VKbLKJgdIbno-YnJaLaCZt7TNXXnQPdz8N99uJCo748,4741
|
||||
networkx/algorithms/euler.py,sha256=yCqKaGchFSRPTRDXq7u1fH2IXZF94wWf9S10K9-Cd6U,14205
|
||||
networkx/algorithms/graph_hashing.py,sha256=0jcfhXY7tChFBV4N0ga4oJCJCHRwawrsDDyNy11uJlk,12556
|
||||
networkx/algorithms/graphical.py,sha256=1NdlhXuGEgUkHPo47EoNTWUMfdeTpiv7BBVM9ty2ivw,15831
|
||||
networkx/algorithms/hierarchy.py,sha256=_KFhCF1Afr2TrkPhqx-1PXUXEtfYLhbRShC58ZKbDGE,1786
|
||||
networkx/algorithms/hybrid.py,sha256=z3sIFMOpja1wlj-lI8YI6OIbSLZWHr66uSqyVESZWXY,6209
|
||||
networkx/algorithms/isolate.py,sha256=4rDH_iGY2WM5igJS-lBcIVb11MrKdoaFhJLieLZ4BAE,2301
|
||||
networkx/algorithms/link_prediction.py,sha256=UYo_LJgoVXcM1iLMXswM2g4jvUJmvxln3e5bVfXxQ10,22253
|
||||
networkx/algorithms/lowest_common_ancestors.py,sha256=xP0hkaJzwrj4evzahYvIjtUhaodj4FYv4JB51PWwVpc,9198
|
||||
networkx/algorithms/matching.py,sha256=bEvhXTFcRa-ZMNugIyM14rJ5QAcQkcZ2j-YJ-PTGQ3w,44550
|
||||
networkx/algorithms/mis.py,sha256=BEMv_dW8R6CjMMXJQGIhS4HpS8A8AkLJJWnz3GstuS4,2344
|
||||
networkx/algorithms/moral.py,sha256=z5lp42k4kqYk7t_FfszVj5KAC7BxXe6Adik3T2qvA6o,1535
|
||||
networkx/algorithms/node_classification.py,sha256=a2mVO7NI2IQF4Cd2Mx7TMLoTEu5HNG9RB5sEHQ19Wdw,6469
|
||||
networkx/algorithms/non_randomness.py,sha256=Uag54gFi5DR5uAQNFXyKKyORQuowTPuhq_QsjZaVMJ4,3068
|
||||
networkx/algorithms/planar_drawing.py,sha256=AXuoT3aFgEtCeMnAaUsRqjxCABdNYZ8Oo9sGOKBQto0,16254
|
||||
networkx/algorithms/planarity.py,sha256=PhIhnecPna-J_v7taoj-Ie175XWayVfcuMDHkj2bWLc,47249
|
||||
networkx/algorithms/polynomials.py,sha256=iP30_mcOlj81Vrzt4iB_ZZxYiRokubs-O1i9RW4pgTw,11278
|
||||
networkx/algorithms/reciprocity.py,sha256=1WMhLbSMkVPxRPlfUvbgO5FgVvJHn1doXQF4WuqSLQk,2855
|
||||
networkx/algorithms/regular.py,sha256=lEhYCP4Yysz8oTdxY8m40oqZcdhjKJuDsEj-P310loI,6794
|
||||
networkx/algorithms/richclub.py,sha256=kARzso3M6wnUcAJo2g8ga_ZtigL2czDNzeUDzBtRfqo,4892
|
||||
networkx/algorithms/similarity.py,sha256=My2MeE7AsIrCfXEkX-IYfbpbcNL3O7ZUkFtlzzF-j_8,61093
|
||||
networkx/algorithms/simple_paths.py,sha256=LFdFNltpt-rRI94x7HVDQooNbgm-urkzGQCxVHfIR5Q,30320
|
||||
networkx/algorithms/smallworld.py,sha256=3xT-z2_CVdp5-Ap8vF6fsd3DiavDYtspFNZrcwcpXG0,13565
|
||||
networkx/algorithms/smetric.py,sha256=_Aj4BIMnafiXbJtLkvAfAnIEMdI9OcVvMy6kk9KKTns,770
|
||||
networkx/algorithms/sparsifiers.py,sha256=4T8pMlh-usEHA2-rZFh-CmZbBY9dcXIHjoqR-oJ2hSw,10048
|
||||
networkx/algorithms/structuralholes.py,sha256=CS89P45_m1JGFGnSGA-FlC2xnt0BYq3O5ky1zkjYEDI,9342
|
||||
networkx/algorithms/summarization.py,sha256=CygTsSthyCKHs0ZTZsCgWnyaT8annQbLpUtahmfY9Sw,23251
|
||||
networkx/algorithms/swap.py,sha256=NVZMmlnkdxgwwNw5GDrc8waNERcdCu52ydHcBdOA_hw,14744
|
||||
networkx/algorithms/threshold.py,sha256=1HBOrQTyEaEp2uoIHsAlTEMpYAYXoRnR-6PaOKIjdZE,31150
|
||||
networkx/algorithms/time_dependent.py,sha256=PAeJ7Yt8kUqbDgvBaz_ZfUFZg-w-vf1gPC0HO6go_TI,5762
|
||||
networkx/algorithms/tournament.py,sha256=nx-PSefooyyYAwhFa9a7SRZSRL_ky5Rq19lYP79-0E8,11579
|
||||
networkx/algorithms/triads.py,sha256=Gf0f6liwgARszL-R4yQle-ogGH4mJkF-gureeUTxGyY,16853
|
||||
networkx/algorithms/vitality.py,sha256=8M1cubIydO49El2kwVCURHZ2UwCtfGVFeGS8-JYt1ko,2289
|
||||
networkx/algorithms/voronoi.py,sha256=07SnSpxLDz4k6K59Jo-VTNA-Qy5knaHfBC-y_5vAOLQ,3183
|
||||
networkx/algorithms/walks.py,sha256=0JOLhpAyeNzmF8EtlVlYOWEPJJvCIltt7tbk1Vx52dI,2427
|
||||
networkx/algorithms/wiener.py,sha256=WOUG0L5xDKpY4uspyI-oDo1hWuHxbUnTFZEe_-IAx5M,7639
|
||||
networkx/algorithms/approximation/__init__.py,sha256=CydjSsAU3qlxRwDTvgLyjQRgIuhL1e1STrjPdfqtfSE,1178
|
||||
networkx/algorithms/approximation/clique.py,sha256=b4cnWMJXmmgCyjMI8A_doHZeKS_RQbGqm2L01OpT_Jg,7691
|
||||
networkx/algorithms/approximation/clustering_coefficient.py,sha256=SWpSLEhW3DJc1n2fHlSbJSGg3wdoJkN5Y4_tnntn0Ws,2164
|
||||
networkx/algorithms/approximation/connectivity.py,sha256=aVXSfUiWEG4gUL0R1u6WZ-h-wheuLP1_suO_pRFB8M4,13118
|
||||
networkx/algorithms/approximation/distance_measures.py,sha256=UEkmKagNw9sj8kiUDdbAeYuzvZ31pgLMXqzliqMkG84,5805
|
||||
networkx/algorithms/approximation/dominating_set.py,sha256=5fC90w1CgYR4Xkpqact8iukKY0i57bMmyJW-A9CToUQ,4710
|
||||
networkx/algorithms/approximation/kcomponents.py,sha256=MDkoyQbk0gSAm3ZZK35VOsiLJDv7wiDsxfzH5O-ObFs,13285
|
||||
networkx/algorithms/approximation/matching.py,sha256=PFof5m9AIq9Xr5Kaa_-mYxI1IBBP7HEkjf-R9wVE3bo,1175
|
||||
networkx/algorithms/approximation/maxcut.py,sha256=eTQZqsDQAAUaufni-aDJAY2UzIcajDhRMdj-AcqVkPs,4333
|
||||
networkx/algorithms/approximation/ramsey.py,sha256=W5tX7BOQJIM_qNsBeUhCXVWMD8DFdeTycYyk08k4Sqk,1358
|
||||
networkx/algorithms/approximation/steinertree.py,sha256=dbPciMrLHmb1XWa0v7-v3qNFJ3Z7pZD0n4RAGF8eob4,8048
|
||||
networkx/algorithms/approximation/traveling_salesman.py,sha256=af4HEUYtuoulBpAQZJT2zqoUu8x08NtL1cQL5uGqe9E,55943
|
||||
networkx/algorithms/approximation/treewidth.py,sha256=Yu944jTE9MODBo1QiZjxbAGmHiC5MXZZTNV1YrLfz9o,8216
|
||||
networkx/algorithms/approximation/vertex_cover.py,sha256=oIi_yg5O-IisnfmrSof1P4HD-fsZpW69RpvkR_SM5Og,2803
|
||||
networkx/algorithms/approximation/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/approximation/tests/test_approx_clust_coeff.py,sha256=PGOVEKf2BcJu1vvjZrgTlBBpwM8V6t7yCANjyS9nWF0,1171
|
||||
networkx/algorithms/approximation/tests/test_clique.py,sha256=s6HQB-lK3RAu_ftpe2NvIiMu0Ol8tpAdbGvWzucNL6k,3021
|
||||
networkx/algorithms/approximation/tests/test_connectivity.py,sha256=gDG6tsgP3ux7Dgu0x7r0nso7_yknIxicV42Gq0It5pc,5952
|
||||
networkx/algorithms/approximation/tests/test_distance_measures.py,sha256=axgOojplJIgXdopgkjxjAgvzGTQ1FV1oJ5NG-7ICalo,2023
|
||||
networkx/algorithms/approximation/tests/test_dominating_set.py,sha256=l4pBDY7pK7Fxw-S4tOlNcxf-j2j5GpHPJ9f4TrMs1sI,2686
|
||||
networkx/algorithms/approximation/tests/test_kcomponents.py,sha256=tTljP1FHzXrUwi-oBz5AQcibRw1NgR4N5UE0a2OrOUA,9346
|
||||
networkx/algorithms/approximation/tests/test_matching.py,sha256=nitZncaM0605kaIu1NO6_5TFV2--nohUCO46XTD_lnM,186
|
||||
networkx/algorithms/approximation/tests/test_maxcut.py,sha256=U6CDZFSLfYDII-1nX9XB7avSz10kTx88vNazJFoLQ1k,2804
|
||||
networkx/algorithms/approximation/tests/test_ramsey.py,sha256=h36Ol39csHbIoTDBxbxMgn4371iVUGZ3a2N6l7d56lI,1143
|
||||
networkx/algorithms/approximation/tests/test_steinertree.py,sha256=rxkj8OWDWFqSE5MI3XC4NSOgyNUzYVfxKSskutOPtbQ,9671
|
||||
networkx/algorithms/approximation/tests/test_traveling_salesman.py,sha256=lLnnWvs88JBkhkf4Cg8qBipSvRnjn9W9WvOKZ-Gew6Q,30842
|
||||
networkx/algorithms/approximation/tests/test_treewidth.py,sha256=MWFFcmjO0QxM8FS8iXSCtfGnk6eqG2kFyv1u2qnSeUo,9096
|
||||
networkx/algorithms/approximation/tests/test_vertex_cover.py,sha256=FobHNhG9CAMeB_AOEprUs-7XQdPoc1YvfmXhozDZ8pM,1942
|
||||
networkx/algorithms/assortativity/__init__.py,sha256=ov3HRRbeYB_6Qezvxp1OTl77GBpw-EWkWGUzgfT8G9c,294
|
||||
networkx/algorithms/assortativity/connectivity.py,sha256=-V0C5MTqtErl86N-gyrZ487MUyiG5x1QFEZKurOpIJA,4220
|
||||
networkx/algorithms/assortativity/correlation.py,sha256=0rc4FDi-e8eQRia7gpFrTqjIy-J7V2GtSwOb4QN6WZk,8689
|
||||
networkx/algorithms/assortativity/mixing.py,sha256=RRqqkuVwo71LosJLDbeVCVBikqC7I_XZORdsonQsf9Y,7586
|
||||
networkx/algorithms/assortativity/neighbor_degree.py,sha256=UMaQWKBkOZ0ZgC8xGt5fXEz8OL1rgwYjt2zKbKEqofI,5282
|
||||
networkx/algorithms/assortativity/pairs.py,sha256=w7xnaWxDDteluHoCsqunLlcM6nlcBenO_5Nz87oOEnE,3841
|
||||
networkx/algorithms/assortativity/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/assortativity/tests/base_test.py,sha256=MNeQMLA3oBUCM8TSyNbBQ_uW0nDc1GEZYdNdUwePAm4,2651
|
||||
networkx/algorithms/assortativity/tests/test_connectivity.py,sha256=Js841GQLYTLWvc6xZhnyqj-JtyrnS0ska1TFYntxyXA,4978
|
||||
networkx/algorithms/assortativity/tests/test_correlation.py,sha256=1_D9GjLDnlT8Uy28lUn2fS1AHp2XBwiMpIl2OhRNDXk,5069
|
||||
networkx/algorithms/assortativity/tests/test_mixing.py,sha256=u-LIccNn-TeIAM766UtzUJQlY7NAbxF4EsUoKINzmlo,6820
|
||||
networkx/algorithms/assortativity/tests/test_neighbor_degree.py,sha256=ODP2M8jCaFr_l3ODwpwaz20-KqU2IFaEfJRBK53mpE8,3968
|
||||
networkx/algorithms/assortativity/tests/test_pairs.py,sha256=t05qP_-gfkbiR6aTLtE1owYl9otBSsuJcRkuZsa63UQ,3008
|
||||
networkx/algorithms/bipartite/__init__.py,sha256=811Xu3D1Qx8ncqRshHoN3gWZ_A04Hb2qxzoGuc5vBa4,3825
|
||||
networkx/algorithms/bipartite/basic.py,sha256=JPC2gGuPvFA6q2CuI5mqLX_9QUGxrsQ8cIwcS0e9P4U,8375
|
||||
networkx/algorithms/bipartite/centrality.py,sha256=G280bAqeyXyCmes5NpRqUv2Tc-EHWrMshJ3_f4uqV9U,9156
|
||||
networkx/algorithms/bipartite/cluster.py,sha256=ZDAo7NM69woVY8fNwRjbAz6Wwb99CE650lMmv1v0Omc,6935
|
||||
networkx/algorithms/bipartite/covering.py,sha256=B3ITc016Kk70NBv-1lb30emXnfjlMIQJ7M-FIPCZip0,2163
|
||||
networkx/algorithms/bipartite/edgelist.py,sha256=l6JqWqedRGde0sOz7oLK-xe9azq_VEYec0-GPlFUIbg,11364
|
||||
networkx/algorithms/bipartite/extendability.py,sha256=OrYHlS4ruQST-dlQOuleiqHFKpVVNOvrG5aDNFgfckg,3989
|
||||
networkx/algorithms/bipartite/generators.py,sha256=PfnR6S9gKw5OK_JuGMChltWxyd_i8_KYFq1WpRlsL-A,20439
|
||||
networkx/algorithms/bipartite/matching.py,sha256=xsT048Ok_uM0Zhpdc34qswV1zaCGOlJQnsbGTDsm5oo,21637
|
||||
networkx/algorithms/bipartite/matrix.py,sha256=RuoILyPHjORW0Y_Bcf-vSH_K6-bSUjiTN9JTjnik5HE,6156
|
||||
networkx/algorithms/bipartite/projection.py,sha256=YIUlreqQQ6IPE37OXF32zNIdzEGeyR8aY-7iUENZYVA,17252
|
||||
networkx/algorithms/bipartite/redundancy.py,sha256=Mnkz0LbNXS0haxtLQ5naorR6C2tNLUbkNS_3PANFxbg,3402
|
||||
networkx/algorithms/bipartite/spectral.py,sha256=fu2grV1the_e_G-e_lUdhk8Y9XFe6_p2tPmx3RKntFw,1902
|
||||
networkx/algorithms/bipartite/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/bipartite/tests/test_basic.py,sha256=gzbtsQqPi85BznX5REdGBBJVyr9aH4nO06c3eEI4634,4291
|
||||
networkx/algorithms/bipartite/tests/test_centrality.py,sha256=PABPbrIyoAziEEQKXsZLl2jT36N8DZpNRzEO-jeu89Y,6362
|
||||
networkx/algorithms/bipartite/tests/test_cluster.py,sha256=O0VsPVt8vcY_E1FjjLJX2xaUbhVViI5MP6_gLTbEpos,2801
|
||||
networkx/algorithms/bipartite/tests/test_covering.py,sha256=EGVxYQsyLXE5yY5N5u6D4wZq2NcZe9OwlYpEuY6DF3o,1221
|
||||
networkx/algorithms/bipartite/tests/test_edgelist.py,sha256=fK35tSekG_-9Ewr5Bhl1bRdwAy247Z9zZ4dQFFDQ9xw,8471
|
||||
networkx/algorithms/bipartite/tests/test_extendability.py,sha256=XgPmg6bWiHAF1iQ75_r2NqUxExOQNZRUeYUPzlCa5-E,7043
|
||||
networkx/algorithms/bipartite/tests/test_generators.py,sha256=DB9NEapShvX9L5Dpj1OF8bs8LOu5n3zvew60WZhYChQ,13241
|
||||
networkx/algorithms/bipartite/tests/test_matching.py,sha256=3-2DMl3tF-g4_xNHvEuY4fZW7S5cqMTO_GUpcz1gkeQ,11973
|
||||
networkx/algorithms/bipartite/tests/test_matrix.py,sha256=1MymSi1dCUqAhTt82O2nBzjriNQtFRk6TxWGJ2FBW4k,3094
|
||||
networkx/algorithms/bipartite/tests/test_project.py,sha256=FBjkys3JYYzEG4aq_CsQrtm41edZibWI_uDAQ0b4wqM,15134
|
||||
networkx/algorithms/bipartite/tests/test_redundancy.py,sha256=utxcrQaTrkcEN3kqtObgKNpLZai8B5sMAqLyXatOuUo,917
|
||||
networkx/algorithms/bipartite/tests/test_spectral_bipartivity.py,sha256=1jGDgrIx3-TWOCNMSC4zxmZa7LHyMU69DXh3h12Bjag,2358
|
||||
networkx/algorithms/centrality/__init__.py,sha256=Er3YoYoj76UfY4P6I0L-0fCQkO7mMU0b3NLsTT2RGWI,558
|
||||
networkx/algorithms/centrality/betweenness.py,sha256=9kXlMR9T1IHDJ55x2fSMfjuLBy402AREJfQKUW1LfFo,14383
|
||||
networkx/algorithms/centrality/betweenness_subset.py,sha256=mkVJdEmR1G8kFoS-KN-jwhUyR_CUiB8DXneGqsqyB6U,9336
|
||||
networkx/algorithms/centrality/closeness.py,sha256=ehkntG-gApT9uhWJjGaEZQ-tEQ-hdxDT7luf-uVPNAE,10281
|
||||
networkx/algorithms/centrality/current_flow_betweenness.py,sha256=zZRqgrB06uDzgwWJ_FLUF3DSrgkER1tvakYZHX8DbSY,11848
|
||||
networkx/algorithms/centrality/current_flow_betweenness_subset.py,sha256=2qtLgf_3ft5qdDvHFrfYUt6zeQi42Nw7XBpSZRboJIA,8107
|
||||
networkx/algorithms/centrality/current_flow_closeness.py,sha256=IvecI8BZE4SgKayEXhKowIJw7S2fD_dN__N-f9TW-ME,3327
|
||||
networkx/algorithms/centrality/degree_alg.py,sha256=EFTA1b_GWUbmBy5R9beRQp7yh1X_NwZtk5L6is-mFGk,3894
|
||||
networkx/algorithms/centrality/dispersion.py,sha256=M12L2KiVPrC2-SyCXMF0kvxLelgcmvXJkLT_cBHoCTw,3631
|
||||
networkx/algorithms/centrality/eigenvector.py,sha256=LAxVqaT3LmuQw20__t1KrgLKPF1Cz-PkTaiSrgPC1FU,13623
|
||||
networkx/algorithms/centrality/flow_matrix.py,sha256=Y65m6VbWyYjNK0CInE_lufyEkKy9-TyPmBeXb-Gkz70,3834
|
||||
networkx/algorithms/centrality/group.py,sha256=-YaVfnJ6HKT6b1P-IhyUKtJvXk0ZSnC2Jz4XP6hjkyE,27960
|
||||
networkx/algorithms/centrality/harmonic.py,sha256=ZPp8FYFgSUZS0QBxUbzhi39qiv_EN7COirxZEYiTCIM,2847
|
||||
networkx/algorithms/centrality/katz.py,sha256=uVGHAyjqndSd4y4idHjkv0mUhmKmHU5vaEfNWfiKlzc,11042
|
||||
networkx/algorithms/centrality/laplacian.py,sha256=8-qloyxvFc33xlfpj7Xol8qeOvPAg_Z0BHVZGSxjnmc,5640
|
||||
networkx/algorithms/centrality/load.py,sha256=M2EdPX4gJEYGjMBIJMFKRWGI9uYHbFOWYxsILeaJuOE,6859
|
||||
networkx/algorithms/centrality/percolation.py,sha256=YJB8iYgbpjJ3EYK8pl26iSnjgfFsK31ufytRHnUTYYE,4419
|
||||
networkx/algorithms/centrality/reaching.py,sha256=OFWHlDUtCaQXHWxAfEgPpinej-0anLJQZsCvh3D8gME,7243
|
||||
networkx/algorithms/centrality/second_order.py,sha256=4CTboP95B6gUtAtSKLfeeE4s9oq0_3hXsXczxL6c_g8,5012
|
||||
networkx/algorithms/centrality/subgraph_alg.py,sha256=HtwSPYMRUxhaAuvMA90Qu2i1smXSpVpLRtHlBohnpSc,9513
|
||||
networkx/algorithms/centrality/trophic.py,sha256=q--TsLcfGNCSet_A6oLVf7CGWQBvDxDOlkjozduZfxY,4679
|
||||
networkx/algorithms/centrality/voterank_alg.py,sha256=z_1eq8rSDadEO5W5BbAg1zuOJj2di4FUCkmOwiuK12I,3231
|
||||
networkx/algorithms/centrality/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/centrality/tests/test_betweenness_centrality.py,sha256=pKoPAP1hnQSgrOxYeW5-LdUiFDANiwTn_NdOdgccbo8,26795
|
||||
networkx/algorithms/centrality/tests/test_betweenness_centrality_subset.py,sha256=HrHMcgOL69Z6y679SbqZIjkQOnqrYSz24gt17AJ9q-o,12554
|
||||
networkx/algorithms/centrality/tests/test_closeness_centrality.py,sha256=Ziz_LMgRJHT1pz_sgT4oCZPmOeWJL7OmfUSI8UCC1dI,10210
|
||||
networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py,sha256=VOxx1A7iSGtdEbzJYea_sW_Hv0S71-oo1CVX7Rqd5RY,7870
|
||||
networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality_subset.py,sha256=JfRGgPuiF-vJu5fc2_pcJYREEboxcK_dmy-np39c4Aw,5839
|
||||
networkx/algorithms/centrality/tests/test_current_flow_closeness.py,sha256=vflQeoNKngrGUiRb3XNlm2X9wR4vKgMSW_sCyMUCQi8,1379
|
||||
networkx/algorithms/centrality/tests/test_degree_centrality.py,sha256=Jn_p5lThA3__ZBTDAORwo_EchjXKKkK1NwU_73HHI6M,4101
|
||||
networkx/algorithms/centrality/tests/test_dispersion.py,sha256=ROgl_5bGhcNXonNW3ylsvUcA0NCwynsQu_scic371Gw,1959
|
||||
networkx/algorithms/centrality/tests/test_eigenvector_centrality.py,sha256=A6REmarGOuDmq3GcSYemyadlFLv24sErIGLtDcL9GO4,5255
|
||||
networkx/algorithms/centrality/tests/test_group.py,sha256=833ME4tGlOGQZz8YANw4MSyeVPpjbyCdYh5X88GOprw,8685
|
||||
networkx/algorithms/centrality/tests/test_harmonic_centrality.py,sha256=wI7nStX_kIFJoZQY_i8DXXlZBOJzVnQfOP8yidX0PAU,3867
|
||||
networkx/algorithms/centrality/tests/test_katz_centrality.py,sha256=JL0bZZsJe2MQFL6urXgY82wCAwucUvhjaShYZPxpL6U,11240
|
||||
networkx/algorithms/centrality/tests/test_laplacian_centrality.py,sha256=vY-NULtr_U_GxUMwfAZB-iccxIRTiqqUN4Q8HRNpzSo,5916
|
||||
networkx/algorithms/centrality/tests/test_load_centrality.py,sha256=Vv3zSW89iELN-8KNbUclmkhOe1LzKdF7U_w34nYovIo,11343
|
||||
networkx/algorithms/centrality/tests/test_percolation_centrality.py,sha256=ycQ1fvEZZcWAfqL11urT7yHiEP77usJDSG25OQiDM2s,2591
|
||||
networkx/algorithms/centrality/tests/test_reaching.py,sha256=_JVeO1Ri-KybdnGCJ_yNPtJQmT_g77z0DAkU0JYFVGQ,5090
|
||||
networkx/algorithms/centrality/tests/test_second_order_centrality.py,sha256=ce0wQ4T33lu23wskzGUnBS7X4BSODlvAX1S5KxlLzOA,1999
|
||||
networkx/algorithms/centrality/tests/test_subgraph.py,sha256=vhE9Uh-_Hlk49k-ny6ORHCgqk7LWH8OHIYOEYM96uz0,3729
|
||||
networkx/algorithms/centrality/tests/test_trophic.py,sha256=_lmwb0_78iX_cxgUKHjCRCSxohVMkRrkKqSaB5QV3ys,8705
|
||||
networkx/algorithms/centrality/tests/test_voterank.py,sha256=tN5u7pKAnJ_4AiwhPW6EuJZz7FLIG2jYqLKcXFi2urk,1687
|
||||
networkx/algorithms/coloring/__init__.py,sha256=P1cmqrAjcaCdObkNZ1e6Hp__ZpxBAhQx0iIipOVW8jg,182
|
||||
networkx/algorithms/coloring/equitable_coloring.py,sha256=uDcza6PD9qbvwVPUX1MBZbopQdrAEKNk6DpCFkc02tU,16315
|
||||
networkx/algorithms/coloring/greedy_coloring.py,sha256=6Jzcc4iW5KuRVFEEr15v8rBvik3g4maa7_wcjWyyRDI,20046
|
||||
networkx/algorithms/coloring/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/coloring/tests/test_coloring.py,sha256=7v_d1xanjYMZCa3dq2hE2hCcyexwWBTEFV5SoLgQDv4,23697
|
||||
networkx/algorithms/community/__init__.py,sha256=0YrcAVLTxJt3u-htlaSPZ-XSRn0Jg-EQKCMRPmWuw-g,1179
|
||||
networkx/algorithms/community/asyn_fluid.py,sha256=0ktsoOa4JKBKiuE3wmGDcBSUgPlFdGvzNheqINtWKbk,5935
|
||||
networkx/algorithms/community/centrality.py,sha256=Yyv5kyf1hf_L7iQ_ZbG8_FAkP638Sc_3N4tCSoB6J1w,6635
|
||||
networkx/algorithms/community/community_utils.py,sha256=sUi-AcPYyGrYhnjI9ztt-vrSHLl28lKXxTJPfi5N0c8,908
|
||||
networkx/algorithms/community/divisive.py,sha256=yFcKfKkiI6FqEVlBVxLa1fbqI1Yeiqe_A5fpPnYvlAE,6655
|
||||
networkx/algorithms/community/kclique.py,sha256=DTr9iUT_XWv0S3Y79KQl6OXefjztNMc9SAHWhdFOxcU,2460
|
||||
networkx/algorithms/community/kernighan_lin.py,sha256=vPU8Mbpk7_NscMC-gorNoXhsQjkOhgK2YiKOo-u6DvY,4349
|
||||
networkx/algorithms/community/label_propagation.py,sha256=LhzAXSHFCPQ2kG_rPgXb06YKdppO7buApksCC4GI4w8,11878
|
||||
networkx/algorithms/community/louvain.py,sha256=zh5h16hRWzgTv9IUqWiiJKFntZhQbB_EHNYIGViwPas,15365
|
||||
networkx/algorithms/community/lukes.py,sha256=gzqnup95RR2UzUiPpIt8qkepzZ9dCWqHGQSVPIJDMx8,8115
|
||||
networkx/algorithms/community/modularity_max.py,sha256=gzyZrGHNMtTZyqpLFcJHxgzzIsar1m5DktScODoUngk,18082
|
||||
networkx/algorithms/community/quality.py,sha256=dVIkV-CFKdAou0WjgIDmfhnpIIqReRaeL4odg39XAYk,11939
|
||||
networkx/algorithms/community/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/community/tests/test_asyn_fluid.py,sha256=UzAMxJzhN74qUinehR7B1rhU_vsigJ7-cRvcE6jdKyc,3332
|
||||
networkx/algorithms/community/tests/test_centrality.py,sha256=s8q4k5aThR0OgO9CDQk_PXMxfllmf5uC1GlvyUc_8EY,2932
|
||||
networkx/algorithms/community/tests/test_divisive.py,sha256=-Ee40OR-mPDReTngTEhbpx4_uLtNI7cqFkt8cZT9t5Y,3441
|
||||
networkx/algorithms/community/tests/test_kclique.py,sha256=iA0SBqwbDfaD2u7KM6ccs6LfgAQY_xxrnW05UIT_tFA,2413
|
||||
networkx/algorithms/community/tests/test_kernighan_lin.py,sha256=rcFDI9mTq1Nwsi251PwDgi1UoxTMPXAeSy2Cp6GtUQg,2710
|
||||
networkx/algorithms/community/tests/test_label_propagation.py,sha256=IHidFEv7MI781zsdk7XT848rLvLwDk2wBK1FjL-CRv4,7985
|
||||
networkx/algorithms/community/tests/test_louvain.py,sha256=TwW1nlSKWGJeIKr9QOJ8xGehSY6R0Nz01xsnFqzt0Oo,8071
|
||||
networkx/algorithms/community/tests/test_lukes.py,sha256=f_JU-EzY6PwXEkPN8kk5_3NVg6phlX0nrj1f57M49lk,3961
|
||||
networkx/algorithms/community/tests/test_modularity_max.py,sha256=XYyPuDkxL4CYFwnpTdU_qD4GydpqgiRAIJO3CHQN_m4,10617
|
||||
networkx/algorithms/community/tests/test_quality.py,sha256=sZEy10hh3zlelUmww5r2pk5LxpZAht06PC5zCHxV1bs,5275
|
||||
networkx/algorithms/community/tests/test_utils.py,sha256=gomD6rFgAaywxT1Yjdi4ozY-1rC0ina4jgfvWeCvwGE,704
|
||||
networkx/algorithms/components/__init__.py,sha256=Dt74KZWp_cJ_j0lL5hd_S50_hia5DKcC2SjuRnubr6M,173
|
||||
networkx/algorithms/components/attracting.py,sha256=6az3lgqWhHTXaWUUuOPZfW9t7okliAhooFRotQY5JoM,2712
|
||||
networkx/algorithms/components/biconnected.py,sha256=_9GJdPZgqusGKZLzqT9tUSj1XZr2DgohiT6hcHVyil4,12782
|
||||
networkx/algorithms/components/connected.py,sha256=r-jNJJkxoDtFcYiuoteyZb3a2oEHh0j0WBddwsXj_a4,4459
|
||||
networkx/algorithms/components/semiconnected.py,sha256=BaBMFlQ208vuHOo5y1xeV0PDEI3yDUfH6zFb_jkcVhQ,2030
|
||||
networkx/algorithms/components/strongly_connected.py,sha256=i41vDeazdNGqG4weufAKd6axaN2nBKmMzURZBs7WsLI,9542
|
||||
networkx/algorithms/components/weakly_connected.py,sha256=jFHHr0qTZH57IyFIQ8iD5gekgczQXTPRHrtYoXVYYPM,4455
|
||||
networkx/algorithms/components/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/components/tests/test_attracting.py,sha256=b3N3ZR9E5gLSQWGgaqhcRfRs4KBW6GnnkVYeAjdxC_o,2243
|
||||
networkx/algorithms/components/tests/test_biconnected.py,sha256=N-J-dgBgI77ytYUUrXjduLxtDydH7jS-af98fyPBkYc,6036
|
||||
networkx/algorithms/components/tests/test_connected.py,sha256=KMYm55BpbFdGXk_B2WozS9rIagQROd7_k0LT3HFQmr4,4815
|
||||
networkx/algorithms/components/tests/test_semiconnected.py,sha256=q860lIxZF5M2JmDwwdzy-SGSXnrillOefMx23GcJpw0,1792
|
||||
networkx/algorithms/components/tests/test_strongly_connected.py,sha256=Zm7MgUIZbuPPJu66xZH1zfMZQ_3X1YBl2fLCOjph7NQ,6021
|
||||
networkx/algorithms/components/tests/test_weakly_connected.py,sha256=_eUx7226dxme_K2WNmvSIwZXQlKNoCuglWOOC3kFUW4,3083
|
||||
networkx/algorithms/connectivity/__init__.py,sha256=EvYKw8LJn7wyZECHAsuEkIaSl-cV-LhymR6tqcn90p8,281
|
||||
networkx/algorithms/connectivity/connectivity.py,sha256=KuvVbJ0dAmG2h51uFo9IdBIK1G1PYaTZ-XFT78ksZEo,29367
|
||||
networkx/algorithms/connectivity/cuts.py,sha256=d9O6G3fuhjg0GEuDSm6QyYhm3OTBKFZeHC7Tz6IZ0Mg,23015
|
||||
networkx/algorithms/connectivity/disjoint_paths.py,sha256=R0HDHrrhdI1E_do3U6t6oseXsrGJlG7PC89kXCPC1v8,14649
|
||||
networkx/algorithms/connectivity/edge_augmentation.py,sha256=SE7CkLjtxG-q6DZPZH33g6MJcYA1KsJgHm-Pm575gkA,44061
|
||||
networkx/algorithms/connectivity/edge_kcomponents.py,sha256=hqABcfCqZ-rb45I0qYE-X4NtstsKJbxl37FZzzmoXA4,20894
|
||||
networkx/algorithms/connectivity/kcomponents.py,sha256=TtiEvpaKflkdxJ3r37Qsj1qrSzB2rtHzDcxCDO_Aq2Q,8171
|
||||
networkx/algorithms/connectivity/kcutsets.py,sha256=zYohzgkR2FODi_Ew2M9uMLb_a9ZP5fNqcXJwMYy6P7o,9371
|
||||
networkx/algorithms/connectivity/stoerwagner.py,sha256=WodsJEqKgsmTTcyUBk2u3wV_CXeon-cAzveWgIGgFmA,5431
|
||||
networkx/algorithms/connectivity/utils.py,sha256=gL8LmZnK4GKAZQcIPEhVNYmVi18Mqsqwg4O4j_et56s,3217
|
||||
networkx/algorithms/connectivity/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/connectivity/tests/test_connectivity.py,sha256=eSmsi8uQk6MI591JgtSu2elIusb08bmSZS0h9gxb76I,15027
|
||||
networkx/algorithms/connectivity/tests/test_cuts.py,sha256=4F8seWb-sPDDjjVMkh14gst5UQa5f-zDkCsZIdJjVzo,10353
|
||||
networkx/algorithms/connectivity/tests/test_disjoint_paths.py,sha256=NLHReLoXSKoA6KPBNRbjF84ktg5PEaaktIj2AII3SDY,8392
|
||||
networkx/algorithms/connectivity/tests/test_edge_augmentation.py,sha256=d3ymFHyY2G4cpy1Y6wu4ze339qfF2LRp2HmGAIVjnMM,15731
|
||||
networkx/algorithms/connectivity/tests/test_edge_kcomponents.py,sha256=CZ26Dy91WOUqhw1X73mqLGX-WHWzBBIeBCgrp6KK4Zo,16453
|
||||
networkx/algorithms/connectivity/tests/test_kcomponents.py,sha256=ohoSX8GACeszRZdzTiNuWXSFitfU9DzP0hqllS2gvMU,8554
|
||||
networkx/algorithms/connectivity/tests/test_kcutsets.py,sha256=sVKjwQt3FUqtnlY2xuHn6VGY9rvUkYoVp7v5fK-6aJw,8610
|
||||
networkx/algorithms/connectivity/tests/test_stoer_wagner.py,sha256=A291C30_t2CI1erPCqN1W0DoAj3zqNA8fThPIj4Rku0,3011
|
||||
networkx/algorithms/flow/__init__.py,sha256=rVtMUy6dViPLewjDRntmn15QF0bQwiDdQbZZx9j7Drc,341
|
||||
networkx/algorithms/flow/boykovkolmogorov.py,sha256=qFcppmiXz4VKKFd4RbDsiWOqJODtDTHbNr9_UFTjQaU,13334
|
||||
networkx/algorithms/flow/capacityscaling.py,sha256=8rng2qO5kawNSxq2S8BNlUMmdvNSoC6R8ekiBGU8LxU,14469
|
||||
networkx/algorithms/flow/dinitz_alg.py,sha256=I5nnZVsj0aU8-9Cje0umey407epFzpd7BDJpkI6ESK4,8341
|
||||
networkx/algorithms/flow/edmondskarp.py,sha256=PEIwLftevS2VYHaTzzZMSOLPy7QSBPsWPedjx1lR6Cs,8056
|
||||
networkx/algorithms/flow/gomory_hu.py,sha256=EuibaxPl65shGM9Jxvaa9WMwMmoczDvXXc2b0E81cqM,6345
|
||||
networkx/algorithms/flow/maxflow.py,sha256=3_v0FUEHulFrOeSDM1FMcmOF3yTYvxUbLGv3MNTNp1Q,22795
|
||||
networkx/algorithms/flow/mincost.py,sha256=GzMYInS4QcNe0yImGrVXJ0bRd7t5TSSMa9jSeenIoOk,12853
|
||||
networkx/algorithms/flow/networksimplex.py,sha256=32uetoZWj-_7KPO2OJputP0FpTrsQ_qJxntC8XxIVr0,25185
|
||||
networkx/algorithms/flow/preflowpush.py,sha256=CUKZ0-7X9l7P7qH_2n2Immbf8mFm8vocH2SY0tIwjGo,15721
|
||||
networkx/algorithms/flow/shortestaugmentingpath.py,sha256=gXXdkY3nH4d0hXVn0P2-kzfC3DHcuCdrudFdxetflKI,10372
|
||||
networkx/algorithms/flow/utils.py,sha256=bCeiFAiyFe4-ptkCopo_PnQKF9xY5M8Br87hJT3fRWQ,6084
|
||||
networkx/algorithms/flow/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/flow/tests/gl1.gpickle.bz2,sha256=z4-BzrXqruFiGqYLiS2D5ZamFz9vZRc1m2ef89qhsPg,44623
|
||||
networkx/algorithms/flow/tests/gw1.gpickle.bz2,sha256=b3nw6Q-kxR7HkWXxWWPh7YlHdXbga8qmeuYiwmBBGTE,42248
|
||||
networkx/algorithms/flow/tests/netgen-2.gpickle.bz2,sha256=OxfmbN7ajtuNHexyYmx38fZd1GdeP3bcL8T9hKoDjjA,18972
|
||||
networkx/algorithms/flow/tests/test_gomory_hu.py,sha256=aWtbI3AHofIK6LDJnmj9UH1QOfulXsi5NyB7bNyV2Vw,4471
|
||||
networkx/algorithms/flow/tests/test_maxflow.py,sha256=4CtGOqeyloAxFSajaxPfGuyVhE0R3IdJf2SuIg4kHKQ,18940
|
||||
networkx/algorithms/flow/tests/test_maxflow_large_graph.py,sha256=1a7pS0i5sj_kowLelETcHdrf7RmPEhAJnmCT03JZ0K8,4622
|
||||
networkx/algorithms/flow/tests/test_mincost.py,sha256=n4fFLDwDLy7Tau-_ey1CoxZwKhFjk28GLGJjCyxhClk,17816
|
||||
networkx/algorithms/flow/tests/test_networksimplex.py,sha256=bsVxlvHAD0K7aDevCcVaa9uRNNsWAevw6yUKlj2T8No,12103
|
||||
networkx/algorithms/flow/tests/wlm3.gpickle.bz2,sha256=zKy6Hg-_swvsNh8OSOyIyZnTR0_Npd35O9RErOF8-g4,88132
|
||||
networkx/algorithms/isomorphism/__init__.py,sha256=gPRQ-_X6xN2lJZPQNw86IVj4NemGmbQYTejf5yJ32N4,406
|
||||
networkx/algorithms/isomorphism/ismags.py,sha256=TpZP5xDxLITCGOk8DT4EBVaWDbbjzEUT5ZOCDNGAho0,43239
|
||||
networkx/algorithms/isomorphism/isomorph.py,sha256=Yg2Aukv0tVZIQ66jxzDS4DPBjX6DMKwT0_WNH12fsgk,7114
|
||||
networkx/algorithms/isomorphism/isomorphvf2.py,sha256=_IdR1YRm8N9z-HaX2XtzPRq-2j3_jqlcJ8WSrvAyE5g,46785
|
||||
networkx/algorithms/isomorphism/matchhelpers.py,sha256=PaZ7PjmNNsJO9KoeRrf9JgcDHIcFr1tZckQc_ol4e9I,10884
|
||||
networkx/algorithms/isomorphism/temporalisomorphvf2.py,sha256=-1NW81l8kM9orQ2ni9tcNizQzEhOUE9BaBJXjUWqhiI,10948
|
||||
networkx/algorithms/isomorphism/tree_isomorphism.py,sha256=fj1cUspSojUVwmAdWKGzXEHqOawUNJgzfO9QjCEnPLs,9454
|
||||
networkx/algorithms/isomorphism/vf2pp.py,sha256=WNXf7g0u3c8R3IsX2YuP3gWU5sjb0uqjuDSvmtob_QE,36421
|
||||
networkx/algorithms/isomorphism/vf2userfunc.py,sha256=HiPwyr7nJF1QS9w69MzKf6wGvO8cgjvdS5vW59iwCew,7371
|
||||
networkx/algorithms/isomorphism/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/isomorphism/tests/iso_r01_s80.A99,sha256=hKzMtYLUR8Oqp9pmJR6RwG7qo31aNPZcnXy4KHDGhqU,1442
|
||||
networkx/algorithms/isomorphism/tests/iso_r01_s80.B99,sha256=AHx_W2xG4JEcz1xKoN5TwCHVE6-UO2PiMByynkd4TPE,1442
|
||||
networkx/algorithms/isomorphism/tests/si2_b06_m200.A99,sha256=NVnPFA52amNl3qM55G1V9eL9ZlP9NwugBlPf-zekTFU,310
|
||||
networkx/algorithms/isomorphism/tests/si2_b06_m200.B99,sha256=-clIDp05LFNRHA2BghhGTeyuXDqBBqA9XpEzpB7Ku7M,1602
|
||||
networkx/algorithms/isomorphism/tests/test_ismags.py,sha256=8D1jWosarNJ0ZzCYgfwy0mB62YVZAMvG-UF9Q0peRa0,10581
|
||||
networkx/algorithms/isomorphism/tests/test_isomorphism.py,sha256=kF-o4dTjB7Ad0NOHnUGoiOCCNr3MWSmJm_YBc-Wvhgk,2022
|
||||
networkx/algorithms/isomorphism/tests/test_isomorphvf2.py,sha256=qisgeaCLO8ytf09DP7zANsnWdAHPu1lvJl4Gmg2zD6M,11747
|
||||
networkx/algorithms/isomorphism/tests/test_match_helpers.py,sha256=uuTcvjgf2LPqSQzzECPIh0dezw8-a1IN0u42u8TxwAw,2483
|
||||
networkx/algorithms/isomorphism/tests/test_temporalisomorphvf2.py,sha256=k8032J4ItZ4aFHeOraOpiF8y4aPm2O1g44UvUfrQJgg,7343
|
||||
networkx/algorithms/isomorphism/tests/test_tree_isomorphism.py,sha256=0-7waJjupg8AWfQDqrcsJVOgTXk7HePr5kt87MgnPtM,7412
|
||||
networkx/algorithms/isomorphism/tests/test_vf2pp.py,sha256=65RkN1mPWLoxirE7SlIvfaKMJk80b_ZwWG6HTJtlkPg,49924
|
||||
networkx/algorithms/isomorphism/tests/test_vf2pp_helpers.py,sha256=HnXcdy2LTBFX423nIdJ8CbwmfkHFmzf1XNa8-xld5jk,90125
|
||||
networkx/algorithms/isomorphism/tests/test_vf2userfunc.py,sha256=KMRPb-m3fmvRF0vt9laKIzOfwnrkxN2SueLv7JWUuXs,6625
|
||||
networkx/algorithms/link_analysis/__init__.py,sha256=UkcgTDdzsIu-jsJ4jBwP8sF2CsRPC1YcZZT-q5Wlj3I,118
|
||||
networkx/algorithms/link_analysis/hits_alg.py,sha256=OJ2DPKn_qGDBiW7Tln8_vLtJGvBkzWbOxylbHn95ne4,10421
|
||||
networkx/algorithms/link_analysis/pagerank_alg.py,sha256=BlJr6dsDfUNdU0mH8BmqWLt8Hzra-wBwWmQFHArTJc8,17191
|
||||
networkx/algorithms/link_analysis/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/link_analysis/tests/test_hits.py,sha256=QjSZZmrj3rBLNVpKOIHUvJNYM7OJ1b-yjiaglyVzNyw,2547
|
||||
networkx/algorithms/link_analysis/tests/test_pagerank.py,sha256=szFqJoRJrDojANbuAaw7kfX-cLjEne6tOyek3-Cax_4,7283
|
||||
networkx/algorithms/minors/__init__.py,sha256=ceeKdsZ6U1H40ED-KmtVGkbADxeWMTVG07Ja8P7N_Pg,587
|
||||
networkx/algorithms/minors/contraction.py,sha256=EviSuRlx5EsGiWNbGrSSfAYfPV19jzIN8H_l596YHbI,22870
|
||||
networkx/algorithms/minors/tests/test_contraction.py,sha256=YjBXi-byijqbh_OxLpLK7_au5A5YCoVTlta7hnnK4Gg,14213
|
||||
networkx/algorithms/operators/__init__.py,sha256=dJ3xOXvHxSzzM3-YcfvjGTJ_ndxULF1TybkIRzUS87Y,201
|
||||
networkx/algorithms/operators/all.py,sha256=pNIKjEiSBBiUa6zcYZHQIiiHq3C9hnazSyaIpasvBxw,9652
|
||||
networkx/algorithms/operators/binary.py,sha256=mRgkFsPoAw2PuqMIwRmS59vYC2KFJ47dB_lct5HRAh4,12948
|
||||
networkx/algorithms/operators/product.py,sha256=FQkSIduOv-z1ktVzid2T40759S-BmAfTlya88VytuZc,19632
|
||||
networkx/algorithms/operators/unary.py,sha256=Eo2yeTg-F5uODGWSWR_im5VaKZQ97LyATIuKZcAFQR8,1795
|
||||
networkx/algorithms/operators/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/operators/tests/test_all.py,sha256=Pqjv9QiA0875Yl9D5o6c5Ml0t4KHpH2a5jbpAoZQXFc,8250
|
||||
networkx/algorithms/operators/tests/test_binary.py,sha256=QzQTfnkHf1ulVvvNsclfkQgzRGc9hGQdZDyf4F9O5n8,12171
|
||||
networkx/algorithms/operators/tests/test_product.py,sha256=i4pBb5A4NmaCsllR1XizyhUToaQFMuLZ-JrywkQFdbU,15155
|
||||
networkx/algorithms/operators/tests/test_unary.py,sha256=UZdzbt5GI9hnflEizUWXihGqBWmSFJDkzjwVv6wziQE,1415
|
||||
networkx/algorithms/shortest_paths/__init__.py,sha256=Rmxtsje-mPdQyeYhE8TP2NId-iZEOu4eAsWhVRm2Xqk,285
|
||||
networkx/algorithms/shortest_paths/astar.py,sha256=EhUUKwQ6kGBPVXVA7inJN3tb5nr45M99kEDygVcLPf8,8967
|
||||
networkx/algorithms/shortest_paths/dense.py,sha256=rdMTlAwrboZMaA8Hj0RmbEpqNNU9zmBxk5Ljswsg37U,8211
|
||||
networkx/algorithms/shortest_paths/generic.py,sha256=6N22Kf1t-7HFPn2-QoLqbm1kJSKk5dWCimNi8UuYzM4,25738
|
||||
networkx/algorithms/shortest_paths/unweighted.py,sha256=3Up0AF835pSSgSQjzmTK8fw42o0CGc-tsrjenTRfjQc,15642
|
||||
networkx/algorithms/shortest_paths/weighted.py,sha256=AGX34ATlzEi1_cyayRzxeUnEPjauQmbc-nKBng6wAL0,82465
|
||||
networkx/algorithms/shortest_paths/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/shortest_paths/tests/test_astar.py,sha256=G9hrEo2U9c_kzaRTAXYbS1TpcJgF_uqj9249K2qbjAY,8941
|
||||
networkx/algorithms/shortest_paths/tests/test_dense.py,sha256=ievl4gu3Exl_31hp4OKcsAGPb3g3_xFUM4t3NnvrG_A,6747
|
||||
networkx/algorithms/shortest_paths/tests/test_dense_numpy.py,sha256=BNwXCe2wgNPE8o35-shPsFj8l19c_QG6Ye8tkIGphf8,2300
|
||||
networkx/algorithms/shortest_paths/tests/test_generic.py,sha256=oJBKCLIsMA1KTo8q-oG9JQmaxysc7_QSgbBqMImh23c,18456
|
||||
networkx/algorithms/shortest_paths/tests/test_unweighted.py,sha256=kMDgx5JP2QHyOST41zhyUiSc3qajKalAJP6W0Mt3oeg,5891
|
||||
networkx/algorithms/shortest_paths/tests/test_weighted.py,sha256=dmzFBYN3QEDZoun7RAtSe_spsGSbvkDiJSgUf9e-1K8,35038
|
||||
networkx/algorithms/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/tests/test_asteroidal.py,sha256=DnWI5_jnaaZMxtG44XD0K690HZs8ez7HU_9dSR-p6eA,502
|
||||
networkx/algorithms/tests/test_boundary.py,sha256=1OSJh32FYFhAVYB5zqxhZGEXZLS0HPp9kvfHZvWmD3o,6227
|
||||
networkx/algorithms/tests/test_bridges.py,sha256=jSCguECho0GNHnu0vpRh1twyfGP6tWFcaYL1rgvc8mU,4026
|
||||
networkx/algorithms/tests/test_broadcasting.py,sha256=2LIMrKmSGSSWRLy5TR_NzDQD1annA2JohpsbEbVJKfE,2021
|
||||
networkx/algorithms/tests/test_chains.py,sha256=Vhpf0maR3OUaa6aUxC6FNYLeUvBKPZyFimM4_WsLQKo,4364
|
||||
networkx/algorithms/tests/test_chordal.py,sha256=DPdNPY7KtqCsCwYVb4xQfnIm-z35dUJIWxNHtAiQLAQ,4438
|
||||
networkx/algorithms/tests/test_clique.py,sha256=FPIF2f8NLODsz-k_qrHt7DolClV_VdNWSh68oe8-ygI,9413
|
||||
networkx/algorithms/tests/test_cluster.py,sha256=CzYPJm4QY5SL-amMNh2ItPgQ-FjePPG9EBfIKOZHp6s,15883
|
||||
networkx/algorithms/tests/test_communicability.py,sha256=4KK9wU9gAUqHAAAyHwAKpq2dV9g415s_X0qd7Tt83gU,2938
|
||||
networkx/algorithms/tests/test_core.py,sha256=CF7YPX3F2pUtBu2sp4ZEAGRldaBkdgr1ufk6UkrETuA,9555
|
||||
networkx/algorithms/tests/test_covering.py,sha256=EeBjQ5mxcVctgavqXZ255T8ryFocuxjxdVpIxVUNFvw,2718
|
||||
networkx/algorithms/tests/test_cuts.py,sha256=gKm9VDtnmwFli6kgwV1ktEFI_rw84p2Sg02Em6SoW5Q,5376
|
||||
networkx/algorithms/tests/test_cycles.py,sha256=Sp7PSNB8iy_iST90uNDv8mXwiSOXWRYLkUFJz9pwHWU,34424
|
||||
networkx/algorithms/tests/test_d_separation.py,sha256=ZypzMVDpBZo_4qBlieFlj3RVU6vh7tejEZGlu7qcQbc,10929
|
||||
networkx/algorithms/tests/test_dag.py,sha256=aEUvVl7Ht3XC2XdBanoCDNM7vpPb8YELvMQDawLZbhQ,29385
|
||||
networkx/algorithms/tests/test_distance_measures.py,sha256=WHsOxV9mI-PqJa08C65Gd3myv5G7fzDehX_atJLql7Q,26154
|
||||
networkx/algorithms/tests/test_distance_regular.py,sha256=w27OTUtAI0VQv7cikkOdJg4bo4q7xTNIVE8nbU_x7b8,2915
|
||||
networkx/algorithms/tests/test_dominance.py,sha256=QVBj3SarZNm57YKavOLFtwU43xn4fxcEU6chn2Gfuaw,9194
|
||||
networkx/algorithms/tests/test_dominating.py,sha256=hyta7ln6BbHaGlpEUla6jVzh2PRuSjvujLSGXrmwZbc,1228
|
||||
networkx/algorithms/tests/test_efficiency.py,sha256=QKWMvyjCG1Byt-oNp7Rz_qxnVeT77Zk27lrzI1qH0mA,1894
|
||||
networkx/algorithms/tests/test_euler.py,sha256=L4L1ljHVxQxjQQludO2r6k3UZU7WAY_N6WYUjFx1fEk,11209
|
||||
networkx/algorithms/tests/test_graph_hashing.py,sha256=MqRwsNbyRWUy94V7UuDqEREuHxFTSn7-d0HzwSDI2As,24534
|
||||
networkx/algorithms/tests/test_graphical.py,sha256=uhFjvs04odxABToY4IRig_CaUTpAC3SfZRu1p1T7FwY,5366
|
||||
networkx/algorithms/tests/test_hierarchy.py,sha256=uW8DqCdXiAeypkNPKcAYX7aW86CawYH84Q0bW4cDTXo,1184
|
||||
networkx/algorithms/tests/test_hybrid.py,sha256=kQLzaMoqZcKFaJ3D7PKbY2O-FX59XDZ1pN5un8My-tk,720
|
||||
networkx/algorithms/tests/test_isolate.py,sha256=LyR0YYHJDH5vppQzGzGiJK-aaIV17_Jmla8dMf93olg,555
|
||||
networkx/algorithms/tests/test_link_prediction.py,sha256=Jah4vOGDYcWaPSl_iG-0fOXnhu5o8f6wcfakRmWuX7I,20004
|
||||
networkx/algorithms/tests/test_lowest_common_ancestors.py,sha256=GvhYCQMnVYD9LHPCNFgWMAUmOV8V5gko0fe05zi1JwU,13153
|
||||
networkx/algorithms/tests/test_matching.py,sha256=jhehNkApE5RuMPtbjWNeHn0tPqhVz65mL7QakfRA3Vw,20174
|
||||
networkx/algorithms/tests/test_max_weight_clique.py,sha256=M1eoy8OtuQVZkEvNMauV9vqR6hHtOCrtq6INv2qzMyA,6739
|
||||
networkx/algorithms/tests/test_mis.py,sha256=Z2tKoqbs-AFPzEBDYO7S8U-F7usLfZJ2l6j2DpZUts4,1865
|
||||
networkx/algorithms/tests/test_moral.py,sha256=15PZgkx7O9aXQB1npQ2JNqBBkEqPPP2RfeZzKqY-GNU,452
|
||||
networkx/algorithms/tests/test_node_classification.py,sha256=NgJJKUHH1GoD1GE3F4QRYBLM3fUo_En3RNtZvhqCjlg,4663
|
||||
networkx/algorithms/tests/test_non_randomness.py,sha256=xMkJp0F91Qn45EUuMottk1WSDfOQ90TDQfZFDSJ8tkE,1000
|
||||
networkx/algorithms/tests/test_planar_drawing.py,sha256=NN55y2cs9IdZYwUsG-RbI07aGSMx5gp5vnmGLC2vopo,8765
|
||||
networkx/algorithms/tests/test_planarity.py,sha256=rrIGX28JoG_DqINsuY4TSdDloxnz4dkCd3xeRo9Svqs,16386
|
||||
networkx/algorithms/tests/test_polynomials.py,sha256=baI0Kua1pRngRC6Scm5gRRwi1bl0iET5_Xxo3AZTP3A,1983
|
||||
networkx/algorithms/tests/test_reciprocity.py,sha256=X_PXWFOTzuEcyMWpRdwEJfm8lJOfNE_1rb9AAybf4is,1296
|
||||
networkx/algorithms/tests/test_regular.py,sha256=5KGvwhixanEigI0KgeUJ1hWPw7YRGZgNbrMkKcndd5M,2626
|
||||
networkx/algorithms/tests/test_richclub.py,sha256=ql_j69gIoph8d6oD2tzDqu3b-uW884nmEJZQmWANR6k,3965
|
||||
networkx/algorithms/tests/test_similarity.py,sha256=BV5f4DiSQHPsXkSosf29idxGQ_wLiTwEsiHtgDOLLw4,33189
|
||||
networkx/algorithms/tests/test_simple_paths.py,sha256=7U9wCXz4SHK0XeYrs1k2KjYgrYVQDnts2ggQLzU18p0,25181
|
||||
networkx/algorithms/tests/test_smallworld.py,sha256=rfgNCRU6YF55f8sCuA5WmX6MmhDci89Tb4jaz4ALjcQ,2405
|
||||
networkx/algorithms/tests/test_smetric.py,sha256=VM14L4X1AABvINDL9qKXzlech_Q2g4Aee-ozWM2Qrr4,144
|
||||
networkx/algorithms/tests/test_sparsifiers.py,sha256=1GRbQy1vfmwv6eUhP4Io0aykH2VyTJfFWmncrXmTqi4,4044
|
||||
networkx/algorithms/tests/test_structuralholes.py,sha256=NsQfW85GquVUndyHBVo5OMku_C8i8bfE-4WXJr5dILw,5290
|
||||
networkx/algorithms/tests/test_summarization.py,sha256=uNyaUstobIEu6M_Hexik-3YiYTRSy_XO6LUqoE4wazw,21312
|
||||
networkx/algorithms/tests/test_swap.py,sha256=WJtGMkSbAd1Cv06VaUeDVHosNOtdigsqEspyux0ExCs,6144
|
||||
networkx/algorithms/tests/test_threshold.py,sha256=RF_SM5tdMGJfEHETO19mFicnt69UIlvVeuCwI7rxb0M,9751
|
||||
networkx/algorithms/tests/test_time_dependent.py,sha256=NmuV2kDo4nh2MeN0hwcJf0QSDtqMD0dfSeeKSsYBtQ8,13342
|
||||
networkx/algorithms/tests/test_tournament.py,sha256=XF6TwqPwJ7bKKuD7vM1Q7a9NnKerk38lWghvqTekQfk,4159
|
||||
networkx/algorithms/tests/test_triads.py,sha256=anSuYt1ZmV0_aGtSPLl5YxEQZHOuo0QndNADUdZKqdY,9383
|
||||
networkx/algorithms/tests/test_vitality.py,sha256=p5lPWCtVMtbvxDw6TJUaf8vpb0zKPoz5pND722xiypQ,1380
|
||||
networkx/algorithms/tests/test_voronoi.py,sha256=M4B6JtkJUw56ULEWRs1kyVEUsroNrnb5FBq9OioAyHM,3477
|
||||
networkx/algorithms/tests/test_walks.py,sha256=X8cb-YvGHiiqbMEXuKMSdTAb9WtVtbHjIESNSqpJTmU,1499
|
||||
networkx/algorithms/tests/test_wiener.py,sha256=k9ld7wdPq5knS6cjo0hja8aWL-cdxYKGRpDU0z3cvNI,3209
|
||||
networkx/algorithms/traversal/__init__.py,sha256=YtFrfNjciqTOI6jGePQaJ01tRSEQXTHqTGGNhDEDb_8,142
|
||||
networkx/algorithms/traversal/beamsearch.py,sha256=Vn0U4Wck8ICShIAGggv3tVtQWVW0ABEz_hcBsGrql6o,3473
|
||||
networkx/algorithms/traversal/breadth_first_search.py,sha256=iFE-rskYn-oOOEI8ocCbCD3QMH5PX41RP8Xb2Krb2H8,18288
|
||||
networkx/algorithms/traversal/depth_first_search.py,sha256=2V4T3tGujcAtV3W6WcTQUjGAAe3b1rqinONowUhLsa8,16795
|
||||
networkx/algorithms/traversal/edgebfs.py,sha256=s8lugT0l6J8HRmB8dCs3D1UxZa95SGHGyP2WCfaABOc,6244
|
||||
networkx/algorithms/traversal/edgedfs.py,sha256=_s9N4UKaEi8sRtJ604qPHl_NIM92rOLkgec9ZPwZYp0,5957
|
||||
networkx/algorithms/traversal/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/traversal/tests/test_beamsearch.py,sha256=bzUcswZ1qo0ecDZYSER_4enbsW6SjTpb_3Nb3fqmkAo,900
|
||||
networkx/algorithms/traversal/tests/test_bfs.py,sha256=mOMBIo1SEplTa0zQI3XN__UovQgd573t8q2_rxu7e90,6465
|
||||
networkx/algorithms/traversal/tests/test_dfs.py,sha256=EqLV_C-3frQ89C-SD0jtHvWEankNfPXm6M76JDdenq0,10604
|
||||
networkx/algorithms/traversal/tests/test_edgebfs.py,sha256=8oplCu0fct3QipT0JB0-292EA2aOm8zWlMkPedfe6iY,4702
|
||||
networkx/algorithms/traversal/tests/test_edgedfs.py,sha256=HGmC3GUYSn9XLMHQpdefdE6g-Uh3KqbmgEEXBcckdYc,4775
|
||||
networkx/algorithms/tree/__init__.py,sha256=wm_FjX3G7hqJfyNmeEaJsRjZI-8Kkv0Nb5jAmQNXzSc,149
|
||||
networkx/algorithms/tree/branchings.py,sha256=B0c_uKpcnV2SwJMZJRK0BMEz8LkvIcOhv1y0AI0gTnY,34339
|
||||
networkx/algorithms/tree/coding.py,sha256=uFqGL6g1QWjGC4F9MCrsz_8rjWeuMJr5HUumGNsqXV4,13464
|
||||
networkx/algorithms/tree/decomposition.py,sha256=lY_rqx9JxnLEkp1wiAv0mX62PGPwGQ6SW4Jp48o8aiw,3071
|
||||
networkx/algorithms/tree/mst.py,sha256=nvaqotj00pnqAMY6_mOr8YLAAd2u-ApefXzzWU_4JVo,46140
|
||||
networkx/algorithms/tree/operations.py,sha256=1N6AH0vfY2QyyYBH_OOE0b7dS7dx9-pT3cOTQVmE1A0,4042
|
||||
networkx/algorithms/tree/recognition.py,sha256=bYnaDN0ZaIWTgq0tbPEHAcdxQBWZpDvWypZarBbA334,7569
|
||||
networkx/algorithms/tree/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/algorithms/tree/tests/test_branchings.py,sha256=uSMc57nLXLBRgm_ERqUSNSrTfq9R3ZWDFkyrG3KR8Vs,17727
|
||||
networkx/algorithms/tree/tests/test_coding.py,sha256=XC6SbfA2zVGH4FyJJyv6o8eOnBu7FNzNot3SKs7QmEo,3955
|
||||
networkx/algorithms/tree/tests/test_decomposition.py,sha256=vnl_xoQzi1LnlZL25vXOZWwvaWmon3-x222OKt4eDqE,1871
|
||||
networkx/algorithms/tree/tests/test_mst.py,sha256=ad6kAEpAF9PH1FyD_jHa2xnAtgBGs75sYGTY0s530BQ,31631
|
||||
networkx/algorithms/tree/tests/test_operations.py,sha256=ybU96kROTVJRTyjLG7JSJjYlPxaWmYjUVJqbXV5VGGI,1961
|
||||
networkx/algorithms/tree/tests/test_recognition.py,sha256=qeMEIvg-j2MqaU-TNIQhCcXxao8vTBy0wjpU7jr2iw8,4521
|
||||
networkx/classes/__init__.py,sha256=Q9oONJrnTFs874SGpwcbV_kyJTDcrLI69GFt99MiE6I,364
|
||||
networkx/classes/coreviews.py,sha256=9koRKORoAkI0spB-yMCkqXvry7mMd6hmSPhBab3SzcE,13143
|
||||
networkx/classes/digraph.py,sha256=Fup1GbADCpXKLA12M67RbhA0cm6BGi_4cIxBLsjHEtc,48101
|
||||
networkx/classes/filters.py,sha256=PCy7BsoIby8VcamqDjZQiNAe_5egI0WKUq-y5nc9unQ,2817
|
||||
networkx/classes/function.py,sha256=H6ho_EtU8zRTNv4VCaLb_BY_56PplSiftktk-OkqdcU,38898
|
||||
networkx/classes/graph.py,sha256=bc5yHCeDu0XyfBOR0nRx9rEMOpry9FdezxqiCIYPH1E,71102
|
||||
networkx/classes/graphviews.py,sha256=ulUTLozEK_hj_4TGHdgvxveR2-rb92Q14jjxH4oH4Go,8520
|
||||
networkx/classes/multidigraph.py,sha256=aOqjfSJ6Lx9l-1zwCIMNYRW0mW1wPDniEcRWQ8gKmYY,36351
|
||||
networkx/classes/multigraph.py,sha256=PSZR7QgyszlO5PqzhxI954LySqLHq-589OQrCOtC9pw,47248
|
||||
networkx/classes/reportviews.py,sha256=u0hNZqaWXCfLMP_lq835XCIVStkZQJ9HaQPeDPPoo88,46132
|
||||
networkx/classes/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/classes/tests/dispatch_interface.py,sha256=OA4t1XQX7Qqm3pGhTZYKno4c_zbIcvpSWstO_LXIVRo,6479
|
||||
networkx/classes/tests/historical_tests.py,sha256=nrv0ccvUMtp714VEV1I9UTwWz8ohgujbC-Xnxpc7kU8,16174
|
||||
networkx/classes/tests/test_coreviews.py,sha256=qzdozzWK8vLag-CAUqrXAM2CZZwMFN5vMu6Tdrwdf-E,12128
|
||||
networkx/classes/tests/test_digraph.py,sha256=uw0FuEu3y_YI-PSGuQCRytFpXLF7Eye2fqLJaKbXkBc,12283
|
||||
networkx/classes/tests/test_digraph_historical.py,sha256=Q8DGba1o0xRZfdsQxreq9naREFSgVhbaZOvTT7W8mdc,3684
|
||||
networkx/classes/tests/test_filters.py,sha256=fBLig8z548gsBBlQw6VJdGZb4IcqJj7_0mi2Fd2ncEM,5851
|
||||
networkx/classes/tests/test_function.py,sha256=a7fsmmdOSX-OYTEP0RV27vh4e_jyZh9w6SX1iABMVq0,34997
|
||||
networkx/classes/tests/test_graph.py,sha256=77t7pk1Pmz-txewyD2Dv19Vva6vWpWCtJSPtFx-EY_Y,30913
|
||||
networkx/classes/tests/test_graph_historical.py,sha256=Jl3aCS1BtwoCRdajMKDZcMQRypkOis0J_XU2LHEmYUE,274
|
||||
networkx/classes/tests/test_graphviews.py,sha256=i4x3ii8--PPg_pK4YA8aMR1axUQCdXZYpzmB05iEAOg,11466
|
||||
networkx/classes/tests/test_multidigraph.py,sha256=ryTKegCoYixXbAqOn3mIt9vSMb5666Dv-pfMkXEjoUE,16342
|
||||
networkx/classes/tests/test_multigraph.py,sha256=0vFQO3RCJaBpzXvnQzdWa_qYLHNo_I9DICYhPZJNUMk,18777
|
||||
networkx/classes/tests/test_reportviews.py,sha256=dNL6fMMsumYKU4Q_kx-vsXB3GU9xTQxrQn45qoa8e8I,41919
|
||||
networkx/classes/tests/test_special.py,sha256=IJsmqCS9LrTDoZ11KPmo-UOI7xEskL7NyduEJNPMNqs,4103
|
||||
networkx/classes/tests/test_subgraphviews.py,sha256=1dcJHq3F00LyoFSu6CTFPqS7DFIkWK1PyQu4QvJh5ko,13223
|
||||
networkx/drawing/__init__.py,sha256=rnTFNzLc4fis1hTAEpnWTC80neAR88-llVQ-LObN-i4,160
|
||||
networkx/drawing/layout.py,sha256=eA5YJ2xA-AYDL1WFICETTqfnl7amjRYfkgOJEJbigvw,50243
|
||||
networkx/drawing/nx_agraph.py,sha256=bbtLuusDb4vNu6EPF9rgBdJsP-DaIFyzPgxBn5KEA1I,13937
|
||||
networkx/drawing/nx_latex.py,sha256=zSGYPpn3wewWaEBCJerq6gRb5RmKP9SY0sLWhyqD8Xo,24805
|
||||
networkx/drawing/nx_pydot.py,sha256=b_USURmDffy1KWh2ue1vMf99-zCJiETldIDS7HGHitc,9591
|
||||
networkx/drawing/nx_pylab.py,sha256=WTqktY5niRF56x4bsotbGrastccRtgUcsxjqYt8Oe_s,66369
|
||||
networkx/drawing/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/drawing/tests/test_agraph.py,sha256=BzrfyQYEtaUbxOzhROLE6njzpG6ZxW7QkRdpVholLAY,8789
|
||||
networkx/drawing/tests/test_latex.py,sha256=_Wng73kMltC-_sUoxdo2uBL2bkEc7HMqkKhwo9ZDJGA,8710
|
||||
networkx/drawing/tests/test_layout.py,sha256=IWl7cCFb_eGowjACF5tki9voThOzHNpPgjkonzH-pzQ,20611
|
||||
networkx/drawing/tests/test_pydot.py,sha256=X9b66gWqMgdTEyRJ7Zmy5kL9cr22waI688K9BJUf4Bk,4973
|
||||
networkx/drawing/tests/test_pylab.py,sha256=KgWiNwgkdSn-A36-DP68ZFibbb6JWV4SOJA7O433Y5U,35921
|
||||
networkx/drawing/tests/baseline/test_house_with_colors.png,sha256=FQi9pIRFwjq4gvgB8cDdBHL5euQUJFw6sQlABf2kRVo,21918
|
||||
networkx/generators/__init__.py,sha256=EoYB5c5ZE4rsNKZvl1TRQy2Vo2D3T2H-YunyD2i6sa0,1366
|
||||
networkx/generators/atlas.dat.gz,sha256=c_xBbfAWSSNgd1HLdZ9K6B3rX2VQvyW-Wcht47dH5B0,8887
|
||||
networkx/generators/atlas.py,sha256=07Xegzj5j_SiApgzgve2rSTXp0nmWwCw7-1keUjbvRo,5606
|
||||
networkx/generators/classic.py,sha256=68lCnSeo50uV1yoc6ZvjnckR7lAbrhUdniyEogczvB4,32000
|
||||
networkx/generators/cographs.py,sha256=-WR4_yrNk_X5nj7egb7A22eKPVymOdIYM-IftSRH4WA,1891
|
||||
networkx/generators/community.py,sha256=_p_4OfItbg8nS0b3EvojCXZ8cESdC-0Gj67V5w2veuM,34911
|
||||
networkx/generators/degree_seq.py,sha256=97XUApgQZrpSxyXODgVLP9drX5rEF-Xb40bYqaBGSj0,30173
|
||||
networkx/generators/directed.py,sha256=Vcg0zeWFS2-F99bFmhXj4mzlCy_yoBuuqjnSx5I-Dco,15696
|
||||
networkx/generators/duplication.py,sha256=hmYAHJBez7WlfdVGGa288JFUBHoIUdVqEGCodApKOr4,5831
|
||||
networkx/generators/ego.py,sha256=TZ-o05FpvVPAdXFBLjjfa2FnAcZwlgqr_1jMdLTzFSg,1900
|
||||
networkx/generators/expanders.py,sha256=nJMys4kHNHZzC5jkyCFftw1W_6cF_r82eGTqn7cNrDo,14455
|
||||
networkx/generators/geometric.py,sha256=cCrx1HdlLc08klO6bBzb-g0GUfF2AaktCrJOmhDSWUo,39610
|
||||
networkx/generators/harary_graph.py,sha256=N6vzXKrW-ZU-xDc2ZTF_Gf7kb0LRQVRfK2oLBQvyVO8,6159
|
||||
networkx/generators/internet_as_graphs.py,sha256=Y_pQaGhe183X6dXH4ocqIK3DzXRz0oXE-AKwsL1yCHk,14172
|
||||
networkx/generators/intersection.py,sha256=EFm0AOjnqyp8KcT7kGWqANq-_vq9kQ0d_0DzVyQyP-o,4101
|
||||
networkx/generators/interval_graph.py,sha256=ZTmdgQbBx3M6sysGWXbGyngYYOC1TAXD3Ozkw4deQFw,2204
|
||||
networkx/generators/joint_degree_seq.py,sha256=nyp86NC_4XvzvwpwwzKrrCSz1i_4bESSDtVjWvpkWFg,24773
|
||||
networkx/generators/lattice.py,sha256=kVCvTahWPQGNbok6maXfaqGzm88UuxhP7D9BkKhGW1o,13500
|
||||
networkx/generators/line.py,sha256=4mFH60EsHvb4wW34E45Byl_rXjDPICD59caoAtOE8VI,17531
|
||||
networkx/generators/mycielski.py,sha256=xBX2m77sCzumoH5cAGitksvEEW-ocbCnbdaN7fKUtVk,3314
|
||||
networkx/generators/nonisomorphic_trees.py,sha256=gE7uPB-uaE6rEfaimmR9bqobso5yclcCG6u8zwZlS48,6453
|
||||
networkx/generators/random_clustered.py,sha256=i_NdvvchHvsvbwgQtoWSY_pLwvhO9Lh02MSZXzgGb7c,4183
|
||||
networkx/generators/random_graphs.py,sha256=qi_AjT9Hx5M6ujgTe-DBVIsY9LwqPPZjuAHFMIaQOOc,51346
|
||||
networkx/generators/small.py,sha256=Xs9JNTtoLiShg7fF7_VRJ-G18JGSt4JEMmhhtpS51r8,28171
|
||||
networkx/generators/social.py,sha256=IUVgWVMUmRaUG28U0KzB--0DtKLdCFDz54tkJ69W4ms,23437
|
||||
networkx/generators/spectral_graph_forge.py,sha256=kF4SCE3dcgwBA9bMys5O-mCf529dFhraw3Zmy9GRnQ4,4240
|
||||
networkx/generators/stochastic.py,sha256=Qg9vWm9EOug2OQVIHL_dZ5HrXc16lxnWyzX52KWNEPI,1981
|
||||
networkx/generators/sudoku.py,sha256=kLM2AP0H4966uYiNO1oAFEmv5qBftU_bOfYucRxexM0,4288
|
||||
networkx/generators/time_series.py,sha256=_DMiY9X95O_9sK2BSeeTb2yMWfStBwKFWwn6FUOXN4Q,2439
|
||||
networkx/generators/trees.py,sha256=2a8MsKTzQfFLBESG5oocbTaMv0cYX0vtedFD29eiOFA,36533
|
||||
networkx/generators/triads.py,sha256=7kScTf3ITDi3qsSa-IvGMpa9diEaFwQnRuIf3Tv4UBI,2452
|
||||
networkx/generators/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/generators/tests/test_atlas.py,sha256=nwXJL4O5jUqhTwqhkPxHY8s3KXHQTDEdsfbg4MsSzVQ,2530
|
||||
networkx/generators/tests/test_classic.py,sha256=PlEZOxT8XADzyDL-GIItEx66hW4bvVl4UCo4ojX4m80,24021
|
||||
networkx/generators/tests/test_cographs.py,sha256=Khqvx15VNWHjNkMeEpsio3oJAi8KoiYqfTqKVbQWT9U,458
|
||||
networkx/generators/tests/test_community.py,sha256=FGcDo3Ajb-yYc5kUkFbVfOJVMG-YppbAtjgBPcVzjLc,11311
|
||||
networkx/generators/tests/test_degree_seq.py,sha256=in6lg1pwcAg1N08MA3lQdr3lnm2-aoUy3BRm6Yj_OBQ,7093
|
||||
networkx/generators/tests/test_directed.py,sha256=A01l9R-VBauEN7UEtLkkp9SubjjrnC_QWR2w0Q5GHq0,5259
|
||||
networkx/generators/tests/test_duplication.py,sha256=UdIGDF_fishanWid1xO_aH4NDfie8xpIqd26qndhOqI,3155
|
||||
networkx/generators/tests/test_ego.py,sha256=8v1Qjmkli9wIhhUuqzgqCzysr0C1Z2C3oJMCUoNvgY4,1327
|
||||
networkx/generators/tests/test_expanders.py,sha256=0X78pbB1PnW4pxa7UvlA5lzq6u0ZCfnvMBMYJvLHYH0,5602
|
||||
networkx/generators/tests/test_geometric.py,sha256=gnVm4dam_Er88YwaNpNZC6mjJjfgwMYhyLOtU9oPn1o,18087
|
||||
networkx/generators/tests/test_harary_graph.py,sha256=GiX5LXXJaNxzjvd-Nyw_QuARzbFGkA6zE1R1eX8mclw,4936
|
||||
networkx/generators/tests/test_internet_as_graphs.py,sha256=QmzkOnWg9bcSrv31UcaD6Cko55AV-GPLLY5Aqb_Dmvs,6795
|
||||
networkx/generators/tests/test_intersection.py,sha256=hcIit5fKfOn3VjMhz9KqovZK9tzxZfmC6ezvA7gZAvM,819
|
||||
networkx/generators/tests/test_interval_graph.py,sha256=JYMi-QMkJQdBU9uOdfm0Xr6MEYqIbhU5oSDa6D3tSb0,4277
|
||||
networkx/generators/tests/test_joint_degree_seq.py,sha256=8TXTZI3Um2gBXtP-4yhGKf9vCi78-NVmWZw9r9WG3F8,4270
|
||||
networkx/generators/tests/test_lattice.py,sha256=q4Ri-dH9mKhfq0PNX9xMeYRUiP0JlPBr7piSruZlFlg,9290
|
||||
networkx/generators/tests/test_line.py,sha256=vXncJuny2j5ulCJyT01Rt1tTwPib4XelS3dJDdJXjx0,10378
|
||||
networkx/generators/tests/test_mycielski.py,sha256=fwZLO1ybcltRy6TzCel8tPBil1oZWv9QSXs779H6Xt0,946
|
||||
networkx/generators/tests/test_nonisomorphic_trees.py,sha256=g5zkb0T7mkb2AdT-GkIGPXvahh9lv-f-XddJ80Y0Zfg,2454
|
||||
networkx/generators/tests/test_random_clustered.py,sha256=SalHqWvpnXA3QrDRMjLx15dk2c4Us8Ck52clUERoUI8,1297
|
||||
networkx/generators/tests/test_random_graphs.py,sha256=RTrKahiDHdXIb2ScFzQk3vrxncnMOE3W5LyJfIPvuKc,18925
|
||||
networkx/generators/tests/test_small.py,sha256=K4-sSBZca3UMP1deUOWlkSzpanJBAT-vQdr11PMI_QY,7060
|
||||
networkx/generators/tests/test_spectral_graph_forge.py,sha256=x4jyTiQiydaUPWYaGsNFsIB47PAzSSwQYCNXGa2B4SU,1594
|
||||
networkx/generators/tests/test_stochastic.py,sha256=f-5KD3RpoQf369gXHH7KGebE19g5lCkXR_alcwmFm_s,2179
|
||||
networkx/generators/tests/test_sudoku.py,sha256=dgOmk-B7MxCVkbHdZzsLZppQ61FAArVy4McSVL8Afzo,1968
|
||||
networkx/generators/tests/test_time_series.py,sha256=rgmFcitlKa_kF6TzJ2ze91lSmNJlqjhvgrYet0AUZx8,2230
|
||||
networkx/generators/tests/test_trees.py,sha256=Pvh0MvTKaRuZuwWL-wpJIC0zlBAcnTirpSLJi-9c7qc,7006
|
||||
networkx/generators/tests/test_triads.py,sha256=K8anVEP8R90Y172IrKIOrYRWRJBGeqxNqU9isX7Ybxs,333
|
||||
networkx/linalg/__init__.py,sha256=7iyNZ_YYBnlsW8zSfhUgvEkywOrUWfpIuyS86ZOKlG8,568
|
||||
networkx/linalg/algebraicconnectivity.py,sha256=3nOW8g21_8B_J_cCj6UYMVqGUHEI_T3827LcyoSxJvI,21149
|
||||
networkx/linalg/attrmatrix.py,sha256=Mwiw5dvIvjDY7Bwlb4sy85KzfoP02EF64CfG_GvJsro,15509
|
||||
networkx/linalg/bethehessianmatrix.py,sha256=Ii4NX6mo90W3MppCRcYn9dRW_MsEkVdA9TH6x7JhX8o,2697
|
||||
networkx/linalg/graphmatrix.py,sha256=NIs2uWGS_8lJJ5IQ8Og9aIWHawghtlCDWifqOIKV2-c,5623
|
||||
networkx/linalg/laplacianmatrix.py,sha256=iRHHabmb9S4ChDPx3Yn2-WIEQFd_flFD3AZkA4k-oyY,20536
|
||||
networkx/linalg/modularitymatrix.py,sha256=R_VITtgIkGenxlsCLN4u6CYxj3_HiPXfeU29yarntRo,4706
|
||||
networkx/linalg/spectrum.py,sha256=aRY7ApYv5HxrO_4O8brxpZRw3SJU3fYzlgMwhEIXcrc,4215
|
||||
networkx/linalg/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/linalg/tests/test_algebraic_connectivity.py,sha256=Kj2ct6gQ71xXFP7usAbFLJxD7ZdtTzneHiFJQOoVCUQ,13737
|
||||
networkx/linalg/tests/test_attrmatrix.py,sha256=XD3YuPc5yXKWbhwVSI8YiV_wABWM-rLtwf1uwwWlnI0,2833
|
||||
networkx/linalg/tests/test_bethehessian.py,sha256=0r-Do902ywV10TyqTlIJ2Ls3iMqM6sSs2PZbod7kWBM,1327
|
||||
networkx/linalg/tests/test_graphmatrix.py,sha256=e5YSH9ih1VL64nnYgZFDvLyKbP3BFqpp0jY6t-8b2eY,8708
|
||||
networkx/linalg/tests/test_laplacian.py,sha256=0AGJwezqohoQtrmTZ94Gvg5vISMCB7_G2QdJl7JFTXg,14081
|
||||
networkx/linalg/tests/test_modularity.py,sha256=mfKUvwc3bj6Rud1aG4oK3Eu1qg12o6cB8-pv5ZFicYY,3115
|
||||
networkx/linalg/tests/test_spectrum.py,sha256=agP2DsiEIvtkNUkT94mdPtJjwnobnjMTUOwjIQa4giA,2828
|
||||
networkx/readwrite/__init__.py,sha256=TvSbnGEHQ5F9CY2tkpjWYOyrUj6BeW3sc6P4_IczbKA,561
|
||||
networkx/readwrite/adjlist.py,sha256=FjVdLlrWLi7mVuKHzO16AO6CVFqA6TCJu3GxLxSOXbU,8435
|
||||
networkx/readwrite/edgelist.py,sha256=pjnG_o3_usmgthIpscQRJWHZZ8b3-39Uqgj0OF9qE_g,14237
|
||||
networkx/readwrite/gexf.py,sha256=wq50Twz2o9XuoeR6awNYcftZDP-MRSztnsukmKiE3cQ,39693
|
||||
networkx/readwrite/gml.py,sha256=5TWaEGaQv33f8F5i5IciQp8YbK0MKNxb1E5GQcKC02M,31150
|
||||
networkx/readwrite/graph6.py,sha256=q1CmarzZ_jW_A15fU3YyKFl5OhtN-qWUrVcfc0ZTv6w,11401
|
||||
networkx/readwrite/graphml.py,sha256=12KKKXLDyMIif-KB4ZWdvMpgnuRqQ2EEqwY9TX3jkj8,39318
|
||||
networkx/readwrite/leda.py,sha256=VjpyUYeAWPD4TQSyvcC-ftcTeg6Pow9zJJqNuiGZ0zU,2797
|
||||
networkx/readwrite/multiline_adjlist.py,sha256=_3SB2719ceBdJjYPkyAZUPuCebcHX_Zwk6mQDs4OcTQ,11301
|
||||
networkx/readwrite/p2g.py,sha256=0Mi8yvV0Hy6Bo4cbCKYjNp0_0ALYmNNCUMer4w1bkrY,3092
|
||||
networkx/readwrite/pajek.py,sha256=9j3sRjLzPQxqQFdEoTCOwICpdAf7G39cdls04dhErns,8738
|
||||
networkx/readwrite/sparse6.py,sha256=MFih4PCNJSY4UFuJxBNxYjBT9_11UpIbPQrySSiE9bg,10315
|
||||
networkx/readwrite/text.py,sha256=9u43d_m2xcoJKl5rKQ-3N0kIdr3m4xzX2i1y05xDDbM,29163
|
||||
networkx/readwrite/json_graph/__init__.py,sha256=37XJPMmilcwwo8KqouLWUly7Yv5tZ7IKraMHbBRx3fI,677
|
||||
networkx/readwrite/json_graph/adjacency.py,sha256=WM6fdncV87WDLPOfF-IbOlOOBMX0utUjJ09UsxtwRAo,4716
|
||||
networkx/readwrite/json_graph/cytoscape.py,sha256=kX6_p24F4CnDdT0D5lYrD0-jypyMdmqnGQEXKR1_kH4,5338
|
||||
networkx/readwrite/json_graph/node_link.py,sha256=QUre2tj2j6PXdwz6J3ExxyoLsfX-Vs5lDwjcYQjSDcM,10792
|
||||
networkx/readwrite/json_graph/tree.py,sha256=K4rF4Kds4g0JhgcPTrrR_I3Pswpze8yCVH4M-WF9nn0,3851
|
||||
networkx/readwrite/json_graph/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/readwrite/json_graph/tests/test_adjacency.py,sha256=jueQE3Z_W5BZuCjr0hEsOWSfoQ2fP51p0o0m7IcXUuE,2456
|
||||
networkx/readwrite/json_graph/tests/test_cytoscape.py,sha256=vFoDzcSRI9THlmp4Fu2HHhIF9AUmECWs5mftVWjaWWs,2044
|
||||
networkx/readwrite/json_graph/tests/test_node_link.py,sha256=q0mqy5fqZFxxHQb18tmFXUOOp_oTP1Ye5bEWzTnXEFo,6468
|
||||
networkx/readwrite/json_graph/tests/test_tree.py,sha256=zBXv3_db2XGxFs3XQ35btNf_ku52aLXXiHZmmX4ixAs,1352
|
||||
networkx/readwrite/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/readwrite/tests/test_adjlist.py,sha256=t5RL85eDQFPUIdh8W4kozY_P7PMJU2LwSXjWZGE-4Aw,10134
|
||||
networkx/readwrite/tests/test_edgelist.py,sha256=cmOqVSpVO-FTdFRUAz40_e2sSmmB9xV6uYmfvw5cNhQ,10113
|
||||
networkx/readwrite/tests/test_gexf.py,sha256=Tbqueeh0XRQ8vtmGwXcyy9K3tWPlnLu6Gop0Hy4cZcc,19405
|
||||
networkx/readwrite/tests/test_gml.py,sha256=8_2nBU6n8zLHkApiuKkZNH-xMRSdA1G8ZH3Lvjspizg,21391
|
||||
networkx/readwrite/tests/test_graph6.py,sha256=DAi58D_G3j2UGk6VpfGkLGzfSAl318TIbuXSKKZ102U,6067
|
||||
networkx/readwrite/tests/test_graphml.py,sha256=MrU3AkdqNQ6gVLtOQrZUx39pV7PjS_ETu5uuT5Ce6BI,67573
|
||||
networkx/readwrite/tests/test_leda.py,sha256=_5F4nLLQ1oAZQMZtTQoFncZL0Oc-IsztFBglEdQeH3k,1392
|
||||
networkx/readwrite/tests/test_p2g.py,sha256=drsdod5amV9TGCk-qE2RwsvAop78IKEI1WguVFfd9rs,1320
|
||||
networkx/readwrite/tests/test_pajek.py,sha256=-bT-y26OmWgpLcvk-qvVfOEa-DTcQPwV2qKB99roOrk,4629
|
||||
networkx/readwrite/tests/test_sparse6.py,sha256=cqFHWz4G_kMawaRqceofN4K-JlkmPx3BEaDXkU8DD0o,5284
|
||||
networkx/readwrite/tests/test_text.py,sha256=x1N97hD31HPkj9Wn2PYti5-gcwaFNnStkaN_38HKnIg,55319
|
||||
networkx/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/tests/test_all_random_functions.py,sha256=VWBH5Uov3DswxuRDzlMPuDlcryPyWVaCvt2_OMw-dIQ,8673
|
||||
networkx/tests/test_convert.py,sha256=SoIVrqJFF9Gu9Jff_apfbpqg8QhkfC6QW4qzoSM-ukM,12731
|
||||
networkx/tests/test_convert_numpy.py,sha256=jw-iEj7wVAVXd5rlOxTBMHQD63m90q5RBQxv1ee9dNw,19065
|
||||
networkx/tests/test_convert_pandas.py,sha256=2LrQrGkxdlvEZxKmMvyptUvOsTsAcbo8u6siSVbnV3M,13346
|
||||
networkx/tests/test_convert_scipy.py,sha256=C2cY_8dgBksO0uttkhyCnjACXtC6KHjxqHUk47P5wH8,10436
|
||||
networkx/tests/test_exceptions.py,sha256=XYkpPzqMepSw3MPRUJN5LcFsUsy3YT_fiRDhm0OeAeQ,927
|
||||
networkx/tests/test_import.py,sha256=Gm4ujfH9JkQtDrSjOlwXXXUuubI057wskKLCkF6Z92k,220
|
||||
networkx/tests/test_lazy_imports.py,sha256=nKykNQPt_ZV8JxCH_EkwwcPNayAgZGQVf89e8I7uIlI,2680
|
||||
networkx/tests/test_relabel.py,sha256=dffbjiW_VUAQe7iD8knFS_KepUITt0F6xuwf7daWwKw,14517
|
||||
networkx/utils/__init__.py,sha256=7pxleRNpBWuL3FEQz3CzKLn17b6_eSwkM7dqnL1okDk,302
|
||||
networkx/utils/backends.py,sha256=pXioKWl33QJEcL9_FDufk2xv12_8bpgYniJYouJEq4M,113169
|
||||
networkx/utils/configs.py,sha256=v3p9eXPPllCcMqX33VhVQUVeXOm7eAn9xdASDEqMmP8,15023
|
||||
networkx/utils/decorators.py,sha256=aj07nVz7CW1TaYMBpSiHdRuw_U3_o0XdGoRyLMrJeXg,44836
|
||||
networkx/utils/heaps.py,sha256=HUZuETHfELEqiXdMBPmD9fA2KiACVhp6iEahcrjFxYM,10391
|
||||
networkx/utils/mapped_queue.py,sha256=WdIRk27D_ArmPs9tdpvQLQCV4Tmus212BQhxsFIMYgk,10184
|
||||
networkx/utils/misc.py,sha256=BN_VscZjoishxgbjwxm1PVG_J_jHqpuMd9bffwK6Q5M,21293
|
||||
networkx/utils/random_sequence.py,sha256=KzKh0BRMri0MBZlzxHNMl3qRTy2DnBexW3eDzmxKab4,4237
|
||||
networkx/utils/rcm.py,sha256=9tpXSK-wwLXFcq3ypXilYNAaJAKrmMiMwp4R78OLvuI,4624
|
||||
networkx/utils/union_find.py,sha256=NxKlBlyS71A1Wlnt28L-wyZoI9ExZvJth_0e2XSVris,3338
|
||||
networkx/utils/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
||||
networkx/utils/tests/test__init.py,sha256=QE0i-lNE4pG2eYjB2mZ0uw7jPD-7TdL7Y9p73JoWQmo,363
|
||||
networkx/utils/tests/test_backends.py,sha256=5fwka8bdEBtvPhjN49dMcgKsUBIVHEmMag6uckBAAd8,6108
|
||||
networkx/utils/tests/test_config.py,sha256=nImFv-UMS3uo9DFgYejSCNRm2mOUVHldKFFAY4t_2mQ,7414
|
||||
networkx/utils/tests/test_decorators.py,sha256=dm3b5yiQPlnlT_4pSm0FwK-xBGV9dcnhv14Vh9Jiz1o,14050
|
||||
networkx/utils/tests/test_heaps.py,sha256=qCuWMzpcMH1Gwu014CAams78o151QD5YL0mB1fz16Yw,3711
|
||||
networkx/utils/tests/test_mapped_queue.py,sha256=l1Nguzz68Fv91FnAT7y7B0GXSoje9uoWiObHo7TliGM,7354
|
||||
networkx/utils/tests/test_misc.py,sha256=zkD1pYO4xBuBxlGe-nU8okcX6hfDMgu0OJZGu4TMrN0,8671
|
||||
networkx/utils/tests/test_random_sequence.py,sha256=Ou-IeCFybibZuycoin5gUQzzC-iy5yanZFmrqvdGt6Q,925
|
||||
networkx/utils/tests/test_rcm.py,sha256=UvUAkgmQMGk_Nn94TJyQsle4A5SLQFqMQWld1tiQ2lk,1421
|
||||
networkx/utils/tests/test_unionfind.py,sha256=j-DF5XyeJzq1hoeAgN5Nye2Au7EPD040t8oS4Aw2IwU,1579
|
||||
networkx-3.4.2.dist-info/LICENSE.txt,sha256=W0M7kPdV65u9Bv7_HRpPXyMsUgihhWlBmeRfqV12J5I,1763
|
||||
networkx-3.4.2.dist-info/METADATA,sha256=LlJl3ah27zxE_vqelVNJm8VkyJoWyV44va-bq8VW0dc,6349
|
||||
networkx-3.4.2.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
|
||||
networkx-3.4.2.dist-info/entry_points.txt,sha256=H2jZaDsDJ_i9H2SwWpwuFel8BrZ9xHKuvh-DQAWW9lQ,94
|
||||
networkx-3.4.2.dist-info/top_level.txt,sha256=s3Mk-7KOlu-kD39w8Xg_KXoP5Z_MVvgB-upkyuOE4Hk,9
|
||||
networkx-3.4.2.dist-info/RECORD,,
|
||||
@@ -0,0 +1,5 @@
Wheel-Version: 1.0
Generator: setuptools (75.2.0)
Root-Is-Purelib: true
Tag: py3-none-any

+2
@@ -0,0 +1,2 @@
[networkx.backends]
nx_loopback = networkx.classes.tests.dispatch_interface:backend_interface
@@ -0,0 +1 @@
networkx
@@ -0,0 +1,53 @@
"""
NetworkX
========

NetworkX is a Python package for the creation, manipulation, and study of the
structure, dynamics, and functions of complex networks.

See https://networkx.org for complete documentation.
"""

__version__ = "3.4.2"


# These are imported in order as listed
from networkx.lazy_imports import _lazy_import

from networkx.exception import *

from networkx import utils
from networkx.utils import _clear_cache, _dispatchable

# load_and_call entry_points, set configs
config = utils.backends._set_configs_from_environment()
utils.config = utils.configs.config = config  # type: ignore[attr-defined]

from networkx import classes
from networkx.classes import filters
from networkx.classes import *

from networkx import convert
from networkx.convert import *

from networkx import convert_matrix
from networkx.convert_matrix import *

from networkx import relabel
from networkx.relabel import *

from networkx import generators
from networkx.generators import *

from networkx import readwrite
from networkx.readwrite import *

# Need to test with SciPy, when available
from networkx import algorithms
from networkx.algorithms import *

from networkx import linalg
from networkx.linalg import *

from networkx import drawing
from networkx.drawing import *
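# Illustrative sketch (the graph choice is arbitrary): the star-imports above
# make the generator and algorithm functions reachable directly from the
# top-level ``networkx`` namespace.
#
#     >>> import networkx as nx
#     >>> nx.__version__
#     '3.4.2'
#     >>> list(nx.path_graph(3).edges())
#     [(0, 1), (1, 2)]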
@@ -0,0 +1,133 @@
|
||||
from networkx.algorithms.assortativity import *
|
||||
from networkx.algorithms.asteroidal import *
|
||||
from networkx.algorithms.boundary import *
|
||||
from networkx.algorithms.broadcasting import *
|
||||
from networkx.algorithms.bridges import *
|
||||
from networkx.algorithms.chains import *
|
||||
from networkx.algorithms.centrality import *
|
||||
from networkx.algorithms.chordal import *
|
||||
from networkx.algorithms.cluster import *
|
||||
from networkx.algorithms.clique import *
|
||||
from networkx.algorithms.communicability_alg import *
|
||||
from networkx.algorithms.components import *
|
||||
from networkx.algorithms.coloring import *
|
||||
from networkx.algorithms.core import *
|
||||
from networkx.algorithms.covering import *
|
||||
from networkx.algorithms.cycles import *
|
||||
from networkx.algorithms.cuts import *
|
||||
from networkx.algorithms.d_separation import *
|
||||
from networkx.algorithms.dag import *
|
||||
from networkx.algorithms.distance_measures import *
|
||||
from networkx.algorithms.distance_regular import *
|
||||
from networkx.algorithms.dominance import *
|
||||
from networkx.algorithms.dominating import *
|
||||
from networkx.algorithms.efficiency_measures import *
|
||||
from networkx.algorithms.euler import *
|
||||
from networkx.algorithms.graphical import *
|
||||
from networkx.algorithms.hierarchy import *
|
||||
from networkx.algorithms.hybrid import *
|
||||
from networkx.algorithms.link_analysis import *
|
||||
from networkx.algorithms.link_prediction import *
|
||||
from networkx.algorithms.lowest_common_ancestors import *
|
||||
from networkx.algorithms.isolate import *
|
||||
from networkx.algorithms.matching import *
|
||||
from networkx.algorithms.minors import *
|
||||
from networkx.algorithms.mis import *
|
||||
from networkx.algorithms.moral import *
|
||||
from networkx.algorithms.non_randomness import *
|
||||
from networkx.algorithms.operators import *
|
||||
from networkx.algorithms.planarity import *
|
||||
from networkx.algorithms.planar_drawing import *
|
||||
from networkx.algorithms.polynomials import *
|
||||
from networkx.algorithms.reciprocity import *
|
||||
from networkx.algorithms.regular import *
|
||||
from networkx.algorithms.richclub import *
|
||||
from networkx.algorithms.shortest_paths import *
|
||||
from networkx.algorithms.similarity import *
|
||||
from networkx.algorithms.graph_hashing import *
|
||||
from networkx.algorithms.simple_paths import *
|
||||
from networkx.algorithms.smallworld import *
|
||||
from networkx.algorithms.smetric import *
|
||||
from networkx.algorithms.structuralholes import *
|
||||
from networkx.algorithms.sparsifiers import *
|
||||
from networkx.algorithms.summarization import *
|
||||
from networkx.algorithms.swap import *
|
||||
from networkx.algorithms.time_dependent import *
|
||||
from networkx.algorithms.traversal import *
|
||||
from networkx.algorithms.triads import *
|
||||
from networkx.algorithms.vitality import *
|
||||
from networkx.algorithms.voronoi import *
|
||||
from networkx.algorithms.walks import *
|
||||
from networkx.algorithms.wiener import *
|
||||
|
||||
# Make certain subpackages available to the user as direct imports from
|
||||
# the `networkx` namespace.
|
||||
from networkx.algorithms import approximation
|
||||
from networkx.algorithms import assortativity
|
||||
from networkx.algorithms import bipartite
|
||||
from networkx.algorithms import node_classification
|
||||
from networkx.algorithms import centrality
|
||||
from networkx.algorithms import chordal
|
||||
from networkx.algorithms import cluster
|
||||
from networkx.algorithms import clique
|
||||
from networkx.algorithms import components
|
||||
from networkx.algorithms import connectivity
|
||||
from networkx.algorithms import community
|
||||
from networkx.algorithms import coloring
|
||||
from networkx.algorithms import flow
|
||||
from networkx.algorithms import isomorphism
|
||||
from networkx.algorithms import link_analysis
|
||||
from networkx.algorithms import lowest_common_ancestors
|
||||
from networkx.algorithms import operators
|
||||
from networkx.algorithms import shortest_paths
|
||||
from networkx.algorithms import tournament
|
||||
from networkx.algorithms import traversal
|
||||
from networkx.algorithms import tree
|
||||
|
||||
# Make certain functions from some of the previous subpackages available
|
||||
# to the user as direct imports from the `networkx` namespace.
|
||||
from networkx.algorithms.bipartite import complete_bipartite_graph
|
||||
from networkx.algorithms.bipartite import is_bipartite
|
||||
from networkx.algorithms.bipartite import projected_graph
|
||||
from networkx.algorithms.connectivity import all_pairs_node_connectivity
|
||||
from networkx.algorithms.connectivity import all_node_cuts
|
||||
from networkx.algorithms.connectivity import average_node_connectivity
|
||||
from networkx.algorithms.connectivity import edge_connectivity
|
||||
from networkx.algorithms.connectivity import edge_disjoint_paths
|
||||
from networkx.algorithms.connectivity import k_components
|
||||
from networkx.algorithms.connectivity import k_edge_components
|
||||
from networkx.algorithms.connectivity import k_edge_subgraphs
|
||||
from networkx.algorithms.connectivity import k_edge_augmentation
|
||||
from networkx.algorithms.connectivity import is_k_edge_connected
|
||||
from networkx.algorithms.connectivity import minimum_edge_cut
|
||||
from networkx.algorithms.connectivity import minimum_node_cut
|
||||
from networkx.algorithms.connectivity import node_connectivity
|
||||
from networkx.algorithms.connectivity import node_disjoint_paths
|
||||
from networkx.algorithms.connectivity import stoer_wagner
|
||||
from networkx.algorithms.flow import capacity_scaling
|
||||
from networkx.algorithms.flow import cost_of_flow
|
||||
from networkx.algorithms.flow import gomory_hu_tree
|
||||
from networkx.algorithms.flow import max_flow_min_cost
|
||||
from networkx.algorithms.flow import maximum_flow
|
||||
from networkx.algorithms.flow import maximum_flow_value
|
||||
from networkx.algorithms.flow import min_cost_flow
|
||||
from networkx.algorithms.flow import min_cost_flow_cost
|
||||
from networkx.algorithms.flow import minimum_cut
|
||||
from networkx.algorithms.flow import minimum_cut_value
|
||||
from networkx.algorithms.flow import network_simplex
|
||||
from networkx.algorithms.isomorphism import could_be_isomorphic
|
||||
from networkx.algorithms.isomorphism import fast_could_be_isomorphic
|
||||
from networkx.algorithms.isomorphism import faster_could_be_isomorphic
|
||||
from networkx.algorithms.isomorphism import is_isomorphic
|
||||
from networkx.algorithms.isomorphism.vf2pp import *
|
||||
from networkx.algorithms.tree.branchings import maximum_branching
|
||||
from networkx.algorithms.tree.branchings import maximum_spanning_arborescence
|
||||
from networkx.algorithms.tree.branchings import minimum_branching
|
||||
from networkx.algorithms.tree.branchings import minimum_spanning_arborescence
|
||||
from networkx.algorithms.tree.branchings import ArborescenceIterator
|
||||
from networkx.algorithms.tree.coding import *
|
||||
from networkx.algorithms.tree.decomposition import *
|
||||
from networkx.algorithms.tree.mst import *
|
||||
from networkx.algorithms.tree.operations import *
|
||||
from networkx.algorithms.tree.recognition import *
|
||||
from networkx.algorithms.tournament import is_tournament
|
||||
+25
@@ -0,0 +1,25 @@
"""Approximations of graph properties and Heuristic methods for optimization.

The functions in this class are not imported into the top-level ``networkx``
namespace so the easiest way to use them is with::

    >>> from networkx.algorithms import approximation

Another option is to import the specific function with
``from networkx.algorithms.approximation import function_name``.

"""

from networkx.algorithms.approximation.clustering_coefficient import *
from networkx.algorithms.approximation.clique import *
from networkx.algorithms.approximation.connectivity import *
from networkx.algorithms.approximation.distance_measures import *
from networkx.algorithms.approximation.dominating_set import *
from networkx.algorithms.approximation.kcomponents import *
from networkx.algorithms.approximation.matching import *
from networkx.algorithms.approximation.ramsey import *
from networkx.algorithms.approximation.steinertree import *
from networkx.algorithms.approximation.traveling_salesman import *
from networkx.algorithms.approximation.treewidth import *
from networkx.algorithms.approximation.vertex_cover import *
from networkx.algorithms.approximation.maxcut import *
+259
@@ -0,0 +1,259 @@
|
||||
"""Functions for computing large cliques and maximum independent sets."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import ramsey
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = [
|
||||
"clique_removal",
|
||||
"max_clique",
|
||||
"large_clique_size",
|
||||
"maximum_independent_set",
|
||||
]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def maximum_independent_set(G):
|
||||
"""Returns an approximate maximum independent set.
|
||||
|
||||
Independent set or stable set is a set of vertices in a graph, no two of
|
||||
which are adjacent. That is, it is a set I of vertices such that for every
|
||||
two vertices in I, there is no edge connecting the two. Equivalently, each
|
||||
edge in the graph has at most one endpoint in I. The size of an independent
|
||||
set is the number of vertices it contains [1]_.
|
||||
|
||||
A maximum independent set is a largest independent set for a given graph G
|
||||
and its size is denoted $\\alpha(G)$. The problem of finding such a set is called
|
||||
the maximum independent set problem and is an NP-hard optimization problem.
|
||||
As such, it is unlikely that there exists an efficient algorithm for finding
|
||||
a maximum independent set of a graph.
|
||||
|
||||
The Independent Set algorithm is based on [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
iset : Set
|
||||
The apx-maximum independent set
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(10)
|
||||
>>> nx.approximation.maximum_independent_set(G)
|
||||
{0, 2, 4, 6, 9}
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
|
||||
Notes
|
||||
-----
|
||||
Finds the $O(|V|/(log|V|)^2)$ apx of independent set in the worst case.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] `Wikipedia: Independent set
|
||||
<https://en.wikipedia.org/wiki/Independent_set_(graph_theory)>`_
|
||||
.. [2] Boppana, R., & Halldórsson, M. M. (1992).
|
||||
Approximating maximum independent sets by excluding subgraphs.
|
||||
BIT Numerical Mathematics, 32(2), 180–196. Springer.
|
||||
"""
|
||||
iset, _ = clique_removal(G)
|
||||
return iset
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def max_clique(G):
|
||||
r"""Find the Maximum Clique
|
||||
|
||||
Finds the $O(|V|/(log|V|)^2)$ apx of maximum clique/independent set
|
||||
in the worst case.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
clique : set
|
||||
The apx-maximum clique of the graph
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(10)
|
||||
>>> nx.approximation.max_clique(G)
|
||||
{8, 9}
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
|
||||
Notes
|
||||
-----
|
||||
A clique in an undirected graph G = (V, E) is a subset of the vertex set
|
||||
`C \subseteq V` such that for every two vertices in C there exists an edge
|
||||
connecting the two. This is equivalent to saying that the subgraph
|
||||
induced by C is complete (in some cases, the term clique may also refer
|
||||
to the subgraph).
|
||||
|
||||
A maximum clique is a clique of the largest possible size in a given graph.
|
||||
The clique number `\omega(G)` of a graph G is the number of
|
||||
vertices in a maximum clique in G. The intersection number of
|
||||
G is the smallest number of cliques that together cover all edges of G.
|
||||
|
||||
https://en.wikipedia.org/wiki/Maximum_clique
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Boppana, R., & Halldórsson, M. M. (1992).
|
||||
Approximating maximum independent sets by excluding subgraphs.
|
||||
BIT Numerical Mathematics, 32(2), 180–196. Springer.
|
||||
doi:10.1007/BF01994876
|
||||
"""
|
||||
# finding the maximum clique in a graph is equivalent to finding
|
||||
# the independent set in the complementary graph
|
||||
cgraph = nx.complement(G)
|
||||
iset, _ = clique_removal(cgraph)
|
||||
return iset
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def clique_removal(G):
|
||||
r"""Repeatedly remove cliques from the graph.
|
||||
|
||||
Results in a $O(|V|/(\log |V|)^2)$ approximation of maximum clique
|
||||
and independent set. Returns the largest independent set found, along
|
||||
with found maximal cliques.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
max_ind_cliques : (set, list) tuple
|
||||
2-tuple of Maximal Independent Set and list of maximal cliques (sets).
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(10)
|
||||
>>> nx.approximation.clique_removal(G)
|
||||
({0, 2, 4, 6, 9}, [{0, 1}, {2, 3}, {4, 5}, {6, 7}, {8, 9}])
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Boppana, R., & Halldórsson, M. M. (1992).
|
||||
Approximating maximum independent sets by excluding subgraphs.
|
||||
BIT Numerical Mathematics, 32(2), 180–196. Springer.
|
||||
"""
|
||||
graph = G.copy()
|
||||
c_i, i_i = ramsey.ramsey_R2(graph)
|
||||
cliques = [c_i]
|
||||
isets = [i_i]
|
||||
while graph:
|
||||
graph.remove_nodes_from(c_i)
|
||||
c_i, i_i = ramsey.ramsey_R2(graph)
|
||||
if c_i:
|
||||
cliques.append(c_i)
|
||||
if i_i:
|
||||
isets.append(i_i)
|
||||
# Determine the largest independent set as measured by cardinality.
|
||||
maxiset = max(isets, key=len)
|
||||
return maxiset, cliques
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def large_clique_size(G):
|
||||
"""Find the size of a large clique in a graph.
|
||||
|
||||
A *clique* is a subset of nodes in which each pair of nodes is
|
||||
adjacent. This function is a heuristic for finding the size of a
|
||||
large clique in the graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
k: integer
|
||||
The size of a large clique in the graph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(10)
|
||||
>>> nx.approximation.large_clique_size(G)
|
||||
2
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This implementation is from [1]_. Its worst case time complexity is
|
||||
:math:`O(n d^2)`, where *n* is the number of nodes in the graph and
|
||||
*d* is the maximum degree.
|
||||
|
||||
This function is a heuristic, which means it may work well in
|
||||
practice, but there is no rigorous mathematical guarantee on the
|
||||
ratio between the returned number and the actual largest clique size
|
||||
in the graph.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Pattabiraman, Bharath, et al.
|
||||
"Fast Algorithms for the Maximum Clique Problem on Massive Graphs
|
||||
with Applications to Overlapping Community Detection."
|
||||
*Internet Mathematics* 11.4-5 (2015): 421--448.
|
||||
<https://doi.org/10.1080/15427951.2014.986778>
|
||||
|
||||
See also
|
||||
--------
|
||||
|
||||
:func:`networkx.algorithms.approximation.clique.max_clique`
|
||||
A function that returns an approximate maximum clique with a
|
||||
guarantee on the approximation ratio.
|
||||
|
||||
:mod:`networkx.algorithms.clique`
|
||||
Functions for finding the exact maximum clique in a graph.
|
||||
|
||||
"""
|
||||
degrees = G.degree
|
||||
|
||||
def _clique_heuristic(G, U, size, best_size):
|
||||
if not U:
|
||||
return max(best_size, size)
|
||||
u = max(U, key=degrees)
|
||||
U.remove(u)
|
||||
N_prime = {v for v in G[u] if degrees[v] >= best_size}
|
||||
return _clique_heuristic(G, U & N_prime, size + 1, best_size)
|
||||
|
||||
best_size = 0
|
||||
nodes = (u for u in G if degrees[u] >= best_size)
|
||||
for u in nodes:
|
||||
neighbors = {v for v in G[u] if degrees[v] >= best_size}
|
||||
best_size = _clique_heuristic(G, neighbors, 1, best_size)
|
||||
return best_size
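# Illustrative sketch (the graph choice is arbitrary): ``max_clique`` relies on
# the complement trick noted above, so running ``clique_removal`` on the
# complement graph should return the same approximate clique.
#
#     >>> import networkx as nx
#     >>> from networkx.algorithms import approximation as approx
#     >>> G = nx.path_graph(10)
#     >>> iset, _ = approx.clique_removal(nx.complement(G))
#     >>> iset == approx.max_clique(G)
#     True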
+71
@@ -0,0 +1,71 @@
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for, py_random_state
|
||||
|
||||
__all__ = ["average_clustering"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@py_random_state(2)
|
||||
@nx._dispatchable(name="approximate_average_clustering")
|
||||
def average_clustering(G, trials=1000, seed=None):
|
||||
r"""Estimates the average clustering coefficient of G.
|
||||
|
||||
The local clustering of each node in `G` is the fraction of triangles
|
||||
that actually exist over all possible triangles in its neighborhood.
|
||||
The average clustering coefficient of a graph `G` is the mean of
|
||||
local clusterings.
|
||||
|
||||
This function finds an approximate average clustering coefficient
|
||||
for G by repeating `n` times (defined in `trials`) the following
|
||||
experiment: choose a node at random, choose two of its neighbors
|
||||
at random, and check if they are connected. The approximate
|
||||
coefficient is the fraction of triangles found over the number
|
||||
of trials [1]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
trials : integer
|
||||
Number of trials to perform (default 1000).
|
||||
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
c : float
|
||||
Approximated average clustering coefficient.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import approximation
|
||||
>>> G = nx.erdos_renyi_graph(10, 0.2, seed=10)
|
||||
>>> approximation.average_clustering(G, trials=1000, seed=10)
|
||||
0.214
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If G is directed.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Schank, Thomas, and Dorothea Wagner. Approximating clustering
|
||||
coefficient and transitivity. Universität Karlsruhe, Fakultät für
|
||||
Informatik, 2004.
|
||||
https://doi.org/10.5445/IR/1000001239
|
||||
|
||||
"""
|
||||
n = len(G)
|
||||
triangles = 0
|
||||
nodes = list(G)
|
||||
for i in [int(seed.random() * n) for i in range(trials)]:
|
||||
nbrs = list(G[nodes[i]])
|
||||
if len(nbrs) < 2:
|
||||
continue
|
||||
u, v = seed.sample(nbrs, 2)
|
||||
if u in G[v]:
|
||||
triangles += 1
|
||||
return triangles / trials
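# Usage sketch (graph and seeds are arbitrary): the sampled estimate returned
# above should move toward the exact ``nx.average_clustering`` value as the
# number of trials grows.
#
#     >>> import networkx as nx
#     >>> from networkx.algorithms import approximation as approx
#     >>> G = nx.erdos_renyi_graph(200, 0.05, seed=42)
#     >>> exact = nx.average_clustering(G)
#     >>> estimates = [approx.average_clustering(G, trials=t, seed=7) for t in (100, 1000, 10000)]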
+412
@@ -0,0 +1,412 @@
|
||||
"""Fast approximation for node connectivity"""
|
||||
|
||||
import itertools
|
||||
from operator import itemgetter
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = [
|
||||
"local_node_connectivity",
|
||||
"node_connectivity",
|
||||
"all_pairs_node_connectivity",
|
||||
]
|
||||
|
||||
|
||||
@nx._dispatchable(name="approximate_local_node_connectivity")
|
||||
def local_node_connectivity(G, source, target, cutoff=None):
|
||||
"""Compute node connectivity between source and target.
|
||||
|
||||
Pairwise or local node connectivity between two distinct and nonadjacent
|
||||
nodes is the minimum number of nodes that must be removed (minimum
|
||||
separating cutset) to disconnect them. By Menger's theorem, this is equal
|
||||
to the number of node independent paths (paths that share no nodes other
|
||||
than source and target), which is what this function computes.
|
||||
|
||||
This algorithm is a fast approximation that gives a strict lower
|
||||
bound on the actual number of node independent paths between two nodes [1]_.
|
||||
It works for both directed and undirected graphs.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
|
||||
G : NetworkX graph
|
||||
|
||||
source : node
|
||||
Starting node for node connectivity
|
||||
|
||||
target : node
|
||||
Ending node for node connectivity
|
||||
|
||||
cutoff : integer
|
||||
Maximum node connectivity to consider. If None, the minimum degree
|
||||
of source or target is used as a cutoff. Default value None.
|
||||
|
||||
Returns
|
||||
-------
|
||||
k: integer
|
||||
pairwise node connectivity
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> # Platonic octahedral graph has node connectivity 4
|
||||
>>> # for each non adjacent node pair
|
||||
>>> from networkx.algorithms import approximation as approx
|
||||
>>> G = nx.octahedral_graph()
|
||||
>>> approx.local_node_connectivity(G, 0, 5)
|
||||
4
|
||||
|
||||
Notes
|
||||
-----
|
||||
This algorithm [1]_ finds node independent paths between two nodes by
|
||||
computing their shortest path using BFS, marking the nodes of the path
|
||||
found as 'used' and then searching other shortest paths excluding the
|
||||
nodes marked as used until no more paths exist. It is not exact because
|
||||
a shortest path could use nodes that, if the path were longer, may belong
|
||||
to two different node independent paths. Thus it only guarantees a
|
||||
strict lower bound on node connectivity.
|
||||
|
||||
Note that the authors propose a further refinement, losing accuracy and
|
||||
gaining speed, which is not implemented yet.
|
||||
|
||||
See also
|
||||
--------
|
||||
all_pairs_node_connectivity
|
||||
node_connectivity
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
|
||||
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
||||
http://eclectic.ss.uci.edu/~drwhite/working.pdf
|
||||
|
||||
"""
|
||||
if target == source:
|
||||
raise nx.NetworkXError("source and target have to be different nodes.")
|
||||
|
||||
# Maximum possible node independent paths
|
||||
if G.is_directed():
|
||||
possible = min(G.out_degree(source), G.in_degree(target))
|
||||
else:
|
||||
possible = min(G.degree(source), G.degree(target))
|
||||
|
||||
K = 0
|
||||
if not possible:
|
||||
return K
|
||||
|
||||
if cutoff is None:
|
||||
cutoff = float("inf")
|
||||
|
||||
exclude = set()
|
||||
for i in range(min(possible, cutoff)):
|
||||
try:
|
||||
path = _bidirectional_shortest_path(G, source, target, exclude)
|
||||
exclude.update(set(path))
|
||||
K += 1
|
||||
except nx.NetworkXNoPath:
|
||||
break
|
||||
|
||||
return K
|
||||
|
||||
|
||||
@nx._dispatchable(name="approximate_node_connectivity")
|
||||
def node_connectivity(G, s=None, t=None):
|
||||
r"""Returns an approximation for node connectivity for a graph or digraph G.
|
||||
|
||||
Node connectivity is equal to the minimum number of nodes that
|
||||
must be removed to disconnect G or render it trivial. By Menger's theorem,
|
||||
this is equal to the number of node independent paths (paths that
|
||||
share no nodes other than source and target).
|
||||
|
||||
If source and target nodes are provided, this function returns the
|
||||
local node connectivity: the minimum number of nodes that must be
|
||||
removed to break all paths from source to target in G.
|
||||
|
||||
This algorithm is based on a fast approximation that gives a strict lower
|
||||
bound on the actual number of node independent paths between two nodes [1]_.
|
||||
It works for both directed and undirected graphs.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph
|
||||
|
||||
s : node
|
||||
Source node. Optional. Default value: None.
|
||||
|
||||
t : node
|
||||
Target node. Optional. Default value: None.
|
||||
|
||||
Returns
|
||||
-------
|
||||
K : integer
|
||||
Node connectivity of G, or local node connectivity if source
|
||||
and target are provided.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> # Platonic octahedral graph is 4-node-connected
|
||||
>>> from networkx.algorithms import approximation as approx
|
||||
>>> G = nx.octahedral_graph()
|
||||
>>> approx.node_connectivity(G)
|
||||
4
|
||||
|
||||
Notes
|
||||
-----
|
||||
This algorithm [1]_ finds node independent paths between two nodes by
|
||||
computing their shortest path using BFS, marking the nodes of the path
|
||||
found as 'used' and then searching other shortest paths excluding the
|
||||
nodes marked as used until no more paths exist. It is not exact because
|
||||
a shortest path could use nodes that, if the path were longer, may belong
|
||||
to two different node independent paths. Thus it only guarantees a
|
||||
strict lower bound on node connectivity.
|
||||
|
||||
See also
|
||||
--------
|
||||
all_pairs_node_connectivity
|
||||
local_node_connectivity
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
|
||||
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
||||
http://eclectic.ss.uci.edu/~drwhite/working.pdf
|
||||
|
||||
"""
|
||||
if (s is not None and t is None) or (s is None and t is not None):
|
||||
raise nx.NetworkXError("Both source and target must be specified.")
|
||||
|
||||
# Local node connectivity
|
||||
if s is not None and t is not None:
|
||||
if s not in G:
|
||||
raise nx.NetworkXError(f"node {s} not in graph")
|
||||
if t not in G:
|
||||
raise nx.NetworkXError(f"node {t} not in graph")
|
||||
return local_node_connectivity(G, s, t)
|
||||
|
||||
# Global node connectivity
|
||||
if G.is_directed():
|
||||
connected_func = nx.is_weakly_connected
|
||||
iter_func = itertools.permutations
|
||||
|
||||
def neighbors(v):
|
||||
return itertools.chain(G.predecessors(v), G.successors(v))
|
||||
|
||||
else:
|
||||
connected_func = nx.is_connected
|
||||
iter_func = itertools.combinations
|
||||
neighbors = G.neighbors
|
||||
|
||||
if not connected_func(G):
|
||||
return 0
|
||||
|
||||
# Choose a node with minimum degree
|
||||
v, minimum_degree = min(G.degree(), key=itemgetter(1))
|
||||
# Node connectivity is bounded by minimum degree
|
||||
K = minimum_degree
|
||||
# compute local node connectivity with all non-neighbors nodes
|
||||
# and store the minimum
|
||||
for w in set(G) - set(neighbors(v)) - {v}:
|
||||
K = min(K, local_node_connectivity(G, v, w, cutoff=K))
|
||||
# Same for non adjacent pairs of neighbors of v
|
||||
for x, y in iter_func(neighbors(v), 2):
|
||||
if y not in G[x] and x != y:
|
||||
K = min(K, local_node_connectivity(G, x, y, cutoff=K))
|
||||
return K
|
||||
|
||||
|
||||
@nx._dispatchable(name="approximate_all_pairs_node_connectivity")
|
||||
def all_pairs_node_connectivity(G, nbunch=None, cutoff=None):
|
||||
"""Compute node connectivity between all pairs of nodes.
|
||||
|
||||
Pairwise or local node connectivity between two distinct and nonadjacent
|
||||
nodes is the minimum number of nodes that must be removed (minimum
|
||||
separating cutset) to disconnect them. By Menger's theorem, this is equal
|
||||
to the number of node independent paths (paths that share no nodes other
|
||||
than source and target), which is what this function computes.
|
||||
|
||||
This algorithm is a fast approximation that gives a strict lower
|
||||
bound on the actual number of node independent paths between two nodes [1]_.
|
||||
It works for both directed and undirected graphs.
|
||||
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
nbunch: container
|
||||
Container of nodes. If provided node connectivity will be computed
|
||||
only over pairs of nodes in nbunch.
|
||||
|
||||
cutoff : integer
|
||||
Maximum node connectivity to consider. If None, the minimum degree
|
||||
of source or target is used as a cutoff in each pair of nodes.
|
||||
Default value None.
|
||||
|
||||
Returns
|
||||
-------
|
||||
K : dictionary
|
||||
Dictionary, keyed by source and target, of pairwise node connectivity
|
||||
|
||||
Examples
|
||||
--------
|
||||
A 3 node cycle with one extra node attached has connectivity 2 between all
|
||||
nodes in the cycle and connectivity 1 between the extra node and the rest:
|
||||
|
||||
>>> G = nx.cycle_graph(3)
|
||||
>>> G.add_edge(2, 3)
|
||||
>>> import pprint # for nice dictionary formatting
|
||||
>>> pprint.pprint(nx.all_pairs_node_connectivity(G))
|
||||
{0: {1: 2, 2: 2, 3: 1},
|
||||
1: {0: 2, 2: 2, 3: 1},
|
||||
2: {0: 2, 1: 2, 3: 1},
|
||||
3: {0: 1, 1: 1, 2: 1}}
|
||||
|
||||
See Also
|
||||
--------
|
||||
local_node_connectivity
|
||||
node_connectivity
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
|
||||
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
||||
http://eclectic.ss.uci.edu/~drwhite/working.pdf
|
||||
"""
|
||||
if nbunch is None:
|
||||
nbunch = G
|
||||
else:
|
||||
nbunch = set(nbunch)
|
||||
|
||||
directed = G.is_directed()
|
||||
if directed:
|
||||
iter_func = itertools.permutations
|
||||
else:
|
||||
iter_func = itertools.combinations
|
||||
|
||||
all_pairs = {n: {} for n in nbunch}
|
||||
|
||||
for u, v in iter_func(nbunch, 2):
|
||||
k = local_node_connectivity(G, u, v, cutoff=cutoff)
|
||||
all_pairs[u][v] = k
|
||||
if not directed:
|
||||
all_pairs[v][u] = k
|
||||
|
||||
return all_pairs
|
||||
|
||||
|
||||
def _bidirectional_shortest_path(G, source, target, exclude):
|
||||
"""Returns shortest path between source and target ignoring nodes in the
|
||||
container 'exclude'.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
|
||||
G : NetworkX graph
|
||||
|
||||
source : node
|
||||
Starting node for path
|
||||
|
||||
target : node
|
||||
Ending node for path
|
||||
|
||||
exclude: container
|
||||
Container for nodes to exclude from the search for shortest paths
|
||||
|
||||
Returns
|
||||
-------
|
||||
path: list
|
||||
Shortest path between source and target ignoring nodes in 'exclude'
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNoPath
|
||||
If there is no path or if nodes are adjacent and have only one path
|
||||
between them
|
||||
|
||||
Notes
|
||||
-----
|
||||
This function and its helper are originally from
|
||||
networkx.algorithms.shortest_paths.unweighted and are modified to
|
||||
accept the extra parameter 'exclude', which is a container for nodes
|
||||
already used in other paths that should be ignored.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] White, Douglas R., and Mark Newman. 2001 A Fast Algorithm for
|
||||
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
||||
http://eclectic.ss.uci.edu/~drwhite/working.pdf
|
||||
|
||||
"""
|
||||
# call helper to do the real work
|
||||
results = _bidirectional_pred_succ(G, source, target, exclude)
|
||||
pred, succ, w = results
|
||||
|
||||
# build path from pred+w+succ
|
||||
path = []
|
||||
# from source to w
|
||||
while w is not None:
|
||||
path.append(w)
|
||||
w = pred[w]
|
||||
path.reverse()
|
||||
# from w to target
|
||||
w = succ[path[-1]]
|
||||
while w is not None:
|
||||
path.append(w)
|
||||
w = succ[w]
|
||||
|
||||
return path
|
||||
|
||||
|
||||
def _bidirectional_pred_succ(G, source, target, exclude):
|
||||
# does BFS from both source and target and meets in the middle
|
||||
# excludes nodes in the container "exclude" from the search
|
||||
|
||||
# handle either directed or undirected
|
||||
if G.is_directed():
|
||||
Gpred = G.predecessors
|
||||
Gsucc = G.successors
|
||||
else:
|
||||
Gpred = G.neighbors
|
||||
Gsucc = G.neighbors
|
||||
|
||||
# predecessor and successors in search
|
||||
pred = {source: None}
|
||||
succ = {target: None}
|
||||
|
||||
# initialize fringes, start with forward
|
||||
forward_fringe = [source]
|
||||
reverse_fringe = [target]
|
||||
|
||||
level = 0
|
||||
|
||||
while forward_fringe and reverse_fringe:
|
||||
# Make sure that we iterate one step forward and one step backwards
|
||||
# thus source and target will only trigger "found path" when they are
|
||||
# adjacent and then they can be safely included in the container 'exclude'
|
||||
level += 1
|
||||
if level % 2 != 0:
|
||||
this_level = forward_fringe
|
||||
forward_fringe = []
|
||||
for v in this_level:
|
||||
for w in Gsucc(v):
|
||||
if w in exclude:
|
||||
continue
|
||||
if w not in pred:
|
||||
forward_fringe.append(w)
|
||||
pred[w] = v
|
||||
if w in succ:
|
||||
return pred, succ, w # found path
|
||||
else:
|
||||
this_level = reverse_fringe
|
||||
reverse_fringe = []
|
||||
for v in this_level:
|
||||
for w in Gpred(v):
|
||||
if w in exclude:
|
||||
continue
|
||||
if w not in succ:
|
||||
succ[w] = v
|
||||
reverse_fringe.append(w)
|
||||
if w in pred:
|
||||
return pred, succ, w # found path
|
||||
|
||||
raise nx.NetworkXNoPath(f"No path between {source} and {target}.")
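# Sanity-check sketch (the Petersen graph is an arbitrary choice): per the
# docstrings above, the approximation never exceeds the exact local node
# connectivity.
#
#     >>> import networkx as nx
#     >>> from networkx.algorithms import approximation as approx
#     >>> G = nx.petersen_graph()
#     >>> approx.local_node_connectivity(G, 0, 6) <= nx.node_connectivity(G, 0, 6)
#     True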
+150
@@ -0,0 +1,150 @@
|
||||
"""Distance measures approximated metrics."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils.decorators import py_random_state
|
||||
|
||||
__all__ = ["diameter"]
|
||||
|
||||
|
||||
@py_random_state(1)
|
||||
@nx._dispatchable(name="approximate_diameter")
|
||||
def diameter(G, seed=None):
|
||||
"""Returns a lower bound on the diameter of the graph G.
|
||||
|
||||
The function computes a lower bound on the diameter (i.e., the maximum eccentricity)
|
||||
of a directed or undirected graph G. The procedure used varies depending on the graph
|
||||
being directed or not.
|
||||
|
||||
If G is an `undirected` graph, then the function uses the `2-sweep` algorithm [1]_.
|
||||
The main idea is to pick the farthest node from a random node and return its eccentricity.
|
||||
|
||||
Otherwise, if G is a `directed` graph, the function uses the `2-dSweep` algorithm [2]_,
|
||||
The procedure starts by selecting a random source node $s$ from which it performs a
|
||||
forward and a backward BFS. Let $a_1$ and $a_2$ be the farthest nodes in the forward and
|
||||
backward cases, respectively. Then, it computes the backward eccentricity of $a_1$ using
|
||||
a backward BFS and the forward eccentricity of $a_2$ using a forward BFS.
|
||||
Finally, it returns the best lower bound between the two.
|
||||
|
||||
In both cases, the time complexity is linear with respect to the size of G.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
d : integer
|
||||
Lower Bound on the Diameter of G
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(10) # undirected graph
|
||||
>>> nx.diameter(G)
|
||||
9
|
||||
>>> G = nx.cycle_graph(3, create_using=nx.DiGraph) # directed graph
|
||||
>>> nx.diameter(G)
|
||||
2
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If the graph is empty or
|
||||
If the graph is undirected and not connected or
|
||||
If the graph is directed and not strongly connected.
|
||||
|
||||
See Also
|
||||
--------
|
||||
networkx.algorithms.distance_measures.diameter
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Magnien, Clémence, Matthieu Latapy, and Michel Habib.
|
||||
*Fast computation of empirically tight bounds for the diameter of massive graphs.*
|
||||
Journal of Experimental Algorithmics (JEA), 2009.
|
||||
https://arxiv.org/pdf/0904.2728.pdf
|
||||
.. [2] Crescenzi, Pierluigi, Roberto Grossi, Leonardo Lanzi, and Andrea Marino.
|
||||
*On computing the diameter of real-world directed (weighted) graphs.*
|
||||
International Symposium on Experimental Algorithms. Springer, Berlin, Heidelberg, 2012.
|
||||
https://courses.cs.ut.ee/MTAT.03.238/2014_fall/uploads/Main/diameter.pdf
|
||||
"""
|
||||
# if G is empty
|
||||
if not G:
|
||||
raise nx.NetworkXError("Expected non-empty NetworkX graph!")
|
||||
# if there's only a node
|
||||
if G.number_of_nodes() == 1:
|
||||
return 0
|
||||
# if G is directed
|
||||
if G.is_directed():
|
||||
return _two_sweep_directed(G, seed)
|
||||
# else if G is undirected
|
||||
return _two_sweep_undirected(G, seed)
|
||||
|
||||
|
||||
def _two_sweep_undirected(G, seed):
|
||||
"""Helper function for finding a lower bound on the diameter
|
||||
for undirected Graphs.
|
||||
|
||||
The idea is to pick the farthest node from a random node
|
||||
and return its eccentricity.
|
||||
|
||||
``G`` is a NetworkX undirected graph.
|
||||
|
||||
.. note::
|
||||
|
||||
``seed`` is a random.Random or numpy.random.RandomState instance
|
||||
"""
|
||||
# select a random source node
|
||||
source = seed.choice(list(G))
|
||||
# get the distances to the other nodes
|
||||
distances = nx.shortest_path_length(G, source)
|
||||
# if some nodes have not been visited, then the graph is not connected
|
||||
if len(distances) != len(G):
|
||||
raise nx.NetworkXError("Graph not connected.")
|
||||
# take a node that is (one of) the farthest nodes from the source
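# (``nx.shortest_path_length`` from a single source runs a BFS, so the keys of
# ``distances`` come out in nondecreasing distance order and the last key is a
# farthest node)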
|
||||
*_, node = distances
|
||||
# return the eccentricity of the node
|
||||
return nx.eccentricity(G, node)
|
||||
|
||||
|
||||
def _two_sweep_directed(G, seed):
|
||||
"""Helper function for finding a lower bound on the diameter
|
||||
for directed Graphs.
|
||||
|
||||
It implements 2-dSweep, the directed version of the 2-sweep algorithm.
|
||||
The algorithm follows the following steps.
|
||||
1. Select a source node $s$ at random.
|
||||
2. Perform a forward BFS from $s$ to select a node $a_1$ at the maximum
|
||||
distance from the source, and compute $LB_1$, the backward eccentricity of $a_1$.
|
||||
3. Perform a backward BFS from $s$ to select a node $a_2$ at the maximum
|
||||
distance from the source, and compute $LB_2$, the forward eccentricity of $a_2$.
|
||||
4. Return the maximum between $LB_1$ and $LB_2$.
|
||||
|
||||
``G`` is a NetworkX directed graph.
|
||||
|
||||
.. note::
|
||||
|
||||
``seed`` is a random.Random or numpy.random.RandomState instance
|
||||
"""
|
||||
# get a new digraph G' with the edges reversed in the opposite direction
|
||||
G_reversed = G.reverse()
|
||||
# select a random source node
|
||||
source = seed.choice(list(G))
|
||||
# compute forward distances from source
|
||||
forward_distances = nx.shortest_path_length(G, source)
|
||||
# compute backward distances from source
|
||||
backward_distances = nx.shortest_path_length(G_reversed, source)
|
||||
# if either the source can't reach every node or not every node
|
||||
# can reach the source, then the graph is not strongly connected
|
||||
n = len(G)
|
||||
if len(forward_distances) != n or len(backward_distances) != n:
|
||||
raise nx.NetworkXError("DiGraph not strongly connected.")
|
||||
# take a node a_1 at the maximum distance from the source in G
|
||||
*_, a_1 = forward_distances
|
||||
# take a node a_2 at the maximum distance from the source in G_reversed
|
||||
*_, a_2 = backward_distances
|
||||
# return the max between the backward eccentricity of a_1 and the forward eccentricity of a_2
|
||||
return max(nx.eccentricity(G_reversed, a_1), nx.eccentricity(G, a_2))
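# Usage sketch (graph and seeds are arbitrary): the 2-sweep value is a lower
# bound, so it never exceeds the exact diameter.
#
#     >>> import networkx as nx
#     >>> from networkx.algorithms import approximation as approx
#     >>> G = nx.random_geometric_graph(50, 0.25, seed=1)
#     >>> if nx.is_connected(G):
#     ...     assert approx.diameter(G, seed=1) <= nx.diameter(G)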
+149
@@ -0,0 +1,149 @@
|
||||
"""Functions for finding node and edge dominating sets.
|
||||
|
||||
A `dominating set`_ for an undirected graph *G* with vertex set *V*
|
||||
and edge set *E* is a subset *D* of *V* such that every vertex not in
|
||||
*D* is adjacent to at least one member of *D*. An `edge dominating set`_
|
||||
is a subset *F* of *E* such that every edge not in *F* is
|
||||
incident to an endpoint of at least one edge in *F*.
|
||||
|
||||
.. _dominating set: https://en.wikipedia.org/wiki/Dominating_set
|
||||
.. _edge dominating set: https://en.wikipedia.org/wiki/Edge_dominating_set
|
||||
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
|
||||
from ...utils import not_implemented_for
|
||||
from ..matching import maximal_matching
|
||||
|
||||
__all__ = ["min_weighted_dominating_set", "min_edge_dominating_set"]
|
||||
|
||||
|
||||
# TODO Why doesn't this algorithm work for directed graphs?
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(node_attrs="weight")
|
||||
def min_weighted_dominating_set(G, weight=None):
|
||||
r"""Returns a dominating set that approximates the minimum weight node
|
||||
dominating set.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph.
|
||||
|
||||
weight : string
|
||||
The node attribute storing the weight of a node. If provided,
|
||||
the node attribute with this key must be a number for each
|
||||
node. If not provided, each node is assumed to have weight one.
|
||||
|
||||
Returns
|
||||
-------
|
||||
min_weight_dominating_set : set
|
||||
A set of nodes, the sum of whose weights is no more than `(\log
|
||||
w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of
|
||||
each node in the graph and `w(V^*)` denotes the sum of the
|
||||
weights of each node in the minimum weight dominating set.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.Graph([(0, 1), (0, 4), (1, 4), (1, 2), (2, 3), (3, 4), (2, 5)])
|
||||
>>> nx.approximation.min_weighted_dominating_set(G)
|
||||
{1, 2, 4}
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If G is directed.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This algorithm computes an approximate minimum weighted dominating
|
||||
set for the graph `G`. The returned solution has weight `(\log
|
||||
w(V)) w(V^*)`, where `w(V)` denotes the sum of the weights of each
|
||||
node in the graph and `w(V^*)` denotes the sum of the weights of
|
||||
each node in the minimum weight dominating set for the graph.
|
||||
|
||||
This implementation of the algorithm runs in $O(m)$ time, where $m$
|
||||
is the number of edges in the graph.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Vazirani, Vijay V.
|
||||
*Approximation Algorithms*.
|
||||
Springer Science & Business Media, 2001.
|
||||
|
||||
"""
|
||||
# The unique dominating set for the null graph is the empty set.
|
||||
if len(G) == 0:
|
||||
return set()
|
||||
|
||||
# This is the dominating set that will eventually be returned.
|
||||
dom_set = set()
|
||||
|
||||
def _cost(node_and_neighborhood):
|
||||
"""Returns the cost-effectiveness of greedily choosing the given
|
||||
node.
|
||||
|
||||
`node_and_neighborhood` is a two-tuple comprising a node and its
|
||||
closed neighborhood.
|
||||
|
||||
"""
|
||||
v, neighborhood = node_and_neighborhood
|
||||
return G.nodes[v].get(weight, 1) / len(neighborhood - dom_set)
|
||||
|
||||
# This is a set of all vertices not already covered by the
|
||||
# dominating set.
|
||||
vertices = set(G)
|
||||
# This is a dictionary mapping each node to the closed neighborhood
|
||||
# of that node.
|
||||
neighborhoods = {v: {v} | set(G[v]) for v in G}
|
||||
|
||||
# Continue until all vertices are adjacent to some node in the
|
||||
# dominating set.
|
||||
while vertices:
|
||||
# Find the most cost-effective node to add, along with its
|
||||
# closed neighborhood.
|
||||
dom_node, min_set = min(neighborhoods.items(), key=_cost)
|
||||
# Add the node to the dominating set and reduce the remaining
|
||||
# set of nodes to cover.
|
||||
dom_set.add(dom_node)
|
||||
del neighborhoods[dom_node]
|
||||
vertices -= min_set
|
||||
|
||||
return dom_set
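# Illustrative check, added for exposition (the function name is hypothetical):
# whatever the greedy choices were, the returned set must dominate the graph,
# i.e. every node is in the set or adjacent to a member of it.
def _example_check_dominating_set():
    import networkx as nx

    G = nx.petersen_graph()
    D = nx.approximation.min_weighted_dominating_set(G)
    assert all(v in D or any(u in D for u in G[v]) for v in G)
    return D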
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def min_edge_dominating_set(G):
|
||||
r"""Returns minimum cardinality edge dominating set.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
min_edge_dominating_set : set
|
||||
Returns a set of dominating edges whose size is no more than 2 * OPT.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.petersen_graph()
|
||||
>>> nx.approximation.min_edge_dominating_set(G)
|
||||
{(0, 1), (4, 9), (6, 8), (5, 7), (2, 3)}
|
||||
|
||||
Raises
|
||||
------
|
||||
ValueError
|
||||
If the input graph `G` is empty.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The algorithm computes an approximate solution to the edge dominating set
|
||||
problem. The result is no more than 2 * OPT in terms of size of the set.
|
||||
Runtime of the algorithm is $O(|E|)$.
|
||||
"""
|
||||
if not G:
|
||||
raise ValueError("Expected non-empty NetworkX graph!")
|
||||
return maximal_matching(G)
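# Sketch added for exposition (helper name is ours): because a maximal matching
# is an edge dominating set, every edge of G shares an endpoint with some edge
# in the returned set.
def _example_check_edge_dominating_set():
    import networkx as nx

    G = nx.cycle_graph(7)
    F = nx.approximation.min_edge_dominating_set(G)
    covered = {v for e in F for v in e}
    assert all(u in covered or v in covered for u, v in G.edges())
    return F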
|
||||
+369
@@ -0,0 +1,369 @@
|
||||
"""Fast approximation for k-component structure"""
|
||||
|
||||
import itertools
|
||||
from collections import defaultdict
|
||||
from collections.abc import Mapping
|
||||
from functools import cached_property
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import local_node_connectivity
|
||||
from networkx.exception import NetworkXError
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = ["k_components"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(name="approximate_k_components")
|
||||
def k_components(G, min_density=0.95):
|
||||
r"""Returns the approximate k-component structure of a graph G.
|
||||
|
||||
A `k`-component is a maximal subgraph of a graph G that has at least
|
||||
node connectivity `k`: we need to remove at least `k` nodes to break it
|
||||
into more components. `k`-components have an inherent hierarchical
|
||||
structure because they are nested in terms of connectivity: a connected
|
||||
graph can contain several 2-components, each of which can contain
|
||||
one or more 3-components, and so forth.
|
||||
|
||||
This implementation is based on the fast heuristics to approximate
|
||||
the `k`-component structure of a graph [1]_, which, in turn, is based on
|
||||
a fast approximation algorithm for finding good lower bounds of the number
|
||||
of node independent paths between two nodes [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph
|
||||
|
||||
min_density : float
|
||||
Density relaxation threshold. Default value 0.95
|
||||
|
||||
Returns
|
||||
-------
|
||||
k_components : dict
|
||||
Dictionary with connectivity level `k` as key and a list of
|
||||
sets of nodes that form a k-component of level `k` as values.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If G is directed.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> # Petersen graph has 10 nodes and it is triconnected, thus all
|
||||
>>> # nodes are in a single component on all three connectivity levels
|
||||
>>> from networkx.algorithms import approximation as apxa
|
||||
>>> G = nx.petersen_graph()
|
||||
>>> k_components = apxa.k_components(G)
|
||||
|
||||
Notes
|
||||
-----
|
||||
The logic of the approximation algorithm for computing the `k`-component
|
||||
structure [1]_ is based on repeatedly applying simple and fast algorithms
|
||||
for `k`-cores and biconnected components in order to narrow down the
|
||||
number of pairs of nodes over which we have to compute White and Newman's
|
||||
approximation algorithm for finding node independent paths [2]_. More
|
||||
formally, this algorithm is based on Whitney's theorem, which states
|
||||
an inclusion relation among node connectivity, edge connectivity, and
|
||||
minimum degree for any graph G. This theorem implies that every
|
||||
`k`-component is nested inside a `k`-edge-component, which in turn,
|
||||
is contained in a `k`-core. Thus, this algorithm computes node independent
|
||||
paths among pairs of nodes in each biconnected part of each `k`-core,
|
||||
and repeats this procedure for each `k` from 3 to the maximal core number
|
||||
of a node in the input graph.
|
||||
|
||||
Because, in practice, many nodes of the core of level `k` inside a
|
||||
bicomponent actually are part of a component of level k, the auxiliary
|
||||
graph needed for the algorithm is likely to be very dense. Thus, we use
|
||||
a complement graph data structure (see `AntiGraph`) to save memory.
|
||||
AntiGraph only stores information of the edges that are *not* present
|
||||
in the actual auxiliary graph. When applying algorithms to this
|
||||
complement graph data structure, it behaves as if it were the dense
|
||||
version.
|
||||
|
||||
See also
|
||||
--------
|
||||
k_components
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Torrents, J. and F. Ferraro (2015) Structural Cohesion:
|
||||
Visualization and Heuristics for Fast Computation.
|
||||
https://arxiv.org/pdf/1503.04476v1
|
||||
|
||||
.. [2] White, Douglas R., and Mark Newman (2001) A Fast Algorithm for
|
||||
Node-Independent Paths. Santa Fe Institute Working Paper #01-07-035
|
||||
https://www.santafe.edu/research/results/working-papers/fast-approximation-algorithms-for-finding-node-ind
|
||||
|
||||
.. [3] Moody, J. and D. White (2003). Social cohesion and embeddedness:
|
||||
A hierarchical conception of social groups.
|
||||
American Sociological Review 68(1), 103--28.
|
||||
https://doi.org/10.2307/3088904
|
||||
|
||||
"""
|
||||
# Dictionary with connectivity level (k) as keys and a list of
|
||||
# sets of nodes that form a k-component as values
|
||||
k_components = defaultdict(list)
|
||||
# make a few functions local for speed
|
||||
node_connectivity = local_node_connectivity
|
||||
k_core = nx.k_core
|
||||
core_number = nx.core_number
|
||||
biconnected_components = nx.biconnected_components
|
||||
combinations = itertools.combinations
|
||||
# Exact solution for k = {1,2}
|
||||
# There is a linear time algorithm for triconnectivity, if we had an
|
||||
# implementation available we could start from k = 4.
|
||||
for component in nx.connected_components(G):
|
||||
# isolated nodes have connectivity 0
|
||||
comp = set(component)
|
||||
if len(comp) > 1:
|
||||
k_components[1].append(comp)
|
||||
for bicomponent in nx.biconnected_components(G):
|
||||
# avoid considering dyads as bicomponents
|
||||
bicomp = set(bicomponent)
|
||||
if len(bicomp) > 2:
|
||||
k_components[2].append(bicomp)
|
||||
# There is no k-component of k > maximum core number
|
||||
# \kappa(G) <= \lambda(G) <= \delta(G)
|
||||
g_cnumber = core_number(G)
|
||||
max_core = max(g_cnumber.values())
|
||||
for k in range(3, max_core + 1):
|
||||
C = k_core(G, k, core_number=g_cnumber)
|
||||
for nodes in biconnected_components(C):
|
||||
# Build a subgraph SG induced by the nodes that are part of
|
||||
# each biconnected component of the k-core subgraph C.
|
||||
if len(nodes) < k:
|
||||
continue
|
||||
SG = G.subgraph(nodes)
|
||||
# Build auxiliary graph
|
||||
H = _AntiGraph()
|
||||
H.add_nodes_from(SG.nodes())
|
||||
for u, v in combinations(SG, 2):
|
||||
K = node_connectivity(SG, u, v, cutoff=k)
|
||||
if k > K:
|
||||
H.add_edge(u, v)
|
||||
for h_nodes in biconnected_components(H):
|
||||
if len(h_nodes) <= k:
|
||||
continue
|
||||
SH = H.subgraph(h_nodes)
|
||||
for Gc in _cliques_heuristic(SG, SH, k, min_density):
|
||||
for k_nodes in biconnected_components(Gc):
|
||||
Gk = nx.k_core(SG.subgraph(k_nodes), k)
|
||||
if len(Gk) <= k:
|
||||
continue
|
||||
k_components[k].append(set(Gk))
|
||||
return k_components
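# Illustrative sketch (added; not part of the original module): for levels
# k >= 3 the heuristic above only reports candidate components, so we verify
# each one against the exact node connectivity of its induced subgraph, as the
# test suite for this module does.
def _example_check_k_components():
    import networkx as nx

    G = nx.karate_club_graph()
    result = nx.approximation.k_components(G)
    for k, components in result.items():
        if k < 3:
            continue
        for component in components:
            assert nx.node_connectivity(G.subgraph(component)) >= k
    return result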
|
||||
|
||||
|
||||
def _cliques_heuristic(G, H, k, min_density):
|
||||
h_cnumber = nx.core_number(H)
|
||||
for i, c_value in enumerate(sorted(set(h_cnumber.values()), reverse=True)):
|
||||
cands = {n for n, c in h_cnumber.items() if c == c_value}
|
||||
# Skip checking for overlap for the highest core value
|
||||
if i == 0:
|
||||
overlap = False
|
||||
else:
|
||||
overlap = set.intersection(
|
||||
*[{x for x in H[n] if x not in cands} for n in cands]
|
||||
)
|
||||
if overlap and len(overlap) < k:
|
||||
SH = H.subgraph(cands | overlap)
|
||||
else:
|
||||
SH = H.subgraph(cands)
|
||||
sh_cnumber = nx.core_number(SH)
|
||||
SG = nx.k_core(G.subgraph(SH), k)
|
||||
while not (_same(sh_cnumber) and nx.density(SH) >= min_density):
|
||||
# This subgraph must be writable => .copy()
|
||||
SH = H.subgraph(SG).copy()
|
||||
if len(SH) <= k:
|
||||
break
|
||||
sh_cnumber = nx.core_number(SH)
|
||||
sh_deg = dict(SH.degree())
|
||||
min_deg = min(sh_deg.values())
|
||||
SH.remove_nodes_from(n for n, d in sh_deg.items() if d == min_deg)
|
||||
SG = nx.k_core(G.subgraph(SH), k)
|
||||
else:
|
||||
yield SG
|
||||
|
||||
|
||||
def _same(measure, tol=0):
|
||||
vals = set(measure.values())
|
||||
if (max(vals) - min(vals)) <= tol:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
class _AntiGraph(nx.Graph):
|
||||
"""
|
||||
Class for complement graphs.
|
||||
|
||||
The main goal is to be able to work with big and dense graphs with
|
||||
a low memory footprint.
|
||||
|
||||
In this class you add the edges that *do not exist* in the dense graph,
|
||||
the report methods of the class return the neighbors, the edges and
|
||||
the degree as if it was the dense graph. Thus it's possible to use
|
||||
an instance of this class with some of NetworkX functions. In this
|
||||
case we only use k-core, connected_components, and biconnected_components.
|
||||
"""
|
||||
|
||||
all_edge_dict = {"weight": 1}
|
||||
|
||||
def single_edge_dict(self):
|
||||
return self.all_edge_dict
|
||||
|
||||
edge_attr_dict_factory = single_edge_dict # type: ignore[assignment]
|
||||
|
||||
def __getitem__(self, n):
|
||||
"""Returns a dict of neighbors of node n in the dense graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n : node
|
||||
A node in the graph.
|
||||
|
||||
Returns
|
||||
-------
|
||||
adj_dict : dictionary
|
||||
The adjacency dictionary for nodes connected to n.
|
||||
|
||||
"""
|
||||
all_edge_dict = self.all_edge_dict
|
||||
return {
|
||||
node: all_edge_dict for node in set(self._adj) - set(self._adj[n]) - {n}
|
||||
}
|
||||
|
||||
def neighbors(self, n):
|
||||
"""Returns an iterator over all neighbors of node n in the
|
||||
dense graph.
|
||||
"""
|
||||
try:
|
||||
return iter(set(self._adj) - set(self._adj[n]) - {n})
|
||||
except KeyError as err:
|
||||
raise NetworkXError(f"The node {n} is not in the graph.") from err
|
||||
|
||||
class AntiAtlasView(Mapping):
|
||||
"""An adjacency inner dict for AntiGraph"""
|
||||
|
||||
def __init__(self, graph, node):
|
||||
self._graph = graph
|
||||
self._atlas = graph._adj[node]
|
||||
self._node = node
|
||||
|
||||
def __len__(self):
|
||||
return len(self._graph) - len(self._atlas) - 1
|
||||
|
||||
def __iter__(self):
|
||||
return (n for n in self._graph if n not in self._atlas and n != self._node)
|
||||
|
||||
def __getitem__(self, nbr):
|
||||
nbrs = set(self._graph._adj) - set(self._atlas) - {self._node}
|
||||
if nbr in nbrs:
|
||||
return self._graph.all_edge_dict
|
||||
raise KeyError(nbr)
|
||||
|
||||
class AntiAdjacencyView(AntiAtlasView):
|
||||
"""An adjacency outer dict for AntiGraph"""
|
||||
|
||||
def __init__(self, graph):
|
||||
self._graph = graph
|
||||
self._atlas = graph._adj
|
||||
|
||||
def __len__(self):
|
||||
return len(self._atlas)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._graph)
|
||||
|
||||
def __getitem__(self, node):
|
||||
if node not in self._graph:
|
||||
raise KeyError(node)
|
||||
return self._graph.AntiAtlasView(self._graph, node)
|
||||
|
||||
@cached_property
|
||||
def adj(self):
|
||||
return self.AntiAdjacencyView(self)
|
||||
|
||||
def subgraph(self, nodes):
|
||||
"""This subgraph method returns a full AntiGraph. Not a View"""
|
||||
nodes = set(nodes)
|
||||
G = _AntiGraph()
|
||||
G.add_nodes_from(nodes)
|
||||
for n in G:
|
||||
Gnbrs = G.adjlist_inner_dict_factory()
|
||||
G._adj[n] = Gnbrs
|
||||
for nbr, d in self._adj[n].items():
|
||||
if nbr in G._adj:
|
||||
Gnbrs[nbr] = d
|
||||
G._adj[nbr][n] = d
|
||||
G.graph = self.graph
|
||||
return G
|
||||
|
||||
class AntiDegreeView(nx.reportviews.DegreeView):
|
||||
def __iter__(self):
|
||||
all_nodes = set(self._succ)
|
||||
for n in self._nodes:
|
||||
nbrs = all_nodes - set(self._succ[n]) - {n}
|
||||
yield (n, len(nbrs))
|
||||
|
||||
def __getitem__(self, n):
|
||||
nbrs = set(self._succ) - set(self._succ[n]) - {n}
|
||||
# AntiGraph is a ThinGraph so all edges have weight 1
|
||||
return len(nbrs) + (n in nbrs)
|
||||
|
||||
@cached_property
|
||||
def degree(self):
|
||||
"""Returns an iterator for (node, degree) and degree for single node.
|
||||
|
||||
The node degree is the number of edges adjacent to the node.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
nbunch : iterable container, optional (default=all nodes)
|
||||
A container of nodes. The container will be iterated
|
||||
through once.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used
|
||||
as a weight. If None, then each edge has weight 1.
|
||||
The degree is the sum of the edge weights adjacent to the node.
|
||||
|
||||
Returns
|
||||
-------
|
||||
deg:
|
||||
Degree of the node, if a single node is passed as argument.
|
||||
nd_iter : an iterator
|
||||
The iterator returns two-tuples of (node, degree).
|
||||
|
||||
See Also
|
||||
--------
|
||||
degree
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.degree(0) # node 0 with degree 1
|
||||
1
|
||||
>>> list(G.degree([0, 1]))
|
||||
[(0, 1), (1, 2)]
|
||||
|
||||
"""
|
||||
return self.AntiDegreeView(self)
|
||||
|
||||
def adjacency(self):
|
||||
"""Returns an iterator of (node, adjacency set) tuples for all nodes
|
||||
in the dense graph.
|
||||
|
||||
This is the fastest way to look at every edge.
|
||||
For directed graphs, only outgoing adjacencies are included.
|
||||
|
||||
Returns
|
||||
-------
|
||||
adj_iter : iterator
|
||||
An iterator of (node, adjacency set) for all nodes in
|
||||
the graph.
|
||||
|
||||
"""
|
||||
for n in self._adj:
|
||||
yield (n, set(self._adj) - set(self._adj[n]) - {n})
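# Illustrative sketch (ours): build an _AntiGraph from the complement of a
# dense graph and confirm it reports the same degrees and core numbers as the
# dense original while storing only the missing edges.
def _example_antigraph_roundtrip():
    import networkx as nx

    G = nx.gnp_random_graph(20, 0.8, seed=42)
    A = _AntiGraph(nx.complement(G))
    assert sorted(G.degree()) == sorted(A.degree())
    assert nx.core_number(G) == nx.core_number(A)
    return A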
|
||||
+44
@@ -0,0 +1,44 @@
|
||||
"""
|
||||
**************
|
||||
Graph Matching
|
||||
**************
|
||||
|
||||
Given a graph G = (V,E), a matching M in G is a set of pairwise non-adjacent
|
||||
edges; that is, no two edges share a common vertex.
|
||||
|
||||
`Wikipedia: Matching <https://en.wikipedia.org/wiki/Matching_(graph_theory)>`_
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["min_maximal_matching"]
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def min_maximal_matching(G):
|
||||
r"""Returns the minimum maximal matching of G. That is, out of all maximal
|
||||
matchings of the graph G, the smallest is returned.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
min_maximal_matching : set
|
||||
Returns a set of edges such that no two edges share a common endpoint
|
||||
and every edge not in the set shares some common endpoint in the set.
|
||||
Cardinality will be 2*OPT in the worst case.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The algorithm computes an approximate solution for the minimum maximal
|
||||
cardinality matching problem. The solution is no more than 2 * OPT in size.
|
||||
Runtime is $O(|E|)$.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Vazirani, Vijay Approximation Algorithms (2001)
|
||||
"""
|
||||
return nx.maximal_matching(G)
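# Usage sketch added for exposition (the helper name is hypothetical): the
# returned edges are pairwise non-adjacent and every edge of G touches a
# matched node, which is exactly the maximal-matching guarantee used above.
def _example_min_maximal_matching():
    import networkx as nx

    G = nx.petersen_graph()
    M = nx.approximation.min_maximal_matching(G)
    matched = {v for e in M for v in e}
    assert len(matched) == 2 * len(M)  # no two edges share an endpoint
    assert all(u in matched or v in matched for u, v in G.edges())
    return M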
|
||||
+143
@@ -0,0 +1,143 @@
|
||||
import networkx as nx
|
||||
from networkx.utils.decorators import not_implemented_for, py_random_state
|
||||
|
||||
__all__ = ["randomized_partitioning", "one_exchange"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@py_random_state(1)
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def randomized_partitioning(G, seed=None, p=0.5, weight=None):
|
||||
"""Compute a random partitioning of the graph nodes and its cut value.
|
||||
|
||||
A partitioning is calculated by observing each node
|
||||
and deciding to add it to the partition with probability `p`,
|
||||
returning a random cut and its corresponding value (the
|
||||
sum of weights of edges connecting different partitions).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
p : scalar
|
||||
Probability for each node to be part of the first partition.
|
||||
Should be in [0,1]
|
||||
|
||||
weight : object
|
||||
Edge attribute key to use as weight. If not specified, edges
|
||||
have weight one.
|
||||
|
||||
Returns
|
||||
-------
|
||||
cut_size : scalar
|
||||
Value of the cut.
|
||||
|
||||
partition : pair of node sets
|
||||
A partitioning of the nodes that defines the cut.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.complete_graph(5)
|
||||
>>> cut_size, partition = nx.approximation.randomized_partitioning(G, seed=1)
|
||||
>>> cut_size
|
||||
6
|
||||
>>> partition
|
||||
({0, 3, 4}, {1, 2})
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
"""
|
||||
cut = {node for node in G.nodes() if seed.random() < p}
|
||||
cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
|
||||
partition = (cut, G.nodes - cut)
|
||||
return cut_size, partition
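# Illustrative sketch (ours, using only public NetworkX calls): the value
# returned above is exactly the weight of the edges crossing the reported
# partition.
def _example_randomized_partitioning():
    import networkx as nx

    G = nx.complete_graph(5)
    cut_size, (left, right) = nx.approximation.randomized_partitioning(G, seed=1)
    assert cut_size == nx.cut_size(G, left, right)
    return cut_size, left, right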
|
||||
|
||||
|
||||
def _swap_node_partition(cut, node):
|
||||
return cut - {node} if node in cut else cut.union({node})
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@py_random_state(2)
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def one_exchange(G, initial_cut=None, seed=None, weight=None):
|
||||
"""Compute a partitioning of the graphs nodes and the corresponding cut value.
|
||||
|
||||
Use a greedy one exchange strategy to find a locally maximal cut
|
||||
and its value. It works by finding the best node (one that gives
|
||||
the highest gain to the cut value) to add to the current cut
|
||||
and repeating this process until no improvement can be made.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : networkx Graph
|
||||
Graph to find a maximum cut for.
|
||||
|
||||
initial_cut : set
|
||||
Cut to use as a starting point. If not supplied the algorithm
|
||||
starts with an empty cut.
|
||||
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
weight : object
|
||||
Edge attribute key to use as weight. If not specified, edges
|
||||
have weight one.
|
||||
|
||||
Returns
|
||||
-------
|
||||
cut_value : scalar
|
||||
Value of the maximum cut.
|
||||
|
||||
partition : pair of node sets
|
||||
A partitioning of the nodes that defines a maximum cut.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.complete_graph(5)
|
||||
>>> curr_cut_size, partition = nx.approximation.one_exchange(G, seed=1)
|
||||
>>> curr_cut_size
|
||||
6
|
||||
>>> partition
|
||||
({0, 2}, {1, 3, 4})
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
"""
|
||||
if initial_cut is None:
|
||||
initial_cut = set()
|
||||
cut = set(initial_cut)
|
||||
current_cut_size = nx.algorithms.cut_size(G, cut, weight=weight)
|
||||
while True:
|
||||
nodes = list(G.nodes())
|
||||
# Shuffling the nodes ensures random tie-breaks in the following call to max
|
||||
seed.shuffle(nodes)
|
||||
best_node_to_swap = max(
|
||||
nodes,
|
||||
key=lambda v: nx.algorithms.cut_size(
|
||||
G, _swap_node_partition(cut, v), weight=weight
|
||||
),
|
||||
default=None,
|
||||
)
|
||||
potential_cut = _swap_node_partition(cut, best_node_to_swap)
|
||||
potential_cut_size = nx.algorithms.cut_size(G, potential_cut, weight=weight)
|
||||
|
||||
if potential_cut_size > current_cut_size:
|
||||
cut = potential_cut
|
||||
current_cut_size = potential_cut_size
|
||||
else:
|
||||
break
|
||||
|
||||
partition = (cut, G.nodes - cut)
|
||||
return current_cut_size, partition
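# Sketch added for exposition (helper name is ours): the cut returned above is
# locally maximal, so moving any single node to the other side of the
# partition cannot increase the cut value.
def _example_one_exchange_local_optimum():
    import networkx as nx

    G = nx.complete_graph(5)
    cut_value, (left, right) = nx.approximation.one_exchange(G, seed=1)
    for v in G:
        moved = left - {v} if v in left else left | {v}
        assert nx.cut_size(G, moved) <= cut_value
    return cut_value, left, right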
|
||||
+53
@@ -0,0 +1,53 @@
|
||||
"""
|
||||
Ramsey numbers.
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
from ...utils import arbitrary_element
|
||||
|
||||
__all__ = ["ramsey_R2"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def ramsey_R2(G):
|
||||
r"""Compute the largest clique and largest independent set in `G`.
|
||||
|
||||
This can be used to estimate bounds for the 2-color
|
||||
Ramsey number `R(2;s,t)` for `G`.
|
||||
|
||||
This is a recursive implementation which could run into trouble
|
||||
on large graphs due to deep recursion. Note that self-loop edges are ignored.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
Undirected graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
max_pair : (set, set) tuple
|
||||
Maximum clique, Maximum independent set.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
"""
|
||||
if not G:
|
||||
return set(), set()
|
||||
|
||||
node = arbitrary_element(G)
|
||||
nbrs = (nbr for nbr in nx.all_neighbors(G, node) if nbr != node)
|
||||
nnbrs = nx.non_neighbors(G, node)
|
||||
c_1, i_1 = ramsey_R2(G.subgraph(nbrs).copy())
|
||||
c_2, i_2 = ramsey_R2(G.subgraph(nnbrs).copy())
|
||||
|
||||
c_1.add(node)
|
||||
i_2.add(node)
|
||||
# Choose the larger of the two cliques and the larger of the two
|
||||
# independent sets, according to cardinality.
|
||||
return max(c_1, c_2, key=len), max(i_1, i_2, key=len)
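# Illustrative sketch (not part of the original module): the two sets returned
# above are, respectively, a clique and an independent set of the input graph.
def _example_ramsey_R2():
    import networkx as nx

    G = nx.cycle_graph(6)
    clique, indep = ramsey_R2(G)
    k = len(clique)
    assert G.subgraph(clique).number_of_edges() == k * (k - 1) // 2
    assert G.subgraph(indep).number_of_edges() == 0
    return clique, indep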
|
||||
+231
@@ -0,0 +1,231 @@
|
||||
from itertools import chain
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for, pairwise
|
||||
|
||||
__all__ = ["metric_closure", "steiner_tree"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(edge_attrs="weight", returns_graph=True)
|
||||
def metric_closure(G, weight="weight"):
|
||||
"""Return the metric closure of a graph.
|
||||
|
||||
The metric closure of a graph *G* is the complete graph in which each edge
|
||||
is weighted by the shortest path distance between the nodes in *G*.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
NetworkX graph
|
||||
Metric closure of the graph `G`.
|
||||
|
||||
"""
|
||||
M = nx.Graph()
|
||||
|
||||
Gnodes = set(G)
|
||||
|
||||
# check for connected graph while processing first node
|
||||
all_paths_iter = nx.all_pairs_dijkstra(G, weight=weight)
|
||||
u, (distance, path) = next(all_paths_iter)
|
||||
if Gnodes - set(distance):
|
||||
msg = "G is not a connected graph. metric_closure is not defined."
|
||||
raise nx.NetworkXError(msg)
|
||||
Gnodes.remove(u)
|
||||
for v in Gnodes:
|
||||
M.add_edge(u, v, distance=distance[v], path=path[v])
|
||||
|
||||
# first node done -- now process the rest
|
||||
for u, (distance, path) in all_paths_iter:
|
||||
Gnodes.remove(u)
|
||||
for v in Gnodes:
|
||||
M.add_edge(u, v, distance=distance[v], path=path[v])
|
||||
|
||||
return M
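# Usage sketch (ours): the metric closure of a path graph is the complete
# graph on the same nodes, with each edge's "distance" attribute equal to the
# hop count along the original path.
def _example_metric_closure():
    import networkx as nx

    G = nx.path_graph(4)  # 0 - 1 - 2 - 3, implicit unit weights
    M = metric_closure(G)
    assert M.number_of_edges() == 4 * 3 // 2
    assert M[0][3]["distance"] == 3
    return M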
|
||||
|
||||
|
||||
def _mehlhorn_steiner_tree(G, terminal_nodes, weight):
|
||||
paths = nx.multi_source_dijkstra_path(G, terminal_nodes)
|
||||
|
||||
d_1 = {}
|
||||
s = {}
|
||||
for v in G.nodes():
|
||||
s[v] = paths[v][0]
|
||||
d_1[(v, s[v])] = len(paths[v]) - 1
|
||||
|
||||
# G1-G4 names match those from the Mehlhorn 1988 paper.
|
||||
G_1_prime = nx.Graph()
|
||||
for u, v, data in G.edges(data=True):
|
||||
su, sv = s[u], s[v]
|
||||
weight_here = d_1[(u, su)] + data.get(weight, 1) + d_1[(v, sv)]
|
||||
if not G_1_prime.has_edge(su, sv):
|
||||
G_1_prime.add_edge(su, sv, weight=weight_here)
|
||||
else:
|
||||
new_weight = min(weight_here, G_1_prime[su][sv]["weight"])
|
||||
G_1_prime.add_edge(su, sv, weight=new_weight)
|
||||
|
||||
G_2 = nx.minimum_spanning_edges(G_1_prime, data=True)
|
||||
|
||||
G_3 = nx.Graph()
|
||||
for u, v, d in G_2:
|
||||
path = nx.shortest_path(G, u, v, weight)
|
||||
for n1, n2 in pairwise(path):
|
||||
G_3.add_edge(n1, n2)
|
||||
|
||||
G_3_mst = list(nx.minimum_spanning_edges(G_3, data=False))
|
||||
if G.is_multigraph():
|
||||
G_3_mst = (
|
||||
(u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in G_3_mst
|
||||
)
|
||||
G_4 = G.edge_subgraph(G_3_mst).copy()
|
||||
_remove_nonterminal_leaves(G_4, terminal_nodes)
|
||||
return G_4.edges()
|
||||
|
||||
|
||||
def _kou_steiner_tree(G, terminal_nodes, weight):
|
||||
# H is the subgraph induced by terminal_nodes in the metric closure M of G.
|
||||
M = metric_closure(G, weight=weight)
|
||||
H = M.subgraph(terminal_nodes)
|
||||
|
||||
# Use the 'distance' attribute of each edge provided by M.
|
||||
mst_edges = nx.minimum_spanning_edges(H, weight="distance", data=True)
|
||||
|
||||
# Create an iterator over each edge in each shortest path; repeats are okay
|
||||
mst_all_edges = chain.from_iterable(pairwise(d["path"]) for u, v, d in mst_edges)
|
||||
if G.is_multigraph():
|
||||
mst_all_edges = (
|
||||
(u, v, min(G[u][v], key=lambda k: G[u][v][k][weight]))
|
||||
for u, v in mst_all_edges
|
||||
)
|
||||
|
||||
# Find the MST again, over this new set of edges
|
||||
G_S = G.edge_subgraph(mst_all_edges)
|
||||
T_S = nx.minimum_spanning_edges(G_S, weight="weight", data=False)
|
||||
|
||||
# Leaf nodes that are not terminal might still remain; remove them here
|
||||
T_H = G.edge_subgraph(T_S).copy()
|
||||
_remove_nonterminal_leaves(T_H, terminal_nodes)
|
||||
|
||||
return T_H.edges()
|
||||
|
||||
|
||||
def _remove_nonterminal_leaves(G, terminals):
|
||||
terminal_set = set(terminals)
|
||||
leaves = {n for n in G if len(set(G[n]) - {n}) == 1}
|
||||
nonterminal_leaves = leaves - terminal_set
|
||||
|
||||
while nonterminal_leaves:
|
||||
# Removing a node may create new non-terminal leaves, so we limit
|
||||
# search for candidate non-terminal nodes to neighbors of current
|
||||
# non-terminal nodes
|
||||
candidate_leaves = set.union(*(set(G[n]) for n in nonterminal_leaves))
|
||||
candidate_leaves -= nonterminal_leaves | terminal_set
|
||||
# Remove current set of non-terminal nodes
|
||||
G.remove_nodes_from(nonterminal_leaves)
|
||||
# Find any new non-terminal nodes from the set of candidates
|
||||
leaves = {n for n in candidate_leaves if len(set(G[n]) - {n}) == 1}
|
||||
nonterminal_leaves = leaves - terminal_set
|
||||
|
||||
|
||||
ALGORITHMS = {
|
||||
"kou": _kou_steiner_tree,
|
||||
"mehlhorn": _mehlhorn_steiner_tree,
|
||||
}
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(preserve_all_attrs=True, returns_graph=True)
|
||||
def steiner_tree(G, terminal_nodes, weight="weight", method=None):
|
||||
r"""Return an approximation to the minimum Steiner tree of a graph.
|
||||
|
||||
The minimum Steiner tree of `G` w.r.t. a set of `terminal_nodes` (also *S*)
|
||||
is a tree within `G` that spans those nodes and has minimum size (sum of
|
||||
edge weights) among all such trees.
|
||||
|
||||
The approximation algorithm is specified with the `method` keyword
|
||||
argument. Both available algorithms produce a tree whose weight is
|
||||
within a ``(2 - (2 / l))`` factor of the weight of the optimal Steiner tree,
|
||||
where ``l`` is the minimum number of leaf nodes across all possible Steiner
|
||||
trees.
|
||||
|
||||
* ``"kou"`` [2]_ (runtime $O(|S| |V|^2)$) computes the minimum spanning tree of
|
||||
the subgraph of the metric closure of *G* induced by the terminal nodes,
|
||||
where the metric closure of *G* is the complete graph in which each edge is
|
||||
weighted by the shortest path distance between the nodes in *G*.
|
||||
|
||||
* ``"mehlhorn"`` [3]_ (runtime $O(|E|+|V|\log|V|)$) modifies Kou et al.'s
|
||||
algorithm, beginning by finding the closest terminal node for each
|
||||
non-terminal. This data is used to create a complete graph containing only
|
||||
the terminal nodes, in which each edge is weighted with the shortest path
|
||||
distance between them. The algorithm then proceeds in the same way as Kou
|
||||
et al.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
terminal_nodes : list
|
||||
A list of terminal nodes for which the minimum Steiner tree is
|
||||
to be found.
|
||||
|
||||
weight : string (default = 'weight')
|
||||
Use the edge attribute specified by this string as the edge weight.
|
||||
Any edge attribute not present defaults to 1.
|
||||
|
||||
method : string, optional (default = 'mehlhorn')
|
||||
The algorithm to use to approximate the Steiner tree.
|
||||
Supported options: 'kou', 'mehlhorn'.
|
||||
Other inputs produce a ValueError.
|
||||
|
||||
Returns
|
||||
-------
|
||||
NetworkX graph
|
||||
Approximation to the minimum Steiner tree of `G` induced by
|
||||
`terminal_nodes`.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If `G` is directed.
|
||||
|
||||
ValueError
|
||||
If the specified `method` is not supported.
|
||||
|
||||
Notes
|
||||
-----
|
||||
For multigraphs, the edge between two nodes with minimum weight is the
|
||||
edge put into the Steiner tree.
|
||||
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Steiner_tree_problem on Wikipedia.
|
||||
https://en.wikipedia.org/wiki/Steiner_tree_problem
|
||||
.. [2] Kou, L., G. Markowsky, and L. Berman. 1981.
|
||||
‘A Fast Algorithm for Steiner Trees’.
|
||||
Acta Informatica 15 (2): 141–45.
|
||||
https://doi.org/10.1007/BF00288961.
|
||||
.. [3] Mehlhorn, Kurt. 1988.
|
||||
‘A Faster Approximation Algorithm for the Steiner Problem in Graphs’.
|
||||
Information Processing Letters 27 (3): 125–28.
|
||||
https://doi.org/10.1016/0020-0190(88)90066-X.
|
||||
"""
|
||||
if method is None:
|
||||
method = "mehlhorn"
|
||||
|
||||
try:
|
||||
algo = ALGORITHMS[method]
|
||||
except KeyError as e:
|
||||
raise ValueError(f"{method} is not a valid choice for an algorithm.") from e
|
||||
|
||||
edges = algo(G, terminal_nodes, weight)
|
||||
# For multigraph we should add the minimal weight edge keys
|
||||
if G.is_multigraph():
|
||||
edges = (
|
||||
(u, v, min(G[u][v], key=lambda k: G[u][v][k][weight])) for u, v in edges
|
||||
)
|
||||
T = G.edge_subgraph(edges)
|
||||
return T
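# Illustrative sketch (ours; both methods are the ones registered in
# ALGORITHMS above): each method returns a connected subgraph containing all
# terminal nodes, and their total weights can be compared directly.
def _example_steiner_tree_methods():
    import networkx as nx

    G = nx.grid_2d_graph(4, 4)  # implicit unit edge weights
    terminals = [(0, 0), (3, 3), (0, 3)]
    T_kou = nx.approximation.steiner_tree(G, terminals, method="kou")
    T_meh = nx.approximation.steiner_tree(G, terminals, method="mehlhorn")
    for T in (T_kou, T_meh):
        assert all(t in T for t in terminals)
        assert nx.is_connected(T)
    return T_kou.size(), T_meh.size()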
|
||||
+41
@@ -0,0 +1,41 @@
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import average_clustering
|
||||
|
||||
# This approximation has to be exact in regular graphs
|
||||
# with no triangles or with all possible triangles.
|
||||
|
||||
|
||||
def test_petersen():
|
||||
# Actual coefficient is 0
|
||||
G = nx.petersen_graph()
|
||||
assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
|
||||
|
||||
|
||||
def test_petersen_seed():
|
||||
# Actual coefficient is 0
|
||||
G = nx.petersen_graph()
|
||||
assert average_clustering(G, trials=len(G) // 2, seed=1) == nx.average_clustering(G)
|
||||
|
||||
|
||||
def test_tetrahedral():
|
||||
# Actual coefficient is 1
|
||||
G = nx.tetrahedral_graph()
|
||||
assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
|
||||
|
||||
|
||||
def test_dodecahedral():
|
||||
# Actual coefficient is 0
|
||||
G = nx.dodecahedral_graph()
|
||||
assert average_clustering(G, trials=len(G) // 2) == nx.average_clustering(G)
|
||||
|
||||
|
||||
def test_empty():
|
||||
G = nx.empty_graph(5)
|
||||
assert average_clustering(G, trials=len(G) // 2) == 0
|
||||
|
||||
|
||||
def test_complete():
|
||||
G = nx.complete_graph(5)
|
||||
assert average_clustering(G, trials=len(G) // 2) == 1
|
||||
G = nx.complete_graph(7)
|
||||
assert average_clustering(G, trials=len(G) // 2) == 1
|
||||
+112
@@ -0,0 +1,112 @@
|
||||
"""Unit tests for the :mod:`networkx.algorithms.approximation.clique` module."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import (
|
||||
clique_removal,
|
||||
large_clique_size,
|
||||
max_clique,
|
||||
maximum_independent_set,
|
||||
)
|
||||
|
||||
|
||||
def is_independent_set(G, nodes):
|
||||
"""Returns True if and only if `nodes` is a clique in `G`.
|
||||
|
||||
`G` is a NetworkX graph. `nodes` is an iterable of nodes in
|
||||
`G`.
|
||||
|
||||
"""
|
||||
return G.subgraph(nodes).number_of_edges() == 0
|
||||
|
||||
|
||||
def is_clique(G, nodes):
|
||||
"""Returns True if and only if `nodes` is an independent set
|
||||
in `G`.
|
||||
|
||||
`G` is an undirected simple graph. `nodes` is an iterable of
|
||||
nodes in `G`.
|
||||
|
||||
"""
|
||||
H = G.subgraph(nodes)
|
||||
n = len(H)
|
||||
return H.number_of_edges() == n * (n - 1) // 2
|
||||
|
||||
|
||||
class TestCliqueRemoval:
|
||||
"""Unit tests for the
|
||||
:func:`~networkx.algorithms.approximation.clique_removal` function.
|
||||
|
||||
"""
|
||||
|
||||
def test_trivial_graph(self):
|
||||
G = nx.trivial_graph()
|
||||
independent_set, cliques = clique_removal(G)
|
||||
assert is_independent_set(G, independent_set)
|
||||
assert all(is_clique(G, clique) for clique in cliques)
|
||||
# In fact, we should only have 1-cliques, that is, singleton nodes.
|
||||
assert all(len(clique) == 1 for clique in cliques)
|
||||
|
||||
def test_complete_graph(self):
|
||||
G = nx.complete_graph(10)
|
||||
independent_set, cliques = clique_removal(G)
|
||||
assert is_independent_set(G, independent_set)
|
||||
assert all(is_clique(G, clique) for clique in cliques)
|
||||
|
||||
def test_barbell_graph(self):
|
||||
G = nx.barbell_graph(10, 5)
|
||||
independent_set, cliques = clique_removal(G)
|
||||
assert is_independent_set(G, independent_set)
|
||||
assert all(is_clique(G, clique) for clique in cliques)
|
||||
|
||||
|
||||
class TestMaxClique:
|
||||
"""Unit tests for the :func:`networkx.algorithms.approximation.max_clique`
|
||||
function.
|
||||
|
||||
"""
|
||||
|
||||
def test_null_graph(self):
|
||||
G = nx.null_graph()
|
||||
assert len(max_clique(G)) == 0
|
||||
|
||||
def test_complete_graph(self):
|
||||
graph = nx.complete_graph(30)
|
||||
# this should return the entire graph
|
||||
mc = max_clique(graph)
|
||||
assert 30 == len(mc)
|
||||
|
||||
def test_maximal_by_cardinality(self):
|
||||
"""Tests that the maximal clique is computed according to maximum
|
||||
cardinality of the sets.
|
||||
|
||||
For more information, see pull request #1531.
|
||||
|
||||
"""
|
||||
G = nx.complete_graph(5)
|
||||
G.add_edge(4, 5)
|
||||
clique = max_clique(G)
|
||||
assert len(clique) > 1
|
||||
|
||||
G = nx.lollipop_graph(30, 2)
|
||||
clique = max_clique(G)
|
||||
assert len(clique) > 2
|
||||
|
||||
|
||||
def test_large_clique_size():
|
||||
G = nx.complete_graph(9)
|
||||
nx.add_cycle(G, [9, 10, 11])
|
||||
G.add_edge(8, 9)
|
||||
G.add_edge(1, 12)
|
||||
G.add_node(13)
|
||||
|
||||
assert large_clique_size(G) == 9
|
||||
G.remove_node(5)
|
||||
assert large_clique_size(G) == 8
|
||||
G.remove_edge(2, 3)
|
||||
assert large_clique_size(G) == 7
|
||||
|
||||
|
||||
def test_independent_set():
|
||||
# smoke test
|
||||
G = nx.Graph()
|
||||
assert len(maximum_independent_set(G)) == 0
|
||||
+199
@@ -0,0 +1,199 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import approximation as approx
|
||||
|
||||
|
||||
def test_global_node_connectivity():
|
||||
# Figure 1 chapter on Connectivity
|
||||
G = nx.Graph()
|
||||
G.add_edges_from(
|
||||
[
|
||||
(1, 2),
|
||||
(1, 3),
|
||||
(1, 4),
|
||||
(1, 5),
|
||||
(2, 3),
|
||||
(2, 6),
|
||||
(3, 4),
|
||||
(3, 6),
|
||||
(4, 6),
|
||||
(4, 7),
|
||||
(5, 7),
|
||||
(6, 8),
|
||||
(6, 9),
|
||||
(7, 8),
|
||||
(7, 10),
|
||||
(8, 11),
|
||||
(9, 10),
|
||||
(9, 11),
|
||||
(10, 11),
|
||||
]
|
||||
)
|
||||
assert 2 == approx.local_node_connectivity(G, 1, 11)
|
||||
assert 2 == approx.node_connectivity(G)
|
||||
assert 2 == approx.node_connectivity(G, 1, 11)
|
||||
|
||||
|
||||
def test_white_harary1():
|
||||
# Figure 1b white and harary (2001)
|
||||
# A graph with high adhesion (edge connectivity) and low cohesion
|
||||
# (node connectivity)
|
||||
G = nx.disjoint_union(nx.complete_graph(4), nx.complete_graph(4))
|
||||
G.remove_node(7)
|
||||
for i in range(4, 7):
|
||||
G.add_edge(0, i)
|
||||
G = nx.disjoint_union(G, nx.complete_graph(4))
|
||||
G.remove_node(G.order() - 1)
|
||||
for i in range(7, 10):
|
||||
G.add_edge(0, i)
|
||||
assert 1 == approx.node_connectivity(G)
|
||||
|
||||
|
||||
def test_complete_graphs():
|
||||
for n in range(5, 25, 5):
|
||||
G = nx.complete_graph(n)
|
||||
assert n - 1 == approx.node_connectivity(G)
|
||||
assert n - 1 == approx.node_connectivity(G, 0, 3)
|
||||
|
||||
|
||||
def test_empty_graphs():
|
||||
for k in range(5, 25, 5):
|
||||
G = nx.empty_graph(k)
|
||||
assert 0 == approx.node_connectivity(G)
|
||||
assert 0 == approx.node_connectivity(G, 0, 3)
|
||||
|
||||
|
||||
def test_petersen():
|
||||
G = nx.petersen_graph()
|
||||
assert 3 == approx.node_connectivity(G)
|
||||
assert 3 == approx.node_connectivity(G, 0, 5)
|
||||
|
||||
|
||||
# Approximation fails with tutte graph
|
||||
# def test_tutte():
|
||||
# G = nx.tutte_graph()
|
||||
# assert_equal(3, approx.node_connectivity(G))
|
||||
|
||||
|
||||
def test_dodecahedral():
|
||||
G = nx.dodecahedral_graph()
|
||||
assert 3 == approx.node_connectivity(G)
|
||||
assert 3 == approx.node_connectivity(G, 0, 5)
|
||||
|
||||
|
||||
def test_octahedral():
|
||||
G = nx.octahedral_graph()
|
||||
assert 4 == approx.node_connectivity(G)
|
||||
assert 4 == approx.node_connectivity(G, 0, 5)
|
||||
|
||||
|
||||
# Approximation can fail with icosahedral graph depending
|
||||
# on iteration order.
|
||||
# def test_icosahedral():
|
||||
# G=nx.icosahedral_graph()
|
||||
# assert_equal(5, approx.node_connectivity(G))
|
||||
# assert_equal(5, approx.node_connectivity(G, 0, 5))
|
||||
|
||||
|
||||
def test_only_source():
|
||||
G = nx.complete_graph(5)
|
||||
pytest.raises(nx.NetworkXError, approx.node_connectivity, G, s=0)
|
||||
|
||||
|
||||
def test_only_target():
|
||||
G = nx.complete_graph(5)
|
||||
pytest.raises(nx.NetworkXError, approx.node_connectivity, G, t=0)
|
||||
|
||||
|
||||
def test_missing_source():
|
||||
G = nx.path_graph(4)
|
||||
pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 10, 1)
|
||||
|
||||
|
||||
def test_missing_target():
|
||||
G = nx.path_graph(4)
|
||||
pytest.raises(nx.NetworkXError, approx.node_connectivity, G, 1, 10)
|
||||
|
||||
|
||||
def test_source_equals_target():
|
||||
G = nx.complete_graph(5)
|
||||
pytest.raises(nx.NetworkXError, approx.local_node_connectivity, G, 0, 0)
|
||||
|
||||
|
||||
def test_directed_node_connectivity():
|
||||
G = nx.cycle_graph(10, create_using=nx.DiGraph()) # only one direction
|
||||
D = nx.cycle_graph(10).to_directed() # 2 reciprocal edges
|
||||
assert 1 == approx.node_connectivity(G)
|
||||
assert 1 == approx.node_connectivity(G, 1, 4)
|
||||
assert 2 == approx.node_connectivity(D)
|
||||
assert 2 == approx.node_connectivity(D, 1, 4)
|
||||
|
||||
|
||||
class TestAllPairsNodeConnectivityApprox:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
cls.path = nx.path_graph(7)
|
||||
cls.directed_path = nx.path_graph(7, create_using=nx.DiGraph())
|
||||
cls.cycle = nx.cycle_graph(7)
|
||||
cls.directed_cycle = nx.cycle_graph(7, create_using=nx.DiGraph())
|
||||
cls.gnp = nx.gnp_random_graph(30, 0.1)
|
||||
cls.directed_gnp = nx.gnp_random_graph(30, 0.1, directed=True)
|
||||
cls.K20 = nx.complete_graph(20)
|
||||
cls.K10 = nx.complete_graph(10)
|
||||
cls.K5 = nx.complete_graph(5)
|
||||
cls.G_list = [
|
||||
cls.path,
|
||||
cls.directed_path,
|
||||
cls.cycle,
|
||||
cls.directed_cycle,
|
||||
cls.gnp,
|
||||
cls.directed_gnp,
|
||||
cls.K10,
|
||||
cls.K5,
|
||||
cls.K20,
|
||||
]
|
||||
|
||||
def test_cycles(self):
|
||||
K_undir = approx.all_pairs_node_connectivity(self.cycle)
|
||||
for source in K_undir:
|
||||
for target, k in K_undir[source].items():
|
||||
assert k == 2
|
||||
K_dir = approx.all_pairs_node_connectivity(self.directed_cycle)
|
||||
for source in K_dir:
|
||||
for target, k in K_dir[source].items():
|
||||
assert k == 1
|
||||
|
||||
def test_complete(self):
|
||||
for G in [self.K10, self.K5, self.K20]:
|
||||
K = approx.all_pairs_node_connectivity(G)
|
||||
for source in K:
|
||||
for target, k in K[source].items():
|
||||
assert k == len(G) - 1
|
||||
|
||||
def test_paths(self):
|
||||
K_undir = approx.all_pairs_node_connectivity(self.path)
|
||||
for source in K_undir:
|
||||
for target, k in K_undir[source].items():
|
||||
assert k == 1
|
||||
K_dir = approx.all_pairs_node_connectivity(self.directed_path)
|
||||
for source in K_dir:
|
||||
for target, k in K_dir[source].items():
|
||||
if source < target:
|
||||
assert k == 1
|
||||
else:
|
||||
assert k == 0
|
||||
|
||||
def test_cutoff(self):
|
||||
for G in [self.K10, self.K5, self.K20]:
|
||||
for mp in [2, 3, 4]:
|
||||
paths = approx.all_pairs_node_connectivity(G, cutoff=mp)
|
||||
for source in paths:
|
||||
for target, K in paths[source].items():
|
||||
assert K == mp
|
||||
|
||||
def test_all_pairs_connectivity_nbunch(self):
|
||||
G = nx.complete_graph(5)
|
||||
nbunch = [0, 2, 3]
|
||||
C = approx.all_pairs_node_connectivity(G, nbunch=nbunch)
|
||||
assert len(C) == len(nbunch)
|
||||
+59
@@ -0,0 +1,59 @@
|
||||
"""Unit tests for the :mod:`networkx.algorithms.approximation.distance_measures` module."""
|
||||
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import diameter
|
||||
|
||||
|
||||
class TestDiameter:
|
||||
"""Unit tests for the approximate diameter function
|
||||
:func:`~networkx.algorithms.approximation.distance_measures.diameter`.
|
||||
"""
|
||||
|
||||
def test_null_graph(self):
|
||||
"""Test empty graph."""
|
||||
G = nx.null_graph()
|
||||
with pytest.raises(
|
||||
nx.NetworkXError, match="Expected non-empty NetworkX graph!"
|
||||
):
|
||||
diameter(G)
|
||||
|
||||
def test_undirected_non_connected(self):
|
||||
"""Test an undirected disconnected graph."""
|
||||
graph = nx.path_graph(10)
|
||||
graph.remove_edge(3, 4)
|
||||
with pytest.raises(nx.NetworkXError, match="Graph not connected."):
|
||||
diameter(graph)
|
||||
|
||||
def test_directed_non_strongly_connected(self):
|
||||
"""Test a directed non strongly connected graph."""
|
||||
graph = nx.path_graph(10, create_using=nx.DiGraph())
|
||||
with pytest.raises(nx.NetworkXError, match="DiGraph not strongly connected."):
|
||||
diameter(graph)
|
||||
|
||||
def test_complete_undirected_graph(self):
|
||||
"""Test a complete undirected graph."""
|
||||
graph = nx.complete_graph(10)
|
||||
assert diameter(graph) == 1
|
||||
|
||||
def test_complete_directed_graph(self):
|
||||
"""Test a complete directed graph."""
|
||||
graph = nx.complete_graph(10, create_using=nx.DiGraph())
|
||||
assert diameter(graph) == 1
|
||||
|
||||
def test_undirected_path_graph(self):
|
||||
"""Test an undirected path graph with 10 nodes."""
|
||||
graph = nx.path_graph(10)
|
||||
assert diameter(graph) == 9
|
||||
|
||||
def test_directed_path_graph(self):
|
||||
"""Test a directed path graph with 10 nodes."""
|
||||
graph = nx.path_graph(10).to_directed()
|
||||
assert diameter(graph) == 9
|
||||
|
||||
def test_single_node(self):
|
||||
"""Test a graph which contains just a node."""
|
||||
graph = nx.Graph()
|
||||
graph.add_node(1)
|
||||
assert diameter(graph) == 0
|
||||
+78
@@ -0,0 +1,78 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import (
|
||||
min_edge_dominating_set,
|
||||
min_weighted_dominating_set,
|
||||
)
|
||||
|
||||
|
||||
class TestMinWeightDominatingSet:
|
||||
def test_min_weighted_dominating_set(self):
|
||||
graph = nx.Graph()
|
||||
graph.add_edge(1, 2)
|
||||
graph.add_edge(1, 5)
|
||||
graph.add_edge(2, 3)
|
||||
graph.add_edge(2, 5)
|
||||
graph.add_edge(3, 4)
|
||||
graph.add_edge(3, 6)
|
||||
graph.add_edge(5, 6)
|
||||
|
||||
vertices = {1, 2, 3, 4, 5, 6}
|
||||
# due to ties, tight bounds might be hard to test
|
||||
dom_set = min_weighted_dominating_set(graph)
|
||||
for vertex in vertices - dom_set:
|
||||
neighbors = set(graph.neighbors(vertex))
|
||||
assert len(neighbors & dom_set) > 0, "Non dominating set found!"
|
||||
|
||||
def test_star_graph(self):
|
||||
"""Tests that an approximate dominating set for the star graph,
|
||||
even when the center node does not have the smallest integer
|
||||
label, gives just the center node.
|
||||
|
||||
For more information, see #1527.
|
||||
|
||||
"""
|
||||
# Create a star graph in which the center node has the highest
|
||||
# label instead of the lowest.
|
||||
G = nx.star_graph(10)
|
||||
G = nx.relabel_nodes(G, {0: 9, 9: 0})
|
||||
assert min_weighted_dominating_set(G) == {9}
|
||||
|
||||
def test_null_graph(self):
|
||||
"""Tests that the unique dominating set for the null graph is an empty set"""
|
||||
G = nx.Graph()
|
||||
assert min_weighted_dominating_set(G) == set()
|
||||
|
||||
def test_min_edge_dominating_set(self):
|
||||
graph = nx.path_graph(5)
|
||||
dom_set = min_edge_dominating_set(graph)
|
||||
|
||||
# this is a crappy way to test, but good enough for now.
|
||||
for edge in graph.edges():
|
||||
if edge in dom_set:
|
||||
continue
|
||||
else:
|
||||
u, v = edge
|
||||
found = False
|
||||
for dom_edge in dom_set:
|
||||
found |= u == dom_edge[0] or u == dom_edge[1]
|
||||
assert found, "Non adjacent edge found!"
|
||||
|
||||
graph = nx.complete_graph(10)
|
||||
dom_set = min_edge_dominating_set(graph)
|
||||
|
||||
# this is a crappy way to test, but good enough for now.
|
||||
for edge in graph.edges():
|
||||
if edge in dom_set:
|
||||
continue
|
||||
else:
|
||||
u, v = edge
|
||||
found = False
|
||||
for dom_edge in dom_set:
|
||||
found |= u == dom_edge[0] or u == dom_edge[1]
|
||||
assert found, "Non adjacent edge found!"
|
||||
|
||||
graph = nx.Graph() # empty Networkx graph
|
||||
with pytest.raises(ValueError, match="Expected non-empty NetworkX graph!"):
|
||||
min_edge_dominating_set(graph)
|
||||
+303
@@ -0,0 +1,303 @@
|
||||
# Test for approximation to k-components algorithm
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import k_components
|
||||
from networkx.algorithms.approximation.kcomponents import _AntiGraph, _same
|
||||
|
||||
|
||||
def build_k_number_dict(k_components):
|
||||
k_num = {}
|
||||
for k, comps in sorted(k_components.items()):
|
||||
for comp in comps:
|
||||
for node in comp:
|
||||
k_num[node] = k
|
||||
return k_num
|
||||
|
||||
|
||||
##
|
||||
# Some nice synthetic graphs
|
||||
##
|
||||
|
||||
|
||||
def graph_example_1():
|
||||
G = nx.convert_node_labels_to_integers(
|
||||
nx.grid_graph([5, 5]), label_attribute="labels"
|
||||
)
|
||||
rlabels = nx.get_node_attributes(G, "labels")
|
||||
labels = {v: k for k, v in rlabels.items()}
|
||||
|
||||
for nodes in [
|
||||
(labels[(0, 0)], labels[(1, 0)]),
|
||||
(labels[(0, 4)], labels[(1, 4)]),
|
||||
(labels[(3, 0)], labels[(4, 0)]),
|
||||
(labels[(3, 4)], labels[(4, 4)]),
|
||||
]:
|
||||
new_node = G.order() + 1
|
||||
# Petersen graph is triconnected
|
||||
P = nx.petersen_graph()
|
||||
G = nx.disjoint_union(G, P)
|
||||
# Add two edges between the grid and P
|
||||
G.add_edge(new_node + 1, nodes[0])
|
||||
G.add_edge(new_node, nodes[1])
|
||||
# K5 is 4-connected
|
||||
K = nx.complete_graph(5)
|
||||
G = nx.disjoint_union(G, K)
|
||||
# Add three edges between P and K5
|
||||
G.add_edge(new_node + 2, new_node + 11)
|
||||
G.add_edge(new_node + 3, new_node + 12)
|
||||
G.add_edge(new_node + 4, new_node + 13)
|
||||
# Add another K5 sharing a node
|
||||
G = nx.disjoint_union(G, K)
|
||||
nbrs = G[new_node + 10]
|
||||
G.remove_node(new_node + 10)
|
||||
for nbr in nbrs:
|
||||
G.add_edge(new_node + 17, nbr)
|
||||
G.add_edge(new_node + 16, new_node + 5)
|
||||
return G
|
||||
|
||||
|
||||
def torrents_and_ferraro_graph():
|
||||
G = nx.convert_node_labels_to_integers(
|
||||
nx.grid_graph([5, 5]), label_attribute="labels"
|
||||
)
|
||||
rlabels = nx.get_node_attributes(G, "labels")
|
||||
labels = {v: k for k, v in rlabels.items()}
|
||||
|
||||
for nodes in [(labels[(0, 4)], labels[(1, 4)]), (labels[(3, 4)], labels[(4, 4)])]:
|
||||
new_node = G.order() + 1
|
||||
# Petersen graph is triconnected
|
||||
P = nx.petersen_graph()
|
||||
G = nx.disjoint_union(G, P)
|
||||
# Add two edges between the grid and P
|
||||
G.add_edge(new_node + 1, nodes[0])
|
||||
G.add_edge(new_node, nodes[1])
|
||||
# K5 is 4-connected
|
||||
K = nx.complete_graph(5)
|
||||
G = nx.disjoint_union(G, K)
|
||||
# Add three edges between P and K5
|
||||
G.add_edge(new_node + 2, new_node + 11)
|
||||
G.add_edge(new_node + 3, new_node + 12)
|
||||
G.add_edge(new_node + 4, new_node + 13)
|
||||
# Add another K5 sharing a node
|
||||
G = nx.disjoint_union(G, K)
|
||||
nbrs = G[new_node + 10]
|
||||
G.remove_node(new_node + 10)
|
||||
for nbr in nbrs:
|
||||
G.add_edge(new_node + 17, nbr)
|
||||
# Commenting this makes the graph not biconnected !!
|
||||
# This stupid mistake made one reviewer very angry :P
|
||||
G.add_edge(new_node + 16, new_node + 8)
|
||||
|
||||
for nodes in [(labels[(0, 0)], labels[(1, 0)]), (labels[(3, 0)], labels[(4, 0)])]:
|
||||
new_node = G.order() + 1
|
||||
# Petersen graph is triconnected
|
||||
P = nx.petersen_graph()
|
||||
G = nx.disjoint_union(G, P)
|
||||
# Add two edges between the grid and P
|
||||
G.add_edge(new_node + 1, nodes[0])
|
||||
G.add_edge(new_node, nodes[1])
|
||||
# K5 is 4-connected
|
||||
K = nx.complete_graph(5)
|
||||
G = nx.disjoint_union(G, K)
|
||||
# Add three edges between P and K5
|
||||
G.add_edge(new_node + 2, new_node + 11)
|
||||
G.add_edge(new_node + 3, new_node + 12)
|
||||
G.add_edge(new_node + 4, new_node + 13)
|
||||
# Add another K5 sharing two nodes
|
||||
G = nx.disjoint_union(G, K)
|
||||
nbrs = G[new_node + 10]
|
||||
G.remove_node(new_node + 10)
|
||||
for nbr in nbrs:
|
||||
G.add_edge(new_node + 17, nbr)
|
||||
nbrs2 = G[new_node + 9]
|
||||
G.remove_node(new_node + 9)
|
||||
for nbr in nbrs2:
|
||||
G.add_edge(new_node + 18, nbr)
|
||||
return G
|
||||
|
||||
|
||||
# Helper function
|
||||
|
||||
|
||||
def _check_connectivity(G):
|
||||
result = k_components(G)
|
||||
for k, components in result.items():
|
||||
if k < 3:
|
||||
continue
|
||||
for component in components:
|
||||
C = G.subgraph(component)
|
||||
K = nx.node_connectivity(C)
|
||||
assert K >= k
|
||||
|
||||
|
||||
def test_torrents_and_ferraro_graph():
|
||||
G = torrents_and_ferraro_graph()
|
||||
_check_connectivity(G)
|
||||
|
||||
|
||||
def test_example_1():
|
||||
G = graph_example_1()
|
||||
_check_connectivity(G)
|
||||
|
||||
|
||||
def test_karate_0():
|
||||
G = nx.karate_club_graph()
|
||||
_check_connectivity(G)
|
||||
|
||||
|
||||
def test_karate_1():
|
||||
karate_k_num = {
|
||||
0: 4,
|
||||
1: 4,
|
||||
2: 4,
|
||||
3: 4,
|
||||
4: 3,
|
||||
5: 3,
|
||||
6: 3,
|
||||
7: 4,
|
||||
8: 4,
|
||||
9: 2,
|
||||
10: 3,
|
||||
11: 1,
|
||||
12: 2,
|
||||
13: 4,
|
||||
14: 2,
|
||||
15: 2,
|
||||
16: 2,
|
||||
17: 2,
|
||||
18: 2,
|
||||
19: 3,
|
||||
20: 2,
|
||||
21: 2,
|
||||
22: 2,
|
||||
23: 3,
|
||||
24: 3,
|
||||
25: 3,
|
||||
26: 2,
|
||||
27: 3,
|
||||
28: 3,
|
||||
29: 3,
|
||||
30: 4,
|
||||
31: 3,
|
||||
32: 4,
|
||||
33: 4,
|
||||
}
|
||||
approx_karate_k_num = karate_k_num.copy()
|
||||
approx_karate_k_num[24] = 2
|
||||
approx_karate_k_num[25] = 2
|
||||
G = nx.karate_club_graph()
|
||||
k_comps = k_components(G)
|
||||
k_num = build_k_number_dict(k_comps)
|
||||
assert k_num in (karate_k_num, approx_karate_k_num)
|
||||
|
||||
|
||||
def test_example_1_detail_3_and_4():
|
||||
G = graph_example_1()
|
||||
result = k_components(G)
|
||||
# In this example graph there are 8 3-components, 4 with 15 nodes
|
||||
# and 4 with 5 nodes.
|
||||
assert len(result[3]) == 8
|
||||
assert len([c for c in result[3] if len(c) == 15]) == 4
|
||||
assert len([c for c in result[3] if len(c) == 5]) == 4
|
||||
# There are also 8 4-components all with 5 nodes.
|
||||
assert len(result[4]) == 8
|
||||
assert all(len(c) == 5 for c in result[4])
|
||||
# Finally check that the k-components detected have actually node
|
||||
# connectivity >= k.
|
||||
for k, components in result.items():
|
||||
if k < 3:
|
||||
continue
|
||||
for component in components:
|
||||
K = nx.node_connectivity(G.subgraph(component))
|
||||
assert K >= k
|
||||
|
||||
|
||||
def test_directed():
|
||||
with pytest.raises(nx.NetworkXNotImplemented):
|
||||
G = nx.gnp_random_graph(10, 0.4, directed=True)
|
||||
kc = k_components(G)
|
||||
|
||||
|
||||
def test_same():
|
||||
equal = {"A": 2, "B": 2, "C": 2}
|
||||
slightly_different = {"A": 2, "B": 1, "C": 2}
|
||||
different = {"A": 2, "B": 8, "C": 18}
|
||||
assert _same(equal)
|
||||
assert not _same(slightly_different)
|
||||
assert _same(slightly_different, tol=1)
|
||||
assert not _same(different)
|
||||
assert not _same(different, tol=4)
|
||||
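Based only on the assertions above, _same appears to check that all values of a dict agree within an optional tolerance. A minimal, self-contained sketch of that behavior (an assumption; the private helper in networkx may be implemented differently):

def same_within_tolerance(measure, tol=0):
    # All values count as "the same" when their spread does not exceed tol.
    values = set(measure.values())
    return max(values) - min(values) <= tol

assert same_within_tolerance({"A": 2, "B": 2, "C": 2})
assert not same_within_tolerance({"A": 2, "B": 1, "C": 2})
assert same_within_tolerance({"A": 2, "B": 1, "C": 2}, tol=1)
assert not same_within_tolerance({"A": 2, "B": 8, "C": 18}, tol=4)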
|
||||
|
||||
class TestAntiGraph:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
cls.Gnp = nx.gnp_random_graph(20, 0.8, seed=42)
|
||||
cls.Anp = _AntiGraph(nx.complement(cls.Gnp))
|
||||
cls.Gd = nx.davis_southern_women_graph()
|
||||
cls.Ad = _AntiGraph(nx.complement(cls.Gd))
|
||||
cls.Gk = nx.karate_club_graph()
|
||||
cls.Ak = _AntiGraph(nx.complement(cls.Gk))
|
||||
cls.GA = [(cls.Gnp, cls.Anp), (cls.Gd, cls.Ad), (cls.Gk, cls.Ak)]
|
||||
|
||||
def test_size(self):
|
||||
for G, A in self.GA:
|
||||
n = G.order()
|
||||
s = len(list(G.edges())) + len(list(A.edges()))
|
||||
assert s == (n * (n - 1)) / 2
|
||||
|
||||
def test_degree(self):
|
||||
for G, A in self.GA:
|
||||
assert sorted(G.degree()) == sorted(A.degree())
|
||||
|
||||
def test_core_number(self):
|
||||
for G, A in self.GA:
|
||||
assert nx.core_number(G) == nx.core_number(A)
|
||||
|
||||
def test_connected_components(self):
|
||||
# connected components are the same unless there are isolated nodes or a node with degree == len(G) - 1
|
||||
# graphs in self.GA avoid this problem
|
||||
for G, A in self.GA:
|
||||
gc = [set(c) for c in nx.connected_components(G)]
|
||||
ac = [set(c) for c in nx.connected_components(A)]
|
||||
for comp in ac:
|
||||
assert comp in gc
|
||||
|
||||
def test_adj(self):
|
||||
for G, A in self.GA:
|
||||
for n, nbrs in G.adj.items():
|
||||
a_adj = sorted((n, sorted(ad)) for n, ad in A.adj.items())
|
||||
g_adj = sorted((n, sorted(ad)) for n, ad in G.adj.items())
|
||||
assert a_adj == g_adj
|
||||
|
||||
def test_adjacency(self):
|
||||
for G, A in self.GA:
|
||||
a_adj = list(A.adjacency())
|
||||
for n, nbrs in G.adjacency():
|
||||
assert (n, set(nbrs)) in a_adj
|
||||
|
||||
def test_neighbors(self):
|
||||
for G, A in self.GA:
|
||||
node = list(G.nodes())[0]
|
||||
assert set(G.neighbors(node)) == set(A.neighbors(node))
|
||||
|
||||
def test_node_not_in_graph(self):
|
||||
for G, A in self.GA:
|
||||
node = "non_existent_node"
|
||||
pytest.raises(nx.NetworkXError, A.neighbors, node)
|
||||
pytest.raises(nx.NetworkXError, G.neighbors, node)
|
||||
|
||||
def test_degree_thingraph(self):
|
||||
for G, A in self.GA:
|
||||
node = list(G.nodes())[0]
|
||||
nodes = list(G.nodes())[1:4]
|
||||
assert G.degree(node) == A.degree(node)
|
||||
assert sum(d for n, d in G.degree()) == sum(d for n, d in A.degree())
|
||||
# AntiGraph is a ThinGraph, so all the weights are 1
|
||||
assert sum(d for n, d in A.degree()) == sum(
|
||||
d for n, d in A.degree(weight="weight")
|
||||
)
|
||||
assert sum(d for n, d in G.degree(nodes)) == sum(
|
||||
d for n, d in A.degree(nodes)
|
||||
)
|
||||
+8
@@ -0,0 +1,8 @@
|
||||
import networkx as nx
|
||||
import networkx.algorithms.approximation as a
|
||||
|
||||
|
||||
def test_min_maximal_matching():
|
||||
# smoke test
|
||||
G = nx.Graph()
|
||||
assert len(a.min_maximal_matching(G)) == 0
|
||||
+94
@@ -0,0 +1,94 @@
|
||||
import random
|
||||
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import maxcut
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"f", (nx.approximation.randomized_partitioning, nx.approximation.one_exchange)
|
||||
)
|
||||
@pytest.mark.parametrize("graph_constructor", (nx.DiGraph, nx.MultiGraph))
|
||||
def test_raises_on_directed_and_multigraphs(f, graph_constructor):
|
||||
G = graph_constructor([(0, 1), (1, 2)])
|
||||
with pytest.raises(nx.NetworkXNotImplemented):
|
||||
f(G)
|
||||
|
||||
|
||||
def _is_valid_cut(G, set1, set2):
|
||||
union = set1.union(set2)
|
||||
assert union == set(G.nodes)
|
||||
assert len(set1) + len(set2) == G.number_of_nodes()
|
||||
|
||||
|
||||
def _cut_is_locally_optimal(G, cut_size, set1):
|
||||
# test if cut can be locally improved
|
||||
for i, node in enumerate(set1):
|
||||
cut_size_without_node = nx.algorithms.cut_size(
|
||||
G, set1 - {node}, weight="weight"
|
||||
)
|
||||
assert cut_size_without_node <= cut_size
|
||||
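For readers unfamiliar with nx.cut_size, which the helper above relies on: it counts (or sums the weights of) the edges crossing a partition. A quick self-contained illustration, not part of the test suite:

import networkx as nx

# In K4 the cut between {0} and the remaining nodes crosses exactly 3 edges.
K4 = nx.complete_graph(4)
assert nx.cut_size(K4, {0}) == 3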
|
||||
|
||||
def test_random_partitioning():
|
||||
G = nx.complete_graph(5)
|
||||
_, (set1, set2) = maxcut.randomized_partitioning(G, seed=5)
|
||||
_is_valid_cut(G, set1, set2)
|
||||
|
||||
|
||||
def test_random_partitioning_all_to_one():
|
||||
G = nx.complete_graph(5)
|
||||
_, (set1, set2) = maxcut.randomized_partitioning(G, p=1)
|
||||
_is_valid_cut(G, set1, set2)
|
||||
assert len(set1) == G.number_of_nodes()
|
||||
assert len(set2) == 0
|
||||
|
||||
|
||||
def test_one_exchange_basic():
|
||||
G = nx.complete_graph(5)
|
||||
random.seed(5)
|
||||
for u, v, w in G.edges(data=True):
|
||||
w["weight"] = random.randrange(-100, 100, 1) / 10
|
||||
|
||||
initial_cut = set(random.sample(sorted(G.nodes()), k=5))
|
||||
cut_size, (set1, set2) = maxcut.one_exchange(
|
||||
G, initial_cut, weight="weight", seed=5
|
||||
)
|
||||
|
||||
_is_valid_cut(G, set1, set2)
|
||||
_cut_is_locally_optimal(G, cut_size, set1)
|
||||
|
||||
|
||||
def test_one_exchange_optimal():
|
||||
# Greedy one exchange should find the optimal solution for this graph (14)
|
||||
G = nx.Graph()
|
||||
G.add_edge(1, 2, weight=3)
|
||||
G.add_edge(1, 3, weight=3)
|
||||
G.add_edge(1, 4, weight=3)
|
||||
G.add_edge(1, 5, weight=3)
|
||||
G.add_edge(2, 3, weight=5)
|
||||
|
||||
cut_size, (set1, set2) = maxcut.one_exchange(G, weight="weight", seed=5)
|
||||
|
||||
_is_valid_cut(G, set1, set2)
|
||||
_cut_is_locally_optimal(G, cut_size, set1)
|
||||
# check global optimality
|
||||
assert cut_size == 14
|
||||
|
||||
|
||||
def test_negative_weights():
|
||||
G = nx.complete_graph(5)
|
||||
random.seed(5)
|
||||
for u, v, w in G.edges(data=True):
|
||||
w["weight"] = -1 * random.random()
|
||||
|
||||
initial_cut = set(random.sample(sorted(G.nodes()), k=5))
|
||||
cut_size, (set1, set2) = maxcut.one_exchange(G, initial_cut, weight="weight")
|
||||
|
||||
# make sure it is a valid cut
|
||||
_is_valid_cut(G, set1, set2)
|
||||
# check local optimality
|
||||
_cut_is_locally_optimal(G, cut_size, set1)
|
||||
# test that all nodes are in the same partition
|
||||
assert len(set1) == len(G.nodes) or len(set2) == len(G.nodes)
|
||||
+31
@@ -0,0 +1,31 @@
|
||||
import networkx as nx
|
||||
import networkx.algorithms.approximation as apxa
|
||||
|
||||
|
||||
def test_ramsey():
|
||||
# this should only find the complete graph
|
||||
graph = nx.complete_graph(10)
|
||||
c, i = apxa.ramsey_R2(graph)
|
||||
cdens = nx.density(graph.subgraph(c))
|
||||
assert cdens == 1.0, "clique not correctly found by ramsey!"
|
||||
idens = nx.density(graph.subgraph(i))
|
||||
assert idens == 0.0, "i-set not correctly found by ramsey!"
|
||||
|
||||
# this trivial graph has no cliques. should just find i-sets
|
||||
graph = nx.trivial_graph()
|
||||
c, i = apxa.ramsey_R2(graph)
|
||||
assert c == {0}, "clique not correctly found by ramsey!"
|
||||
assert i == {0}, "i-set not correctly found by ramsey!"
|
||||
|
||||
graph = nx.barbell_graph(10, 5, nx.Graph())
|
||||
c, i = apxa.ramsey_R2(graph)
|
||||
cdens = nx.density(graph.subgraph(c))
|
||||
assert cdens == 1.0, "clique not correctly found by ramsey!"
|
||||
idens = nx.density(graph.subgraph(i))
|
||||
assert idens == 0.0, "i-set not correctly found by ramsey!"
|
||||
|
||||
# add self-loops and test again
|
||||
graph.add_edges_from([(n, n) for n in range(0, len(graph), 2)])
|
||||
cc, ii = apxa.ramsey_R2(graph)
|
||||
assert cc == c
|
||||
assert ii == i
|
||||
+265
@@ -0,0 +1,265 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation.steinertree import (
|
||||
_remove_nonterminal_leaves,
|
||||
metric_closure,
|
||||
steiner_tree,
|
||||
)
|
||||
from networkx.utils import edges_equal
|
||||
|
||||
|
||||
class TestSteinerTree:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
G1 = nx.Graph()
|
||||
G1.add_edge(1, 2, weight=10)
|
||||
G1.add_edge(2, 3, weight=10)
|
||||
G1.add_edge(3, 4, weight=10)
|
||||
G1.add_edge(4, 5, weight=10)
|
||||
G1.add_edge(5, 6, weight=10)
|
||||
G1.add_edge(2, 7, weight=1)
|
||||
G1.add_edge(7, 5, weight=1)
|
||||
|
||||
G2 = nx.Graph()
|
||||
G2.add_edge(0, 5, weight=6)
|
||||
G2.add_edge(1, 2, weight=2)
|
||||
G2.add_edge(1, 5, weight=3)
|
||||
G2.add_edge(2, 4, weight=4)
|
||||
G2.add_edge(3, 5, weight=5)
|
||||
G2.add_edge(4, 5, weight=1)
|
||||
|
||||
G3 = nx.Graph()
|
||||
G3.add_edge(1, 2, weight=8)
|
||||
G3.add_edge(1, 9, weight=3)
|
||||
G3.add_edge(1, 8, weight=6)
|
||||
G3.add_edge(1, 10, weight=2)
|
||||
G3.add_edge(1, 14, weight=3)
|
||||
G3.add_edge(2, 3, weight=6)
|
||||
G3.add_edge(3, 4, weight=3)
|
||||
G3.add_edge(3, 10, weight=2)
|
||||
G3.add_edge(3, 11, weight=1)
|
||||
G3.add_edge(4, 5, weight=1)
|
||||
G3.add_edge(4, 11, weight=1)
|
||||
G3.add_edge(5, 6, weight=4)
|
||||
G3.add_edge(5, 11, weight=2)
|
||||
G3.add_edge(5, 12, weight=1)
|
||||
G3.add_edge(5, 13, weight=3)
|
||||
G3.add_edge(6, 7, weight=2)
|
||||
G3.add_edge(6, 12, weight=3)
|
||||
G3.add_edge(6, 13, weight=1)
|
||||
G3.add_edge(7, 8, weight=3)
|
||||
G3.add_edge(7, 9, weight=3)
|
||||
G3.add_edge(7, 11, weight=5)
|
||||
G3.add_edge(7, 13, weight=2)
|
||||
G3.add_edge(7, 14, weight=4)
|
||||
G3.add_edge(8, 9, weight=2)
|
||||
G3.add_edge(9, 14, weight=1)
|
||||
G3.add_edge(10, 11, weight=2)
|
||||
G3.add_edge(10, 14, weight=1)
|
||||
G3.add_edge(11, 12, weight=1)
|
||||
G3.add_edge(11, 14, weight=7)
|
||||
G3.add_edge(12, 14, weight=3)
|
||||
G3.add_edge(12, 15, weight=1)
|
||||
G3.add_edge(13, 14, weight=4)
|
||||
G3.add_edge(13, 15, weight=1)
|
||||
G3.add_edge(14, 15, weight=2)
|
||||
|
||||
cls.G1 = G1
|
||||
cls.G2 = G2
|
||||
cls.G3 = G3
|
||||
cls.G1_term_nodes = [1, 2, 3, 4, 5]
|
||||
cls.G2_term_nodes = [0, 2, 3]
|
||||
cls.G3_term_nodes = [1, 3, 5, 6, 8, 10, 11, 12, 13]
|
||||
|
||||
cls.methods = ["kou", "mehlhorn"]
|
||||
|
||||
def test_connected_metric_closure(self):
|
||||
G = self.G1.copy()
|
||||
G.add_node(100)
|
||||
pytest.raises(nx.NetworkXError, metric_closure, G)
|
||||
|
||||
def test_metric_closure(self):
|
||||
M = metric_closure(self.G1)
|
||||
mc = [
|
||||
(1, 2, {"distance": 10, "path": [1, 2]}),
|
||||
(1, 3, {"distance": 20, "path": [1, 2, 3]}),
|
||||
(1, 4, {"distance": 22, "path": [1, 2, 7, 5, 4]}),
|
||||
(1, 5, {"distance": 12, "path": [1, 2, 7, 5]}),
|
||||
(1, 6, {"distance": 22, "path": [1, 2, 7, 5, 6]}),
|
||||
(1, 7, {"distance": 11, "path": [1, 2, 7]}),
|
||||
(2, 3, {"distance": 10, "path": [2, 3]}),
|
||||
(2, 4, {"distance": 12, "path": [2, 7, 5, 4]}),
|
||||
(2, 5, {"distance": 2, "path": [2, 7, 5]}),
|
||||
(2, 6, {"distance": 12, "path": [2, 7, 5, 6]}),
|
||||
(2, 7, {"distance": 1, "path": [2, 7]}),
|
||||
(3, 4, {"distance": 10, "path": [3, 4]}),
|
||||
(3, 5, {"distance": 12, "path": [3, 2, 7, 5]}),
|
||||
(3, 6, {"distance": 22, "path": [3, 2, 7, 5, 6]}),
|
||||
(3, 7, {"distance": 11, "path": [3, 2, 7]}),
|
||||
(4, 5, {"distance": 10, "path": [4, 5]}),
|
||||
(4, 6, {"distance": 20, "path": [4, 5, 6]}),
|
||||
(4, 7, {"distance": 11, "path": [4, 5, 7]}),
|
||||
(5, 6, {"distance": 10, "path": [5, 6]}),
|
||||
(5, 7, {"distance": 1, "path": [5, 7]}),
|
||||
(6, 7, {"distance": 11, "path": [6, 5, 7]}),
|
||||
]
|
||||
assert edges_equal(list(M.edges(data=True)), mc)
|
||||
|
||||
def test_steiner_tree(self):
|
||||
valid_steiner_trees = [
|
||||
[
|
||||
[
|
||||
(1, 2, {"weight": 10}),
|
||||
(2, 3, {"weight": 10}),
|
||||
(2, 7, {"weight": 1}),
|
||||
(3, 4, {"weight": 10}),
|
||||
(5, 7, {"weight": 1}),
|
||||
],
|
||||
[
|
||||
(1, 2, {"weight": 10}),
|
||||
(2, 7, {"weight": 1}),
|
||||
(3, 4, {"weight": 10}),
|
||||
(4, 5, {"weight": 10}),
|
||||
(5, 7, {"weight": 1}),
|
||||
],
|
||||
[
|
||||
(1, 2, {"weight": 10}),
|
||||
(2, 3, {"weight": 10}),
|
||||
(2, 7, {"weight": 1}),
|
||||
(4, 5, {"weight": 10}),
|
||||
(5, 7, {"weight": 1}),
|
||||
],
|
||||
],
|
||||
[
|
||||
[
|
||||
(0, 5, {"weight": 6}),
|
||||
(1, 2, {"weight": 2}),
|
||||
(1, 5, {"weight": 3}),
|
||||
(3, 5, {"weight": 5}),
|
||||
],
|
||||
[
|
||||
(0, 5, {"weight": 6}),
|
||||
(4, 2, {"weight": 4}),
|
||||
(4, 5, {"weight": 1}),
|
||||
(3, 5, {"weight": 5}),
|
||||
],
|
||||
],
|
||||
[
|
||||
[
|
||||
(1, 10, {"weight": 2}),
|
||||
(3, 10, {"weight": 2}),
|
||||
(3, 11, {"weight": 1}),
|
||||
(5, 12, {"weight": 1}),
|
||||
(6, 13, {"weight": 1}),
|
||||
(8, 9, {"weight": 2}),
|
||||
(9, 14, {"weight": 1}),
|
||||
(10, 14, {"weight": 1}),
|
||||
(11, 12, {"weight": 1}),
|
||||
(12, 15, {"weight": 1}),
|
||||
(13, 15, {"weight": 1}),
|
||||
]
|
||||
],
|
||||
]
|
||||
for method in self.methods:
|
||||
for G, term_nodes, valid_trees in zip(
|
||||
[self.G1, self.G2, self.G3],
|
||||
[self.G1_term_nodes, self.G2_term_nodes, self.G3_term_nodes],
|
||||
valid_steiner_trees,
|
||||
):
|
||||
S = steiner_tree(G, term_nodes, method=method)
|
||||
assert any(
|
||||
edges_equal(list(S.edges(data=True)), valid_tree)
|
||||
for valid_tree in valid_trees
|
||||
)
|
||||
|
||||
def test_multigraph_steiner_tree(self):
|
||||
G = nx.MultiGraph()
|
||||
G.add_edges_from(
|
||||
[
|
||||
(1, 2, 0, {"weight": 1}),
|
||||
(2, 3, 0, {"weight": 999}),
|
||||
(2, 3, 1, {"weight": 1}),
|
||||
(3, 4, 0, {"weight": 1}),
|
||||
(3, 5, 0, {"weight": 1}),
|
||||
]
|
||||
)
|
||||
terminal_nodes = [2, 4, 5]
|
||||
expected_edges = [
|
||||
(2, 3, 1, {"weight": 1}), # edge with key 1 has lower weight
|
||||
(3, 4, 0, {"weight": 1}),
|
||||
(3, 5, 0, {"weight": 1}),
|
||||
]
|
||||
for method in self.methods:
|
||||
S = steiner_tree(G, terminal_nodes, method=method)
|
||||
assert edges_equal(S.edges(data=True, keys=True), expected_edges)
|
||||
|
||||
def test_remove_nonterminal_leaves(self):
|
||||
G = nx.path_graph(10)
|
||||
_remove_nonterminal_leaves(G, [4, 5, 6])
|
||||
|
||||
assert list(G) == [4, 5, 6] # only the terminal nodes are left
|
||||
|
||||
|
||||
@pytest.mark.parametrize("method", ("kou", "mehlhorn"))
|
||||
def test_steiner_tree_weight_attribute(method):
|
||||
G = nx.star_graph(4)
|
||||
# Add an edge attribute that is named something other than "weight"
|
||||
nx.set_edge_attributes(G, {e: 10 for e in G.edges}, name="distance")
|
||||
H = nx.approximation.steiner_tree(G, [1, 3], method=method, weight="distance")
|
||||
assert nx.utils.edges_equal(H.edges, [(0, 1), (0, 3)])
|
||||
|
||||
|
||||
@pytest.mark.parametrize("method", ("kou", "mehlhorn"))
|
||||
def test_steiner_tree_multigraph_weight_attribute(method):
|
||||
G = nx.cycle_graph(3, create_using=nx.MultiGraph)
|
||||
nx.set_edge_attributes(G, {e: 10 for e in G.edges}, name="distance")
|
||||
G.add_edge(2, 0, distance=5)
|
||||
H = nx.approximation.steiner_tree(G, list(G), method=method, weight="distance")
|
||||
assert len(H.edges) == 2 and H.has_edge(2, 0, key=1)
|
||||
assert sum(dist for *_, dist in H.edges(data="distance")) == 15
|
||||
|
||||
|
||||
@pytest.mark.parametrize("method", (None, "mehlhorn", "kou"))
|
||||
def test_steiner_tree_methods(method):
|
||||
G = nx.star_graph(4)
|
||||
expected = nx.Graph([(0, 1), (0, 3)])
|
||||
st = nx.approximation.steiner_tree(G, [1, 3], method=method)
|
||||
assert nx.utils.edges_equal(st.edges, expected.edges)
|
||||
|
||||
|
||||
def test_steiner_tree_method_invalid():
|
||||
G = nx.star_graph(4)
|
||||
with pytest.raises(
|
||||
ValueError, match="invalid_method is not a valid choice for an algorithm."
|
||||
):
|
||||
nx.approximation.steiner_tree(G, terminal_nodes=[1, 3], method="invalid_method")
|
||||
|
||||
|
||||
def test_steiner_tree_remove_non_terminal_leaves_self_loop_edges():
|
||||
# To verify that the last step of the steiner tree approximation
|
||||
# behaves correctly when a non-terminal leaf has a self-loop edge
|
||||
G = nx.path_graph(10)
|
||||
|
||||
# Add self-loops to a mix of terminal and non-terminal nodes
|
||||
G.add_edges_from([(2, 2), (3, 3), (4, 4), (7, 7), (8, 8)])
|
||||
|
||||
# Remove non-terminal leaves
|
||||
_remove_nonterminal_leaves(G, [4, 5, 6, 7])
|
||||
|
||||
# The terminal nodes should be left
|
||||
assert list(G) == [4, 5, 6, 7] # only the terminal nodes are left
|
||||
|
||||
|
||||
def test_steiner_tree_non_terminal_leaves_multigraph_self_loop_edges():
|
||||
# To verify that the last step of the steiner tree approximation
|
||||
# behaves correctly when a non-terminal leaf has a self-loop edge
|
||||
G = nx.MultiGraph()
|
||||
G.add_edges_from([(i, i + 1) for i in range(10)])
|
||||
G.add_edges_from([(2, 2), (3, 3), (4, 4), (4, 4), (7, 7)])
|
||||
|
||||
# Remove non-terminal leaves
|
||||
_remove_nonterminal_leaves(G, [4, 5, 6, 7])
|
||||
|
||||
# Only the terminal nodes should be left
|
||||
assert list(G) == [4, 5, 6, 7]
|
||||
+977
@@ -0,0 +1,977 @@
|
||||
"""Unit tests for the traveling_salesman module."""
|
||||
|
||||
import random
|
||||
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
import networkx.algorithms.approximation as nx_app
|
||||
|
||||
pairwise = nx.utils.pairwise
|
||||
|
||||
|
||||
def test_christofides_hamiltonian():
|
||||
random.seed(42)
|
||||
G = nx.complete_graph(20)
|
||||
for u, v in G.edges():
|
||||
G[u][v]["weight"] = random.randint(0, 10)
|
||||
|
||||
H = nx.Graph()
|
||||
H.add_edges_from(pairwise(nx_app.christofides(G)))
|
||||
H.remove_edges_from(nx.find_cycle(H))
|
||||
assert len(H.edges) == 0
|
||||
|
||||
tree = nx.minimum_spanning_tree(G, weight="weight")
|
||||
H = nx.Graph()
|
||||
H.add_edges_from(pairwise(nx_app.christofides(G, tree)))
|
||||
H.remove_edges_from(nx.find_cycle(H))
|
||||
assert len(H.edges) == 0
|
||||
|
||||
|
||||
def test_christofides_incomplete_graph():
|
||||
G = nx.complete_graph(10)
|
||||
G.remove_edge(0, 1)
|
||||
pytest.raises(nx.NetworkXError, nx_app.christofides, G)
|
||||
|
||||
|
||||
def test_christofides_ignore_selfloops():
|
||||
G = nx.complete_graph(5)
|
||||
G.add_edge(3, 3)
|
||||
cycle = nx_app.christofides(G)
|
||||
assert len(cycle) - 1 == len(G) == len(set(cycle))
|
||||
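test_christofides_hamiltonian above checks the tour indirectly, by removing one cycle from the tour's edge set and asserting nothing remains. An equivalent direct Hamiltonicity check, sketched here as a self-contained example (the helper name is ours, not part of the test suite):

import networkx as nx
import networkx.algorithms.approximation as nx_app


def is_hamiltonian_cycle(G, cycle):
    # A Hamiltonian cycle returned as a node list starts and ends at the same
    # node, has length len(G) + 1, and visits every node of G.
    return (
        cycle[0] == cycle[-1]
        and len(cycle) == len(G) + 1
        and set(cycle) == set(G.nodes)
    )


G = nx.complete_graph(6)
assert is_hamiltonian_cycle(G, nx_app.christofides(G))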
|
||||
|
||||
# set up graphs for other tests
|
||||
class TestBase:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
cls.DG = nx.DiGraph()
|
||||
cls.DG.add_weighted_edges_from(
|
||||
{
|
||||
("A", "B", 3),
|
||||
("A", "C", 17),
|
||||
("A", "D", 14),
|
||||
("B", "A", 3),
|
||||
("B", "C", 12),
|
||||
("B", "D", 16),
|
||||
("C", "A", 13),
|
||||
("C", "B", 12),
|
||||
("C", "D", 4),
|
||||
("D", "A", 14),
|
||||
("D", "B", 15),
|
||||
("D", "C", 2),
|
||||
}
|
||||
)
|
||||
cls.DG_cycle = ["D", "C", "B", "A", "D"]
|
||||
cls.DG_cost = 31.0
|
||||
|
||||
cls.DG2 = nx.DiGraph()
|
||||
cls.DG2.add_weighted_edges_from(
|
||||
{
|
||||
("A", "B", 3),
|
||||
("A", "C", 17),
|
||||
("A", "D", 14),
|
||||
("B", "A", 30),
|
||||
("B", "C", 2),
|
||||
("B", "D", 16),
|
||||
("C", "A", 33),
|
||||
("C", "B", 32),
|
||||
("C", "D", 34),
|
||||
("D", "A", 14),
|
||||
("D", "B", 15),
|
||||
("D", "C", 2),
|
||||
}
|
||||
)
|
||||
cls.DG2_cycle = ["D", "A", "B", "C", "D"]
|
||||
cls.DG2_cost = 53.0
|
||||
|
||||
cls.unweightedUG = nx.complete_graph(5, nx.Graph())
|
||||
cls.unweightedDG = nx.complete_graph(5, nx.DiGraph())
|
||||
|
||||
cls.incompleteUG = nx.Graph()
|
||||
cls.incompleteUG.add_weighted_edges_from({(0, 1, 1), (1, 2, 3)})
|
||||
cls.incompleteDG = nx.DiGraph()
|
||||
cls.incompleteDG.add_weighted_edges_from({(0, 1, 1), (1, 2, 3)})
|
||||
|
||||
cls.UG = nx.Graph()
|
||||
cls.UG.add_weighted_edges_from(
|
||||
{
|
||||
("A", "B", 3),
|
||||
("A", "C", 17),
|
||||
("A", "D", 14),
|
||||
("B", "C", 12),
|
||||
("B", "D", 16),
|
||||
("C", "D", 4),
|
||||
}
|
||||
)
|
||||
cls.UG_cycle = ["D", "C", "B", "A", "D"]
|
||||
cls.UG_cost = 33.0
|
||||
|
||||
cls.UG2 = nx.Graph()
|
||||
cls.UG2.add_weighted_edges_from(
|
||||
{
|
||||
("A", "B", 1),
|
||||
("A", "C", 15),
|
||||
("A", "D", 5),
|
||||
("B", "C", 16),
|
||||
("B", "D", 8),
|
||||
("C", "D", 3),
|
||||
}
|
||||
)
|
||||
cls.UG2_cycle = ["D", "C", "B", "A", "D"]
|
||||
cls.UG2_cost = 25.0
|
||||
|
||||
|
||||
def validate_solution(soln, cost, exp_soln, exp_cost):
|
||||
assert soln == exp_soln
|
||||
assert cost == exp_cost
|
||||
|
||||
|
||||
def validate_symmetric_solution(soln, cost, exp_soln, exp_cost):
|
||||
assert soln == exp_soln or soln == exp_soln[::-1]
|
||||
assert cost == exp_cost
|
||||
|
||||
|
||||
class TestGreedyTSP(TestBase):
|
||||
def test_greedy(self):
|
||||
cycle = nx_app.greedy_tsp(self.DG, source="D")
|
||||
cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 31.0)
|
||||
|
||||
cycle = nx_app.greedy_tsp(self.DG2, source="D")
|
||||
cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 78.0)
|
||||
|
||||
cycle = nx_app.greedy_tsp(self.UG, source="D")
|
||||
cost = sum(self.UG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, ["D", "C", "B", "A", "D"], 33.0)
|
||||
|
||||
cycle = nx_app.greedy_tsp(self.UG2, source="D")
|
||||
cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, ["D", "C", "A", "B", "D"], 27.0)
|
||||
|
||||
def test_not_complete_graph(self):
|
||||
pytest.raises(nx.NetworkXError, nx_app.greedy_tsp, self.incompleteUG)
|
||||
pytest.raises(nx.NetworkXError, nx_app.greedy_tsp, self.incompleteDG)
|
||||
|
||||
def test_not_weighted_graph(self):
|
||||
nx_app.greedy_tsp(self.unweightedUG)
|
||||
nx_app.greedy_tsp(self.unweightedDG)
|
||||
|
||||
def test_two_nodes(self):
|
||||
G = nx.Graph()
|
||||
G.add_weighted_edges_from({(1, 2, 1)})
|
||||
cycle = nx_app.greedy_tsp(G)
|
||||
cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, [1, 2, 1], 2)
|
||||
|
||||
def test_ignore_selfloops(self):
|
||||
G = nx.complete_graph(5)
|
||||
G.add_edge(3, 3)
|
||||
cycle = nx_app.greedy_tsp(G)
|
||||
assert len(cycle) - 1 == len(G) == len(set(cycle))
|
||||
|
||||
|
||||
class TestSimulatedAnnealingTSP(TestBase):
|
||||
tsp = staticmethod(nx_app.simulated_annealing_tsp)
|
||||
|
||||
def test_simulated_annealing_directed(self):
|
||||
cycle = self.tsp(self.DG, "greedy", source="D", seed=42)
|
||||
cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)
|
||||
|
||||
initial_sol = ["D", "B", "A", "C", "D"]
|
||||
cycle = self.tsp(self.DG, initial_sol, source="D", seed=42)
|
||||
cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)
|
||||
|
||||
initial_sol = ["D", "A", "C", "B", "D"]
|
||||
cycle = self.tsp(self.DG, initial_sol, move="1-0", source="D", seed=42)
|
||||
cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, self.DG_cycle, self.DG_cost)
|
||||
|
||||
cycle = self.tsp(self.DG2, "greedy", source="D", seed=42)
|
||||
cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, self.DG2_cycle, self.DG2_cost)
|
||||
|
||||
cycle = self.tsp(self.DG2, "greedy", move="1-0", source="D", seed=42)
|
||||
cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, self.DG2_cycle, self.DG2_cost)
|
||||
|
||||
def test_simulated_annealing_undirected(self):
|
||||
cycle = self.tsp(self.UG, "greedy", source="D", seed=42)
|
||||
cost = sum(self.UG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, self.UG_cycle, self.UG_cost)
|
||||
|
||||
cycle = self.tsp(self.UG2, "greedy", source="D", seed=42)
|
||||
cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_symmetric_solution(cycle, cost, self.UG2_cycle, self.UG2_cost)
|
||||
|
||||
cycle = self.tsp(self.UG2, "greedy", move="1-0", source="D", seed=42)
|
||||
cost = sum(self.UG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_symmetric_solution(cycle, cost, self.UG2_cycle, self.UG2_cost)
|
||||
|
||||
def test_error_on_input_order_mistake(self):
|
||||
# see issue #4846 https://github.com/networkx/networkx/issues/4846
|
||||
pytest.raises(TypeError, self.tsp, self.UG, weight="weight")
|
||||
pytest.raises(nx.NetworkXError, self.tsp, self.UG, "weight")
|
||||
|
||||
def test_not_complete_graph(self):
|
||||
pytest.raises(nx.NetworkXError, self.tsp, self.incompleteUG, "greedy", source=0)
|
||||
pytest.raises(nx.NetworkXError, self.tsp, self.incompleteDG, "greedy", source=0)
|
||||
|
||||
def test_ignore_selfloops(self):
|
||||
G = nx.complete_graph(5)
|
||||
G.add_edge(3, 3)
|
||||
cycle = self.tsp(G, "greedy")
|
||||
assert len(cycle) - 1 == len(G) == len(set(cycle))
|
||||
|
||||
def test_not_weighted_graph(self):
|
||||
self.tsp(self.unweightedUG, "greedy")
|
||||
self.tsp(self.unweightedDG, "greedy")
|
||||
|
||||
def test_two_nodes(self):
|
||||
G = nx.Graph()
|
||||
G.add_weighted_edges_from({(1, 2, 1)})
|
||||
|
||||
cycle = self.tsp(G, "greedy", source=1, seed=42)
|
||||
cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, [1, 2, 1], 2)
|
||||
|
||||
cycle = self.tsp(G, [1, 2, 1], source=1, seed=42)
|
||||
cost = sum(G[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
validate_solution(cycle, cost, [1, 2, 1], 2)
|
||||
|
||||
def test_failure_of_costs_too_high_when_iterations_low(self):
|
||||
# Simulated Annealing Version:
|
||||
# set number of moves low and alpha high
|
||||
cycle = self.tsp(
|
||||
self.DG2, "greedy", source="D", move="1-0", alpha=1, N_inner=1, seed=42
|
||||
)
|
||||
cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
print(cycle, cost)
|
||||
assert cost > self.DG2_cost
|
||||
|
||||
# Try with an incorrect initial guess
|
||||
initial_sol = ["D", "A", "B", "C", "D"]
|
||||
cycle = self.tsp(
|
||||
self.DG,
|
||||
initial_sol,
|
||||
source="D",
|
||||
move="1-0",
|
||||
alpha=0.1,
|
||||
N_inner=1,
|
||||
max_iterations=1,
|
||||
seed=42,
|
||||
)
|
||||
cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
print(cycle, cost)
|
||||
assert cost > self.DG_cost
|
||||
|
||||
|
||||
class TestThresholdAcceptingTSP(TestSimulatedAnnealingTSP):
|
||||
tsp = staticmethod(nx_app.threshold_accepting_tsp)
|
||||
|
||||
def test_failure_of_costs_too_high_when_iterations_low(self):
|
||||
# Threshold Version:
|
||||
# set number of moves low and number of iterations low
|
||||
cycle = self.tsp(
|
||||
self.DG2,
|
||||
"greedy",
|
||||
source="D",
|
||||
move="1-0",
|
||||
N_inner=1,
|
||||
max_iterations=1,
|
||||
seed=4,
|
||||
)
|
||||
cost = sum(self.DG2[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
assert cost > self.DG2_cost
|
||||
|
||||
# set threshold too low
|
||||
initial_sol = ["D", "A", "B", "C", "D"]
|
||||
cycle = self.tsp(
|
||||
self.DG, initial_sol, source="D", move="1-0", threshold=-3, seed=42
|
||||
)
|
||||
cost = sum(self.DG[n][nbr]["weight"] for n, nbr in pairwise(cycle))
|
||||
assert cost > self.DG_cost
|
||||
|
||||
|
||||
# Tests for function traveling_salesman_problem
|
||||
def test_TSP_method():
|
||||
G = nx.cycle_graph(9)
|
||||
G[4][5]["weight"] = 10
|
||||
|
||||
# Test using the old currying method
|
||||
sa_tsp = lambda G, weight: nx_app.simulated_annealing_tsp(
|
||||
G, "greedy", weight, source=4, seed=1
|
||||
)
|
||||
|
||||
path = nx_app.traveling_salesman_problem(
|
||||
G,
|
||||
method=sa_tsp,
|
||||
cycle=False,
|
||||
)
|
||||
print(path)
|
||||
assert path == [4, 3, 2, 1, 0, 8, 7, 6, 5]
|
||||
|
||||
|
||||
def test_TSP_unweighted():
|
||||
G = nx.cycle_graph(9)
|
||||
path = nx_app.traveling_salesman_problem(G, nodes=[3, 6], cycle=False)
|
||||
assert path in ([3, 4, 5, 6], [6, 5, 4, 3])
|
||||
|
||||
cycle = nx_app.traveling_salesman_problem(G, nodes=[3, 6])
|
||||
assert cycle in ([3, 4, 5, 6, 5, 4, 3], [6, 5, 4, 3, 4, 5, 6])
|
||||
|
||||
|
||||
def test_TSP_weighted():
|
||||
G = nx.cycle_graph(9)
|
||||
G[0][1]["weight"] = 2
|
||||
G[1][2]["weight"] = 2
|
||||
G[2][3]["weight"] = 2
|
||||
G[3][4]["weight"] = 4
|
||||
G[4][5]["weight"] = 5
|
||||
G[5][6]["weight"] = 4
|
||||
G[6][7]["weight"] = 2
|
||||
G[7][8]["weight"] = 2
|
||||
G[8][0]["weight"] = 2
|
||||
tsp = nx_app.traveling_salesman_problem
|
||||
|
||||
# path between 3 and 6
|
||||
expected_paths = ([3, 2, 1, 0, 8, 7, 6], [6, 7, 8, 0, 1, 2, 3])
|
||||
# cycle between 3 and 6
|
||||
expected_cycles = (
|
||||
[3, 2, 1, 0, 8, 7, 6, 7, 8, 0, 1, 2, 3],
|
||||
[6, 7, 8, 0, 1, 2, 3, 2, 1, 0, 8, 7, 6],
|
||||
)
|
||||
# path through all nodes
|
||||
expected_tourpaths = ([5, 6, 7, 8, 0, 1, 2, 3, 4], [4, 3, 2, 1, 0, 8, 7, 6, 5])
|
||||
|
||||
# Check default method
|
||||
cycle = tsp(G, nodes=[3, 6], weight="weight")
|
||||
assert cycle in expected_cycles
|
||||
|
||||
path = tsp(G, nodes=[3, 6], weight="weight", cycle=False)
|
||||
assert path in expected_paths
|
||||
|
||||
tourpath = tsp(G, weight="weight", cycle=False)
|
||||
assert tourpath in expected_tourpaths
|
||||
|
||||
# Check all methods
|
||||
methods = [
|
||||
(nx_app.christofides, {}),
|
||||
(nx_app.greedy_tsp, {}),
|
||||
(
|
||||
nx_app.simulated_annealing_tsp,
|
||||
{"init_cycle": "greedy"},
|
||||
),
|
||||
(
|
||||
nx_app.threshold_accepting_tsp,
|
||||
{"init_cycle": "greedy"},
|
||||
),
|
||||
]
|
||||
for method, kwargs in methods:
|
||||
cycle = tsp(G, nodes=[3, 6], weight="weight", method=method, **kwargs)
|
||||
assert cycle in expected_cycles
|
||||
|
||||
path = tsp(
|
||||
G, nodes=[3, 6], weight="weight", method=method, cycle=False, **kwargs
|
||||
)
|
||||
assert path in expected_paths
|
||||
|
||||
tourpath = tsp(G, weight="weight", method=method, cycle=False, **kwargs)
|
||||
assert tourpath in expected_tourpaths
|
||||
|
||||
|
||||
def test_TSP_incomplete_graph_short_path():
|
||||
G = nx.cycle_graph(9)
|
||||
G.add_edges_from([(4, 9), (9, 10), (10, 11), (11, 0)])
|
||||
G[4][5]["weight"] = 5
|
||||
|
||||
cycle = nx_app.traveling_salesman_problem(G)
|
||||
print(cycle)
|
||||
assert len(cycle) == 17 and len(set(cycle)) == 12
|
||||
|
||||
# make sure that cutting one edge out of the complete graph formulation
|
||||
# removes many edges from the path of the TSP
|
||||
path = nx_app.traveling_salesman_problem(G, cycle=False)
|
||||
print(path)
|
||||
assert len(path) == 13 and len(set(path)) == 12
|
||||
|
||||
|
||||
def test_held_karp_ascent():
|
||||
"""
|
||||
Test the Held-Karp relaxation with the ascent method
|
||||
"""
|
||||
import networkx.algorithms.approximation.traveling_salesman as tsp
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
# Adjacency matrix from page 1153 of the 1970 Held and Karp paper
|
||||
# which has been edited to be directed, but kept symmetric
|
||||
G_array = np.array(
|
||||
[
|
||||
[0, 97, 60, 73, 17, 52],
|
||||
[97, 0, 41, 52, 90, 30],
|
||||
[60, 41, 0, 21, 35, 41],
|
||||
[73, 52, 21, 0, 95, 46],
|
||||
[17, 90, 35, 95, 0, 81],
|
||||
[52, 30, 41, 46, 81, 0],
|
||||
]
|
||||
)
|
||||
|
||||
solution_edges = [(1, 3), (2, 4), (3, 2), (4, 0), (5, 1), (0, 5)]
|
||||
|
||||
G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
|
||||
opt_hk, z_star = tsp.held_karp_ascent(G)
|
||||
|
||||
# Check that the optimal weights are the same
|
||||
assert round(opt_hk, 2) == 207.00
|
||||
# Check that the z_stars are the same
|
||||
solution = nx.DiGraph()
|
||||
solution.add_edges_from(solution_edges)
|
||||
assert nx.utils.edges_equal(z_star.edges, solution.edges)
|
||||
|
||||
|
||||
def test_ascent_fractional_solution():
|
||||
"""
|
||||
Test the ascent method using a modified version of Figure 2 on page 1140
|
||||
in 'The Traveling Salesman Problem and Minimum Spanning Trees' by Held and
|
||||
Karp
|
||||
"""
|
||||
import networkx.algorithms.approximation.traveling_salesman as tsp
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
# This version of Figure 2 has all of the edge weights multiplied by 100
|
||||
# and is a complete directed graph with infinite edge weights for the
|
||||
# edges not listed in the original graph
|
||||
G_array = np.array(
|
||||
[
|
||||
[0, 100, 100, 100000, 100000, 1],
|
||||
[100, 0, 100, 100000, 1, 100000],
|
||||
[100, 100, 0, 1, 100000, 100000],
|
||||
[100000, 100000, 1, 0, 100, 100],
|
||||
[100000, 1, 100000, 100, 0, 100],
|
||||
[1, 100000, 100000, 100, 100, 0],
|
||||
]
|
||||
)
|
||||
|
||||
solution_z_star = {
|
||||
(0, 1): 5 / 12,
|
||||
(0, 2): 5 / 12,
|
||||
(0, 5): 5 / 6,
|
||||
(1, 0): 5 / 12,
|
||||
(1, 2): 1 / 3,
|
||||
(1, 4): 5 / 6,
|
||||
(2, 0): 5 / 12,
|
||||
(2, 1): 1 / 3,
|
||||
(2, 3): 5 / 6,
|
||||
(3, 2): 5 / 6,
|
||||
(3, 4): 1 / 3,
|
||||
(3, 5): 1 / 2,
|
||||
(4, 1): 5 / 6,
|
||||
(4, 3): 1 / 3,
|
||||
(4, 5): 1 / 2,
|
||||
(5, 0): 5 / 6,
|
||||
(5, 3): 1 / 2,
|
||||
(5, 4): 1 / 2,
|
||||
}
|
||||
|
||||
G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
|
||||
opt_hk, z_star = tsp.held_karp_ascent(G)
|
||||
|
||||
# Check that the optimal weights are the same
|
||||
assert round(opt_hk, 2) == 303.00
|
||||
# Check that the z_stars are the same
|
||||
assert {key: round(z_star[key], 4) for key in z_star} == {
|
||||
key: round(solution_z_star[key], 4) for key in solution_z_star
|
||||
}
|
||||
|
||||
|
||||
def test_ascent_method_asymmetric():
|
||||
"""
|
||||
Tests the ascent method using a truly asymmetric graph for which the
|
||||
solution has been brute forced
|
||||
"""
|
||||
import networkx.algorithms.approximation.traveling_salesman as tsp
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
G_array = np.array(
|
||||
[
|
||||
[0, 26, 63, 59, 69, 31, 41],
|
||||
[62, 0, 91, 53, 75, 87, 47],
|
||||
[47, 82, 0, 90, 15, 9, 18],
|
||||
[68, 19, 5, 0, 58, 34, 93],
|
||||
[11, 58, 53, 55, 0, 61, 79],
|
||||
[88, 75, 13, 76, 98, 0, 40],
|
||||
[41, 61, 55, 88, 46, 45, 0],
|
||||
]
|
||||
)
|
||||
|
||||
solution_edges = [(0, 1), (1, 3), (3, 2), (2, 5), (5, 6), (4, 0), (6, 4)]
|
||||
|
||||
G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
|
||||
opt_hk, z_star = tsp.held_karp_ascent(G)
|
||||
|
||||
# Check that the optimal weights are the same
|
||||
assert round(opt_hk, 2) == 190.00
|
||||
# Check that the z_stars match.
|
||||
solution = nx.DiGraph()
|
||||
solution.add_edges_from(solution_edges)
|
||||
assert nx.utils.edges_equal(z_star.edges, solution.edges)
|
||||
|
||||
|
||||
def test_ascent_method_asymmetric_2():
|
||||
"""
|
||||
Tests the ascent method using a truly asymmetric graph for which the
|
||||
solution has been brute forced
|
||||
"""
|
||||
import networkx.algorithms.approximation.traveling_salesman as tsp
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
G_array = np.array(
|
||||
[
|
||||
[0, 45, 39, 92, 29, 31],
|
||||
[72, 0, 4, 12, 21, 60],
|
||||
[81, 6, 0, 98, 70, 53],
|
||||
[49, 71, 59, 0, 98, 94],
|
||||
[74, 95, 24, 43, 0, 47],
|
||||
[56, 43, 3, 65, 22, 0],
|
||||
]
|
||||
)
|
||||
|
||||
solution_edges = [(0, 5), (5, 4), (1, 3), (3, 0), (2, 1), (4, 2)]
|
||||
|
||||
G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
|
||||
opt_hk, z_star = tsp.held_karp_ascent(G)
|
||||
|
||||
# Check that the optimal weights are the same
|
||||
assert round(opt_hk, 2) == 144.00
|
||||
# Check that the z_stars match.
|
||||
solution = nx.DiGraph()
|
||||
solution.add_edges_from(solution_edges)
|
||||
assert nx.utils.edges_equal(z_star.edges, solution.edges)
|
||||
|
||||
|
||||
def test_held_karp_ascent_asymmetric_3():
|
||||
"""
|
||||
Tests the ascent method using a truly asymmetric graph with a fractional
|
||||
solution for which the solution has been brute forced.
|
||||
|
||||
In this graph there are two different optimal, integral solutions (which
|
||||
are also the overall atsp solutions) to the Held Karp relaxation. However,
|
||||
this particular graph has two different tours of optimal value and the
|
||||
possible solutions in the held_karp_ascent function are not stored in an
|
||||
ordered data structure.
|
||||
"""
|
||||
import networkx.algorithms.approximation.traveling_salesman as tsp
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
G_array = np.array(
|
||||
[
|
||||
[0, 1, 5, 2, 7, 4],
|
||||
[7, 0, 7, 7, 1, 4],
|
||||
[4, 7, 0, 9, 2, 1],
|
||||
[7, 2, 7, 0, 4, 4],
|
||||
[5, 5, 4, 4, 0, 3],
|
||||
[3, 9, 1, 3, 4, 0],
|
||||
]
|
||||
)
|
||||
|
||||
solution1_edges = [(0, 3), (1, 4), (2, 5), (3, 1), (4, 2), (5, 0)]
|
||||
|
||||
solution2_edges = [(0, 3), (3, 1), (1, 4), (4, 5), (2, 0), (5, 2)]
|
||||
|
||||
G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
|
||||
opt_hk, z_star = tsp.held_karp_ascent(G)
|
||||
|
||||
assert round(opt_hk, 2) == 13.00
|
||||
# Check that the z_stars are the same
|
||||
solution1 = nx.DiGraph()
|
||||
solution1.add_edges_from(solution1_edges)
|
||||
solution2 = nx.DiGraph()
|
||||
solution2.add_edges_from(solution2_edges)
|
||||
assert nx.utils.edges_equal(z_star.edges, solution1.edges) or nx.utils.edges_equal(
|
||||
z_star.edges, solution2.edges
|
||||
)
|
||||
|
||||
|
||||
def test_held_karp_ascent_fractional_asymmetric():
|
||||
"""
|
||||
Tests the ascent method using a truly asymmetric graph with a fractional
|
||||
solution for which the solution has been brute forced
|
||||
"""
|
||||
import networkx.algorithms.approximation.traveling_salesman as tsp
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
G_array = np.array(
|
||||
[
|
||||
[0, 100, 150, 100000, 100000, 1],
|
||||
[150, 0, 100, 100000, 1, 100000],
|
||||
[100, 150, 0, 1, 100000, 100000],
|
||||
[100000, 100000, 1, 0, 150, 100],
|
||||
[100000, 2, 100000, 100, 0, 150],
|
||||
[2, 100000, 100000, 150, 100, 0],
|
||||
]
|
||||
)
|
||||
|
||||
solution_z_star = {
|
||||
(0, 1): 5 / 12,
|
||||
(0, 2): 5 / 12,
|
||||
(0, 5): 5 / 6,
|
||||
(1, 0): 5 / 12,
|
||||
(1, 2): 5 / 12,
|
||||
(1, 4): 5 / 6,
|
||||
(2, 0): 5 / 12,
|
||||
(2, 1): 5 / 12,
|
||||
(2, 3): 5 / 6,
|
||||
(3, 2): 5 / 6,
|
||||
(3, 4): 5 / 12,
|
||||
(3, 5): 5 / 12,
|
||||
(4, 1): 5 / 6,
|
||||
(4, 3): 5 / 12,
|
||||
(4, 5): 5 / 12,
|
||||
(5, 0): 5 / 6,
|
||||
(5, 3): 5 / 12,
|
||||
(5, 4): 5 / 12,
|
||||
}
|
||||
|
||||
G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
|
||||
opt_hk, z_star = tsp.held_karp_ascent(G)
|
||||
|
||||
# Check that the optimal weights are the same
|
||||
assert round(opt_hk, 2) == 304.00
|
||||
# Check that the z_stars are the same
|
||||
assert {key: round(z_star[key], 4) for key in z_star} == {
|
||||
key: round(solution_z_star[key], 4) for key in solution_z_star
|
||||
}
|
||||
|
||||
|
||||
def test_spanning_tree_distribution():
|
||||
"""
|
||||
Test that we can create an exponential distribution of spanning trees such
|
||||
that the probability of each tree is proportional to the product of edge
|
||||
weights.
|
||||
|
||||
Results of this test have been confirmed with hypothesis testing from the
|
||||
created distribution.
|
||||
|
||||
This test uses the symmetric, fractional Held Karp solution.
|
||||
"""
|
||||
import networkx.algorithms.approximation.traveling_salesman as tsp
|
||||
|
||||
pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
z_star = {
|
||||
(0, 1): 5 / 12,
|
||||
(0, 2): 5 / 12,
|
||||
(0, 5): 5 / 6,
|
||||
(1, 0): 5 / 12,
|
||||
(1, 2): 1 / 3,
|
||||
(1, 4): 5 / 6,
|
||||
(2, 0): 5 / 12,
|
||||
(2, 1): 1 / 3,
|
||||
(2, 3): 5 / 6,
|
||||
(3, 2): 5 / 6,
|
||||
(3, 4): 1 / 3,
|
||||
(3, 5): 1 / 2,
|
||||
(4, 1): 5 / 6,
|
||||
(4, 3): 1 / 3,
|
||||
(4, 5): 1 / 2,
|
||||
(5, 0): 5 / 6,
|
||||
(5, 3): 1 / 2,
|
||||
(5, 4): 1 / 2,
|
||||
}
|
||||
|
||||
solution_gamma = {
|
||||
(0, 1): -0.6383,
|
||||
(0, 2): -0.6827,
|
||||
(0, 5): 0,
|
||||
(1, 2): -1.0781,
|
||||
(1, 4): 0,
|
||||
(2, 3): 0,
|
||||
(5, 3): -0.2820,
|
||||
(5, 4): -0.3327,
|
||||
(4, 3): -0.9927,
|
||||
}
|
||||
|
||||
# The undirected support of z_star
|
||||
G = nx.MultiGraph()
|
||||
for u, v in z_star:
|
||||
if (u, v) in G.edges or (v, u) in G.edges:
|
||||
continue
|
||||
G.add_edge(u, v)
|
||||
|
||||
gamma = tsp.spanning_tree_distribution(G, z_star)
|
||||
|
||||
assert {key: round(gamma[key], 4) for key in gamma} == solution_gamma
|
||||
|
||||
|
||||
def test_asadpour_tsp():
|
||||
"""
|
||||
Test the complete asadpour tsp algorithm with the fractional, symmetric
|
||||
Held Karp solution. This test also uses an incomplete graph as input.
|
||||
"""
|
||||
# This version of Figure 2 has all of the edge weights multiplied by 100
|
||||
# and the 0 weight edges have a weight of 1.
|
||||
pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
edge_list = [
|
||||
(0, 1, 100),
|
||||
(0, 2, 100),
|
||||
(0, 5, 1),
|
||||
(1, 2, 100),
|
||||
(1, 4, 1),
|
||||
(2, 3, 1),
|
||||
(3, 4, 100),
|
||||
(3, 5, 100),
|
||||
(4, 5, 100),
|
||||
(1, 0, 100),
|
||||
(2, 0, 100),
|
||||
(5, 0, 1),
|
||||
(2, 1, 100),
|
||||
(4, 1, 1),
|
||||
(3, 2, 1),
|
||||
(4, 3, 100),
|
||||
(5, 3, 100),
|
||||
(5, 4, 100),
|
||||
]
|
||||
|
||||
G = nx.DiGraph()
|
||||
G.add_weighted_edges_from(edge_list)
|
||||
|
||||
tour = nx_app.traveling_salesman_problem(
|
||||
G, weight="weight", method=nx_app.asadpour_atsp, seed=19
|
||||
)
|
||||
|
||||
# Check that the returned list is a valid tour. Because this is an
|
||||
# incomplete graph, the conditions are not as strict. We need the tour to
|
||||
#
|
||||
# Start and end at the same node
|
||||
# Pass through every vertex at least once
|
||||
# Have a total cost at most ln(6) / ln(ln(6)) = 3.0723 times the optimal
|
||||
#
|
||||
# For the second condition it is possible to have the tour pass through the
|
||||
# same vertex more than once. Imagine that the tour on the complete version takes
|
||||
# an edge not in the original graph. In the output this is substituted with
|
||||
# the shortest path between those vertices, allowing vertices to appear more
|
||||
# than once.
|
||||
#
|
||||
# Even though we are using a fixed seed, multiple tours have been known to
|
||||
# be returned. The first two are from the original development of this test,
|
||||
# and the third one from issue #5913 on GitHub. If other tours are returned,
|
||||
# add them to the list of expected tours.
|
||||
expected_tours = [
|
||||
[1, 4, 5, 0, 2, 3, 2, 1],
|
||||
[3, 2, 0, 1, 4, 5, 3],
|
||||
[3, 2, 1, 0, 5, 4, 3],
|
||||
]
|
||||
|
||||
assert tour in expected_tours
|
||||
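The looser conditions listed in the comment above (the tour is a closed walk that visits every vertex) could also be asserted directly. A hedged, self-contained sketch, not part of the original test:

import networkx as nx


def is_closed_covering_walk(G, tour):
    # The tour must return to its starting node and pass through every vertex
    # of G at least once; repeated intermediate vertices are allowed because
    # shortest-path substitution can revisit nodes on incomplete graphs.
    return tour[0] == tour[-1] and set(tour) == set(G.nodes)


# Every tour in expected_tours above satisfies this check, for example:
assert is_closed_covering_walk(nx.complete_graph(6), [1, 4, 5, 0, 2, 3, 2, 1])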
|
||||
|
||||
def test_asadpour_real_world():
|
||||
"""
|
||||
This test uses airline prices between the six largest cities in the US.
|
||||
|
||||
* New York City -> JFK
|
||||
* Los Angeles -> LAX
|
||||
* Chicago -> ORD
|
||||
* Houston -> IAH
|
||||
* Phoenix -> PHX
|
||||
* Philadelphia -> PHL
|
||||
|
||||
Flight prices from August 2021 using Delta or American airlines to get
|
||||
nonstop flights. The brute-force solution found the optimal tour to cost $872.
|
||||
|
||||
This test also uses the `source` keyword argument to ensure that the tour
|
||||
always starts at city 0.
|
||||
"""
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
G_array = np.array(
|
||||
[
|
||||
# JFK LAX ORD IAH PHX PHL
|
||||
[0, 243, 199, 208, 169, 183], # JFK
|
||||
[277, 0, 217, 123, 127, 252], # LAX
|
||||
[297, 197, 0, 197, 123, 177], # ORD
|
||||
[303, 169, 197, 0, 117, 117], # IAH
|
||||
[257, 127, 160, 117, 0, 319], # PHX
|
||||
[183, 332, 217, 117, 319, 0], # PHL
|
||||
]
|
||||
)
|
||||
|
||||
node_list = ["JFK", "LAX", "ORD", "IAH", "PHX", "PHL"]
|
||||
|
||||
expected_tours = [
|
||||
["JFK", "LAX", "PHX", "ORD", "IAH", "PHL", "JFK"],
|
||||
["JFK", "ORD", "PHX", "LAX", "IAH", "PHL", "JFK"],
|
||||
]
|
||||
|
||||
G = nx.from_numpy_array(G_array, nodelist=node_list, create_using=nx.DiGraph)
|
||||
|
||||
tour = nx_app.traveling_salesman_problem(
|
||||
G, weight="weight", method=nx_app.asadpour_atsp, seed=37, source="JFK"
|
||||
)
|
||||
|
||||
assert tour in expected_tours
|
||||
|
||||
|
||||
def test_asadpour_real_world_path():
|
||||
"""
|
||||
This test uses airline prices between the six largest cities in the US. This
|
||||
time using a path, not a cycle.
|
||||
|
||||
* New York City -> JFK
|
||||
* Los Angeles -> LAX
|
||||
* Chicago -> ORD
|
||||
* Houston -> IAH
|
||||
* Phoenix -> PHX
|
||||
* Philadelphia -> PHL
|
||||
|
||||
Flight prices from August 2021 using Delta or American airlines to get
|
||||
nonstop flights. The brute-force solution found the optimal tour to cost $872.
|
||||
"""
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
G_array = np.array(
|
||||
[
|
||||
# JFK LAX ORD IAH PHX PHL
|
||||
[0, 243, 199, 208, 169, 183], # JFK
|
||||
[277, 0, 217, 123, 127, 252], # LAX
|
||||
[297, 197, 0, 197, 123, 177], # ORD
|
||||
[303, 169, 197, 0, 117, 117], # IAH
|
||||
[257, 127, 160, 117, 0, 319], # PHX
|
||||
[183, 332, 217, 117, 319, 0], # PHL
|
||||
]
|
||||
)
|
||||
|
||||
node_list = ["JFK", "LAX", "ORD", "IAH", "PHX", "PHL"]
|
||||
|
||||
expected_paths = [
|
||||
["ORD", "PHX", "LAX", "IAH", "PHL", "JFK"],
|
||||
["JFK", "PHL", "IAH", "ORD", "PHX", "LAX"],
|
||||
]
|
||||
|
||||
G = nx.from_numpy_array(G_array, nodelist=node_list, create_using=nx.DiGraph)
|
||||
|
||||
path = nx_app.traveling_salesman_problem(
|
||||
G, weight="weight", cycle=False, method=nx_app.asadpour_atsp, seed=56
|
||||
)
|
||||
|
||||
assert path in expected_paths
|
||||
|
||||
|
||||
def test_asadpour_disconnected_graph():
|
||||
"""
|
||||
Test that the proper exception is raised when asadpour_atsp is given a
|
||||
disconnected graph.
|
||||
"""
|
||||
|
||||
G = nx.complete_graph(4, create_using=nx.DiGraph)
|
||||
# have to set edge weights so that if the exception is not raised, the
|
||||
# function will complete and we will fail the test
|
||||
nx.set_edge_attributes(G, 1, "weight")
|
||||
G.add_node(5)
|
||||
|
||||
pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G)
|
||||
|
||||
|
||||
def test_asadpour_incomplete_graph():
|
||||
"""
|
||||
Test that the proper exception is raised when asadpour_atsp is given an
|
||||
incomplete graph
|
||||
"""
|
||||
|
||||
G = nx.complete_graph(4, create_using=nx.DiGraph)
|
||||
# have to set edge weights so that if the exception is not raised, the
|
||||
# function will complete and we will fail the test
|
||||
nx.set_edge_attributes(G, 1, "weight")
|
||||
G.remove_edge(0, 1)
|
||||
|
||||
pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G)
|
||||
|
||||
|
||||
def test_asadpour_empty_graph():
|
||||
"""
|
||||
Test the asadpour_atsp function with an empty graph
|
||||
"""
|
||||
G = nx.DiGraph()
|
||||
|
||||
pytest.raises(nx.NetworkXError, nx_app.asadpour_atsp, G)
|
||||
|
||||
|
||||
@pytest.mark.slow
|
||||
def test_asadpour_integral_held_karp():
|
||||
"""
|
||||
This test uses an integral Held Karp solution, and the Held Karp function
|
||||
will return a graph rather than a dict, bypassing most of the asadpour
|
||||
algorithm.
|
||||
|
||||
At first glance, this test probably doesn't look like it ensures that we
|
||||
skip the rest of the asadpour algorithm, but it does. We are not fixing a
|
||||
seed for the random number generator, so if we sampled any spanning trees
|
||||
the approximation would be different basically every time this test is
|
||||
executed, but it is not, since Held Karp is deterministic and we do not
|
||||
reach the portion of the code with the dependence on random numbers.
|
||||
"""
|
||||
np = pytest.importorskip("numpy")
|
||||
|
||||
G_array = np.array(
|
||||
[
|
||||
[0, 26, 63, 59, 69, 31, 41],
|
||||
[62, 0, 91, 53, 75, 87, 47],
|
||||
[47, 82, 0, 90, 15, 9, 18],
|
||||
[68, 19, 5, 0, 58, 34, 93],
|
||||
[11, 58, 53, 55, 0, 61, 79],
|
||||
[88, 75, 13, 76, 98, 0, 40],
|
||||
[41, 61, 55, 88, 46, 45, 0],
|
||||
]
|
||||
)
|
||||
|
||||
G = nx.from_numpy_array(G_array, create_using=nx.DiGraph)
|
||||
|
||||
for _ in range(2):
|
||||
tour = nx_app.traveling_salesman_problem(G, method=nx_app.asadpour_atsp)
|
||||
|
||||
assert [1, 3, 2, 5, 2, 6, 4, 0, 1] == tour
|
||||
|
||||
|
||||
def test_directed_tsp_impossible():
|
||||
"""
|
||||
Test the asadpour algorithm with a graph that has no Hamiltonian circuit
|
||||
"""
|
||||
pytest.importorskip("numpy")
|
||||
|
||||
# In this graph, once we leave node 0 we cannot return
|
||||
edges = [
|
||||
(0, 1, 10),
|
||||
(0, 2, 11),
|
||||
(0, 3, 12),
|
||||
(1, 2, 4),
|
||||
(1, 3, 6),
|
||||
(2, 1, 3),
|
||||
(2, 3, 2),
|
||||
(3, 1, 5),
|
||||
(3, 2, 1),
|
||||
]
|
||||
|
||||
G = nx.DiGraph()
|
||||
G.add_weighted_edges_from(edges)
|
||||
|
||||
pytest.raises(nx.NetworkXError, nx_app.traveling_salesman_problem, G)
|
||||
+280
@@ -0,0 +1,280 @@
|
||||
import itertools
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import (
|
||||
treewidth_min_degree,
|
||||
treewidth_min_fill_in,
|
||||
)
|
||||
from networkx.algorithms.approximation.treewidth import (
|
||||
MinDegreeHeuristic,
|
||||
min_fill_in_heuristic,
|
||||
)
|
||||
|
||||
|
||||
def is_tree_decomp(graph, decomp):
|
||||
"""Check if the given tree decomposition is valid."""
|
||||
for x in graph.nodes():
|
||||
appear_once = False
|
||||
for bag in decomp.nodes():
|
||||
if x in bag:
|
||||
appear_once = True
|
||||
break
|
||||
assert appear_once
|
||||
|
||||
# Check if each connected pair of nodes are at least once together in a bag
|
||||
for x, y in graph.edges():
|
||||
appear_together = False
|
||||
for bag in decomp.nodes():
|
||||
if x in bag and y in bag:
|
||||
appear_together = True
|
||||
break
|
||||
assert appear_together
|
||||
|
||||
# Check if the nodes associated with vertex v form a connected subset of T
|
||||
for v in graph.nodes():
|
||||
subset = []
|
||||
for bag in decomp.nodes():
|
||||
if v in bag:
|
||||
subset.append(bag)
|
||||
sub_graph = decomp.subgraph(subset)
|
||||
assert nx.is_connected(sub_graph)
|
||||
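A quick usage example of the checker above on a graph with a known small treewidth (the cycle C5 has treewidth 2, which the min-degree heuristic attains); this is a hedged illustration and assumes the is_tree_decomp helper defined above is in scope:

import networkx as nx
from networkx.algorithms.approximation import treewidth_min_degree

G = nx.cycle_graph(5)
treewidth, decomp = treewidth_min_degree(G)
assert treewidth == 2
is_tree_decomp(G, decomp)  # runs the three decomposition checks defined above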
|
||||
|
||||
class TestTreewidthMinDegree:
|
||||
"""Unit tests for the min_degree function"""
|
||||
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
"""Setup for different kinds of trees"""
|
||||
cls.complete = nx.Graph()
|
||||
cls.complete.add_edge(1, 2)
|
||||
cls.complete.add_edge(2, 3)
|
||||
cls.complete.add_edge(1, 3)
|
||||
|
||||
cls.small_tree = nx.Graph()
|
||||
cls.small_tree.add_edge(1, 3)
|
||||
cls.small_tree.add_edge(4, 3)
|
||||
cls.small_tree.add_edge(2, 3)
|
||||
cls.small_tree.add_edge(3, 5)
|
||||
cls.small_tree.add_edge(5, 6)
|
||||
cls.small_tree.add_edge(5, 7)
|
||||
cls.small_tree.add_edge(6, 7)
|
||||
|
||||
cls.deterministic_graph = nx.Graph()
|
||||
cls.deterministic_graph.add_edge(0, 1) # deg(0) = 1
|
||||
|
||||
cls.deterministic_graph.add_edge(1, 2) # deg(1) = 2
|
||||
|
||||
cls.deterministic_graph.add_edge(2, 3)
|
||||
cls.deterministic_graph.add_edge(2, 4) # deg(2) = 3
|
||||
|
||||
cls.deterministic_graph.add_edge(3, 4)
|
||||
cls.deterministic_graph.add_edge(3, 5)
|
||||
cls.deterministic_graph.add_edge(3, 6) # deg(3) = 4
|
||||
|
||||
cls.deterministic_graph.add_edge(4, 5)
|
||||
cls.deterministic_graph.add_edge(4, 6)
|
||||
cls.deterministic_graph.add_edge(4, 7) # deg(4) = 5
|
||||
|
||||
cls.deterministic_graph.add_edge(5, 6)
|
||||
cls.deterministic_graph.add_edge(5, 7)
|
||||
cls.deterministic_graph.add_edge(5, 8)
|
||||
cls.deterministic_graph.add_edge(5, 9) # deg(5) = 6
|
||||
|
||||
cls.deterministic_graph.add_edge(6, 7)
|
||||
cls.deterministic_graph.add_edge(6, 8)
|
||||
cls.deterministic_graph.add_edge(6, 9) # deg(6) = 6
|
||||
|
||||
cls.deterministic_graph.add_edge(7, 8)
|
||||
cls.deterministic_graph.add_edge(7, 9) # deg(7) = 5
|
||||
|
||||
cls.deterministic_graph.add_edge(8, 9) # deg(8) = 4
|
||||
|
||||
def test_petersen_graph(self):
|
||||
"""Test Petersen graph tree decomposition result"""
|
||||
G = nx.petersen_graph()
|
||||
_, decomp = treewidth_min_degree(G)
|
||||
is_tree_decomp(G, decomp)
|
||||
|
||||
def test_small_tree_treewidth(self):
|
||||
"""Test small tree
|
||||
|
||||
Test if the computed treewidth of the known self.small_tree is 2.
|
||||
As we know which value we can expect from our heuristic, values other
|
||||
than two are regressions
|
||||
"""
|
||||
G = self.small_tree
|
||||
# the order of removal should be [1,2,4]3[5,6,7]
|
||||
# (with [] denoting any order of the containing nodes)
|
||||
# resulting in treewidth 2 for the heuristic
|
||||
treewidth, _ = treewidth_min_fill_in(G)
|
||||
assert treewidth == 2
|
||||
|
||||
def test_heuristic_abort(self):
|
||||
"""Test heuristic abort condition for fully connected graph"""
|
||||
graph = {}
|
||||
for u in self.complete:
|
||||
graph[u] = set()
|
||||
for v in self.complete[u]:
|
||||
if u != v: # ignore self-loop
|
||||
graph[u].add(v)
|
||||
|
||||
deg_heuristic = MinDegreeHeuristic(graph)
|
||||
node = deg_heuristic.best_node(graph)
|
||||
assert node is None
|
||||
|
||||
def test_empty_graph(self):
|
||||
"""Test empty graph"""
|
||||
G = nx.Graph()
|
||||
_, _ = treewidth_min_degree(G)
|
||||
|
||||
def test_two_component_graph(self):
|
||||
G = nx.Graph()
|
||||
G.add_node(1)
|
||||
G.add_node(2)
|
||||
treewidth, _ = treewidth_min_degree(G)
|
||||
assert treewidth == 0
|
||||
|
||||
def test_not_sortable_nodes(self):
|
||||
G = nx.Graph([(0, "a")])
|
||||
treewidth_min_degree(G)
|
||||
|
||||
def test_heuristic_first_steps(self):
|
||||
"""Test first steps of min_degree heuristic"""
|
||||
graph = {
|
||||
n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
|
||||
}
|
||||
deg_heuristic = MinDegreeHeuristic(graph)
|
||||
elim_node = deg_heuristic.best_node(graph)
|
||||
print(f"Graph {graph}:")
|
||||
steps = []
|
||||
|
||||
while elim_node is not None:
|
||||
print(f"Removing {elim_node}:")
|
||||
steps.append(elim_node)
|
||||
nbrs = graph[elim_node]
|
||||
|
||||
for u, v in itertools.permutations(nbrs, 2):
|
||||
if v not in graph[u]:
|
||||
graph[u].add(v)
|
||||
|
||||
for u in graph:
|
||||
if elim_node in graph[u]:
|
||||
graph[u].remove(elim_node)
|
||||
|
||||
del graph[elim_node]
|
||||
print(f"Graph {graph}:")
|
||||
elim_node = deg_heuristic.best_node(graph)
|
||||
|
||||
# check only the first 5 elements for equality
|
||||
assert steps[:5] == [0, 1, 2, 3, 4]
|
||||
|
||||
|
||||
class TestTreewidthMinFillIn:
|
||||
"""Unit tests for the treewidth_min_fill_in function."""
|
||||
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
"""Setup for different kinds of trees"""
|
||||
cls.complete = nx.Graph()
|
||||
cls.complete.add_edge(1, 2)
|
||||
cls.complete.add_edge(2, 3)
|
||||
cls.complete.add_edge(1, 3)
|
||||
|
||||
cls.small_tree = nx.Graph()
|
||||
cls.small_tree.add_edge(1, 2)
|
||||
cls.small_tree.add_edge(2, 3)
|
||||
cls.small_tree.add_edge(3, 4)
|
||||
cls.small_tree.add_edge(1, 4)
|
||||
cls.small_tree.add_edge(2, 4)
|
||||
cls.small_tree.add_edge(4, 5)
|
||||
cls.small_tree.add_edge(5, 6)
|
||||
cls.small_tree.add_edge(5, 7)
|
||||
cls.small_tree.add_edge(6, 7)
|
||||
|
||||
cls.deterministic_graph = nx.Graph()
|
||||
cls.deterministic_graph.add_edge(1, 2)
|
||||
cls.deterministic_graph.add_edge(1, 3)
|
||||
cls.deterministic_graph.add_edge(3, 4)
|
||||
cls.deterministic_graph.add_edge(2, 4)
|
||||
cls.deterministic_graph.add_edge(3, 5)
|
||||
cls.deterministic_graph.add_edge(4, 5)
|
||||
cls.deterministic_graph.add_edge(3, 6)
|
||||
cls.deterministic_graph.add_edge(5, 6)
|
||||
|
||||
def test_petersen_graph(self):
|
||||
"""Test Petersen graph tree decomposition result"""
|
||||
G = nx.petersen_graph()
|
||||
_, decomp = treewidth_min_fill_in(G)
|
||||
is_tree_decomp(G, decomp)
|
||||
|
||||
def test_small_tree_treewidth(self):
|
||||
"""Test if the computed treewidth of the known self.small_tree is 2"""
|
||||
G = self.small_tree
|
||||
# the order of removal should be [1,2,4]3[5,6,7]
|
||||
# (with [] denoting any order of the containing nodes)
|
||||
# resulting in treewidth 2 for the heuristic
|
||||
treewidth, _ = treewidth_min_fill_in(G)
|
||||
assert treewidth == 2
|
||||
|
||||
def test_heuristic_abort(self):
|
||||
"""Test if min_fill_in returns None for fully connected graph"""
|
||||
graph = {}
|
||||
for u in self.complete:
|
||||
graph[u] = set()
|
||||
for v in self.complete[u]:
|
||||
if u != v: # ignore self-loop
|
||||
graph[u].add(v)
|
||||
next_node = min_fill_in_heuristic(graph)
|
||||
if next_node is None:
|
||||
pass
|
||||
else:
|
||||
assert False
|
||||
|
||||
def test_empty_graph(self):
|
||||
"""Test empty graph"""
|
||||
G = nx.Graph()
|
||||
_, _ = treewidth_min_fill_in(G)
|
||||
|
||||
def test_two_component_graph(self):
|
||||
G = nx.Graph()
|
||||
G.add_node(1)
|
||||
G.add_node(2)
|
||||
treewidth, _ = treewidth_min_fill_in(G)
|
||||
assert treewidth == 0
|
||||
|
||||
def test_not_sortable_nodes(self):
|
||||
G = nx.Graph([(0, "a")])
|
||||
treewidth_min_fill_in(G)
|
||||
|
||||
def test_heuristic_first_steps(self):
|
||||
"""Test first steps of min_fill_in heuristic"""
|
||||
graph = {
|
||||
n: set(self.deterministic_graph[n]) - {n} for n in self.deterministic_graph
|
||||
}
|
||||
print(f"Graph {graph}:")
|
||||
elim_node = min_fill_in_heuristic(graph)
|
||||
steps = []
|
||||
|
||||
while elim_node is not None:
|
||||
print(f"Removing {elim_node}:")
|
||||
steps.append(elim_node)
|
||||
nbrs = graph[elim_node]
|
||||
|
||||
for u, v in itertools.permutations(nbrs, 2):
|
||||
if v not in graph[u]:
|
||||
graph[u].add(v)
|
||||
|
||||
for u in graph:
|
||||
if elim_node in graph[u]:
|
||||
graph[u].remove(elim_node)
|
||||
|
||||
del graph[elim_node]
|
||||
print(f"Graph {graph}:")
|
||||
elim_node = min_fill_in_heuristic(graph)
|
||||
|
||||
# check only the first 2 elements for equality
|
||||
assert steps[:2] == [6, 5]
|
||||
@@ -0,0 +1,68 @@
|
||||
import networkx as nx
|
||||
from networkx.algorithms.approximation import min_weighted_vertex_cover
|
||||
|
||||
|
||||
def is_cover(G, node_cover):
|
||||
return all({u, v} & node_cover for u, v in G.edges())
|
||||
|
||||
|
||||
class TestMWVC:
|
||||
"""Unit tests for the approximate minimum weighted vertex cover
|
||||
function,
|
||||
:func:`~networkx.algorithms.approximation.vertex_cover.min_weighted_vertex_cover`.
|
||||
|
||||
"""
|
||||
|
||||
def test_unweighted_directed(self):
|
||||
# Create a star graph in which half the nodes are directed in
|
||||
# and half are directed out.
|
||||
G = nx.DiGraph()
|
||||
G.add_edges_from((0, v) for v in range(1, 26))
|
||||
G.add_edges_from((v, 0) for v in range(26, 51))
|
||||
cover = min_weighted_vertex_cover(G)
|
||||
assert 1 == len(cover)
|
||||
assert is_cover(G, cover)
|
||||
|
||||
def test_unweighted_undirected(self):
|
||||
# create a simple star graph
|
||||
size = 50
|
||||
sg = nx.star_graph(size)
|
||||
cover = min_weighted_vertex_cover(sg)
|
||||
assert 1 == len(cover)
|
||||
assert is_cover(sg, cover)
|
||||
|
||||
def test_weighted(self):
|
||||
wg = nx.Graph()
|
||||
wg.add_node(0, weight=10)
|
||||
wg.add_node(1, weight=1)
|
||||
wg.add_node(2, weight=1)
|
||||
wg.add_node(3, weight=1)
|
||||
wg.add_node(4, weight=1)
|
||||
|
||||
wg.add_edge(0, 1)
|
||||
wg.add_edge(0, 2)
|
||||
wg.add_edge(0, 3)
|
||||
wg.add_edge(0, 4)
|
||||
|
||||
wg.add_edge(1, 2)
|
||||
wg.add_edge(2, 3)
|
||||
wg.add_edge(3, 4)
|
||||
wg.add_edge(4, 1)
|
||||
|
||||
cover = min_weighted_vertex_cover(wg, weight="weight")
|
||||
csum = sum(wg.nodes[node]["weight"] for node in cover)
|
||||
assert 4 == csum
|
||||
assert is_cover(wg, cover)
|
||||
|
||||
def test_unweighted_self_loop(self):
|
||||
slg = nx.Graph()
|
||||
slg.add_node(0)
|
||||
slg.add_node(1)
|
||||
slg.add_node(2)
|
||||
|
||||
slg.add_edge(0, 1)
|
||||
slg.add_edge(2, 2)
|
||||
|
||||
cover = min_weighted_vertex_cover(slg)
|
||||
assert 2 == len(cover)
|
||||
assert is_cover(slg, cover)
|
||||
+1501
File diff suppressed because it is too large
@@ -0,0 +1,252 @@
|
||||
"""Functions for computing treewidth decomposition.
|
||||
|
||||
Treewidth of an undirected graph is a number associated with the graph.
|
||||
It can be defined as the size of the largest vertex set (bag) in a tree
|
||||
decomposition of the graph minus one.
|
||||
|
||||
`Wikipedia: Treewidth <https://en.wikipedia.org/wiki/Treewidth>`_
|
||||
|
||||
The notions of treewidth and tree decomposition have gained their
|
||||
attractiveness partly because many graph and network problems that are
|
||||
intractable (e.g., NP-hard) on arbitrary graphs become efficiently
|
||||
solvable (e.g., with a linear time algorithm) when the treewidth of the
|
||||
input graphs is bounded by a constant [1]_ [2]_.
|
||||
|
||||
There are two different functions for computing a tree decomposition:
|
||||
:func:`treewidth_min_degree` and :func:`treewidth_min_fill_in`.
|
||||
|
||||
.. [1] Hans L. Bodlaender and Arie M. C. A. Koster. 2010. "Treewidth
|
||||
computations I. Upper bounds". Inf. Comput. 208, 3 (March 2010), 259-275.
|
||||
http://dx.doi.org/10.1016/j.ic.2009.03.008
|
||||
|
||||
.. [2] Hans L. Bodlaender. "Discovering Treewidth". Institute of Information
|
||||
and Computing Sciences, Utrecht University.
|
||||
Technical Report UU-CS-2005-018.
|
||||
http://www.cs.uu.nl
|
||||
|
||||
.. [3] K. Wang, Z. Lu, and J. Hicks *Treewidth*.
|
||||
https://web.archive.org/web/20210507025929/http://web.eecs.utk.edu/~cphill25/cs594_spring2015_projects/treewidth.pdf
|
||||
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import sys
|
||||
from heapq import heapify, heappop, heappush
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = ["treewidth_min_degree", "treewidth_min_fill_in"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(returns_graph=True)
|
||||
def treewidth_min_degree(G):
|
||||
"""Returns a treewidth decomposition using the Minimum Degree heuristic.
|
||||
|
||||
The heuristic chooses the nodes according to their degree, i.e., first
|
||||
the node with the lowest degree is chosen, then the graph is updated
|
||||
and the corresponding node is removed. Next, a new node with the lowest
|
||||
degree is chosen, and so on.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
Treewidth decomposition : (int, Graph) tuple
|
||||
2-tuple with treewidth and the corresponding decomposed tree.
|
||||
"""
|
||||
deg_heuristic = MinDegreeHeuristic(G)
|
||||
return treewidth_decomp(G, lambda graph: deg_heuristic.best_node(graph))
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(returns_graph=True)
|
||||
def treewidth_min_fill_in(G):
|
||||
"""Returns a treewidth decomposition using the Minimum Fill-in heuristic.
|
||||
|
||||
The heuristic chooses a node from the graph, where the number of edges
|
||||
added when turning the neighborhood of the chosen node into a clique is as
|
||||
small as possible.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
Treewidth decomposition : (int, Graph) tuple
|
||||
2-tuple with treewidth and the corresponding decomposed tree.
|
||||
"""
|
||||
return treewidth_decomp(G, min_fill_in_heuristic)
|
||||
|
||||
|
||||
class MinDegreeHeuristic:
|
||||
"""Implements the Minimum Degree heuristic.
|
||||
|
||||
The heuristic chooses the nodes according to their degree
|
||||
(number of neighbors), i.e., first the node with the lowest degree is
|
||||
chosen, then the graph is updated and the corresponding node is
|
||||
removed. Next, a new node with the lowest degree is chosen, and so on.
|
||||
"""
|
||||
|
||||
def __init__(self, graph):
|
||||
self._graph = graph
|
||||
|
||||
# nodes that have to be updated in the heap before each iteration
|
||||
self._update_nodes = []
|
||||
|
||||
self._degreeq = [] # a heapq with 3-tuples (degree,unique_id,node)
|
||||
self.count = itertools.count()
|
||||
|
||||
# build heap with initial degrees
|
||||
for n in graph:
|
||||
self._degreeq.append((len(graph[n]), next(self.count), n))
|
||||
heapify(self._degreeq)
|
||||
|
||||
def best_node(self, graph):
|
||||
# update nodes in self._update_nodes
|
||||
for n in self._update_nodes:
|
||||
# insert changed degrees into degreeq
|
||||
heappush(self._degreeq, (len(graph[n]), next(self.count), n))
|
||||
|
||||
# get the next valid (minimum degree) node
|
||||
while self._degreeq:
|
||||
(min_degree, _, elim_node) = heappop(self._degreeq)
|
||||
if elim_node not in graph or len(graph[elim_node]) != min_degree:
|
||||
# outdated entry in degreeq
|
||||
continue
|
||||
elif min_degree == len(graph) - 1:
|
||||
# fully connected: abort condition
|
||||
return None
|
||||
|
||||
# remember to update nodes in the heap before getting the next node
|
||||
self._update_nodes = graph[elim_node]
|
||||
return elim_node
|
||||
|
||||
# the heap is empty: abort
|
||||
return None
|
||||
|
||||
|
||||
def min_fill_in_heuristic(graph):
|
||||
"""Implements the Minimum Degree heuristic.
|
||||
|
||||
Returns the node from the graph, where the number of edges added when
|
||||
turning the neighborhood of the chosen node into a clique is as small as
|
||||
possible. This algorithm chooses the nodes using the Minimum Fill-In
|
||||
heuristic. The running time of the algorithm is :math:`O(V^3)` and it uses
|
||||
additional constant memory."""
|
||||
|
||||
if len(graph) == 0:
|
||||
return None
|
||||
|
||||
min_fill_in_node = None
|
||||
|
||||
min_fill_in = sys.maxsize
|
||||
|
||||
# sort nodes by degree
|
||||
nodes_by_degree = sorted(graph, key=lambda x: len(graph[x]))
|
||||
min_degree = len(graph[nodes_by_degree[0]])
|
||||
|
||||
# abort condition (handle complete graph)
|
||||
if min_degree == len(graph) - 1:
|
||||
return None
|
||||
|
||||
for node in nodes_by_degree:
|
||||
num_fill_in = 0
|
||||
nbrs = graph[node]
|
||||
for nbr in nbrs:
|
||||
# count how many nodes in nbrs current nbr is not connected to
|
||||
# subtract 1 for the node itself
|
||||
num_fill_in += len(nbrs - graph[nbr]) - 1
|
||||
if num_fill_in >= 2 * min_fill_in:
|
||||
break
|
||||
|
||||
num_fill_in /= 2 # divide by 2 because of double counting
|
||||
|
||||
if num_fill_in < min_fill_in: # update min-fill-in node
|
||||
if num_fill_in == 0:
|
||||
return node
|
||||
min_fill_in = num_fill_in
|
||||
min_fill_in_node = node
|
||||
|
||||
return min_fill_in_node
|
||||
|
||||
|
||||
@nx._dispatchable(returns_graph=True)
|
||||
def treewidth_decomp(G, heuristic=min_fill_in_heuristic):
|
||||
"""Returns a treewidth decomposition using the passed heuristic.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
heuristic : heuristic function
|
||||
|
||||
Returns
|
||||
-------
|
||||
Treewidth decomposition : (int, Graph) tuple
|
||||
2-tuple with treewidth and the corresponding decomposed tree.
|
||||
"""
|
||||
|
||||
# make dict-of-sets structure
|
||||
graph = {n: set(G[n]) - {n} for n in G}
|
||||
|
||||
# stack containing nodes and neighbors in the order from the heuristic
|
||||
node_stack = []
|
||||
|
||||
# get first node from heuristic
|
||||
elim_node = heuristic(graph)
|
||||
while elim_node is not None:
|
||||
# connect all neighbors with each other
|
||||
nbrs = graph[elim_node]
|
||||
for u, v in itertools.permutations(nbrs, 2):
|
||||
if v not in graph[u]:
|
||||
graph[u].add(v)
|
||||
|
||||
# push node and its current neighbors on stack
|
||||
node_stack.append((elim_node, nbrs))
|
||||
|
||||
# remove node from graph
|
||||
for u in graph[elim_node]:
|
||||
graph[u].remove(elim_node)
|
||||
|
||||
del graph[elim_node]
|
||||
elim_node = heuristic(graph)
|
||||
|
||||
# the abort condition is met; put all remaining nodes into one bag
|
||||
decomp = nx.Graph()
|
||||
first_bag = frozenset(graph.keys())
|
||||
decomp.add_node(first_bag)
|
||||
|
||||
treewidth = len(first_bag) - 1
|
||||
|
||||
while node_stack:
|
||||
# get node and its neighbors from the stack
|
||||
(curr_node, nbrs) = node_stack.pop()
|
||||
|
||||
# find a bag all neighbors are in
|
||||
old_bag = None
|
||||
for bag in decomp.nodes:
|
||||
if nbrs <= bag:
|
||||
old_bag = bag
|
||||
break
|
||||
|
||||
if old_bag is None:
|
||||
# no old_bag was found: just connect to the first_bag
|
||||
old_bag = first_bag
|
||||
|
||||
# create new node for decomposition
|
||||
nbrs.add(curr_node)
|
||||
new_bag = frozenset(nbrs)
|
||||
|
||||
# update treewidth
|
||||
treewidth = max(treewidth, len(new_bag) - 1)
|
||||
|
||||
# add edge to decomposition (implicitly also adds the new node)
|
||||
decomp.add_edge(old_bag, new_bag)
|
||||
|
||||
return treewidth, decomp
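
# Illustrative usage sketch (an editorial addition, not part of the upstream
# module): both public heuristics return an upper bound on the treewidth and a
# decomposition whose nodes are frozenset bags; this only runs when the file is
# executed directly.
if __name__ == "__main__":
    example = nx.petersen_graph()
    width_md, decomp_md = treewidth_min_degree(example)
    width_fi, decomp_fi = treewidth_min_fill_in(example)
    print("min_degree bound:", width_md, "bags:", decomp_md.number_of_nodes())
    print("min_fill_in bound:", width_fi, "bags:", decomp_fi.number_of_nodes())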
|
||||
@@ -0,0 +1,83 @@
|
||||
"""Functions for computing an approximate minimum weight vertex cover.
|
||||
|
||||
A |vertex cover|_ is a subset of nodes such that each edge in the graph
|
||||
is incident to at least one node in the subset.
|
||||
|
||||
.. _vertex cover: https://en.wikipedia.org/wiki/Vertex_cover
|
||||
.. |vertex cover| replace:: *vertex cover*
|
||||
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["min_weighted_vertex_cover"]
|
||||
|
||||
|
||||
@nx._dispatchable(node_attrs="weight")
|
||||
def min_weighted_vertex_cover(G, weight=None):
|
||||
r"""Returns an approximate minimum weighted vertex cover.
|
||||
|
||||
The set of nodes returned by this function is guaranteed to be a
|
||||
vertex cover, and the total weight of the set is guaranteed to be at
|
||||
most twice the total weight of the minimum weight vertex cover. In
|
||||
other words,
|
||||
|
||||
.. math::
|
||||
|
||||
w(S) \leq 2 * w(S^*),
|
||||
|
||||
where $S$ is the vertex cover returned by this function,
|
||||
$S^*$ is the vertex cover of minimum weight out of all vertex
|
||||
covers of the graph, and $w$ is the function that computes the
|
||||
sum of the weights of each node in that given set.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
weight : string, optional (default = None)
|
||||
If None, every node has weight 1. If a string, use this node
|
||||
attribute as the node weight. A node without this attribute is
|
||||
assumed to have weight 1.
|
||||
|
||||
Returns
|
||||
-------
|
||||
min_weighted_cover : set
|
||||
Returns a set of nodes whose weight sum is no more than twice
|
||||
the weight sum of the minimum weight vertex cover.
|
||||
|
||||
Notes
|
||||
-----
|
||||
For a directed graph, a vertex cover has the same definition: a set
|
||||
of nodes such that each edge in the graph is incident to at least
|
||||
one node in the set. Whether the node is the head or tail of the
|
||||
directed edge is ignored.
|
||||
|
||||
This is the local-ratio algorithm for computing an approximate
|
||||
vertex cover. The algorithm greedily reduces the costs over edges,
|
||||
iteratively building a cover. The worst-case runtime of this
|
||||
implementation is $O(m \log n)$, where $n$ is the number
|
||||
of nodes and $m$ the number of edges in the graph.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Bar-Yehuda, R., and Even, S. (1985). "A local-ratio theorem for
|
||||
approximating the weighted vertex cover problem."
|
||||
*Annals of Discrete Mathematics*, 25, 27–46
|
||||
<http://www.cs.technion.ac.il/~reuven/PDF/vc_lr.pdf>
|
||||
|
||||
"""
|
||||
cost = dict(G.nodes(data=weight, default=1))
|
||||
# While there are uncovered edges, choose an uncovered edge and update
|
||||
# the cost of the remaining edges.
|
||||
cover = set()
|
||||
for u, v in G.edges():
|
||||
if u in cover or v in cover:
|
||||
continue
|
||||
if cost[u] <= cost[v]:
|
||||
cover.add(u)
|
||||
cost[v] -= cost[u]
|
||||
else:
|
||||
cover.add(v)
|
||||
cost[u] -= cost[v]
|
||||
return cover
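
# Illustrative usage sketch (an editorial addition, not part of the upstream
# module): on a weighted star whose center is cheap, the local-ratio pass above
# picks only the center, well within the 2-approximation guarantee.
if __name__ == "__main__":
    demo = nx.star_graph(4)
    nx.set_node_attributes(demo, {0: 1, 1: 10, 2: 10, 3: 10, 4: 10}, "cost")
    demo_cover = min_weighted_vertex_cover(demo, weight="cost")
    print(demo_cover, sum(demo.nodes[n]["cost"] for n in demo_cover))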
|
||||
@@ -0,0 +1,5 @@
|
||||
from networkx.algorithms.assortativity.connectivity import *
|
||||
from networkx.algorithms.assortativity.correlation import *
|
||||
from networkx.algorithms.assortativity.mixing import *
|
||||
from networkx.algorithms.assortativity.neighbor_degree import *
|
||||
from networkx.algorithms.assortativity.pairs import *
|
||||
@@ -0,0 +1,122 @@
|
||||
from collections import defaultdict
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["average_degree_connectivity"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def average_degree_connectivity(
|
||||
G, source="in+out", target="in+out", nodes=None, weight=None
|
||||
):
|
||||
r"""Compute the average degree connectivity of graph.
|
||||
|
||||
The average degree connectivity is the average nearest neighbor degree of
|
||||
nodes with degree k. For weighted graphs, an analogous measure can
|
||||
be computed using the weighted average neighbors degree defined in
|
||||
[1]_, for a node `i`, as
|
||||
|
||||
.. math::
|
||||
|
||||
k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j
|
||||
|
||||
where `s_i` is the weighted degree of node `i`,
|
||||
`w_{ij}` is the weight of the edge that links `i` and `j`,
|
||||
and `N(i)` are the neighbors of node `i`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
source : "in"|"out"|"in+out" (default:"in+out")
|
||||
Directed graphs only. Use "in"- or "out"-degree for source node.
|
||||
|
||||
target : "in"|"out"|"in+out" (default:"in+out"
|
||||
Directed graphs only. Use "in"- or "out"-degree for target node.
|
||||
|
||||
nodes : list or iterable (optional)
|
||||
Compute neighbor connectivity for these nodes. The default is all
|
||||
nodes.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used as a weight.
|
||||
If None, then each edge has weight 1.
|
||||
|
||||
Returns
|
||||
-------
|
||||
d : dict
|
||||
A dictionary keyed by degree k with the value of average connectivity.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If either `source` or `target` is not one of 'in',
|
||||
'out', or 'in+out'.
|
||||
If either `source` or `target` is passed for an undirected graph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.edges[1, 2]["weight"] = 3
|
||||
>>> nx.average_degree_connectivity(G)
|
||||
{1: 2.0, 2: 1.5}
|
||||
>>> nx.average_degree_connectivity(G, weight="weight")
|
||||
{1: 2.0, 2: 1.75}
|
||||
|
||||
See Also
|
||||
--------
|
||||
average_neighbor_degree
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
|
||||
"The architecture of complex weighted networks".
|
||||
PNAS 101 (11): 3747–3752 (2004).
|
||||
"""
|
||||
# First, determine the type of neighbors and the type of degree to use.
|
||||
if G.is_directed():
|
||||
if source not in ("in", "out", "in+out"):
|
||||
raise nx.NetworkXError('source must be one of "in", "out", or "in+out"')
|
||||
if target not in ("in", "out", "in+out"):
|
||||
raise nx.NetworkXError('target must be one of "in", "out", or "in+out"')
|
||||
direction = {"out": G.out_degree, "in": G.in_degree, "in+out": G.degree}
|
||||
neighbor_funcs = {
|
||||
"out": G.successors,
|
||||
"in": G.predecessors,
|
||||
"in+out": G.neighbors,
|
||||
}
|
||||
source_degree = direction[source]
|
||||
target_degree = direction[target]
|
||||
neighbors = neighbor_funcs[source]
|
||||
# `reverse` indicates whether to look at the in-edge when
|
||||
# computing the weight of an edge.
|
||||
reverse = source == "in"
|
||||
else:
|
||||
if source != "in+out" or target != "in+out":
|
||||
raise nx.NetworkXError(
|
||||
f"source and target arguments are only supported for directed graphs"
|
||||
)
|
||||
source_degree = G.degree
|
||||
target_degree = G.degree
|
||||
neighbors = G.neighbors
|
||||
reverse = False
|
||||
dsum = defaultdict(int)
|
||||
dnorm = defaultdict(int)
|
||||
# Check if `source_nodes` is actually a single node in the graph.
|
||||
source_nodes = source_degree(nodes)
|
||||
if nodes in G:
|
||||
source_nodes = [(nodes, source_degree(nodes))]
|
||||
for n, k in source_nodes:
|
||||
nbrdeg = target_degree(neighbors(n))
|
||||
if weight is None:
|
||||
s = sum(d for n, d in nbrdeg)
|
||||
else: # weight nbr degree by weight of (n,nbr) edge
|
||||
if reverse:
|
||||
s = sum(G[nbr][n].get(weight, 1) * d for nbr, d in nbrdeg)
|
||||
else:
|
||||
s = sum(G[n][nbr].get(weight, 1) * d for nbr, d in nbrdeg)
|
||||
dnorm[k] += source_degree(n, weight=weight)
|
||||
dsum[k] += s
|
||||
|
||||
# normalize
|
||||
return {k: avg if dnorm[k] == 0 else avg / dnorm[k] for k, avg in dsum.items()}
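
# Illustrative sketch (an editorial addition, not part of the upstream module):
# the `source`/`target` options group and average by in- or out-degree on a
# directed graph; shown here on a tiny directed path.
if __name__ == "__main__":
    demo = nx.DiGraph([(0, 1), (1, 2)])
    print(average_degree_connectivity(demo, source="out", target="out"))
    print(average_degree_connectivity(demo, source="in", target="in"))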
|
||||
@@ -0,0 +1,302 @@
|
||||
"""Node assortativity coefficients and correlation measures."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.assortativity.mixing import (
|
||||
attribute_mixing_matrix,
|
||||
degree_mixing_matrix,
|
||||
)
|
||||
from networkx.algorithms.assortativity.pairs import node_degree_xy
|
||||
|
||||
__all__ = [
|
||||
"degree_pearson_correlation_coefficient",
|
||||
"degree_assortativity_coefficient",
|
||||
"attribute_assortativity_coefficient",
|
||||
"numeric_assortativity_coefficient",
|
||||
]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def degree_assortativity_coefficient(G, x="out", y="in", weight=None, nodes=None):
|
||||
"""Compute degree assortativity of graph.
|
||||
|
||||
Assortativity measures the similarity of connections
|
||||
in the graph with respect to the node degree.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
x: string ('in','out')
|
||||
The degree type for source node (directed graphs only).
|
||||
|
||||
y: string ('in','out')
|
||||
The degree type for target node (directed graphs only).
|
||||
|
||||
weight: string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used
|
||||
as a weight. If None, then each edge has weight 1.
|
||||
The degree is the sum of the edge weights adjacent to the node.
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Compute degree assortativity only for nodes in container.
|
||||
The default is all nodes.
|
||||
|
||||
Returns
|
||||
-------
|
||||
r : float
|
||||
Assortativity of graph by degree.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> r = nx.degree_assortativity_coefficient(G)
|
||||
>>> print(f"{r:3.1f}")
|
||||
-0.5
|
||||
|
||||
See Also
|
||||
--------
|
||||
attribute_assortativity_coefficient
|
||||
numeric_assortativity_coefficient
|
||||
degree_mixing_dict
|
||||
degree_mixing_matrix
|
||||
|
||||
Notes
|
||||
-----
|
||||
This computes Eq. (21) in Ref. [1]_ , where e is the joint
|
||||
probability distribution (mixing matrix) of the degrees. If G is
|
||||
directed then the matrix e is the joint probability of the
|
||||
user-specified degree type for the source and target.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M. E. J. Newman, Mixing patterns in networks,
|
||||
Physical Review E, 67 026126, 2003
|
||||
.. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
|
||||
Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
|
||||
"""
|
||||
if nodes is None:
|
||||
nodes = G.nodes
|
||||
|
||||
degrees = None
|
||||
|
||||
if G.is_directed():
|
||||
indeg = (
|
||||
{d for _, d in G.in_degree(nodes, weight=weight)}
|
||||
if "in" in (x, y)
|
||||
else set()
|
||||
)
|
||||
outdeg = (
|
||||
{d for _, d in G.out_degree(nodes, weight=weight)}
|
||||
if "out" in (x, y)
|
||||
else set()
|
||||
)
|
||||
degrees = set.union(indeg, outdeg)
|
||||
else:
|
||||
degrees = {d for _, d in G.degree(nodes, weight=weight)}
|
||||
|
||||
mapping = {d: i for i, d in enumerate(degrees)}
|
||||
M = degree_mixing_matrix(G, x=x, y=y, nodes=nodes, weight=weight, mapping=mapping)
|
||||
|
||||
return _numeric_ac(M, mapping=mapping)
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def degree_pearson_correlation_coefficient(G, x="out", y="in", weight=None, nodes=None):
|
||||
"""Compute degree assortativity of graph.
|
||||
|
||||
Assortativity measures the similarity of connections
|
||||
in the graph with respect to the node degree.
|
||||
|
||||
This is the same as degree_assortativity_coefficient but uses the
|
||||
potentially faster scipy.stats.pearsonr function.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
x: string ('in','out')
|
||||
The degree type for source node (directed graphs only).
|
||||
|
||||
y: string ('in','out')
|
||||
The degree type for target node (directed graphs only).
|
||||
|
||||
weight: string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used
|
||||
as a weight. If None, then each edge has weight 1.
|
||||
The degree is the sum of the edge weights adjacent to the node.
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Compute pearson correlation of degrees only for specified nodes.
|
||||
The default is all nodes.
|
||||
|
||||
Returns
|
||||
-------
|
||||
r : float
|
||||
Assortativity of graph by degree.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> r = nx.degree_pearson_correlation_coefficient(G)
|
||||
>>> print(f"{r:3.1f}")
|
||||
-0.5
|
||||
|
||||
Notes
|
||||
-----
|
||||
This calls scipy.stats.pearsonr.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M. E. J. Newman, Mixing patterns in networks
|
||||
Physical Review E, 67 026126, 2003
|
||||
.. [2] Foster, J.G., Foster, D.V., Grassberger, P. & Paczuski, M.
|
||||
Edge direction and the structure of networks, PNAS 107, 10815-20 (2010).
|
||||
"""
|
||||
import scipy as sp
|
||||
|
||||
xy = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
|
||||
x, y = zip(*xy)
|
||||
return float(sp.stats.pearsonr(x, y)[0])
|
||||
|
||||
|
||||
@nx._dispatchable(node_attrs="attribute")
|
||||
def attribute_assortativity_coefficient(G, attribute, nodes=None):
|
||||
"""Compute assortativity for node attributes.
|
||||
|
||||
Assortativity measures the similarity of connections
|
||||
in the graph with respect to the given attribute.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
attribute : string
|
||||
Node attribute key
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Compute attribute assortativity for nodes in container.
|
||||
The default is all nodes.
|
||||
|
||||
Returns
|
||||
-------
|
||||
r: float
|
||||
Assortativity of graph for given attribute
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.Graph()
|
||||
>>> G.add_nodes_from([0, 1], color="red")
|
||||
>>> G.add_nodes_from([2, 3], color="blue")
|
||||
>>> G.add_edges_from([(0, 1), (2, 3)])
|
||||
>>> print(nx.attribute_assortativity_coefficient(G, "color"))
|
||||
1.0
|
||||
|
||||
Notes
|
||||
-----
|
||||
This computes Eq. (2) in Ref. [1]_ , (trace(M)-sum(M^2))/(1-sum(M^2)),
|
||||
where M is the joint probability distribution (mixing matrix)
|
||||
of the specified attribute.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M. E. J. Newman, Mixing patterns in networks,
|
||||
Physical Review E, 67 026126, 2003
|
||||
"""
|
||||
M = attribute_mixing_matrix(G, attribute, nodes)
|
||||
return attribute_ac(M)
|
||||
|
||||
|
||||
@nx._dispatchable(node_attrs="attribute")
|
||||
def numeric_assortativity_coefficient(G, attribute, nodes=None):
|
||||
"""Compute assortativity for numerical node attributes.
|
||||
|
||||
Assortativity measures the similarity of connections
|
||||
in the graph with respect to the given numeric attribute.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
attribute : string
|
||||
Node attribute key.
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Compute numeric assortativity only for attributes of nodes in
|
||||
container. The default is all nodes.
|
||||
|
||||
Returns
|
||||
-------
|
||||
r: float
|
||||
Assortativity of graph for given attribute
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.Graph()
|
||||
>>> G.add_nodes_from([0, 1], size=2)
|
||||
>>> G.add_nodes_from([2, 3], size=3)
|
||||
>>> G.add_edges_from([(0, 1), (2, 3)])
|
||||
>>> print(nx.numeric_assortativity_coefficient(G, "size"))
|
||||
1.0
|
||||
|
||||
Notes
|
||||
-----
|
||||
This computes Eq. (21) in Ref. [1]_ , which is the Pearson correlation
|
||||
coefficient of the specified (scalar valued) attribute across edges.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M. E. J. Newman, Mixing patterns in networks
|
||||
Physical Review E, 67 026126, 2003
|
||||
"""
|
||||
if nodes is None:
|
||||
nodes = G.nodes
|
||||
vals = {G.nodes[n][attribute] for n in nodes}
|
||||
mapping = {d: i for i, d in enumerate(vals)}
|
||||
M = attribute_mixing_matrix(G, attribute, nodes, mapping)
|
||||
return _numeric_ac(M, mapping)
|
||||
|
||||
|
||||
def attribute_ac(M):
|
||||
"""Compute assortativity for attribute matrix M.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
M : numpy.ndarray
|
||||
2D ndarray representing the attribute mixing matrix.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This computes Eq. (2) in Ref. [1]_ , (trace(e)-sum(e^2))/(1-sum(e^2)),
|
||||
where e is the joint probability distribution (mixing matrix)
|
||||
of the specified attribute.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M. E. J. Newman, Mixing patterns in networks,
|
||||
Physical Review E, 67 026126, 2003
|
||||
"""
|
||||
if M.sum() != 1.0:
|
||||
M = M / M.sum()
|
||||
s = (M @ M).sum()
|
||||
t = M.trace()
|
||||
r = (t - s) / (1 - s)
|
||||
return float(r)
|
||||
|
||||
|
||||
def _numeric_ac(M, mapping):
|
||||
# M is a 2D numpy array
|
||||
# numeric assortativity coefficient, pearsonr
|
||||
import numpy as np
|
||||
|
||||
if M.sum() != 1.0:
|
||||
M = M / M.sum()
|
||||
x = np.array(list(mapping.keys()))
|
||||
y = x # x and y have the same support
|
||||
idx = list(mapping.values())
|
||||
a = M.sum(axis=0)
|
||||
b = M.sum(axis=1)
|
||||
vara = (a[idx] * x**2).sum() - ((a[idx] * x).sum()) ** 2
|
||||
varb = (b[idx] * y**2).sum() - ((b[idx] * y).sum()) ** 2
|
||||
xy = np.outer(x, y)
|
||||
ab = np.outer(a[idx], b[idx])
|
||||
return float((xy * (M - ab)).sum() / np.sqrt(vara * varb))
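
# Numeric sanity sketch (an editorial addition, not part of the upstream
# module): Eq. (2), (trace(e) - sum(e^2)) / (1 - sum(e^2)), evaluated by hand
# on a small mixing matrix should match the helper above.
if __name__ == "__main__":
    import numpy as np

    e = np.array([[0.4, 0.1], [0.1, 0.4]])
    s = (e @ e).sum()
    print((e.trace() - s) / (1 - s), attribute_ac(e))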
|
||||
@@ -0,0 +1,255 @@
|
||||
"""
|
||||
Mixing matrices for node attributes and degree.
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.assortativity.pairs import node_attribute_xy, node_degree_xy
|
||||
from networkx.utils import dict_to_numpy_array
|
||||
|
||||
__all__ = [
|
||||
"attribute_mixing_matrix",
|
||||
"attribute_mixing_dict",
|
||||
"degree_mixing_matrix",
|
||||
"degree_mixing_dict",
|
||||
"mixing_dict",
|
||||
]
|
||||
|
||||
|
||||
@nx._dispatchable(node_attrs="attribute")
|
||||
def attribute_mixing_dict(G, attribute, nodes=None, normalized=False):
|
||||
"""Returns dictionary representation of mixing matrix for attribute.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
NetworkX graph object.
|
||||
|
||||
attribute : string
|
||||
Node attribute key.
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Use nodes in container to build the dict. The default is all nodes.
|
||||
|
||||
normalized : bool (default=False)
|
||||
Return counts if False or probabilities if True.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.Graph()
|
||||
>>> G.add_nodes_from([0, 1], color="red")
|
||||
>>> G.add_nodes_from([2, 3], color="blue")
|
||||
>>> G.add_edge(1, 3)
|
||||
>>> d = nx.attribute_mixing_dict(G, "color")
|
||||
>>> print(d["red"]["blue"])
|
||||
1
|
||||
>>> print(d["blue"]["red"]) # d symmetric for undirected graphs
|
||||
1
|
||||
|
||||
Returns
|
||||
-------
|
||||
d : dictionary
|
||||
Counts or joint probability of occurrence of attribute pairs.
|
||||
"""
|
||||
xy_iter = node_attribute_xy(G, attribute, nodes)
|
||||
return mixing_dict(xy_iter, normalized=normalized)
|
||||
|
||||
|
||||
@nx._dispatchable(node_attrs="attribute")
|
||||
def attribute_mixing_matrix(G, attribute, nodes=None, mapping=None, normalized=True):
|
||||
"""Returns mixing matrix for attribute.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
NetworkX graph object.
|
||||
|
||||
attribute : string
|
||||
Node attribute key.
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Use only nodes in container to build the matrix. The default is
|
||||
all nodes.
|
||||
|
||||
mapping : dictionary, optional
|
||||
Mapping from node attribute to integer index in matrix.
|
||||
If not specified, an arbitrary ordering will be used.
|
||||
|
||||
normalized : bool (default=True)
|
||||
Return counts if False or probabilities if True.
|
||||
|
||||
Returns
|
||||
-------
|
||||
m: numpy array
|
||||
Counts or joint probability of occurrence of attribute pairs.
|
||||
|
||||
Notes
|
||||
-----
|
||||
If each node has a unique attribute value, the unnormalized mixing matrix
|
||||
will be equal to the adjacency matrix. To get a denser mixing matrix,
|
||||
the rounding can be performed to form groups of nodes with equal values.
|
||||
For example, the exact height of persons in cm (180.79155222, 163.9080892,
|
||||
163.30095355, 167.99016217, 168.21590163, ...) can be rounded to (180, 163,
|
||||
163, 168, 168, ...).
|
||||
|
||||
Definitions of attribute mixing matrix vary on whether the matrix
|
||||
should include rows for attribute values that don't arise. Here we
|
||||
do not include such empty-rows. But you can force them to appear
|
||||
by inputting a `mapping` that includes those values.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(3)
|
||||
>>> gender = {0: "male", 1: "female", 2: "female"}
|
||||
>>> nx.set_node_attributes(G, gender, "gender")
|
||||
>>> mapping = {"male": 0, "female": 1}
|
||||
>>> mix_mat = nx.attribute_mixing_matrix(G, "gender", mapping=mapping)
|
||||
>>> mix_mat
|
||||
array([[0. , 0.25],
|
||||
[0.25, 0.5 ]])
|
||||
"""
|
||||
d = attribute_mixing_dict(G, attribute, nodes)
|
||||
a = dict_to_numpy_array(d, mapping=mapping)
|
||||
if normalized:
|
||||
a = a / a.sum()
|
||||
return a
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def degree_mixing_dict(G, x="out", y="in", weight=None, nodes=None, normalized=False):
|
||||
"""Returns dictionary representation of mixing matrix for degree.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
NetworkX graph object.
|
||||
|
||||
x: string ('in','out')
|
||||
The degree type for source node (directed graphs only).
|
||||
|
||||
y: string ('in','out')
|
||||
The degree type for target node (directed graphs only).
|
||||
|
||||
weight: string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used
|
||||
as a weight. If None, then each edge has weight 1.
|
||||
The degree is the sum of the edge weights adjacent to the node.
|
||||
|
||||
normalized : bool (default=False)
|
||||
Return counts if False or probabilities if True.
|
||||
|
||||
Returns
|
||||
-------
|
||||
d: dictionary
|
||||
Counts or joint probability of occurrence of degree pairs.
|
||||
"""
|
||||
xy_iter = node_degree_xy(G, x=x, y=y, nodes=nodes, weight=weight)
|
||||
return mixing_dict(xy_iter, normalized=normalized)
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def degree_mixing_matrix(
|
||||
G, x="out", y="in", weight=None, nodes=None, normalized=True, mapping=None
|
||||
):
|
||||
"""Returns mixing matrix for attribute.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
NetworkX graph object.
|
||||
|
||||
x: string ('in','out')
|
||||
The degree type for source node (directed graphs only).
|
||||
|
||||
y: string ('in','out')
|
||||
The degree type for target node (directed graphs only).
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Build the matrix using only nodes in container.
|
||||
The default is all nodes.
|
||||
|
||||
weight: string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used
|
||||
as a weight. If None, then each edge has weight 1.
|
||||
The degree is the sum of the edge weights adjacent to the node.
|
||||
|
||||
normalized : bool (default=True)
|
||||
Return counts if False or probabilities if True.
|
||||
|
||||
mapping : dictionary, optional
|
||||
Mapping from node degree to integer index in matrix.
|
||||
If not specified, an arbitrary ordering will be used.
|
||||
|
||||
Returns
|
||||
-------
|
||||
m: numpy array
|
||||
Counts, or joint probability, of occurrence of node degree.
|
||||
|
||||
Notes
|
||||
-----
|
||||
Definitions of degree mixing matrix vary on whether the matrix
|
||||
should include rows for degree values that don't arise. Here we
|
||||
do not include such empty-rows. But you can force them to appear
|
||||
by inputting a `mapping` that includes those values. See examples.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.star_graph(3)
|
||||
>>> mix_mat = nx.degree_mixing_matrix(G)
|
||||
>>> mix_mat
|
||||
array([[0. , 0.5],
|
||||
[0.5, 0. ]])
|
||||
|
||||
If you want every possible degree to appear as a row, even if no nodes
|
||||
have that degree, use `mapping` as follows,
|
||||
|
||||
>>> max_degree = max(deg for n, deg in G.degree)
|
||||
>>> mapping = {x: x for x in range(max_degree + 1)} # identity mapping
|
||||
>>> mix_mat = nx.degree_mixing_matrix(G, mapping=mapping)
|
||||
>>> mix_mat
|
||||
array([[0. , 0. , 0. , 0. ],
|
||||
[0. , 0. , 0. , 0.5],
|
||||
[0. , 0. , 0. , 0. ],
|
||||
[0. , 0.5, 0. , 0. ]])
|
||||
"""
|
||||
d = degree_mixing_dict(G, x=x, y=y, nodes=nodes, weight=weight)
|
||||
a = dict_to_numpy_array(d, mapping=mapping)
|
||||
if normalized:
|
||||
a = a / a.sum()
|
||||
return a
|
||||
|
||||
|
||||
def mixing_dict(xy, normalized=False):
|
||||
"""Returns a dictionary representation of mixing matrix.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
xy : list or container of two-tuples
|
||||
Pairs of (x,y) items.
|
||||
|
||||
|
||||
|
||||
|
||||
normalized : bool (default=False)
|
||||
Return counts if False or probabilities if True.
|
||||
|
||||
Returns
|
||||
-------
|
||||
d: dictionary
|
||||
Counts or joint probability of occurrence of values in xy.
|
||||
"""
|
||||
d = {}
|
||||
psum = 0.0
|
||||
for x, y in xy:
|
||||
if x not in d:
|
||||
d[x] = {}
|
||||
if y not in d:
|
||||
d[y] = {}
|
||||
v = d[x].get(y, 0)
|
||||
d[x][y] = v + 1
|
||||
psum += 1
|
||||
|
||||
if normalized:
|
||||
for _, jdict in d.items():
|
||||
for j in jdict:
|
||||
jdict[j] /= psum
|
||||
return d
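
# Illustrative sketch (an editorial addition, not part of the upstream module):
# `mixing_dict` is typically fed from `node_attribute_xy`, already imported above.
if __name__ == "__main__":
    demo = nx.Graph()
    demo.add_nodes_from([0, 1], color="red")
    demo.add_nodes_from([2, 3], color="blue")
    demo.add_edges_from([(0, 1), (1, 2)])
    print(mixing_dict(node_attribute_xy(demo, "color"), normalized=True))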
|
||||
@@ -0,0 +1,160 @@
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["average_neighbor_degree"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def average_neighbor_degree(G, source="out", target="out", nodes=None, weight=None):
|
||||
r"""Returns the average degree of the neighborhood of each node.
|
||||
|
||||
In an undirected graph, the neighborhood `N(i)` of node `i` contains the
|
||||
nodes that are connected to `i` by an edge.
|
||||
|
||||
For directed graphs, `N(i)` is defined according to the parameter `source`:
|
||||
|
||||
- if source is 'in', then `N(i)` consists of predecessors of node `i`.
|
||||
- if source is 'out', then `N(i)` consists of successors of node `i`.
|
||||
- if source is 'in+out', then `N(i)` is both predecessors and successors.
|
||||
|
||||
The average neighborhood degree of a node `i` is
|
||||
|
||||
.. math::
|
||||
|
||||
k_{nn,i} = \frac{1}{|N(i)|} \sum_{j \in N(i)} k_j
|
||||
|
||||
where `N(i)` are the neighbors of node `i` and `k_j` is
|
||||
the degree of node `j` which belongs to `N(i)`. For weighted
|
||||
graphs, an analogous measure can be defined [1]_,
|
||||
|
||||
.. math::
|
||||
|
||||
k_{nn,i}^{w} = \frac{1}{s_i} \sum_{j \in N(i)} w_{ij} k_j
|
||||
|
||||
where `s_i` is the weighted degree of node `i`, `w_{ij}`
|
||||
is the weight of the edge that links `i` and `j` and
|
||||
`N(i)` are the neighbors of node `i`.
|
||||
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
source : string ("in"|"out"|"in+out"), optional (default="out")
|
||||
Directed graphs only.
|
||||
Use "in"- or "out"-neighbors of source node.
|
||||
|
||||
target : string ("in"|"out"|"in+out"), optional (default="out")
|
||||
Directed graphs only.
|
||||
Use "in"- or "out"-degree for target node.
|
||||
|
||||
nodes : list or iterable, optional (default=G.nodes)
|
||||
Compute neighbor degree only for specified nodes.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used as a weight.
|
||||
If None, then each edge has weight 1.
|
||||
|
||||
Returns
|
||||
-------
|
||||
d: dict
|
||||
A dictionary keyed by node to the average degree of its neighbors.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If either `source` or `target` is not one of 'in', 'out', or 'in+out'.
|
||||
If either `source` or `target` is passed for an undirected graph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.edges[0, 1]["weight"] = 5
|
||||
>>> G.edges[2, 3]["weight"] = 3
|
||||
|
||||
>>> nx.average_neighbor_degree(G)
|
||||
{0: 2.0, 1: 1.5, 2: 1.5, 3: 2.0}
|
||||
>>> nx.average_neighbor_degree(G, weight="weight")
|
||||
{0: 2.0, 1: 1.1666666666666667, 2: 1.25, 3: 2.0}
|
||||
|
||||
>>> G = nx.DiGraph()
|
||||
>>> nx.add_path(G, [0, 1, 2, 3])
|
||||
>>> nx.average_neighbor_degree(G, source="in", target="in")
|
||||
{0: 0.0, 1: 0.0, 2: 1.0, 3: 1.0}
|
||||
|
||||
>>> nx.average_neighbor_degree(G, source="out", target="out")
|
||||
{0: 1.0, 1: 1.0, 2: 0.0, 3: 0.0}
|
||||
|
||||
See Also
|
||||
--------
|
||||
average_degree_connectivity
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] A. Barrat, M. Barthélemy, R. Pastor-Satorras, and A. Vespignani,
|
||||
"The architecture of complex weighted networks".
|
||||
PNAS 101 (11): 3747–3752 (2004).
|
||||
"""
|
||||
if G.is_directed():
|
||||
if source == "in":
|
||||
source_degree = G.in_degree
|
||||
elif source == "out":
|
||||
source_degree = G.out_degree
|
||||
elif source == "in+out":
|
||||
source_degree = G.degree
|
||||
else:
|
||||
raise nx.NetworkXError(
|
||||
f"source argument {source} must be 'in', 'out' or 'in+out'"
|
||||
)
|
||||
|
||||
if target == "in":
|
||||
target_degree = G.in_degree
|
||||
elif target == "out":
|
||||
target_degree = G.out_degree
|
||||
elif target == "in+out":
|
||||
target_degree = G.degree
|
||||
else:
|
||||
raise nx.NetworkXError(
|
||||
f"target argument {target} must be 'in', 'out' or 'in+out'"
|
||||
)
|
||||
else:
|
||||
if source != "out" or target != "out":
|
||||
raise nx.NetworkXError(
|
||||
f"source and target arguments are only supported for directed graphs"
|
||||
)
|
||||
source_degree = target_degree = G.degree
|
||||
|
||||
# precompute target degrees -- should *not* be weighted degree
|
||||
t_deg = dict(target_degree())
|
||||
|
||||
# Set up both predecessor and successor neighbor dicts leaving empty if not needed
|
||||
G_P = G_S = {n: {} for n in G}
|
||||
if G.is_directed():
|
||||
# "in" or "in+out" cases: G_P contains predecessors
|
||||
if "in" in source:
|
||||
G_P = G.pred
|
||||
# "out" or "in+out" cases: G_S contains successors
|
||||
if "out" in source:
|
||||
G_S = G.succ
|
||||
else:
|
||||
# undirected: leave G_P empty, but G_S is the adjacency
|
||||
G_S = G.adj
|
||||
|
||||
# Main loop: Compute average degree of neighbors
|
||||
avg = {}
|
||||
for n, deg in source_degree(nodes, weight=weight):
|
||||
# handle degree zero average
|
||||
if deg == 0:
|
||||
avg[n] = 0.0
|
||||
continue
|
||||
|
||||
# we sum over both G_P and G_S, but one of the two is usually empty.
|
||||
if weight is None:
|
||||
avg[n] = (
|
||||
sum(t_deg[nbr] for nbr in G_S[n]) + sum(t_deg[nbr] for nbr in G_P[n])
|
||||
) / deg
|
||||
else:
|
||||
avg[n] = (
|
||||
sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_S[n].items())
|
||||
+ sum(dd.get(weight, 1) * t_deg[nbr] for nbr, dd in G_P[n].items())
|
||||
) / deg
|
||||
return avg
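
# Hand check (an editorial addition, not part of the upstream module): for the
# weighted docstring example, node 1 has s_1 = 5 + 1 and neighbors 0 and 2 with
# degrees 1 and 2, so k_nn,1^w = (5 * 1 + 1 * 2) / 6.
if __name__ == "__main__":
    demo = nx.path_graph(4)
    demo.edges[0, 1]["weight"] = 5
    demo.edges[2, 3]["weight"] = 3
    print((5 * 1 + 1 * 2) / 6, average_neighbor_degree(demo, weight="weight")[1])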
|
||||
@@ -0,0 +1,127 @@
|
||||
"""Generators of x-y pairs of node data."""
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["node_attribute_xy", "node_degree_xy"]
|
||||
|
||||
|
||||
@nx._dispatchable(node_attrs="attribute")
|
||||
def node_attribute_xy(G, attribute, nodes=None):
|
||||
"""Yields 2-tuples of node attribute values for all edges in `G`.
|
||||
|
||||
This generator yields, for each edge in `G` incident to a node in `nodes`,
|
||||
a 2-tuple of form ``(attribute value, attribute value)`` for the parameter
|
||||
specified node-attribute.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G: NetworkX graph
|
||||
|
||||
attribute: key
|
||||
The node attribute key.
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Use only edges that are incident to specified nodes.
|
||||
The default is all nodes.
|
||||
|
||||
Yields
|
||||
------
|
||||
(x, y): 2-tuple
|
||||
Generates 2-tuple of (attribute, attribute) values.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.DiGraph()
|
||||
>>> G.add_node(1, color="red")
|
||||
>>> G.add_node(2, color="blue")
|
||||
>>> G.add_node(3, color="green")
|
||||
>>> G.add_edge(1, 2)
|
||||
>>> list(nx.node_attribute_xy(G, "color"))
|
||||
[('red', 'blue')]
|
||||
|
||||
Notes
|
||||
-----
|
||||
For undirected graphs, each edge is produced twice, once for each edge
|
||||
representation (u, v) and (v, u), with the exception of self-loop edges
|
||||
which only appear once.
|
||||
"""
|
||||
if nodes is None:
|
||||
nodes = set(G)
|
||||
else:
|
||||
nodes = set(nodes)
|
||||
Gnodes = G.nodes
|
||||
for u, nbrsdict in G.adjacency():
|
||||
if u not in nodes:
|
||||
continue
|
||||
uattr = Gnodes[u].get(attribute, None)
|
||||
if G.is_multigraph():
|
||||
for v, keys in nbrsdict.items():
|
||||
vattr = Gnodes[v].get(attribute, None)
|
||||
for _ in keys:
|
||||
yield (uattr, vattr)
|
||||
else:
|
||||
for v in nbrsdict:
|
||||
vattr = Gnodes[v].get(attribute, None)
|
||||
yield (uattr, vattr)
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def node_degree_xy(G, x="out", y="in", weight=None, nodes=None):
|
||||
"""Yields 2-tuples of ``(degree, degree)`` values for edges in `G`.
|
||||
|
||||
This generator yields, for each edge in `G` incident to a node in `nodes`,
|
||||
a 2-tuple of form ``(degree, degree)``. The node degrees are weighted
|
||||
when a `weight` attribute is specified.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G: NetworkX graph
|
||||
|
||||
x: string ('in','out')
|
||||
The degree type for source node (directed graphs only).
|
||||
|
||||
y: string ('in','out')
|
||||
The degree type for target node (directed graphs only).
|
||||
|
||||
weight: string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used
|
||||
as a weight. If None, then each edge has weight 1.
|
||||
The degree is the sum of the edge weights adjacent to the node.
|
||||
|
||||
nodes: list or iterable (optional)
|
||||
Use only edges that are adjacent to specified nodes.
|
||||
The default is all nodes.
|
||||
|
||||
Yields
|
||||
------
|
||||
(x, y): 2-tuple
|
||||
Generates 2-tuple of (degree, degree) values.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.DiGraph()
|
||||
>>> G.add_edge(1, 2)
|
||||
>>> list(nx.node_degree_xy(G, x="out", y="in"))
|
||||
[(1, 1)]
|
||||
>>> list(nx.node_degree_xy(G, x="in", y="out"))
|
||||
[(0, 0)]
|
||||
|
||||
Notes
|
||||
-----
|
||||
For undirected graphs, each edge is produced twice, once for each edge
|
||||
representation (u, v) and (v, u), with the exception of self-loop edges
|
||||
which only appear once.
|
||||
"""
|
||||
nodes = set(G) if nodes is None else set(nodes)
|
||||
if G.is_directed():
|
||||
direction = {"out": G.out_degree, "in": G.in_degree}
|
||||
xdeg = direction[x]
|
||||
ydeg = direction[y]
|
||||
else:
|
||||
xdeg = ydeg = G.degree
|
||||
|
||||
for u, degu in xdeg(nodes, weight=weight):
|
||||
# use G.edges to treat multigraphs correctly
|
||||
neighbors = (nbr for _, nbr in G.edges(u) if nbr in nodes)
|
||||
for _, degv in ydeg(neighbors, weight=weight):
|
||||
yield degu, degv
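
# Illustrative sketch (an editorial addition, not part of the upstream module):
# as noted above, each undirected edge is yielded once per orientation.
if __name__ == "__main__":
    demo = nx.path_graph(3)
    print(list(node_degree_xy(demo)))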
|
||||
@@ -0,0 +1,81 @@
|
||||
import networkx as nx
|
||||
|
||||
|
||||
class BaseTestAttributeMixing:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
G = nx.Graph()
|
||||
G.add_nodes_from([0, 1], fish="one")
|
||||
G.add_nodes_from([2, 3], fish="two")
|
||||
G.add_nodes_from([4], fish="red")
|
||||
G.add_nodes_from([5], fish="blue")
|
||||
G.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
|
||||
cls.G = G
|
||||
|
||||
D = nx.DiGraph()
|
||||
D.add_nodes_from([0, 1], fish="one")
|
||||
D.add_nodes_from([2, 3], fish="two")
|
||||
D.add_nodes_from([4], fish="red")
|
||||
D.add_nodes_from([5], fish="blue")
|
||||
D.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
|
||||
cls.D = D
|
||||
|
||||
M = nx.MultiGraph()
|
||||
M.add_nodes_from([0, 1], fish="one")
|
||||
M.add_nodes_from([2, 3], fish="two")
|
||||
M.add_nodes_from([4], fish="red")
|
||||
M.add_nodes_from([5], fish="blue")
|
||||
M.add_edges_from([(0, 1), (0, 1), (2, 3)])
|
||||
cls.M = M
|
||||
|
||||
S = nx.Graph()
|
||||
S.add_nodes_from([0, 1], fish="one")
|
||||
S.add_nodes_from([2, 3], fish="two")
|
||||
S.add_nodes_from([4], fish="red")
|
||||
S.add_nodes_from([5], fish="blue")
|
||||
S.add_edge(0, 0)
|
||||
S.add_edge(2, 2)
|
||||
cls.S = S
|
||||
|
||||
N = nx.Graph()
|
||||
N.add_nodes_from([0, 1], margin=-2)
|
||||
N.add_nodes_from([2, 3], margin=-2)
|
||||
N.add_nodes_from([4], margin=-3)
|
||||
N.add_nodes_from([5], margin=-4)
|
||||
N.add_edges_from([(0, 1), (2, 3), (0, 4), (2, 5)])
|
||||
cls.N = N
|
||||
|
||||
F = nx.Graph()
|
||||
F.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
|
||||
F.add_edge(0, 2, weight=1)
|
||||
nx.set_node_attributes(F, dict(F.degree(weight="weight")), "margin")
|
||||
cls.F = F
|
||||
|
||||
K = nx.Graph()
|
||||
K.add_nodes_from([1, 2], margin=-1)
|
||||
K.add_nodes_from([3], margin=1)
|
||||
K.add_nodes_from([4], margin=2)
|
||||
K.add_edges_from([(3, 4), (1, 2), (1, 3)])
|
||||
cls.K = K
|
||||
|
||||
|
||||
class BaseTestDegreeMixing:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
cls.P4 = nx.path_graph(4)
|
||||
cls.D = nx.DiGraph()
|
||||
cls.D.add_edges_from([(0, 2), (0, 3), (1, 3), (2, 3)])
|
||||
cls.D2 = nx.DiGraph()
|
||||
cls.D2.add_edges_from([(0, 3), (1, 0), (1, 2), (2, 4), (4, 1), (4, 3), (4, 2)])
|
||||
cls.M = nx.MultiGraph()
|
||||
nx.add_path(cls.M, range(4))
|
||||
cls.M.add_edge(0, 1)
|
||||
cls.S = nx.Graph()
|
||||
cls.S.add_edges_from([(0, 0), (1, 1)])
|
||||
cls.W = nx.Graph()
|
||||
cls.W.add_edges_from([(0, 3), (1, 3), (2, 3)], weight=0.5)
|
||||
cls.W.add_edge(0, 2, weight=1)
|
||||
S1 = nx.star_graph(4)
|
||||
S2 = nx.star_graph(4)
|
||||
cls.DS = nx.disjoint_union(S1, S2)
|
||||
cls.DS.add_edge(4, 5)
|
||||
@@ -0,0 +1,143 @@
|
||||
from itertools import permutations
|
||||
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
|
||||
|
||||
class TestNeighborConnectivity:
|
||||
def test_degree_p4(self):
|
||||
G = nx.path_graph(4)
|
||||
answer = {1: 2.0, 2: 1.5}
|
||||
nd = nx.average_degree_connectivity(G)
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
answer = {2: 2.0, 4: 1.5}
|
||||
nd = nx.average_degree_connectivity(D)
|
||||
assert nd == answer
|
||||
|
||||
answer = {1: 2.0, 2: 1.5}
|
||||
D = G.to_directed()
|
||||
nd = nx.average_degree_connectivity(D, source="in", target="in")
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_degree_connectivity(D, source="in", target="in")
|
||||
assert nd == answer
|
||||
|
||||
def test_degree_p4_weighted(self):
|
||||
G = nx.path_graph(4)
|
||||
G[1][2]["weight"] = 4
|
||||
answer = {1: 2.0, 2: 1.8}
|
||||
nd = nx.average_degree_connectivity(G, weight="weight")
|
||||
assert nd == answer
|
||||
answer = {1: 2.0, 2: 1.5}
|
||||
nd = nx.average_degree_connectivity(G)
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
answer = {2: 2.0, 4: 1.8}
|
||||
nd = nx.average_degree_connectivity(D, weight="weight")
|
||||
assert nd == answer
|
||||
|
||||
answer = {1: 2.0, 2: 1.8}
|
||||
D = G.to_directed()
|
||||
nd = nx.average_degree_connectivity(
|
||||
D, weight="weight", source="in", target="in"
|
||||
)
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_degree_connectivity(
|
||||
D, source="in", target="out", weight="weight"
|
||||
)
|
||||
assert nd == answer
|
||||
|
||||
def test_weight_keyword(self):
|
||||
G = nx.path_graph(4)
|
||||
G[1][2]["other"] = 4
|
||||
answer = {1: 2.0, 2: 1.8}
|
||||
nd = nx.average_degree_connectivity(G, weight="other")
|
||||
assert nd == answer
|
||||
answer = {1: 2.0, 2: 1.5}
|
||||
nd = nx.average_degree_connectivity(G, weight=None)
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
answer = {2: 2.0, 4: 1.8}
|
||||
nd = nx.average_degree_connectivity(D, weight="other")
|
||||
assert nd == answer
|
||||
|
||||
answer = {1: 2.0, 2: 1.8}
|
||||
D = G.to_directed()
|
||||
nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in")
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_degree_connectivity(D, weight="other", source="in", target="in")
|
||||
assert nd == answer
|
||||
|
||||
def test_degree_barrat(self):
|
||||
G = nx.star_graph(5)
|
||||
G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
|
||||
G[0][5]["weight"] = 5
|
||||
nd = nx.average_degree_connectivity(G)[5]
|
||||
assert nd == 1.8
|
||||
nd = nx.average_degree_connectivity(G, weight="weight")[5]
|
||||
assert nd == pytest.approx(3.222222, abs=1e-5)
|
||||
|
||||
def test_zero_deg(self):
|
||||
G = nx.DiGraph()
|
||||
G.add_edge(1, 2)
|
||||
G.add_edge(1, 3)
|
||||
G.add_edge(1, 4)
|
||||
c = nx.average_degree_connectivity(G)
|
||||
assert c == {1: 0, 3: 1}
|
||||
c = nx.average_degree_connectivity(G, source="in", target="in")
|
||||
assert c == {0: 0, 1: 0}
|
||||
c = nx.average_degree_connectivity(G, source="in", target="out")
|
||||
assert c == {0: 0, 1: 3}
|
||||
c = nx.average_degree_connectivity(G, source="in", target="in+out")
|
||||
assert c == {0: 0, 1: 3}
|
||||
c = nx.average_degree_connectivity(G, source="out", target="out")
|
||||
assert c == {0: 0, 3: 0}
|
||||
c = nx.average_degree_connectivity(G, source="out", target="in")
|
||||
assert c == {0: 0, 3: 1}
|
||||
c = nx.average_degree_connectivity(G, source="out", target="in+out")
|
||||
assert c == {0: 0, 3: 1}
|
||||
|
||||
def test_in_out_weight(self):
|
||||
G = nx.DiGraph()
|
||||
G.add_edge(1, 2, weight=1)
|
||||
G.add_edge(1, 3, weight=1)
|
||||
G.add_edge(3, 1, weight=1)
|
||||
for s, t in permutations(["in", "out", "in+out"], 2):
|
||||
c = nx.average_degree_connectivity(G, source=s, target=t)
|
||||
cw = nx.average_degree_connectivity(G, source=s, target=t, weight="weight")
|
||||
assert c == cw
|
||||
|
||||
def test_invalid_source(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
G = nx.DiGraph()
|
||||
nx.average_degree_connectivity(G, source="bogus")
|
||||
|
||||
def test_invalid_target(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
G = nx.DiGraph()
|
||||
nx.average_degree_connectivity(G, target="bogus")
|
||||
|
||||
def test_invalid_undirected_graph(self):
|
||||
G = nx.Graph()
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
nx.average_degree_connectivity(G, target="bogus")
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
nx.average_degree_connectivity(G, source="bogus")
|
||||
|
||||
def test_single_node(self):
|
||||
# TODO Is this really the intended behavior for providing a
|
||||
# single node as the argument `nodes`? Shouldn't the function
|
||||
# just return the connectivity value itself?
|
||||
G = nx.trivial_graph()
|
||||
conn = nx.average_degree_connectivity(G, nodes=0)
|
||||
assert conn == {0: 0}
|
||||
@@ -0,0 +1,123 @@
|
||||
import pytest
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.assortativity.correlation import attribute_ac
|
||||
|
||||
from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
|
||||
|
||||
|
||||
class TestDegreeMixingCorrelation(BaseTestDegreeMixing):
|
||||
def test_degree_assortativity_undirected(self):
|
||||
r = nx.degree_assortativity_coefficient(self.P4)
|
||||
np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)
|
||||
|
||||
def test_degree_assortativity_node_kwargs(self):
|
||||
G = nx.Graph()
|
||||
edges = [(0, 1), (0, 3), (1, 2), (1, 3), (1, 4), (5, 9), (9, 0)]
|
||||
G.add_edges_from(edges)
|
||||
r = nx.degree_assortativity_coefficient(G, nodes=[1, 2, 4])
|
||||
np.testing.assert_almost_equal(r, -1.0, decimal=4)
|
||||
|
||||
def test_degree_assortativity_directed(self):
|
||||
r = nx.degree_assortativity_coefficient(self.D)
|
||||
np.testing.assert_almost_equal(r, -0.57735, decimal=4)
|
||||
|
||||
def test_degree_assortativity_directed2(self):
|
||||
"""Test degree assortativity for a directed graph where the set of
|
||||
in/out degree does not equal the total degree."""
|
||||
r = nx.degree_assortativity_coefficient(self.D2)
|
||||
np.testing.assert_almost_equal(r, 0.14852, decimal=4)
|
||||
|
||||
def test_degree_assortativity_multigraph(self):
|
||||
r = nx.degree_assortativity_coefficient(self.M)
|
||||
np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)
|
||||
|
||||
def test_degree_pearson_assortativity_undirected(self):
|
||||
r = nx.degree_pearson_correlation_coefficient(self.P4)
|
||||
np.testing.assert_almost_equal(r, -1.0 / 2, decimal=4)
|
||||
|
||||
def test_degree_pearson_assortativity_directed(self):
|
||||
r = nx.degree_pearson_correlation_coefficient(self.D)
|
||||
np.testing.assert_almost_equal(r, -0.57735, decimal=4)
|
||||
|
||||
def test_degree_pearson_assortativity_directed2(self):
|
||||
"""Test degree assortativity with Pearson for a directed graph where
|
||||
the set of in/out degree does not equal the total degree."""
|
||||
r = nx.degree_pearson_correlation_coefficient(self.D2)
|
||||
np.testing.assert_almost_equal(r, 0.14852, decimal=4)
|
||||
|
||||
def test_degree_pearson_assortativity_multigraph(self):
|
||||
r = nx.degree_pearson_correlation_coefficient(self.M)
|
||||
np.testing.assert_almost_equal(r, -1.0 / 7.0, decimal=4)
|
||||
|
||||
def test_degree_assortativity_weighted(self):
|
||||
r = nx.degree_assortativity_coefficient(self.W, weight="weight")
|
||||
np.testing.assert_almost_equal(r, -0.1429, decimal=4)
|
||||
|
||||
def test_degree_assortativity_double_star(self):
|
||||
r = nx.degree_assortativity_coefficient(self.DS)
|
||||
np.testing.assert_almost_equal(r, -0.9339, decimal=4)
|
||||
|
||||
|
||||
class TestAttributeMixingCorrelation(BaseTestAttributeMixing):
|
||||
def test_attribute_assortativity_undirected(self):
|
||||
r = nx.attribute_assortativity_coefficient(self.G, "fish")
|
||||
assert r == 6.0 / 22.0
|
||||
|
||||
def test_attribute_assortativity_directed(self):
|
||||
r = nx.attribute_assortativity_coefficient(self.D, "fish")
|
||||
assert r == 1.0 / 3.0
|
||||
|
||||
def test_attribute_assortativity_multigraph(self):
|
||||
r = nx.attribute_assortativity_coefficient(self.M, "fish")
|
||||
assert r == 1.0
|
||||
|
||||
def test_attribute_assortativity_coefficient(self):
|
||||
# from "Mixing patterns in networks"
|
||||
# fmt: off
|
||||
a = np.array([[0.258, 0.016, 0.035, 0.013],
|
||||
[0.012, 0.157, 0.058, 0.019],
|
||||
[0.013, 0.023, 0.306, 0.035],
|
||||
[0.005, 0.007, 0.024, 0.016]])
|
||||
# fmt: on
|
||||
r = attribute_ac(a)
|
||||
np.testing.assert_almost_equal(r, 0.623, decimal=3)
|
||||
|
||||
def test_attribute_assortativity_coefficient2(self):
|
||||
# fmt: off
|
||||
a = np.array([[0.18, 0.02, 0.01, 0.03],
|
||||
[0.02, 0.20, 0.03, 0.02],
|
||||
[0.01, 0.03, 0.16, 0.01],
|
||||
[0.03, 0.02, 0.01, 0.22]])
|
||||
# fmt: on
|
||||
r = attribute_ac(a)
|
||||
np.testing.assert_almost_equal(r, 0.68, decimal=2)
|
||||
|
||||
def test_attribute_assortativity(self):
|
||||
a = np.array([[50, 50, 0], [50, 50, 0], [0, 0, 2]])
|
||||
r = attribute_ac(a)
|
||||
np.testing.assert_almost_equal(r, 0.029, decimal=3)
|
||||
|
||||
def test_attribute_assortativity_negative(self):
|
||||
r = nx.numeric_assortativity_coefficient(self.N, "margin")
|
||||
np.testing.assert_almost_equal(r, -0.2903, decimal=4)
|
||||
|
||||
def test_assortativity_node_kwargs(self):
|
||||
G = nx.Graph()
|
||||
G.add_nodes_from([0, 1], size=2)
|
||||
G.add_nodes_from([2, 3], size=3)
|
||||
G.add_edges_from([(0, 1), (2, 3)])
|
||||
r = nx.numeric_assortativity_coefficient(G, "size", nodes=[0, 3])
|
||||
np.testing.assert_almost_equal(r, 1.0, decimal=4)
|
||||
|
||||
def test_attribute_assortativity_float(self):
|
||||
r = nx.numeric_assortativity_coefficient(self.F, "margin")
|
||||
np.testing.assert_almost_equal(r, -0.1429, decimal=4)
|
||||
|
||||
def test_attribute_assortativity_mixed(self):
|
||||
r = nx.numeric_assortativity_coefficient(self.K, "margin")
|
||||
np.testing.assert_almost_equal(r, 0.4340, decimal=4)
|
||||
@@ -0,0 +1,176 @@
|
||||
import pytest
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
|
||||
|
||||
import networkx as nx
|
||||
|
||||
from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
|
||||
|
||||
|
||||
class TestDegreeMixingDict(BaseTestDegreeMixing):
|
||||
def test_degree_mixing_dict_undirected(self):
|
||||
d = nx.degree_mixing_dict(self.P4)
|
||||
d_result = {1: {2: 2}, 2: {1: 2, 2: 2}}
|
||||
assert d == d_result
|
||||
|
||||
def test_degree_mixing_dict_undirected_normalized(self):
|
||||
d = nx.degree_mixing_dict(self.P4, normalized=True)
|
||||
d_result = {1: {2: 1.0 / 3}, 2: {1: 1.0 / 3, 2: 1.0 / 3}}
|
||||
assert d == d_result
|
||||
|
||||
def test_degree_mixing_dict_directed(self):
|
||||
d = nx.degree_mixing_dict(self.D)
|
||||
print(d)
|
||||
d_result = {1: {3: 2}, 2: {1: 1, 3: 1}, 3: {}}
|
||||
assert d == d_result
|
||||
|
||||
def test_degree_mixing_dict_multigraph(self):
|
||||
d = nx.degree_mixing_dict(self.M)
|
||||
d_result = {1: {2: 1}, 2: {1: 1, 3: 3}, 3: {2: 3}}
|
||||
assert d == d_result
|
||||
|
||||
def test_degree_mixing_dict_weighted(self):
|
||||
d = nx.degree_mixing_dict(self.W, weight="weight")
|
||||
d_result = {0.5: {1.5: 1}, 1.5: {1.5: 6, 0.5: 1}}
|
||||
assert d == d_result
|
||||
|
||||
|
||||
class TestDegreeMixingMatrix(BaseTestDegreeMixing):
|
||||
def test_degree_mixing_matrix_undirected(self):
|
||||
# fmt: off
|
||||
a_result = np.array([[0, 2],
|
||||
[2, 2]]
|
||||
)
|
||||
# fmt: on
|
||||
a = nx.degree_mixing_matrix(self.P4, normalized=False)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.degree_mixing_matrix(self.P4)
|
||||
np.testing.assert_equal(a, a_result / a_result.sum())
|
||||
|
||||
def test_degree_mixing_matrix_directed(self):
|
||||
# fmt: off
|
||||
a_result = np.array([[0, 0, 2],
|
||||
[1, 0, 1],
|
||||
[0, 0, 0]]
|
||||
)
|
||||
# fmt: on
|
||||
a = nx.degree_mixing_matrix(self.D, normalized=False)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.degree_mixing_matrix(self.D)
|
||||
np.testing.assert_equal(a, a_result / a_result.sum())
|
||||
|
||||
def test_degree_mixing_matrix_multigraph(self):
|
||||
# fmt: off
|
||||
a_result = np.array([[0, 1, 0],
|
||||
[1, 0, 3],
|
||||
[0, 3, 0]]
|
||||
)
|
||||
# fmt: on
|
||||
a = nx.degree_mixing_matrix(self.M, normalized=False)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.degree_mixing_matrix(self.M)
|
||||
np.testing.assert_equal(a, a_result / a_result.sum())
|
||||
|
||||
def test_degree_mixing_matrix_selfloop(self):
|
||||
# fmt: off
|
||||
a_result = np.array([[2]])
|
||||
# fmt: on
|
||||
a = nx.degree_mixing_matrix(self.S, normalized=False)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.degree_mixing_matrix(self.S)
|
||||
np.testing.assert_equal(a, a_result / a_result.sum())
|
||||
|
||||
def test_degree_mixing_matrix_weighted(self):
|
||||
a_result = np.array([[0.0, 1.0], [1.0, 6.0]])
|
||||
a = nx.degree_mixing_matrix(self.W, weight="weight", normalized=False)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.degree_mixing_matrix(self.W, weight="weight")
|
||||
np.testing.assert_equal(a, a_result / float(a_result.sum()))
|
||||
|
||||
def test_degree_mixing_matrix_mapping(self):
|
||||
a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
|
||||
mapping = {0.5: 1, 1.5: 0}
|
||||
a = nx.degree_mixing_matrix(
|
||||
self.W, weight="weight", normalized=False, mapping=mapping
|
||||
)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
|
||||
|
||||
class TestAttributeMixingDict(BaseTestAttributeMixing):
|
||||
def test_attribute_mixing_dict_undirected(self):
|
||||
d = nx.attribute_mixing_dict(self.G, "fish")
|
||||
d_result = {
|
||||
"one": {"one": 2, "red": 1},
|
||||
"two": {"two": 2, "blue": 1},
|
||||
"red": {"one": 1},
|
||||
"blue": {"two": 1},
|
||||
}
|
||||
assert d == d_result
|
||||
|
||||
def test_attribute_mixing_dict_directed(self):
|
||||
d = nx.attribute_mixing_dict(self.D, "fish")
|
||||
d_result = {
|
||||
"one": {"one": 1, "red": 1},
|
||||
"two": {"two": 1, "blue": 1},
|
||||
"red": {},
|
||||
"blue": {},
|
||||
}
|
||||
assert d == d_result
|
||||
|
||||
def test_attribute_mixing_dict_multigraph(self):
|
||||
d = nx.attribute_mixing_dict(self.M, "fish")
|
||||
d_result = {"one": {"one": 4}, "two": {"two": 2}}
|
||||
assert d == d_result
|
||||
|
||||
|
||||
class TestAttributeMixingMatrix(BaseTestAttributeMixing):
|
||||
def test_attribute_mixing_matrix_undirected(self):
|
||||
mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
|
||||
a_result = np.array([[2, 0, 1, 0], [0, 2, 0, 1], [1, 0, 0, 0], [0, 1, 0, 0]])
|
||||
a = nx.attribute_mixing_matrix(
|
||||
self.G, "fish", mapping=mapping, normalized=False
|
||||
)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.attribute_mixing_matrix(self.G, "fish", mapping=mapping)
|
||||
np.testing.assert_equal(a, a_result / a_result.sum())
|
||||
|
||||
def test_attribute_mixing_matrix_directed(self):
|
||||
mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
|
||||
a_result = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]])
|
||||
a = nx.attribute_mixing_matrix(
|
||||
self.D, "fish", mapping=mapping, normalized=False
|
||||
)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.attribute_mixing_matrix(self.D, "fish", mapping=mapping)
|
||||
np.testing.assert_equal(a, a_result / a_result.sum())
|
||||
|
||||
def test_attribute_mixing_matrix_multigraph(self):
|
||||
mapping = {"one": 0, "two": 1, "red": 2, "blue": 3}
|
||||
a_result = np.array([[4, 0, 0, 0], [0, 2, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])
|
||||
a = nx.attribute_mixing_matrix(
|
||||
self.M, "fish", mapping=mapping, normalized=False
|
||||
)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.attribute_mixing_matrix(self.M, "fish", mapping=mapping)
|
||||
np.testing.assert_equal(a, a_result / a_result.sum())
|
||||
|
||||
def test_attribute_mixing_matrix_negative(self):
|
||||
mapping = {-2: 0, -3: 1, -4: 2}
|
||||
a_result = np.array([[4.0, 1.0, 1.0], [1.0, 0.0, 0.0], [1.0, 0.0, 0.0]])
|
||||
a = nx.attribute_mixing_matrix(
|
||||
self.N, "margin", mapping=mapping, normalized=False
|
||||
)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.attribute_mixing_matrix(self.N, "margin", mapping=mapping)
|
||||
np.testing.assert_equal(a, a_result / float(a_result.sum()))
|
||||
|
||||
def test_attribute_mixing_matrix_float(self):
|
||||
mapping = {0.5: 1, 1.5: 0}
|
||||
a_result = np.array([[6.0, 1.0], [1.0, 0.0]])
|
||||
a = nx.attribute_mixing_matrix(
|
||||
self.F, "margin", mapping=mapping, normalized=False
|
||||
)
|
||||
np.testing.assert_equal(a, a_result)
|
||||
a = nx.attribute_mixing_matrix(self.F, "margin", mapping=mapping)
|
||||
np.testing.assert_equal(a, a_result / a_result.sum())
|
||||
@@ -0,0 +1,108 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
|
||||
|
||||
class TestAverageNeighbor:
|
||||
def test_degree_p4(self):
|
||||
G = nx.path_graph(4)
|
||||
answer = {0: 2, 1: 1.5, 2: 1.5, 3: 2}
|
||||
nd = nx.average_neighbor_degree(G)
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_neighbor_degree(D)
|
||||
assert nd == answer
|
||||
|
||||
D = nx.DiGraph(G.edges(data=True))
|
||||
nd = nx.average_neighbor_degree(D)
|
||||
assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
|
||||
nd = nx.average_neighbor_degree(D, "in", "out")
|
||||
assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
|
||||
nd = nx.average_neighbor_degree(D, "out", "in")
|
||||
assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
|
||||
nd = nx.average_neighbor_degree(D, "in", "in")
|
||||
assert nd == {0: 0, 1: 0, 2: 1, 3: 1}
|
||||
|
||||
def test_degree_p4_weighted(self):
|
||||
G = nx.path_graph(4)
|
||||
G[1][2]["weight"] = 4
|
||||
answer = {0: 2, 1: 1.8, 2: 1.8, 3: 2}
|
||||
nd = nx.average_neighbor_degree(G, weight="weight")
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_neighbor_degree(D, weight="weight")
|
||||
assert nd == answer
|
||||
|
||||
D = nx.DiGraph(G.edges(data=True))
|
||||
print(D.edges(data=True))
|
||||
nd = nx.average_neighbor_degree(D, weight="weight")
|
||||
assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
|
||||
nd = nx.average_neighbor_degree(D, "out", "out", weight="weight")
|
||||
assert nd == {0: 1, 1: 1, 2: 0, 3: 0}
|
||||
nd = nx.average_neighbor_degree(D, "in", "in", weight="weight")
|
||||
assert nd == {0: 0, 1: 0, 2: 1, 3: 1}
|
||||
nd = nx.average_neighbor_degree(D, "in", "out", weight="weight")
|
||||
assert nd == {0: 0, 1: 1, 2: 1, 3: 1}
|
||||
nd = nx.average_neighbor_degree(D, "out", "in", weight="weight")
|
||||
assert nd == {0: 1, 1: 1, 2: 1, 3: 0}
|
||||
nd = nx.average_neighbor_degree(D, source="in+out", weight="weight")
|
||||
assert nd == {0: 1.0, 1: 1.0, 2: 0.8, 3: 1.0}
|
||||
nd = nx.average_neighbor_degree(D, target="in+out", weight="weight")
|
||||
assert nd == {0: 2.0, 1: 2.0, 2: 1.0, 3: 0.0}
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_neighbor_degree(D, weight="weight")
|
||||
assert nd == answer
|
||||
nd = nx.average_neighbor_degree(D, source="out", target="out", weight="weight")
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_neighbor_degree(D, source="in", target="in", weight="weight")
|
||||
assert nd == answer
|
||||
|
||||
def test_degree_k4(self):
|
||||
G = nx.complete_graph(4)
|
||||
answer = {0: 3, 1: 3, 2: 3, 3: 3}
|
||||
nd = nx.average_neighbor_degree(G)
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_neighbor_degree(D)
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_neighbor_degree(D)
|
||||
assert nd == answer
|
||||
|
||||
D = G.to_directed()
|
||||
nd = nx.average_neighbor_degree(D, source="in", target="in")
|
||||
assert nd == answer
|
||||
|
||||
def test_degree_k4_nodes(self):
|
||||
G = nx.complete_graph(4)
|
||||
answer = {1: 3.0, 2: 3.0}
|
||||
nd = nx.average_neighbor_degree(G, nodes=[1, 2])
|
||||
assert nd == answer
|
||||
|
||||
def test_degree_barrat(self):
|
||||
G = nx.star_graph(5)
|
||||
G.add_edges_from([(5, 6), (5, 7), (5, 8), (5, 9)])
|
||||
G[0][5]["weight"] = 5
|
||||
nd = nx.average_neighbor_degree(G)[5]
|
||||
assert nd == 1.8
|
||||
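# Weighted (Barrat) average neighbor degree of node 5:
# (5*5 + 4*1*1) / (5 + 4*1) = 29/9 ~ 3.222.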
nd = nx.average_neighbor_degree(G, weight="weight")[5]
|
||||
assert nd == pytest.approx(3.222222, abs=1e-5)
|
||||
|
||||
def test_error_invalid_source_target(self):
|
||||
G = nx.path_graph(4)
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
nx.average_neighbor_degree(G, "error")
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
nx.average_neighbor_degree(G, "in", "error")
|
||||
G = G.to_directed()
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
nx.average_neighbor_degree(G, "error")
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
nx.average_neighbor_degree(G, "in", "error")
|
||||
@@ -0,0 +1,87 @@
|
||||
import networkx as nx
|
||||
|
||||
from .base_test import BaseTestAttributeMixing, BaseTestDegreeMixing
|
||||
|
||||
|
||||
class TestAttributeMixingXY(BaseTestAttributeMixing):
|
||||
def test_node_attribute_xy_undirected(self):
|
||||
attrxy = sorted(nx.node_attribute_xy(self.G, "fish"))
|
||||
attrxy_result = sorted(
|
||||
[
|
||||
("one", "one"),
|
||||
("one", "one"),
|
||||
("two", "two"),
|
||||
("two", "two"),
|
||||
("one", "red"),
|
||||
("red", "one"),
|
||||
("blue", "two"),
|
||||
("two", "blue"),
|
||||
]
|
||||
)
|
||||
assert attrxy == attrxy_result
|
||||
|
||||
def test_node_attribute_xy_undirected_nodes(self):
|
||||
attrxy = sorted(nx.node_attribute_xy(self.G, "fish", nodes=["one", "yellow"]))
|
||||
attrxy_result = sorted([])
|
||||
assert attrxy == attrxy_result
|
||||
|
||||
def test_node_attribute_xy_directed(self):
|
||||
attrxy = sorted(nx.node_attribute_xy(self.D, "fish"))
|
||||
attrxy_result = sorted(
|
||||
[("one", "one"), ("two", "two"), ("one", "red"), ("two", "blue")]
|
||||
)
|
||||
assert attrxy == attrxy_result
|
||||
|
||||
def test_node_attribute_xy_multigraph(self):
|
||||
attrxy = sorted(nx.node_attribute_xy(self.M, "fish"))
|
||||
attrxy_result = [
|
||||
("one", "one"),
|
||||
("one", "one"),
|
||||
("one", "one"),
|
||||
("one", "one"),
|
||||
("two", "two"),
|
||||
("two", "two"),
|
||||
]
|
||||
assert attrxy == attrxy_result
|
||||
|
||||
def test_node_attribute_xy_selfloop(self):
|
||||
attrxy = sorted(nx.node_attribute_xy(self.S, "fish"))
|
||||
attrxy_result = [("one", "one"), ("two", "two")]
|
||||
assert attrxy == attrxy_result
|
||||
|
||||
|
||||
class TestDegreeMixingXY(BaseTestDegreeMixing):
|
||||
def test_node_degree_xy_undirected(self):
|
||||
xy = sorted(nx.node_degree_xy(self.P4))
|
||||
xy_result = sorted([(1, 2), (2, 1), (2, 2), (2, 2), (1, 2), (2, 1)])
|
||||
assert xy == xy_result
|
||||
|
||||
def test_node_degree_xy_undirected_nodes(self):
|
||||
xy = sorted(nx.node_degree_xy(self.P4, nodes=[0, 1, -1]))
|
||||
xy_result = sorted([(1, 2), (2, 1)])
|
||||
assert xy == xy_result
|
||||
|
||||
def test_node_degree_xy_directed(self):
|
||||
xy = sorted(nx.node_degree_xy(self.D))
|
||||
xy_result = sorted([(2, 1), (2, 3), (1, 3), (1, 3)])
|
||||
assert xy == xy_result
|
||||
|
||||
def test_node_degree_xy_multigraph(self):
|
||||
xy = sorted(nx.node_degree_xy(self.M))
|
||||
xy_result = sorted(
|
||||
[(2, 3), (2, 3), (3, 2), (3, 2), (2, 3), (3, 2), (1, 2), (2, 1)]
|
||||
)
|
||||
assert xy == xy_result
|
||||
|
||||
def test_node_degree_xy_selfloop(self):
|
||||
xy = sorted(nx.node_degree_xy(self.S))
|
||||
xy_result = sorted([(2, 2), (2, 2)])
|
||||
assert xy == xy_result
|
||||
|
||||
def test_node_degree_xy_weighted(self):
|
||||
G = nx.Graph()
|
||||
G.add_edge(1, 2, weight=7)
|
||||
G.add_edge(2, 3, weight=10)
|
||||
xy = sorted(nx.node_degree_xy(G, weight="weight"))
|
||||
xy_result = sorted([(7, 17), (17, 10), (17, 7), (10, 17)])
|
||||
assert xy == xy_result
|
||||
@@ -0,0 +1,171 @@
|
||||
"""
|
||||
Algorithms for asteroidal triples and asteroidal numbers in graphs.
|
||||
|
||||
An asteroidal triple in a graph G is a set of three non-adjacent vertices
u, v and w such that there exists a path between any two of them that avoids
the closed neighborhood of the third. More formally, v_j and v_k belong to the
same connected component of G - N[v_i], where N[v_i] denotes the closed
neighborhood of v_i. A graph which does not contain any asteroidal triples is
called an AT-free graph. The class of AT-free graphs is a graph class for
which many NP-complete problems are solvable in polynomial time; among them
are independent set and coloring.
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = ["is_at_free", "find_asteroidal_triple"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def find_asteroidal_triple(G):
|
||||
r"""Find an asteroidal triple in the given graph.
|
||||
|
||||
An asteroidal triple is a triple of non-adjacent vertices such that
|
||||
there exists a path between any two of them which avoids the closed
|
||||
neighborhood of the third. The function checks all independent triples of
vertices and tests whether each forms an asteroidal triple. This is done with
the help of a data structure called a component structure.
A component structure encodes which vertices belong to the same connected
component when the closed neighborhood of a given vertex is removed from the
graph. The algorithm used is the straightforward one outlined in [1]_, which
has a runtime of :math:`O(|V||\overline{E}| + |V||E|)`, where the second term
accounts for building the component structure.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX Graph
|
||||
The graph in which to search for an asteroidal triple.
|
||||
|
||||
Returns
|
||||
-------
|
||||
list or None
|
||||
An asteroidal triple is returned as a list of nodes. If no asteroidal
|
||||
triple exists, i.e. the graph is AT-free, then None is returned.
|
||||
|
||||
|
||||
Notes
|
||||
-----
|
||||
The component structure and the algorithm are described in [1]_. The current
implementation follows the straightforward algorithm for simple graphs.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Ekkehard Köhler,
|
||||
"Recognizing Graphs without asteroidal triples",
|
||||
Journal of Discrete Algorithms 2, pages 439-452, 2004.
|
||||
https://www.sciencedirect.com/science/article/pii/S157086670400019X
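
Examples
--------
A small illustrative check (the exact triple returned may vary):

>>> nx.find_asteroidal_triple(nx.cycle_graph(6)) is None
False
>>> nx.find_asteroidal_triple(nx.complete_graph(6)) is None
True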
|
||||
"""
|
||||
V = set(G.nodes)
|
||||
|
||||
if len(V) < 6:
|
||||
# An asteroidal triple cannot exist in a graph with 5 or less vertices.
|
||||
return None
|
||||
|
||||
component_structure = create_component_structure(G)
|
||||
E_complement = set(nx.complement(G).edges)
|
||||
|
||||
for e in E_complement:
|
||||
u = e[0]
|
||||
v = e[1]
|
||||
u_neighborhood = set(G[u]).union([u])
|
||||
v_neighborhood = set(G[v]).union([v])
|
||||
union_of_neighborhoods = u_neighborhood.union(v_neighborhood)
|
||||
for w in V - union_of_neighborhoods:
|
||||
# Check for each pair of vertices whether they belong to the
|
||||
# same connected component when the closed neighborhood of the
|
||||
# third is removed.
|
||||
if (
|
||||
component_structure[u][v] == component_structure[u][w]
|
||||
and component_structure[v][u] == component_structure[v][w]
|
||||
and component_structure[w][u] == component_structure[w][v]
|
||||
):
|
||||
return [u, v, w]
|
||||
return None
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def is_at_free(G):
|
||||
"""Check if a graph is AT-free.
|
||||
|
||||
The method uses the `find_asteroidal_triple` method to recognize
|
||||
an AT-free graph. If no asteroidal triple is found the graph is
|
||||
AT-free and True is returned. If at least one asteroidal triple is
|
||||
found the graph is not AT-free and False is returned.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX Graph
|
||||
The graph to check for being AT-free.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
True if G is AT-free and False otherwise.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.Graph([(0, 1), (0, 2), (1, 2), (1, 3), (1, 4), (4, 5)])
|
||||
>>> nx.is_at_free(G)
|
||||
True
|
||||
|
||||
>>> G = nx.cycle_graph(6)
|
||||
>>> nx.is_at_free(G)
|
||||
False
|
||||
"""
|
||||
return find_asteroidal_triple(G) is None
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def create_component_structure(G):
|
||||
r"""Create component structure for G.
|
||||
|
||||
A *component structure* is an :math:`n \times n` array, denoted `c`, where
`n` is the number of vertices and each row and column corresponds to a vertex.

.. math::

    c_{uv} = \begin{cases}
        0, & \text{if } v \in N[u] \\
        k, & \text{if } v \text{ is in component } k \text{ of } G \setminus N[u]
    \end{cases}

where `k` is an arbitrary label for each component. The structure is used
to simplify the detection of asteroidal triples.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX Graph
|
||||
Undirected, simple graph.
|
||||
|
||||
Returns
|
||||
-------
|
||||
component_structure : dictionary
|
||||
A dictionary of dictionaries: the outer dictionary is keyed by vertex `v`,
and the inner dictionary maps each vertex `u` to its component label in
`G - N[v]` (0 if `u` lies in `N[v]`).
|
||||
|
||||
"""
|
||||
V = set(G.nodes)
|
||||
component_structure = {}
|
||||
for v in V:
|
||||
label = 0
|
||||
closed_neighborhood = set(G[v]).union({v})
|
||||
row_dict = {}
|
||||
for u in closed_neighborhood:
|
||||
row_dict[u] = 0
|
||||
|
||||
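# Vertices outside N[v] are labeled by the connected component they fall
# into once the closed neighborhood of v is removed.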
G_reduced = G.subgraph(set(G.nodes) - closed_neighborhood)
|
||||
for cc in nx.connected_components(G_reduced):
|
||||
label += 1
|
||||
for u in cc:
|
||||
row_dict[u] = label
|
||||
|
||||
component_structure[v] = row_dict
|
||||
|
||||
return component_structure
|
||||
@@ -0,0 +1,87 @@
|
||||
r"""This module provides functions and operations for bipartite
|
||||
graphs. Bipartite graphs `B = (U, V, E)` have two node sets `U,V` and edges in
|
||||
`E` that only connect nodes from opposite sets. It is common in the literature
|
||||
to use a spatial analogy, referring to the two node sets as top and bottom nodes.
|
||||
|
||||
The bipartite algorithms are not imported into the networkx namespace
|
||||
at the top level so the easiest way to use them is with:
|
||||
|
||||
>>> from networkx.algorithms import bipartite
|
||||
|
||||
NetworkX does not have a custom bipartite graph class but the Graph()
|
||||
or DiGraph() classes can be used to represent bipartite graphs. However,
|
||||
you have to keep track of which set each node belongs to, and make
|
||||
sure that there is no edge between nodes of the same set. The convention used
|
||||
in NetworkX is to use a node attribute named `bipartite` with values 0 or 1 to
|
||||
identify the sets each node belongs to. This convention is not enforced in
|
||||
the source code of bipartite functions; it is only a recommendation.
|
||||
|
||||
For example:
|
||||
|
||||
>>> B = nx.Graph()
|
||||
>>> # Add nodes with the node attribute "bipartite"
|
||||
>>> B.add_nodes_from([1, 2, 3, 4], bipartite=0)
|
||||
>>> B.add_nodes_from(["a", "b", "c"], bipartite=1)
|
||||
>>> # Add edges only between nodes of opposite node sets
|
||||
>>> B.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
|
||||
|
||||
Many algorithms of the bipartite module of NetworkX require, as an argument, a
|
||||
container with all the nodes that belong to one set, in addition to the bipartite
|
||||
graph `B`. The functions in the bipartite package do not check that the node set
|
||||
is actually correct nor that the input graph is actually bipartite.
|
||||
If `B` is connected, you can find the two node sets using a two-coloring
|
||||
algorithm:
|
||||
|
||||
>>> nx.is_connected(B)
|
||||
True
|
||||
>>> bottom_nodes, top_nodes = bipartite.sets(B)
|
||||
|
||||
However, if the input graph is not connected, there is more than one possible
coloring. This is why we require the user to pass a container with all nodes
of one bipartite node set as an argument to most bipartite functions. In the
face of ambiguity, we refuse the temptation to guess and raise an
:exc:`AmbiguousSolution <networkx.AmbiguousSolution>` exception if the input
graph for :func:`bipartite.sets <networkx.algorithms.bipartite.basic.sets>`
is disconnected.
|
||||
|
||||
Using the `bipartite` node attribute, you can easily get the two node sets:
|
||||
|
||||
>>> top_nodes = {n for n, d in B.nodes(data=True) if d["bipartite"] == 0}
|
||||
>>> bottom_nodes = set(B) - top_nodes
|
||||
|
||||
So you can easily use the bipartite algorithms that require, as an argument, a
|
||||
container with all nodes that belong to one node set:
|
||||
|
||||
>>> print(round(bipartite.density(B, bottom_nodes), 2))
|
||||
0.5
|
||||
>>> G = bipartite.projected_graph(B, top_nodes)
|
||||
|
||||
All bipartite graph generators in NetworkX build bipartite graphs with the
|
||||
`bipartite` node attribute. Thus, you can use the same approach:
|
||||
|
||||
>>> RB = bipartite.random_graph(5, 7, 0.2)
|
||||
>>> RB_top = {n for n, d in RB.nodes(data=True) if d["bipartite"] == 0}
|
||||
>>> RB_bottom = set(RB) - RB_top
|
||||
>>> list(RB_top)
|
||||
[0, 1, 2, 3, 4]
|
||||
>>> list(RB_bottom)
|
||||
[5, 6, 7, 8, 9, 10, 11]
|
||||
|
||||
For other bipartite graph generators see
|
||||
:mod:`Generators <networkx.algorithms.bipartite.generators>`.
|
||||
|
||||
"""
|
||||
|
||||
from networkx.algorithms.bipartite.basic import *
|
||||
from networkx.algorithms.bipartite.centrality import *
|
||||
from networkx.algorithms.bipartite.cluster import *
|
||||
from networkx.algorithms.bipartite.covering import *
|
||||
from networkx.algorithms.bipartite.edgelist import *
|
||||
from networkx.algorithms.bipartite.matching import *
|
||||
from networkx.algorithms.bipartite.matrix import *
|
||||
from networkx.algorithms.bipartite.projection import *
|
||||
from networkx.algorithms.bipartite.redundancy import *
|
||||
from networkx.algorithms.bipartite.spectral import *
|
||||
from networkx.algorithms.bipartite.generators import *
|
||||
from networkx.algorithms.bipartite.extendability import *
|
||||
@@ -0,0 +1,322 @@
|
||||
"""
|
||||
==========================
|
||||
Bipartite Graph Algorithms
|
||||
==========================
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.components import connected_components
|
||||
from networkx.exception import AmbiguousSolution
|
||||
|
||||
__all__ = [
|
||||
"is_bipartite",
|
||||
"is_bipartite_node_set",
|
||||
"color",
|
||||
"sets",
|
||||
"density",
|
||||
"degrees",
|
||||
]
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def color(G):
|
||||
"""Returns a two-coloring of the graph.
|
||||
|
||||
Raises an exception if the graph is not bipartite.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
color : dictionary
|
||||
A dictionary keyed by node with a 1 or 0 as data for each node color.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If the graph is not two-colorable.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> c = bipartite.color(G)
|
||||
>>> print(c)
|
||||
{0: 1, 1: 0, 2: 1, 3: 0}
|
||||
|
||||
You can use this to set a node attribute indicating the bipartite set:
|
||||
|
||||
>>> nx.set_node_attributes(G, c, "bipartite")
|
||||
>>> print(G.nodes[0]["bipartite"])
|
||||
1
|
||||
>>> print(G.nodes[1]["bipartite"])
|
||||
0
|
||||
"""
|
||||
if G.is_directed():
|
||||
import itertools
|
||||
|
||||
def neighbors(v):
|
||||
return itertools.chain.from_iterable([G.predecessors(v), G.successors(v)])
|
||||
|
||||
else:
|
||||
neighbors = G.neighbors
|
||||
|
||||
color = {}
|
||||
for n in G: # handle disconnected graphs
|
||||
if n in color or len(G[n]) == 0: # skip isolates
|
||||
continue
|
||||
queue = [n]
|
||||
color[n] = 1 # nodes seen with color (1 or 0)
|
||||
while queue:
|
||||
v = queue.pop()
|
||||
c = 1 - color[v] # opposite color of node v
|
||||
for w in neighbors(v):
|
||||
if w in color:
|
||||
if color[w] == color[v]:
|
||||
raise nx.NetworkXError("Graph is not bipartite.")
|
||||
else:
|
||||
color[w] = c
|
||||
queue.append(w)
|
||||
# color isolates with 0
|
||||
color.update(dict.fromkeys(nx.isolates(G), 0))
|
||||
return color
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def is_bipartite(G):
|
||||
"""Returns True if graph G is bipartite, False if not.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> print(bipartite.is_bipartite(G))
|
||||
True
|
||||
|
||||
See Also
|
||||
--------
|
||||
color, is_bipartite_node_set
|
||||
"""
|
||||
try:
|
||||
color(G)
|
||||
return True
|
||||
except nx.NetworkXError:
|
||||
return False
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def is_bipartite_node_set(G, nodes):
|
||||
"""Returns True if nodes and G/nodes are a bipartition of G.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
nodes: list or container
|
||||
The set of nodes to check for being one of the bipartite node sets of G.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> X = set([1, 3])
|
||||
>>> bipartite.is_bipartite_node_set(G, X)
|
||||
True
|
||||
|
||||
Notes
|
||||
-----
|
||||
An exception is raised if the input nodes are not distinct, because in this
|
||||
case some bipartite algorithms will yield incorrect results.
|
||||
For connected graphs the bipartite sets are unique. This function handles
|
||||
disconnected graphs.
|
||||
"""
|
||||
S = set(nodes)
|
||||
|
||||
if len(S) < len(nodes):
|
||||
# this should maybe just return False?
|
||||
raise AmbiguousSolution(
|
||||
"The input node set contains duplicates.\n"
|
||||
"This may lead to incorrect results when using it in bipartite algorithms.\n"
|
||||
"Consider using set(nodes) as the input"
|
||||
)
|
||||
|
||||
for CC in (G.subgraph(c).copy() for c in connected_components(G)):
|
||||
X, Y = sets(CC)
|
||||
if not (
|
||||
(X.issubset(S) and Y.isdisjoint(S)) or (Y.issubset(S) and X.isdisjoint(S))
|
||||
):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def sets(G, top_nodes=None):
|
||||
"""Returns bipartite node sets of graph G.
|
||||
|
||||
Raises an exception if the graph is not bipartite or if the input
|
||||
graph is disconnected and thus more than one valid solution exists.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
top_nodes : container, optional
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed. But if more than one solution exists an exception
|
||||
will be raised.
|
||||
|
||||
Returns
|
||||
-------
|
||||
X : set
|
||||
Nodes from one side of the bipartite graph.
|
||||
Y : set
|
||||
Nodes from the other side.
|
||||
|
||||
Raises
|
||||
------
|
||||
AmbiguousSolution
|
||||
Raised if the input bipartite graph is disconnected and no container
|
||||
with all nodes in one bipartite set is provided. When determining
|
||||
the nodes in each bipartite set more than one valid solution is
|
||||
possible if the input graph is disconnected.
|
||||
NetworkXError
|
||||
Raised if the input graph is not bipartite.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> X, Y = bipartite.sets(G)
|
||||
>>> list(X)
|
||||
[0, 2]
|
||||
>>> list(Y)
|
||||
[1, 3]
|
||||
|
||||
See Also
|
||||
--------
|
||||
color
|
||||
|
||||
"""
|
||||
if G.is_directed():
|
||||
is_connected = nx.is_weakly_connected
|
||||
else:
|
||||
is_connected = nx.is_connected
|
||||
if top_nodes is not None:
|
||||
X = set(top_nodes)
|
||||
Y = set(G) - X
|
||||
else:
|
||||
if not is_connected(G):
|
||||
msg = "Disconnected graph: Ambiguous solution for bipartite sets."
|
||||
raise nx.AmbiguousSolution(msg)
|
||||
c = color(G)
|
||||
X = {n for n, is_top in c.items() if is_top}
|
||||
Y = {n for n, is_top in c.items() if not is_top}
|
||||
return (X, Y)
|
||||
|
||||
|
||||
@nx._dispatchable(graphs="B")
|
||||
def density(B, nodes):
|
||||
"""Returns density of bipartite graph B.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
|
||||
nodes: list or container
|
||||
Nodes in one node set of the bipartite graph.
|
||||
|
||||
Returns
|
||||
-------
|
||||
d : float
|
||||
The bipartite density
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.complete_bipartite_graph(3, 2)
|
||||
>>> X = set([0, 1, 2])
|
||||
>>> bipartite.density(G, X)
|
||||
1.0
|
||||
>>> Y = set([3, 4])
|
||||
>>> bipartite.density(G, Y)
|
||||
1.0
|
||||
|
||||
Notes
|
||||
-----
|
||||
The container of nodes passed as argument must contain all nodes
|
||||
in one of the two bipartite node sets to avoid ambiguity in the
|
||||
case of disconnected graphs.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
color
|
||||
"""
|
||||
n = len(B)
|
||||
m = nx.number_of_edges(B)
|
||||
nb = len(nodes)
|
||||
nt = n - nb
|
||||
if m == 0: # includes cases n==0 and n==1
|
||||
d = 0.0
|
||||
else:
|
||||
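# The maximum possible number of edges between the two sets is nb * nt
# (2 * nb * nt for directed graphs, since each pair may have two arcs).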
if B.is_directed():
|
||||
d = m / (2 * nb * nt)
|
||||
else:
|
||||
d = m / (nb * nt)
|
||||
return d
|
||||
|
||||
|
||||
@nx._dispatchable(graphs="B", edge_attrs="weight")
|
||||
def degrees(B, nodes, weight=None):
|
||||
"""Returns the degrees of the two node sets in the bipartite graph B.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
|
||||
nodes: list or container
|
||||
Nodes in one node set of the bipartite graph.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
The edge attribute that holds the numerical value used as a weight.
|
||||
If None, then each edge has weight 1.
|
||||
The degree is the sum of the edge weights adjacent to the node.
|
||||
|
||||
Returns
|
||||
-------
|
||||
(degX,degY) : tuple of dictionaries
|
||||
The degrees of the two bipartite sets as dictionaries keyed by node.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.complete_bipartite_graph(3, 2)
|
||||
>>> Y = set([3, 4])
|
||||
>>> degX, degY = bipartite.degrees(G, Y)
|
||||
>>> dict(degX)
|
||||
{0: 2, 1: 2, 2: 2}
|
||||
|
||||
Notes
|
||||
-----
|
||||
The container of nodes passed as argument must contain all nodes
|
||||
in one of the two bipartite node sets to avoid ambiguity in the
|
||||
case of disconnected graphs.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
color, density
|
||||
"""
|
||||
bottom = set(nodes)
|
||||
top = set(B) - bottom
|
||||
return (B.degree(top, weight), B.degree(bottom, weight))
|
||||
@@ -0,0 +1,290 @@
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["degree_centrality", "betweenness_centrality", "closeness_centrality"]
|
||||
|
||||
|
||||
@nx._dispatchable(name="bipartite_degree_centrality")
|
||||
def degree_centrality(G, nodes):
|
||||
r"""Compute the degree centrality for nodes in a bipartite network.
|
||||
|
||||
The degree centrality for a node `v` is the fraction of nodes
|
||||
connected to it.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite network
|
||||
|
||||
nodes : list or container
|
||||
Container with all nodes in one bipartite node set.
|
||||
|
||||
Returns
|
||||
-------
|
||||
centrality : dictionary
|
||||
Dictionary keyed by node with bipartite degree centrality as the value.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.wheel_graph(5)
|
||||
>>> top_nodes = {0, 1, 2}
|
||||
>>> nx.bipartite.degree_centrality(G, nodes=top_nodes)
|
||||
{0: 2.0, 1: 1.5, 2: 1.5, 3: 1.0, 4: 1.0}
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
closeness_centrality
|
||||
:func:`~networkx.algorithms.bipartite.basic.sets`
|
||||
:func:`~networkx.algorithms.bipartite.basic.is_bipartite`
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes input parameter must contain all nodes in one bipartite node set,
|
||||
but the dictionary returned contains all nodes from both bipartite node
|
||||
sets. See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
For unipartite networks, the degree centrality values are
|
||||
normalized by dividing by the maximum possible degree (which is
|
||||
`n-1` where `n` is the number of nodes in G).
|
||||
|
||||
In the bipartite case, the maximum possible degree of a node in a
|
||||
bipartite node set is the number of nodes in the opposite node set
|
||||
[1]_. The degree centrality for a node `v` in the bipartite
|
||||
sets `U` with `n` nodes and `V` with `m` nodes is
|
||||
|
||||
.. math::
|
||||
|
||||
d_{v} = \frac{deg(v)}{m}, \mbox{ for } v \in U ,

d_{v} = \frac{deg(v)}{n}, \mbox{ for } v \in V ,
|
||||
|
||||
|
||||
where `deg(v)` is the degree of node `v`.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
||||
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
https://dx.doi.org/10.4135/9781446294413.n28
|
||||
"""
|
||||
top = set(nodes)
|
||||
bottom = set(G) - top
|
||||
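# Degrees of top-set nodes are normalized by the size of the bottom set,
# and vice versa (the maximum possible degree across the bipartition).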
s = 1.0 / len(bottom)
|
||||
centrality = {n: d * s for n, d in G.degree(top)}
|
||||
s = 1.0 / len(top)
|
||||
centrality.update({n: d * s for n, d in G.degree(bottom)})
|
||||
return centrality
|
||||
|
||||
|
||||
@nx._dispatchable(name="bipartite_betweenness_centrality")
|
||||
def betweenness_centrality(G, nodes):
|
||||
r"""Compute betweenness centrality for nodes in a bipartite network.
|
||||
|
||||
Betweenness centrality of a node `v` is the sum of the
|
||||
fraction of all-pairs shortest paths that pass through `v`.
|
||||
|
||||
Values of betweenness are normalized by the maximum possible
|
||||
value which for bipartite graphs is limited by the relative size
|
||||
of the two node sets [1]_.
|
||||
|
||||
Let `n` be the number of nodes in the node set `U` and
|
||||
`m` be the number of nodes in the node set `V`, then
|
||||
nodes in `U` are normalized by dividing by
|
||||
|
||||
.. math::
|
||||
|
||||
\frac{1}{2} [m^2 (s + 1)^2 + m (s + 1)(2t - s - 1) - t (2s - t + 3)] ,
|
||||
|
||||
where
|
||||
|
||||
.. math::
|
||||
|
||||
s = (n - 1) \div m , t = (n - 1) \mod m ,
|
||||
|
||||
and nodes in `V` are normalized by dividing by
|
||||
|
||||
.. math::
|
||||
|
||||
\frac{1}{2} [n^2 (p + 1)^2 + n (p + 1)(2r - p - 1) - r (2p - r + 3)] ,
|
||||
|
||||
where,
|
||||
|
||||
.. math::
|
||||
|
||||
p = (m - 1) \div n , r = (m - 1) \mod n .
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite graph
|
||||
|
||||
nodes : list or container
|
||||
Container with all nodes in one bipartite node set.
|
||||
|
||||
Returns
|
||||
-------
|
||||
betweenness : dictionary
|
||||
Dictionary keyed by node with bipartite betweenness centrality
|
||||
as the value.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.cycle_graph(4)
|
||||
>>> top_nodes = {1, 2}
|
||||
>>> nx.bipartite.betweenness_centrality(G, nodes=top_nodes)
|
||||
{0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
|
||||
|
||||
See Also
|
||||
--------
|
||||
degree_centrality
|
||||
closeness_centrality
|
||||
:func:`~networkx.algorithms.bipartite.basic.sets`
|
||||
:func:`~networkx.algorithms.bipartite.basic.is_bipartite`
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes input parameter must contain all nodes in one bipartite node set,
|
||||
but the dictionary returned contains all nodes from both node sets.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
||||
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
https://dx.doi.org/10.4135/9781446294413.n28
|
||||
"""
|
||||
top = set(nodes)
|
||||
bottom = set(G) - top
|
||||
n = len(top)
|
||||
m = len(bottom)
|
||||
s, t = divmod(n - 1, m)
|
||||
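# Maximum possible betweenness for nodes in the top set; this is the
# normalization constant given in the docstring.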
bet_max_top = (
|
||||
((m**2) * ((s + 1) ** 2))
|
||||
+ (m * (s + 1) * (2 * t - s - 1))
|
||||
- (t * ((2 * s) - t + 3))
|
||||
) / 2.0
|
||||
p, r = divmod(m - 1, n)
|
||||
bet_max_bot = (
|
||||
((n**2) * ((p + 1) ** 2))
|
||||
+ (n * (p + 1) * (2 * r - p - 1))
|
||||
- (r * ((2 * p) - r + 3))
|
||||
) / 2.0
|
||||
betweenness = nx.betweenness_centrality(G, normalized=False, weight=None)
|
||||
for node in top:
|
||||
betweenness[node] /= bet_max_top
|
||||
for node in bottom:
|
||||
betweenness[node] /= bet_max_bot
|
||||
return betweenness
|
||||
|
||||
|
||||
@nx._dispatchable(name="bipartite_closeness_centrality")
|
||||
def closeness_centrality(G, nodes, normalized=True):
|
||||
r"""Compute the closeness centrality for nodes in a bipartite network.
|
||||
|
||||
The closeness centrality of a node is based on its distances to all other
nodes in the graph or, if the graph is not connected, to all other nodes in
the connected component containing that node.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite network
|
||||
|
||||
nodes : list or container
|
||||
Container with all nodes in one bipartite node set.
|
||||
|
||||
normalized : bool, optional
|
||||
If True (default) normalize by connected component size.
|
||||
|
||||
Returns
|
||||
-------
|
||||
closeness : dictionary
|
||||
Dictionary keyed by node with bipartite closeness centrality
|
||||
as the value.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.wheel_graph(5)
|
||||
>>> top_nodes = {0, 1, 2}
|
||||
>>> nx.bipartite.closeness_centrality(G, nodes=top_nodes)
|
||||
{0: 1.5, 1: 1.2, 2: 1.2, 3: 1.0, 4: 1.0}
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
degree_centrality
|
||||
:func:`~networkx.algorithms.bipartite.basic.sets`
|
||||
:func:`~networkx.algorithms.bipartite.basic.is_bipartite`
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes input parameter must contain all nodes in one bipartite node set,
|
||||
but the dictionary returned contains all nodes from both node sets.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
|
||||
Closeness centrality is normalized by the minimum distance possible.
|
||||
In the bipartite case the minimum distance for a node in one bipartite
|
||||
node set is 1 from all nodes in the other node set and 2 from all
|
||||
other nodes in its own set [1]_. Thus the closeness centrality
|
||||
for node `v` in the two bipartite sets `U` with
|
||||
`n` nodes and `V` with `m` nodes is
|
||||
|
||||
.. math::
|
||||
|
||||
c_{v} = \frac{m + 2(n - 1)}{d}, \mbox{ for } v \in U,

c_{v} = \frac{n + 2(m - 1)}{d}, \mbox{ for } v \in V,
|
||||
|
||||
where `d` is the sum of the distances from `v` to all
|
||||
other nodes.
|
||||
|
||||
Higher values of closeness indicate higher centrality.
|
||||
|
||||
As in the unipartite case, setting normalized=True causes the values
to be normalized further by (n - 1) / (|G| - 1), where n is the number of
nodes in the connected part of the graph containing the node. If the graph
is not connected, this algorithm computes the closeness centrality for each
connected part separately.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
||||
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
https://dx.doi.org/10.4135/9781446294413.n28
|
||||
"""
|
||||
closeness = {}
|
||||
path_length = nx.single_source_shortest_path_length
|
||||
top = set(nodes)
|
||||
bottom = set(G) - top
|
||||
n = len(top)
|
||||
m = len(bottom)
|
||||
for node in top:
|
||||
sp = dict(path_length(G, node))
|
||||
totsp = sum(sp.values())
|
||||
if totsp > 0.0 and len(G) > 1:
|
||||
closeness[node] = (m + 2 * (n - 1)) / totsp
|
||||
if normalized:
|
||||
s = (len(sp) - 1) / (len(G) - 1)
|
||||
closeness[node] *= s
|
||||
else:
|
||||
closeness[node] = 0.0
|
||||
for node in bottom:
|
||||
sp = dict(path_length(G, node))
|
||||
totsp = sum(sp.values())
|
||||
if totsp > 0.0 and len(G) > 1:
|
||||
closeness[node] = (n + 2 * (m - 1)) / totsp
|
||||
if normalized:
|
||||
s = (len(sp) - 1) / (len(G) - 1)
|
||||
closeness[node] *= s
|
||||
else:
|
||||
closeness[node] = 0.0
|
||||
return closeness
|
||||
@@ -0,0 +1,278 @@
|
||||
"""Functions for computing clustering of pairs"""
|
||||
|
||||
import itertools
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = [
|
||||
"clustering",
|
||||
"average_clustering",
|
||||
"latapy_clustering",
|
||||
"robins_alexander_clustering",
|
||||
]
|
||||
|
||||
|
||||
def cc_dot(nu, nv):
|
||||
return len(nu & nv) / len(nu | nv)
|
||||
|
||||
|
||||
def cc_max(nu, nv):
|
||||
return len(nu & nv) / max(len(nu), len(nv))
|
||||
|
||||
|
||||
def cc_min(nu, nv):
|
||||
return len(nu & nv) / min(len(nu), len(nv))
|
||||
|
||||
|
||||
modes = {"dot": cc_dot, "min": cc_min, "max": cc_max}
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def latapy_clustering(G, nodes=None, mode="dot"):
|
||||
r"""Compute a bipartite clustering coefficient for nodes.
|
||||
|
||||
The bipartite clustering coefficient is a measure of local density
|
||||
of connections defined as [1]_:
|
||||
|
||||
.. math::
|
||||
|
||||
c_u = \frac{\sum_{v \in N(N(u))} c_{uv} }{|N(N(u))|}
|
||||
|
||||
where `N(N(u))` are the second order neighbors of `u` in `G` excluding `u`,
|
||||
and `c_{uv}` is the pairwise clustering coefficient between nodes
|
||||
`u` and `v`.
|
||||
|
||||
The mode selects the function for `c_{uv}` which can be:
|
||||
|
||||
`dot`:
|
||||
|
||||
.. math::
|
||||
|
||||
c_{uv}=\frac{|N(u)\cap N(v)|}{|N(u) \cup N(v)|}
|
||||
|
||||
`min`:
|
||||
|
||||
.. math::
|
||||
|
||||
c_{uv}=\frac{|N(u)\cap N(v)|}{min(|N(u)|,|N(v)|)}
|
||||
|
||||
`max`:
|
||||
|
||||
.. math::
|
||||
|
||||
c_{uv}=\frac{|N(u)\cap N(v)|}{max(|N(u)|,|N(v)|)}
|
||||
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite graph
|
||||
|
||||
nodes : list or iterable (optional)
|
||||
Compute bipartite clustering for these nodes. The default
|
||||
is all nodes in G.
|
||||
|
||||
mode : string
|
||||
The pairwise bipartite clustering method to be used in the computation.
|
||||
It must be "dot", "max", or "min".
|
||||
|
||||
Returns
|
||||
-------
|
||||
clustering : dictionary
|
||||
A dictionary keyed by node with the clustering coefficient value.
|
||||
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4) # path graphs are bipartite
|
||||
>>> c = bipartite.clustering(G)
|
||||
>>> c[0]
|
||||
0.5
|
||||
>>> c = bipartite.clustering(G, mode="min")
|
||||
>>> c[0]
|
||||
1.0
|
||||
|
||||
See Also
|
||||
--------
|
||||
robins_alexander_clustering
|
||||
average_clustering
|
||||
networkx.algorithms.cluster.square_clustering
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
|
||||
Basic notions for the analysis of large two-mode networks.
|
||||
Social Networks 30(1), 31--48.
|
||||
"""
|
||||
if not nx.algorithms.bipartite.is_bipartite(G):
|
||||
raise nx.NetworkXError("Graph is not bipartite")
|
||||
|
||||
try:
|
||||
cc_func = modes[mode]
|
||||
except KeyError as err:
|
||||
raise nx.NetworkXError(
|
||||
"Mode for bipartite clustering must be: dot, min or max"
|
||||
) from err
|
||||
|
||||
if nodes is None:
|
||||
nodes = G
|
||||
ccs = {}
|
||||
for v in nodes:
|
||||
cc = 0.0
|
||||
nbrs2 = {u for nbr in G[v] for u in G[nbr]} - {v}
|
||||
for u in nbrs2:
|
||||
cc += cc_func(set(G[u]), set(G[v]))
|
||||
if cc > 0.0: # len(nbrs2)>0
|
||||
cc /= len(nbrs2)
|
||||
ccs[v] = cc
|
||||
return ccs
|
||||
|
||||
|
||||
clustering = latapy_clustering
|
||||
|
||||
|
||||
@nx._dispatchable(name="bipartite_average_clustering")
|
||||
def average_clustering(G, nodes=None, mode="dot"):
|
||||
r"""Compute the average bipartite clustering coefficient.
|
||||
|
||||
A clustering coefficient for the whole graph is the average,
|
||||
|
||||
.. math::
|
||||
|
||||
C = \frac{1}{n}\sum_{v \in G} c_v,
|
||||
|
||||
where `n` is the number of nodes in `G`.
|
||||
|
||||
Similar measures for the two bipartite sets can be defined [1]_
|
||||
|
||||
.. math::
|
||||
|
||||
C_X = \frac{1}{|X|}\sum_{v \in X} c_v,
|
||||
|
||||
where `X` is a bipartite set of `G`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
a bipartite graph
|
||||
|
||||
nodes : list or iterable, optional
|
||||
A container of nodes to use in computing the average.
|
||||
The nodes should be either the entire graph (the default) or one of the
|
||||
bipartite sets.
|
||||
|
||||
mode : string
|
||||
The pairwise bipartite clustering method.
|
||||
It must be "dot", "max", or "min"
|
||||
|
||||
Returns
|
||||
-------
|
||||
clustering : float
|
||||
The average bipartite clustering for the given set of nodes or the
|
||||
entire graph if no nodes are specified.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.star_graph(3) # star graphs are bipartite
|
||||
>>> bipartite.average_clustering(G)
|
||||
0.75
|
||||
>>> X, Y = bipartite.sets(G)
|
||||
>>> bipartite.average_clustering(G, X)
|
||||
0.0
|
||||
>>> bipartite.average_clustering(G, Y)
|
||||
1.0
|
||||
|
||||
See Also
|
||||
--------
|
||||
clustering
|
||||
|
||||
Notes
|
||||
-----
|
||||
The container of nodes passed to this function must contain all of the nodes
|
||||
in one of the bipartite sets ("top" or "bottom") in order to compute
|
||||
the correct average bipartite clustering coefficients.
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
|
||||
Basic notions for the analysis of large two-mode networks.
|
||||
Social Networks 30(1), 31--48.
|
||||
"""
|
||||
if nodes is None:
|
||||
nodes = G
|
||||
ccs = latapy_clustering(G, nodes=nodes, mode=mode)
|
||||
return sum(ccs[v] for v in nodes) / len(nodes)
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def robins_alexander_clustering(G):
|
||||
r"""Compute the bipartite clustering of G.
|
||||
|
||||
Robins and Alexander [1]_ defined bipartite clustering coefficient as
|
||||
four times the number of four cycles `C_4` divided by the number of
|
||||
three paths `L_3` in a bipartite graph:
|
||||
|
||||
.. math::
|
||||
|
||||
CC_4 = \frac{4 * C_4}{L_3}
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
a bipartite graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
clustering : float
|
||||
The Robins and Alexander bipartite clustering for the input graph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.davis_southern_women_graph()
|
||||
>>> print(round(bipartite.robins_alexander_clustering(G), 3))
|
||||
0.468
|
||||
|
||||
See Also
|
||||
--------
|
||||
latapy_clustering
|
||||
networkx.algorithms.cluster.square_clustering
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Robins, G. and M. Alexander (2004). Small worlds among interlocking
|
||||
directors: Network structure and distance in bipartite graphs.
|
||||
Computational & Mathematical Organization Theory 10(1), 69–94.
|
||||
|
||||
"""
|
||||
if G.order() < 4 or G.size() < 3:
|
||||
return 0
|
||||
L_3 = _threepaths(G)
|
||||
if L_3 == 0:
|
||||
return 0
|
||||
C_4 = _four_cycles(G)
|
||||
return (4.0 * C_4) / L_3
|
||||
|
||||
|
||||
def _four_cycles(G):
|
||||
cycles = 0
|
||||
for v in G:
|
||||
for u, w in itertools.combinations(G[v], 2):
|
||||
cycles += len((set(G[u]) & set(G[w])) - {v})
|
||||
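# Each four-cycle is counted once from each of its four vertices, so divide by 4.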
return cycles / 4
|
||||
|
||||
|
||||
def _threepaths(G):
|
||||
paths = 0
|
||||
for v in G:
|
||||
for u in G[v]:
|
||||
for w in set(G[u]) - {v}:
|
||||
paths += len(set(G[w]) - {v, u})
|
||||
# Divide by two because we count each three path twice
|
||||
# one for each possible starting point
|
||||
return paths / 2
|
||||
@@ -0,0 +1,57 @@
|
||||
"""Functions related to graph covers."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.bipartite.matching import hopcroft_karp_matching
|
||||
from networkx.algorithms.covering import min_edge_cover as _min_edge_cover
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = ["min_edge_cover"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(name="bipartite_min_edge_cover")
|
||||
def min_edge_cover(G, matching_algorithm=None):
|
||||
"""Returns a set of edges which constitutes
|
||||
the minimum edge cover of the graph.
|
||||
|
||||
The smallest edge cover can be found in polynomial time by finding
|
||||
a maximum matching and extending it greedily so that all nodes
|
||||
are covered.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
An undirected bipartite graph.
|
||||
|
||||
matching_algorithm : function
|
||||
A function that returns a maximum cardinality matching in a
|
||||
given bipartite graph. The function must take one input, the
|
||||
graph ``G``, and return a dictionary mapping each node to its
|
||||
mate. If not specified,
|
||||
:func:`~networkx.algorithms.bipartite.matching.hopcroft_karp_matching`
|
||||
will be used. Other possibilities include
|
||||
:func:`~networkx.algorithms.bipartite.matching.eppstein_matching`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
set
|
||||
A set of the edges in a minimum edge cover of the graph, given as
|
||||
pairs of nodes. It contains both the edges `(u, v)` and `(v, u)`
|
||||
for given nodes `u` and `v` among the edges of minimum edge cover.
|
||||
|
||||
Notes
|
||||
-----
|
||||
An edge cover of a graph is a set of edges such that every node of
|
||||
the graph is incident to at least one edge of the set.
|
||||
A minimum edge cover is an edge covering of smallest cardinality.
|
||||
|
||||
Due to its implementation, the worst-case running time of this algorithm
|
||||
is bounded by the worst-case running time of the function
|
||||
``matching_algorithm``.
|
||||
"""
|
||||
if G.order() == 0: # Special case for the empty graph
|
||||
return set()
|
||||
if matching_algorithm is None:
|
||||
matching_algorithm = hopcroft_karp_matching
|
||||
return _min_edge_cover(G, matching_algorithm=matching_algorithm)
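# Usage sketch (a hedged example, assuming the hopcroft_karp_matching default
# shown above): every node of the graph is incident to at least one edge of the
# returned cover.
#
#     >>> from networkx.algorithms import bipartite
#     >>> G = nx.complete_bipartite_graph(2, 3)
#     >>> cover = bipartite.min_edge_cover(G)
#     >>> all(any(n in edge for edge in cover) for n in G)
#     True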
|
||||
+360
@@ -0,0 +1,360 @@
|
||||
"""
|
||||
********************
|
||||
Bipartite Edge Lists
|
||||
********************
|
||||
Read and write NetworkX graphs as bipartite edge lists.
|
||||
|
||||
Format
|
||||
------
|
||||
You can read or write three formats of edge lists with these functions.
|
||||
|
||||
Node pairs with no data::
|
||||
|
||||
1 2
|
||||
|
||||
Python dictionary as data::
|
||||
|
||||
1 2 {'weight':7, 'color':'green'}
|
||||
|
||||
Arbitrary data::
|
||||
|
||||
1 2 7 green
|
||||
|
||||
For each edge (u, v) the node u is assigned to part 0 and the node v to part 1.
|
||||
"""
|
||||
|
||||
__all__ = ["generate_edgelist", "write_edgelist", "parse_edgelist", "read_edgelist"]
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for, open_file
|
||||
|
||||
|
||||
@open_file(1, mode="wb")
|
||||
def write_edgelist(G, path, comments="#", delimiter=" ", data=True, encoding="utf-8"):
|
||||
"""Write a bipartite graph as a list of edges.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : Graph
|
||||
A NetworkX bipartite graph
|
||||
path : file or string
|
||||
File or filename to write. If a file is provided, it must be
|
||||
opened in 'wb' mode. Filenames ending in .gz or .bz2 will be compressed.
|
||||
comments : string, optional
|
||||
The character used to indicate the start of a comment
|
||||
delimiter : string, optional
|
||||
The string used to separate values. The default is whitespace.
|
||||
data : bool or list, optional
|
||||
If False write no edge data.
|
||||
If True write a string representation of the edge data dictionary.
|
||||
If a list (or other iterable) is provided, write the keys specified
|
||||
in the list.
|
||||
encoding: string, optional
|
||||
Specify which encoding to use when writing file.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.add_nodes_from([0, 2], bipartite=0)
|
||||
>>> G.add_nodes_from([1, 3], bipartite=1)
|
||||
>>> nx.write_edgelist(G, "test.edgelist")
|
||||
>>> fh = open("test.edgelist", "wb")
|
||||
>>> nx.write_edgelist(G, fh)
|
||||
>>> nx.write_edgelist(G, "test.edgelist.gz")
|
||||
>>> nx.write_edgelist(G, "test.edgelist.gz", data=False)
|
||||
|
||||
>>> G = nx.Graph()
|
||||
>>> G.add_edge(1, 2, weight=7, color="red")
|
||||
>>> nx.write_edgelist(G, "test.edgelist", data=False)
|
||||
>>> nx.write_edgelist(G, "test.edgelist", data=["color"])
|
||||
>>> nx.write_edgelist(G, "test.edgelist", data=["color", "weight"])
|
||||
|
||||
See Also
|
||||
--------
|
||||
write_edgelist
|
||||
generate_edgelist
|
||||
"""
|
||||
for line in generate_edgelist(G, delimiter, data):
|
||||
line += "\n"
|
||||
path.write(line.encode(encoding))
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
def generate_edgelist(G, delimiter=" ", data=True):
|
||||
"""Generate a single line of the bipartite graph G in edge list format.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
The graph is assumed to have node attribute `bipartite` set to 0,1 representing
|
||||
the two graph parts
|
||||
|
||||
delimiter : string, optional
|
||||
Separator for node labels
|
||||
|
||||
data : bool or list of keys
|
||||
If False generate no edge data. If True use a dictionary
|
||||
representation of edge data. If a list of keys use a list of data
|
||||
values corresponding to the keys.
|
||||
|
||||
Returns
|
||||
-------
|
||||
lines : string
|
||||
Lines of data in edgelist format.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.add_nodes_from([0, 2], bipartite=0)
|
||||
>>> G.add_nodes_from([1, 3], bipartite=1)
|
||||
>>> G[1][2]["weight"] = 3
|
||||
>>> G[2][3]["capacity"] = 12
|
||||
>>> for line in bipartite.generate_edgelist(G, data=False):
|
||||
... print(line)
|
||||
0 1
|
||||
2 1
|
||||
2 3
|
||||
|
||||
>>> for line in bipartite.generate_edgelist(G):
|
||||
... print(line)
|
||||
0 1 {}
|
||||
2 1 {'weight': 3}
|
||||
2 3 {'capacity': 12}
|
||||
|
||||
>>> for line in bipartite.generate_edgelist(G, data=["weight"]):
|
||||
... print(line)
|
||||
0 1
|
||||
2 1 3
|
||||
2 3
|
||||
"""
|
||||
try:
|
||||
part0 = [n for n, d in G.nodes.items() if d["bipartite"] == 0]
|
||||
except BaseException as err:
|
||||
raise AttributeError("Missing node attribute `bipartite`") from err
|
||||
if data is True or data is False:
|
||||
for n in part0:
|
||||
for edge in G.edges(n, data=data):
|
||||
yield delimiter.join(map(str, edge))
|
||||
else:
|
||||
for n in part0:
|
||||
for u, v, d in G.edges(n, data=True):
|
||||
edge = [u, v]
|
||||
try:
|
||||
edge.extend(d[k] for k in data)
|
||||
except KeyError:
|
||||
pass # missing data for this edge, should warn?
|
||||
yield delimiter.join(map(str, edge))
|
||||
|
||||
|
||||
@nx._dispatchable(name="bipartite_parse_edgelist", graphs=None, returns_graph=True)
|
||||
def parse_edgelist(
|
||||
lines, comments="#", delimiter=None, create_using=None, nodetype=None, data=True
|
||||
):
|
||||
"""Parse lines of an edge list representation of a bipartite graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
lines : list or iterator of strings
|
||||
Input data in edgelist format
|
||||
comments : string, optional
|
||||
Marker for comment lines
|
||||
delimiter : string, optional
|
||||
Separator for node labels
|
||||
create_using: NetworkX graph container, optional
|
||||
Use given NetworkX graph for holding nodes or edges.
|
||||
nodetype : Python type, optional
|
||||
Convert nodes to this type.
|
||||
data : bool or list of (label,type) tuples
|
||||
If False generate no edge data or if True use a dictionary
|
||||
representation of edge data or a list tuples specifying dictionary
|
||||
key names and types for edge data.
|
||||
|
||||
Returns
|
||||
-------
|
||||
G: NetworkX Graph
|
||||
The bipartite graph corresponding to lines
|
||||
|
||||
Examples
|
||||
--------
|
||||
Edgelist with no data:
|
||||
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> lines = ["1 2", "2 3", "3 4"]
|
||||
>>> G = bipartite.parse_edgelist(lines, nodetype=int)
|
||||
>>> sorted(G.nodes())
|
||||
[1, 2, 3, 4]
|
||||
>>> sorted(G.nodes(data=True))
|
||||
[(1, {'bipartite': 0}), (2, {'bipartite': 0}), (3, {'bipartite': 0}), (4, {'bipartite': 1})]
|
||||
>>> sorted(G.edges())
|
||||
[(1, 2), (2, 3), (3, 4)]
|
||||
|
||||
Edgelist with data in Python dictionary representation:
|
||||
|
||||
>>> lines = ["1 2 {'weight':3}", "2 3 {'weight':27}", "3 4 {'weight':3.0}"]
|
||||
>>> G = bipartite.parse_edgelist(lines, nodetype=int)
|
||||
>>> sorted(G.nodes())
|
||||
[1, 2, 3, 4]
|
||||
>>> sorted(G.edges(data=True))
|
||||
[(1, 2, {'weight': 3}), (2, 3, {'weight': 27}), (3, 4, {'weight': 3.0})]
|
||||
|
||||
Edgelist with data in a list:
|
||||
|
||||
>>> lines = ["1 2 3", "2 3 27", "3 4 3.0"]
|
||||
>>> G = bipartite.parse_edgelist(lines, nodetype=int, data=(("weight", float),))
|
||||
>>> sorted(G.nodes())
|
||||
[1, 2, 3, 4]
|
||||
>>> sorted(G.edges(data=True))
|
||||
[(1, 2, {'weight': 3.0}), (2, 3, {'weight': 27.0}), (3, 4, {'weight': 3.0})]
|
||||
|
||||
See Also
|
||||
--------
|
||||
"""
|
||||
from ast import literal_eval
|
||||
|
||||
G = nx.empty_graph(0, create_using)
|
||||
for line in lines:
|
||||
p = line.find(comments)
|
||||
if p >= 0:
|
||||
line = line[:p]
|
||||
if not len(line):
|
||||
continue
|
||||
# split line, should have 2 or more
|
||||
s = line.rstrip("\n").split(delimiter)
|
||||
if len(s) < 2:
|
||||
continue
|
||||
u = s.pop(0)
|
||||
v = s.pop(0)
|
||||
d = s
|
||||
if nodetype is not None:
|
||||
try:
|
||||
u = nodetype(u)
|
||||
v = nodetype(v)
|
||||
except BaseException as err:
|
||||
raise TypeError(
|
||||
f"Failed to convert nodes {u},{v} to type {nodetype}."
|
||||
) from err
|
||||
|
||||
if len(d) == 0 or data is False:
|
||||
# no data or data type specified
|
||||
edgedata = {}
|
||||
elif data is True:
|
||||
# no edge types specified
|
||||
try: # try to evaluate as dictionary
|
||||
edgedata = dict(literal_eval(" ".join(d)))
|
||||
except BaseException as err:
|
||||
raise TypeError(
|
||||
f"Failed to convert edge data ({d}) to dictionary."
|
||||
) from err
|
||||
else:
|
||||
# convert edge data to dictionary with specified keys and type
|
||||
if len(d) != len(data):
|
||||
raise IndexError(
|
||||
f"Edge data {d} and data_keys {data} are not the same length"
|
||||
)
|
||||
edgedata = {}
|
||||
for (edge_key, edge_type), edge_value in zip(data, d):
|
||||
try:
|
||||
edge_value = edge_type(edge_value)
|
||||
except BaseException as err:
|
||||
raise TypeError(
|
||||
f"Failed to convert {edge_key} data "
|
||||
f"{edge_value} to type {edge_type}."
|
||||
) from err
|
||||
edgedata.update({edge_key: edge_value})
|
||||
G.add_node(u, bipartite=0)
|
||||
G.add_node(v, bipartite=1)
|
||||
G.add_edge(u, v, **edgedata)
|
||||
return G
|
||||
|
||||
|
||||
@open_file(0, mode="rb")
|
||||
@nx._dispatchable(name="bipartite_read_edgelist", graphs=None, returns_graph=True)
|
||||
def read_edgelist(
|
||||
path,
|
||||
comments="#",
|
||||
delimiter=None,
|
||||
create_using=None,
|
||||
nodetype=None,
|
||||
data=True,
|
||||
edgetype=None,
|
||||
encoding="utf-8",
|
||||
):
|
||||
"""Read a bipartite graph from a list of edges.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
path : file or string
|
||||
File or filename to read. If a file is provided, it must be
|
||||
opened in 'rb' mode.
|
||||
Filenames ending in .gz or .bz2 will be uncompressed.
|
||||
comments : string, optional
|
||||
The character used to indicate the start of a comment.
|
||||
delimiter : string, optional
|
||||
The string used to separate values. The default is whitespace.
|
||||
create_using : Graph container, optional
|
||||
Use specified container to build graph. The default is networkx.Graph,
|
||||
an undirected graph.
|
||||
nodetype : int, float, str, Python type, optional
|
||||
Convert node data from strings to specified type
|
||||
data : bool or list of (label,type) tuples
|
||||
Tuples specifying dictionary key names and types for edge data
|
||||
edgetype : int, float, str, Python type, optional OBSOLETE
|
||||
Convert edge data from strings to specified type and use as 'weight'
|
||||
encoding: string, optional
|
||||
Specify which encoding to use when reading file.
|
||||
|
||||
Returns
|
||||
-------
|
||||
G : graph
|
||||
A networkx Graph or other type specified with create_using
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> G.add_nodes_from([0, 2], bipartite=0)
|
||||
>>> G.add_nodes_from([1, 3], bipartite=1)
|
||||
>>> bipartite.write_edgelist(G, "test.edgelist")
|
||||
>>> G = bipartite.read_edgelist("test.edgelist")
|
||||
|
||||
>>> fh = open("test.edgelist", "rb")
|
||||
>>> G = bipartite.read_edgelist(fh)
|
||||
>>> fh.close()
|
||||
|
||||
>>> G = bipartite.read_edgelist("test.edgelist", nodetype=int)
|
||||
|
||||
Edgelist with data in a list:
|
||||
|
||||
>>> textline = "1 2 3"
|
||||
>>> fh = open("test.edgelist", "w")
|
||||
>>> d = fh.write(textline)
|
||||
>>> fh.close()
|
||||
>>> G = bipartite.read_edgelist(
|
||||
... "test.edgelist", nodetype=int, data=(("weight", float),)
|
||||
... )
|
||||
>>> list(G)
|
||||
[1, 2]
|
||||
>>> list(G.edges(data=True))
|
||||
[(1, 2, {'weight': 3.0})]
|
||||
|
||||
See parse_edgelist() for more examples of formatting.
|
||||
|
||||
See Also
|
||||
--------
|
||||
parse_edgelist
|
||||
|
||||
Notes
|
||||
-----
|
||||
Since nodes must be hashable, the function nodetype must return hashable
|
||||
types (e.g. int, float, str, frozenset - or tuples of those, etc.)
|
||||
"""
|
||||
lines = (line.decode(encoding) for line in path)
|
||||
return parse_edgelist(
|
||||
lines,
|
||||
comments=comments,
|
||||
delimiter=delimiter,
|
||||
create_using=create_using,
|
||||
nodetype=nodetype,
|
||||
data=data,
|
||||
)
|
||||
+105
@@ -0,0 +1,105 @@
|
||||
"""Provides a function for computing the extendability of a graph which is
|
||||
undirected, simple, connected and bipartite and contains at least one perfect matching."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = ["maximal_extendability"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def maximal_extendability(G):
|
||||
"""Computes the extendability of a graph.
|
||||
|
||||
The extendability of a graph is defined as the maximum $k$ for which `G`
|
||||
is $k$-extendable. Graph `G` is $k$-extendable if and only if `G` has a
|
||||
perfect matching and every set of $k$ independent edges can be extended
|
||||
to a perfect matching in `G`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX Graph
|
||||
A simple, connected bipartite graph without self-loops
|
||||
|
||||
Returns
|
||||
-------
|
||||
extendability : int
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If the graph `G` is disconnected.
|
||||
If the graph `G` is not bipartite.
|
||||
If the graph `G` does not contain a perfect matching.
|
||||
If the residual graph of `G` is not strongly connected.
|
||||
|
||||
Notes
|
||||
-----
|
||||
Definition:
|
||||
Let `G` be a simple, connected, undirected and bipartite graph with a perfect
|
||||
matching M and bipartition (U,V). The residual graph of `G`, denoted by $G_M$,
|
||||
is the graph obtained from G by directing the edges of M from V to U and the
|
||||
edges that do not belong to M from U to V.
|
||||
|
||||
Lemma [1]_ :
|
||||
Let M be a perfect matching of `G`. `G` is $k$-extendable if and only if its residual
|
||||
graph $G_M$ is strongly connected and there are $k$ vertex-disjoint directed
|
||||
paths between every vertex of U and every vertex of V.
|
||||
|
||||
Assuming that input graph `G` is undirected, simple, connected, bipartite and contains
|
||||
a perfect matching M, this function constructs the residual graph $G_M$ of G and
|
||||
returns the minimum value among the maximum vertex-disjoint directed paths between
|
||||
every vertex of U and every vertex of V in $G_M$. By combining the definitions
|
||||
and the lemma, this value represents the extendability of the graph `G`.
|
||||
|
||||
Time complexity is $O(n^3 m^2)$ where $n$ is the number of vertices
|
||||
and $m$ is the number of edges.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] "A polynomial algorithm for the extendability problem in bipartite graphs",
|
||||
J. Lakhal, L. Litzler, Information Processing Letters, 1998.
|
||||
.. [2] "On n-extendible graphs", M. D. Plummer, Discrete Mathematics, 31:201–210, 1980
|
||||
https://doi.org/10.1016/0012-365X(80)90037-0
|
||||
|
||||
"""
|
||||
if not nx.is_connected(G):
|
||||
raise nx.NetworkXError("Graph G is not connected")
|
||||
|
||||
if not nx.bipartite.is_bipartite(G):
|
||||
raise nx.NetworkXError("Graph G is not bipartite")
|
||||
|
||||
U, V = nx.bipartite.sets(G)
|
||||
|
||||
maximum_matching = nx.bipartite.hopcroft_karp_matching(G)
|
||||
|
||||
if not nx.is_perfect_matching(G, maximum_matching):
|
||||
raise nx.NetworkXError("Graph G does not contain a perfect matching")
|
||||
|
||||
# list of edges in perfect matching, directed from V to U
|
||||
pm = [(node, maximum_matching[node]) for node in V & maximum_matching.keys()]
|
||||
|
||||
# Direct all the edges of G, from V to U if in matching, else from U to V
|
||||
directed_edges = [
|
||||
(x, y) if (x in V and (x, y) in pm) or (x in U and (y, x) not in pm) else (y, x)
|
||||
for x, y in G.edges
|
||||
]
|
||||
|
||||
# Construct the residual graph of G
|
||||
residual_G = nx.DiGraph()
|
||||
residual_G.add_nodes_from(G)
|
||||
residual_G.add_edges_from(directed_edges)
|
||||
|
||||
if not nx.is_strongly_connected(residual_G):
|
||||
raise nx.NetworkXError("The residual graph of G is not strongly connected")
|
||||
|
||||
# For node-pairs between V & U, keep min of max number of node-disjoint paths
|
||||
# Variable $k$ stands for the extendability of graph G
|
||||
k = float("inf")
|
||||
for u in U:
|
||||
for v in V:
|
||||
num_paths = sum(1 for _ in nx.node_disjoint_paths(residual_G, u, v))
|
||||
k = k if k < num_paths else num_paths
|
||||
return k
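# Usage sketch (a hedged example, assuming the nx.bipartite.maximal_extendability
# entry point exported above): K_{3,3} has a perfect matching and every pair of
# independent edges extends to one, so its extendability should be 2.
#
#     >>> nx.bipartite.maximal_extendability(nx.complete_bipartite_graph(3, 3))
#     2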
|
||||
+604
@@ -0,0 +1,604 @@
|
||||
"""
|
||||
Generators and functions for bipartite graphs.
|
||||
"""
|
||||
|
||||
import math
|
||||
import numbers
|
||||
from functools import reduce
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import nodes_or_number, py_random_state
|
||||
|
||||
__all__ = [
|
||||
"configuration_model",
|
||||
"havel_hakimi_graph",
|
||||
"reverse_havel_hakimi_graph",
|
||||
"alternating_havel_hakimi_graph",
|
||||
"preferential_attachment_graph",
|
||||
"random_graph",
|
||||
"gnmk_random_graph",
|
||||
"complete_bipartite_graph",
|
||||
]
|
||||
|
||||
|
||||
@nx._dispatchable(graphs=None, returns_graph=True)
|
||||
@nodes_or_number([0, 1])
|
||||
def complete_bipartite_graph(n1, n2, create_using=None):
|
||||
"""Returns the complete bipartite graph `K_{n_1,n_2}`.
|
||||
|
||||
The graph is composed of two partitions with nodes 0 to (n1 - 1)
|
||||
in the first and nodes n1 to (n1 + n2 - 1) in the second.
|
||||
Each node in the first is connected to each node in the second.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n1, n2 : integer or iterable container of nodes
|
||||
If integers, nodes are from `range(n1)` and `range(n1, n1 + n2)`.
|
||||
If a container, the elements are the nodes.
|
||||
create_using : NetworkX graph instance, (default: nx.Graph)
|
||||
Return graph of this type.
|
||||
|
||||
Notes
|
||||
-----
|
||||
Nodes are the integers 0 to `n1 + n2 - 1` unless either n1 or n2 are
|
||||
containers of nodes. If only one of n1 or n2 is an integer, that
|
||||
integer is replaced by `range` of that integer.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.complete_bipartite_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
n1, top = n1
|
||||
n2, bottom = n2
|
||||
if isinstance(n1, numbers.Integral) and isinstance(n2, numbers.Integral):
|
||||
bottom = [n1 + i for i in bottom]
|
||||
G.add_nodes_from(top, bipartite=0)
|
||||
G.add_nodes_from(bottom, bipartite=1)
|
||||
if len(G) != len(top) + len(bottom):
|
||||
raise nx.NetworkXError("Inputs n1 and n2 must contain distinct nodes")
|
||||
G.add_edges_from((u, v) for u in top for v in bottom)
|
||||
G.graph["name"] = f"complete_bipartite_graph({len(top)}, {len(bottom)})"
|
||||
return G
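# Usage sketch (assuming the nx.bipartite.complete_bipartite_graph entry point
# mentioned in the docstring):
#
#     >>> G = nx.bipartite.complete_bipartite_graph(2, 3)
#     >>> sorted(G.edges())
#     [(0, 2), (0, 3), (0, 4), (1, 2), (1, 3), (1, 4)]
#     >>> nx.get_node_attributes(G, "bipartite")
#     {0: 0, 1: 0, 2: 1, 3: 1, 4: 1}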
|
||||
|
||||
|
||||
@py_random_state(3)
|
||||
@nx._dispatchable(name="bipartite_configuration_model", graphs=None, returns_graph=True)
|
||||
def configuration_model(aseq, bseq, create_using=None, seed=None):
|
||||
"""Returns a random bipartite graph from two given degree sequences.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
bseq : list
|
||||
Degree sequence for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
|
||||
Nodes from set A are connected to nodes in set B by choosing
|
||||
randomly from the possible free stubs, one in A and one in B.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
|
||||
If no graph type is specified use MultiGraph with parallel edges.
|
||||
If you want a graph with no parallel edges use create_using=Graph()
|
||||
but then the resulting degree sequences might not be exact.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.configuration_model
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
# length and sum of each sequence
|
||||
lena = len(aseq)
|
||||
lenb = len(bseq)
|
||||
suma = sum(aseq)
|
||||
sumb = sum(bseq)
|
||||
|
||||
if not suma == sumb:
|
||||
raise nx.NetworkXError(
|
||||
f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
|
||||
)
|
||||
|
||||
G = _add_nodes_with_bipartite_label(G, lena, lenb)
|
||||
|
||||
if len(aseq) == 0 or max(aseq) == 0:
|
||||
return G # done if no edges
|
||||
|
||||
# build lists of degree-repeated vertex numbers
|
||||
stubs = [[v] * aseq[v] for v in range(lena)]
|
||||
astubs = [x for subseq in stubs for x in subseq]
|
||||
|
||||
stubs = [[v] * bseq[v - lena] for v in range(lena, lena + lenb)]
|
||||
bstubs = [x for subseq in stubs for x in subseq]
|
||||
|
||||
# shuffle lists
|
||||
seed.shuffle(astubs)
|
||||
seed.shuffle(bstubs)
|
||||
|
||||
G.add_edges_from([astubs[i], bstubs[i]] for i in range(suma))
|
||||
|
||||
G.name = "bipartite_configuration_model"
|
||||
return G
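# Usage sketch (assuming nx.bipartite.configuration_model as noted above). Since
# the default container is a MultiGraph, both degree sequences are realized
# exactly, whatever the seed:
#
#     >>> G = nx.bipartite.configuration_model([2, 2], [1, 1, 2], seed=42)
#     >>> sorted(d for n, d in G.degree())
#     [1, 1, 2, 2, 2]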
|
||||
|
||||
|
||||
@nx._dispatchable(name="bipartite_havel_hakimi_graph", graphs=None, returns_graph=True)
|
||||
def havel_hakimi_graph(aseq, bseq, create_using=None):
|
||||
"""Returns a bipartite graph from two given degree sequences using a
|
||||
Havel-Hakimi style construction.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
|
||||
Nodes from the set A are connected to nodes in the set B by
|
||||
connecting the highest degree nodes in set A to the highest degree
|
||||
nodes in set B until all stubs are connected.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
bseq : list
|
||||
Degree sequence for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
|
||||
If no graph type is specified use MultiGraph with parallel edges.
|
||||
If you want a graph with no parallel edges use create_using=Graph()
|
||||
but then the resulting degree sequences might not be exact.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.havel_hakimi_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
# length of the each sequence
|
||||
naseq = len(aseq)
|
||||
nbseq = len(bseq)
|
||||
|
||||
suma = sum(aseq)
|
||||
sumb = sum(bseq)
|
||||
|
||||
if not suma == sumb:
|
||||
raise nx.NetworkXError(
|
||||
f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
|
||||
)
|
||||
|
||||
G = _add_nodes_with_bipartite_label(G, naseq, nbseq)
|
||||
|
||||
if len(aseq) == 0 or max(aseq) == 0:
|
||||
return G # done if no edges
|
||||
|
||||
# build list of degree-repeated vertex numbers
|
||||
astubs = [[aseq[v], v] for v in range(naseq)]
|
||||
bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
|
||||
astubs.sort()
|
||||
while astubs:
|
||||
(degree, u) = astubs.pop()  # take off the largest degree node in the a set
|
||||
if degree == 0:
|
||||
break # done, all are zero
|
||||
# connect the source to largest degree nodes in the b set
|
||||
bstubs.sort()
|
||||
for target in bstubs[-degree:]:
|
||||
v = target[1]
|
||||
G.add_edge(u, v)
|
||||
target[0] -= 1 # note this updates bstubs too.
|
||||
if target[0] == 0:
|
||||
bstubs.remove(target)
|
||||
|
||||
G.name = "bipartite_havel_hakimi_graph"
|
||||
return G
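# Usage sketch (assuming nx.bipartite.havel_hakimi_graph as noted above): for
# this small pair of sequences the construction realizes both exactly.
#
#     >>> G = nx.bipartite.havel_hakimi_graph([2, 2], [2, 1, 1])
#     >>> sorted(d for n, d in G.degree())
#     [1, 1, 2, 2, 2]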
|
||||
|
||||
|
||||
@nx._dispatchable(graphs=None, returns_graph=True)
|
||||
def reverse_havel_hakimi_graph(aseq, bseq, create_using=None):
|
||||
"""Returns a bipartite graph from two given degree sequences using a
|
||||
Havel-Hakimi style construction.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
|
||||
Nodes from set A are connected to nodes in the set B by connecting
|
||||
the highest degree nodes in set A to the lowest degree nodes in
|
||||
set B until all stubs are connected.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
bseq : list
|
||||
Degree sequence for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
|
||||
If no graph type is specified use MultiGraph with parallel edges.
|
||||
If you want a graph with no parallel edges use create_using=Graph()
|
||||
but then the resulting degree sequences might not be exact.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.reverse_havel_hakimi_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
# length of the each sequence
|
||||
lena = len(aseq)
|
||||
lenb = len(bseq)
|
||||
suma = sum(aseq)
|
||||
sumb = sum(bseq)
|
||||
|
||||
if not suma == sumb:
|
||||
raise nx.NetworkXError(
|
||||
f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
|
||||
)
|
||||
|
||||
G = _add_nodes_with_bipartite_label(G, lena, lenb)
|
||||
|
||||
if len(aseq) == 0 or max(aseq) == 0:
|
||||
return G # done if no edges
|
||||
|
||||
# build list of degree-repeated vertex numbers
|
||||
astubs = [[aseq[v], v] for v in range(lena)]
|
||||
bstubs = [[bseq[v - lena], v] for v in range(lena, lena + lenb)]
|
||||
astubs.sort()
|
||||
bstubs.sort()
|
||||
while astubs:
|
||||
(degree, u) = astubs.pop()  # take off the largest degree node in the a set
|
||||
if degree == 0:
|
||||
break # done, all are zero
|
||||
# connect the source to the smallest degree nodes in the b set
|
||||
for target in bstubs[0:degree]:
|
||||
v = target[1]
|
||||
G.add_edge(u, v)
|
||||
target[0] -= 1 # note this updates bstubs too.
|
||||
if target[0] == 0:
|
||||
bstubs.remove(target)
|
||||
|
||||
G.name = "bipartite_reverse_havel_hakimi_graph"
|
||||
return G
|
||||
|
||||
|
||||
@nx._dispatchable(graphs=None, returns_graph=True)
|
||||
def alternating_havel_hakimi_graph(aseq, bseq, create_using=None):
|
||||
"""Returns a bipartite graph from two given degree sequences using
|
||||
an alternating Havel-Hakimi style construction.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes len(aseq) to (len(aseq) + len(bseq) - 1).
|
||||
Nodes from the set A are connected to nodes in the set B by
|
||||
connecting the highest degree nodes in set A to alternatively the
|
||||
highest and the lowest degree nodes in set B until all stubs are
|
||||
connected.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
bseq : list
|
||||
Degree sequence for node set B.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The sum of the two sequences must be equal: sum(aseq)=sum(bseq)
|
||||
If no graph type is specified use MultiGraph with parallel edges.
|
||||
If you want a graph with no parallel edges use create_using=Graph()
|
||||
but then the resulting degree sequences might not be exact.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.alternating_havel_hakimi_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
# length of the each sequence
|
||||
naseq = len(aseq)
|
||||
nbseq = len(bseq)
|
||||
suma = sum(aseq)
|
||||
sumb = sum(bseq)
|
||||
|
||||
if not suma == sumb:
|
||||
raise nx.NetworkXError(
|
||||
f"invalid degree sequences, sum(aseq)!=sum(bseq),{suma},{sumb}"
|
||||
)
|
||||
|
||||
G = _add_nodes_with_bipartite_label(G, naseq, nbseq)
|
||||
|
||||
if len(aseq) == 0 or max(aseq) == 0:
|
||||
return G # done if no edges
|
||||
# build list of degree-repeated vertex numbers
|
||||
astubs = [[aseq[v], v] for v in range(naseq)]
|
||||
bstubs = [[bseq[v - naseq], v] for v in range(naseq, naseq + nbseq)]
|
||||
while astubs:
|
||||
astubs.sort()
|
||||
(degree, u) = astubs.pop()  # take off the largest degree node in the a set
|
||||
if degree == 0:
|
||||
break # done, all are zero
|
||||
bstubs.sort()
|
||||
small = bstubs[0 : degree // 2] # add these low degree targets
|
||||
large = bstubs[(-degree + degree // 2) :] # now high degree targets
|
||||
stubs = [x for z in zip(large, small) for x in z] # combine, sorry
|
||||
if len(stubs) < len(small) + len(large): # check for zip truncation
|
||||
stubs.append(large.pop())
|
||||
for target in stubs:
|
||||
v = target[1]
|
||||
G.add_edge(u, v)
|
||||
target[0] -= 1 # note this updates bstubs too.
|
||||
if target[0] == 0:
|
||||
bstubs.remove(target)
|
||||
|
||||
G.name = "bipartite_alternating_havel_hakimi_graph"
|
||||
return G
|
||||
|
||||
|
||||
@py_random_state(3)
|
||||
@nx._dispatchable(graphs=None, returns_graph=True)
|
||||
def preferential_attachment_graph(aseq, p, create_using=None, seed=None):
|
||||
"""Create a bipartite graph with a preferential attachment model from
|
||||
a given single degree sequence.
|
||||
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(len(aseq) - 1) and set B has nodes starting with node len(aseq).
|
||||
The number of nodes in set B is random.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
aseq : list
|
||||
Degree sequence for node set A.
|
||||
p : float
|
||||
Probability that a new bottom node is added.
|
||||
create_using : NetworkX graph instance, optional
|
||||
Return graph of this type.
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Guillaume, J.L. and Latapy, M.,
|
||||
Bipartite graphs as models of complex networks.
|
||||
Physica A: Statistical Mechanics and its Applications,
|
||||
2006, 371(2), pp.795-813.
|
||||
.. [2] Jean-Loup Guillaume and Matthieu Latapy,
|
||||
Bipartite structure of all complex networks,
|
||||
Inf. Process. Lett. 90, 2004, pg. 215-221
|
||||
https://doi.org/10.1016/j.ipl.2004.03.007
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.preferential_attachment_graph
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using, default=nx.MultiGraph)
|
||||
if G.is_directed():
|
||||
raise nx.NetworkXError("Directed Graph not supported")
|
||||
|
||||
if p > 1:
|
||||
raise nx.NetworkXError(f"probability {p} > 1")
|
||||
|
||||
naseq = len(aseq)
|
||||
G = _add_nodes_with_bipartite_label(G, naseq, 0)
|
||||
vv = [[v] * aseq[v] for v in range(naseq)]
|
||||
while vv:
|
||||
while vv[0]:
|
||||
source = vv[0][0]
|
||||
vv[0].remove(source)
|
||||
if seed.random() < p or len(G) == naseq:
|
||||
target = len(G)
|
||||
G.add_node(target, bipartite=1)
|
||||
G.add_edge(source, target)
|
||||
else:
|
||||
bb = [[b] * G.degree(b) for b in range(naseq, len(G))]
|
||||
# flatten the list of lists into a list.
|
||||
bbstubs = reduce(lambda x, y: x + y, bb)
|
||||
# choose preferentially a bottom node.
|
||||
target = seed.choice(bbstubs)
|
||||
G.add_node(target, bipartite=1)
|
||||
G.add_edge(source, target)
|
||||
vv.remove(vv[0])
|
||||
G.name = "bipartite_preferential_attachment_model"
|
||||
return G
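# Usage sketch (assuming nx.bipartite.preferential_attachment_graph as noted
# above). Each top node receives exactly the requested number of edge endpoints,
# while the size of the bottom set depends on `p` and the seed:
#
#     >>> G = nx.bipartite.preferential_attachment_graph([2, 2, 1], 0.5, seed=7)
#     >>> sorted(d for n, d in G.degree() if G.nodes[n]["bipartite"] == 0)
#     [1, 2, 2]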
|
||||
|
||||
|
||||
@py_random_state(3)
|
||||
@nx._dispatchable(graphs=None, returns_graph=True)
|
||||
def random_graph(n, m, p, seed=None, directed=False):
|
||||
"""Returns a bipartite random graph.
|
||||
|
||||
This is a bipartite version of the binomial (Erdős-Rényi) graph.
|
||||
The graph is composed of two partitions. Set A has nodes 0 to
|
||||
(n - 1) and set B has nodes n to (n + m - 1).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n : int
|
||||
The number of nodes in the first bipartite set.
|
||||
m : int
|
||||
The number of nodes in the second bipartite set.
|
||||
p : float
|
||||
Probability for edge creation.
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
directed : bool, optional (default=False)
|
||||
If True return a directed graph
|
||||
|
||||
Notes
|
||||
-----
|
||||
The bipartite random graph algorithm chooses each of the n*m (undirected)
|
||||
or 2*n*m (directed) possible edges with probability p.
|
||||
|
||||
This algorithm is $O(n+m)$ where $m$ is the expected number of edges.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.random_graph
|
||||
|
||||
See Also
|
||||
--------
|
||||
gnp_random_graph, configuration_model
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Vladimir Batagelj and Ulrik Brandes,
|
||||
"Efficient generation of large random networks",
|
||||
Phys. Rev. E, 71, 036113, 2005.
|
||||
"""
|
||||
G = nx.Graph()
|
||||
G = _add_nodes_with_bipartite_label(G, n, m)
|
||||
if directed:
|
||||
G = nx.DiGraph(G)
|
||||
G.name = f"fast_gnp_random_graph({n},{m},{p})"
|
||||
|
||||
if p <= 0:
|
||||
return G
|
||||
if p >= 1:
|
||||
return nx.complete_bipartite_graph(n, m)
|
||||
|
||||
lp = math.log(1.0 - p)
|
||||
|
||||
v = 0
|
||||
w = -1
|
||||
while v < n:
|
||||
lr = math.log(1.0 - seed.random())
|
||||
w = w + 1 + int(lr / lp)
|
||||
while w >= m and v < n:
|
||||
w = w - m
|
||||
v = v + 1
|
||||
if v < n:
|
||||
G.add_edge(v, n + w)
|
||||
|
||||
if directed:
|
||||
# use the same algorithm to
|
||||
# add edges from the "m" to "n" set
|
||||
v = 0
|
||||
w = -1
|
||||
while v < n:
|
||||
lr = math.log(1.0 - seed.random())
|
||||
w = w + 1 + int(lr / lp)
|
||||
while w >= m and v < n:
|
||||
w = w - m
|
||||
v = v + 1
|
||||
if v < n:
|
||||
G.add_edge(n + w, v)
|
||||
|
||||
return G
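# Usage sketch (assuming nx.bipartite.random_graph as noted above):
#
#     >>> G = nx.bipartite.random_graph(3, 4, 0.5, seed=1)
#     >>> len(G)
#     7
#     >>> all(d["bipartite"] in (0, 1) for _, d in G.nodes(data=True))
#     True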
|
||||
|
||||
|
||||
@py_random_state(3)
|
||||
@nx._dispatchable(graphs=None, returns_graph=True)
|
||||
def gnmk_random_graph(n, m, k, seed=None, directed=False):
|
||||
"""Returns a random bipartite graph G_{n,m,k}.
|
||||
|
||||
Produces a bipartite graph chosen randomly out of the set of all graphs
|
||||
with n top nodes, m bottom nodes, and k edges.
|
||||
The graph is composed of two sets of nodes.
|
||||
Set A has nodes 0 to (n - 1) and set B has nodes n to (n + m - 1).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
n : int
|
||||
The number of nodes in the first bipartite set.
|
||||
m : int
|
||||
The number of nodes in the second bipartite set.
|
||||
k : int
|
||||
The number of edges
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
directed : bool, optional (default=False)
|
||||
If True return a directed graph
|
||||
|
||||
Examples
|
||||
--------
|
||||
from networkx.algorithms import bipartite
|
||||
G = bipartite.gnmk_random_graph(10, 20, 50)
|
||||
|
||||
See Also
|
||||
--------
|
||||
gnm_random_graph
|
||||
|
||||
Notes
|
||||
-----
|
||||
If k > m * n then a complete bipartite graph is returned.
|
||||
|
||||
This graph is a bipartite version of the `G_{nm}` random graph model.
|
||||
|
||||
The nodes are assigned the attribute 'bipartite' with the value 0 or 1
|
||||
to indicate which bipartite set the node belongs to.
|
||||
|
||||
This function is not imported in the main namespace.
|
||||
To use it use nx.bipartite.gnmk_random_graph
|
||||
"""
|
||||
G = nx.Graph()
|
||||
G = _add_nodes_with_bipartite_label(G, n, m)
|
||||
if directed:
|
||||
G = nx.DiGraph(G)
|
||||
G.name = f"bipartite_gnm_random_graph({n},{m},{k})"
|
||||
if n == 1 or m == 1:
|
||||
return G
|
||||
max_edges = n * m # max_edges for bipartite networks
|
||||
if k >= max_edges: # Maybe we should raise an exception here
|
||||
return nx.complete_bipartite_graph(n, m, create_using=G)
|
||||
|
||||
top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
|
||||
bottom = list(set(G) - set(top))
|
||||
edge_count = 0
|
||||
while edge_count < k:
|
||||
# generate random edge,u,v
|
||||
u = seed.choice(top)
|
||||
v = seed.choice(bottom)
|
||||
if v in G[u]:
|
||||
continue
|
||||
else:
|
||||
G.add_edge(u, v)
|
||||
edge_count += 1
|
||||
return G
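# Usage sketch (assuming nx.bipartite.gnmk_random_graph as noted above): the
# graph always has n + m nodes and, for k <= n * m, exactly k edges.
#
#     >>> G = nx.bipartite.gnmk_random_graph(4, 5, 7, seed=3)
#     >>> len(G), G.number_of_edges()
#     (9, 7)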
|
||||
|
||||
|
||||
def _add_nodes_with_bipartite_label(G, lena, lenb):
|
||||
G.add_nodes_from(range(lena + lenb))
|
||||
b = dict(zip(range(lena), [0] * lena))
|
||||
b.update(dict(zip(range(lena, lena + lenb), [1] * lenb)))
|
||||
nx.set_node_attributes(G, b, "bipartite")
|
||||
return G
|
||||
+590
@@ -0,0 +1,590 @@
|
||||
# This module uses material from the Wikipedia article Hopcroft--Karp algorithm
|
||||
# <https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>, accessed on
|
||||
# January 3, 2015, which is released under the Creative Commons
|
||||
# Attribution-Share-Alike License 3.0
|
||||
# <http://creativecommons.org/licenses/by-sa/3.0/>. That article includes
|
||||
# pseudocode, which has been translated into the corresponding Python code.
|
||||
#
|
||||
# Portions of this module use code from David Eppstein's Python Algorithms and
|
||||
# Data Structures (PADS) library, which is dedicated to the public domain (for
|
||||
# proof, see <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>).
|
||||
"""Provides functions for computing maximum cardinality matchings and minimum
|
||||
weight full matchings in a bipartite graph.
|
||||
|
||||
If you don't care about the particular implementation of the maximum matching
|
||||
algorithm, simply use :func:`maximum_matching`. If you do care, you can
|
||||
import one of the named maximum matching algorithms directly.
|
||||
|
||||
For example, to find a maximum matching in the complete bipartite graph with
|
||||
two vertices on the left and three vertices on the right:
|
||||
|
||||
>>> G = nx.complete_bipartite_graph(2, 3)
|
||||
>>> left, right = nx.bipartite.sets(G)
|
||||
>>> list(left)
|
||||
[0, 1]
|
||||
>>> list(right)
|
||||
[2, 3, 4]
|
||||
>>> nx.bipartite.maximum_matching(G)
|
||||
{0: 2, 1: 3, 2: 0, 3: 1}
|
||||
|
||||
The dictionary returned by :func:`maximum_matching` includes a mapping for
|
||||
vertices in both the left and right vertex sets.
|
||||
|
||||
Similarly, :func:`minimum_weight_full_matching` produces, for a complete
|
||||
weighted bipartite graph, a matching whose cardinality is the cardinality of
|
||||
the smaller of the two partitions, and for which the sum of the weights of the
|
||||
edges included in the matching is minimal.
|
||||
|
||||
"""
|
||||
|
||||
import collections
|
||||
import itertools
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.bipartite import sets as bipartite_sets
|
||||
from networkx.algorithms.bipartite.matrix import biadjacency_matrix
|
||||
|
||||
__all__ = [
|
||||
"maximum_matching",
|
||||
"hopcroft_karp_matching",
|
||||
"eppstein_matching",
|
||||
"to_vertex_cover",
|
||||
"minimum_weight_full_matching",
|
||||
]
|
||||
|
||||
INFINITY = float("inf")
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def hopcroft_karp_matching(G, top_nodes=None):
|
||||
"""Returns the maximum cardinality matching of the bipartite graph `G`.
|
||||
|
||||
A matching is a set of edges that do not share any nodes. A maximum
|
||||
cardinality matching is a matching with the most edges possible. It
|
||||
is not always unique. Finding a matching in a bipartite graph can be
|
||||
treated as a networkx flow problem.
|
||||
|
||||
The functions ``hopcroft_karp_matching`` and ``maximum_matching``
|
||||
are aliases of the same function.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Undirected bipartite graph
|
||||
|
||||
top_nodes : container of nodes
|
||||
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed. But if more than one solution exists an exception
|
||||
will be raised.
|
||||
|
||||
Returns
|
||||
-------
|
||||
matches : dictionary
|
||||
|
||||
The matching is returned as a dictionary, `matches`, such that
|
||||
``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
|
||||
nodes do not occur as a key in `matches`.
|
||||
|
||||
Raises
|
||||
------
|
||||
AmbiguousSolution
|
||||
Raised if the input bipartite graph is disconnected and no container
|
||||
with all nodes in one bipartite set is provided. When determining
|
||||
the nodes in each bipartite set more than one valid solution is
|
||||
possible if the input graph is disconnected.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This function is implemented with the `Hopcroft--Karp matching algorithm
|
||||
<https://en.wikipedia.org/wiki/Hopcroft%E2%80%93Karp_algorithm>`_ for
|
||||
bipartite graphs.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
maximum_matching
|
||||
hopcroft_karp_matching
|
||||
eppstein_matching
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] John E. Hopcroft and Richard M. Karp. "An n^{5 / 2} Algorithm for
|
||||
Maximum Matchings in Bipartite Graphs" In: **SIAM Journal of Computing**
|
||||
2.4 (1973), pp. 225--231. <https://doi.org/10.1137/0202019>.
|
||||
|
||||
"""
|
||||
|
||||
# First we define some auxiliary search functions.
|
||||
#
|
||||
# If you are a human reading these auxiliary search functions, the "global"
|
||||
# variables `leftmatches`, `rightmatches`, `distances`, etc. are defined
|
||||
# below the functions, so that they are initialized close to the initial
|
||||
# invocation of the search functions.
|
||||
def breadth_first_search():
|
||||
for v in left:
|
||||
if leftmatches[v] is None:
|
||||
distances[v] = 0
|
||||
queue.append(v)
|
||||
else:
|
||||
distances[v] = INFINITY
|
||||
distances[None] = INFINITY
|
||||
while queue:
|
||||
v = queue.popleft()
|
||||
if distances[v] < distances[None]:
|
||||
for u in G[v]:
|
||||
if distances[rightmatches[u]] is INFINITY:
|
||||
distances[rightmatches[u]] = distances[v] + 1
|
||||
queue.append(rightmatches[u])
|
||||
return distances[None] is not INFINITY
|
||||
|
||||
def depth_first_search(v):
|
||||
if v is not None:
|
||||
for u in G[v]:
|
||||
if distances[rightmatches[u]] == distances[v] + 1:
|
||||
if depth_first_search(rightmatches[u]):
|
||||
rightmatches[u] = v
|
||||
leftmatches[v] = u
|
||||
return True
|
||||
distances[v] = INFINITY
|
||||
return False
|
||||
return True
|
||||
|
||||
# Initialize the "global" variables that maintain state during the search.
|
||||
left, right = bipartite_sets(G, top_nodes)
|
||||
leftmatches = {v: None for v in left}
|
||||
rightmatches = {v: None for v in right}
|
||||
distances = {}
|
||||
queue = collections.deque()
|
||||
|
||||
# Implementation note: this counter is incremented as pairs are matched but
|
||||
# it is currently not used elsewhere in the computation.
|
||||
num_matched_pairs = 0
|
||||
while breadth_first_search():
|
||||
for v in left:
|
||||
if leftmatches[v] is None:
|
||||
if depth_first_search(v):
|
||||
num_matched_pairs += 1
|
||||
|
||||
# Strip the entries matched to `None`.
|
||||
leftmatches = {k: v for k, v in leftmatches.items() if v is not None}
|
||||
rightmatches = {k: v for k, v in rightmatches.items() if v is not None}
|
||||
|
||||
# At this point, the left matches and the right matches are inverses of one
|
||||
# another. In other words,
|
||||
#
|
||||
# leftmatches == {v: k for k, v in rightmatches.items()}
|
||||
#
|
||||
# Finally, we combine both the left matches and right matches.
|
||||
return dict(itertools.chain(leftmatches.items(), rightmatches.items()))
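# Usage sketch (assuming nx.bipartite.hopcroft_karp_matching as exported above):
# the path 0-1-2-3 has the unique maximum matching {0-1, 2-3}, and both
# endpoints of every matched edge appear as keys.
#
#     >>> G = nx.path_graph(4)
#     >>> matching = nx.bipartite.hopcroft_karp_matching(G, top_nodes=[0, 2])
#     >>> matching[0], matching[2], len(matching)
#     (1, 3, 4)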
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def eppstein_matching(G, top_nodes=None):
|
||||
"""Returns the maximum cardinality matching of the bipartite graph `G`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Undirected bipartite graph
|
||||
|
||||
top_nodes : container
|
||||
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed. But if more than one solution exists an exception
|
||||
will be raised.
|
||||
|
||||
Returns
|
||||
-------
|
||||
matches : dictionary
|
||||
|
||||
The matching is returned as a dictionary, `matching`, such that
|
||||
``matching[v] == w`` if node `v` is matched to node `w`. Unmatched
|
||||
nodes do not occur as a key in `matching`.
|
||||
|
||||
Raises
|
||||
------
|
||||
AmbiguousSolution
|
||||
Raised if the input bipartite graph is disconnected and no container
|
||||
with all nodes in one bipartite set is provided. When determining
|
||||
the nodes in each bipartite set more than one valid solution is
|
||||
possible if the input graph is disconnected.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This function is implemented with David Eppstein's version of the
|
||||
Hopcroft--Karp algorithm (see :func:`hopcroft_karp_matching`), which
|
||||
originally appeared in the `Python Algorithms and Data Structures library
|
||||
(PADS) <http://www.ics.uci.edu/~eppstein/PADS/ABOUT-PADS.txt>`_.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
|
||||
hopcroft_karp_matching
|
||||
|
||||
"""
|
||||
# Due to its original implementation, a directed graph is needed
|
||||
# so that the two sets of bipartite nodes can be distinguished
|
||||
left, right = bipartite_sets(G, top_nodes)
|
||||
G = nx.DiGraph(G.edges(left))
|
||||
# initialize greedy matching (redundant, but faster than full search)
|
||||
matching = {}
|
||||
for u in G:
|
||||
for v in G[u]:
|
||||
if v not in matching:
|
||||
matching[v] = u
|
||||
break
|
||||
while True:
|
||||
# structure residual graph into layers
|
||||
# pred[u] gives the neighbor in the previous layer for u in U
|
||||
# preds[v] gives a list of neighbors in the previous layer for v in V
|
||||
# unmatched gives a list of unmatched vertices in final layer of V,
|
||||
# and is also used as a flag value for pred[u] when u is in the first
|
||||
# layer
|
||||
preds = {}
|
||||
unmatched = []
|
||||
pred = {u: unmatched for u in G}
|
||||
for v in matching:
|
||||
del pred[matching[v]]
|
||||
layer = list(pred)
|
||||
|
||||
# repeatedly extend layering structure by another pair of layers
|
||||
while layer and not unmatched:
|
||||
newLayer = {}
|
||||
for u in layer:
|
||||
for v in G[u]:
|
||||
if v not in preds:
|
||||
newLayer.setdefault(v, []).append(u)
|
||||
layer = []
|
||||
for v in newLayer:
|
||||
preds[v] = newLayer[v]
|
||||
if v in matching:
|
||||
layer.append(matching[v])
|
||||
pred[matching[v]] = v
|
||||
else:
|
||||
unmatched.append(v)
|
||||
|
||||
# did we finish layering without finding any alternating paths?
|
||||
if not unmatched:
|
||||
# TODO - The lines between --- were unused and were thus commented
|
||||
# out. This whole commented chunk should be reviewed to determine
|
||||
# whether it should be built upon or completely removed.
|
||||
# ---
|
||||
# unlayered = {}
|
||||
# for u in G:
|
||||
# # TODO Why is extra inner loop necessary?
|
||||
# for v in G[u]:
|
||||
# if v not in preds:
|
||||
# unlayered[v] = None
|
||||
# ---
|
||||
# TODO Originally, this function returned a three-tuple:
|
||||
#
|
||||
# return (matching, list(pred), list(unlayered))
|
||||
#
|
||||
# For some reason, the documentation for this function
|
||||
# indicated that the second and third elements of the returned
|
||||
# three-tuple would be the vertices in the left and right vertex
|
||||
# sets, respectively, that are also in the maximum independent set.
|
||||
# However, what I think the author meant was that the second
|
||||
# element is the list of vertices that were unmatched and the third
|
||||
# element was the list of vertices that were matched. Since that
|
||||
# seems to be the case, they don't really need to be returned,
|
||||
# since that information can be inferred from the matching
|
||||
# dictionary.
|
||||
|
||||
# All the matched nodes must be a key in the dictionary
|
||||
for key in matching.copy():
|
||||
matching[matching[key]] = key
|
||||
return matching
|
||||
|
||||
# recursively search backward through layers to find alternating paths
|
||||
# recursion returns true if found path, false otherwise
|
||||
def recurse(v):
|
||||
if v in preds:
|
||||
L = preds.pop(v)
|
||||
for u in L:
|
||||
if u in pred:
|
||||
pu = pred.pop(u)
|
||||
if pu is unmatched or recurse(pu):
|
||||
matching[v] = u
|
||||
return True
|
||||
return False
|
||||
|
||||
for v in unmatched:
|
||||
recurse(v)
|
||||
|
||||
|
||||
def _is_connected_by_alternating_path(G, v, matched_edges, unmatched_edges, targets):
|
||||
"""Returns True if and only if the vertex `v` is connected to one of
|
||||
the target vertices by an alternating path in `G`.
|
||||
|
||||
An *alternating path* is a path in which every other edge is in the
|
||||
specified maximum matching (and the remaining edges in the path are not in
|
||||
the matching). An alternating path may have matched edges in the even
|
||||
positions or in the odd positions, as long as the edges alternate between
|
||||
'matched' and 'unmatched'.
|
||||
|
||||
`G` is an undirected bipartite NetworkX graph.
|
||||
|
||||
`v` is a vertex in `G`.
|
||||
|
||||
`matched_edges` is a set of edges present in a maximum matching in `G`.
|
||||
|
||||
`unmatched_edges` is a set of edges not present in a maximum
|
||||
matching in `G`.
|
||||
|
||||
`targets` is a set of vertices.
|
||||
|
||||
"""
|
||||
|
||||
def _alternating_dfs(u, along_matched=True):
|
||||
"""Returns True if and only if `u` is connected to one of the
|
||||
targets by an alternating path.
|
||||
|
||||
`u` is a vertex in the graph `G`.
|
||||
|
||||
If `along_matched` is True, this step of the depth-first search
|
||||
will continue only through edges in the given matching. Otherwise, it
|
||||
will continue only through edges *not* in the given matching.
|
||||
|
||||
"""
|
||||
visited = set()
|
||||
# Follow matched edges when depth is even,
|
||||
# and follow unmatched edges when depth is odd.
|
||||
initial_depth = 0 if along_matched else 1
|
||||
stack = [(u, iter(G[u]), initial_depth)]
|
||||
while stack:
|
||||
parent, children, depth = stack[-1]
|
||||
valid_edges = matched_edges if depth % 2 else unmatched_edges
|
||||
try:
|
||||
child = next(children)
|
||||
if child not in visited:
|
||||
if (parent, child) in valid_edges or (child, parent) in valid_edges:
|
||||
if child in targets:
|
||||
return True
|
||||
visited.add(child)
|
||||
stack.append((child, iter(G[child]), depth + 1))
|
||||
except StopIteration:
|
||||
stack.pop()
|
||||
return False
|
||||
|
||||
# Check for alternating paths starting with edges in the matching, then
|
||||
# check for alternating paths starting with edges not in the
|
||||
# matching.
|
||||
return _alternating_dfs(v, along_matched=True) or _alternating_dfs(
|
||||
v, along_matched=False
|
||||
)
|
||||
|
||||
|
||||
def _connected_by_alternating_paths(G, matching, targets):
|
||||
"""Returns the set of vertices that are connected to one of the target
|
||||
vertices by an alternating path in `G` or are themselves a target.
|
||||
|
||||
An *alternating path* is a path in which every other edge is in the
|
||||
specified maximum matching (and the remaining edges in the path are not in
|
||||
the matching). An alternating path may have matched edges in the even
|
||||
positions or in the odd positions, as long as the edges alternate between
|
||||
'matched' and 'unmatched'.
|
||||
|
||||
`G` is an undirected bipartite NetworkX graph.
|
||||
|
||||
`matching` is a dictionary representing a maximum matching in `G`, as
|
||||
returned by, for example, :func:`maximum_matching`.
|
||||
|
||||
`targets` is a set of vertices.
|
||||
|
||||
"""
|
||||
# Get the set of matched edges and the set of unmatched edges. Only include
|
||||
# one version of each undirected edge (for example, include edge (1, 2) but
|
||||
# not edge (2, 1)). Using frozensets as an intermediary step we do not
|
||||
# require nodes to be orderable.
|
||||
edge_sets = {frozenset((u, v)) for u, v in matching.items()}
|
||||
matched_edges = {tuple(edge) for edge in edge_sets}
|
||||
unmatched_edges = {
|
||||
(u, v) for (u, v) in G.edges() if frozenset((u, v)) not in edge_sets
|
||||
}
|
||||
|
||||
return {
|
||||
v
|
||||
for v in G
|
||||
if v in targets
|
||||
or _is_connected_by_alternating_path(
|
||||
G, v, matched_edges, unmatched_edges, targets
|
||||
)
|
||||
}
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def to_vertex_cover(G, matching, top_nodes=None):
|
||||
"""Returns the minimum vertex cover corresponding to the given maximum
|
||||
matching of the bipartite graph `G`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Undirected bipartite graph
|
||||
|
||||
matching : dictionary
|
||||
|
||||
A dictionary whose keys are vertices in `G` and whose values are the
|
||||
distinct neighbors comprising the maximum matching for `G`, as returned
|
||||
by, for example, :func:`maximum_matching`. The dictionary *must*
|
||||
represent the maximum matching.
|
||||
|
||||
top_nodes : container
|
||||
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed. But if more than one solution exists an exception
|
||||
will be raised.
|
||||
|
||||
Returns
|
||||
-------
|
||||
vertex_cover : :class:`set`
|
||||
|
||||
The minimum vertex cover in `G`.
|
||||
|
||||
Raises
|
||||
------
|
||||
AmbiguousSolution
|
||||
Raised if the input bipartite graph is disconnected and no container
|
||||
with all nodes in one bipartite set is provided. When determining
|
||||
the nodes in each bipartite set more than one valid solution is
|
||||
possible if the input graph is disconnected.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This function is implemented using the procedure guaranteed by `Konig's
|
||||
theorem
|
||||
<https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29>`_,
|
||||
which proves an equivalence between a maximum matching and a minimum vertex
|
||||
cover in bipartite graphs.
|
||||
|
||||
Since a minimum vertex cover is the complement of a maximum independent set
|
||||
for any graph, one can compute the maximum independent set of a bipartite
|
||||
graph this way:
|
||||
|
||||
>>> G = nx.complete_bipartite_graph(2, 3)
|
||||
>>> matching = nx.bipartite.maximum_matching(G)
|
||||
>>> vertex_cover = nx.bipartite.to_vertex_cover(G, matching)
|
||||
>>> independent_set = set(G) - vertex_cover
|
||||
>>> print(list(independent_set))
|
||||
[2, 3, 4]
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
"""
|
||||
# This is a Python implementation of the algorithm described at
|
||||
# <https://en.wikipedia.org/wiki/K%C3%B6nig%27s_theorem_%28graph_theory%29#Proof>.
|
||||
L, R = bipartite_sets(G, top_nodes)
|
||||
# Let U be the set of unmatched vertices in the left vertex set.
|
||||
unmatched_vertices = set(G) - set(matching)
|
||||
U = unmatched_vertices & L
|
||||
# Let Z be the set of vertices that are either in U or are connected to U
|
||||
# by alternating paths.
|
||||
Z = _connected_by_alternating_paths(G, matching, U)
|
||||
# At this point, every edge either has a right endpoint in Z or a left
|
||||
# endpoint not in Z. This gives us the vertex cover.
|
||||
return (L - Z) | (R & Z)
|
||||
|
||||
|
||||
#: Returns the maximum cardinality matching in the given bipartite graph.
|
||||
#:
|
||||
#: This function is simply an alias for :func:`hopcroft_karp_matching`.
|
||||
maximum_matching = hopcroft_karp_matching
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def minimum_weight_full_matching(G, top_nodes=None, weight="weight"):
|
||||
r"""Returns a minimum weight full matching of the bipartite graph `G`.
|
||||
|
||||
Let :math:`G = ((U, V), E)` be a weighted bipartite graph with real weights
|
||||
:math:`w : E \to \mathbb{R}`. This function then produces a matching
|
||||
:math:`M \subseteq E` with cardinality
|
||||
|
||||
.. math::
|
||||
\lvert M \rvert = \min(\lvert U \rvert, \lvert V \rvert),
|
||||
|
||||
which minimizes the sum of the weights of the edges included in the
|
||||
matching, :math:`\sum_{e \in M} w(e)`, or raises an error if no such
|
||||
matching exists.
|
||||
|
||||
When :math:`\lvert U \rvert = \lvert V \rvert`, this is commonly
|
||||
referred to as a perfect matching; here, since we allow
|
||||
:math:`\lvert U \rvert` and :math:`\lvert V \rvert` to differ, we
|
||||
follow Karp [1]_ and refer to the matching as *full*.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
Undirected bipartite graph
|
||||
|
||||
top_nodes : container
|
||||
|
||||
Container with all nodes in one bipartite node set. If not supplied
|
||||
it will be computed.
|
||||
|
||||
weight : string, optional (default='weight')
|
||||
|
||||
The edge data key used to provide each value in the matrix.
|
||||
If None, then each edge has weight 1.
|
||||
|
||||
Returns
|
||||
-------
|
||||
matches : dictionary
|
||||
|
||||
The matching is returned as a dictionary, `matches`, such that
|
||||
``matches[v] == w`` if node `v` is matched to node `w`. Unmatched
|
||||
nodes do not occur as a key in `matches`.
|
||||
|
||||
Raises
|
||||
------
|
||||
ValueError
|
||||
Raised if no full matching exists.
|
||||
|
||||
ImportError
|
||||
Raised if SciPy is not available.
|
||||
|
||||
Notes
|
||||
-----
|
||||
The problem of determining a minimum weight full matching is also known as
|
||||
the rectangular linear assignment problem. This implementation defers the
|
||||
calculation of the assignment to SciPy.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Richard Manning Karp:
|
||||
An algorithm to solve the m x n assignment problem in expected time
|
||||
O(mn log n).
|
||||
Networks, 10(2):143–152, 1980.
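
Examples
--------
An illustrative example with arbitrarily chosen weights; here the only
full matching pairs node 0 with node 2 and node 1 with node 3:

>>> G = nx.Graph()
>>> G.add_weighted_edges_from([(0, 2, 1), (0, 3, 2), (1, 3, 1)])
>>> matching = nx.bipartite.minimum_weight_full_matching(G, top_nodes=[0, 1])
>>> matching == {0: 2, 1: 3, 2: 0, 3: 1}
True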
|
||||
|
||||
"""
|
||||
import numpy as np
|
||||
import scipy as sp
|
||||
|
||||
left, right = nx.bipartite.sets(G, top_nodes)
|
||||
U = list(left)
|
||||
V = list(right)
|
||||
# We explicitly create the biadjacency matrix having infinities
|
||||
# where edges are missing (as opposed to zeros, which is what one would
|
||||
# get by using toarray on the sparse matrix).
|
||||
weights_sparse = biadjacency_matrix(
|
||||
G, row_order=U, column_order=V, weight=weight, format="coo"
|
||||
)
|
||||
weights = np.full(weights_sparse.shape, np.inf)
|
||||
weights[weights_sparse.row, weights_sparse.col] = weights_sparse.data
|
||||
left_matches = sp.optimize.linear_sum_assignment(weights)
|
||||
d = {U[u]: V[v] for u, v in zip(*left_matches)}
|
||||
# d will contain the matching from edges in left to right; we need to
|
||||
# add the ones from right to left as well.
|
||||
d.update({v: u for u, v in d.items()})
|
||||
return d
|
||||
+168
@@ -0,0 +1,168 @@
|
||||
"""
|
||||
====================
|
||||
Biadjacency matrices
|
||||
====================
|
||||
"""
|
||||
|
||||
import itertools
|
||||
|
||||
import networkx as nx
|
||||
from networkx.convert_matrix import _generate_weighted_edges
|
||||
|
||||
__all__ = ["biadjacency_matrix", "from_biadjacency_matrix"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def biadjacency_matrix(
|
||||
G, row_order, column_order=None, dtype=None, weight="weight", format="csr"
|
||||
):
|
||||
r"""Returns the biadjacency matrix of the bipartite graph G.
|
||||
|
||||
Let `G = (U, V, E)` be a bipartite graph with node sets
|
||||
`U = u_{1},...,u_{r}` and `V = v_{1},...,v_{s}`. The biadjacency
|
||||
matrix [1]_ is the `r` x `s` matrix `B` in which `b_{i,j} = 1`
|
||||
if, and only if, `(u_i, v_j) \in E`. If the parameter `weight` is
|
||||
not `None` and matches the name of an edge attribute, its value is
|
||||
used instead of 1.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
row_order : list of nodes
|
||||
The rows of the matrix are ordered according to the list of nodes.
|
||||
|
||||
column_order : list, optional
|
||||
The columns of the matrix are ordered according to the list of nodes.
|
||||
If column_order is None, then the ordering of columns is arbitrary.
|
||||
|
||||
dtype : NumPy data-type, optional
|
||||
A valid NumPy dtype used to initialize the array. If None, then the
|
||||
NumPy default is used.
|
||||
|
||||
weight : string or None, optional (default='weight')
|
||||
The edge data key used to provide each value in the matrix.
|
||||
If None, then each edge has weight 1.
|
||||
|
||||
format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
|
||||
The type of the matrix to be returned (default 'csr'). For
|
||||
some algorithms different implementations of sparse matrices
|
||||
can perform better. See [2]_ for details.
|
||||
|
||||
Returns
|
||||
-------
|
||||
M : SciPy sparse array
|
||||
Biadjacency matrix representation of the bipartite graph G.
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to check that the input graph is bipartite.
|
||||
|
||||
For directed bipartite graphs only successors are considered as neighbors.
|
||||
To obtain an adjacency matrix with ones (or weight values) for both
|
||||
predecessors and successors you have to generate two biadjacency matrices
|
||||
where the rows of one of them are the columns of the other, and then add
|
||||
one to the transpose of the other.
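
As a sketch (``D``, ``rows`` and ``cols`` are illustrative names for a
directed bipartite graph and its two node sets)::

    B_out = biadjacency_matrix(D, row_order=rows, column_order=cols)
    B_in = biadjacency_matrix(D, row_order=cols, column_order=rows)
    both_directions = B_out + B_in.T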
|
||||
|
||||
See Also
|
||||
--------
|
||||
adjacency_matrix
|
||||
from_biadjacency_matrix
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
|
||||
.. [2] Scipy Dev. References, "Sparse Matrices",
|
||||
https://docs.scipy.org/doc/scipy/reference/sparse.html
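
Examples
--------
A minimal illustration of the row/column ordering (SciPy is required):

>>> G = nx.Graph([(0, 2), (0, 3), (1, 3)])
>>> M = nx.bipartite.biadjacency_matrix(G, row_order=[0, 1], column_order=[2, 3])
>>> M.toarray().tolist()
[[1, 1], [0, 1]]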
|
||||
"""
|
||||
import scipy as sp
|
||||
|
||||
nlen = len(row_order)
|
||||
if nlen == 0:
|
||||
raise nx.NetworkXError("row_order is empty list")
|
||||
if len(row_order) != len(set(row_order)):
|
||||
msg = "Ambiguous ordering: `row_order` contained duplicates."
|
||||
raise nx.NetworkXError(msg)
|
||||
if column_order is None:
|
||||
column_order = list(set(G) - set(row_order))
|
||||
mlen = len(column_order)
|
||||
if len(column_order) != len(set(column_order)):
|
||||
msg = "Ambiguous ordering: `column_order` contained duplicates."
|
||||
raise nx.NetworkXError(msg)
|
||||
|
||||
row_index = dict(zip(row_order, itertools.count()))
|
||||
col_index = dict(zip(column_order, itertools.count()))
|
||||
|
||||
if G.number_of_edges() == 0:
|
||||
row, col, data = [], [], []
|
||||
else:
|
||||
row, col, data = zip(
|
||||
*(
|
||||
(row_index[u], col_index[v], d.get(weight, 1))
|
||||
for u, v, d in G.edges(row_order, data=True)
|
||||
if u in row_index and v in col_index
|
||||
)
|
||||
)
|
||||
A = sp.sparse.coo_array((data, (row, col)), shape=(nlen, mlen), dtype=dtype)
|
||||
try:
|
||||
return A.asformat(format)
|
||||
except ValueError as err:
|
||||
raise nx.NetworkXError(f"Unknown sparse array format: {format}") from err
|
||||
|
||||
|
||||
@nx._dispatchable(graphs=None, returns_graph=True)
|
||||
def from_biadjacency_matrix(A, create_using=None, edge_attribute="weight"):
|
||||
r"""Creates a new bipartite graph from a biadjacency matrix given as a
|
||||
SciPy sparse array.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
A: scipy sparse array
|
||||
A biadjacency matrix representation of a graph
|
||||
|
||||
create_using: NetworkX graph
|
||||
Use specified graph for result. The default is Graph()
|
||||
|
||||
edge_attribute: string
|
||||
Name of edge attribute to store matrix numeric value. The data will
|
||||
have the same type as the matrix entry (int, float, (real,imag)).
|
||||
|
||||
Notes
|
||||
-----
|
||||
The nodes are labeled with the attribute `bipartite` set to an integer
|
||||
0 or 1 representing membership in part 0 or part 1 of the bipartite graph.
|
||||
|
||||
If `create_using` is an instance of :class:`networkx.MultiGraph` or
|
||||
:class:`networkx.MultiDiGraph` and the entries of `A` are of
|
||||
type :class:`int`, then this function returns a multigraph (of the same
|
||||
type as `create_using`) with parallel edges. In this case, `edge_attribute`
|
||||
will be ignored.
|
||||
|
||||
See Also
|
||||
--------
|
||||
biadjacency_matrix
|
||||
from_numpy_array
|
||||
|
||||
References
|
||||
----------
|
||||
[1] https://en.wikipedia.org/wiki/Adjacency_matrix#Adjacency_matrix_of_a_bipartite_graph
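
Examples
--------
An illustrative round trip, assuming SciPy is installed:

>>> import scipy.sparse as sp
>>> A = sp.coo_array([[1, 0], [1, 1]])
>>> G = nx.bipartite.from_biadjacency_matrix(A)
>>> sorted(G.edges())
[(0, 2), (1, 2), (1, 3)]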
|
||||
"""
|
||||
G = nx.empty_graph(0, create_using)
|
||||
n, m = A.shape
|
||||
# Make sure we get even the isolated nodes of the graph.
|
||||
G.add_nodes_from(range(n), bipartite=0)
|
||||
G.add_nodes_from(range(n, n + m), bipartite=1)
|
||||
# Create an iterable over (u, v, w) triples and for each triple, add an
|
||||
# edge from u to v with weight w.
|
||||
triples = ((u, n + v, d) for (u, v, d) in _generate_weighted_edges(A))
|
||||
# If the entries in the adjacency matrix are integers and the graph is a
|
||||
# multigraph, then create parallel edges, each with weight 1, for each
|
||||
# entry in the adjacency matrix. Otherwise, create one edge for each
|
||||
# positive entry in the adjacency matrix and set the weight of that edge to
|
||||
# be the entry in the matrix.
|
||||
if A.dtype.kind in ("i", "u") and G.is_multigraph():
|
||||
chain = itertools.chain.from_iterable
|
||||
triples = chain(((u, v, 1) for d in range(w)) for (u, v, w) in triples)
|
||||
G.add_weighted_edges_from(triples, weight=edge_attribute)
|
||||
return G
|
||||
+526
@@ -0,0 +1,526 @@
|
||||
"""One-mode (unipartite) projections of bipartite graphs."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.exception import NetworkXAlgorithmError
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = [
|
||||
"projected_graph",
|
||||
"weighted_projected_graph",
|
||||
"collaboration_weighted_projected_graph",
|
||||
"overlap_weighted_projected_graph",
|
||||
"generic_weighted_projected_graph",
|
||||
]
|
||||
|
||||
|
||||
@nx._dispatchable(
|
||||
graphs="B", preserve_node_attrs=True, preserve_graph_attrs=True, returns_graph=True
|
||||
)
|
||||
def projected_graph(B, nodes, multigraph=False):
|
||||
r"""Returns the projection of B onto one of its node sets.
|
||||
|
||||
Returns the graph G that is the projection of the bipartite graph B
|
||||
onto the specified nodes. They retain their attributes and are connected
|
||||
in G if they have a common neighbor in B.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
multigraph: bool (default=False)
|
||||
If True, return a multigraph where the multiple edges represent multiple
|
||||
shared neighbors. The edge key in the multigraph is assigned to the
|
||||
label of the neighbor.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph or multigraph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> B = nx.path_graph(4)
|
||||
>>> G = bipartite.projected_graph(B, [1, 3])
|
||||
>>> list(G)
|
||||
[1, 3]
|
||||
>>> list(G.edges())
|
||||
[(1, 3)]
|
||||
|
||||
If nodes `a`, and `b` are connected through both nodes 1 and 2 then
|
||||
building a multigraph results in two edges in the projection onto
|
||||
[`a`, `b`]:
|
||||
|
||||
>>> B = nx.Graph()
|
||||
>>> B.add_edges_from([("a", 1), ("b", 1), ("a", 2), ("b", 2)])
|
||||
>>> G = bipartite.projected_graph(B, ["a", "b"], multigraph=True)
|
||||
>>> print([sorted((u, v)) for u, v in G.edges()])
|
||||
[['a', 'b'], ['a', 'b']]
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
Returns a simple graph that is the projection of the bipartite graph B
|
||||
onto the set of nodes given in list nodes. If multigraph=True then
|
||||
a multigraph is returned with an edge for every shared neighbor.
|
||||
|
||||
Directed graphs are allowed as input. The output will also then
|
||||
be a directed graph with edges if there is a directed path between
|
||||
the nodes.
|
||||
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
weighted_projected_graph,
|
||||
collaboration_weighted_projected_graph,
|
||||
overlap_weighted_projected_graph,
|
||||
generic_weighted_projected_graph
|
||||
"""
|
||||
if B.is_multigraph():
|
||||
raise nx.NetworkXError("not defined for multigraphs")
|
||||
if B.is_directed():
|
||||
directed = True
|
||||
if multigraph:
|
||||
G = nx.MultiDiGraph()
|
||||
else:
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
directed = False
|
||||
if multigraph:
|
||||
G = nx.MultiGraph()
|
||||
else:
|
||||
G = nx.Graph()
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
for u in nodes:
|
||||
nbrs2 = {v for nbr in B[u] for v in B[nbr] if v != u}
|
||||
if multigraph:
|
||||
for n in nbrs2:
|
||||
if directed:
|
||||
links = set(B[u]) & set(B.pred[n])
|
||||
else:
|
||||
links = set(B[u]) & set(B[n])
|
||||
for l in links:
|
||||
if not G.has_edge(u, n, l):
|
||||
G.add_edge(u, n, key=l)
|
||||
else:
|
||||
G.add_edges_from((u, n) for n in nbrs2)
|
||||
return G
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(graphs="B", returns_graph=True)
|
||||
def weighted_projected_graph(B, nodes, ratio=False):
|
||||
r"""Returns a weighted projection of B onto one of its node sets.
|
||||
|
||||
The weighted projected graph is the projection of the bipartite
|
||||
network B onto the specified nodes with weights representing the
|
||||
number of shared neighbors or the ratio between actual shared
|
||||
neighbors and possible shared neighbors if ``ratio is True`` [1]_.
|
||||
The nodes retain their attributes and are connected in the resulting
|
||||
graph if they have an edge to a common node in the original graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Distinct nodes to project onto (the "bottom" nodes).
|
||||
|
||||
ratio: Bool (default=False)
|
||||
If True, edge weight is the ratio between actual shared neighbors
|
||||
and maximum possible shared neighbors (i.e., the size of the other
|
||||
node set). If False, edge weight is the number of shared neighbors.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> B = nx.path_graph(4)
|
||||
>>> G = bipartite.weighted_projected_graph(B, [1, 3])
|
||||
>>> list(G)
|
||||
[1, 3]
|
||||
>>> list(G.edges(data=True))
|
||||
[(1, 3, {'weight': 1})]
|
||||
>>> G = bipartite.weighted_projected_graph(B, [1, 3], ratio=True)
|
||||
>>> list(G.edges(data=True))
|
||||
[(1, 3, {'weight': 0.5})]
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite, or that
|
||||
the input nodes are distinct. However, if the length of the input nodes is
|
||||
greater than or equal to the number of nodes in the graph B, an exception is raised.
|
||||
If the input nodes are not distinct but this error is not raised, the output weights
|
||||
will be incorrect.
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
collaboration_weighted_projected_graph,
|
||||
overlap_weighted_projected_graph,
|
||||
generic_weighted_projected_graph
|
||||
projected_graph
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. "Analyzing Affiliation
|
||||
Networks". In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
"""
|
||||
if B.is_directed():
|
||||
pred = B.pred
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
pred = B.adj
|
||||
G = nx.Graph()
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
n_top = len(B) - len(nodes)
|
||||
|
||||
if n_top < 1:
|
||||
raise NetworkXAlgorithmError(
|
||||
f"the size of the nodes to project onto ({len(nodes)}) is >= the graph size ({len(B)}).\n"
|
||||
"They are either not a valid bipartite partition or contain duplicates"
|
||||
)
|
||||
|
||||
for u in nodes:
|
||||
unbrs = set(B[u])
|
||||
nbrs2 = {n for nbr in unbrs for n in B[nbr]} - {u}
|
||||
for v in nbrs2:
|
||||
vnbrs = set(pred[v])
|
||||
common = unbrs & vnbrs
|
||||
if not ratio:
|
||||
weight = len(common)
|
||||
else:
|
||||
weight = len(common) / n_top
|
||||
G.add_edge(u, v, weight=weight)
|
||||
return G
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(graphs="B", returns_graph=True)
|
||||
def collaboration_weighted_projected_graph(B, nodes):
|
||||
r"""Newman's weighted projection of B onto one of its node sets.
|
||||
|
||||
The collaboration weighted projection is the projection of the
|
||||
bipartite network B onto the specified nodes with weights assigned
|
||||
using Newman's collaboration model [1]_:
|
||||
|
||||
.. math::
|
||||
|
||||
w_{u, v} = \sum_k \frac{\delta_{u}^{k} \delta_{v}^{k}}{d_k - 1}
|
||||
|
||||
where `u` and `v` are nodes from the bottom bipartite node set,
|
||||
and `k` is a node of the top node set.
|
||||
The value `d_k` is the degree of node `k` in the bipartite
|
||||
network and `\delta_{u}^{k}` is 1 if node `u` is
|
||||
linked to node `k` in the original bipartite graph or 0 otherwise.
|
||||
|
||||
The nodes retain their attributes and are connected in the resulting
|
||||
graph if they have an edge to a common node in the original bipartite
|
||||
graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> B = nx.path_graph(5)
|
||||
>>> B.add_edge(1, 5)
|
||||
>>> G = bipartite.collaboration_weighted_projected_graph(B, [0, 2, 4, 5])
|
||||
>>> list(G)
|
||||
[0, 2, 4, 5]
|
||||
>>> for edge in sorted(G.edges(data=True)):
|
||||
... print(edge)
|
||||
(0, 2, {'weight': 0.5})
|
||||
(0, 5, {'weight': 0.5})
|
||||
(2, 4, {'weight': 1.0})
|
||||
(2, 5, {'weight': 0.5})
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
weighted_projected_graph,
|
||||
overlap_weighted_projected_graph,
|
||||
generic_weighted_projected_graph,
|
||||
projected_graph
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Scientific collaboration networks: II.
|
||||
Shortest paths, weighted networks, and centrality,
|
||||
M. E. J. Newman, Phys. Rev. E 64, 016132 (2001).
|
||||
"""
|
||||
if B.is_directed():
|
||||
pred = B.pred
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
pred = B.adj
|
||||
G = nx.Graph()
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
for u in nodes:
|
||||
unbrs = set(B[u])
|
||||
nbrs2 = {n for nbr in unbrs for n in B[nbr] if n != u}
|
||||
for v in nbrs2:
|
||||
vnbrs = set(pred[v])
|
||||
common_degree = (len(B[n]) for n in unbrs & vnbrs)
|
||||
weight = sum(1.0 / (deg - 1) for deg in common_degree if deg > 1)
|
||||
G.add_edge(u, v, weight=weight)
|
||||
return G
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(graphs="B", returns_graph=True)
|
||||
def overlap_weighted_projected_graph(B, nodes, jaccard=True):
|
||||
r"""Overlap weighted projection of B onto one of its node sets.
|
||||
|
||||
The overlap weighted projection is the projection of the bipartite
|
||||
network B onto the specified nodes with weights representing
|
||||
the Jaccard index between the neighborhoods of the two nodes in the
|
||||
original bipartite network [1]_:
|
||||
|
||||
.. math::
|
||||
|
||||
w_{v, u} = \frac{|N(u) \cap N(v)|}{|N(u) \cup N(v)|}
|
||||
|
||||
or if the parameter 'jaccard' is False, the number of common
|
||||
neighbors divided by the minimum of the two nodes' degrees in the original
|
||||
bipartite graph [1]_:
|
||||
|
||||
.. math::
|
||||
|
||||
w_{v, u} = \frac{|N(u) \cap N(v)|}{min(|N(u)|, |N(v)|)}
|
||||
|
||||
The nodes retain their attributes and are connected in the resulting
|
||||
graph if they have an edge to a common node in the original bipartite graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
jaccard: Bool (default=True)
If True, edge weights are Jaccard indices; if False, they are overlap
coefficients (shared neighbors divided by the size of the smaller
neighborhood).
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> B = nx.path_graph(5)
|
||||
>>> nodes = [0, 2, 4]
|
||||
>>> G = bipartite.overlap_weighted_projected_graph(B, nodes)
|
||||
>>> list(G)
|
||||
[0, 2, 4]
|
||||
>>> list(G.edges(data=True))
|
||||
[(0, 2, {'weight': 0.5}), (2, 4, {'weight': 0.5})]
|
||||
>>> G = bipartite.overlap_weighted_projected_graph(B, nodes, jaccard=False)
|
||||
>>> list(G.edges(data=True))
|
||||
[(0, 2, {'weight': 1.0}), (2, 4, {'weight': 1.0})]
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
weighted_projected_graph,
|
||||
collaboration_weighted_projected_graph,
|
||||
generic_weighted_projected_graph,
|
||||
projected_graph
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Borgatti, S.P. and Halgin, D. In press. Analyzing Affiliation
|
||||
Networks. In Carrington, P. and Scott, J. (eds) The Sage Handbook
|
||||
of Social Network Analysis. Sage Publications.
|
||||
|
||||
"""
|
||||
if B.is_directed():
|
||||
pred = B.pred
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
pred = B.adj
|
||||
G = nx.Graph()
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
for u in nodes:
|
||||
unbrs = set(B[u])
|
||||
nbrs2 = {n for nbr in unbrs for n in B[nbr]} - {u}
|
||||
for v in nbrs2:
|
||||
vnbrs = set(pred[v])
|
||||
if jaccard:
|
||||
wt = len(unbrs & vnbrs) / len(unbrs | vnbrs)
|
||||
else:
|
||||
wt = len(unbrs & vnbrs) / min(len(unbrs), len(vnbrs))
|
||||
G.add_edge(u, v, weight=wt)
|
||||
return G
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(graphs="B", preserve_all_attrs=True, returns_graph=True)
|
||||
def generic_weighted_projected_graph(B, nodes, weight_function=None):
|
||||
r"""Weighted projection of B with a user-specified weight function.
|
||||
|
||||
The bipartite network B is projected on to the specified nodes
|
||||
with weights computed by a user-specified function. This function
|
||||
must accept as parameters the graph and two nodes, and
|
||||
return an integer or a float.
|
||||
|
||||
The nodes retain their attributes and are connected in the resulting graph
|
||||
if they have an edge to a common node in the original graph.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
B : NetworkX graph
|
||||
The input graph should be bipartite.
|
||||
|
||||
nodes : list or iterable
|
||||
Nodes to project onto (the "bottom" nodes).
|
||||
|
||||
weight_function : function
|
||||
This function must accept as parameters the same input graph
|
||||
that is passed to ``generic_weighted_projected_graph``, and two nodes; it must return an integer or a float.
|
||||
The default function computes the number of shared neighbors.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Graph : NetworkX graph
|
||||
A graph that is the projection onto the given nodes.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> # Define some custom weight functions
|
||||
>>> def jaccard(G, u, v):
|
||||
... unbrs = set(G[u])
|
||||
... vnbrs = set(G[v])
|
||||
... return float(len(unbrs & vnbrs)) / len(unbrs | vnbrs)
|
||||
>>> def my_weight(G, u, v, weight="weight"):
|
||||
... w = 0
|
||||
... for nbr in set(G[u]) & set(G[v]):
|
||||
... w += G[u][nbr].get(weight, 1) + G[v][nbr].get(weight, 1)
|
||||
... return w
|
||||
>>> # A complete bipartite graph with 4 nodes and 4 edges
|
||||
>>> B = nx.complete_bipartite_graph(2, 2)
|
||||
>>> # Add some arbitrary weight to the edges
|
||||
>>> for i, (u, v) in enumerate(B.edges()):
|
||||
... B.edges[u, v]["weight"] = i + 1
|
||||
>>> for edge in B.edges(data=True):
|
||||
... print(edge)
|
||||
(0, 2, {'weight': 1})
|
||||
(0, 3, {'weight': 2})
|
||||
(1, 2, {'weight': 3})
|
||||
(1, 3, {'weight': 4})
|
||||
>>> # By default, the weight is the number of shared neighbors
|
||||
>>> G = bipartite.generic_weighted_projected_graph(B, [0, 1])
|
||||
>>> print(list(G.edges(data=True)))
|
||||
[(0, 1, {'weight': 2})]
|
||||
>>> # To specify a custom weight function use the weight_function parameter
|
||||
>>> G = bipartite.generic_weighted_projected_graph(
|
||||
... B, [0, 1], weight_function=jaccard
|
||||
... )
|
||||
>>> print(list(G.edges(data=True)))
|
||||
[(0, 1, {'weight': 1.0})]
|
||||
>>> G = bipartite.generic_weighted_projected_graph(
|
||||
... B, [0, 1], weight_function=my_weight
|
||||
... )
|
||||
>>> print(list(G.edges(data=True)))
|
||||
[(0, 1, {'weight': 10})]
|
||||
|
||||
Notes
|
||||
-----
|
||||
No attempt is made to verify that the input graph B is bipartite.
|
||||
The graph and node properties are (shallow) copied to the projected graph.
|
||||
|
||||
See :mod:`bipartite documentation <networkx.algorithms.bipartite>`
|
||||
for further details on how bipartite graphs are handled in NetworkX.
|
||||
|
||||
See Also
|
||||
--------
|
||||
is_bipartite,
|
||||
is_bipartite_node_set,
|
||||
sets,
|
||||
weighted_projected_graph,
|
||||
collaboration_weighted_projected_graph,
|
||||
overlap_weighted_projected_graph,
|
||||
projected_graph
|
||||
|
||||
"""
|
||||
if B.is_directed():
|
||||
pred = B.pred
|
||||
G = nx.DiGraph()
|
||||
else:
|
||||
pred = B.adj
|
||||
G = nx.Graph()
|
||||
if weight_function is None:
|
||||
|
||||
def weight_function(G, u, v):
|
||||
# Notice that we use set(pred[v]) for handling the directed case.
|
||||
return len(set(G[u]) & set(pred[v]))
|
||||
|
||||
G.graph.update(B.graph)
|
||||
G.add_nodes_from((n, B.nodes[n]) for n in nodes)
|
||||
for u in nodes:
|
||||
nbrs2 = {n for nbr in set(B[u]) for n in B[nbr]} - {u}
|
||||
for v in nbrs2:
|
||||
weight = weight_function(B, u, v)
|
||||
G.add_edge(u, v, weight=weight)
|
||||
return G
|
||||
+112
@@ -0,0 +1,112 @@
|
||||
"""Node redundancy for bipartite graphs."""
|
||||
|
||||
from itertools import combinations
|
||||
|
||||
import networkx as nx
|
||||
from networkx import NetworkXError
|
||||
|
||||
__all__ = ["node_redundancy"]
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def node_redundancy(G, nodes=None):
|
||||
r"""Computes the node redundancy coefficients for the nodes in the bipartite
|
||||
graph `G`.
|
||||
|
||||
The redundancy coefficient of a node `v` is the fraction of pairs of
|
||||
neighbors of `v` that are both linked to a common node other than `v`. In a one-mode
|
||||
projection these nodes would be linked together even if `v` were
|
||||
not there.
|
||||
|
||||
More formally, for any vertex `v`, the *redundancy coefficient of `v`* is
|
||||
defined by
|
||||
|
||||
.. math::
|
||||
|
||||
rc(v) = \frac{|\{\{u, w\} \subseteq N(v),
|
||||
\: \exists v' \neq v,\: (v',u) \in E\:
|
||||
\mathrm{and}\: (v',w) \in E\}|}{ \frac{|N(v)|(|N(v)|-1)}{2}},
|
||||
|
||||
where `N(v)` is the set of neighbors of `v` in `G`.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A bipartite graph
|
||||
|
||||
nodes : list or iterable (optional)
|
||||
Compute redundancy for these nodes. The default is all nodes in G.
|
||||
|
||||
Returns
|
||||
-------
|
||||
redundancy : dictionary
|
||||
A dictionary keyed by node with the node redundancy value.
|
||||
|
||||
Examples
|
||||
--------
|
||||
Compute the redundancy coefficient of each node in a graph::
|
||||
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.cycle_graph(4)
|
||||
>>> rc = bipartite.node_redundancy(G)
|
||||
>>> rc[0]
|
||||
1.0
|
||||
|
||||
Compute the average redundancy for the graph::
|
||||
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.cycle_graph(4)
|
||||
>>> rc = bipartite.node_redundancy(G)
|
||||
>>> sum(rc.values()) / len(G)
|
||||
1.0
|
||||
|
||||
Compute the average redundancy for a set of nodes::
|
||||
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.cycle_graph(4)
|
||||
>>> rc = bipartite.node_redundancy(G)
|
||||
>>> nodes = [0, 2]
|
||||
>>> sum(rc[n] for n in nodes) / len(nodes)
|
||||
1.0
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If any of the nodes in the graph (or in `nodes`, if specified) has
|
||||
(out-)degree less than two (which would result in division by zero,
|
||||
according to the definition of the redundancy coefficient).
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Latapy, Matthieu, Clémence Magnien, and Nathalie Del Vecchio (2008).
|
||||
Basic notions for the analysis of large two-mode networks.
|
||||
Social Networks 30(1), 31--48.
|
||||
|
||||
"""
|
||||
if nodes is None:
|
||||
nodes = G
|
||||
if any(len(G[v]) < 2 for v in nodes):
|
||||
raise NetworkXError(
|
||||
"Cannot compute redundancy coefficient for a node"
|
||||
" that has fewer than two neighbors."
|
||||
)
|
||||
# TODO This can be trivially parallelized.
|
||||
return {v: _node_redundancy(G, v) for v in nodes}
|
||||
|
||||
|
||||
def _node_redundancy(G, v):
|
||||
"""Returns the redundancy of the node `v` in the bipartite graph `G`.
|
||||
|
||||
If `G` is a graph with `n` nodes, the redundancy of a node is the ratio
|
||||
of the "overlap" of `v` to the maximum possible overlap of `v`
|
||||
according to its degree. The overlap of `v` is the number of pairs of
|
||||
neighbors that share at least one common neighbor other than `v`.
|
||||
|
||||
`v` must have at least two neighbors in `G`.
|
||||
|
||||
"""
|
||||
n = len(G[v])
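# overlap counts the pairs of v's neighbors that share at least one common
# neighbor other than v itself.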
|
||||
overlap = sum(
|
||||
1 for (u, w) in combinations(G[v], 2) if (set(G[u]) & set(G[w])) - {v}
|
||||
)
|
||||
return (2 * overlap) / (n * (n - 1))
|
||||
+69
@@ -0,0 +1,69 @@
|
||||
"""
|
||||
Spectral bipartivity measure.
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["spectral_bipartivity"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def spectral_bipartivity(G, nodes=None, weight="weight"):
|
||||
"""Returns the spectral bipartivity.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
nodes : list or container, optional (default is all nodes)
|
||||
Nodes for which to return the spectral bipartivity contribution.
|
||||
|
||||
weight : string or None, optional (default='weight')
|
||||
Edge data key to use for edge weights. If None, weights set to 1.
|
||||
|
||||
Returns
|
||||
-------
|
||||
sb : float or dict
|
||||
A single number if the keyword nodes is not specified, or
|
||||
a dictionary keyed by node with the spectral bipartivity contribution
|
||||
of that node as the value.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> from networkx.algorithms import bipartite
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> bipartite.spectral_bipartivity(G)
|
||||
1.0
|
||||
|
||||
Notes
|
||||
-----
|
||||
This implementation uses NumPy (dense) matrices, which are not efficient
|
||||
for storing large sparse graphs.
|
||||
|
||||
See Also
|
||||
--------
|
||||
color
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
|
||||
bipartivity in complex networks", Phys. Rev. E 72, 046105 (2005)
|
||||
"""
|
||||
import scipy as sp
|
||||
|
||||
nodelist = list(G) # ordering of nodes in matrix
|
||||
A = nx.to_numpy_array(G, nodelist, weight=weight)
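# Bipartivity compares even closed walks to all closed walks: the diagonal
# of cosh(A) against the diagonal of exp(A) (Estrada & Rodríguez-Velázquez).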
|
||||
expA = sp.linalg.expm(A)
|
||||
expmA = sp.linalg.expm(-A)
|
||||
coshA = 0.5 * (expA + expmA)
|
||||
if nodes is None:
|
||||
# return single number for entire graph
|
||||
return float(coshA.diagonal().sum() / expA.diagonal().sum())
|
||||
else:
|
||||
# contribution for individual nodes
|
||||
index = dict(zip(nodelist, range(len(nodelist))))
|
||||
sb = {}
|
||||
for n in nodes:
|
||||
i = index[n]
|
||||
sb[n] = coshA.item(i, i) / expA.item(i, i)
|
||||
return sb
|
||||
+125
@@ -0,0 +1,125 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
|
||||
|
||||
class TestBipartiteBasic:
|
||||
def test_is_bipartite(self):
|
||||
assert bipartite.is_bipartite(nx.path_graph(4))
|
||||
assert bipartite.is_bipartite(nx.DiGraph([(1, 0)]))
|
||||
assert not bipartite.is_bipartite(nx.complete_graph(3))
|
||||
|
||||
def test_bipartite_color(self):
|
||||
G = nx.path_graph(4)
|
||||
c = bipartite.color(G)
|
||||
assert c == {0: 1, 1: 0, 2: 1, 3: 0}
|
||||
|
||||
def test_not_bipartite_color(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
c = bipartite.color(nx.complete_graph(4))
|
||||
|
||||
def test_bipartite_directed(self):
|
||||
G = bipartite.random_graph(10, 10, 0.1, directed=True)
|
||||
assert bipartite.is_bipartite(G)
|
||||
|
||||
def test_bipartite_sets(self):
|
||||
G = nx.path_graph(4)
|
||||
X, Y = bipartite.sets(G)
|
||||
assert X == {0, 2}
|
||||
assert Y == {1, 3}
|
||||
|
||||
def test_bipartite_sets_directed(self):
|
||||
G = nx.path_graph(4)
|
||||
D = G.to_directed()
|
||||
X, Y = bipartite.sets(D)
|
||||
assert X == {0, 2}
|
||||
assert Y == {1, 3}
|
||||
|
||||
def test_bipartite_sets_given_top_nodes(self):
|
||||
G = nx.path_graph(4)
|
||||
top_nodes = [0, 2]
|
||||
X, Y = bipartite.sets(G, top_nodes)
|
||||
assert X == {0, 2}
|
||||
assert Y == {1, 3}
|
||||
|
||||
def test_bipartite_sets_disconnected(self):
|
||||
with pytest.raises(nx.AmbiguousSolution):
|
||||
G = nx.path_graph(4)
|
||||
G.add_edges_from([(5, 6), (6, 7)])
|
||||
X, Y = bipartite.sets(G)
|
||||
|
||||
def test_is_bipartite_node_set(self):
|
||||
G = nx.path_graph(4)
|
||||
|
||||
with pytest.raises(nx.AmbiguousSolution):
|
||||
bipartite.is_bipartite_node_set(G, [1, 1, 2, 3])
|
||||
|
||||
assert bipartite.is_bipartite_node_set(G, [0, 2])
|
||||
assert bipartite.is_bipartite_node_set(G, [1, 3])
|
||||
assert not bipartite.is_bipartite_node_set(G, [1, 2])
|
||||
G.add_edge(10, 20)
|
||||
assert bipartite.is_bipartite_node_set(G, [0, 2, 10])
|
||||
assert bipartite.is_bipartite_node_set(G, [0, 2, 20])
|
||||
assert bipartite.is_bipartite_node_set(G, [1, 3, 10])
|
||||
assert bipartite.is_bipartite_node_set(G, [1, 3, 20])
|
||||
|
||||
def test_bipartite_density(self):
|
||||
G = nx.path_graph(5)
|
||||
X, Y = bipartite.sets(G)
|
||||
density = len(list(G.edges())) / (len(X) * len(Y))
|
||||
assert bipartite.density(G, X) == density
|
||||
D = nx.DiGraph(G.edges())
|
||||
assert bipartite.density(D, X) == density / 2.0
|
||||
assert bipartite.density(nx.Graph(), {}) == 0.0
|
||||
|
||||
def test_bipartite_degrees(self):
|
||||
G = nx.path_graph(5)
|
||||
X = {1, 3}
|
||||
Y = {0, 2, 4}
|
||||
u, d = bipartite.degrees(G, Y)
|
||||
assert dict(u) == {1: 2, 3: 2}
|
||||
assert dict(d) == {0: 1, 2: 2, 4: 1}
|
||||
|
||||
def test_bipartite_weighted_degrees(self):
|
||||
G = nx.path_graph(5)
|
||||
G.add_edge(0, 1, weight=0.1, other=0.2)
|
||||
X = {1, 3}
|
||||
Y = {0, 2, 4}
|
||||
u, d = bipartite.degrees(G, Y, weight="weight")
|
||||
assert dict(u) == {1: 1.1, 3: 2}
|
||||
assert dict(d) == {0: 0.1, 2: 2, 4: 1}
|
||||
u, d = bipartite.degrees(G, Y, weight="other")
|
||||
assert dict(u) == {1: 1.2, 3: 2}
|
||||
assert dict(d) == {0: 0.2, 2: 2, 4: 1}
|
||||
|
||||
def test_biadjacency_matrix_weight(self):
|
||||
pytest.importorskip("scipy")
|
||||
G = nx.path_graph(5)
|
||||
G.add_edge(0, 1, weight=2, other=4)
|
||||
X = [1, 3]
|
||||
Y = [0, 2, 4]
|
||||
M = bipartite.biadjacency_matrix(G, X, weight="weight")
|
||||
assert M[0, 0] == 2
|
||||
M = bipartite.biadjacency_matrix(G, X, weight="other")
|
||||
assert M[0, 0] == 4
|
||||
|
||||
def test_biadjacency_matrix(self):
|
||||
pytest.importorskip("scipy")
|
||||
tops = [2, 5, 10]
|
||||
bots = [5, 10, 15]
|
||||
for i in range(len(tops)):
|
||||
G = bipartite.random_graph(tops[i], bots[i], 0.2)
|
||||
top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
|
||||
M = bipartite.biadjacency_matrix(G, top)
|
||||
assert M.shape[0] == tops[i]
|
||||
assert M.shape[1] == bots[i]
|
||||
|
||||
def test_biadjacency_matrix_order(self):
|
||||
pytest.importorskip("scipy")
|
||||
G = nx.path_graph(5)
|
||||
G.add_edge(0, 1, weight=2)
|
||||
X = [3, 1]
|
||||
Y = [4, 2, 0]
|
||||
M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
|
||||
assert M[1, 2] == 2
|
||||
+192
@@ -0,0 +1,192 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
|
||||
|
||||
class TestBipartiteCentrality:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
cls.P4 = nx.path_graph(4)
|
||||
cls.K3 = nx.complete_bipartite_graph(3, 3)
|
||||
cls.C4 = nx.cycle_graph(4)
|
||||
cls.davis = nx.davis_southern_women_graph()
|
||||
cls.top_nodes = [
|
||||
n for n, d in cls.davis.nodes(data=True) if d["bipartite"] == 0
|
||||
]
|
||||
|
||||
def test_degree_centrality(self):
|
||||
d = bipartite.degree_centrality(self.P4, [1, 3])
|
||||
answer = {0: 0.5, 1: 1.0, 2: 1.0, 3: 0.5}
|
||||
assert d == answer
|
||||
d = bipartite.degree_centrality(self.K3, [0, 1, 2])
|
||||
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
|
||||
assert d == answer
|
||||
d = bipartite.degree_centrality(self.C4, [0, 2])
|
||||
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
|
||||
assert d == answer
|
||||
|
||||
def test_betweenness_centrality(self):
|
||||
c = bipartite.betweenness_centrality(self.P4, [1, 3])
|
||||
answer = {0: 0.0, 1: 1.0, 2: 1.0, 3: 0.0}
|
||||
assert c == answer
|
||||
c = bipartite.betweenness_centrality(self.K3, [0, 1, 2])
|
||||
answer = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125, 4: 0.125, 5: 0.125}
|
||||
assert c == answer
|
||||
c = bipartite.betweenness_centrality(self.C4, [0, 2])
|
||||
answer = {0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
|
||||
assert c == answer
|
||||
|
||||
def test_closeness_centrality(self):
|
||||
c = bipartite.closeness_centrality(self.P4, [1, 3])
|
||||
answer = {0: 2.0 / 3, 1: 1.0, 2: 1.0, 3: 2.0 / 3}
|
||||
assert c == answer
|
||||
c = bipartite.closeness_centrality(self.K3, [0, 1, 2])
|
||||
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0, 4: 1.0, 5: 1.0}
|
||||
assert c == answer
|
||||
c = bipartite.closeness_centrality(self.C4, [0, 2])
|
||||
answer = {0: 1.0, 1: 1.0, 2: 1.0, 3: 1.0}
|
||||
assert c == answer
|
||||
G = nx.Graph()
|
||||
G.add_node(0)
|
||||
G.add_node(1)
|
||||
c = bipartite.closeness_centrality(G, [0])
|
||||
assert c == {0: 0.0, 1: 0.0}
|
||||
c = bipartite.closeness_centrality(G, [1])
|
||||
assert c == {0: 0.0, 1: 0.0}
|
||||
|
||||
def test_bipartite_closeness_centrality_unconnected(self):
|
||||
G = nx.complete_bipartite_graph(3, 3)
|
||||
G.add_edge(6, 7)
|
||||
c = bipartite.closeness_centrality(G, [0, 2, 4, 6], normalized=False)
|
||||
answer = {
|
||||
0: 10.0 / 7,
|
||||
2: 10.0 / 7,
|
||||
4: 10.0 / 7,
|
||||
6: 10.0,
|
||||
1: 10.0 / 7,
|
||||
3: 10.0 / 7,
|
||||
5: 10.0 / 7,
|
||||
7: 10.0,
|
||||
}
|
||||
assert c == answer
|
||||
|
||||
def test_davis_degree_centrality(self):
|
||||
G = self.davis
|
||||
deg = bipartite.degree_centrality(G, self.top_nodes)
|
||||
answer = {
|
||||
"E8": 0.78,
|
||||
"E9": 0.67,
|
||||
"E7": 0.56,
|
||||
"Nora Fayette": 0.57,
|
||||
"Evelyn Jefferson": 0.57,
|
||||
"Theresa Anderson": 0.57,
|
||||
"E6": 0.44,
|
||||
"Sylvia Avondale": 0.50,
|
||||
"Laura Mandeville": 0.50,
|
||||
"Brenda Rogers": 0.50,
|
||||
"Katherina Rogers": 0.43,
|
||||
"E5": 0.44,
|
||||
"Helen Lloyd": 0.36,
|
||||
"E3": 0.33,
|
||||
"Ruth DeSand": 0.29,
|
||||
"Verne Sanderson": 0.29,
|
||||
"E12": 0.33,
|
||||
"Myra Liddel": 0.29,
|
||||
"E11": 0.22,
|
||||
"Eleanor Nye": 0.29,
|
||||
"Frances Anderson": 0.29,
|
||||
"Pearl Oglethorpe": 0.21,
|
||||
"E4": 0.22,
|
||||
"Charlotte McDowd": 0.29,
|
||||
"E10": 0.28,
|
||||
"Olivia Carleton": 0.14,
|
||||
"Flora Price": 0.14,
|
||||
"E2": 0.17,
|
||||
"E1": 0.17,
|
||||
"Dorothy Murchison": 0.14,
|
||||
"E13": 0.17,
|
||||
"E14": 0.17,
|
||||
}
|
||||
for node, value in answer.items():
|
||||
assert value == pytest.approx(deg[node], abs=1e-2)
|
||||
|
||||
def test_davis_betweenness_centrality(self):
|
||||
G = self.davis
|
||||
bet = bipartite.betweenness_centrality(G, self.top_nodes)
|
||||
answer = {
|
||||
"E8": 0.24,
|
||||
"E9": 0.23,
|
||||
"E7": 0.13,
|
||||
"Nora Fayette": 0.11,
|
||||
"Evelyn Jefferson": 0.10,
|
||||
"Theresa Anderson": 0.09,
|
||||
"E6": 0.07,
|
||||
"Sylvia Avondale": 0.07,
|
||||
"Laura Mandeville": 0.05,
|
||||
"Brenda Rogers": 0.05,
|
||||
"Katherina Rogers": 0.05,
|
||||
"E5": 0.04,
|
||||
"Helen Lloyd": 0.04,
|
||||
"E3": 0.02,
|
||||
"Ruth DeSand": 0.02,
|
||||
"Verne Sanderson": 0.02,
|
||||
"E12": 0.02,
|
||||
"Myra Liddel": 0.02,
|
||||
"E11": 0.02,
|
||||
"Eleanor Nye": 0.01,
|
||||
"Frances Anderson": 0.01,
|
||||
"Pearl Oglethorpe": 0.01,
|
||||
"E4": 0.01,
|
||||
"Charlotte McDowd": 0.01,
|
||||
"E10": 0.01,
|
||||
"Olivia Carleton": 0.01,
|
||||
"Flora Price": 0.01,
|
||||
"E2": 0.00,
|
||||
"E1": 0.00,
|
||||
"Dorothy Murchison": 0.00,
|
||||
"E13": 0.00,
|
||||
"E14": 0.00,
|
||||
}
|
||||
for node, value in answer.items():
|
||||
assert value == pytest.approx(bet[node], abs=1e-2)
|
||||
|
||||
def test_davis_closeness_centrality(self):
|
||||
G = self.davis
|
||||
clos = bipartite.closeness_centrality(G, self.top_nodes)
|
||||
answer = {
|
||||
"E8": 0.85,
|
||||
"E9": 0.79,
|
||||
"E7": 0.73,
|
||||
"Nora Fayette": 0.80,
|
||||
"Evelyn Jefferson": 0.80,
|
||||
"Theresa Anderson": 0.80,
|
||||
"E6": 0.69,
|
||||
"Sylvia Avondale": 0.77,
|
||||
"Laura Mandeville": 0.73,
|
||||
"Brenda Rogers": 0.73,
|
||||
"Katherina Rogers": 0.73,
|
||||
"E5": 0.59,
|
||||
"Helen Lloyd": 0.73,
|
||||
"E3": 0.56,
|
||||
"Ruth DeSand": 0.71,
|
||||
"Verne Sanderson": 0.71,
|
||||
"E12": 0.56,
|
||||
"Myra Liddel": 0.69,
|
||||
"E11": 0.54,
|
||||
"Eleanor Nye": 0.67,
|
||||
"Frances Anderson": 0.67,
|
||||
"Pearl Oglethorpe": 0.67,
|
||||
"E4": 0.54,
|
||||
"Charlotte McDowd": 0.60,
|
||||
"E10": 0.55,
|
||||
"Olivia Carleton": 0.59,
|
||||
"Flora Price": 0.59,
|
||||
"E2": 0.52,
|
||||
"E1": 0.52,
|
||||
"Dorothy Murchison": 0.65,
|
||||
"E13": 0.52,
|
||||
"E14": 0.52,
|
||||
}
|
||||
for node, value in answer.items():
|
||||
assert value == pytest.approx(clos[node], abs=1e-2)
|
||||
+84
@@ -0,0 +1,84 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
from networkx.algorithms.bipartite.cluster import cc_dot, cc_max, cc_min
|
||||
|
||||
|
||||
def test_pairwise_bipartite_cc_functions():
|
||||
# Test functions for different kinds of bipartite clustering coefficients
|
||||
# between pairs of nodes using 3 example graphs from figure 5 p. 40
|
||||
# Latapy et al (2008)
|
||||
G1 = nx.Graph([(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7)])
|
||||
G2 = nx.Graph([(0, 2), (0, 3), (0, 4), (1, 3), (1, 4), (1, 5)])
|
||||
G3 = nx.Graph(
|
||||
[(0, 2), (0, 3), (0, 4), (0, 5), (0, 6), (1, 5), (1, 6), (1, 7), (1, 8), (1, 9)]
|
||||
)
|
||||
result = {
|
||||
0: [1 / 3.0, 2 / 3.0, 2 / 5.0],
|
||||
1: [1 / 2.0, 2 / 3.0, 2 / 3.0],
|
||||
2: [2 / 8.0, 2 / 5.0, 2 / 5.0],
|
||||
}
|
||||
for i, G in enumerate([G1, G2, G3]):
|
||||
assert bipartite.is_bipartite(G)
|
||||
assert cc_dot(set(G[0]), set(G[1])) == result[i][0]
|
||||
assert cc_min(set(G[0]), set(G[1])) == result[i][1]
|
||||
assert cc_max(set(G[0]), set(G[1])) == result[i][2]
|
||||
|
||||
|
||||
def test_star_graph():
|
||||
G = nx.star_graph(3)
|
||||
# all modes are the same
|
||||
answer = {0: 0, 1: 1, 2: 1, 3: 1}
|
||||
assert bipartite.clustering(G, mode="dot") == answer
|
||||
assert bipartite.clustering(G, mode="min") == answer
|
||||
assert bipartite.clustering(G, mode="max") == answer
|
||||
|
||||
|
||||
def test_not_bipartite():
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
bipartite.clustering(nx.complete_graph(4))
|
||||
|
||||
|
||||
def test_bad_mode():
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
bipartite.clustering(nx.path_graph(4), mode="foo")
|
||||
|
||||
|
||||
def test_path_graph():
|
||||
G = nx.path_graph(4)
|
||||
answer = {0: 0.5, 1: 0.5, 2: 0.5, 3: 0.5}
|
||||
assert bipartite.clustering(G, mode="dot") == answer
|
||||
assert bipartite.clustering(G, mode="max") == answer
|
||||
answer = {0: 1, 1: 1, 2: 1, 3: 1}
|
||||
assert bipartite.clustering(G, mode="min") == answer
|
||||
|
||||
|
||||
def test_average_path_graph():
|
||||
G = nx.path_graph(4)
|
||||
assert bipartite.average_clustering(G, mode="dot") == 0.5
|
||||
assert bipartite.average_clustering(G, mode="max") == 0.5
|
||||
assert bipartite.average_clustering(G, mode="min") == 1
|
||||
|
||||
|
||||
def test_ra_clustering_davis():
|
||||
G = nx.davis_southern_women_graph()
|
||||
cc4 = round(bipartite.robins_alexander_clustering(G), 3)
|
||||
assert cc4 == 0.468
|
||||
|
||||
|
||||
def test_ra_clustering_square():
|
||||
G = nx.path_graph(4)
|
||||
G.add_edge(0, 3)
|
||||
assert bipartite.robins_alexander_clustering(G) == 1.0
|
||||
|
||||
|
||||
def test_ra_clustering_zero():
|
||||
G = nx.Graph()
|
||||
assert bipartite.robins_alexander_clustering(G) == 0
|
||||
G.add_nodes_from(range(4))
|
||||
assert bipartite.robins_alexander_clustering(G) == 0
|
||||
G.add_edges_from([(0, 1), (2, 3), (3, 4)])
|
||||
assert bipartite.robins_alexander_clustering(G) == 0
|
||||
G.add_edge(1, 2)
|
||||
assert bipartite.robins_alexander_clustering(G) == 0
|
||||
+33
@@ -0,0 +1,33 @@
|
||||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
|
||||
|
||||
class TestMinEdgeCover:
|
||||
"""Tests for :func:`networkx.algorithms.bipartite.min_edge_cover`"""
|
||||
|
||||
def test_empty_graph(self):
|
||||
G = nx.Graph()
|
||||
assert bipartite.min_edge_cover(G) == set()
|
||||
|
||||
def test_graph_single_edge(self):
|
||||
G = nx.Graph()
|
||||
G.add_edge(0, 1)
|
||||
assert bipartite.min_edge_cover(G) == {(0, 1), (1, 0)}
|
||||
|
||||
def test_bipartite_default(self):
|
||||
G = nx.Graph()
|
||||
G.add_nodes_from([1, 2, 3, 4], bipartite=0)
|
||||
G.add_nodes_from(["a", "b", "c"], bipartite=1)
|
||||
G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
|
||||
min_cover = bipartite.min_edge_cover(G)
|
||||
assert nx.is_edge_cover(G, min_cover)
|
||||
assert len(min_cover) == 8
|
||||
|
||||
def test_bipartite_explicit(self):
|
||||
G = nx.Graph()
|
||||
G.add_nodes_from([1, 2, 3, 4], bipartite=0)
|
||||
G.add_nodes_from(["a", "b", "c"], bipartite=1)
|
||||
G.add_edges_from([(1, "a"), (1, "b"), (2, "b"), (2, "c"), (3, "c"), (4, "a")])
|
||||
min_cover = bipartite.min_edge_cover(G, bipartite.eppstein_matching)
|
||||
assert nx.is_edge_cover(G, min_cover)
|
||||
assert len(min_cover) == 8
|
||||
+240
@@ -0,0 +1,240 @@
|
||||
"""
|
||||
Unit tests for bipartite edgelists.
|
||||
"""
|
||||
|
||||
import io
|
||||
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
from networkx.utils import edges_equal, graphs_equal, nodes_equal
|
||||
|
||||
|
||||
class TestEdgelist:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
cls.G = nx.Graph(name="test")
|
||||
e = [("a", "b"), ("b", "c"), ("c", "d"), ("d", "e"), ("e", "f"), ("a", "f")]
|
||||
cls.G.add_edges_from(e)
|
||||
cls.G.add_nodes_from(["a", "c", "e"], bipartite=0)
|
||||
cls.G.add_nodes_from(["b", "d", "f"], bipartite=1)
|
||||
cls.G.add_node("g", bipartite=0)
|
||||
cls.DG = nx.DiGraph(cls.G)
|
||||
cls.MG = nx.MultiGraph()
|
||||
cls.MG.add_edges_from([(1, 2), (1, 2), (1, 2)])
|
||||
cls.MG.add_node(1, bipartite=0)
|
||||
cls.MG.add_node(2, bipartite=1)
|
||||
|
||||
def test_read_edgelist_1(self):
|
||||
s = b"""\
|
||||
# comment line
|
||||
1 2
|
||||
# comment line
|
||||
2 3
|
||||
"""
|
||||
bytesIO = io.BytesIO(s)
|
||||
G = bipartite.read_edgelist(bytesIO, nodetype=int)
|
||||
assert edges_equal(G.edges(), [(1, 2), (2, 3)])
|
||||
|
||||
def test_read_edgelist_3(self):
|
||||
s = b"""\
|
||||
# comment line
|
||||
1 2 {'weight':2.0}
|
||||
# comment line
|
||||
2 3 {'weight':3.0}
|
||||
"""
|
||||
bytesIO = io.BytesIO(s)
|
||||
G = bipartite.read_edgelist(bytesIO, nodetype=int, data=False)
|
||||
assert edges_equal(G.edges(), [(1, 2), (2, 3)])
|
||||
|
||||
bytesIO = io.BytesIO(s)
|
||||
G = bipartite.read_edgelist(bytesIO, nodetype=int, data=True)
|
||||
assert edges_equal(
|
||||
G.edges(data=True), [(1, 2, {"weight": 2.0}), (2, 3, {"weight": 3.0})]
|
||||
)
|
||||
|
||||
def test_write_edgelist_1(self):
|
||||
fh = io.BytesIO()
|
||||
G = nx.Graph()
|
||||
G.add_edges_from([(1, 2), (2, 3)])
|
||||
G.add_node(1, bipartite=0)
|
||||
G.add_node(2, bipartite=1)
|
||||
G.add_node(3, bipartite=0)
|
||||
bipartite.write_edgelist(G, fh, data=False)
|
||||
fh.seek(0)
|
||||
assert fh.read() == b"1 2\n3 2\n"
|
||||
|
||||
def test_write_edgelist_2(self):
|
||||
fh = io.BytesIO()
|
||||
G = nx.Graph()
|
||||
G.add_edges_from([(1, 2), (2, 3)])
|
||||
G.add_node(1, bipartite=0)
|
||||
G.add_node(2, bipartite=1)
|
||||
G.add_node(3, bipartite=0)
|
||||
bipartite.write_edgelist(G, fh, data=True)
|
||||
fh.seek(0)
|
||||
assert fh.read() == b"1 2 {}\n3 2 {}\n"
|
||||
|
||||
def test_write_edgelist_3(self):
|
||||
fh = io.BytesIO()
|
||||
G = nx.Graph()
|
||||
G.add_edge(1, 2, weight=2.0)
|
||||
G.add_edge(2, 3, weight=3.0)
|
||||
G.add_node(1, bipartite=0)
|
||||
G.add_node(2, bipartite=1)
|
||||
G.add_node(3, bipartite=0)
|
||||
bipartite.write_edgelist(G, fh, data=True)
|
||||
fh.seek(0)
|
||||
assert fh.read() == b"1 2 {'weight': 2.0}\n3 2 {'weight': 3.0}\n"
|
||||
|
||||
def test_write_edgelist_4(self):
|
||||
fh = io.BytesIO()
|
||||
G = nx.Graph()
|
||||
G.add_edge(1, 2, weight=2.0)
|
||||
G.add_edge(2, 3, weight=3.0)
|
||||
G.add_node(1, bipartite=0)
|
||||
G.add_node(2, bipartite=1)
|
||||
G.add_node(3, bipartite=0)
|
||||
bipartite.write_edgelist(G, fh, data=[("weight")])
|
||||
fh.seek(0)
|
||||
assert fh.read() == b"1 2 2.0\n3 2 3.0\n"
|
||||
|
||||
def test_unicode(self, tmp_path):
|
||||
G = nx.Graph()
|
||||
name1 = chr(2344) + chr(123) + chr(6543)
|
||||
name2 = chr(5543) + chr(1543) + chr(324)
|
||||
G.add_edge(name1, "Radiohead", **{name2: 3})
|
||||
G.add_node(name1, bipartite=0)
|
||||
G.add_node("Radiohead", bipartite=1)
|
||||
|
||||
fname = tmp_path / "edgelist.txt"
|
||||
bipartite.write_edgelist(G, fname)
|
||||
H = bipartite.read_edgelist(fname)
|
||||
assert graphs_equal(G, H)
|
||||
|
||||
def test_latin1_issue(self, tmp_path):
|
||||
G = nx.Graph()
|
||||
name1 = chr(2344) + chr(123) + chr(6543)
|
||||
name2 = chr(5543) + chr(1543) + chr(324)
|
||||
G.add_edge(name1, "Radiohead", **{name2: 3})
|
||||
G.add_node(name1, bipartite=0)
|
||||
G.add_node("Radiohead", bipartite=1)
|
||||
|
||||
fname = tmp_path / "edgelist.txt"
|
||||
with pytest.raises(UnicodeEncodeError):
|
||||
bipartite.write_edgelist(G, fname, encoding="latin-1")
|
||||
|
||||
def test_latin1(self, tmp_path):
|
||||
G = nx.Graph()
|
||||
name1 = "Bj" + chr(246) + "rk"
|
||||
name2 = chr(220) + "ber"
|
||||
G.add_edge(name1, "Radiohead", **{name2: 3})
|
||||
G.add_node(name1, bipartite=0)
|
||||
G.add_node("Radiohead", bipartite=1)
|
||||
|
||||
fname = tmp_path / "edgelist.txt"
|
||||
bipartite.write_edgelist(G, fname, encoding="latin-1")
|
||||
H = bipartite.read_edgelist(fname, encoding="latin-1")
|
||||
assert graphs_equal(G, H)
|
||||
|
||||
def test_edgelist_graph(self, tmp_path):
|
||||
G = self.G
|
||||
fname = tmp_path / "edgelist.txt"
|
||||
bipartite.write_edgelist(G, fname)
|
||||
H = bipartite.read_edgelist(fname)
|
||||
H2 = bipartite.read_edgelist(fname)
|
||||
assert H is not H2 # they should be different graphs
|
||||
G.remove_node("g") # isolated nodes are not written in edgelist
|
||||
assert nodes_equal(list(H), list(G))
|
||||
assert edges_equal(list(H.edges()), list(G.edges()))
|
||||
|
||||
def test_edgelist_integers(self, tmp_path):
|
||||
G = nx.convert_node_labels_to_integers(self.G)
|
||||
fname = tmp_path / "edgelist.txt"
|
||||
bipartite.write_edgelist(G, fname)
|
||||
H = bipartite.read_edgelist(fname, nodetype=int)
|
||||
# isolated nodes are not written in edgelist
|
||||
G.remove_nodes_from(list(nx.isolates(G)))
|
||||
assert nodes_equal(list(H), list(G))
|
||||
assert edges_equal(list(H.edges()), list(G.edges()))
|
||||
|
||||
def test_edgelist_multigraph(self, tmp_path):
|
||||
G = self.MG
|
||||
fname = tmp_path / "edgelist.txt"
|
||||
bipartite.write_edgelist(G, fname)
|
||||
H = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
|
||||
H2 = bipartite.read_edgelist(fname, nodetype=int, create_using=nx.MultiGraph())
|
||||
assert H is not H2 # they should be different graphs
|
||||
assert nodes_equal(list(H), list(G))
|
||||
assert edges_equal(list(H.edges()), list(G.edges()))
|
||||
|
||||
def test_empty_digraph(self):
|
||||
with pytest.raises(nx.NetworkXNotImplemented):
|
||||
bytesIO = io.BytesIO()
|
||||
bipartite.write_edgelist(nx.DiGraph(), bytesIO)
|
||||
|
||||
def test_raise_attribute(self):
|
||||
with pytest.raises(AttributeError):
|
||||
G = nx.path_graph(4)
|
||||
bytesIO = io.BytesIO()
|
||||
bipartite.write_edgelist(G, bytesIO)
|
||||
|
||||
def test_parse_edgelist(self):
|
||||
"""Tests for conditions specific to
|
||||
parse_edge_list method"""
|
||||
|
||||
# lines with fewer than two fields are ignored
|
||||
lines = ["1 2", "2 3", "3 1", "4", " "]
|
||||
G = bipartite.parse_edgelist(lines, nodetype=int)
|
||||
assert list(G.nodes) == [1, 2, 3]
|
||||
|
||||
# Exception raised when node is not convertible
|
||||
# to specified data type
|
||||
with pytest.raises(TypeError, match=".*Failed to convert nodes"):
|
||||
lines = ["a b", "b c", "c a"]
|
||||
G = bipartite.parse_edgelist(lines, nodetype=int)
|
||||
|
||||
# Exception raised when the edge data cannot be
# converted to a dictionary
|
||||
with pytest.raises(TypeError, match=".*Failed to convert edge data"):
|
||||
lines = ["1 2 3", "2 3 4", "3 1 2"]
|
||||
G = bipartite.parse_edgelist(lines, nodetype=int)
|
||||
|
||||
# Exception raised when edge data and data
|
||||
# keys are not of same length
|
||||
with pytest.raises(IndexError):
|
||||
lines = ["1 2 3 4", "2 3 4"]
|
||||
G = bipartite.parse_edgelist(
|
||||
lines, nodetype=int, data=[("weight", int), ("key", int)]
|
||||
)
|
||||
|
||||
# Exception raised when edge data is not
|
||||
# convertible to specified data type
|
||||
with pytest.raises(TypeError, match=".*Failed to convert key data"):
|
||||
lines = ["1 2 3 a", "2 3 4 b"]
|
||||
G = bipartite.parse_edgelist(
|
||||
lines, nodetype=int, data=[("weight", int), ("key", int)]
|
||||
)
|
||||
|
||||
|
||||
def test_bipartite_edgelist_consistent_strip_handling():
|
||||
"""See gh-7462
|
||||
|
||||
Input when printed looks like:
|
||||
|
||||
A B interaction 2
|
||||
B C interaction 4
|
||||
C A interaction
|
||||
|
||||
Note the trailing \\t in the last line, which indicates the existence of
|
||||
an empty data field.
|
||||
"""
|
||||
lines = io.StringIO(
|
||||
"A\tB\tinteraction\t2\nB\tC\tinteraction\t4\nC\tA\tinteraction\t"
|
||||
)
|
||||
descr = [("type", str), ("weight", str)]
|
||||
# Should not raise
|
||||
G = nx.bipartite.parse_edgelist(lines, delimiter="\t", data=descr)
|
||||
expected = [("A", "B", "2"), ("A", "C", ""), ("B", "C", "4")]
|
||||
assert sorted(G.edges(data="weight")) == expected
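
# A minimal usage sketch (not part of the original test file) of the behaviour
# exercised by the test above: with a tab delimiter, parse_edgelist keeps an
# empty trailing data field as an empty string instead of raising.
import io

import networkx as nx

lines = io.StringIO("A\tB\tinteraction\t2\nC\tA\tinteraction\t")
G = nx.bipartite.parse_edgelist(
    lines, delimiter="\t", data=[("type", str), ("weight", str)]
)
assert sorted(G.edges(data="weight")) == [("A", "B", "2"), ("A", "C", "")]
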
+334
@@ -0,0 +1,334 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
|
||||
|
||||
def test_selfloops_raises():
|
||||
G = nx.ladder_graph(3)
|
||||
G.add_edge(0, 0)
|
||||
with pytest.raises(nx.NetworkXError, match=".*not bipartite"):
|
||||
nx.bipartite.maximal_extendability(G)
|
||||
|
||||
|
||||
def test_disconnected_raises():
|
||||
G = nx.ladder_graph(3)
|
||||
G.add_node("a")
|
||||
with pytest.raises(nx.NetworkXError, match=".*not connected"):
|
||||
nx.bipartite.maximal_extendability(G)
|
||||
|
||||
|
||||
def test_not_bipartite_raises():
|
||||
G = nx.complete_graph(5)
|
||||
with pytest.raises(nx.NetworkXError, match=".*not bipartite"):
|
||||
nx.bipartite.maximal_extendability(G)
|
||||
|
||||
|
||||
def test_no_perfect_matching_raises():
|
||||
G = nx.Graph([(0, 1), (0, 2)])
|
||||
with pytest.raises(nx.NetworkXError, match=".*not contain a perfect matching"):
|
||||
nx.bipartite.maximal_extendability(G)
|
||||
|
||||
|
||||
def test_residual_graph_not_strongly_connected_raises():
|
||||
G = nx.Graph([(1, 2), (2, 3), (3, 4)])
|
||||
with pytest.raises(
|
||||
nx.NetworkXError, match="The residual graph of G is not strongly connected"
|
||||
):
|
||||
nx.bipartite.maximal_extendability(G)
|
||||
|
||||
|
||||
def test_ladder_graph_is_1():
|
||||
G = nx.ladder_graph(3)
|
||||
assert nx.bipartite.maximal_extendability(G) == 1
|
||||
|
||||
|
||||
def test_cubical_graph_is_2():
|
||||
G = nx.cubical_graph()
|
||||
assert nx.bipartite.maximal_extendability(G) == 2
|
||||
|
||||
|
||||
def test_k_is_3():
|
||||
G = nx.Graph(
|
||||
[
|
||||
(1, 6),
|
||||
(1, 7),
|
||||
(1, 8),
|
||||
(1, 9),
|
||||
(2, 6),
|
||||
(2, 7),
|
||||
(2, 8),
|
||||
(2, 10),
|
||||
(3, 6),
|
||||
(3, 8),
|
||||
(3, 9),
|
||||
(3, 10),
|
||||
(4, 7),
|
||||
(4, 8),
|
||||
(4, 9),
|
||||
(4, 10),
|
||||
(5, 6),
|
||||
(5, 7),
|
||||
(5, 9),
|
||||
(5, 10),
|
||||
]
|
||||
)
|
||||
assert nx.bipartite.maximal_extendability(G) == 3
|
||||
|
||||
|
||||
def test_k_is_4():
|
||||
G = nx.Graph(
|
||||
[
|
||||
(8, 1),
|
||||
(8, 2),
|
||||
(8, 3),
|
||||
(8, 4),
|
||||
(8, 5),
|
||||
(9, 1),
|
||||
(9, 2),
|
||||
(9, 3),
|
||||
(9, 4),
|
||||
(9, 7),
|
||||
(10, 1),
|
||||
(10, 2),
|
||||
(10, 3),
|
||||
(10, 4),
|
||||
(10, 6),
|
||||
(11, 1),
|
||||
(11, 2),
|
||||
(11, 5),
|
||||
(11, 6),
|
||||
(11, 7),
|
||||
(12, 1),
|
||||
(12, 3),
|
||||
(12, 5),
|
||||
(12, 6),
|
||||
(12, 7),
|
||||
(13, 2),
|
||||
(13, 4),
|
||||
(13, 5),
|
||||
(13, 6),
|
||||
(13, 7),
|
||||
(14, 3),
|
||||
(14, 4),
|
||||
(14, 5),
|
||||
(14, 6),
|
||||
(14, 7),
|
||||
]
|
||||
)
|
||||
assert nx.bipartite.maximal_extendability(G) == 4
|
||||
|
||||
|
||||
def test_k_is_5():
|
||||
G = nx.Graph(
|
||||
[
|
||||
(8, 1),
|
||||
(8, 2),
|
||||
(8, 3),
|
||||
(8, 4),
|
||||
(8, 5),
|
||||
(8, 6),
|
||||
(9, 1),
|
||||
(9, 2),
|
||||
(9, 3),
|
||||
(9, 4),
|
||||
(9, 5),
|
||||
(9, 7),
|
||||
(10, 1),
|
||||
(10, 2),
|
||||
(10, 3),
|
||||
(10, 4),
|
||||
(10, 6),
|
||||
(10, 7),
|
||||
(11, 1),
|
||||
(11, 2),
|
||||
(11, 3),
|
||||
(11, 5),
|
||||
(11, 6),
|
||||
(11, 7),
|
||||
(12, 1),
|
||||
(12, 2),
|
||||
(12, 4),
|
||||
(12, 5),
|
||||
(12, 6),
|
||||
(12, 7),
|
||||
(13, 1),
|
||||
(13, 3),
|
||||
(13, 4),
|
||||
(13, 5),
|
||||
(13, 6),
|
||||
(13, 7),
|
||||
(14, 2),
|
||||
(14, 3),
|
||||
(14, 4),
|
||||
(14, 5),
|
||||
(14, 6),
|
||||
(14, 7),
|
||||
]
|
||||
)
|
||||
assert nx.bipartite.maximal_extendability(G) == 5
|
||||
|
||||
|
||||
def test_k_is_6():
|
||||
G = nx.Graph(
|
||||
[
|
||||
(9, 1),
|
||||
(9, 2),
|
||||
(9, 3),
|
||||
(9, 4),
|
||||
(9, 5),
|
||||
(9, 6),
|
||||
(9, 7),
|
||||
(10, 1),
|
||||
(10, 2),
|
||||
(10, 3),
|
||||
(10, 4),
|
||||
(10, 5),
|
||||
(10, 6),
|
||||
(10, 8),
|
||||
(11, 1),
|
||||
(11, 2),
|
||||
(11, 3),
|
||||
(11, 4),
|
||||
(11, 5),
|
||||
(11, 7),
|
||||
(11, 8),
|
||||
(12, 1),
|
||||
(12, 2),
|
||||
(12, 3),
|
||||
(12, 4),
|
||||
(12, 6),
|
||||
(12, 7),
|
||||
(12, 8),
|
||||
(13, 1),
|
||||
(13, 2),
|
||||
(13, 3),
|
||||
(13, 5),
|
||||
(13, 6),
|
||||
(13, 7),
|
||||
(13, 8),
|
||||
(14, 1),
|
||||
(14, 2),
|
||||
(14, 4),
|
||||
(14, 5),
|
||||
(14, 6),
|
||||
(14, 7),
|
||||
(14, 8),
|
||||
(15, 1),
|
||||
(15, 3),
|
||||
(15, 4),
|
||||
(15, 5),
|
||||
(15, 6),
|
||||
(15, 7),
|
||||
(15, 8),
|
||||
(16, 2),
|
||||
(16, 3),
|
||||
(16, 4),
|
||||
(16, 5),
|
||||
(16, 6),
|
||||
(16, 7),
|
||||
(16, 8),
|
||||
]
|
||||
)
|
||||
assert nx.bipartite.maximal_extendability(G) == 6
|
||||
|
||||
|
||||
def test_k_is_7():
|
||||
G = nx.Graph(
|
||||
[
|
||||
(1, 11),
|
||||
(1, 12),
|
||||
(1, 13),
|
||||
(1, 14),
|
||||
(1, 15),
|
||||
(1, 16),
|
||||
(1, 17),
|
||||
(1, 18),
|
||||
(2, 11),
|
||||
(2, 12),
|
||||
(2, 13),
|
||||
(2, 14),
|
||||
(2, 15),
|
||||
(2, 16),
|
||||
(2, 17),
|
||||
(2, 19),
|
||||
(3, 11),
|
||||
(3, 12),
|
||||
(3, 13),
|
||||
(3, 14),
|
||||
(3, 15),
|
||||
(3, 16),
|
||||
(3, 17),
|
||||
(3, 20),
|
||||
(4, 11),
|
||||
(4, 12),
|
||||
(4, 13),
|
||||
(4, 14),
|
||||
(4, 15),
|
||||
(4, 16),
|
||||
(4, 17),
|
||||
(4, 18),
|
||||
(4, 19),
|
||||
(4, 20),
|
||||
(5, 11),
|
||||
(5, 12),
|
||||
(5, 13),
|
||||
(5, 14),
|
||||
(5, 15),
|
||||
(5, 16),
|
||||
(5, 17),
|
||||
(5, 18),
|
||||
(5, 19),
|
||||
(5, 20),
|
||||
(6, 11),
|
||||
(6, 12),
|
||||
(6, 13),
|
||||
(6, 14),
|
||||
(6, 15),
|
||||
(6, 16),
|
||||
(6, 17),
|
||||
(6, 18),
|
||||
(6, 19),
|
||||
(6, 20),
|
||||
(7, 11),
|
||||
(7, 12),
|
||||
(7, 13),
|
||||
(7, 14),
|
||||
(7, 15),
|
||||
(7, 16),
|
||||
(7, 17),
|
||||
(7, 18),
|
||||
(7, 19),
|
||||
(7, 20),
|
||||
(8, 11),
|
||||
(8, 12),
|
||||
(8, 13),
|
||||
(8, 14),
|
||||
(8, 15),
|
||||
(8, 16),
|
||||
(8, 17),
|
||||
(8, 18),
|
||||
(8, 19),
|
||||
(8, 20),
|
||||
(9, 11),
|
||||
(9, 12),
|
||||
(9, 13),
|
||||
(9, 14),
|
||||
(9, 15),
|
||||
(9, 16),
|
||||
(9, 17),
|
||||
(9, 18),
|
||||
(9, 19),
|
||||
(9, 20),
|
||||
(10, 11),
|
||||
(10, 12),
|
||||
(10, 13),
|
||||
(10, 14),
|
||||
(10, 15),
|
||||
(10, 16),
|
||||
(10, 17),
|
||||
(10, 18),
|
||||
(10, 19),
|
||||
(10, 20),
|
||||
]
|
||||
)
|
||||
assert nx.bipartite.maximal_extendability(G) == 7
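
# Illustrative sketch (not part of the tests above): the 3-cube is bipartite,
# connected and has a perfect matching, so maximal_extendability returns its
# extendability, which the test above pins at 2.
import networkx as nx

G = nx.cubical_graph()
assert nx.bipartite.maximal_extendability(G) == 2
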
+409
@@ -0,0 +1,409 @@
|
||||
import numbers
|
||||
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
|
||||
from ..generators import (
|
||||
alternating_havel_hakimi_graph,
|
||||
complete_bipartite_graph,
|
||||
configuration_model,
|
||||
gnmk_random_graph,
|
||||
havel_hakimi_graph,
|
||||
preferential_attachment_graph,
|
||||
random_graph,
|
||||
reverse_havel_hakimi_graph,
|
||||
)
|
||||
|
||||
"""
|
||||
Generators - Bipartite
|
||||
----------------------
|
||||
"""
|
||||
|
||||
|
||||
class TestGeneratorsBipartite:
|
||||
def test_complete_bipartite_graph(self):
|
||||
G = complete_bipartite_graph(0, 0)
|
||||
assert nx.is_isomorphic(G, nx.null_graph())
|
||||
|
||||
for i in [1, 5]:
|
||||
G = complete_bipartite_graph(i, 0)
|
||||
assert nx.is_isomorphic(G, nx.empty_graph(i))
|
||||
G = complete_bipartite_graph(0, i)
|
||||
assert nx.is_isomorphic(G, nx.empty_graph(i))
|
||||
|
||||
G = complete_bipartite_graph(2, 2)
|
||||
assert nx.is_isomorphic(G, nx.cycle_graph(4))
|
||||
|
||||
G = complete_bipartite_graph(1, 5)
|
||||
assert nx.is_isomorphic(G, nx.star_graph(5))
|
||||
|
||||
G = complete_bipartite_graph(5, 1)
|
||||
assert nx.is_isomorphic(G, nx.star_graph(5))
|
||||
|
||||
# complete_bipartite_graph(m1,m2) is a connected graph with
|
||||
# m1+m2 nodes and m1*m2 edges
|
||||
for m1, m2 in [(5, 11), (7, 3)]:
|
||||
G = complete_bipartite_graph(m1, m2)
|
||||
assert nx.number_of_nodes(G) == m1 + m2
|
||||
assert nx.number_of_edges(G) == m1 * m2
|
||||
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
complete_bipartite_graph(7, 3, create_using=nx.DiGraph)
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
complete_bipartite_graph(7, 3, create_using=nx.MultiDiGraph)
|
||||
|
||||
mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
|
||||
assert mG.is_multigraph()
|
||||
assert sorted(mG.edges()) == sorted(G.edges())
|
||||
|
||||
mG = complete_bipartite_graph(7, 3, create_using=nx.MultiGraph)
|
||||
assert mG.is_multigraph()
|
||||
assert sorted(mG.edges()) == sorted(G.edges())
|
||||
|
||||
mG = complete_bipartite_graph(7, 3) # default to Graph
|
||||
assert sorted(mG.edges()) == sorted(G.edges())
|
||||
assert not mG.is_multigraph()
|
||||
assert not mG.is_directed()
|
||||
|
||||
# specify nodes rather than number of nodes
|
||||
for n1, n2 in [([1, 2], "ab"), (3, 2), (3, "ab"), ("ab", 3)]:
|
||||
G = complete_bipartite_graph(n1, n2)
|
||||
if isinstance(n1, numbers.Integral):
|
||||
if isinstance(n2, numbers.Integral):
|
||||
n2 = range(n1, n1 + n2)
|
||||
n1 = range(n1)
|
||||
elif isinstance(n2, numbers.Integral):
|
||||
n2 = range(n2)
|
||||
edges = {(u, v) for u in n1 for v in n2}
|
||||
assert edges == set(G.edges)
|
||||
assert G.size() == len(edges)
|
||||
|
||||
# raise when node sets are not distinct
|
||||
for n1, n2 in [([1, 2], 3), (3, [1, 2]), ("abc", "bcd")]:
|
||||
pytest.raises(nx.NetworkXError, complete_bipartite_graph, n1, n2)
|
||||
|
||||
def test_configuration_model(self):
|
||||
aseq = []
|
||||
bseq = []
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert len(G) == 0
|
||||
|
||||
aseq = [0, 0]
|
||||
bseq = [0, 0]
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert len(G) == 4
|
||||
assert G.number_of_edges() == 0
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2]
|
||||
pytest.raises(nx.NetworkXError, configuration_model, aseq, bseq)
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2, 2]
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 2, 2, 2]
|
||||
bseq = [3, 3, 3, 3]
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 1, 1, 1]
|
||||
bseq = [3, 3, 3]
|
||||
G = configuration_model(aseq, bseq)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
|
||||
|
||||
GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
|
||||
assert GU.number_of_nodes() == 6
|
||||
|
||||
GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
|
||||
assert GD.number_of_nodes() == 3
|
||||
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph()
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError, configuration_model, aseq, bseq, create_using=nx.DiGraph
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
configuration_model,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
def test_havel_hakimi_graph(self):
|
||||
aseq = []
|
||||
bseq = []
|
||||
G = havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 0
|
||||
|
||||
aseq = [0, 0]
|
||||
bseq = [0, 0]
|
||||
G = havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 4
|
||||
assert G.number_of_edges() == 0
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2]
|
||||
pytest.raises(nx.NetworkXError, havel_hakimi_graph, aseq, bseq)
|
||||
|
||||
bseq = [2, 2, 2, 2, 2, 2]
|
||||
G = havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 2, 2, 2]
|
||||
bseq = [3, 3, 3, 3]
|
||||
G = havel_hakimi_graph(aseq, bseq)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
|
||||
assert GU.number_of_nodes() == 6
|
||||
|
||||
GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
|
||||
assert GD.number_of_nodes() == 4
|
||||
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError, havel_hakimi_graph, aseq, bseq, create_using=nx.DiGraph
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
def test_reverse_havel_hakimi_graph(self):
|
||||
aseq = []
|
||||
bseq = []
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 0
|
||||
|
||||
aseq = [0, 0]
|
||||
bseq = [0, 0]
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 4
|
||||
assert G.number_of_edges() == 0
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2]
|
||||
pytest.raises(nx.NetworkXError, reverse_havel_hakimi_graph, aseq, bseq)
|
||||
|
||||
bseq = [2, 2, 2, 2, 2, 2]
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 2, 2, 2]
|
||||
bseq = [3, 3, 3, 3]
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 1, 1, 1]
|
||||
bseq = [3, 3, 3]
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
|
||||
|
||||
GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
|
||||
assert GU.number_of_nodes() == 6
|
||||
|
||||
GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
|
||||
assert GD.number_of_nodes() == 3
|
||||
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
reverse_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.DiGraph,
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
reverse_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.DiGraph,
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
reverse_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
def test_alternating_havel_hakimi_graph(self):
|
||||
aseq = []
|
||||
bseq = []
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 0
|
||||
|
||||
aseq = [0, 0]
|
||||
bseq = [0, 0]
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert len(G) == 4
|
||||
assert G.number_of_edges() == 0
|
||||
|
||||
aseq = [3, 3, 3, 3]
|
||||
bseq = [2, 2, 2, 2, 2]
|
||||
pytest.raises(nx.NetworkXError, alternating_havel_hakimi_graph, aseq, bseq)
|
||||
|
||||
bseq = [2, 2, 2, 2, 2, 2]
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 2, 2, 2]
|
||||
bseq = [3, 3, 3, 3]
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert sorted(d for n, d in G.degree()) == [2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
|
||||
|
||||
aseq = [2, 2, 2, 1, 1, 1]
|
||||
bseq = [3, 3, 3]
|
||||
G = alternating_havel_hakimi_graph(aseq, bseq)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
assert sorted(d for n, d in G.degree()) == [1, 1, 1, 2, 2, 2, 3, 3, 3]
|
||||
|
||||
GU = nx.projected_graph(nx.Graph(G), range(len(aseq)))
|
||||
assert GU.number_of_nodes() == 6
|
||||
|
||||
GD = nx.projected_graph(nx.Graph(G), range(len(aseq), len(aseq) + len(bseq)))
|
||||
assert GD.number_of_nodes() == 3
|
||||
|
||||
G = reverse_havel_hakimi_graph(aseq, bseq, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
alternating_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.DiGraph,
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
alternating_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.DiGraph,
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
alternating_havel_hakimi_graph,
|
||||
aseq,
|
||||
bseq,
|
||||
create_using=nx.MultiDiGraph,
|
||||
)
|
||||
|
||||
def test_preferential_attachment(self):
|
||||
aseq = [3, 2, 1, 1]
|
||||
G = preferential_attachment_graph(aseq, 0.5)
|
||||
assert G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
G = preferential_attachment_graph(aseq, 0.5, create_using=nx.Graph)
|
||||
assert not G.is_multigraph()
|
||||
assert not G.is_directed()
|
||||
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
preferential_attachment_graph,
|
||||
aseq,
|
||||
0.5,
|
||||
create_using=nx.DiGraph(),
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
preferential_attachment_graph,
|
||||
aseq,
|
||||
0.5,
|
||||
create_using=nx.DiGraph(),
|
||||
)
|
||||
pytest.raises(
|
||||
nx.NetworkXError,
|
||||
preferential_attachment_graph,
|
||||
aseq,
|
||||
0.5,
|
||||
create_using=nx.DiGraph(),
|
||||
)
|
||||
|
||||
def test_random_graph(self):
|
||||
n = 10
|
||||
m = 20
|
||||
G = random_graph(n, m, 0.9)
|
||||
assert len(G) == 30
|
||||
assert nx.is_bipartite(G)
|
||||
X, Y = nx.algorithms.bipartite.sets(G)
|
||||
assert set(range(n)) == X
|
||||
assert set(range(n, n + m)) == Y
|
||||
|
||||
def test_random_digraph(self):
|
||||
n = 10
|
||||
m = 20
|
||||
G = random_graph(n, m, 0.9, directed=True)
|
||||
assert len(G) == 30
|
||||
assert nx.is_bipartite(G)
|
||||
X, Y = nx.algorithms.bipartite.sets(G)
|
||||
assert set(range(n)) == X
|
||||
assert set(range(n, n + m)) == Y
|
||||
|
||||
def test_gnmk_random_graph(self):
|
||||
n = 10
|
||||
m = 20
|
||||
edges = 100
|
||||
# Set the seed because the generated graph is sometimes disconnected,
# which raises an error in bipartite.sets(G) below.
|
||||
G = gnmk_random_graph(n, m, edges, seed=1234)
|
||||
assert len(G) == n + m
|
||||
assert nx.is_bipartite(G)
|
||||
X, Y = nx.algorithms.bipartite.sets(G)
|
||||
# print(X)
|
||||
assert set(range(n)) == X
|
||||
assert set(range(n, n + m)) == Y
|
||||
assert edges == len(list(G.edges()))
|
||||
|
||||
def test_gnmk_random_graph_complete(self):
|
||||
n = 10
|
||||
m = 20
|
||||
edges = 200
|
||||
G = gnmk_random_graph(n, m, edges)
|
||||
assert len(G) == n + m
|
||||
assert nx.is_bipartite(G)
|
||||
X, Y = nx.algorithms.bipartite.sets(G)
|
||||
# print(X)
|
||||
assert set(range(n)) == X
|
||||
assert set(range(n, n + m)) == Y
|
||||
assert edges == len(list(G.edges()))
|
||||
|
||||
@pytest.mark.parametrize("n", (4, range(4), {0, 1, 2, 3}))
|
||||
@pytest.mark.parametrize("m", (range(4, 7), {4, 5, 6}))
|
||||
def test_complete_bipartite_graph_str(self, n, m):
|
||||
"""Ensure G.name is consistent for all inputs accepted by nodes_or_number.
|
||||
See gh-7396"""
|
||||
G = nx.complete_bipartite_graph(n, m)
|
||||
ans = "Graph named 'complete_bipartite_graph(4, 3)' with 7 nodes and 12 edges"
|
||||
assert str(G) == ans
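
# Quick sketch of two generator behaviours exercised above (the seed value is
# arbitrary): the partitions of complete_bipartite_graph may be given as
# explicit node containers rather than counts, and gnmk_random_graph(n, m, k)
# returns a bipartite graph with n + m nodes and exactly k edges.
import networkx as nx
from networkx.algorithms import bipartite

G = bipartite.complete_bipartite_graph([1, 2], "ab")
assert set(G.edges()) == {(1, "a"), (1, "b"), (2, "a"), (2, "b")}

H = bipartite.gnmk_random_graph(3, 4, 5, seed=42)
assert len(H) == 3 + 4 and H.number_of_edges() == 5
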
+327
@@ -0,0 +1,327 @@
|
||||
"""Unit tests for the :mod:`networkx.algorithms.bipartite.matching` module."""
|
||||
|
||||
import itertools
|
||||
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.bipartite.matching import (
|
||||
eppstein_matching,
|
||||
hopcroft_karp_matching,
|
||||
maximum_matching,
|
||||
minimum_weight_full_matching,
|
||||
to_vertex_cover,
|
||||
)
|
||||
|
||||
|
||||
class TestMatching:
|
||||
"""Tests for bipartite matching algorithms."""
|
||||
|
||||
def setup_method(self):
|
||||
"""Creates a bipartite graph for use in testing matching algorithms.
|
||||
|
||||
The bipartite graph has a maximum cardinality matching that leaves
|
||||
vertex 1 and vertex 10 unmatched. The first six numbers are the left
|
||||
vertices and the next six numbers are the right vertices.
|
||||
|
||||
"""
|
||||
self.simple_graph = nx.complete_bipartite_graph(2, 3)
|
||||
self.simple_solution = {0: 2, 1: 3, 2: 0, 3: 1}
|
||||
|
||||
edges = [(0, 7), (0, 8), (2, 6), (2, 9), (3, 8), (4, 8), (4, 9), (5, 11)]
|
||||
self.top_nodes = set(range(6))
|
||||
self.graph = nx.Graph()
|
||||
self.graph.add_nodes_from(range(12))
|
||||
self.graph.add_edges_from(edges)
|
||||
|
||||
# Example bipartite graph from issue 2127
|
||||
G = nx.Graph()
|
||||
G.add_nodes_from(
|
||||
[
|
||||
(1, "C"),
|
||||
(1, "B"),
|
||||
(0, "G"),
|
||||
(1, "F"),
|
||||
(1, "E"),
|
||||
(0, "C"),
|
||||
(1, "D"),
|
||||
(1, "I"),
|
||||
(0, "A"),
|
||||
(0, "D"),
|
||||
(0, "F"),
|
||||
(0, "E"),
|
||||
(0, "H"),
|
||||
(1, "G"),
|
||||
(1, "A"),
|
||||
(0, "I"),
|
||||
(0, "B"),
|
||||
(1, "H"),
|
||||
]
|
||||
)
|
||||
G.add_edge((1, "C"), (0, "A"))
|
||||
G.add_edge((1, "B"), (0, "A"))
|
||||
G.add_edge((0, "G"), (1, "I"))
|
||||
G.add_edge((0, "G"), (1, "H"))
|
||||
G.add_edge((1, "F"), (0, "A"))
|
||||
G.add_edge((1, "F"), (0, "C"))
|
||||
G.add_edge((1, "F"), (0, "E"))
|
||||
G.add_edge((1, "E"), (0, "A"))
|
||||
G.add_edge((1, "E"), (0, "C"))
|
||||
G.add_edge((0, "C"), (1, "D"))
|
||||
G.add_edge((0, "C"), (1, "I"))
|
||||
G.add_edge((0, "C"), (1, "G"))
|
||||
G.add_edge((0, "C"), (1, "H"))
|
||||
G.add_edge((1, "D"), (0, "A"))
|
||||
G.add_edge((1, "I"), (0, "A"))
|
||||
G.add_edge((1, "I"), (0, "E"))
|
||||
G.add_edge((0, "A"), (1, "G"))
|
||||
G.add_edge((0, "A"), (1, "H"))
|
||||
G.add_edge((0, "E"), (1, "G"))
|
||||
G.add_edge((0, "E"), (1, "H"))
|
||||
self.disconnected_graph = G
|
||||
|
||||
def check_match(self, matching):
|
||||
"""Asserts that the matching is what we expect from the bipartite graph
|
||||
constructed in the :meth:`setup` fixture.
|
||||
|
||||
"""
|
||||
# For the sake of brevity, rename `matching` to `M`.
|
||||
M = matching
|
||||
matched_vertices = frozenset(itertools.chain(*M.items()))
|
||||
# Assert that the maximum number of vertices (10) is matched.
|
||||
assert matched_vertices == frozenset(range(12)) - {1, 10}
|
||||
# Assert that the matching is symmetric: if u is matched to v, then v is
# matched back to u, so M[M[u]] == u for every matched vertex.
|
||||
assert all(u == M[M[u]] for u in range(12) if u in M)
|
||||
|
||||
def check_vertex_cover(self, vertices):
|
||||
"""Asserts that the given set of vertices is the vertex cover we
|
||||
expected from the bipartite graph constructed in the :meth:`setup`
|
||||
fixture.
|
||||
|
||||
"""
|
||||
# By Konig's theorem, the number of edges in a maximum matching equals
|
||||
# the number of vertices in a minimum vertex cover.
|
||||
assert len(vertices) == 5
|
||||
# Assert that the set is truly a vertex cover.
|
||||
for u, v in self.graph.edges():
|
||||
assert u in vertices or v in vertices
|
||||
# TODO Assert that the vertices are the correct ones.
|
||||
|
||||
def test_eppstein_matching(self):
|
||||
"""Tests that David Eppstein's implementation of the Hopcroft--Karp
|
||||
algorithm produces a maximum cardinality matching.
|
||||
|
||||
"""
|
||||
self.check_match(eppstein_matching(self.graph, self.top_nodes))
|
||||
|
||||
def test_hopcroft_karp_matching(self):
|
||||
"""Tests that the Hopcroft--Karp algorithm produces a maximum
|
||||
cardinality matching in a bipartite graph.
|
||||
|
||||
"""
|
||||
self.check_match(hopcroft_karp_matching(self.graph, self.top_nodes))
|
||||
|
||||
def test_to_vertex_cover(self):
|
||||
"""Test for converting a maximum matching to a minimum vertex cover."""
|
||||
matching = maximum_matching(self.graph, self.top_nodes)
|
||||
vertex_cover = to_vertex_cover(self.graph, matching, self.top_nodes)
|
||||
self.check_vertex_cover(vertex_cover)
|
||||
|
||||
def test_eppstein_matching_simple(self):
|
||||
match = eppstein_matching(self.simple_graph)
|
||||
assert match == self.simple_solution
|
||||
|
||||
def test_hopcroft_karp_matching_simple(self):
|
||||
match = hopcroft_karp_matching(self.simple_graph)
|
||||
assert match == self.simple_solution
|
||||
|
||||
def test_eppstein_matching_disconnected(self):
|
||||
with pytest.raises(nx.AmbiguousSolution):
|
||||
match = eppstein_matching(self.disconnected_graph)
|
||||
|
||||
def test_hopcroft_karp_matching_disconnected(self):
|
||||
with pytest.raises(nx.AmbiguousSolution):
|
||||
match = hopcroft_karp_matching(self.disconnected_graph)
|
||||
|
||||
def test_issue_2127(self):
|
||||
"""Test from issue 2127"""
|
||||
# Build the example DAG
|
||||
G = nx.DiGraph()
|
||||
G.add_edge("A", "C")
|
||||
G.add_edge("A", "B")
|
||||
G.add_edge("C", "E")
|
||||
G.add_edge("C", "D")
|
||||
G.add_edge("E", "G")
|
||||
G.add_edge("E", "F")
|
||||
G.add_edge("G", "I")
|
||||
G.add_edge("G", "H")
|
||||
|
||||
tc = nx.transitive_closure(G)
|
||||
btc = nx.Graph()
|
||||
|
||||
# Create a bipartite graph based on the transitive closure of G
|
||||
for v in tc.nodes():
|
||||
btc.add_node((0, v))
|
||||
btc.add_node((1, v))
|
||||
|
||||
for u, v in tc.edges():
|
||||
btc.add_edge((0, u), (1, v))
|
||||
|
||||
top_nodes = {n for n in btc if n[0] == 0}
|
||||
matching = hopcroft_karp_matching(btc, top_nodes)
|
||||
vertex_cover = to_vertex_cover(btc, matching, top_nodes)
|
||||
independent_set = set(G) - {v for _, v in vertex_cover}
|
||||
assert {"B", "D", "F", "I", "H"} == independent_set
|
||||
|
||||
def test_vertex_cover_issue_2384(self):
|
||||
G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
|
||||
matching = maximum_matching(G)
|
||||
vertex_cover = to_vertex_cover(G, matching)
|
||||
for u, v in G.edges():
|
||||
assert u in vertex_cover or v in vertex_cover
|
||||
|
||||
def test_vertex_cover_issue_3306(self):
|
||||
G = nx.Graph()
|
||||
edges = [(0, 2), (1, 0), (1, 1), (1, 2), (2, 2)]
|
||||
G.add_edges_from([((i, "L"), (j, "R")) for i, j in edges])
|
||||
|
||||
matching = maximum_matching(G)
|
||||
vertex_cover = to_vertex_cover(G, matching)
|
||||
for u, v in G.edges():
|
||||
assert u in vertex_cover or v in vertex_cover
|
||||
|
||||
def test_unorderable_nodes(self):
|
||||
a = object()
|
||||
b = object()
|
||||
c = object()
|
||||
d = object()
|
||||
e = object()
|
||||
G = nx.Graph([(a, d), (b, d), (b, e), (c, d)])
|
||||
matching = maximum_matching(G)
|
||||
vertex_cover = to_vertex_cover(G, matching)
|
||||
for u, v in G.edges():
|
||||
assert u in vertex_cover or v in vertex_cover
|
||||
|
||||
|
||||
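
# Sketch (not part of the original file) of the matching-to-cover conversion
# tested above (Konig's theorem): a maximum matching of a bipartite graph
# yields a minimum vertex cover of the same size, and that cover touches
# every edge.
import networkx as nx
from networkx.algorithms.bipartite import maximum_matching, to_vertex_cover

G = nx.Graph([(0, 3), (1, 3), (1, 4), (2, 3)])
top = {0, 1, 2}
matching = maximum_matching(G, top_nodes=top)
cover = to_vertex_cover(G, matching, top_nodes=top)
assert len(cover) == len(matching) // 2
assert all(u in cover or v in cover for u, v in G.edges())

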
def test_eppstein_matching():
|
||||
"""Test in accordance to issue #1927"""
|
||||
G = nx.Graph()
|
||||
G.add_nodes_from(["a", 2, 3, 4], bipartite=0)
|
||||
G.add_nodes_from([1, "b", "c"], bipartite=1)
|
||||
G.add_edges_from([("a", 1), ("a", "b"), (2, "b"), (2, "c"), (3, "c"), (4, 1)])
|
||||
matching = eppstein_matching(G)
|
||||
assert len(matching) == len(maximum_matching(G))
|
||||
assert all(x in set(matching.keys()) for x in set(matching.values()))
|
||||
|
||||
|
||||
class TestMinimumWeightFullMatching:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
def test_minimum_weight_full_matching_incomplete_graph(self):
|
||||
B = nx.Graph()
|
||||
B.add_nodes_from([1, 2], bipartite=0)
|
||||
B.add_nodes_from([3, 4], bipartite=1)
|
||||
B.add_edge(1, 4, weight=100)
|
||||
B.add_edge(2, 3, weight=100)
|
||||
B.add_edge(2, 4, weight=50)
|
||||
matching = minimum_weight_full_matching(B)
|
||||
assert matching == {1: 4, 2: 3, 4: 1, 3: 2}
|
||||
|
||||
def test_minimum_weight_full_matching_with_no_full_matching(self):
|
||||
B = nx.Graph()
|
||||
B.add_nodes_from([1, 2, 3], bipartite=0)
|
||||
B.add_nodes_from([4, 5, 6], bipartite=1)
|
||||
B.add_edge(1, 4, weight=100)
|
||||
B.add_edge(2, 4, weight=100)
|
||||
B.add_edge(3, 4, weight=50)
|
||||
B.add_edge(3, 5, weight=50)
|
||||
B.add_edge(3, 6, weight=50)
|
||||
with pytest.raises(ValueError):
|
||||
minimum_weight_full_matching(B)
|
||||
|
||||
def test_minimum_weight_full_matching_square(self):
|
||||
G = nx.complete_bipartite_graph(3, 3)
|
||||
G.add_edge(0, 3, weight=400)
|
||||
G.add_edge(0, 4, weight=150)
|
||||
G.add_edge(0, 5, weight=400)
|
||||
G.add_edge(1, 3, weight=400)
|
||||
G.add_edge(1, 4, weight=450)
|
||||
G.add_edge(1, 5, weight=600)
|
||||
G.add_edge(2, 3, weight=300)
|
||||
G.add_edge(2, 4, weight=225)
|
||||
G.add_edge(2, 5, weight=300)
|
||||
matching = minimum_weight_full_matching(G)
|
||||
assert matching == {0: 4, 1: 3, 2: 5, 4: 0, 3: 1, 5: 2}
|
||||
|
||||
def test_minimum_weight_full_matching_smaller_left(self):
|
||||
G = nx.complete_bipartite_graph(3, 4)
|
||||
G.add_edge(0, 3, weight=400)
|
||||
G.add_edge(0, 4, weight=150)
|
||||
G.add_edge(0, 5, weight=400)
|
||||
G.add_edge(0, 6, weight=1)
|
||||
G.add_edge(1, 3, weight=400)
|
||||
G.add_edge(1, 4, weight=450)
|
||||
G.add_edge(1, 5, weight=600)
|
||||
G.add_edge(1, 6, weight=2)
|
||||
G.add_edge(2, 3, weight=300)
|
||||
G.add_edge(2, 4, weight=225)
|
||||
G.add_edge(2, 5, weight=290)
|
||||
G.add_edge(2, 6, weight=3)
|
||||
matching = minimum_weight_full_matching(G)
|
||||
assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}
|
||||
|
||||
def test_minimum_weight_full_matching_smaller_top_nodes_right(self):
|
||||
G = nx.complete_bipartite_graph(3, 4)
|
||||
G.add_edge(0, 3, weight=400)
|
||||
G.add_edge(0, 4, weight=150)
|
||||
G.add_edge(0, 5, weight=400)
|
||||
G.add_edge(0, 6, weight=1)
|
||||
G.add_edge(1, 3, weight=400)
|
||||
G.add_edge(1, 4, weight=450)
|
||||
G.add_edge(1, 5, weight=600)
|
||||
G.add_edge(1, 6, weight=2)
|
||||
G.add_edge(2, 3, weight=300)
|
||||
G.add_edge(2, 4, weight=225)
|
||||
G.add_edge(2, 5, weight=290)
|
||||
G.add_edge(2, 6, weight=3)
|
||||
matching = minimum_weight_full_matching(G, top_nodes=[3, 4, 5, 6])
|
||||
assert matching == {0: 4, 1: 6, 2: 5, 4: 0, 5: 2, 6: 1}
|
||||
|
||||
def test_minimum_weight_full_matching_smaller_right(self):
|
||||
G = nx.complete_bipartite_graph(4, 3)
|
||||
G.add_edge(0, 4, weight=400)
|
||||
G.add_edge(0, 5, weight=400)
|
||||
G.add_edge(0, 6, weight=300)
|
||||
G.add_edge(1, 4, weight=150)
|
||||
G.add_edge(1, 5, weight=450)
|
||||
G.add_edge(1, 6, weight=225)
|
||||
G.add_edge(2, 4, weight=400)
|
||||
G.add_edge(2, 5, weight=600)
|
||||
G.add_edge(2, 6, weight=290)
|
||||
G.add_edge(3, 4, weight=1)
|
||||
G.add_edge(3, 5, weight=2)
|
||||
G.add_edge(3, 6, weight=3)
|
||||
matching = minimum_weight_full_matching(G)
|
||||
assert matching == {1: 4, 2: 6, 3: 5, 4: 1, 5: 3, 6: 2}
|
||||
|
||||
def test_minimum_weight_full_matching_negative_weights(self):
|
||||
G = nx.complete_bipartite_graph(2, 2)
|
||||
G.add_edge(0, 2, weight=-2)
|
||||
G.add_edge(0, 3, weight=0.2)
|
||||
G.add_edge(1, 2, weight=-2)
|
||||
G.add_edge(1, 3, weight=0.3)
|
||||
matching = minimum_weight_full_matching(G)
|
||||
assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
|
||||
|
||||
def test_minimum_weight_full_matching_different_weight_key(self):
|
||||
G = nx.complete_bipartite_graph(2, 2)
|
||||
G.add_edge(0, 2, mass=2)
|
||||
G.add_edge(0, 3, mass=0.2)
|
||||
G.add_edge(1, 2, mass=1)
|
||||
G.add_edge(1, 3, mass=2)
|
||||
matching = minimum_weight_full_matching(G, weight="mass")
|
||||
assert matching == {0: 3, 1: 2, 2: 1, 3: 0}
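
# Sketch of the assignment behaviour tested above (requires scipy, like the
# class above): every node on the smaller side must be matched and the total
# weight is minimised, so the cheap (2, 4) edge is passed over in favour of a
# full matching.
import networkx as nx
from networkx.algorithms.bipartite import minimum_weight_full_matching

B = nx.Graph()
B.add_nodes_from([1, 2], bipartite=0)
B.add_nodes_from([3, 4], bipartite=1)
B.add_weighted_edges_from([(1, 4, 100), (2, 3, 100), (2, 4, 50)])
assert minimum_weight_full_matching(B) == {1: 4, 2: 3, 4: 1, 3: 2}
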
+84
@@ -0,0 +1,84 @@
|
||||
import pytest
|
||||
|
||||
np = pytest.importorskip("numpy")
|
||||
sp = pytest.importorskip("scipy")
|
||||
sparse = pytest.importorskip("scipy.sparse")
|
||||
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
from networkx.utils import edges_equal
|
||||
|
||||
|
||||
class TestBiadjacencyMatrix:
|
||||
def test_biadjacency_matrix_weight(self):
|
||||
G = nx.path_graph(5)
|
||||
G.add_edge(0, 1, weight=2, other=4)
|
||||
X = [1, 3]
|
||||
Y = [0, 2, 4]
|
||||
M = bipartite.biadjacency_matrix(G, X, weight="weight")
|
||||
assert M[0, 0] == 2
|
||||
M = bipartite.biadjacency_matrix(G, X, weight="other")
|
||||
assert M[0, 0] == 4
|
||||
|
||||
def test_biadjacency_matrix(self):
|
||||
tops = [2, 5, 10]
|
||||
bots = [5, 10, 15]
|
||||
for i in range(len(tops)):
|
||||
G = bipartite.random_graph(tops[i], bots[i], 0.2)
|
||||
top = [n for n, d in G.nodes(data=True) if d["bipartite"] == 0]
|
||||
M = bipartite.biadjacency_matrix(G, top)
|
||||
assert M.shape[0] == tops[i]
|
||||
assert M.shape[1] == bots[i]
|
||||
|
||||
def test_biadjacency_matrix_order(self):
|
||||
G = nx.path_graph(5)
|
||||
G.add_edge(0, 1, weight=2)
|
||||
X = [3, 1]
|
||||
Y = [4, 2, 0]
|
||||
M = bipartite.biadjacency_matrix(G, X, Y, weight="weight")
|
||||
assert M[1, 2] == 2
|
||||
|
||||
def test_biadjacency_matrix_empty_graph(self):
|
||||
G = nx.empty_graph(2)
|
||||
M = nx.bipartite.biadjacency_matrix(G, [0])
|
||||
assert np.array_equal(M.toarray(), np.array([[0]]))
|
||||
|
||||
def test_null_graph(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
bipartite.biadjacency_matrix(nx.Graph(), [])
|
||||
|
||||
def test_empty_graph(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [])
|
||||
|
||||
def test_duplicate_row(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [1, 1])
|
||||
|
||||
def test_duplicate_col(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], [1, 1])
|
||||
|
||||
def test_format_keyword(self):
|
||||
with pytest.raises(nx.NetworkXError):
|
||||
bipartite.biadjacency_matrix(nx.Graph([(1, 0)]), [0], format="foo")
|
||||
|
||||
def test_from_biadjacency_roundtrip(self):
|
||||
B1 = nx.path_graph(5)
|
||||
M = bipartite.biadjacency_matrix(B1, [0, 2, 4])
|
||||
B2 = bipartite.from_biadjacency_matrix(M)
|
||||
assert nx.is_isomorphic(B1, B2)
|
||||
|
||||
def test_from_biadjacency_weight(self):
|
||||
M = sparse.csc_matrix([[1, 2], [0, 3]])
|
||||
B = bipartite.from_biadjacency_matrix(M)
|
||||
assert edges_equal(B.edges(), [(0, 2), (0, 3), (1, 3)])
|
||||
B = bipartite.from_biadjacency_matrix(M, edge_attribute="weight")
|
||||
e = [(0, 2, {"weight": 1}), (0, 3, {"weight": 2}), (1, 3, {"weight": 3})]
|
||||
assert edges_equal(B.edges(data=True), e)
|
||||
|
||||
def test_from_biadjacency_multigraph(self):
|
||||
M = sparse.csc_matrix([[1, 2], [0, 3]])
|
||||
B = bipartite.from_biadjacency_matrix(M, create_using=nx.MultiGraph())
|
||||
assert edges_equal(B.edges(), [(0, 2), (0, 3), (0, 3), (1, 3), (1, 3), (1, 3)])
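
# Sketch (needs scipy, as above) of the round trip exercised in this file:
# biadjacency_matrix lays out the row_order nodes as rows and the remaining
# nodes as columns, and from_biadjacency_matrix rebuilds an isomorphic
# bipartite graph.
import networkx as nx
from networkx.algorithms import bipartite

B = nx.path_graph(5)  # bipartition {0, 2, 4} / {1, 3}
M = bipartite.biadjacency_matrix(B, row_order=[0, 2, 4])
assert M.shape == (3, 2)
B2 = bipartite.from_biadjacency_matrix(M)
assert nx.is_isomorphic(B, B2)
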
+407
@@ -0,0 +1,407 @@
|
||||
import pytest
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms import bipartite
|
||||
from networkx.utils import edges_equal, nodes_equal
|
||||
|
||||
|
||||
class TestBipartiteProject:
|
||||
def test_path_projected_graph(self):
|
||||
G = nx.path_graph(4)
|
||||
P = bipartite.projected_graph(G, [1, 3])
|
||||
assert nodes_equal(list(P), [1, 3])
|
||||
assert edges_equal(list(P.edges()), [(1, 3)])
|
||||
P = bipartite.projected_graph(G, [0, 2])
|
||||
assert nodes_equal(list(P), [0, 2])
|
||||
assert edges_equal(list(P.edges()), [(0, 2)])
|
||||
G = nx.MultiGraph([(0, 1)])
|
||||
with pytest.raises(nx.NetworkXError, match="not defined for multigraphs"):
|
||||
bipartite.projected_graph(G, [0])
|
||||
|
||||
def test_path_projected_properties_graph(self):
|
||||
G = nx.path_graph(4)
|
||||
G.add_node(1, name="one")
|
||||
G.add_node(2, name="two")
|
||||
P = bipartite.projected_graph(G, [1, 3])
|
||||
assert nodes_equal(list(P), [1, 3])
|
||||
assert edges_equal(list(P.edges()), [(1, 3)])
|
||||
assert P.nodes[1]["name"] == G.nodes[1]["name"]
|
||||
P = bipartite.projected_graph(G, [0, 2])
|
||||
assert nodes_equal(list(P), [0, 2])
|
||||
assert edges_equal(list(P.edges()), [(0, 2)])
|
||||
assert P.nodes[2]["name"] == G.nodes[2]["name"]
|
||||
|
||||
def test_path_collaboration_projected_graph(self):
|
||||
G = nx.path_graph(4)
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
|
||||
assert nodes_equal(list(P), [1, 3])
|
||||
assert edges_equal(list(P.edges()), [(1, 3)])
|
||||
P[1][3]["weight"] = 1
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
|
||||
assert nodes_equal(list(P), [0, 2])
|
||||
assert edges_equal(list(P.edges()), [(0, 2)])
|
||||
P[0][2]["weight"] = 1
|
||||
|
||||
def test_directed_path_collaboration_projected_graph(self):
|
||||
G = nx.DiGraph()
|
||||
nx.add_path(G, range(4))
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, [1, 3])
|
||||
assert nodes_equal(list(P), [1, 3])
|
||||
assert edges_equal(list(P.edges()), [(1, 3)])
|
||||
P[1][3]["weight"] = 1
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, [0, 2])
|
||||
assert nodes_equal(list(P), [0, 2])
|
||||
assert edges_equal(list(P.edges()), [(0, 2)])
|
||||
P[0][2]["weight"] = 1
|
||||
|
||||
def test_path_weighted_projected_graph(self):
|
||||
G = nx.path_graph(4)
|
||||
|
||||
with pytest.raises(nx.NetworkXAlgorithmError):
|
||||
bipartite.weighted_projected_graph(G, [1, 2, 3, 3])
|
||||
|
||||
P = bipartite.weighted_projected_graph(G, [1, 3])
|
||||
assert nodes_equal(list(P), [1, 3])
|
||||
assert edges_equal(list(P.edges()), [(1, 3)])
|
||||
P[1][3]["weight"] = 1
|
||||
P = bipartite.weighted_projected_graph(G, [0, 2])
|
||||
assert nodes_equal(list(P), [0, 2])
|
||||
assert edges_equal(list(P.edges()), [(0, 2)])
|
||||
P[0][2]["weight"] = 1
|
||||
|
||||
def test_digraph_weighted_projection(self):
|
||||
G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (3, 4)])
|
||||
P = bipartite.overlap_weighted_projected_graph(G, [1, 3])
|
||||
assert nx.get_edge_attributes(P, "weight") == {(1, 3): 1.0}
|
||||
assert len(P) == 2
|
||||
|
||||
def test_path_weighted_projected_directed_graph(self):
|
||||
G = nx.DiGraph()
|
||||
nx.add_path(G, range(4))
|
||||
P = bipartite.weighted_projected_graph(G, [1, 3])
|
||||
assert nodes_equal(list(P), [1, 3])
|
||||
assert edges_equal(list(P.edges()), [(1, 3)])
|
||||
P[1][3]["weight"] = 1
|
||||
P = bipartite.weighted_projected_graph(G, [0, 2])
|
||||
assert nodes_equal(list(P), [0, 2])
|
||||
assert edges_equal(list(P.edges()), [(0, 2)])
|
||||
P[0][2]["weight"] = 1
|
||||
|
||||
def test_star_projected_graph(self):
|
||||
G = nx.star_graph(3)
|
||||
P = bipartite.projected_graph(G, [1, 2, 3])
|
||||
assert nodes_equal(list(P), [1, 2, 3])
|
||||
assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
|
||||
P = bipartite.weighted_projected_graph(G, [1, 2, 3])
|
||||
assert nodes_equal(list(P), [1, 2, 3])
|
||||
assert edges_equal(list(P.edges()), [(1, 2), (1, 3), (2, 3)])
|
||||
|
||||
P = bipartite.projected_graph(G, [0])
|
||||
assert nodes_equal(list(P), [0])
|
||||
assert edges_equal(list(P.edges()), [])
|
||||
|
||||
def test_project_multigraph(self):
|
||||
G = nx.Graph()
|
||||
G.add_edge("a", 1)
|
||||
G.add_edge("b", 1)
|
||||
G.add_edge("a", 2)
|
||||
G.add_edge("b", 2)
|
||||
P = bipartite.projected_graph(G, "ab")
|
||||
assert edges_equal(list(P.edges()), [("a", "b")])
|
||||
P = bipartite.weighted_projected_graph(G, "ab")
|
||||
assert edges_equal(list(P.edges()), [("a", "b")])
|
||||
P = bipartite.projected_graph(G, "ab", multigraph=True)
|
||||
assert edges_equal(list(P.edges()), [("a", "b"), ("a", "b")])
|
||||
|
||||
def test_project_collaboration(self):
|
||||
G = nx.Graph()
|
||||
G.add_edge("a", 1)
|
||||
G.add_edge("b", 1)
|
||||
G.add_edge("b", 2)
|
||||
G.add_edge("c", 2)
|
||||
G.add_edge("c", 3)
|
||||
G.add_edge("c", 4)
|
||||
G.add_edge("b", 4)
|
||||
P = bipartite.collaboration_weighted_projected_graph(G, "abc")
|
||||
assert P["a"]["b"]["weight"] == 1
|
||||
assert P["b"]["c"]["weight"] == 2
|
||||
|
||||
def test_directed_projection(self):
|
||||
G = nx.DiGraph()
|
||||
G.add_edge("A", 1)
|
||||
G.add_edge(1, "B")
|
||||
G.add_edge("A", 2)
|
||||
G.add_edge("B", 2)
|
||||
P = bipartite.projected_graph(G, "AB")
|
||||
assert edges_equal(list(P.edges()), [("A", "B")])
|
||||
P = bipartite.weighted_projected_graph(G, "AB")
|
||||
assert edges_equal(list(P.edges()), [("A", "B")])
|
||||
assert P["A"]["B"]["weight"] == 1
|
||||
|
||||
P = bipartite.projected_graph(G, "AB", multigraph=True)
|
||||
assert edges_equal(list(P.edges()), [("A", "B")])
|
||||
|
||||
G = nx.DiGraph()
|
||||
G.add_edge("A", 1)
|
||||
G.add_edge(1, "B")
|
||||
G.add_edge("A", 2)
|
||||
G.add_edge(2, "B")
|
||||
P = bipartite.projected_graph(G, "AB")
|
||||
assert edges_equal(list(P.edges()), [("A", "B")])
|
||||
P = bipartite.weighted_projected_graph(G, "AB")
|
||||
assert edges_equal(list(P.edges()), [("A", "B")])
|
||||
assert P["A"]["B"]["weight"] == 2
|
||||
|
||||
P = bipartite.projected_graph(G, "AB", multigraph=True)
|
||||
assert edges_equal(list(P.edges()), [("A", "B"), ("A", "B")])
|
||||
|
||||
|
||||
class TestBipartiteWeightedProjection:
|
||||
@classmethod
|
||||
def setup_class(cls):
|
||||
# Tore Opsahl's example
|
||||
# http://toreopsahl.com/2009/05/01/projecting-two-mode-networks-onto-weighted-one-mode-networks/
|
||||
cls.G = nx.Graph()
|
||||
cls.G.add_edge("A", 1)
|
||||
cls.G.add_edge("A", 2)
|
||||
cls.G.add_edge("B", 1)
|
||||
cls.G.add_edge("B", 2)
|
||||
cls.G.add_edge("B", 3)
|
||||
cls.G.add_edge("B", 4)
|
||||
cls.G.add_edge("B", 5)
|
||||
cls.G.add_edge("C", 1)
|
||||
cls.G.add_edge("D", 3)
|
||||
cls.G.add_edge("E", 4)
|
||||
cls.G.add_edge("E", 5)
|
||||
cls.G.add_edge("E", 6)
|
||||
cls.G.add_edge("F", 6)
|
||||
# Graph based on figure 6 from Newman (2001)
|
||||
cls.N = nx.Graph()
|
||||
cls.N.add_edge("A", 1)
|
||||
cls.N.add_edge("A", 2)
|
||||
cls.N.add_edge("A", 3)
|
||||
cls.N.add_edge("B", 1)
|
||||
cls.N.add_edge("B", 2)
|
||||
cls.N.add_edge("B", 3)
|
||||
cls.N.add_edge("C", 1)
|
||||
cls.N.add_edge("D", 1)
|
||||
cls.N.add_edge("E", 3)
|
||||
|
||||
def test_project_weighted_shared(self):
|
||||
edges = [
|
||||
("A", "B", 2),
|
||||
("A", "C", 1),
|
||||
("B", "C", 1),
|
||||
("B", "D", 1),
|
||||
("B", "E", 2),
|
||||
("E", "F", 1),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.weighted_projected_graph(self.G, "ABCDEF")
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
edges = [
|
||||
("A", "B", 3),
|
||||
("A", "E", 1),
|
||||
("A", "C", 1),
|
||||
("A", "D", 1),
|
||||
("B", "E", 1),
|
||||
("B", "C", 1),
|
||||
("B", "D", 1),
|
||||
("C", "D", 1),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.weighted_projected_graph(self.N, "ABCDE")
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
def test_project_weighted_newman(self):
|
||||
edges = [
|
||||
("A", "B", 1.5),
|
||||
("A", "C", 0.5),
|
||||
("B", "C", 0.5),
|
||||
("B", "D", 1),
|
||||
("B", "E", 2),
|
||||
("E", "F", 1),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.collaboration_weighted_projected_graph(self.G, "ABCDEF")
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
edges = [
|
||||
("A", "B", 11 / 6.0),
|
||||
("A", "E", 1 / 2.0),
|
||||
("A", "C", 1 / 3.0),
|
||||
("A", "D", 1 / 3.0),
|
||||
("B", "E", 1 / 2.0),
|
||||
("B", "C", 1 / 3.0),
|
||||
("B", "D", 1 / 3.0),
|
||||
("C", "D", 1 / 3.0),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.collaboration_weighted_projected_graph(self.N, "ABCDE")
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
def test_project_weighted_ratio(self):
|
||||
edges = [
|
||||
("A", "B", 2 / 6.0),
|
||||
("A", "C", 1 / 6.0),
|
||||
("B", "C", 1 / 6.0),
|
||||
("B", "D", 1 / 6.0),
|
||||
("B", "E", 2 / 6.0),
|
||||
("E", "F", 1 / 6.0),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.weighted_projected_graph(self.G, "ABCDEF", ratio=True)
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
edges = [
|
||||
("A", "B", 3 / 3.0),
|
||||
("A", "E", 1 / 3.0),
|
||||
("A", "C", 1 / 3.0),
|
||||
("A", "D", 1 / 3.0),
|
||||
("B", "E", 1 / 3.0),
|
||||
("B", "C", 1 / 3.0),
|
||||
("B", "D", 1 / 3.0),
|
||||
("C", "D", 1 / 3.0),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.weighted_projected_graph(self.N, "ABCDE", ratio=True)
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
def test_project_weighted_overlap(self):
|
||||
edges = [
|
||||
("A", "B", 2 / 2.0),
|
||||
("A", "C", 1 / 1.0),
|
||||
("B", "C", 1 / 1.0),
|
||||
("B", "D", 1 / 1.0),
|
||||
("B", "E", 2 / 3.0),
|
||||
("E", "F", 1 / 1.0),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF", jaccard=False)
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
edges = [
|
||||
("A", "B", 3 / 3.0),
|
||||
("A", "E", 1 / 1.0),
|
||||
("A", "C", 1 / 1.0),
|
||||
("A", "D", 1 / 1.0),
|
||||
("B", "E", 1 / 1.0),
|
||||
("B", "C", 1 / 1.0),
|
||||
("B", "D", 1 / 1.0),
|
||||
("C", "D", 1 / 1.0),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE", jaccard=False)
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
def test_project_weighted_jaccard(self):
|
||||
edges = [
|
||||
("A", "B", 2 / 5.0),
|
||||
("A", "C", 1 / 2.0),
|
||||
("B", "C", 1 / 5.0),
|
||||
("B", "D", 1 / 5.0),
|
||||
("B", "E", 2 / 6.0),
|
||||
("E", "F", 1 / 3.0),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.overlap_weighted_projected_graph(self.G, "ABCDEF")
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in list(P.edges()):
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
edges = [
|
||||
("A", "B", 3 / 3.0),
|
||||
("A", "E", 1 / 3.0),
|
||||
("A", "C", 1 / 3.0),
|
||||
("A", "D", 1 / 3.0),
|
||||
("B", "E", 1 / 3.0),
|
||||
("B", "C", 1 / 3.0),
|
||||
("B", "D", 1 / 3.0),
|
||||
("C", "D", 1 / 1.0),
|
||||
]
|
||||
Panswer = nx.Graph()
|
||||
Panswer.add_weighted_edges_from(edges)
|
||||
P = bipartite.overlap_weighted_projected_graph(self.N, "ABCDE")
|
||||
assert edges_equal(list(P.edges()), Panswer.edges())
|
||||
for u, v in P.edges():
|
||||
assert P[u][v]["weight"] == Panswer[u][v]["weight"]
|
||||
|
||||
def test_generic_weighted_projected_graph_simple(self):
|
||||
def shared(G, u, v):
|
||||
return len(set(G[u]) & set(G[v]))
|
||||
|
||||
B = nx.path_graph(5)
|
||||
G = bipartite.generic_weighted_projected_graph(
|
||||
B, [0, 2, 4], weight_function=shared
|
||||
)
|
||||
assert nodes_equal(list(G), [0, 2, 4])
|
||||
assert edges_equal(
|
||||
list(G.edges(data=True)),
|
||||
[(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
|
||||
)
|
||||
|
||||
G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
|
||||
assert nodes_equal(list(G), [0, 2, 4])
|
||||
assert edges_equal(
|
||||
list(G.edges(data=True)),
|
||||
[(0, 2, {"weight": 1}), (2, 4, {"weight": 1})],
|
||||
)
|
||||
B = nx.DiGraph()
|
||||
nx.add_path(B, range(5))
|
||||
G = bipartite.generic_weighted_projected_graph(B, [0, 2, 4])
|
||||
assert nodes_equal(list(G), [0, 2, 4])
|
||||
assert edges_equal(
|
||||
list(G.edges(data=True)), [(0, 2, {"weight": 1}), (2, 4, {"weight": 1})]
|
||||
)
|
||||
|
||||
def test_generic_weighted_projected_graph_custom(self):
|
||||
def jaccard(G, u, v):
|
||||
unbrs = set(G[u])
|
||||
vnbrs = set(G[v])
|
||||
return len(unbrs & vnbrs) / len(unbrs | vnbrs)
|
||||
|
||||
def my_weight(G, u, v, weight="weight"):
|
||||
w = 0
|
||||
for nbr in set(G[u]) & set(G[v]):
|
||||
w += G.edges[u, nbr].get(weight, 1) + G.edges[v, nbr].get(weight, 1)
|
||||
return w
|
||||
|
||||
B = nx.bipartite.complete_bipartite_graph(2, 2)
|
||||
for i, (u, v) in enumerate(B.edges()):
|
||||
B.edges[u, v]["weight"] = i + 1
|
||||
G = bipartite.generic_weighted_projected_graph(
|
||||
B, [0, 1], weight_function=jaccard
|
||||
)
|
||||
assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 1.0})])
|
||||
G = bipartite.generic_weighted_projected_graph(
|
||||
B, [0, 1], weight_function=my_weight
|
||||
)
|
||||
assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 10})])
|
||||
G = bipartite.generic_weighted_projected_graph(B, [0, 1])
|
||||
assert edges_equal(list(G.edges(data=True)), [(0, 1, {"weight": 2})])
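
# Sketch of the shared-neighbour weighting used throughout the tests above
# (the small example graph is made up for illustration): two "top" nodes that
# share k bottom-node neighbours are joined by an edge of weight k in the
# weighted projection.
import networkx as nx
from networkx.algorithms import bipartite

B = nx.Graph([("a", 1), ("a", 2), ("b", 1), ("b", 2), ("c", 2)])
P = bipartite.weighted_projected_graph(B, ["a", "b", "c"])
assert P["a"]["b"]["weight"] == 2  # shared neighbours: 1 and 2
assert P["b"]["c"]["weight"] == 1  # shared neighbour: 2
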
+35
@@ -0,0 +1,35 @@
|
||||
"""Unit tests for the :mod:`networkx.algorithms.bipartite.redundancy` module."""
|
||||
|
||||
import pytest
|
||||
|
||||
from networkx import NetworkXError, cycle_graph
|
||||
from networkx.algorithms.bipartite import complete_bipartite_graph, node_redundancy
|
||||
|
||||
|
||||
def test_no_redundant_nodes():
|
||||
G = complete_bipartite_graph(2, 2)
|
||||
|
||||
# when nodes is None
|
||||
rc = node_redundancy(G)
|
||||
assert all(redundancy == 1 for redundancy in rc.values())
|
||||
|
||||
# when set of nodes is specified
|
||||
rc = node_redundancy(G, (2, 3))
|
||||
assert rc == {2: 1.0, 3: 1.0}
|
||||
|
||||
|
||||
def test_redundant_nodes():
|
||||
G = cycle_graph(6)
|
||||
edge = {0, 3}
|
||||
G.add_edge(*edge)
|
||||
redundancy = node_redundancy(G)
|
||||
for v in edge:
|
||||
assert redundancy[v] == 2 / 3
|
||||
for v in set(G) - edge:
|
||||
assert redundancy[v] == 1
|
||||
|
||||
|
||||
def test_not_enough_neighbors():
|
||||
with pytest.raises(NetworkXError):
|
||||
G = complete_bipartite_graph(1, 2)
|
||||
node_redundancy(G)
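
# Sketch of the redundancy values checked above: in the 6-cycle with the
# extra chord (0, 3), the chord endpoints have redundancy 2/3 and every
# other node has redundancy 1.
import networkx as nx
from networkx.algorithms.bipartite import node_redundancy

G = nx.cycle_graph(6)
G.add_edge(0, 3)
rc = node_redundancy(G)
assert rc[0] == rc[3] == 2 / 3
assert all(rc[v] == 1 for v in set(G) - {0, 3})
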
+80
@@ -0,0 +1,80 @@
|
||||
import pytest
|
||||
|
||||
pytest.importorskip("scipy")
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.bipartite import spectral_bipartivity as sb
|
||||
|
||||
# Examples from Figure 1 of
# E. Estrada and J. A. Rodríguez-Velázquez, "Spectral measures of
# bipartivity in complex networks", Phys. Rev. E 72, 046105 (2005).
|
||||
|
||||
|
||||
class TestSpectralBipartivity:
|
||||
def test_star_like(self):
|
||||
# star-like
|
||||
|
||||
G = nx.star_graph(2)
|
||||
G.add_edge(1, 2)
|
||||
assert sb(G) == pytest.approx(0.843, abs=1e-3)
|
||||
|
||||
G = nx.star_graph(3)
|
||||
G.add_edge(1, 2)
|
||||
assert sb(G) == pytest.approx(0.871, abs=1e-3)
|
||||
|
||||
G = nx.star_graph(4)
|
||||
G.add_edge(1, 2)
|
||||
assert sb(G) == pytest.approx(0.890, abs=1e-3)
|
||||
|
||||
def test_k23_like(self):
|
||||
# K2,3-like
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(0, 1)
|
||||
assert sb(G) == pytest.approx(0.769, abs=1e-3)
|
||||
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(2, 4)
|
||||
assert sb(G) == pytest.approx(0.829, abs=1e-3)
|
||||
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(2, 4)
|
||||
G.add_edge(3, 4)
|
||||
assert sb(G) == pytest.approx(0.731, abs=1e-3)
|
||||
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(0, 1)
|
||||
G.add_edge(2, 4)
|
||||
assert sb(G) == pytest.approx(0.692, abs=1e-3)
|
||||
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(2, 4)
|
||||
G.add_edge(3, 4)
|
||||
G.add_edge(0, 1)
|
||||
assert sb(G) == pytest.approx(0.645, abs=1e-3)
|
||||
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(2, 4)
|
||||
G.add_edge(3, 4)
|
||||
G.add_edge(2, 3)
|
||||
assert sb(G) == pytest.approx(0.645, abs=1e-3)
|
||||
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(2, 4)
|
||||
G.add_edge(3, 4)
|
||||
G.add_edge(2, 3)
|
||||
G.add_edge(0, 1)
|
||||
assert sb(G) == pytest.approx(0.597, abs=1e-3)
|
||||
|
||||
def test_single_nodes(self):
|
||||
# single nodes
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(2, 4)
|
||||
sbn = sb(G, nodes=[1, 2])
|
||||
assert sbn[1] == pytest.approx(0.85, abs=1e-2)
|
||||
assert sbn[2] == pytest.approx(0.77, abs=1e-2)
|
||||
|
||||
G = nx.complete_bipartite_graph(2, 3)
|
||||
G.add_edge(0, 1)
|
||||
sbn = sb(G, nodes=[1, 2])
|
||||
assert sbn[1] == pytest.approx(0.73, abs=1e-2)
|
||||
assert sbn[2] == pytest.approx(0.82, abs=1e-2)
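
# Sketch (needs scipy, as above) of the values asserted in this file: the
# 2-star is bipartite, so its spectral bipartivity is 1, and closing it into
# a triangle lowers the measure to roughly 0.843.
import networkx as nx
from networkx.algorithms.bipartite import spectral_bipartivity

G = nx.star_graph(2)
print(round(spectral_bipartivity(G), 3))  # 1.0
G.add_edge(1, 2)  # close the triangle
print(round(spectral_bipartivity(G), 3))  # 0.843
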
@@ -0,0 +1,168 @@
|
||||
"""Routines to find the boundary of a set of nodes.
|
||||
|
||||
An edge boundary is a set of edges, each of which has exactly one
|
||||
endpoint in a given set of nodes (or, in the case of directed graphs,
|
||||
the set of edges whose source node is in the set).
|
||||
|
||||
A node boundary of a set *S* of nodes is the set of (out-)neighbors of
|
||||
nodes in *S* that are outside *S*.
|
||||
|
||||
"""
|
||||
|
||||
from itertools import chain
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["edge_boundary", "node_boundary"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs={"data": "default"}, preserve_edge_attrs="data")
|
||||
def edge_boundary(G, nbunch1, nbunch2=None, data=False, keys=False, default=None):
|
||||
"""Returns the edge boundary of `nbunch1`.
|
||||
|
||||
The *edge boundary* of a set *S* with respect to a set *T* is the
|
||||
set of edges (*u*, *v*) such that *u* is in *S* and *v* is in *T*.
|
||||
If *T* is not specified, it is assumed to be the set of all nodes
|
||||
not in *S*.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
nbunch1 : iterable
|
||||
Iterable of nodes in the graph representing the set of nodes
|
||||
whose edge boundary will be returned. (This is the set *S* from
|
||||
the definition above.)
|
||||
|
||||
nbunch2 : iterable
|
||||
Iterable of nodes representing the target (or "exterior") set of
|
||||
nodes. (This is the set *T* from the definition above.) If not
|
||||
specified, this is assumed to be the set of all nodes in `G`
|
||||
not in `nbunch1`.
|
||||
|
||||
keys : bool
|
||||
This parameter has the same meaning as in
|
||||
:meth:`MultiGraph.edges`.
|
||||
|
||||
data : bool or object
|
||||
This parameter has the same meaning as in
|
||||
:meth:`MultiGraph.edges`.
|
||||
|
||||
default : object
|
||||
This parameter has the same meaning as in
|
||||
:meth:`MultiGraph.edges`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
iterator
|
||||
An iterator over the edges in the boundary of `nbunch1` with
|
||||
respect to `nbunch2`. If `keys`, `data`, or `default`
|
||||
are specified and `G` is a multigraph, then edges are returned
|
||||
with keys and/or data, as in :meth:`MultiGraph.edges`.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.wheel_graph(6)
|
||||
|
||||
When nbunch2=None:
|
||||
|
||||
>>> list(nx.edge_boundary(G, (1, 3)))
|
||||
[(1, 0), (1, 2), (1, 5), (3, 0), (3, 2), (3, 4)]
|
||||
|
||||
When nbunch2 is given:
|
||||
|
||||
>>> list(nx.edge_boundary(G, (1, 3), (2, 0)))
|
||||
[(1, 0), (1, 2), (3, 0), (3, 2)]
|
||||
|
||||
Notes
|
||||
-----
|
||||
Any element of `nbunch` that is not in the graph `G` will be
|
||||
ignored.
|
||||
|
||||
`nbunch1` and `nbunch2` are usually meant to be disjoint, but in
|
||||
the interest of speed and generality, that is not required here.
|
||||
|
||||
"""
|
||||
nset1 = {n for n in nbunch1 if n in G}
|
||||
# Here we create an iterator over edges incident to nodes in the set
|
||||
# `nset1`. The `Graph.edges()` method does not provide a guarantee
|
||||
# on the orientation of the edges, so our algorithm below must
|
||||
# handle the case in which exactly one orientation, either (u, v) or
|
||||
# (v, u), appears in this iterable.
|
||||
if G.is_multigraph():
|
||||
edges = G.edges(nset1, data=data, keys=keys, default=default)
|
||||
else:
|
||||
edges = G.edges(nset1, data=data, default=default)
|
||||
# If `nbunch2` is not provided, then it is assumed to be the set
|
||||
# complement of `nbunch1`. For the sake of efficiency, this is
|
||||
# implemented by using the `not in` operator, instead of by creating
|
||||
# an additional set and using the `in` operator.
|
||||
if nbunch2 is None:
|
||||
return (e for e in edges if (e[0] in nset1) ^ (e[1] in nset1))
|
||||
nset2 = set(nbunch2)
|
||||
return (
|
||||
e
|
||||
for e in edges
|
||||
if (e[0] in nset1 and e[1] in nset2) or (e[1] in nset1 and e[0] in nset2)
|
||||
)
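The `data` and `default` parameters pass straight through to ``G.edges``; a small hedged sketch (this graph is illustrative and not taken from the docstring above):

>>> G = nx.Graph()
>>> G.add_edge(1, 2, weight=3)
>>> G.add_edge(2, 3)
>>> sorted(nx.edge_boundary(G, {2}, data="weight", default=1))
[(2, 1, 3), (2, 3, 1)]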
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def node_boundary(G, nbunch1, nbunch2=None):
|
||||
"""Returns the node boundary of `nbunch1`.
|
||||
|
||||
The *node boundary* of a set *S* with respect to a set *T* is the
|
||||
set of nodes *v* in *T* such that for some *u* in *S*, there is an
|
||||
edge joining *u* to *v*. If *T* is not specified, it is assumed to
|
||||
be the set of all nodes not in *S*.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph
|
||||
|
||||
nbunch1 : iterable
|
||||
Iterable of nodes in the graph representing the set of nodes
|
||||
whose node boundary will be returned. (This is the set *S* from
|
||||
the definition above.)
|
||||
|
||||
nbunch2 : iterable
|
||||
Iterable of nodes representing the target (or "exterior") set of
|
||||
nodes. (This is the set *T* from the definition above.) If not
|
||||
specified, this is assumed to be the set of all nodes in `G`
|
||||
not in `nbunch1`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
set
|
||||
The node boundary of `nbunch1` with respect to `nbunch2`.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.wheel_graph(6)
|
||||
|
||||
When nbunch2=None:
|
||||
|
||||
>>> list(nx.node_boundary(G, (3, 4)))
|
||||
[0, 2, 5]
|
||||
|
||||
When nbunch2 is given:
|
||||
|
||||
>>> list(nx.node_boundary(G, (3, 4), (0, 1, 5)))
|
||||
[0, 5]
|
||||
|
||||
Notes
|
||||
-----
|
||||
Any element of `nbunch` that is not in the graph `G` will be
|
||||
ignored.
|
||||
|
||||
`nbunch1` and `nbunch2` are usually meant to be disjoint, but in
|
||||
the interest of speed and generality, that is not required here.
|
||||
|
||||
"""
|
||||
nset1 = {n for n in nbunch1 if n in G}
|
||||
bdy = set(chain.from_iterable(G[v] for v in nset1)) - nset1
|
||||
# If `nbunch2` is not specified, it is assumed to be the set
|
||||
# complement of `nbunch1`.
|
||||
if nbunch2 is not None:
|
||||
bdy &= set(nbunch2)
|
||||
return bdy
|
||||
@@ -0,0 +1,205 @@
|
||||
"""Bridge-finding algorithms."""
|
||||
|
||||
from itertools import chain
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = ["bridges", "has_bridges", "local_bridges"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable
|
||||
def bridges(G, root=None):
|
||||
"""Generate all bridges in a graph.
|
||||
|
||||
A *bridge* in a graph is an edge whose removal causes the number of
|
||||
connected components of the graph to increase. Equivalently, a bridge is an
|
||||
edge that does not belong to any cycle. Bridges are also known as cut-edges,
|
||||
isthmuses, or cut arcs.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : undirected graph
|
||||
|
||||
root : node (optional)
|
||||
A node in the graph `G`. If specified, only the bridges in the
|
||||
connected component containing this node will be returned.
|
||||
|
||||
Yields
|
||||
------
|
||||
e : edge
|
||||
An edge in the graph whose removal disconnects the graph (or
|
||||
causes the number of connected components to increase).
|
||||
|
||||
Raises
|
||||
------
|
||||
NodeNotFound
|
||||
If `root` is not in the graph `G`.
|
||||
|
||||
NetworkXNotImplemented
|
||||
If `G` is a directed graph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
The barbell graph with parameter zero has a single bridge:
|
||||
|
||||
>>> G = nx.barbell_graph(10, 0)
|
||||
>>> list(nx.bridges(G))
|
||||
[(9, 10)]
|
||||
|
||||
Notes
|
||||
-----
|
||||
This is an implementation of the algorithm described in [1]_. An edge is a
|
||||
bridge if and only if it is not contained in any chain. Chains are found
|
||||
using the :func:`networkx.chain_decomposition` function.
|
||||
|
||||
The algorithm described in [1]_ requires a simple graph. If the provided
|
||||
graph is a multigraph, we convert it to a simple graph and verify that any
|
||||
bridges discovered by the chain decomposition algorithm are not multi-edges.
|
||||
|
||||
Ignoring polylogarithmic factors, the worst-case time complexity is the
|
||||
same as the :func:`networkx.chain_decomposition` function,
|
||||
$O(m + n)$, where $n$ is the number of nodes in the graph and $m$ is
|
||||
the number of edges.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] https://en.wikipedia.org/wiki/Bridge_%28graph_theory%29#Bridge-Finding_with_Chain_Decompositions
|
||||
"""
|
||||
multigraph = G.is_multigraph()
|
||||
H = nx.Graph(G) if multigraph else G
|
||||
chains = nx.chain_decomposition(H, root=root)
|
||||
chain_edges = set(chain.from_iterable(chains))
|
||||
if root is not None:
|
||||
H = H.subgraph(nx.node_connected_component(H, root)).copy()
|
||||
for u, v in H.edges():
|
||||
if (u, v) not in chain_edges and (v, u) not in chain_edges:
|
||||
if multigraph and len(G[u][v]) > 1:
|
||||
continue
|
||||
yield u, v
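The multigraph note above in practice: a doubled edge sits on a two-edge cycle of the multigraph, so it is filtered out by the ``len(G[u][v]) > 1`` check, while a single edge off that cycle is still reported. This example is illustrative, not part of the module:

>>> G = nx.MultiGraph([(0, 1), (0, 1), (1, 2)])
>>> list(nx.bridges(G))
[(1, 2)]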
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable
|
||||
def has_bridges(G, root=None):
|
||||
"""Decide whether a graph has any bridges.
|
||||
|
||||
A *bridge* in a graph is an edge whose removal causes the number of
|
||||
connected components of the graph to increase.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : undirected graph
|
||||
|
||||
root : node (optional)
|
||||
A node in the graph `G`. If specified, only the bridges in the
|
||||
connected component containing this node will be considered.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
Whether the graph (or the connected component containing `root`)
|
||||
has any bridges.
|
||||
|
||||
Raises
|
||||
------
|
||||
NodeNotFound
|
||||
If `root` is not in the graph `G`.
|
||||
|
||||
NetworkXNotImplemented
|
||||
If `G` is a directed graph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
The barbell graph with parameter zero has a single bridge::
|
||||
|
||||
>>> G = nx.barbell_graph(10, 0)
|
||||
>>> nx.has_bridges(G)
|
||||
True
|
||||
|
||||
On the other hand, the cycle graph has no bridges::
|
||||
|
||||
>>> G = nx.cycle_graph(5)
|
||||
>>> nx.has_bridges(G)
|
||||
False
|
||||
|
||||
Notes
|
||||
-----
|
||||
This implementation uses the :func:`networkx.bridges` function, so
|
||||
it shares its worst-case time complexity, $O(m + n)$, ignoring
|
||||
polylogarithmic factors, where $n$ is the number of nodes in the
|
||||
graph and $m$ is the number of edges.
|
||||
|
||||
"""
|
||||
try:
|
||||
next(bridges(G, root=root))
|
||||
except StopIteration:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def local_bridges(G, with_span=True, weight=None):
|
||||
"""Iterate over local bridges of `G` optionally computing the span
|
||||
|
||||
A *local bridge* is an edge whose endpoints have no common neighbors.
|
||||
That is, the edge is not part of a triangle in the graph.
|
||||
|
||||
The *span* of a *local bridge* is the shortest path length between
|
||||
the endpoints if the local bridge is removed.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : undirected graph
|
||||
|
||||
with_span : bool
|
||||
If True, yield a 3-tuple `(u, v, span)`
|
||||
|
||||
weight : function, string or None (default: None)
|
||||
If function, used to compute edge weights for the span.
|
||||
If string, the edge data attribute used in calculating span.
|
||||
If None, all edges have weight 1.
|
||||
|
||||
Yields
|
||||
------
|
||||
e : edge
|
||||
The local bridges as an edge 2-tuple of nodes `(u, v)` or
|
||||
as a 3-tuple `(u, v, span)` when `with_span is True`.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If `G` is a directed graph or multigraph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
A cycle graph has every edge a local bridge with span N-1.
|
||||
|
||||
>>> G = nx.cycle_graph(9)
|
||||
>>> (0, 8, 8) in set(nx.local_bridges(G))
|
||||
True
|
||||
"""
|
||||
if with_span is not True:
|
||||
for u, v in G.edges:
|
||||
if not (set(G[u]) & set(G[v])):
|
||||
yield u, v
|
||||
else:
|
||||
wt = nx.weighted._weight_function(G, weight)
|
||||
for u, v in G.edges:
|
||||
if not (set(G[u]) & set(G[v])):
|
||||
enodes = {u, v}
|
||||
|
||||
def hide_edge(n, nbr, d):
|
||||
if n not in enodes or nbr not in enodes:
|
||||
return wt(n, nbr, d)
|
||||
return None
|
||||
|
||||
try:
|
||||
span = nx.shortest_path_length(G, u, v, weight=hide_edge)
|
||||
yield u, v, span
|
||||
except nx.NetworkXNoPath:
|
||||
yield u, v, float("inf")
|
||||
@@ -0,0 +1,155 @@
|
||||
"""Routines to calculate the broadcast time of certain graphs.
|
||||
|
||||
Broadcasting is an information dissemination problem in which a node in a graph,
|
||||
called the originator, must distribute a message to all other nodes by placing
|
||||
a series of calls along the edges of the graph. Once informed, other nodes aid
|
||||
the originator in distributing the message.
|
||||
|
||||
The broadcasting must be completed as quickly as possible subject to the
|
||||
following constraints:
|
||||
- Each call requires one unit of time.
|
||||
- A node can only participate in one call per unit of time.
|
||||
- Each call only involves two adjacent nodes: a sender and a receiver.
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
from networkx import NetworkXError
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = [
|
||||
"tree_broadcast_center",
|
||||
"tree_broadcast_time",
|
||||
]
|
||||
|
||||
|
||||
def _get_max_broadcast_value(G, U, v, values):
|
||||
adj = sorted(set(G.neighbors(v)) & U, key=values.get, reverse=True)
|
||||
return max(values[u] + i for i, u in enumerate(adj, start=1))
|
||||
|
||||
|
||||
def _get_broadcast_centers(G, v, values, target):
|
||||
adj = sorted(G.neighbors(v), key=values.get, reverse=True)
|
||||
j = next(i for i, u in enumerate(adj, start=1) if values[u] + i == target)
|
||||
return set([v] + adj[:j])
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def tree_broadcast_center(G):
|
||||
"""Return the Broadcast Center of the tree `G`.
|
||||
|
||||
The broadcast center of a graph G denotes the set of nodes having
|
||||
minimum broadcast time [1]_. This is a linear algorithm for determining
|
||||
the broadcast center of a tree with ``N`` nodes; as a by-product, it also
|
||||
determines the broadcast time from the broadcast center.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : undirected graph
|
||||
The graph should be an undirected tree
|
||||
|
||||
Returns
|
||||
-------
|
||||
BC : (int, set) tuple
|
||||
minimum broadcast number of the tree, set of broadcast centers
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Slater, P.J., Cockayne, E.J., Hedetniemi, S.T,
|
||||
Information dissemination in trees. SIAM J.Comput. 10(4), 692–701 (1981)
|
||||
"""
|
||||
# Assert that the graph G is a tree
|
||||
if not nx.is_tree(G):
|
||||
raise NetworkXError("Input graph is not a tree")
|
||||
# step 0
|
||||
if G.number_of_nodes() == 2:
|
||||
return 1, set(G.nodes())
|
||||
if G.number_of_nodes() == 1:
|
||||
return 0, set(G.nodes())
|
||||
|
||||
# step 1
|
||||
U = {node for node, deg in G.degree if deg == 1}
|
||||
values = {n: 0 for n in U}
|
||||
T = G.copy()
|
||||
T.remove_nodes_from(U)
|
||||
|
||||
# step 2
|
||||
W = {node for node, deg in T.degree if deg == 1}
|
||||
values.update((w, G.degree[w] - 1) for w in W)
|
||||
|
||||
# step 3
|
||||
while T.number_of_nodes() >= 2:
|
||||
# step 4
|
||||
w = min(W, key=lambda n: values[n])
|
||||
v = next(T.neighbors(w))
|
||||
|
||||
# step 5
|
||||
U.add(w)
|
||||
W.remove(w)
|
||||
T.remove_node(w)
|
||||
|
||||
# step 6
|
||||
if T.degree(v) == 1:
|
||||
# update t(v)
|
||||
values.update({v: _get_max_broadcast_value(G, U, v, values)})
|
||||
W.add(v)
|
||||
|
||||
# step 7
|
||||
v = nx.utils.arbitrary_element(T)
|
||||
b_T = _get_max_broadcast_value(G, U, v, values)
|
||||
return b_T, _get_broadcast_centers(G, v, values, b_T)
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable
|
||||
def tree_broadcast_time(G, node=None):
|
||||
"""Return the Broadcast Time of the tree `G`.
|
||||
|
||||
The minimum broadcast time of a node is defined as the minimum amount
|
||||
of time required to complete broadcasting starting from the
|
||||
originator. The broadcast time of a graph is the maximum over
|
||||
all nodes of the minimum broadcast time from that node [1]_.
|
||||
This function returns the minimum broadcast time of `node`.
|
||||
If `node` is None the broadcast time for the graph is returned.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : undirected graph
|
||||
The graph should be an undirected tree
|
||||
node: int, optional
|
||||
index of starting node. If `None`, the algorithm returns the broadcast
|
||||
time of the tree.
|
||||
|
||||
Returns
|
||||
-------
|
||||
BT : int
|
||||
Broadcast Time of a node in a tree
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If the graph is directed or is a multigraph.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Harutyunyan, H. A. and Li, Z.
|
||||
"A Simple Construction of Broadcast Graphs."
|
||||
In Computing and Combinatorics. COCOON 2019
|
||||
(Ed. D. Z. Du and C. Tian.) Springer, pp. 240-253, 2019.
|
||||
"""
|
||||
b_T, b_C = tree_broadcast_center(G)
|
||||
if node is not None:
|
||||
return b_T + min(nx.shortest_path_length(G, node, u) for u in b_C)
|
||||
dist_from_center = dict.fromkeys(G, len(G))
|
||||
for u in b_C:
|
||||
for v, dist in nx.shortest_path_length(G, u).items():
|
||||
if dist < dist_from_center[v]:
|
||||
dist_from_center[v] = dist
|
||||
return b_T + max(dist_from_center.values())
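A hedged illustration on a small path graph; the numbers below follow from hand-checking the broadcast model in the module docstring (one call per time unit, informed nodes may forward), not from a recorded run:

>>> G = nx.path_graph(5)  # 0 - 1 - 2 - 3 - 4
>>> b_T, centers = nx.tree_broadcast_center(G)
>>> b_T  # any broadcast-center node can finish in 3 time units
3
>>> nx.tree_broadcast_time(G)  # worst case: originating at an endpoint
4
>>> nx.tree_broadcast_time(G, node=2)
3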
|
||||
@@ -0,0 +1,20 @@
from .betweenness import *
from .betweenness_subset import *
from .closeness import *
from .current_flow_betweenness import *
from .current_flow_betweenness_subset import *
from .current_flow_closeness import *
from .degree_alg import *
from .dispersion import *
from .eigenvector import *
from .group import *
from .harmonic import *
from .katz import *
from .load import *
from .percolation import *
from .reaching import *
from .second_order import *
from .subgraph_alg import *
from .trophic import *
from .voterank_alg import *
from .laplacian import *
@@ -0,0 +1,436 @@
|
||||
"""Betweenness centrality measures."""
|
||||
|
||||
from collections import deque
|
||||
from heapq import heappop, heappush
|
||||
from itertools import count
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.shortest_paths.weighted import _weight_function
|
||||
from networkx.utils import py_random_state
|
||||
from networkx.utils.decorators import not_implemented_for
|
||||
|
||||
__all__ = ["betweenness_centrality", "edge_betweenness_centrality"]
|
||||
|
||||
|
||||
@py_random_state(5)
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def betweenness_centrality(
|
||||
G, k=None, normalized=True, weight=None, endpoints=False, seed=None
|
||||
):
|
||||
r"""Compute the shortest-path betweenness centrality for nodes.
|
||||
|
||||
Betweenness centrality of a node $v$ is the sum of the
|
||||
fraction of all-pairs shortest paths that pass through $v$
|
||||
|
||||
.. math::
|
||||
|
||||
c_B(v) =\sum_{s,t \in V} \frac{\sigma(s, t|v)}{\sigma(s, t)}
|
||||
|
||||
where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
|
||||
shortest $(s, t)$-paths, and $\sigma(s, t|v)$ is the number of
|
||||
those paths passing through some node $v$ other than $s, t$.
|
||||
If $s = t$, $\sigma(s, t) = 1$, and if $v \in {s, t}$,
|
||||
$\sigma(s, t|v) = 0$ [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
k : int, optional (default=None)
|
||||
If k is not None, use k node samples to estimate betweenness.
The value of k must be <= n, where n is the number of nodes in the graph.
|
||||
Higher values give better approximation.
|
||||
|
||||
normalized : bool, optional
|
||||
If True the betweenness values are normalized by `2/((n-1)(n-2))`
|
||||
for graphs, and `1/((n-1)(n-2))` for directed graphs where `n`
|
||||
is the number of nodes in G.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
Weights are used to calculate weighted shortest paths, so they are
|
||||
interpreted as distances.
|
||||
|
||||
endpoints : bool, optional
|
||||
If True include the endpoints in the shortest path counts.
|
||||
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
Note that this is only used if k is not None.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with betweenness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
edge_betweenness_centrality
|
||||
load_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The algorithm is from Ulrik Brandes [1]_.
|
||||
See [4]_ for the original first published version and [2]_ for details on
|
||||
algorithms for variations and related metrics.
|
||||
|
||||
For approximate betweenness calculations set k=#samples to use
|
||||
k nodes ("pivots") to estimate the betweenness values. For an estimate
|
||||
of the number of pivots needed see [3]_.
|
||||
|
||||
For weighted graphs the edge weights must be greater than zero.
|
||||
Zero edge weights can produce an infinite number of equal length
|
||||
paths between pairs of nodes.
|
||||
|
||||
The total number of paths between source and target is counted
|
||||
differently for directed and undirected graphs. Directed paths
|
||||
are easy to count. Undirected paths are tricky: should a path
|
||||
from "u" to "v" count as 1 undirected path or as 2 directed paths?
|
||||
|
||||
For betweenness_centrality we report the number of undirected
|
||||
paths when G is undirected.
|
||||
|
||||
For betweenness_centrality_subset the reporting is different.
|
||||
If the source and target subsets are the same, then we want
|
||||
to count undirected paths. But if the source and target subsets
|
||||
differ -- for example, if sources is {0} and targets is {1},
|
||||
then we are only counting the paths in one direction. They are
|
||||
undirected paths but we are counting them in a directed way.
|
||||
To count them as undirected paths, each should count as half a path.
|
||||
|
||||
This algorithm is not guaranteed to be correct if edge weights
|
||||
are floating point numbers. As a workaround you can use integer
|
||||
numbers by multiplying the relevant edge attributes by a convenient
|
||||
constant factor (eg 100) and converting to integers.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Ulrik Brandes:
|
||||
A Faster Algorithm for Betweenness Centrality.
|
||||
Journal of Mathematical Sociology 25(2):163-177, 2001.
|
||||
https://doi.org/10.1080/0022250X.2001.9990249
|
||||
.. [2] Ulrik Brandes:
|
||||
On Variants of Shortest-Path Betweenness
|
||||
Centrality and their Generic Computation.
|
||||
Social Networks 30(2):136-145, 2008.
|
||||
https://doi.org/10.1016/j.socnet.2007.11.001
|
||||
.. [3] Ulrik Brandes and Christian Pich:
|
||||
Centrality Estimation in Large Networks.
|
||||
International Journal of Bifurcation and Chaos 17(7):2303-2318, 2007.
|
||||
https://dx.doi.org/10.1142/S0218127407018403
|
||||
.. [4] Linton C. Freeman:
|
||||
A set of measures of centrality based on betweenness.
|
||||
Sociometry 40: 35–41, 1977
|
||||
https://doi.org/10.2307/3033543
|
||||
"""
|
||||
betweenness = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
|
||||
if k is None:
|
||||
nodes = G
|
||||
else:
|
||||
nodes = seed.sample(list(G.nodes()), k)
|
||||
for s in nodes:
|
||||
# single source shortest paths
|
||||
if weight is None: # use BFS
|
||||
S, P, sigma, _ = _single_source_shortest_path_basic(G, s)
|
||||
else: # use Dijkstra's algorithm
|
||||
S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight)
|
||||
# accumulation
|
||||
if endpoints:
|
||||
betweenness, _ = _accumulate_endpoints(betweenness, S, P, sigma, s)
|
||||
else:
|
||||
betweenness, _ = _accumulate_basic(betweenness, S, P, sigma, s)
|
||||
# rescaling
|
||||
betweenness = _rescale(
|
||||
betweenness,
|
||||
len(G),
|
||||
normalized=normalized,
|
||||
directed=G.is_directed(),
|
||||
k=k,
|
||||
endpoints=endpoints,
|
||||
)
|
||||
return betweenness
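A hedged usage sketch; the path-graph values follow from the accumulation and the $2/((n-1)(n-2))$ normalization described above rather than from a recorded run:

>>> G = nx.path_graph(4)  # 0 - 1 - 2 - 3
>>> nx.betweenness_centrality(G)
{0: 0.0, 1: 0.6666666666666666, 2: 0.6666666666666666, 3: 0.0}
>>> nx.betweenness_centrality(G, normalized=False)
{0: 0.0, 1: 2.0, 2: 2.0, 3: 0.0}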
|
||||
|
||||
|
||||
@py_random_state(4)
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def edge_betweenness_centrality(G, k=None, normalized=True, weight=None, seed=None):
|
||||
r"""Compute betweenness centrality for edges.
|
||||
|
||||
Betweenness centrality of an edge $e$ is the sum of the
|
||||
fraction of all-pairs shortest paths that pass through $e$
|
||||
|
||||
.. math::
|
||||
|
||||
c_B(e) =\sum_{s,t \in V} \frac{\sigma(s, t|e)}{\sigma(s, t)}
|
||||
|
||||
where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
|
||||
shortest $(s, t)$-paths, and $\sigma(s, t|e)$ is the number of
|
||||
those paths passing through edge $e$ [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
k : int, optional (default=None)
|
||||
If k is not None, use k node samples to estimate betweenness.
The value of k must be <= n, where n is the number of nodes in the graph.
|
||||
Higher values give better approximation.
|
||||
|
||||
normalized : bool, optional
|
||||
If True the betweenness values are normalized by $2/(n(n-1))$
|
||||
for graphs, and $1/(n(n-1))$ for directed graphs where $n$
|
||||
is the number of nodes in G.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
Weights are used to calculate weighted shortest paths, so they are
|
||||
interpreted as distances.
|
||||
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
Note that this is only used if k is not None.
|
||||
|
||||
Returns
|
||||
-------
|
||||
edges : dictionary
|
||||
Dictionary of edges with betweenness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
edge_load
|
||||
|
||||
Notes
|
||||
-----
|
||||
The algorithm is from Ulrik Brandes [1]_.
|
||||
|
||||
For weighted graphs the edge weights must be greater than zero.
|
||||
Zero edge weights can produce an infinite number of equal length
|
||||
paths between pairs of nodes.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] A Faster Algorithm for Betweenness Centrality. Ulrik Brandes,
|
||||
Journal of Mathematical Sociology 25(2):163-177, 2001.
|
||||
https://doi.org/10.1080/0022250X.2001.9990249
|
||||
.. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
|
||||
Centrality and their Generic Computation.
|
||||
Social Networks 30(2):136-145, 2008.
|
||||
https://doi.org/10.1016/j.socnet.2007.11.001
|
||||
"""
|
||||
betweenness = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
|
||||
# b[e]=0 for e in G.edges()
|
||||
betweenness.update(dict.fromkeys(G.edges(), 0.0))
|
||||
if k is None:
|
||||
nodes = G
|
||||
else:
|
||||
nodes = seed.sample(list(G.nodes()), k)
|
||||
for s in nodes:
|
||||
# single source shortest paths
|
||||
if weight is None: # use BFS
|
||||
S, P, sigma, _ = _single_source_shortest_path_basic(G, s)
|
||||
else: # use Dijkstra's algorithm
|
||||
S, P, sigma, _ = _single_source_dijkstra_path_basic(G, s, weight)
|
||||
# accumulation
|
||||
betweenness = _accumulate_edges(betweenness, S, P, sigma, s)
|
||||
# rescaling
|
||||
for n in G: # remove nodes to only return edges
|
||||
del betweenness[n]
|
||||
betweenness = _rescale_e(
|
||||
betweenness, len(G), normalized=normalized, directed=G.is_directed()
|
||||
)
|
||||
if G.is_multigraph():
|
||||
betweenness = _add_edge_keys(G, betweenness, weight=weight)
|
||||
return betweenness
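A hedged usage sketch; on a three-node path each edge lies on the shortest paths of four ordered pairs, giving $4/(n(n-1)) = 2/3$ under the normalization above and $2.0$ without it:

>>> G = nx.path_graph(3)  # 0 - 1 - 2
>>> nx.edge_betweenness_centrality(G)
{(0, 1): 0.6666666666666666, (1, 2): 0.6666666666666666}
>>> nx.edge_betweenness_centrality(G, normalized=False)
{(0, 1): 2.0, (1, 2): 2.0}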
|
||||
|
||||
|
||||
# helpers for betweenness centrality
|
||||
|
||||
|
||||
def _single_source_shortest_path_basic(G, s):
|
||||
S = []
|
||||
P = {}
|
||||
for v in G:
|
||||
P[v] = []
|
||||
sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G
|
||||
D = {}
|
||||
sigma[s] = 1.0
|
||||
D[s] = 0
|
||||
Q = deque([s])
|
||||
while Q: # use BFS to find shortest paths
|
||||
v = Q.popleft()
|
||||
S.append(v)
|
||||
Dv = D[v]
|
||||
sigmav = sigma[v]
|
||||
for w in G[v]:
|
||||
if w not in D:
|
||||
Q.append(w)
|
||||
D[w] = Dv + 1
|
||||
if D[w] == Dv + 1: # this is a shortest path, count paths
|
||||
sigma[w] += sigmav
|
||||
P[w].append(v) # predecessors
|
||||
return S, P, sigma, D
|
||||
|
||||
|
||||
def _single_source_dijkstra_path_basic(G, s, weight):
|
||||
weight = _weight_function(G, weight)
|
||||
# modified from Eppstein
|
||||
S = []
|
||||
P = {}
|
||||
for v in G:
|
||||
P[v] = []
|
||||
sigma = dict.fromkeys(G, 0.0) # sigma[v]=0 for v in G
|
||||
D = {}
|
||||
sigma[s] = 1.0
|
||||
push = heappush
|
||||
pop = heappop
|
||||
seen = {s: 0}
|
||||
c = count()
|
||||
Q = [] # use Q as heap with (distance,node id) tuples
|
||||
push(Q, (0, next(c), s, s))
|
||||
while Q:
|
||||
(dist, _, pred, v) = pop(Q)
|
||||
if v in D:
|
||||
continue # already searched this node.
|
||||
sigma[v] += sigma[pred] # count paths
|
||||
S.append(v)
|
||||
D[v] = dist
|
||||
for w, edgedata in G[v].items():
|
||||
vw_dist = dist + weight(v, w, edgedata)
|
||||
if w not in D and (w not in seen or vw_dist < seen[w]):
|
||||
seen[w] = vw_dist
|
||||
push(Q, (vw_dist, next(c), v, w))
|
||||
sigma[w] = 0.0
|
||||
P[w] = [v]
|
||||
elif vw_dist == seen[w]: # handle equal paths
|
||||
sigma[w] += sigma[v]
|
||||
P[w].append(v)
|
||||
return S, P, sigma, D
|
||||
|
||||
|
||||
def _accumulate_basic(betweenness, S, P, sigma, s):
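# Brandes' dependency accumulation: pop nodes from S in reverse order of
# discovery; each w passes delta[v] += (sigma[v] / sigma[w]) * (1 + delta[w])
# back to every predecessor v, and betweenness[w] collects delta[w] for w != s.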
|
||||
delta = dict.fromkeys(S, 0)
|
||||
while S:
|
||||
w = S.pop()
|
||||
coeff = (1 + delta[w]) / sigma[w]
|
||||
for v in P[w]:
|
||||
delta[v] += sigma[v] * coeff
|
||||
if w != s:
|
||||
betweenness[w] += delta[w]
|
||||
return betweenness, delta
|
||||
|
||||
|
||||
def _accumulate_endpoints(betweenness, S, P, sigma, s):
|
||||
betweenness[s] += len(S) - 1
|
||||
delta = dict.fromkeys(S, 0)
|
||||
while S:
|
||||
w = S.pop()
|
||||
coeff = (1 + delta[w]) / sigma[w]
|
||||
for v in P[w]:
|
||||
delta[v] += sigma[v] * coeff
|
||||
if w != s:
|
||||
betweenness[w] += delta[w] + 1
|
||||
return betweenness, delta
|
||||
|
||||
|
||||
def _accumulate_edges(betweenness, S, P, sigma, s):
|
||||
delta = dict.fromkeys(S, 0)
|
||||
while S:
|
||||
w = S.pop()
|
||||
coeff = (1 + delta[w]) / sigma[w]
|
||||
for v in P[w]:
|
||||
c = sigma[v] * coeff
|
||||
if (v, w) not in betweenness:
|
||||
betweenness[(w, v)] += c
|
||||
else:
|
||||
betweenness[(v, w)] += c
|
||||
delta[v] += c
|
||||
if w != s:
|
||||
betweenness[w] += delta[w]
|
||||
return betweenness
|
||||
|
||||
|
||||
def _rescale(betweenness, n, normalized, directed=False, k=None, endpoints=False):
|
||||
if normalized:
|
||||
if endpoints:
|
||||
if n < 2:
|
||||
scale = None # no normalization
|
||||
else:
|
||||
# Scale factor should include endpoint nodes
|
||||
scale = 1 / (n * (n - 1))
|
||||
elif n <= 2:
|
||||
scale = None # no normalization b=0 for all nodes
|
||||
else:
|
||||
scale = 1 / ((n - 1) * (n - 2))
|
||||
else: # rescale by 2 for undirected graphs
|
||||
if not directed:
|
||||
scale = 0.5
|
||||
else:
|
||||
scale = None
|
||||
if scale is not None:
|
||||
if k is not None:
|
||||
scale = scale * n / k
|
||||
for v in betweenness:
|
||||
betweenness[v] *= scale
|
||||
return betweenness
|
||||
|
||||
|
||||
def _rescale_e(betweenness, n, normalized, directed=False, k=None):
|
||||
if normalized:
|
||||
if n <= 1:
|
||||
scale = None # no normalization b=0 for all nodes
|
||||
else:
|
||||
scale = 1 / (n * (n - 1))
|
||||
else: # rescale by 2 for undirected graphs
|
||||
if not directed:
|
||||
scale = 0.5
|
||||
else:
|
||||
scale = None
|
||||
if scale is not None:
|
||||
if k is not None:
|
||||
scale = scale * n / k
|
||||
for v in betweenness:
|
||||
betweenness[v] *= scale
|
||||
return betweenness
|
||||
|
||||
|
||||
@not_implemented_for("graph")
|
||||
def _add_edge_keys(G, betweenness, weight=None):
|
||||
r"""Adds the corrected betweenness centrality (BC) values for multigraphs.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : NetworkX graph.
|
||||
|
||||
betweenness : dictionary
|
||||
Dictionary mapping adjacent node tuples to betweenness centrality values.
|
||||
|
||||
weight : string or function
|
||||
See `_weight_function` for details. Defaults to `None`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
edges : dictionary
|
||||
The parameter `betweenness` including edges with keys and their
|
||||
betweenness centrality values.
|
||||
|
||||
The BC value is divided among edges of equal weight.
|
||||
"""
|
||||
_weight = _weight_function(G, weight)
|
||||
|
||||
edge_bc = dict.fromkeys(G.edges, 0.0)
|
||||
for u, v in betweenness:
|
||||
d = G[u][v]
|
||||
wt = _weight(u, v, d)
|
||||
keys = [k for k in d if _weight(u, v, {k: d[k]}) == wt]
|
||||
bc = betweenness[(u, v)] / len(keys)
|
||||
for k in keys:
|
||||
edge_bc[(u, v, k)] = bc
|
||||
|
||||
return edge_bc
|
||||
@@ -0,0 +1,275 @@
|
||||
"""Betweenness centrality measures for subsets of nodes."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.centrality.betweenness import (
|
||||
_add_edge_keys,
|
||||
)
|
||||
from networkx.algorithms.centrality.betweenness import (
|
||||
_single_source_dijkstra_path_basic as dijkstra,
|
||||
)
|
||||
from networkx.algorithms.centrality.betweenness import (
|
||||
_single_source_shortest_path_basic as shortest_path,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"betweenness_centrality_subset",
|
||||
"edge_betweenness_centrality_subset",
|
||||
]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def betweenness_centrality_subset(G, sources, targets, normalized=False, weight=None):
|
||||
r"""Compute betweenness centrality for a subset of nodes.
|
||||
|
||||
.. math::
|
||||
|
||||
c_B(v) =\sum_{s\in S, t \in T} \frac{\sigma(s, t|v)}{\sigma(s, t)}
|
||||
|
||||
where $S$ is the set of sources, $T$ is the set of targets,
|
||||
$\sigma(s, t)$ is the number of shortest $(s, t)$-paths,
|
||||
and $\sigma(s, t|v)$ is the number of those paths
|
||||
passing through some node $v$ other than $s, t$.
|
||||
If $s = t$, $\sigma(s, t) = 1$,
|
||||
and if $v \in {s, t}$, $\sigma(s, t|v) = 0$ [2]_.
|
||||
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
sources: list of nodes
|
||||
Nodes to use as sources for shortest paths in betweenness
|
||||
|
||||
targets: list of nodes
|
||||
Nodes to use as targets for shortest paths in betweenness
|
||||
|
||||
normalized : bool, optional
|
||||
If True the betweenness values are normalized by $2/((n-1)(n-2))$
|
||||
for graphs, and $1/((n-1)(n-2))$ for directed graphs where $n$
|
||||
is the number of nodes in G.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
Weights are used to calculate weighted shortest paths, so they are
|
||||
interpreted as distances.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with betweenness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
edge_betweenness_centrality
|
||||
load_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The basic algorithm is from [1]_.
|
||||
|
||||
For weighted graphs the edge weights must be greater than zero.
|
||||
Zero edge weights can produce an infinite number of equal length
|
||||
paths between pairs of nodes.
|
||||
|
||||
The normalization might seem a little strange but it is
|
||||
designed to make betweenness_centrality(G) be the same as
|
||||
betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).
|
||||
|
||||
The total number of paths between source and target is counted
|
||||
differently for directed and undirected graphs. Directed paths
|
||||
are easy to count. Undirected paths are tricky: should a path
|
||||
from "u" to "v" count as 1 undirected path or as 2 directed paths?
|
||||
|
||||
For betweenness_centrality we report the number of undirected
|
||||
paths when G is undirected.
|
||||
|
||||
For betweenness_centrality_subset the reporting is different.
|
||||
If the source and target subsets are the same, then we want
|
||||
to count undirected paths. But if the source and target subsets
|
||||
differ -- for example, if sources is {0} and targets is {1},
|
||||
then we are only counting the paths in one direction. They are
|
||||
undirected paths but we are counting them in a directed way.
|
||||
To count them as undirected paths, each should count as half a path.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
|
||||
Journal of Mathematical Sociology 25(2):163-177, 2001.
|
||||
https://doi.org/10.1080/0022250X.2001.9990249
|
||||
.. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
|
||||
Centrality and their Generic Computation.
|
||||
Social Networks 30(2):136-145, 2008.
|
||||
https://doi.org/10.1016/j.socnet.2007.11.001
|
||||
"""
|
||||
b = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
|
||||
for s in sources:
|
||||
# single source shortest paths
|
||||
if weight is None: # use BFS
|
||||
S, P, sigma, _ = shortest_path(G, s)
|
||||
else: # use Dijkstra's algorithm
|
||||
S, P, sigma, _ = dijkstra(G, s, weight)
|
||||
b = _accumulate_subset(b, S, P, sigma, s, targets)
|
||||
b = _rescale(b, len(G), normalized=normalized, directed=G.is_directed())
|
||||
return b
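A hedged sketch of the "half a path" counting discussed in the Notes: with a single source and a single target on a path, each interior node accumulates one directed path, which the default (unnormalized, undirected) rescaling halves:

>>> G = nx.path_graph(4)  # 0 - 1 - 2 - 3
>>> nx.betweenness_centrality_subset(G, sources=[0], targets=[3])
{0: 0.0, 1: 0.5, 2: 0.5, 3: 0.0}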
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def edge_betweenness_centrality_subset(
|
||||
G, sources, targets, normalized=False, weight=None
|
||||
):
|
||||
r"""Compute betweenness centrality for edges for a subset of nodes.
|
||||
|
||||
.. math::
|
||||
|
||||
c_B(v) =\sum_{s\in S,t \in T} \frac{\sigma(s, t|e)}{\sigma(s, t)}
|
||||
|
||||
where $S$ is the set of sources, $T$ is the set of targets,
|
||||
$\sigma(s, t)$ is the number of shortest $(s, t)$-paths,
|
||||
and $\sigma(s, t|e)$ is the number of those paths
|
||||
passing through edge $e$ [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A networkx graph.
|
||||
|
||||
sources: list of nodes
|
||||
Nodes to use as sources for shortest paths in betweenness
|
||||
|
||||
targets: list of nodes
|
||||
Nodes to use as targets for shortest paths in betweenness
|
||||
|
||||
normalized : bool, optional
|
||||
If True the betweenness values are normalized by `2/(n(n-1))`
|
||||
for graphs, and `1/(n(n-1))` for directed graphs where `n`
|
||||
is the number of nodes in G.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
Weights are used to calculate weighted shortest paths, so they are
|
||||
interpreted as distances.
|
||||
|
||||
Returns
|
||||
-------
|
||||
edges : dictionary
|
||||
Dictionary of edges with Betweenness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
edge_load
|
||||
|
||||
Notes
|
||||
-----
|
||||
The basic algorithm is from [1]_.
|
||||
|
||||
For weighted graphs the edge weights must be greater than zero.
|
||||
Zero edge weights can produce an infinite number of equal length
|
||||
paths between pairs of nodes.
|
||||
|
||||
The normalization might seem a little strange but it is the same
|
||||
as in edge_betweenness_centrality() and is designed to make
|
||||
edge_betweenness_centrality(G) be the same as
|
||||
edge_betweenness_centrality_subset(G,sources=G.nodes(),targets=G.nodes()).
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Ulrik Brandes, A Faster Algorithm for Betweenness Centrality.
|
||||
Journal of Mathematical Sociology 25(2):163-177, 2001.
|
||||
https://doi.org/10.1080/0022250X.2001.9990249
|
||||
.. [2] Ulrik Brandes: On Variants of Shortest-Path Betweenness
|
||||
Centrality and their Generic Computation.
|
||||
Social Networks 30(2):136-145, 2008.
|
||||
https://doi.org/10.1016/j.socnet.2007.11.001
|
||||
"""
|
||||
b = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
|
||||
b.update(dict.fromkeys(G.edges(), 0.0)) # b[e] for e in G.edges()
|
||||
for s in sources:
|
||||
# single source shortest paths
|
||||
if weight is None: # use BFS
|
||||
S, P, sigma, _ = shortest_path(G, s)
|
||||
else: # use Dijkstra's algorithm
|
||||
S, P, sigma, _ = dijkstra(G, s, weight)
|
||||
b = _accumulate_edges_subset(b, S, P, sigma, s, targets)
|
||||
for n in G: # remove nodes to only return edges
|
||||
del b[n]
|
||||
b = _rescale_e(b, len(G), normalized=normalized, directed=G.is_directed())
|
||||
if G.is_multigraph():
|
||||
b = _add_edge_keys(G, b, weight=weight)
|
||||
return b
|
||||
|
||||
|
||||
def _accumulate_subset(betweenness, S, P, sigma, s, targets):
|
||||
delta = dict.fromkeys(S, 0.0)
|
||||
target_set = set(targets) - {s}
|
||||
while S:
|
||||
w = S.pop()
|
||||
if w in target_set:
|
||||
coeff = (delta[w] + 1.0) / sigma[w]
|
||||
else:
|
||||
coeff = delta[w] / sigma[w]
|
||||
for v in P[w]:
|
||||
delta[v] += sigma[v] * coeff
|
||||
if w != s:
|
||||
betweenness[w] += delta[w]
|
||||
return betweenness
|
||||
|
||||
|
||||
def _accumulate_edges_subset(betweenness, S, P, sigma, s, targets):
|
||||
"""edge_betweenness_centrality_subset helper."""
|
||||
delta = dict.fromkeys(S, 0)
|
||||
target_set = set(targets)
|
||||
while S:
|
||||
w = S.pop()
|
||||
for v in P[w]:
|
||||
if w in target_set:
|
||||
c = (sigma[v] / sigma[w]) * (1.0 + delta[w])
|
||||
else:
|
||||
c = delta[w] / len(P[w])
|
||||
if (v, w) not in betweenness:
|
||||
betweenness[(w, v)] += c
|
||||
else:
|
||||
betweenness[(v, w)] += c
|
||||
delta[v] += c
|
||||
if w != s:
|
||||
betweenness[w] += delta[w]
|
||||
return betweenness
|
||||
|
||||
|
||||
def _rescale(betweenness, n, normalized, directed=False):
|
||||
"""betweenness_centrality_subset helper."""
|
||||
if normalized:
|
||||
if n <= 2:
|
||||
scale = None # no normalization b=0 for all nodes
|
||||
else:
|
||||
scale = 1.0 / ((n - 1) * (n - 2))
|
||||
else: # rescale by 2 for undirected graphs
|
||||
if not directed:
|
||||
scale = 0.5
|
||||
else:
|
||||
scale = None
|
||||
if scale is not None:
|
||||
for v in betweenness:
|
||||
betweenness[v] *= scale
|
||||
return betweenness
|
||||
|
||||
|
||||
def _rescale_e(betweenness, n, normalized, directed=False):
|
||||
"""edge_betweenness_centrality_subset helper."""
|
||||
if normalized:
|
||||
if n <= 1:
|
||||
scale = None # no normalization b=0 for all nodes
|
||||
else:
|
||||
scale = 1.0 / (n * (n - 1))
|
||||
else: # rescale by 2 for undirected graphs
|
||||
if not directed:
|
||||
scale = 0.5
|
||||
else:
|
||||
scale = None
|
||||
if scale is not None:
|
||||
for v in betweenness:
|
||||
betweenness[v] *= scale
|
||||
return betweenness
|
||||
@@ -0,0 +1,282 @@
|
||||
"""
|
||||
Closeness centrality measures.
|
||||
"""
|
||||
|
||||
import functools
|
||||
|
||||
import networkx as nx
|
||||
from networkx.exception import NetworkXError
|
||||
from networkx.utils.decorators import not_implemented_for
|
||||
|
||||
__all__ = ["closeness_centrality", "incremental_closeness_centrality"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="distance")
|
||||
def closeness_centrality(G, u=None, distance=None, wf_improved=True):
|
||||
r"""Compute closeness centrality for nodes.
|
||||
|
||||
Closeness centrality [1]_ of a node `u` is the reciprocal of the
|
||||
average shortest path distance to `u` over all `n-1` reachable nodes.
|
||||
|
||||
.. math::
|
||||
|
||||
C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
|
||||
|
||||
where `d(v, u)` is the shortest-path distance between `v` and `u`,
|
||||
and `n-1` is the number of nodes reachable from `u`. Notice that the
|
||||
closeness distance function computes the incoming distance to `u`
|
||||
for directed graphs. To use outward distance, act on `G.reverse()`.
|
||||
|
||||
Notice that higher values of closeness indicate higher centrality.
|
||||
|
||||
Wasserman and Faust propose an improved formula for graphs with
|
||||
more than one connected component. The result is "a ratio of the
|
||||
fraction of actors in the group who are reachable, to the average
|
||||
distance" from the reachable actors [2]_. You might think this
|
||||
scale factor is inverted but it is not. As is, nodes from small
|
||||
components receive a smaller closeness value. Letting `N` denote
|
||||
the number of nodes in the graph,
|
||||
|
||||
.. math::
|
||||
|
||||
C_{WF}(u) = \frac{n-1}{N-1} \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
u : node, optional
|
||||
Return only the value for node u
|
||||
|
||||
distance : edge attribute key, optional (default=None)
|
||||
Use the specified edge attribute as the edge distance in shortest
|
||||
path calculations. If `None` (the default) all edges have a distance of 1.
|
||||
Absent edge attributes are assigned a distance of 1. Note that no check
|
||||
is performed to ensure that edges have the provided attribute.
|
||||
|
||||
wf_improved : bool, optional (default=True)
|
||||
If True, scale by the fraction of nodes reachable. This gives the
|
||||
Wasserman and Faust improved formula. For single component graphs
|
||||
it is the same as the original formula.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with closeness centrality as the value.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
||||
>>> nx.closeness_centrality(G)
|
||||
{0: 1.0, 1: 1.0, 2: 0.75, 3: 0.75}
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality, load_centrality, eigenvector_centrality,
|
||||
degree_centrality, incremental_closeness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The closeness centrality is normalized to `(n-1)/(|G|-1)` where
|
||||
`n` is the number of nodes in the connected part of graph
|
||||
containing the node. If the graph is not completely connected,
|
||||
this algorithm computes the closeness centrality for each
|
||||
connected part separately, scaled by that part's size.
|
||||
|
||||
If the 'distance' keyword is set to an edge attribute key then the
|
||||
shortest-path length will be computed using Dijkstra's algorithm with
|
||||
that edge attribute as the edge weight.
|
||||
|
||||
The closeness centrality uses *inward* distance to a node, not outward.
|
||||
If you want to use outward distances, apply the function to `G.reverse()`.
|
||||
|
||||
In NetworkX 2.2 and earlier a bug caused Dijkstra's algorithm to use the
|
||||
outward distance rather than the inward distance. If you use a 'distance'
|
||||
keyword and a DiGraph, your results will change between v2.2 and v2.3.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Linton C. Freeman: Centrality in networks: I.
|
||||
Conceptual clarification. Social Networks 1:215-239, 1979.
|
||||
https://doi.org/10.1016/0378-8733(78)90021-7
|
||||
.. [2] pg. 201 of Wasserman, S. and Faust, K.,
|
||||
Social Network Analysis: Methods and Applications, 1994,
|
||||
Cambridge University Press.
|
||||
"""
|
||||
if G.is_directed():
|
||||
G = G.reverse() # create a reversed graph view
|
||||
|
||||
if distance is not None:
|
||||
# use Dijkstra's algorithm with specified attribute as edge weight
|
||||
path_length = functools.partial(
|
||||
nx.single_source_dijkstra_path_length, weight=distance
|
||||
)
|
||||
else:
|
||||
path_length = nx.single_source_shortest_path_length
|
||||
|
||||
if u is None:
|
||||
nodes = G.nodes
|
||||
else:
|
||||
nodes = [u]
|
||||
closeness_dict = {}
|
||||
for n in nodes:
|
||||
sp = path_length(G, n)
|
||||
totsp = sum(sp.values())
|
||||
len_G = len(G)
|
||||
_closeness_centrality = 0.0
|
||||
if totsp > 0.0 and len_G > 1:
|
||||
_closeness_centrality = (len(sp) - 1.0) / totsp
|
||||
# normalize to number of nodes-1 in connected part
|
||||
if wf_improved:
|
||||
s = (len(sp) - 1.0) / (len_G - 1)
|
||||
_closeness_centrality *= s
|
||||
closeness_dict[n] = _closeness_centrality
|
||||
if u is not None:
|
||||
return closeness_dict[u]
|
||||
return closeness_dict
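A hedged sketch of the Wasserman and Faust scaling on a disconnected graph; the values follow from the formula above (reachable fraction times inverse average distance), not from a recorded run:

>>> G = nx.Graph([(0, 1), (2, 3), (3, 4)])  # two components
>>> nx.closeness_centrality(G)[0]  # scaled down by its small component
0.25
>>> nx.closeness_centrality(G, wf_improved=False)[0]
1.0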
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(mutates_input=True)
|
||||
def incremental_closeness_centrality(
|
||||
G, edge, prev_cc=None, insertion=True, wf_improved=True
|
||||
):
|
||||
r"""Incremental closeness centrality for nodes.
|
||||
|
||||
Compute closeness centrality for nodes using level-based work filtering
|
||||
as described in Incremental Algorithms for Closeness Centrality by Sariyuce et al.
|
||||
|
||||
Level-based work filtering detects unnecessary updates to the closeness
|
||||
centrality and filters them out.
|
||||
|
||||
---
|
||||
From "Incremental Algorithms for Closeness Centrality":
|
||||
|
||||
Theorem 1: Let :math:`G = (V, E)` be a graph and u and v be two vertices in V
|
||||
such that there is no edge (u, v) in E. Let :math:`G' = (V, E \cup uv)`
|
||||
Then :math:`cc[s] = cc'[s]` if and only if :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`.
|
||||
|
||||
Where :math:`dG(u, v)` denotes the length of the shortest path between
|
||||
two vertices u, v in a graph G, cc[s] is the closeness centrality for a
|
||||
vertex s in V, and cc'[s] is the closeness centrality for a
|
||||
vertex s in V, with the (u, v) edge added.
|
||||
---
|
||||
|
||||
We use Theorem 1 to filter out updates when adding or removing an edge.
|
||||
When adding an edge (u, v), we compute the shortest path lengths from all
|
||||
other nodes to u and to v before the node is added. When removing an edge,
|
||||
we compute the shortest path lengths after the edge is removed. Then we
|
||||
apply Theorem 1 to use previously computed closeness centrality for nodes
|
||||
where :math:`\left|dG(s, u) - dG(s, v)\right| \leq 1`. This works only for
|
||||
undirected, unweighted graphs; the distance argument is not supported.
|
||||
|
||||
Closeness centrality [1]_ of a node `u` is the reciprocal of the
|
||||
sum of the shortest path distances from `u` to all `n-1` other nodes.
|
||||
Since the sum of distances depends on the number of nodes in the
|
||||
graph, closeness is normalized by the sum of minimum possible
|
||||
distances `n-1`.
|
||||
|
||||
.. math::
|
||||
|
||||
C(u) = \frac{n - 1}{\sum_{v=1}^{n-1} d(v, u)},
|
||||
|
||||
where `d(v, u)` is the shortest-path distance between `v` and `u`,
|
||||
and `n` is the number of nodes in the graph.
|
||||
|
||||
Notice that higher values of closeness indicate higher centrality.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
edge : tuple
|
||||
The modified edge (u, v) in the graph.
|
||||
|
||||
prev_cc : dictionary
|
||||
The previous closeness centrality for all nodes in the graph.
|
||||
|
||||
insertion : bool, optional
|
||||
If True (default) the edge was inserted, otherwise it was deleted from the graph.
|
||||
|
||||
wf_improved : bool, optional (default=True)
|
||||
If True, scale by the fraction of nodes reachable. This gives the
|
||||
Wasserman and Faust improved formula. For single component graphs
|
||||
it is the same as the original formula.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with closeness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality, load_centrality, eigenvector_centrality,
|
||||
degree_centrality, closeness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The closeness centrality is normalized to `(n-1)/(|G|-1)` where
|
||||
`n` is the number of nodes in the connected part of graph
|
||||
containing the node. If the graph is not completely connected,
|
||||
this algorithm computes the closeness centrality for each
|
||||
connected part separately.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Freeman, L.C., 1979. Centrality in networks: I.
|
||||
Conceptual clarification. Social Networks 1, 215--239.
|
||||
https://doi.org/10.1016/0378-8733(78)90021-7
|
||||
.. [2] Sariyuce, A.E.; Kaya, K.; Saule, E.; Catalyurek, U.V. Incremental
|
||||
Algorithms for Closeness Centrality. 2013 IEEE International Conference on Big Data
|
||||
http://sariyuce.com/papers/bigdata13.pdf
|
||||
"""
|
||||
if prev_cc is not None and set(prev_cc.keys()) != set(G.nodes()):
|
||||
raise NetworkXError("prev_cc and G do not have the same nodes")
|
||||
|
||||
# Unpack edge
|
||||
(u, v) = edge
|
||||
path_length = nx.single_source_shortest_path_length
|
||||
|
||||
if insertion:
|
||||
# For edge insertion, we want shortest paths before the edge is inserted
|
||||
du = path_length(G, u)
|
||||
dv = path_length(G, v)
|
||||
|
||||
G.add_edge(u, v)
|
||||
else:
|
||||
G.remove_edge(u, v)
|
||||
|
||||
# For edge removal, we want shortest paths after the edge is removed
|
||||
du = path_length(G, u)
|
||||
dv = path_length(G, v)
|
||||
|
||||
if prev_cc is None:
|
||||
return nx.closeness_centrality(G)
|
||||
|
||||
nodes = G.nodes()
|
||||
closeness_dict = {}
|
||||
for n in nodes:
|
||||
if n in du and n in dv and abs(du[n] - dv[n]) <= 1:
|
||||
closeness_dict[n] = prev_cc[n]
|
||||
else:
|
||||
sp = path_length(G, n)
|
||||
totsp = sum(sp.values())
|
||||
len_G = len(G)
|
||||
_closeness_centrality = 0.0
|
||||
if totsp > 0.0 and len_G > 1:
|
||||
_closeness_centrality = (len(sp) - 1.0) / totsp
|
||||
# normalize to number of nodes-1 in connected part
|
||||
if wf_improved:
|
||||
s = (len(sp) - 1.0) / (len_G - 1)
|
||||
_closeness_centrality *= s
|
||||
closeness_dict[n] = _closeness_centrality
|
||||
|
||||
# Leave the graph as we found it
|
||||
if insertion:
|
||||
G.remove_edge(u, v)
|
||||
else:
|
||||
G.add_edge(u, v)
|
||||
|
||||
return closeness_dict
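A hedged usage sketch: compute closeness once, then reuse it when an edge is inserted; `prev_cc` must cover exactly the nodes of `G`, and the graph is left as it was found:

>>> G = nx.path_graph(5)
>>> prev_cc = nx.closeness_centrality(G)
>>> cc = nx.incremental_closeness_centrality(G, (0, 4), prev_cc=prev_cc, insertion=True)
>>> G.has_edge(0, 4)
False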
|
||||
@@ -0,0 +1,342 @@
|
||||
"""Current-flow betweenness centrality measures."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.centrality.flow_matrix import (
|
||||
CGInverseLaplacian,
|
||||
FullInverseLaplacian,
|
||||
SuperLUInverseLaplacian,
|
||||
flow_matrix_row,
|
||||
)
|
||||
from networkx.utils import (
|
||||
not_implemented_for,
|
||||
py_random_state,
|
||||
reverse_cuthill_mckee_ordering,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"current_flow_betweenness_centrality",
|
||||
"approximate_current_flow_betweenness_centrality",
|
||||
"edge_current_flow_betweenness_centrality",
|
||||
]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@py_random_state(7)
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def approximate_current_flow_betweenness_centrality(
|
||||
G,
|
||||
normalized=True,
|
||||
weight=None,
|
||||
dtype=float,
|
||||
solver="full",
|
||||
epsilon=0.5,
|
||||
kmax=10000,
|
||||
seed=None,
|
||||
):
|
||||
r"""Compute the approximate current-flow betweenness centrality for nodes.
|
||||
|
||||
Approximates the current-flow betweenness centrality within absolute
|
||||
error of epsilon with high probability [1]_.
|
||||
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
|
||||
n is the number of nodes in G.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
Key for edge data used as the edge weight.
|
||||
If None, then use 1 as each edge weight.
|
||||
The weight reflects the capacity or the strength of the
|
||||
edge.
|
||||
|
||||
dtype : data type (float)
|
||||
Default data type for internal matrices.
|
||||
Set to np.float32 for lower memory consumption.
|
||||
|
||||
solver : string (default='full')
|
||||
Type of linear solver to use for computing the flow matrix.
|
||||
Options are "full" (uses most memory), "lu" (recommended), and
|
||||
"cg" (uses least memory).
|
||||
|
||||
epsilon: float
|
||||
Absolute error tolerance.
|
||||
|
||||
kmax: int
|
||||
Maximum number of sample node pairs to use for approximation.
|
||||
|
||||
seed : integer, random_state, or None (default)
|
||||
Indicator of random number generation state.
|
||||
See :ref:`Randomness<randomness>`.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with betweenness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
current_flow_betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The running time is $O((1/\epsilon^2)m{\sqrt k} \log n)$
|
||||
and the space required is $O(m)$ for $n$ nodes and $m$ edges.
|
||||
|
||||
If the edges have a 'weight' attribute they will be used as
|
||||
weights in this algorithm. Unspecified weights are set to 1.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Ulrik Brandes and Daniel Fleischer:
|
||||
Centrality Measures Based on Current Flow.
|
||||
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
||||
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
||||
https://doi.org/10.1007/978-3-540-31856-9_44
|
||||
"""
|
||||
import numpy as np
|
||||
|
||||
if not nx.is_connected(G):
|
||||
raise nx.NetworkXError("Graph not connected.")
|
||||
solvername = {
|
||||
"full": FullInverseLaplacian,
|
||||
"lu": SuperLUInverseLaplacian,
|
||||
"cg": CGInverseLaplacian,
|
||||
}
|
||||
n = G.number_of_nodes()
|
||||
ordering = list(reverse_cuthill_mckee_ordering(G))
|
||||
# make a copy with integer labels according to rcm ordering
|
||||
# this could be done without a copy if we really wanted to
|
||||
H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
|
||||
L = nx.laplacian_matrix(H, nodelist=range(n), weight=weight).asformat("csc")
|
||||
L = L.astype(dtype)
|
||||
C = solvername[solver](L, dtype=dtype) # initialize solver
|
||||
betweenness = dict.fromkeys(H, 0.0)
|
||||
nb = (n - 1.0) * (n - 2.0) # normalization factor
|
||||
cstar = n * (n - 1) / nb
|
||||
l = 1 # parameter in approximation, adjustable
|
||||
k = l * int(np.ceil((cstar / epsilon) ** 2 * np.log(n)))
|
||||
if k > kmax:
|
||||
msg = f"Number random pairs k>kmax ({k}>{kmax}) "
|
||||
raise nx.NetworkXError(msg, "Increase kmax or epsilon")
|
||||
cstar2k = cstar / (2 * k)
|
||||
for _ in range(k):
|
||||
s, t = pair = seed.sample(range(n), 2)
|
||||
b = np.zeros(n, dtype=dtype)
|
||||
b[s] = 1
|
||||
b[t] = -1
|
||||
p = C.solve(b)
|
||||
for v in H:
|
||||
if v in pair:
|
||||
continue
|
||||
for nbr in H[v]:
|
||||
w = H[v][nbr].get(weight, 1.0)
|
||||
betweenness[v] += float(w * np.abs(p[v] - p[nbr]) * cstar2k)
|
||||
if normalized:
|
||||
factor = 1.0
|
||||
else:
|
||||
factor = nb / 2.0
|
||||
# remap to original node names and "unnormalize" if required
|
||||
return {ordering[k]: v * factor for k, v in betweenness.items()}
|
||||
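# --- Illustrative usage sketch, not part of the original module. It compares
# --- the approximation above with the exact routine defined below on a small
# --- grid graph; the graph, epsilon, and seed are arbitrary illustration values.
def _demo_approximate_current_flow():  # hypothetical helper, illustration only
    G = nx.grid_2d_graph(4, 4)  # small connected graph
    approx = nx.approximate_current_flow_betweenness_centrality(
        G, epsilon=0.1, seed=42
    )
    exact = nx.current_flow_betweenness_centrality(G)
    # With high probability each approximate value is within epsilon of the
    # exact value.
    return max(abs(approx[v] - exact[v]) for v in G)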
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def current_flow_betweenness_centrality(
|
||||
G, normalized=True, weight=None, dtype=float, solver="full"
|
||||
):
|
||||
r"""Compute current-flow betweenness centrality for nodes.
|
||||
|
||||
Current-flow betweenness centrality uses an electrical current
|
||||
model for information spreading in contrast to betweenness
|
||||
centrality which uses shortest paths.
|
||||
|
||||
Current-flow betweenness centrality is also known as
|
||||
random-walk betweenness centrality [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
|
||||
n is the number of nodes in G.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
Key for edge data used as the edge weight.
|
||||
If None, then use 1 as each edge weight.
|
||||
The weight reflects the capacity or the strength of the
|
||||
edge.
|
||||
|
||||
dtype : data type (float)
|
||||
Default data type for internal matrices.
|
||||
Set to np.float32 for lower memory consumption.
|
||||
|
||||
solver : string (default='full')
|
||||
Type of linear solver to use for computing the flow matrix.
|
||||
Options are "full" (uses most memory), "lu" (recommended), and
|
||||
"cg" (uses least memory).
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with betweenness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
approximate_current_flow_betweenness_centrality
|
||||
betweenness_centrality
|
||||
edge_betweenness_centrality
|
||||
edge_current_flow_betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
|
||||
time [1]_, where $I(n-1)$ is the time needed to compute the
|
||||
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
|
||||
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
|
||||
Laplacian matrix condition number.
|
||||
|
||||
The space required is $O(nw)$ where $w$ is the width of the sparse
|
||||
Laplacian matrix. Worst case is $w=n$ for $O(n^2)$.
|
||||
|
||||
If the edges have a 'weight' attribute they will be used as
|
||||
weights in this algorithm. Unspecified weights are set to 1.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Centrality Measures Based on Current Flow.
|
||||
Ulrik Brandes and Daniel Fleischer,
|
||||
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
||||
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
||||
https://doi.org/10.1007/978-3-540-31856-9_44
|
||||
|
||||
.. [2] A measure of betweenness centrality based on random walks,
|
||||
M. E. J. Newman, Social Networks 27, 39-54 (2005).
|
||||
"""
|
||||
if not nx.is_connected(G):
|
||||
raise nx.NetworkXError("Graph not connected.")
|
||||
N = G.number_of_nodes()
|
||||
ordering = list(reverse_cuthill_mckee_ordering(G))
|
||||
# make a copy with integer labels according to rcm ordering
|
||||
# this could be done without a copy if we really wanted to
|
||||
H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
|
||||
betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H
|
||||
for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
|
||||
pos = dict(zip(row.argsort()[::-1], range(N)))
|
||||
for i in range(N):
|
||||
betweenness[s] += (i - pos[i]) * row.item(i)
|
||||
betweenness[t] += (N - i - 1 - pos[i]) * row.item(i)
|
||||
if normalized:
|
||||
nb = (N - 1.0) * (N - 2.0) # normalization factor
|
||||
else:
|
||||
nb = 2.0
|
||||
return {ordering[n]: (b - n) * 2.0 / nb for n, b in betweenness.items()}
|
||||
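# --- Illustrative usage sketch, not part of the original module. It contrasts
# --- current-flow betweenness with shortest-path betweenness on the
# --- Krackhardt kite graph; the graph choice is arbitrary.
def _demo_current_flow_vs_shortest_path():  # hypothetical helper, illustration only
    G = nx.krackhardt_kite_graph()  # small, connected, undirected
    cf = nx.current_flow_betweenness_centrality(G, solver="lu")
    sp = nx.betweenness_centrality(G)
    return {v: (round(cf[v], 3), round(sp[v], 3)) for v in sorted(G)}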
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def edge_current_flow_betweenness_centrality(
|
||||
G, normalized=True, weight=None, dtype=float, solver="full"
|
||||
):
|
||||
r"""Compute current-flow betweenness centrality for edges.
|
||||
|
||||
Current-flow betweenness centrality uses an electrical current
|
||||
model for information spreading in contrast to betweenness
|
||||
centrality which uses shortest paths.
|
||||
|
||||
Current-flow betweenness centrality is also known as
|
||||
random-walk betweenness centrality [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
|
||||
n is the number of nodes in G.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
Key for edge data used as the edge weight.
|
||||
If None, then use 1 as each edge weight.
|
||||
The weight reflects the capacity or the strength of the
|
||||
edge.
|
||||
|
||||
dtype : data type (default=float)
|
||||
Default data type for internal matrices.
|
||||
Set to np.float32 for lower memory consumption.
|
||||
|
||||
solver : string (default='full')
|
||||
Type of linear solver to use for computing the flow matrix.
|
||||
Options are "full" (uses most memory), "lu" (recommended), and
|
||||
"cg" (uses least memory).
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of edge tuples with betweenness centrality as the value.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
The algorithm does not support DiGraphs.
|
||||
If the input graph is an instance of DiGraph class, NetworkXError
|
||||
is raised.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
edge_betweenness_centrality
|
||||
current_flow_betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
|
||||
time [1]_, where $I(n-1)$ is the time needed to compute the
|
||||
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
|
||||
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
|
||||
Laplacian matrix condition number.
|
||||
|
||||
The space required is $O(nw)$ where $w$ is the width of the sparse
|
||||
Laplacian matrix. Worst case is $w=n$ for $O(n^2)$.
|
||||
|
||||
If the edges have a 'weight' attribute they will be used as
|
||||
weights in this algorithm. Unspecified weights are set to 1.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Centrality Measures Based on Current Flow.
|
||||
Ulrik Brandes and Daniel Fleischer,
|
||||
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
||||
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
||||
https://doi.org/10.1007/978-3-540-31856-9_44
|
||||
|
||||
.. [2] A measure of betweenness centrality based on random walks,
|
||||
M. E. J. Newman, Social Networks 27, 39-54 (2005).
|
||||
"""
|
||||
if not nx.is_connected(G):
|
||||
raise nx.NetworkXError("Graph not connected.")
|
||||
N = G.number_of_nodes()
|
||||
ordering = list(reverse_cuthill_mckee_ordering(G))
|
||||
# make a copy with integer labels according to rcm ordering
|
||||
# this could be done without a copy if we really wanted to
|
||||
H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
|
||||
edges = (tuple(sorted((u, v))) for u, v in H.edges())
|
||||
betweenness = dict.fromkeys(edges, 0.0)
|
||||
if normalized:
|
||||
nb = (N - 1.0) * (N - 2.0) # normalization factor
|
||||
else:
|
||||
nb = 2.0
|
||||
for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
|
||||
pos = dict(zip(row.argsort()[::-1], range(1, N + 1)))
|
||||
for i in range(N):
|
||||
betweenness[e] += (i + 1 - pos[i]) * row.item(i)
|
||||
betweenness[e] += (N - i - pos[i]) * row.item(i)
|
||||
betweenness[e] /= nb
|
||||
return {(ordering[s], ordering[t]): b for (s, t), b in betweenness.items()}
|
||||
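# --- Illustrative usage sketch, not part of the original module. On a cycle
# --- every edge is equivalent, so the edge current-flow betweenness values
# --- should all coincide; the cycle length is an arbitrary choice.
def _demo_edge_current_flow():  # hypothetical helper, illustration only
    G = nx.cycle_graph(5)
    edge_cf = nx.edge_current_flow_betweenness_centrality(G, normalized=True)
    # Keys are edge tuples (in the relabeled node order), values are floats.
    return edge_cf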
+227
@@ -0,0 +1,227 @@
|
||||
"""Current-flow betweenness centrality measures for subsets of nodes."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.centrality.flow_matrix import flow_matrix_row
|
||||
from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
|
||||
|
||||
__all__ = [
|
||||
"current_flow_betweenness_centrality_subset",
|
||||
"edge_current_flow_betweenness_centrality_subset",
|
||||
]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def current_flow_betweenness_centrality_subset(
|
||||
G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
|
||||
):
|
||||
r"""Compute current-flow betweenness centrality for subsets of nodes.
|
||||
|
||||
Current-flow betweenness centrality uses an electrical current
|
||||
model for information spreading in contrast to betweenness
|
||||
centrality which uses shortest paths.
|
||||
|
||||
Current-flow betweenness centrality is also known as
|
||||
random-walk betweenness centrality [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
sources: list of nodes
|
||||
Nodes to use as sources for current
|
||||
|
||||
targets: list of nodes
|
||||
Nodes to use as sinks for current
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True the betweenness values are normalized by b = b / ((n-1)(n-2)) where
|
||||
n is the number of nodes in G.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
Key for edge data used as the edge weight.
|
||||
If None, then use 1 as each edge weight.
|
||||
The weight reflects the capacity or the strength of the
|
||||
edge.
|
||||
|
||||
dtype: data type (float)
|
||||
Default data type for internal matrices.
|
||||
Set to np.float32 for lower memory consumption.
|
||||
|
||||
solver: string (default='lu')
|
||||
Type of linear solver to use for computing the flow matrix.
|
||||
Options are "full" (uses most memory), "lu" (recommended), and
|
||||
"cg" (uses least memory).
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with betweenness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
approximate_current_flow_betweenness_centrality
|
||||
betweenness_centrality
|
||||
edge_betweenness_centrality
|
||||
edge_current_flow_betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
|
||||
time [1]_, where $I(n-1)$ is the time needed to compute the
|
||||
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
|
||||
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
|
||||
Laplacian matrix condition number.
|
||||
|
||||
The space required is $O(nw)$ where $w$ is the width of the sparse
|
||||
Laplacian matrix. Worst case is $w=n$ for $O(n^2)$.
|
||||
|
||||
If the edges have a 'weight' attribute they will be used as
|
||||
weights in this algorithm. Unspecified weights are set to 1.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Centrality Measures Based on Current Flow.
|
||||
Ulrik Brandes and Daniel Fleischer,
|
||||
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
||||
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
||||
https://doi.org/10.1007/978-3-540-31856-9_44
|
||||
|
||||
.. [2] A measure of betweenness centrality based on random walks,
|
||||
M. E. J. Newman, Social Networks 27, 39-54 (2005).
|
||||
"""
|
||||
import numpy as np
|
||||
|
||||
from networkx.utils import reverse_cuthill_mckee_ordering
|
||||
|
||||
if not nx.is_connected(G):
|
||||
raise nx.NetworkXError("Graph not connected.")
|
||||
N = G.number_of_nodes()
|
||||
ordering = list(reverse_cuthill_mckee_ordering(G))
|
||||
# make a copy with integer labels according to rcm ordering
|
||||
# this could be done without a copy if we really wanted to
|
||||
mapping = dict(zip(ordering, range(N)))
|
||||
H = nx.relabel_nodes(G, mapping)
|
||||
betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H
|
||||
for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
|
||||
for ss in sources:
|
||||
i = mapping[ss]
|
||||
for tt in targets:
|
||||
j = mapping[tt]
|
||||
betweenness[s] += 0.5 * abs(row.item(i) - row.item(j))
|
||||
betweenness[t] += 0.5 * abs(row.item(i) - row.item(j))
|
||||
if normalized:
|
||||
nb = (N - 1.0) * (N - 2.0) # normalization factor
|
||||
else:
|
||||
nb = 2.0
|
||||
for node in H:
|
||||
betweenness[node] = betweenness[node] / nb + 1.0 / (2 - N)
|
||||
return {ordering[node]: value for node, value in betweenness.items()}
|
||||
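# --- Illustrative usage sketch, not part of the original module. It restricts
# --- the source/target pairs to a subset of nodes of a path graph; the
# --- particular sources and targets are arbitrary illustration values.
def _demo_current_flow_subset():  # hypothetical helper, illustration only
    G = nx.path_graph(6)
    return nx.current_flow_betweenness_centrality_subset(
        G, sources=[0, 1], targets=[4, 5], normalized=True
    )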
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def edge_current_flow_betweenness_centrality_subset(
|
||||
G, sources, targets, normalized=True, weight=None, dtype=float, solver="lu"
|
||||
):
|
||||
r"""Compute current-flow betweenness centrality for edges using subsets
|
||||
of nodes.
|
||||
|
||||
Current-flow betweenness centrality uses an electrical current
|
||||
model for information spreading in contrast to betweenness
|
||||
centrality which uses shortest paths.
|
||||
|
||||
Current-flow betweenness centrality is also known as
|
||||
random-walk betweenness centrality [2]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
sources: list of nodes
|
||||
Nodes to use as sources for current
|
||||
|
||||
targets: list of nodes
|
||||
Nodes to use as sinks for current
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True the betweenness values are normalized by b = b / ((n-1)(n-2)) where
|
||||
n is the number of nodes in G.
|
||||
|
||||
weight : string or None, optional (default=None)
|
||||
Key for edge data used as the edge weight.
|
||||
If None, then use 1 as each edge weight.
|
||||
The weight reflects the capacity or the strength of the
|
||||
edge.
|
||||
|
||||
dtype: data type (float)
|
||||
Default data type for internal matrices.
|
||||
Set to np.float32 for lower memory consumption.
|
||||
|
||||
solver: string (default='lu')
|
||||
Type of linear solver to use for computing the flow matrix.
|
||||
Options are "full" (uses most memory), "lu" (recommended), and
|
||||
"cg" (uses least memory).
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dict
|
||||
Dictionary of edge tuples with betweenness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
edge_betweenness_centrality
|
||||
current_flow_betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
|
||||
time [1]_, where $I(n-1)$ is the time needed to compute the
|
||||
inverse Laplacian. For a full matrix this is $O(n^3)$ but using
|
||||
sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
|
||||
Laplacian matrix condition number.
|
||||
|
||||
The space required is $O(nw)$ where $w$ is the width of the sparse
|
||||
Laplacian matrix. Worst case is $w=n$ for $O(n^2)$.
|
||||
|
||||
If the edges have a 'weight' attribute they will be used as
|
||||
weights in this algorithm. Unspecified weights are set to 1.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Centrality Measures Based on Current Flow.
|
||||
Ulrik Brandes and Daniel Fleischer,
|
||||
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
||||
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
||||
https://doi.org/10.1007/978-3-540-31856-9_44
|
||||
|
||||
.. [2] A measure of betweenness centrality based on random walks,
|
||||
M. E. J. Newman, Social Networks 27, 39-54 (2005).
|
||||
"""
|
||||
import numpy as np
|
||||
|
||||
if not nx.is_connected(G):
|
||||
raise nx.NetworkXError("Graph not connected.")
|
||||
N = G.number_of_nodes()
|
||||
ordering = list(reverse_cuthill_mckee_ordering(G))
|
||||
# make a copy with integer labels according to rcm ordering
|
||||
# this could be done without a copy if we really wanted to
|
||||
mapping = dict(zip(ordering, range(N)))
|
||||
H = nx.relabel_nodes(G, mapping)
|
||||
edges = (tuple(sorted((u, v))) for u, v in H.edges())
|
||||
betweenness = dict.fromkeys(edges, 0.0)
|
||||
if normalized:
|
||||
nb = (N - 1.0) * (N - 2.0) # normalization factor
|
||||
else:
|
||||
nb = 2.0
|
||||
for row, (e) in flow_matrix_row(H, weight=weight, dtype=dtype, solver=solver):
|
||||
for ss in sources:
|
||||
i = mapping[ss]
|
||||
for tt in targets:
|
||||
j = mapping[tt]
|
||||
betweenness[e] += 0.5 * abs(row.item(i) - row.item(j))
|
||||
betweenness[e] /= nb
|
||||
return {(ordering[s], ordering[t]): value for (s, t), value in betweenness.items()}
|
||||
+96
@@ -0,0 +1,96 @@
|
||||
"""Current-flow closeness centrality measures."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.centrality.flow_matrix import (
|
||||
CGInverseLaplacian,
|
||||
FullInverseLaplacian,
|
||||
SuperLUInverseLaplacian,
|
||||
)
|
||||
from networkx.utils import not_implemented_for, reverse_cuthill_mckee_ordering
|
||||
|
||||
__all__ = ["current_flow_closeness_centrality", "information_centrality"]
|
||||
|
||||
|
||||
@not_implemented_for("directed")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def current_flow_closeness_centrality(G, weight=None, dtype=float, solver="lu"):
|
||||
"""Compute current-flow closeness centrality for nodes.
|
||||
|
||||
Current-flow closeness centrality is a variant of closeness
|
||||
centrality based on effective resistance between nodes in
|
||||
a network. This metric is also known as information centrality.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
The weight reflects the capacity or the strength of the
|
||||
edge.
|
||||
|
||||
dtype: data type (default=float)
|
||||
Default data type for internal matrices.
|
||||
Set to np.float32 for lower memory consumption.
|
||||
|
||||
solver: string (default='lu')
|
||||
Type of linear solver to use for computing the flow matrix.
|
||||
Options are "full" (uses most memory), "lu" (recommended), and
|
||||
"cg" (uses least memory).
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with current flow closeness centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
closeness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The algorithm is from Brandes [1]_.
|
||||
|
||||
See also [2]_ for the original definition of information centrality.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Ulrik Brandes and Daniel Fleischer,
|
||||
Centrality Measures Based on Current Flow.
|
||||
Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
|
||||
LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
|
||||
https://doi.org/10.1007/978-3-540-31856-9_44
|
||||
|
||||
.. [2] Karen Stephenson and Marvin Zelen:
|
||||
Rethinking centrality: Methods and examples.
|
||||
Social Networks 11(1):1-37, 1989.
|
||||
https://doi.org/10.1016/0378-8733(89)90016-6
|
||||
"""
|
||||
if not nx.is_connected(G):
|
||||
raise nx.NetworkXError("Graph not connected.")
|
||||
solvername = {
|
||||
"full": FullInverseLaplacian,
|
||||
"lu": SuperLUInverseLaplacian,
|
||||
"cg": CGInverseLaplacian,
|
||||
}
|
||||
N = G.number_of_nodes()
|
||||
ordering = list(reverse_cuthill_mckee_ordering(G))
|
||||
# make a copy with integer labels according to rcm ordering
|
||||
# this could be done without a copy if we really wanted to
|
||||
H = nx.relabel_nodes(G, dict(zip(ordering, range(N))))
|
||||
betweenness = dict.fromkeys(H, 0.0) # b[n]=0 for n in H
|
||||
N = H.number_of_nodes()
|
||||
L = nx.laplacian_matrix(H, nodelist=range(N), weight=weight).asformat("csc")
|
||||
L = L.astype(dtype)
|
||||
C2 = solvername[solver](L, width=1, dtype=dtype) # initialize solver
|
||||
for v in H:
|
||||
col = C2.get_row(v)
|
||||
for w in H:
|
||||
betweenness[v] += col.item(v) - 2 * col.item(w)
|
||||
betweenness[w] += col.item(v)
|
||||
return {ordering[node]: 1 / value for node, value in betweenness.items()}
|
||||
|
||||
|
||||
information_centrality = current_flow_closeness_centrality
|
||||
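# --- Illustrative usage sketch, not part of the original module. It checks
# --- that the information_centrality alias defined above gives the same
# --- result as current_flow_closeness_centrality; the wheel graph is an
# --- arbitrary test case.
def _demo_information_centrality():  # hypothetical helper, illustration only
    G = nx.wheel_graph(6)
    cc = nx.current_flow_closeness_centrality(G)
    ic = nx.information_centrality(G)
    assert cc == ic  # same function under two names
    return cc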
+150
@@ -0,0 +1,150 @@
|
||||
"""Degree centrality measures."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils.decorators import not_implemented_for
|
||||
|
||||
__all__ = ["degree_centrality", "in_degree_centrality", "out_degree_centrality"]
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def degree_centrality(G):
|
||||
"""Compute the degree centrality for nodes.
|
||||
|
||||
The degree centrality for a node v is the fraction of nodes it
|
||||
is connected to.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A networkx graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with degree centrality as the value.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.Graph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
||||
>>> nx.degree_centrality(G)
|
||||
{0: 1.0, 1: 1.0, 2: 0.6666666666666666, 3: 0.6666666666666666}
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality, load_centrality, eigenvector_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The degree centrality values are normalized by dividing by the maximum
|
||||
possible degree in a simple graph n-1 where n is the number of nodes in G.
|
||||
|
||||
For multigraphs or graphs with self loops the maximum degree might
|
||||
be higher than n-1 and values of degree centrality greater than 1
|
||||
are possible.
|
||||
"""
|
||||
if len(G) <= 1:
|
||||
return {n: 1 for n in G}
|
||||
|
||||
s = 1.0 / (len(G) - 1.0)
|
||||
centrality = {n: d * s for n, d in G.degree()}
|
||||
return centrality
|
||||
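# --- Illustrative sketch, not part of the original module, of the note in the
# --- docstring above: with parallel edges (or self loops) the maximum degree
# --- can exceed n - 1, so degree centrality values above 1 are possible.
def _demo_multigraph_degree_centrality():  # hypothetical helper, illustration only
    G = nx.MultiGraph([(0, 1), (0, 1), (0, 2)])  # parallel edge between 0 and 1
    # Node 0 has degree 3 while n - 1 == 2, so its centrality is 1.5.
    return nx.degree_centrality(G)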
|
||||
|
||||
@not_implemented_for("undirected")
|
||||
@nx._dispatchable
|
||||
def in_degree_centrality(G):
|
||||
"""Compute the in-degree centrality for nodes.
|
||||
|
||||
The in-degree centrality for a node v is the fraction of nodes its
|
||||
incoming edges are connected to.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with in-degree centrality as values.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If G is undirected.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
||||
>>> nx.in_degree_centrality(G)
|
||||
{0: 0.0, 1: 0.3333333333333333, 2: 0.6666666666666666, 3: 0.6666666666666666}
|
||||
|
||||
See Also
|
||||
--------
|
||||
degree_centrality, out_degree_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The degree centrality values are normalized by dividing by the maximum
|
||||
possible degree in a simple graph n-1 where n is the number of nodes in G.
|
||||
|
||||
For multigraphs or graphs with self loops the maximum degree might
|
||||
be higher than n-1 and values of degree centrality greater than 1
|
||||
are possible.
|
||||
"""
|
||||
if len(G) <= 1:
|
||||
return {n: 1 for n in G}
|
||||
|
||||
s = 1.0 / (len(G) - 1.0)
|
||||
centrality = {n: d * s for n, d in G.in_degree()}
|
||||
return centrality
|
||||
|
||||
|
||||
@not_implemented_for("undirected")
|
||||
@nx._dispatchable
|
||||
def out_degree_centrality(G):
|
||||
"""Compute the out-degree centrality for nodes.
|
||||
|
||||
The out-degree centrality for a node v is the fraction of nodes its
|
||||
outgoing edges are connected to.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with out-degree centrality as values.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If G is undirected.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3), (1, 2), (1, 3)])
|
||||
>>> nx.out_degree_centrality(G)
|
||||
{0: 1.0, 1: 0.6666666666666666, 2: 0.0, 3: 0.0}
|
||||
|
||||
See Also
|
||||
--------
|
||||
degree_centrality, in_degree_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The degree centrality values are normalized by dividing by the maximum
|
||||
possible degree in a simple graph n-1 where n is the number of nodes in G.
|
||||
|
||||
For multigraphs or graphs with self loops the maximum degree might
|
||||
be higher than n-1 and values of degree centrality greater than 1
|
||||
are possible.
|
||||
"""
|
||||
if len(G) <= 1:
|
||||
return {n: 1 for n in G}
|
||||
|
||||
s = 1.0 / (len(G) - 1.0)
|
||||
centrality = {n: d * s for n, d in G.out_degree()}
|
||||
return centrality
|
||||
+107
@@ -0,0 +1,107 @@
|
||||
from itertools import combinations
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["dispersion"]
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def dispersion(G, u=None, v=None, normalized=True, alpha=1.0, b=0.0, c=0.0):
|
||||
r"""Calculate dispersion between `u` and `v` in `G`.
|
||||
|
||||
A link between two actors (`u` and `v`) has a high dispersion when their
|
||||
mutual ties (`s` and `t`) are not well connected with each other.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
u : node, optional
|
||||
The source for the dispersion score (e.g. ego node of the network).
|
||||
v : node, optional
|
||||
The target of the dispersion score if specified.
|
||||
normalized : bool
|
||||
If True (default) normalize by the embeddedness of the nodes (u and v).
|
||||
alpha, b, c : float
|
||||
Parameters for the normalization procedure. When `normalized` is True,
|
||||
the dispersion value is normalized by::
|
||||
|
||||
result = ((dispersion + b) ** alpha) / (embeddedness + c)
|
||||
|
||||
as long as the denominator is nonzero.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
If u (v) is specified, returns a dictionary of nodes with dispersion
|
||||
score for all "target" ("source") nodes. If neither u nor v is
|
||||
specified, returns a dictionary of dictionaries for all nodes 'u' in the
|
||||
graph with a dispersion score for each node 'v'.
|
||||
|
||||
Notes
|
||||
-----
|
||||
This implementation follows Lars Backstrom and Jon Kleinberg [1]_. Typical
|
||||
usage would be to run dispersion on the ego network $G_u$ if $u$ were
|
||||
specified. Running :func:`dispersion` with neither $u$ nor $v$ specified
|
||||
can take some time to complete.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Romantic Partnerships and the Dispersion of Social Ties:
|
||||
A Network Analysis of Relationship Status on Facebook.
|
||||
Lars Backstrom, Jon Kleinberg.
|
||||
https://arxiv.org/pdf/1310.6753v1.pdf
|
||||
|
||||
"""
|
||||
|
||||
def _dispersion(G_u, u, v):
|
||||
"""dispersion for all nodes 'v' in a ego network G_u of node 'u'"""
|
||||
u_nbrs = set(G_u[u])
|
||||
ST = {n for n in G_u[v] if n in u_nbrs}
|
||||
set_uv = {u, v}
|
||||
# all possible ties between the connections that u and v share
|
||||
possib = combinations(ST, 2)
|
||||
total = 0
|
||||
for s, t in possib:
|
||||
# neighbors of s that are in G_u, not including u and v
|
||||
nbrs_s = u_nbrs.intersection(G_u[s]) - set_uv
|
||||
# s and t are not directly connected
|
||||
if t not in nbrs_s:
|
||||
# s and t do not share a connection
|
||||
if nbrs_s.isdisjoint(G_u[t]):
|
||||
# tick for disp(u, v)
|
||||
total += 1
|
||||
# neighbors that u and v share
|
||||
embeddedness = len(ST)
|
||||
|
||||
dispersion_val = total
|
||||
if normalized:
|
||||
dispersion_val = (total + b) ** alpha
|
||||
if embeddedness + c != 0:
|
||||
dispersion_val /= embeddedness + c
|
||||
|
||||
return dispersion_val
|
||||
|
||||
if u is None:
|
||||
# v and u are not specified
|
||||
if v is None:
|
||||
results = {n: {} for n in G}
|
||||
for u in G:
|
||||
for v in G[u]:
|
||||
results[u][v] = _dispersion(G, u, v)
|
||||
# u is not specified, but v is
|
||||
else:
|
||||
results = dict.fromkeys(G[v], {})
|
||||
for u in G[v]:
|
||||
results[u] = _dispersion(G, v, u)
|
||||
else:
|
||||
# u is specified with no target v
|
||||
if v is None:
|
||||
results = dict.fromkeys(G[u], {})
|
||||
for v in G[u]:
|
||||
results[v] = _dispersion(G, u, v)
|
||||
# both u and v are specified
|
||||
else:
|
||||
results = _dispersion(G, u, v)
|
||||
|
||||
return results
|
||||
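# --- Illustrative usage sketch, not part of the original module. Following
# --- the typical use described in the Notes, it scores every neighbor of an
# --- ego node within that node's ego network; graph and ego are arbitrary.
def _demo_dispersion():  # hypothetical helper, illustration only
    G = nx.karate_club_graph()
    u = 0
    G_u = nx.ego_graph(G, u)
    scores = nx.dispersion(G_u, u=u)
    # Neighbors whose tie to `u` has the highest dispersion.
    return sorted(scores.items(), key=lambda kv: kv[1], reverse=True)[:3]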
+357
@@ -0,0 +1,357 @@
|
||||
"""Functions for computing eigenvector centrality."""
|
||||
|
||||
import math
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = ["eigenvector_centrality", "eigenvector_centrality_numpy"]
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def eigenvector_centrality(G, max_iter=100, tol=1.0e-6, nstart=None, weight=None):
|
||||
r"""Compute the eigenvector centrality for the graph G.
|
||||
|
||||
Eigenvector centrality computes the centrality for a node by adding
|
||||
the centrality of its predecessors. The centrality for node $i$ is the
|
||||
$i$-th element of a left eigenvector associated with the eigenvalue $\lambda$
|
||||
of maximum modulus that is positive. Such an eigenvector $x$ is
|
||||
defined up to a multiplicative constant by the equation
|
||||
|
||||
.. math::
|
||||
|
||||
\lambda x^T = x^T A,
|
||||
|
||||
where $A$ is the adjacency matrix of the graph G. By definition of
|
||||
row-column product, the equation above is equivalent to
|
||||
|
||||
.. math::
|
||||
|
||||
\lambda x_i = \sum_{j\to i}x_j.
|
||||
|
||||
That is, by adding the eigenvector centralities of the predecessors of
|
||||
$i$, one obtains the eigenvector centrality of $i$ multiplied by
|
||||
$\lambda$. In the case of undirected graphs, $x$ also solves the familiar
|
||||
right-eigenvector equation $Ax = \lambda x$.
|
||||
|
||||
By virtue of the Perron–Frobenius theorem [1]_, if G is strongly
|
||||
connected there is a unique eigenvector $x$, and all its entries
|
||||
are strictly positive.
|
||||
|
||||
If G is not strongly connected there might be several left
|
||||
eigenvectors associated with $\lambda$, and some of their elements
|
||||
might be zero.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A networkx graph.
|
||||
|
||||
max_iter : integer, optional (default=100)
|
||||
Maximum number of power iterations.
|
||||
|
||||
tol : float, optional (default=1.0e-6)
|
||||
Error tolerance (in Euclidean norm) used to check convergence in
|
||||
power iteration.
|
||||
|
||||
nstart : dictionary, optional (default=None)
|
||||
Starting value of power iteration for each node. Must have a nonzero
|
||||
projection on the desired eigenvector for the power method to converge.
|
||||
If None, this implementation uses an all-ones vector, which is a safe
|
||||
choice.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal. Otherwise holds the
|
||||
name of the edge attribute used as weight. In this measure the
|
||||
weight is interpreted as the connection strength.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with eigenvector centrality as the value. The
|
||||
associated vector has unit Euclidean norm and the values are
|
||||
nonnegative.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> centrality = nx.eigenvector_centrality(G)
|
||||
>>> sorted((v, f"{c:0.2f}") for v, c in centrality.items())
|
||||
[(0, '0.37'), (1, '0.60'), (2, '0.60'), (3, '0.37')]
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXPointlessConcept
|
||||
If the graph G is the null graph.
|
||||
|
||||
NetworkXError
|
||||
If each value in `nstart` is zero.
|
||||
|
||||
PowerIterationFailedConvergence
|
||||
If the algorithm fails to converge to the specified tolerance
|
||||
within the specified number of iterations of the power iteration
|
||||
method.
|
||||
|
||||
See Also
|
||||
--------
|
||||
eigenvector_centrality_numpy
|
||||
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
||||
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
||||
|
||||
Notes
|
||||
-----
|
||||
Eigenvector centrality was introduced by Landau [2]_ for chess
|
||||
tournaments. It was later rediscovered by Wei [3]_ and then
|
||||
popularized by Kendall [4]_ in the context of sport ranking. Berge
|
||||
introduced a general definition for graphs based on social connections
|
||||
[5]_. Bonacich [6]_ later reintroduced eigenvector centrality and made
|
||||
it popular in link analysis.
|
||||
|
||||
This function computes the left dominant eigenvector, which corresponds
|
||||
to adding the centrality of predecessors: this is the usual approach.
|
||||
To add the centrality of successors first reverse the graph with
|
||||
``G.reverse()``.
|
||||
|
||||
The implementation uses power iteration [7]_ to compute a dominant
|
||||
eigenvector starting from the provided vector `nstart`. Convergence is
|
||||
guaranteed as long as `nstart` has a nonzero projection on a dominant
|
||||
eigenvector, which certainly happens using the default value.
|
||||
|
||||
The method stops when the change in the computed vector between two
|
||||
iterations is smaller than an error tolerance of ``G.number_of_nodes()
|
||||
* tol`` or after ``max_iter`` iterations, but in the second case it
|
||||
raises an exception.
|
||||
|
||||
This implementation uses $(A + I)$ rather than the adjacency matrix
|
||||
$A$ because the change preserves eigenvectors, but it shifts the
|
||||
spectrum, thus guaranteeing convergence even for networks with
|
||||
negative eigenvalues of maximum modulus.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Abraham Berman and Robert J. Plemmons.
|
||||
"Nonnegative Matrices in the Mathematical Sciences."
|
||||
Classics in Applied Mathematics. SIAM, 1994.
|
||||
|
||||
.. [2] Edmund Landau.
|
||||
"Zur relativen Wertbemessung der Turnierresultate."
|
||||
Deutsches Wochenschach, 11:366–369, 1895.
|
||||
|
||||
.. [3] Teh-Hsing Wei.
|
||||
"The Algebraic Foundations of Ranking Theory."
|
||||
PhD thesis, University of Cambridge, 1952.
|
||||
|
||||
.. [4] Maurice G. Kendall.
|
||||
"Further contributions to the theory of paired comparisons."
|
||||
Biometrics, 11(1):43–62, 1955.
|
||||
https://www.jstor.org/stable/3001479
|
||||
|
||||
.. [5] Claude Berge
|
||||
"Théorie des graphes et ses applications."
|
||||
Dunod, Paris, France, 1958.
|
||||
|
||||
.. [6] Phillip Bonacich.
|
||||
"Technique for analyzing overlapping memberships."
|
||||
Sociological Methodology, 4:176–185, 1972.
|
||||
https://www.jstor.org/stable/270732
|
||||
|
||||
.. [7] Power iteration: https://en.wikipedia.org/wiki/Power_iteration
|
||||
|
||||
"""
|
||||
if len(G) == 0:
|
||||
raise nx.NetworkXPointlessConcept(
|
||||
"cannot compute centrality for the null graph"
|
||||
)
|
||||
# If no initial vector is provided, start with the all-ones vector.
|
||||
if nstart is None:
|
||||
nstart = {v: 1 for v in G}
|
||||
if all(v == 0 for v in nstart.values()):
|
||||
raise nx.NetworkXError("initial vector cannot have all zero values")
|
||||
# Normalize the initial vector so that each entry is in [0, 1]. This is
|
||||
# guaranteed to never have a divide-by-zero error by the previous line.
|
||||
nstart_sum = sum(nstart.values())
|
||||
x = {k: v / nstart_sum for k, v in nstart.items()}
|
||||
nnodes = G.number_of_nodes()
|
||||
# make up to max_iter iterations
|
||||
for _ in range(max_iter):
|
||||
xlast = x
|
||||
x = xlast.copy() # Start with xlast times I to iterate with (A+I)
|
||||
# do the multiplication y^T = x^T A (left eigenvector)
|
||||
for n in x:
|
||||
for nbr in G[n]:
|
||||
w = G[n][nbr].get(weight, 1) if weight else 1
|
||||
x[nbr] += xlast[n] * w
|
||||
# Normalize the vector. The normalization denominator `norm`
|
||||
# should never be zero by the Perron--Frobenius
|
||||
# theorem. However, in case it is due to numerical error, we
|
||||
# assume the norm to be one instead.
|
||||
norm = math.hypot(*x.values()) or 1
|
||||
x = {k: v / norm for k, v in x.items()}
|
||||
# Check for convergence (in the L_1 norm).
|
||||
if sum(abs(x[n] - xlast[n]) for n in x) < nnodes * tol:
|
||||
return x
|
||||
raise nx.PowerIterationFailedConvergence(max_iter)
|
||||
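# --- Illustrative sketch, not part of the original module: the (A + I) power
# --- iteration from the Notes written directly with NumPy on a small
# --- undirected graph. Graph choice and iteration count are arbitrary.
def _demo_power_iteration():  # hypothetical helper, illustration only
    import numpy as np

    G = nx.path_graph(4)
    A = nx.to_numpy_array(G)
    x = np.ones(len(G))
    for _ in range(100):
        x = x @ (A + np.eye(len(G)))  # left multiplication with shifted matrix
        x /= np.linalg.norm(x)
    # Values should be close to nx.eigenvector_centrality(G).
    return dict(zip(G, x.round(2)))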
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def eigenvector_centrality_numpy(G, weight=None, max_iter=50, tol=0):
|
||||
r"""Compute the eigenvector centrality for the graph `G`.
|
||||
|
||||
Eigenvector centrality computes the centrality for a node by adding
|
||||
the centrality of its predecessors. The centrality for node $i$ is the
|
||||
$i$-th element of a left eigenvector associated with the eigenvalue $\lambda$
|
||||
of maximum modulus that is positive. Such an eigenvector $x$ is
|
||||
defined up to a multiplicative constant by the equation
|
||||
|
||||
.. math::
|
||||
|
||||
\lambda x^T = x^T A,
|
||||
|
||||
where $A$ is the adjacency matrix of the graph `G`. By definition of
|
||||
row-column product, the equation above is equivalent to
|
||||
|
||||
.. math::
|
||||
|
||||
\lambda x_i = \sum_{j\to i}x_j.
|
||||
|
||||
That is, by adding the eigenvector centralities of the predecessors of
|
||||
$i$, one obtains the eigenvector centrality of $i$ multiplied by
|
||||
$\lambda$. In the case of undirected graphs, $x$ also solves the familiar
|
||||
right-eigenvector equation $Ax = \lambda x$.
|
||||
|
||||
By virtue of the Perron--Frobenius theorem [1]_, if `G` is (strongly)
|
||||
connected, there is a unique eigenvector $x$, and all its entries
|
||||
are strictly positive.
|
||||
|
||||
However, if `G` is not (strongly) connected, there might be several left
|
||||
eigenvectors associated with $\lambda$, and some of their elements
|
||||
might be zero.
|
||||
Depending on the method used to choose eigenvectors, round-off error can affect
|
||||
which of the infinitely many eigenvectors is reported.
|
||||
This can lead to inconsistent results for the same graph,
|
||||
which the underlying implementation is not robust to.
|
||||
For this reason, only (strongly) connected graphs are accepted.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A connected NetworkX graph.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If ``None``, all edge weights are considered equal. Otherwise holds the
|
||||
name of the edge attribute used as weight. In this measure the
|
||||
weight is interpreted as the connection strength.
|
||||
|
||||
max_iter : integer, optional (default=50)
|
||||
Maximum number of Arnoldi update iterations allowed.
|
||||
|
||||
tol : float, optional (default=0)
|
||||
Relative accuracy for eigenvalues (stopping criterion).
|
||||
The default value of 0 implies machine precision.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dict of nodes
|
||||
Dictionary of nodes with eigenvector centrality as the value. The
|
||||
associated vector has unit Euclidean norm and the values are
|
||||
nonnegative.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> centrality = nx.eigenvector_centrality_numpy(G)
|
||||
>>> print([f"{node} {centrality[node]:0.2f}" for node in centrality])
|
||||
['0 0.37', '1 0.60', '2 0.60', '3 0.37']
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXPointlessConcept
|
||||
If the graph `G` is the null graph.
|
||||
|
||||
ArpackNoConvergence
|
||||
When the requested convergence is not obtained. The currently
|
||||
converged eigenvalues and eigenvectors can be found as
|
||||
eigenvalues and eigenvectors attributes of the exception object.
|
||||
|
||||
AmbiguousSolution
|
||||
If `G` is not connected.
|
||||
|
||||
See Also
|
||||
--------
|
||||
:func:`scipy.sparse.linalg.eigs`
|
||||
eigenvector_centrality
|
||||
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
||||
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
||||
|
||||
Notes
|
||||
-----
|
||||
Eigenvector centrality was introduced by Landau [2]_ for chess
|
||||
tournaments. It was later rediscovered by Wei [3]_ and then
|
||||
popularized by Kendall [4]_ in the context of sport ranking. Berge
|
||||
introduced a general definition for graphs based on social connections
|
||||
[5]_. Bonacich [6]_ later reintroduced eigenvector centrality and made
|
||||
it popular in link analysis.
|
||||
|
||||
This function computes the left dominant eigenvector, which corresponds
|
||||
to adding the centrality of predecessors: this is the usual approach.
|
||||
To add the centrality of successors first reverse the graph with
|
||||
``G.reverse()``.
|
||||
|
||||
This implementation uses the
|
||||
:func:`SciPy sparse eigenvalue solver<scipy.sparse.linalg.eigs>` (ARPACK)
|
||||
to find the largest eigenvalue/eigenvector pair using Arnoldi iterations
|
||||
[7]_.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Abraham Berman and Robert J. Plemmons.
|
||||
"Nonnegative Matrices in the Mathematical Sciences".
|
||||
Classics in Applied Mathematics. SIAM, 1994.
|
||||
|
||||
.. [2] Edmund Landau.
|
||||
"Zur relativen Wertbemessung der Turnierresultate".
|
||||
Deutsches Wochenschach, 11:366--369, 1895.
|
||||
|
||||
.. [3] Teh-Hsing Wei.
|
||||
"The Algebraic Foundations of Ranking Theory".
|
||||
PhD thesis, University of Cambridge, 1952.
|
||||
|
||||
.. [4] Maurice G. Kendall.
|
||||
"Further contributions to the theory of paired comparisons".
|
||||
Biometrics, 11(1):43--62, 1955.
|
||||
https://www.jstor.org/stable/3001479
|
||||
|
||||
.. [5] Claude Berge.
|
||||
"Théorie des graphes et ses applications".
|
||||
Dunod, Paris, France, 1958.
|
||||
|
||||
.. [6] Phillip Bonacich.
|
||||
"Technique for analyzing overlapping memberships".
|
||||
Sociological Methodology, 4:176--185, 1972.
|
||||
https://www.jstor.org/stable/270732
|
||||
|
||||
.. [7] Arnoldi, W. E. (1951).
|
||||
"The principle of minimized iterations in the solution of the matrix eigenvalue problem".
|
||||
Quarterly of Applied Mathematics. 9 (1): 17--29.
|
||||
https://doi.org/10.1090/qam/42792
|
||||
"""
|
||||
import numpy as np
|
||||
import scipy as sp
|
||||
|
||||
if len(G) == 0:
|
||||
raise nx.NetworkXPointlessConcept(
|
||||
"cannot compute centrality for the null graph"
|
||||
)
|
||||
connected = nx.is_strongly_connected(G) if G.is_directed() else nx.is_connected(G)
|
||||
if not connected: # See gh-6888.
|
||||
raise nx.AmbiguousSolution(
|
||||
"`eigenvector_centrality_numpy` does not give consistent results for disconnected graphs"
|
||||
)
|
||||
M = nx.to_scipy_sparse_array(G, nodelist=list(G), weight=weight, dtype=float)
|
||||
_, eigenvector = sp.sparse.linalg.eigs(
|
||||
M.T, k=1, which="LR", maxiter=max_iter, tol=tol
|
||||
)
|
||||
largest = eigenvector.flatten().real
|
||||
norm = np.sign(largest.sum()) * sp.linalg.norm(largest)
|
||||
return dict(zip(G, (largest / norm).tolist()))
|
||||
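# --- Illustrative usage sketch, not part of the original module. It runs the
# --- ARPACK-backed variant above on a small weighted graph; the edge weights
# --- are arbitrary illustration values.
def _demo_eigenvector_numpy():  # hypothetical helper, illustration only
    G = nx.Graph()
    G.add_weighted_edges_from([(0, 1, 2.0), (1, 2, 1.0), (2, 3, 2.0)])
    c = nx.eigenvector_centrality_numpy(G, weight="weight")
    return {v: round(val, 3) for v, val in c.items()}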
+130
@@ -0,0 +1,130 @@
|
||||
# Helpers for current-flow betweenness and current-flow closeness
|
||||
# Lazy computations for inverse Laplacian and flow-matrix rows.
|
||||
import networkx as nx
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def flow_matrix_row(G, weight=None, dtype=float, solver="lu"):
|
||||
# Generate a row of the current-flow matrix
|
||||
import numpy as np
|
||||
|
||||
solvername = {
|
||||
"full": FullInverseLaplacian,
|
||||
"lu": SuperLUInverseLaplacian,
|
||||
"cg": CGInverseLaplacian,
|
||||
}
|
||||
n = G.number_of_nodes()
|
||||
L = nx.laplacian_matrix(G, nodelist=range(n), weight=weight).asformat("csc")
|
||||
L = L.astype(dtype)
|
||||
C = solvername[solver](L, dtype=dtype) # initialize solver
|
||||
w = C.w # w is the Laplacian matrix width
|
||||
# row-by-row flow matrix
|
||||
for u, v in sorted(sorted((u, v)) for u, v in G.edges()):
|
||||
B = np.zeros(w, dtype=dtype)
|
||||
c = G[u][v].get(weight, 1.0)
|
||||
B[u % w] = c
|
||||
B[v % w] = -c
|
||||
# get only the rows needed in the inverse laplacian
|
||||
# and multiply to get the flow matrix row
|
||||
row = B @ C.get_rows(u, v)
|
||||
yield row, (u, v)
|
||||
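# --- Illustrative sketch, not part of the original module. It consumes the
# --- generator above directly; note that it expects a graph whose nodes are
# --- already the integers 0..n-1, as produced by the relabeling in the
# --- callers. The path graph is an arbitrary test case.
def _demo_flow_matrix_row():  # hypothetical helper, illustration only
    H = nx.path_graph(4)  # nodes are already 0..3
    return [((u, v), row.round(3)) for row, (u, v) in flow_matrix_row(H, solver="full")]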
|
||||
|
||||
# Class to compute the inverse laplacian only for specified rows
|
||||
# Allows computation of the current-flow matrix without storing entire
|
||||
# inverse laplacian matrix
|
||||
class InverseLaplacian:
|
||||
def __init__(self, L, width=None, dtype=None):
|
||||
global np
|
||||
import numpy as np
|
||||
|
||||
(n, n) = L.shape
|
||||
self.dtype = dtype
|
||||
self.n = n
|
||||
if width is None:
|
||||
self.w = self.width(L)
|
||||
else:
|
||||
self.w = width
|
||||
self.C = np.zeros((self.w, n), dtype=dtype)
|
||||
self.L1 = L[1:, 1:]
|
||||
self.init_solver(L)
|
||||
|
||||
def init_solver(self, L):
|
||||
pass
|
||||
|
||||
def solve(self, r):
|
||||
raise nx.NetworkXError("Implement solver")
|
||||
|
||||
def solve_inverse(self, r):
|
||||
raise nx.NetworkXError("Implement solver")
|
||||
|
||||
def get_rows(self, r1, r2):
|
||||
for r in range(r1, r2 + 1):
|
||||
self.C[r % self.w, 1:] = self.solve_inverse(r)
|
||||
return self.C
|
||||
|
||||
def get_row(self, r):
|
||||
self.C[r % self.w, 1:] = self.solve_inverse(r)
|
||||
return self.C[r % self.w]
|
||||
|
||||
def width(self, L):
|
||||
m = 0
|
||||
for i, row in enumerate(L):
|
||||
w = 0
|
||||
y = np.nonzero(row)[-1]
|
||||
if len(y) > 0:
|
||||
v = y - i
|
||||
w = v.max() - v.min() + 1
|
||||
m = max(w, m)
|
||||
return m
|
||||
|
||||
|
||||
class FullInverseLaplacian(InverseLaplacian):
|
||||
def init_solver(self, L):
|
||||
self.IL = np.zeros(L.shape, dtype=self.dtype)
|
||||
self.IL[1:, 1:] = np.linalg.inv(self.L1.todense())
|
||||
|
||||
def solve(self, rhs):
|
||||
s = np.zeros(rhs.shape, dtype=self.dtype)
|
||||
s = self.IL @ rhs
|
||||
return s
|
||||
|
||||
def solve_inverse(self, r):
|
||||
return self.IL[r, 1:]
|
||||
|
||||
|
||||
class SuperLUInverseLaplacian(InverseLaplacian):
|
||||
def init_solver(self, L):
|
||||
import scipy as sp
|
||||
|
||||
self.lusolve = sp.sparse.linalg.factorized(self.L1.tocsc())
|
||||
|
||||
def solve_inverse(self, r):
|
||||
rhs = np.zeros(self.n, dtype=self.dtype)
|
||||
rhs[r] = 1
|
||||
return self.lusolve(rhs[1:])
|
||||
|
||||
def solve(self, rhs):
|
||||
s = np.zeros(rhs.shape, dtype=self.dtype)
|
||||
s[1:] = self.lusolve(rhs[1:])
|
||||
return s
|
||||
|
||||
|
||||
class CGInverseLaplacian(InverseLaplacian):
|
||||
def init_solver(self, L):
|
||||
global sp
|
||||
import scipy as sp
|
||||
|
||||
ilu = sp.sparse.linalg.spilu(self.L1.tocsc())
|
||||
n = self.n - 1
|
||||
self.M = sp.sparse.linalg.LinearOperator(shape=(n, n), matvec=ilu.solve)
|
||||
|
||||
def solve(self, rhs):
|
||||
s = np.zeros(rhs.shape, dtype=self.dtype)
|
||||
s[1:] = sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
|
||||
return s
|
||||
|
||||
def solve_inverse(self, r):
|
||||
rhs = np.zeros(self.n, self.dtype)
|
||||
rhs[r] = 1
|
||||
return sp.sparse.linalg.cg(self.L1, rhs[1:], M=self.M, atol=0)[0]
|
||||
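# --- Illustrative sketch, not part of the original module. It checks that the
# --- "full" and "lu" solver classes above agree on a grounded-Laplacian solve
# --- for a unit current injected at one end of a path graph and extracted at
# --- the other; the graph is an arbitrary test case.
def _demo_inverse_laplacian_solvers():  # hypothetical helper, illustration only
    import numpy as np

    G = nx.path_graph(5)  # nodes 0..4
    L = nx.laplacian_matrix(G, nodelist=range(5), weight=None).asformat("csc")
    L = L.astype(float)
    b = np.zeros(5)
    b[0], b[4] = 1, -1  # current source at node 0, sink at node 4
    full = FullInverseLaplacian(L, dtype=float).solve(b)
    lu = SuperLUInverseLaplacian(L, dtype=float).solve(b)
    return np.allclose(full, lu)  # True up to round-off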
+787
@@ -0,0 +1,787 @@
|
||||
"""Group centrality measures."""
|
||||
|
||||
from copy import deepcopy
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.centrality.betweenness import (
|
||||
_accumulate_endpoints,
|
||||
_single_source_dijkstra_path_basic,
|
||||
_single_source_shortest_path_basic,
|
||||
)
|
||||
from networkx.utils.decorators import not_implemented_for
|
||||
|
||||
__all__ = [
|
||||
"group_betweenness_centrality",
|
||||
"group_closeness_centrality",
|
||||
"group_degree_centrality",
|
||||
"group_in_degree_centrality",
|
||||
"group_out_degree_centrality",
|
||||
"prominent_group",
|
||||
]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def group_betweenness_centrality(G, C, normalized=True, weight=None, endpoints=False):
|
||||
r"""Compute the group betweenness centrality for a group of nodes.
|
||||
|
||||
Group betweenness centrality of a group of nodes $C$ is the sum of the
|
||||
fraction of all-pairs shortest paths that pass through any vertex in $C$
|
||||
|
||||
.. math::
|
||||
|
||||
c_B(C) =\sum_{s,t \in V} \frac{\sigma(s, t|C)}{\sigma(s, t)}
|
||||
|
||||
where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
|
||||
shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
|
||||
those paths passing through some node in group $C$. Note that
|
||||
$(s, t)$ are not members of the group ($V-C$ is the set of nodes
|
||||
in $V$ that are not in $C$).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
C : list or set or list of lists or list of sets
|
||||
A group or a list of groups containing nodes which belong to G, for which group betweenness
|
||||
centrality is to be calculated.
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True, group betweenness is normalized by `1/((|V|-|C|)(|V|-|C|-1))`
|
||||
where `|V|` is the number of nodes in G and `|C|` is the number of nodes in C.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
The weight of an edge is treated as the length or distance between the two sides.
|
||||
|
||||
endpoints : bool, optional (default=False)
|
||||
If True include the endpoints in the shortest path counts.
|
||||
|
||||
Raises
|
||||
------
|
||||
NodeNotFound
|
||||
If node(s) in C are not present in G.
|
||||
|
||||
Returns
|
||||
-------
|
||||
betweenness : list of floats or float
|
||||
If C is a single group then return a float. If C is a list with
|
||||
several groups then return a list of group betweenness centralities.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
Group betweenness centrality is described in [1]_ and its importance discussed in [3]_.
|
||||
The initial implementation of the algorithm is mentioned in [2]_. This function uses
|
||||
an improved algorithm presented in [4]_.
|
||||
|
||||
The number of nodes in the group must be a maximum of n - 2 where `n`
|
||||
is the total number of nodes in the graph.
|
||||
|
||||
For weighted graphs the edge weights must be greater than zero.
|
||||
Zero edge weights can produce an infinite number of equal length
|
||||
paths between pairs of nodes.
|
||||
|
||||
The total number of paths between source and target is counted
|
||||
differently for directed and undirected graphs. Directed paths
|
||||
between "u" and "v" are counted as two possible paths (one each
|
||||
direction) while undirected paths between "u" and "v" are counted
|
||||
as one path. Said another way, the sum in the expression above is
|
||||
over all ``s != t`` for directed graphs and for ``s < t`` for undirected graphs.
|
||||
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M G Everett and S P Borgatti:
|
||||
The Centrality of Groups and Classes.
|
||||
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
||||
http://www.analytictech.com/borgatti/group_centrality.htm
|
||||
.. [2] Ulrik Brandes:
|
||||
On Variants of Shortest-Path Betweenness
|
||||
Centrality and their Generic Computation.
|
||||
Social Networks 30(2):136-145, 2008.
|
||||
http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.72.9610&rep=rep1&type=pdf
|
||||
.. [3] Sourav Medya et. al.:
|
||||
Group Centrality Maximization via Network Design.
|
||||
SIAM International Conference on Data Mining, SDM 2018, 126–134.
|
||||
https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
|
||||
.. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev.
|
||||
"Fast algorithm for successive computation of group betweenness centrality."
|
||||
https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709
|
||||
|
||||
"""
|
||||
GBC = [] # initialize betweenness
|
||||
list_of_groups = True
|
||||
# check whether C contains one or many groups
|
||||
if any(el in G for el in C):
|
||||
C = [C]
|
||||
list_of_groups = False
|
||||
set_v = {node for group in C for node in group}
|
||||
if set_v - G.nodes: # element(s) of C not in G
|
||||
raise nx.NodeNotFound(f"The node(s) {set_v - G.nodes} are in C but not in G.")
|
||||
|
||||
# pre-processing
|
||||
PB, sigma, D = _group_preprocessing(G, set_v, weight)
|
||||
|
||||
# the algorithm for each group
|
||||
for group in C:
|
||||
group = set(group) # set of nodes in group
|
||||
# initialize the matrices of the sigma and the PB
|
||||
GBC_group = 0
|
||||
sigma_m = deepcopy(sigma)
|
||||
PB_m = deepcopy(PB)
|
||||
sigma_m_v = deepcopy(sigma_m)
|
||||
PB_m_v = deepcopy(PB_m)
|
||||
for v in group:
|
||||
GBC_group += PB_m[v][v]
|
||||
for x in group:
|
||||
for y in group:
|
||||
dxvy = 0
|
||||
dxyv = 0
|
||||
dvxy = 0
|
||||
if not (
|
||||
sigma_m[x][y] == 0 or sigma_m[x][v] == 0 or sigma_m[v][y] == 0
|
||||
):
|
||||
if D[x][v] == D[x][y] + D[y][v]:
|
||||
dxyv = sigma_m[x][y] * sigma_m[y][v] / sigma_m[x][v]
|
||||
if D[x][y] == D[x][v] + D[v][y]:
|
||||
dxvy = sigma_m[x][v] * sigma_m[v][y] / sigma_m[x][y]
|
||||
if D[v][y] == D[v][x] + D[x][y]:
|
||||
dvxy = sigma_m[v][x] * sigma[x][y] / sigma[v][y]
|
||||
sigma_m_v[x][y] = sigma_m[x][y] * (1 - dxvy)
|
||||
PB_m_v[x][y] = PB_m[x][y] - PB_m[x][y] * dxvy
|
||||
if y != v:
|
||||
PB_m_v[x][y] -= PB_m[x][v] * dxyv
|
||||
if x != v:
|
||||
PB_m_v[x][y] -= PB_m[v][y] * dvxy
|
||||
sigma_m, sigma_m_v = sigma_m_v, sigma_m
|
||||
PB_m, PB_m_v = PB_m_v, PB_m
|
||||
|
||||
# endpoints
|
||||
v, c = len(G), len(group)
|
||||
if not endpoints:
|
||||
scale = 0
|
||||
# if the graph is connected then subtract the endpoints from
|
||||
# the count for all the nodes in the graph. else count how many
|
||||
# nodes are connected to the group's nodes and subtract that.
|
||||
if nx.is_directed(G):
|
||||
if nx.is_strongly_connected(G):
|
||||
scale = c * (2 * v - c - 1)
|
||||
elif nx.is_connected(G):
|
||||
scale = c * (2 * v - c - 1)
|
||||
if scale == 0:
|
||||
for group_node1 in group:
|
||||
for node in D[group_node1]:
|
||||
if node != group_node1:
|
||||
if node in group:
|
||||
scale += 1
|
||||
else:
|
||||
scale += 2
|
||||
GBC_group -= scale
|
||||
|
||||
# normalized
|
||||
if normalized:
|
||||
scale = 1 / ((v - c) * (v - c - 1))
|
||||
GBC_group *= scale
|
||||
|
||||
# If undirected then count only the undirected edges
|
||||
elif not G.is_directed():
|
||||
GBC_group /= 2
|
||||
|
||||
GBC.append(GBC_group)
|
||||
if list_of_groups:
|
||||
return GBC
|
||||
return GBC[0]
|
||||
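# --- Illustrative usage sketch, not part of the original module. It compares
# --- the group betweenness of a two-node group with the sum of the members'
# --- individual betweenness values on a path graph; the group is an arbitrary
# --- illustration choice.
def _demo_group_betweenness():  # hypothetical helper, illustration only
    G = nx.path_graph(7)
    group = [2, 4]
    gbc = nx.group_betweenness_centrality(G, group, normalized=False)
    single = nx.betweenness_centrality(G, normalized=False)
    # A shortest path through both members is counted once for the group, so
    # the group value is at most the sum of the individual values.
    return gbc, single[2] + single[4]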
|
||||
|
||||
def _group_preprocessing(G, set_v, weight):
|
||||
sigma = {}
|
||||
delta = {}
|
||||
D = {}
|
||||
betweenness = dict.fromkeys(G, 0)
|
||||
for s in G:
|
||||
if weight is None: # use BFS
|
||||
S, P, sigma[s], D[s] = _single_source_shortest_path_basic(G, s)
|
||||
else: # use Dijkstra's algorithm
|
||||
S, P, sigma[s], D[s] = _single_source_dijkstra_path_basic(G, s, weight)
|
||||
betweenness, delta[s] = _accumulate_endpoints(betweenness, S, P, sigma[s], s)
|
||||
for i in delta[s]: # add the paths from s to i and rescale sigma
|
||||
if s != i:
|
||||
delta[s][i] += 1
|
||||
if weight is not None:
|
||||
sigma[s][i] = sigma[s][i] / 2
|
||||
# building the path betweenness matrix only for nodes that appear in the group
|
||||
PB = dict.fromkeys(G)
|
||||
for group_node1 in set_v:
|
||||
PB[group_node1] = dict.fromkeys(G, 0.0)
|
||||
for group_node2 in set_v:
|
||||
if group_node2 not in D[group_node1]:
|
||||
continue
|
||||
for node in G:
|
||||
# if node is connected to the two group nodes then continue
|
||||
if group_node2 in D[node] and group_node1 in D[node]:
|
||||
if (
|
||||
D[node][group_node2]
|
||||
== D[node][group_node1] + D[group_node1][group_node2]
|
||||
):
|
||||
PB[group_node1][group_node2] += (
|
||||
delta[node][group_node2]
|
||||
* sigma[node][group_node1]
|
||||
* sigma[group_node1][group_node2]
|
||||
/ sigma[node][group_node2]
|
||||
)
|
||||
return PB, sigma, D
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def prominent_group(
|
||||
G, k, weight=None, C=None, endpoints=False, normalized=True, greedy=False
|
||||
):
|
||||
r"""Find the prominent group of size $k$ in graph $G$. The prominence of the
|
||||
group is evaluated by the group betweenness centrality.
|
||||
|
||||
Group betweenness centrality of a group of nodes $C$ is the sum of the
|
||||
fraction of all-pairs shortest paths that pass through any vertex in $C$
|
||||
|
||||
.. math::
|
||||
|
||||
c_B(C) =\sum_{s,t \in V} \frac{\sigma(s, t|C)}{\sigma(s, t)}
|
||||
|
||||
where $V$ is the set of nodes, $\sigma(s, t)$ is the number of
|
||||
shortest $(s, t)$-paths, and $\sigma(s, t|C)$ is the number of
|
||||
those paths passing through some node in group $C$. Note that
|
||||
$(s, t)$ are not members of the group ($V-C$ is the set of nodes
|
||||
in $V$ that are not in $C$).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
k : int
|
||||
The number of nodes in the group.
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True, group betweenness is normalized by ``1/((|V|-|C|)(|V|-|C|-1))``
|
||||
where ``|V|`` is the number of nodes in G and ``|C|`` is the number of
|
||||
nodes in C.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
The weight of an edge is treated as the length or distance between the two sides.
|
||||
|
||||
endpoints : bool, optional (default=False)
|
||||
If True include the endpoints in the shortest path counts.
|
||||
|
||||
C : list or set, optional (default=None)
|
||||
List of nodes which won't be candidates for the prominent group.
|
||||
|
||||
greedy : bool, optional (default=False)
|
||||
If True, use a naive greedy algorithm to find a (possibly non-optimal)
prominent group. For scale-free networks the results are negligibly
below the optimal results.
|
||||
|
||||
Raises
|
||||
------
|
||||
NodeNotFound
|
||||
If node(s) in C are not present in G.
|
||||
|
||||
Returns
|
||||
-------
|
||||
max_GBC : float
|
||||
The group betweenness centrality of the prominent group.
|
||||
|
||||
max_group : list
|
||||
The list of nodes in the prominent group.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality, group_betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
Group betweenness centrality is described in [1]_ and its importance discussed in [3]_.
|
||||
The algorithm is described in [2]_ and is based on techniques mentioned in [4]_.
|
||||
|
||||
The number of nodes in the group must be a maximum of ``n - 2`` where ``n``
|
||||
is the total number of nodes in the graph.
|
||||
|
||||
For weighted graphs the edge weights must be greater than zero.
|
||||
Zero edge weights can produce an infinite number of equal length
|
||||
paths between pairs of nodes.
|
||||
|
||||
The total number of paths between source and target is counted
|
||||
differently for directed and undirected graphs. Directed paths
|
||||
between "u" and "v" are counted as two possible paths (one each
|
||||
direction) while undirected paths between "u" and "v" are counted
|
||||
as one path. Said another way, the sum in the expression above is
|
||||
over all ``s != t`` for directed graphs and for ``s < t`` for undirected graphs.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M G Everett and S P Borgatti:
|
||||
The Centrality of Groups and Classes.
|
||||
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
||||
http://www.analytictech.com/borgatti/group_centrality.htm
|
||||
.. [2] Rami Puzis, Yuval Elovici, and Shlomi Dolev:
|
||||
"Finding the Most Prominent Group in Complex Networks"
|
||||
AI communications 20(4): 287-296, 2007.
|
||||
https://www.researchgate.net/profile/Rami_Puzis2/publication/220308855
|
||||
.. [3] Sourav Medya et. al.:
|
||||
Group Centrality Maximization via Network Design.
|
||||
SIAM International Conference on Data Mining, SDM 2018, 126–134.
|
||||
https://sites.cs.ucsb.edu/~arlei/pubs/sdm18.pdf
|
||||
.. [4] Rami Puzis, Yuval Elovici, and Shlomi Dolev.
|
||||
"Fast algorithm for successive computation of group betweenness centrality."
|
||||
https://journals.aps.org/pre/pdf/10.1103/PhysRevE.76.056709
|
||||
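Examples
--------
A minimal usage sketch (illustrative only; the exact score depends on the
``normalized`` and ``endpoints`` options described above):

>>> G = nx.path_graph(5)
>>> max_GBC, max_group = nx.prominent_group(G, 2)
>>> len(max_group)
2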
"""
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
|
||||
if C is not None:
|
||||
C = set(C)
|
||||
if C - G.nodes: # element(s) of C not in G
|
||||
raise nx.NodeNotFound(f"The node(s) {C - G.nodes} are in C but not in G.")
|
||||
nodes = list(G.nodes - C)
|
||||
else:
|
||||
nodes = list(G.nodes)
|
||||
DF_tree = nx.Graph()
|
||||
DF_tree.__networkx_cache__ = None # Disable caching
|
||||
PB, sigma, D = _group_preprocessing(G, nodes, weight)
|
||||
betweenness = pd.DataFrame.from_dict(PB)
|
||||
if C is not None:
|
||||
for node in C:
|
||||
# remove from the betweenness all the nodes not part of the group
|
||||
betweenness.drop(index=node, inplace=True)
|
||||
betweenness.drop(columns=node, inplace=True)
|
||||
CL = [node for _, node in sorted(zip(np.diag(betweenness), nodes), reverse=True)]
|
||||
max_GBC = 0
|
||||
max_group = []
|
||||
DF_tree.add_node(
|
||||
1,
|
||||
CL=CL,
|
||||
betweenness=betweenness,
|
||||
GBC=0,
|
||||
GM=[],
|
||||
sigma=sigma,
|
||||
cont=dict(zip(nodes, np.diag(betweenness))),
|
||||
)
|
||||
|
||||
# the algorithm
|
||||
DF_tree.nodes[1]["heu"] = 0
|
||||
for i in range(k):
|
||||
DF_tree.nodes[1]["heu"] += DF_tree.nodes[1]["cont"][DF_tree.nodes[1]["CL"][i]]
|
||||
max_GBC, DF_tree, max_group = _dfbnb(
|
||||
G, k, DF_tree, max_GBC, 1, D, max_group, nodes, greedy
|
||||
)
|
||||
|
||||
v = len(G)
|
||||
if not endpoints:
|
||||
scale = 0
|
||||
# if the graph is connected then subtract the endpoints from
|
||||
# the count for all the nodes in the graph. else count how many
|
||||
# nodes are connected to the group's nodes and subtract that.
|
||||
if nx.is_directed(G):
|
||||
if nx.is_strongly_connected(G):
|
||||
scale = k * (2 * v - k - 1)
|
||||
elif nx.is_connected(G):
|
||||
scale = k * (2 * v - k - 1)
|
||||
if scale == 0:
|
||||
for group_node1 in max_group:
|
||||
for node in D[group_node1]:
|
||||
if node != group_node1:
|
||||
if node in max_group:
|
||||
scale += 1
|
||||
else:
|
||||
scale += 2
|
||||
max_GBC -= scale
|
||||
|
||||
# normalized
|
||||
if normalized:
|
||||
scale = 1 / ((v - k) * (v - k - 1))
|
||||
max_GBC *= scale
|
||||
|
||||
# If undirected then count only the undirected edges
|
||||
elif not G.is_directed():
|
||||
max_GBC /= 2
|
||||
max_GBC = float(f"{max_GBC:.2f}")
|
||||
return max_GBC, max_group
|
||||
|
||||
|
||||
def _dfbnb(G, k, DF_tree, max_GBC, root, D, max_group, nodes, greedy):
|
||||
# stopping condition - if we found a group of size k with a higher GBC, return it
|
||||
if len(DF_tree.nodes[root]["GM"]) == k and DF_tree.nodes[root]["GBC"] > max_GBC:
|
||||
return DF_tree.nodes[root]["GBC"], DF_tree, DF_tree.nodes[root]["GM"]
|
||||
# stopping condition - prune if the group already has k members, or fewer than
# k - |GM| candidates remain in the candidate list, or the heuristic plus the
# GBC cannot exceed the maximal GBC found so far
|
||||
if (
|
||||
len(DF_tree.nodes[root]["GM"]) == k
|
||||
or len(DF_tree.nodes[root]["CL"]) <= k - len(DF_tree.nodes[root]["GM"])
|
||||
or DF_tree.nodes[root]["GBC"] + DF_tree.nodes[root]["heu"] <= max_GBC
|
||||
):
|
||||
return max_GBC, DF_tree, max_group
|
||||
|
||||
# finding the heuristic of both children
|
||||
node_p, node_m, DF_tree = _heuristic(k, root, DF_tree, D, nodes, greedy)
|
||||
|
||||
# find the child with the bigger heuristic + GBC and expand that node
# first; if greedy, only expand the plus node
|
||||
if greedy:
|
||||
max_GBC, DF_tree, max_group = _dfbnb(
|
||||
G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
|
||||
)
|
||||
|
||||
elif (
|
||||
DF_tree.nodes[node_p]["GBC"] + DF_tree.nodes[node_p]["heu"]
|
||||
> DF_tree.nodes[node_m]["GBC"] + DF_tree.nodes[node_m]["heu"]
|
||||
):
|
||||
max_GBC, DF_tree, max_group = _dfbnb(
|
||||
G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
|
||||
)
|
||||
max_GBC, DF_tree, max_group = _dfbnb(
|
||||
G, k, DF_tree, max_GBC, node_m, D, max_group, nodes, greedy
|
||||
)
|
||||
else:
|
||||
max_GBC, DF_tree, max_group = _dfbnb(
|
||||
G, k, DF_tree, max_GBC, node_m, D, max_group, nodes, greedy
|
||||
)
|
||||
max_GBC, DF_tree, max_group = _dfbnb(
|
||||
G, k, DF_tree, max_GBC, node_p, D, max_group, nodes, greedy
|
||||
)
|
||||
return max_GBC, DF_tree, max_group
|
||||
|
||||
|
||||
def _heuristic(k, root, DF_tree, D, nodes, greedy):
|
||||
import numpy as np
|
||||
|
||||
# This helper function adds two nodes to DF_tree - a left child and a
# right child - and finds their heuristic, CL, GBC, and GM
|
||||
node_p = DF_tree.number_of_nodes() + 1
|
||||
node_m = DF_tree.number_of_nodes() + 2
|
||||
added_node = DF_tree.nodes[root]["CL"][0]
|
||||
|
||||
# adding the plus node
|
||||
DF_tree.add_nodes_from([(node_p, deepcopy(DF_tree.nodes[root]))])
|
||||
DF_tree.nodes[node_p]["GM"].append(added_node)
|
||||
DF_tree.nodes[node_p]["GBC"] += DF_tree.nodes[node_p]["cont"][added_node]
|
||||
root_node = DF_tree.nodes[root]
|
||||
for x in nodes:
|
||||
for y in nodes:
|
||||
dxvy = 0
|
||||
dxyv = 0
|
||||
dvxy = 0
|
||||
if not (
|
||||
root_node["sigma"][x][y] == 0
|
||||
or root_node["sigma"][x][added_node] == 0
|
||||
or root_node["sigma"][added_node][y] == 0
|
||||
):
|
||||
if D[x][added_node] == D[x][y] + D[y][added_node]:
|
||||
dxyv = (
|
||||
root_node["sigma"][x][y]
|
||||
* root_node["sigma"][y][added_node]
|
||||
/ root_node["sigma"][x][added_node]
|
||||
)
|
||||
if D[x][y] == D[x][added_node] + D[added_node][y]:
|
||||
dxvy = (
|
||||
root_node["sigma"][x][added_node]
|
||||
* root_node["sigma"][added_node][y]
|
||||
/ root_node["sigma"][x][y]
|
||||
)
|
||||
if D[added_node][y] == D[added_node][x] + D[x][y]:
|
||||
dvxy = (
|
||||
root_node["sigma"][added_node][x]
|
||||
* root_node["sigma"][x][y]
|
||||
/ root_node["sigma"][added_node][y]
|
||||
)
|
||||
DF_tree.nodes[node_p]["sigma"][x][y] = root_node["sigma"][x][y] * (1 - dxvy)
|
||||
DF_tree.nodes[node_p]["betweenness"].loc[y, x] = (
|
||||
root_node["betweenness"][x][y] - root_node["betweenness"][x][y] * dxvy
|
||||
)
|
||||
if y != added_node:
|
||||
DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
|
||||
root_node["betweenness"][x][added_node] * dxyv
|
||||
)
|
||||
if x != added_node:
|
||||
DF_tree.nodes[node_p]["betweenness"].loc[y, x] -= (
|
||||
root_node["betweenness"][added_node][y] * dvxy
|
||||
)
|
||||
|
||||
DF_tree.nodes[node_p]["CL"] = [
|
||||
node
|
||||
for _, node in sorted(
|
||||
zip(np.diag(DF_tree.nodes[node_p]["betweenness"]), nodes), reverse=True
|
||||
)
|
||||
if node not in DF_tree.nodes[node_p]["GM"]
|
||||
]
|
||||
DF_tree.nodes[node_p]["cont"] = dict(
|
||||
zip(nodes, np.diag(DF_tree.nodes[node_p]["betweenness"]))
|
||||
)
|
||||
DF_tree.nodes[node_p]["heu"] = 0
|
||||
for i in range(k - len(DF_tree.nodes[node_p]["GM"])):
|
||||
DF_tree.nodes[node_p]["heu"] += DF_tree.nodes[node_p]["cont"][
|
||||
DF_tree.nodes[node_p]["CL"][i]
|
||||
]
|
||||
|
||||
# adding the minus node - don't insert the first node in the CL to GM
|
||||
# Insert the minus node only if the algorithm isn't greedy
|
||||
if not greedy:
|
||||
DF_tree.add_nodes_from([(node_m, deepcopy(DF_tree.nodes[root]))])
|
||||
DF_tree.nodes[node_m]["CL"].pop(0)
|
||||
DF_tree.nodes[node_m]["cont"].pop(added_node)
|
||||
DF_tree.nodes[node_m]["heu"] = 0
|
||||
for i in range(k - len(DF_tree.nodes[node_m]["GM"])):
|
||||
DF_tree.nodes[node_m]["heu"] += DF_tree.nodes[node_m]["cont"][
|
||||
DF_tree.nodes[node_m]["CL"][i]
|
||||
]
|
||||
else:
|
||||
node_m = None
|
||||
|
||||
return node_p, node_m, DF_tree
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def group_closeness_centrality(G, S, weight=None):
|
||||
r"""Compute the group closeness centrality for a group of nodes.
|
||||
|
||||
Group closeness centrality of a group of nodes $S$ is a measure
|
||||
of how close the group is to the other nodes in the graph.
|
||||
|
||||
.. math::
|
||||
|
||||
c_{close}(S) = \frac{|V-S|}{\sum_{v \in V-S} d_{S, v}}
|
||||
|
||||
d_{S, v} = \min_{u \in S} (d_{u, v})
|
||||
|
||||
where $V$ is the set of nodes, $d_{S, v}$ is the distance of
|
||||
the group $S$ from $v$ defined as above. ($V-S$ is the set of nodes
|
||||
in $V$ that are not in $S$).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
S : list or set
|
||||
S is a group of nodes which belong to G, for which group closeness
|
||||
centrality is to be calculated.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
The weight of an edge is treated as the length or distance between the two sides.
|
||||
|
||||
Raises
|
||||
------
|
||||
NodeNotFound
|
||||
If node(s) in S are not present in G.
|
||||
|
||||
Returns
|
||||
-------
|
||||
closeness : float
|
||||
Group closeness centrality of the group S.
|
||||
|
||||
See Also
|
||||
--------
|
||||
closeness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The measure was introduced in [1]_.
|
||||
The formula implemented here is described in [2]_.
|
||||
|
||||
Higher values of closeness indicate greater centrality.
|
||||
|
||||
It is assumed that 1 / 0 is 0 (required in the case of directed graphs,
|
||||
or when a shortest path length is 0).
|
||||
|
||||
The number of nodes in the group must be a maximum of n - 1 where `n`
|
||||
is the total number of nodes in the graph.
|
||||
|
||||
For directed graphs, the incoming distance is utilized here. To use the
|
||||
outward distance, act on `G.reverse()`.
|
||||
|
||||
For weighted graphs the edge weights must be greater than zero.
|
||||
Zero edge weights can produce an infinite number of equal length
|
||||
paths between pairs of nodes.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M G Everett and S P Borgatti:
|
||||
The Centrality of Groups and Classes.
|
||||
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
||||
http://www.analytictech.com/borgatti/group_centrality.htm
|
||||
.. [2] J. Zhao et. al.:
|
||||
Measuring and Maximizing Group Closeness Centrality over
|
||||
Disk Resident Graphs.
|
||||
WWW Conference Proceedings, 2014. 689-694.
|
||||
https://doi.org/10.1145/2567948.2579356
|
||||
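Examples
--------
A small illustrative sketch (values worked out by hand for this graph):
in the path graph ``0-1-2-3-4`` the group ``{0, 1}`` lies at distances
1, 2 and 3 from the remaining nodes, so its group closeness is
``3 / (1 + 2 + 3) = 0.5``.

>>> nx.group_closeness_centrality(nx.path_graph(5), [0, 1])
0.5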
"""
|
||||
if G.is_directed():
|
||||
G = G.reverse() # reverse view
|
||||
closeness = 0 # initialize to 0
|
||||
V = set(G) # set of nodes in G
|
||||
S = set(S) # set of nodes in group S
|
||||
V_S = V - S # set of nodes in V but not S
|
||||
shortest_path_lengths = nx.multi_source_dijkstra_path_length(G, S, weight=weight)
|
||||
# accumulation
|
||||
for v in V_S:
|
||||
try:
|
||||
closeness += shortest_path_lengths[v]
|
||||
except KeyError: # no path exists
|
||||
closeness += 0
|
||||
try:
|
||||
closeness = len(V_S) / closeness
|
||||
except ZeroDivisionError: # 1 / 0 assumed as 0
|
||||
closeness = 0
|
||||
return closeness
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def group_degree_centrality(G, S):
|
||||
"""Compute the group degree centrality for a group of nodes.
|
||||
|
||||
Group degree centrality of a group of nodes $S$ is the fraction
|
||||
of non-group members connected to group members.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
S : list or set
|
||||
S is a group of nodes which belong to G, for which group degree
|
||||
centrality is to be calculated.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If node(s) in S are not in G.
|
||||
|
||||
Returns
|
||||
-------
|
||||
centrality : float
|
||||
Group degree centrality of the group S.
|
||||
|
||||
See Also
|
||||
--------
|
||||
degree_centrality
|
||||
group_in_degree_centrality
|
||||
group_out_degree_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The measure was introduced in [1]_.
|
||||
|
||||
The number of nodes in the group must be a maximum of n - 1 where `n`
|
||||
is the total number of nodes in the graph.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] M G Everett and S P Borgatti:
|
||||
The Centrality of Groups and Classes.
|
||||
Journal of Mathematical Sociology. 23(3): 181-201. 1999.
|
||||
http://www.analytictech.com/borgatti/group_centrality.htm
|
||||
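Examples
--------
An illustrative sketch: in a star graph the center alone is adjacent to
every other node, so its group degree centrality is 1.

>>> G = nx.star_graph(4)
>>> nx.group_degree_centrality(G, [0])
1.0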
"""
|
||||
centrality = len(set().union(*[set(G.neighbors(i)) for i in S]) - set(S))
|
||||
centrality /= len(G.nodes()) - len(S)
|
||||
return centrality
|
||||
|
||||
|
||||
@not_implemented_for("undirected")
|
||||
@nx._dispatchable
|
||||
def group_in_degree_centrality(G, S):
|
||||
"""Compute the group in-degree centrality for a group of nodes.
|
||||
|
||||
Group in-degree centrality of a group of nodes $S$ is the fraction
|
||||
of non-group members connected to group members by incoming edges.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
S : list or set
|
||||
S is a group of nodes which belong to G, for which group in-degree
|
||||
centrality is to be calculated.
|
||||
|
||||
Returns
|
||||
-------
|
||||
centrality : float
|
||||
Group in-degree centrality of the group S.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If G is undirected.
|
||||
|
||||
NodeNotFound
|
||||
If node(s) in S are not in G.
|
||||
|
||||
See Also
|
||||
--------
|
||||
degree_centrality
|
||||
group_degree_centrality
|
||||
group_out_degree_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The number of nodes in the group must be a maximum of n - 1 where `n`
|
||||
is the total number of nodes in the graph.
|
||||
|
||||
`G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
|
||||
so for group in-degree centrality, the reverse graph is used.
|
||||
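Examples
--------
A minimal sketch: every other node points into node 0, so the group
``{0}`` is reached by all three non-group members.

>>> G = nx.DiGraph([(1, 0), (2, 0), (3, 0)])
>>> nx.group_in_degree_centrality(G, [0])
1.0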
"""
|
||||
return group_degree_centrality(G.reverse(), S)
|
||||
|
||||
|
||||
@not_implemented_for("undirected")
|
||||
@nx._dispatchable
|
||||
def group_out_degree_centrality(G, S):
|
||||
"""Compute the group out-degree centrality for a group of nodes.
|
||||
|
||||
Group out-degree centrality of a group of nodes $S$ is the fraction
|
||||
of non-group members connected to group members by outgoing edges.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
S : list or set
|
||||
S is a group of nodes which belong to G, for which group out-degree
|
||||
centrality is to be calculated.
|
||||
|
||||
Returns
|
||||
-------
|
||||
centrality : float
|
||||
Group out-degree centrality of the group S.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXNotImplemented
|
||||
If G is undirected.
|
||||
|
||||
NodeNotFound
|
||||
If node(s) in S are not in G.
|
||||
|
||||
See Also
|
||||
--------
|
||||
degree_centrality
|
||||
group_degree_centrality
|
||||
group_in_degree_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The number of nodes in the group must be a maximum of n - 1 where `n`
|
||||
is the total number of nodes in the graph.
|
||||
|
||||
`G.neighbors(i)` gives nodes with an outward edge from i, in a DiGraph,
|
||||
so for group out-degree centrality, the graph itself is used.
|
||||
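Examples
--------
A minimal sketch mirroring the in-degree example: node 0 has outgoing
edges to all three non-group members.

>>> G = nx.DiGraph([(0, 1), (0, 2), (0, 3)])
>>> nx.group_out_degree_centrality(G, [0])
1.0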
"""
|
||||
return group_degree_centrality(G, S)
|
||||
@@ -0,0 +1,89 @@
|
||||
"""Functions for computing the harmonic centrality of a graph."""
|
||||
|
||||
from functools import partial
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["harmonic_centrality"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="distance")
|
||||
def harmonic_centrality(G, nbunch=None, distance=None, sources=None):
|
||||
r"""Compute harmonic centrality for nodes.
|
||||
|
||||
Harmonic centrality [1]_ of a node `u` is the sum of the reciprocal
|
||||
of the shortest path distances from all other nodes to `u`
|
||||
|
||||
.. math::
|
||||
|
||||
C(u) = \sum_{v \neq u} \frac{1}{d(v, u)}
|
||||
|
||||
where `d(v, u)` is the shortest-path distance between `v` and `u`.
|
||||
|
||||
If `sources` is given as an argument, the returned harmonic centrality
|
||||
values are calculated as the sum of the reciprocals of the shortest
|
||||
path distances from the nodes specified in `sources` to `u` instead
|
||||
of from all nodes to `u`.
|
||||
|
||||
Notice that higher values indicate higher centrality.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
nbunch : container (default: all nodes in G)
|
||||
Container of nodes for which harmonic centrality values are calculated.
|
||||
|
||||
sources : container (default: all nodes in G)
|
||||
Container of nodes `v` over which reciprocal distances are computed.
|
||||
Nodes not in `G` are silently ignored.
|
||||
|
||||
distance : edge attribute key, optional (default=None)
|
||||
Use the specified edge attribute as the edge distance in shortest
|
||||
path calculations. If `None`, then each edge will have distance equal to 1.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with harmonic centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality, load_centrality, eigenvector_centrality,
|
||||
degree_centrality, closeness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
If the 'distance' keyword is set to an edge attribute key then the
|
||||
shortest-path length will be computed using Dijkstra's algorithm with
|
||||
that edge attribute as the edge weight.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Boldi, Paolo, and Sebastiano Vigna. "Axioms for centrality."
|
||||
Internet Mathematics 10.3-4 (2014): 222-262.
|
||||
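Examples
--------
A short illustrative sketch (hand-computed for this graph): on the path
graph ``0-1-2`` the middle node is at distance 1 from both endpoints, so
its harmonic centrality is ``1/1 + 1/1 = 2``, while each endpoint scores
``1/1 + 1/2 = 1.5``.

>>> sorted(nx.harmonic_centrality(nx.path_graph(3)).items())
[(0, 1.5), (1, 2.0), (2, 1.5)]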
"""
|
||||
|
||||
nbunch = set(G.nbunch_iter(nbunch) if nbunch is not None else G.nodes)
|
||||
sources = set(G.nbunch_iter(sources) if sources is not None else G.nodes)
|
||||
|
||||
centrality = {u: 0 for u in nbunch}
|
||||
|
||||
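# Computing shortest paths costs one traversal per source node, so when
# there are fewer nbunch (target) nodes than sources it is cheaper to run
# the traversals from the targets instead: swap the two sets, reverse the
# graph in the directed case, and use `transposed` to credit each
# reciprocal distance to the correct node.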
transposed = False
|
||||
if len(nbunch) < len(sources):
|
||||
transposed = True
|
||||
nbunch, sources = sources, nbunch
|
||||
if nx.is_directed(G):
|
||||
G = nx.reverse(G, copy=False)
|
||||
|
||||
spl = partial(nx.shortest_path_length, G, weight=distance)
|
||||
for v in sources:
|
||||
dist = spl(v)
|
||||
for u in nbunch.intersection(dist):
|
||||
d = dist[u]
|
||||
if d == 0: # handle u == v and edges with 0 weight
|
||||
continue
|
||||
centrality[v if transposed else u] += 1 / d
|
||||
|
||||
return centrality
|
||||
@@ -0,0 +1,331 @@
|
||||
"""Katz centrality."""
|
||||
|
||||
import math
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import not_implemented_for
|
||||
|
||||
__all__ = ["katz_centrality", "katz_centrality_numpy"]
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def katz_centrality(
|
||||
G,
|
||||
alpha=0.1,
|
||||
beta=1.0,
|
||||
max_iter=1000,
|
||||
tol=1.0e-6,
|
||||
nstart=None,
|
||||
normalized=True,
|
||||
weight=None,
|
||||
):
|
||||
r"""Compute the Katz centrality for the nodes of the graph G.
|
||||
|
||||
Katz centrality computes the centrality for a node based on the centrality
|
||||
of its neighbors. It is a generalization of the eigenvector centrality. The
|
||||
Katz centrality for node $i$ is
|
||||
|
||||
.. math::
|
||||
|
||||
x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
|
||||
|
||||
where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
|
||||
|
||||
The parameter $\beta$ controls the initial centrality and
|
||||
|
||||
.. math::
|
||||
|
||||
\alpha < \frac{1}{\lambda_{\max}}.
|
||||
|
||||
Katz centrality computes the relative influence of a node within a
|
||||
network by measuring the number of the immediate neighbors (first
|
||||
degree nodes) and also all other nodes in the network that connect
|
||||
to the node under consideration through these immediate neighbors.
|
||||
|
||||
Extra weight can be provided to immediate neighbors through the
|
||||
parameter $\beta$. Connections made with distant neighbors
|
||||
are, however, penalized by an attenuation factor $\alpha$ which
|
||||
should be strictly less than the inverse largest eigenvalue of the
|
||||
adjacency matrix in order for the Katz centrality to be computed
|
||||
correctly. More information is provided in [1]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
alpha : float, optional (default=0.1)
|
||||
Attenuation factor
|
||||
|
||||
beta : scalar or dictionary, optional (default=1.0)
|
||||
Weight attributed to the immediate neighborhood. If not a scalar, the
|
||||
dictionary must have a value for every node.
|
||||
|
||||
max_iter : integer, optional (default=1000)
|
||||
Maximum number of iterations in power method.
|
||||
|
||||
tol : float, optional (default=1.0e-6)
|
||||
Error tolerance used to check convergence in power method iteration.
|
||||
|
||||
nstart : dictionary, optional
|
||||
Starting value of Katz iteration for each node.
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True normalize the resulting values.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
In this measure the weight is interpreted as the connection strength.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with Katz centrality as the value.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If the parameter `beta` is not a scalar but lacks a value for at least
|
||||
one node
|
||||
|
||||
PowerIterationFailedConvergence
|
||||
If the algorithm fails to converge to the specified tolerance
|
||||
within the specified number of iterations of the power iteration
|
||||
method.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import math
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix
|
||||
>>> centrality = nx.katz_centrality(G, 1 / phi - 0.01)
|
||||
>>> for n, c in sorted(centrality.items()):
|
||||
... print(f"{n} {c:.2f}")
|
||||
0 0.37
|
||||
1 0.60
|
||||
2 0.60
|
||||
3 0.37
|
||||
|
||||
See Also
|
||||
--------
|
||||
katz_centrality_numpy
|
||||
eigenvector_centrality
|
||||
eigenvector_centrality_numpy
|
||||
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
||||
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
||||
|
||||
Notes
|
||||
-----
|
||||
Katz centrality was introduced by [2]_.
|
||||
|
||||
This algorithm uses the power method to find the eigenvector
|
||||
corresponding to the largest eigenvalue of the adjacency matrix of ``G``.
|
||||
The parameter ``alpha`` should be strictly less than the inverse of largest
|
||||
eigenvalue of the adjacency matrix for the algorithm to converge.
|
||||
You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
|
||||
eigenvalue of the adjacency matrix.
|
||||
The iteration will stop after ``max_iter`` iterations or an error tolerance of
|
||||
``number_of_nodes(G) * tol`` has been reached.
|
||||
|
||||
For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
|
||||
Katz centrality approaches the results for eigenvector centrality.
|
||||
|
||||
For directed graphs this finds "left" eigenvectors which corresponds
|
||||
to the in-edges in the graph. For out-edges Katz centrality,
|
||||
first reverse the graph with ``G.reverse()``.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Mark E. J. Newman:
|
||||
Networks: An Introduction.
|
||||
Oxford University Press, USA, 2010, p. 720.
|
||||
.. [2] Leo Katz:
|
||||
A New Status Index Derived from Sociometric Index.
|
||||
Psychometrika 18(1):39–43, 1953
|
||||
https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
|
||||
"""
|
||||
if len(G) == 0:
|
||||
return {}
|
||||
|
||||
nnodes = G.number_of_nodes()
|
||||
|
||||
if nstart is None:
|
||||
# choose starting vector with entries of 0
|
||||
x = {n: 0 for n in G}
|
||||
else:
|
||||
x = nstart
|
||||
|
||||
try:
|
||||
b = dict.fromkeys(G, float(beta))
|
||||
except (TypeError, ValueError, AttributeError) as err:
|
||||
b = beta
|
||||
if set(beta) != set(G):
|
||||
raise nx.NetworkXError(
|
||||
"beta dictionary must have a value for every node"
|
||||
) from err
|
||||
|
||||
# make up to max_iter iterations
|
||||
for _ in range(max_iter):
|
||||
xlast = x
|
||||
x = dict.fromkeys(xlast, 0)
|
||||
# do the multiplication y^T = Alpha * x^T A + Beta
|
||||
for n in x:
|
||||
for nbr in G[n]:
|
||||
x[nbr] += xlast[n] * G[n][nbr].get(weight, 1)
|
||||
for n in x:
|
||||
x[n] = alpha * x[n] + b[n]
|
||||
|
||||
# check convergence
|
||||
error = sum(abs(x[n] - xlast[n]) for n in x)
|
||||
if error < nnodes * tol:
|
||||
if normalized:
|
||||
# normalize vector
|
||||
try:
|
||||
s = 1.0 / math.hypot(*x.values())
|
||||
except ZeroDivisionError:
|
||||
s = 1.0
|
||||
else:
|
||||
s = 1
|
||||
for n in x:
|
||||
x[n] *= s
|
||||
return x
|
||||
raise nx.PowerIterationFailedConvergence(max_iter)
|
||||
|
||||
|
||||
@not_implemented_for("multigraph")
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def katz_centrality_numpy(G, alpha=0.1, beta=1.0, normalized=True, weight=None):
|
||||
r"""Compute the Katz centrality for the graph G.
|
||||
|
||||
Katz centrality computes the centrality for a node based on the centrality
|
||||
of its neighbors. It is a generalization of the eigenvector centrality. The
|
||||
Katz centrality for node $i$ is
|
||||
|
||||
.. math::
|
||||
|
||||
x_i = \alpha \sum_{j} A_{ij} x_j + \beta,
|
||||
|
||||
where $A$ is the adjacency matrix of graph G with eigenvalues $\lambda$.
|
||||
|
||||
The parameter $\beta$ controls the initial centrality and
|
||||
|
||||
.. math::
|
||||
|
||||
\alpha < \frac{1}{\lambda_{\max}}.
|
||||
|
||||
Katz centrality computes the relative influence of a node within a
|
||||
network by measuring the number of the immediate neighbors (first
|
||||
degree nodes) and also all other nodes in the network that connect
|
||||
to the node under consideration through these immediate neighbors.
|
||||
|
||||
Extra weight can be provided to immediate neighbors through the
|
||||
parameter $\beta$. Connections made with distant neighbors
|
||||
are, however, penalized by an attenuation factor $\alpha$ which
|
||||
should be strictly less than the inverse largest eigenvalue of the
|
||||
adjacency matrix in order for the Katz centrality to be computed
|
||||
correctly. More information is provided in [1]_.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph
|
||||
|
||||
alpha : float
|
||||
Attenuation factor
|
||||
|
||||
beta : scalar or dictionary, optional (default=1.0)
|
||||
Weight attributed to the immediate neighborhood. If not a scalar, the
dictionary must have a value for every node.
|
||||
|
||||
normalized : bool
|
||||
If True normalize the resulting values.
|
||||
|
||||
weight : None or string, optional
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
In this measure the weight is interpreted as the connection strength.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with Katz centrality as the value.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXError
|
||||
If the parameter `beta` is not a scalar but lacks a value for at least
|
||||
one node
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> import math
|
||||
>>> G = nx.path_graph(4)
|
||||
>>> phi = (1 + math.sqrt(5)) / 2.0 # largest eigenvalue of adj matrix
|
||||
>>> centrality = nx.katz_centrality_numpy(G, 1 / phi)
|
||||
>>> for n, c in sorted(centrality.items()):
|
||||
... print(f"{n} {c:.2f}")
|
||||
0 0.37
|
||||
1 0.60
|
||||
2 0.60
|
||||
3 0.37
|
||||
|
||||
See Also
|
||||
--------
|
||||
katz_centrality
|
||||
eigenvector_centrality_numpy
|
||||
eigenvector_centrality
|
||||
:func:`~networkx.algorithms.link_analysis.pagerank_alg.pagerank`
|
||||
:func:`~networkx.algorithms.link_analysis.hits_alg.hits`
|
||||
|
||||
Notes
|
||||
-----
|
||||
Katz centrality was introduced by [2]_.
|
||||
|
||||
This algorithm uses a direct linear solver to solve the above equation.
|
||||
The parameter ``alpha`` should be strictly less than the inverse of largest
|
||||
eigenvalue of the adjacency matrix for there to be a solution.
|
||||
You can use ``max(nx.adjacency_spectrum(G))`` to get $\lambda_{\max}$ the largest
|
||||
eigenvalue of the adjacency matrix.
|
||||
|
||||
For strongly connected graphs, as $\alpha \to 1/\lambda_{\max}$, and $\beta > 0$,
|
||||
Katz centrality approaches the results for eigenvector centrality.
|
||||
|
||||
For directed graphs this finds "left" eigenvectors which corresponds
|
||||
to the in-edges in the graph. For out-edges Katz centrality,
|
||||
first reverse the graph with ``G.reverse()``.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Mark E. J. Newman:
|
||||
Networks: An Introduction.
|
||||
Oxford University Press, USA, 2010, p. 173.
|
||||
.. [2] Leo Katz:
|
||||
A New Status Index Derived from Sociometric Index.
|
||||
Psychometrika 18(1):39–43, 1953
|
||||
https://link.springer.com/content/pdf/10.1007/BF02289026.pdf
|
||||
"""
|
||||
import numpy as np
|
||||
|
||||
if len(G) == 0:
|
||||
return {}
|
||||
try:
|
||||
nodelist = beta.keys()
|
||||
if set(nodelist) != set(G):
|
||||
raise nx.NetworkXError("beta dictionary must have a value for every node")
|
||||
b = np.array(list(beta.values()), dtype=float)
|
||||
except AttributeError:
|
||||
nodelist = list(G)
|
||||
try:
|
||||
b = np.ones((len(nodelist), 1)) * beta
|
||||
except (TypeError, ValueError, AttributeError) as err:
|
||||
raise nx.NetworkXError("beta must be a number") from err
|
||||
|
||||
A = nx.adjacency_matrix(G, nodelist=nodelist, weight=weight).todense().T
|
||||
n = A.shape[0]
|
||||
centrality = np.linalg.solve(np.eye(n, n) - (alpha * A), b).squeeze()
|
||||
|
||||
# Normalize: rely on truediv to cast to float, then tolist to make Python numbers
|
||||
norm = np.sign(sum(centrality)) * np.linalg.norm(centrality) if normalized else 1
|
||||
return dict(zip(nodelist, (centrality / norm).tolist()))
|
||||
@@ -0,0 +1,150 @@
|
||||
"""
|
||||
Laplacian centrality measures.
|
||||
"""
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["laplacian_centrality"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def laplacian_centrality(
|
||||
G, normalized=True, nodelist=None, weight="weight", walk_type=None, alpha=0.95
|
||||
):
|
||||
r"""Compute the Laplacian centrality for nodes in the graph `G`.
|
||||
|
||||
The Laplacian Centrality of a node ``i`` is measured by the drop in the
|
||||
Laplacian Energy after deleting node ``i`` from the graph. The Laplacian Energy
|
||||
is the sum of the squared eigenvalues of a graph's Laplacian matrix.
|
||||
|
||||
.. math::
|
||||
|
||||
C_L(u_i,G) = \frac{(\Delta E)_i}{E_L (G)} = \frac{E_L (G)-E_L (G_i)}{E_L (G)}
|
||||
|
||||
E_L (G) = \sum_{i=0}^n \lambda_i^2
|
||||
|
||||
Where $E_L (G)$ is the Laplacian energy of graph `G`,
|
||||
$E_L (G_i)$ is the Laplacian energy of graph `G` after deleting node ``i``
|
||||
and $\lambda_i$ are the eigenvalues of `G`'s Laplacian matrix.
|
||||
This formula shows the normalized value. Without normalization,
|
||||
the numerator on the right side is returned.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A networkx graph
|
||||
|
||||
normalized : bool (default = True)
|
||||
If True the centrality score is scaled so the sum over all nodes is 1.
|
||||
If False the centrality score for each node is the drop in Laplacian
|
||||
energy when that node is removed.
|
||||
|
||||
nodelist : list, optional (default = None)
|
||||
The rows and columns are ordered according to the nodes in nodelist.
|
||||
If nodelist is None, then the ordering is produced by G.nodes().
|
||||
|
||||
weight: string or None, optional (default=`weight`)
|
||||
Optional parameter `weight` to compute the Laplacian matrix.
|
||||
The edge data key used to compute each value in the matrix.
|
||||
If None, then each edge has weight 1.
|
||||
|
||||
walk_type : string or None, optional (default=None)
|
||||
Optional parameter `walk_type` used when calling
|
||||
:func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
|
||||
One of ``"random"``, ``"lazy"``, or ``"pagerank"``. If ``walk_type=None``
|
||||
(the default), then a value is selected according to the properties of `G`:
|
||||
- ``walk_type="random"`` if `G` is strongly connected and aperiodic
|
||||
- ``walk_type="lazy"`` if `G` is strongly connected but not aperiodic
|
||||
- ``walk_type="pagerank"`` for all other cases.
|
||||
|
||||
alpha : real (default = 0.95)
|
||||
Optional parameter `alpha` used when calling
|
||||
:func:`directed_laplacian_matrix <networkx.directed_laplacian_matrix>`.
|
||||
(1 - alpha) is the teleportation probability used with pagerank.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with Laplacian centrality as the value.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.Graph()
|
||||
>>> edges = [(0, 1, 4), (0, 2, 2), (2, 1, 1), (1, 3, 2), (1, 4, 2), (4, 5, 1)]
|
||||
>>> G.add_weighted_edges_from(edges)
|
||||
>>> sorted((v, f"{c:0.2f}") for v, c in laplacian_centrality(G).items())
|
||||
[(0, '0.70'), (1, '0.90'), (2, '0.28'), (3, '0.22'), (4, '0.26'), (5, '0.04')]
|
||||
|
||||
Notes
|
||||
-----
|
||||
The algorithm is implemented based on [1]_ with an extension to directed graphs
|
||||
using the ``directed_laplacian_matrix`` function.
|
||||
|
||||
Raises
|
||||
------
|
||||
NetworkXPointlessConcept
|
||||
If the graph `G` is the null graph.
|
||||
ZeroDivisionError
|
||||
If the graph `G` has no edges (is empty) and normalization is requested.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Qi, X., Fuller, E., Wu, Q., Wu, Y., and Zhang, C.-Q. (2012).
|
||||
Laplacian centrality: A new centrality measure for weighted networks.
|
||||
Information Sciences, 194:240-253.
|
||||
https://math.wvu.edu/~cqzhang/Publication-files/my-paper/INS-2012-Laplacian-W.pdf
|
||||
|
||||
See Also
|
||||
--------
|
||||
:func:`~networkx.linalg.laplacianmatrix.directed_laplacian_matrix`
|
||||
:func:`~networkx.linalg.laplacianmatrix.laplacian_matrix`
|
||||
"""
|
||||
import numpy as np
|
||||
import scipy as sp
|
||||
|
||||
if len(G) == 0:
|
||||
raise nx.NetworkXPointlessConcept("null graph has no centrality defined")
|
||||
if G.size(weight=weight) == 0:
|
||||
if normalized:
|
||||
raise ZeroDivisionError("graph with no edges has zero full energy")
|
||||
return {n: 0 for n in G}
|
||||
|
||||
if nodelist is not None:
|
||||
nodeset = set(G.nbunch_iter(nodelist))
|
||||
if len(nodeset) != len(nodelist):
|
||||
raise nx.NetworkXError("nodelist has duplicate nodes or nodes not in G")
|
||||
nodes = nodelist + [n for n in G if n not in nodeset]
|
||||
else:
|
||||
nodelist = nodes = list(G)
|
||||
|
||||
if G.is_directed():
|
||||
lap_matrix = nx.directed_laplacian_matrix(G, nodes, weight, walk_type, alpha)
|
||||
else:
|
||||
lap_matrix = nx.laplacian_matrix(G, nodes, weight).toarray()
|
||||
|
||||
full_energy = np.power(sp.linalg.eigh(lap_matrix, eigvals_only=True), 2).sum()
|
||||
|
||||
# calculate laplacian centrality
|
||||
laplace_centralities_dict = {}
|
||||
for i, node in enumerate(nodelist):
|
||||
# remove row and col i from lap_matrix
|
||||
all_but_i = list(np.arange(lap_matrix.shape[0]))
|
||||
all_but_i.remove(i)
|
||||
A_2 = lap_matrix[all_but_i, :][:, all_but_i]
|
||||
|
||||
# Adjust diagonal for removed row
|
||||
new_diag = lap_matrix.diagonal() - abs(lap_matrix[:, i])
|
||||
np.fill_diagonal(A_2, new_diag[all_but_i])
|
||||
|
||||
if len(all_but_i) > 0: # catches degenerate case of single node
|
||||
new_energy = np.power(sp.linalg.eigh(A_2, eigvals_only=True), 2).sum()
|
||||
else:
|
||||
new_energy = 0.0
|
||||
|
||||
lapl_cent = full_energy - new_energy
|
||||
if normalized:
|
||||
lapl_cent = lapl_cent / full_energy
|
||||
|
||||
laplace_centralities_dict[node] = float(lapl_cent)
|
||||
|
||||
return laplace_centralities_dict
|
||||
@@ -0,0 +1,200 @@
|
||||
"""Load centrality."""
|
||||
|
||||
from operator import itemgetter
|
||||
|
||||
import networkx as nx
|
||||
|
||||
__all__ = ["load_centrality", "edge_load_centrality"]
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def newman_betweenness_centrality(G, v=None, cutoff=None, normalized=True, weight=None):
|
||||
"""Compute load centrality for nodes.
|
||||
|
||||
The load centrality of a node is the fraction of all shortest
|
||||
paths that pass through that node.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A networkx graph.
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
If True the betweenness values are normalized by ``b = b / ((n - 1) * (n - 2))``
where ``n`` is the number of nodes in G.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, edge weights are ignored.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
The weight of an edge is treated as the length or distance between the two sides.
|
||||
|
||||
cutoff : int or float, optional (default=None)
|
||||
If specified, only consider paths of length <= cutoff.
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
Load centrality is slightly different from betweenness. It was originally
|
||||
introduced by [2]_. For this load algorithm see [1]_.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Mark E. J. Newman:
|
||||
Scientific collaboration networks. II.
|
||||
Shortest paths, weighted networks, and centrality.
|
||||
Physical Review E 64, 016132, 2001.
|
||||
http://journals.aps.org/pre/abstract/10.1103/PhysRevE.64.016132
|
||||
.. [2] Kwang-Il Goh, Byungnam Kahng and Doochul Kim
|
||||
Universal behavior of Load Distribution in Scale-Free Networks.
|
||||
Physical Review Letters 87(27):1–4, 2001.
|
||||
https://doi.org/10.1103/PhysRevLett.87.278701
|
||||
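Examples
--------
A small illustrative sketch (hand-computed): in the path graph ``0-1-2``
every shortest path between the two endpoints passes through the middle
node, which therefore carries all of the normalized load.

>>> nx.load_centrality(nx.path_graph(3))
{0: 0.0, 1: 1.0, 2: 0.0}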
"""
|
||||
if v is not None: # only one node
|
||||
betweenness = 0.0
|
||||
for source in G:
|
||||
ubetween = _node_betweenness(G, source, cutoff, False, weight)
|
||||
betweenness += ubetween[v] if v in ubetween else 0
|
||||
if normalized:
|
||||
order = G.order()
|
||||
if order <= 2:
|
||||
return betweenness # no normalization b=0 for all nodes
|
||||
betweenness *= 1.0 / ((order - 1) * (order - 2))
|
||||
else:
|
||||
betweenness = {}.fromkeys(G, 0.0)
|
||||
for source in betweenness:
|
||||
ubetween = _node_betweenness(G, source, cutoff, False, weight)
|
||||
for vk in ubetween:
|
||||
betweenness[vk] += ubetween[vk]
|
||||
if normalized:
|
||||
order = G.order()
|
||||
if order <= 2:
|
||||
return betweenness # no normalization b=0 for all nodes
|
||||
scale = 1.0 / ((order - 1) * (order - 2))
|
||||
for v in betweenness:
|
||||
betweenness[v] *= scale
|
||||
return betweenness # all nodes
|
||||
|
||||
|
||||
def _node_betweenness(G, source, cutoff=False, normalized=True, weight=None):
|
||||
"""Node betweenness_centrality helper:
|
||||
|
||||
See betweenness_centrality for what you probably want.
|
||||
This actually computes "load" and not betweenness.
|
||||
See https://networkx.lanl.gov/ticket/103
|
||||
|
||||
This calculates the load of each node for paths from a single source.
|
||||
(The fraction of the number of shortest paths from source that go
|
||||
through each node.)
|
||||
|
||||
To get the load for a node you need to do all-pairs shortest paths.
|
||||
|
||||
If weight is not None then use Dijkstra for finding shortest paths.
|
||||
"""
|
||||
# get the predecessor and path length data
|
||||
if weight is None:
|
||||
(pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
|
||||
else:
|
||||
(pred, length) = nx.dijkstra_predecessor_and_distance(G, source, cutoff, weight)
|
||||
|
||||
# order the nodes by path length
|
||||
onodes = [(l, vert) for (vert, l) in length.items()]
|
||||
onodes.sort()
|
||||
onodes[:] = [vert for (l, vert) in onodes if l > 0]
|
||||
|
||||
# initialize betweenness
|
||||
between = {}.fromkeys(length, 1.0)
|
||||
|
||||
while onodes:
|
||||
v = onodes.pop()
|
||||
if v in pred:
|
||||
num_paths = len(pred[v]) # Discount betweenness if more than
|
||||
for x in pred[v]: # one shortest path.
|
||||
if x == source: # stop if hit source because all remaining v
|
||||
break # also have pred[v]==[source]
|
||||
between[x] += between[v] / num_paths
|
||||
# remove source
|
||||
for v in between:
|
||||
between[v] -= 1
|
||||
# rescale to be between 0 and 1
|
||||
if normalized:
|
||||
l = len(between)
|
||||
if l > 2:
|
||||
# scale by 1/the number of possible paths
|
||||
scale = 1 / ((l - 1) * (l - 2))
|
||||
for v in between:
|
||||
between[v] *= scale
|
||||
return between
|
||||
|
||||
|
||||
load_centrality = newman_betweenness_centrality
|
||||
|
||||
|
||||
@nx._dispatchable
|
||||
def edge_load_centrality(G, cutoff=False):
|
||||
"""Compute edge load.
|
||||
|
||||
WARNING: This concept of edge load has not been analysed
|
||||
or discussed outside of NetworkX that we know of.
|
||||
It is based loosely on load_centrality in the sense that
|
||||
it counts the number of shortest paths which cross each edge.
|
||||
This function is for demonstration and testing purposes.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A networkx graph
|
||||
|
||||
cutoff : int or float, optional (default=False)
|
||||
If specified, only consider paths of length <= cutoff.
|
||||
|
||||
Returns
|
||||
-------
|
||||
A dict keyed by edge 2-tuple to the number of shortest paths
|
||||
which use that edge. Where more than one path is shortest
|
||||
the count is divided equally among paths.
|
||||
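Examples
--------
A minimal sketch showing the return format (values hand-computed for this
small graph); note that each edge appears under both its ``(u, v)`` and
``(v, u)`` keys.

>>> nx.edge_load_centrality(nx.path_graph(3))
{(0, 1): 4.0, (1, 0): 4.0, (1, 2): 4.0, (2, 1): 4.0}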
"""
|
||||
betweenness = {}
|
||||
for u, v in G.edges():
|
||||
betweenness[(u, v)] = 0.0
|
||||
betweenness[(v, u)] = 0.0
|
||||
|
||||
for source in G:
|
||||
ubetween = _edge_betweenness(G, source, cutoff=cutoff)
|
||||
for e, ubetweenv in ubetween.items():
|
||||
betweenness[e] += ubetweenv # cumulative total
|
||||
return betweenness
|
||||
|
||||
|
||||
def _edge_betweenness(G, source, nodes=None, cutoff=False):
|
||||
"""Edge betweenness helper."""
|
||||
# get the predecessor data
|
||||
(pred, length) = nx.predecessor(G, source, cutoff=cutoff, return_seen=True)
|
||||
# order the nodes by path length
|
||||
onodes = [n for n, d in sorted(length.items(), key=itemgetter(1))]
|
||||
# initialize betweenness, doesn't account for any edge weights
|
||||
between = {}
|
||||
for u, v in G.edges(nodes):
|
||||
between[(u, v)] = 1.0
|
||||
between[(v, u)] = 1.0
|
||||
|
||||
while onodes: # work through all paths
|
||||
v = onodes.pop()
|
||||
if v in pred:
|
||||
# Discount betweenness if more than one shortest path.
|
||||
num_paths = len(pred[v])
|
||||
for w in pred[v]:
|
||||
if w in pred:
|
||||
# Discount betweenness, mult path
|
||||
num_paths = len(pred[w])
|
||||
for x in pred[w]:
|
||||
between[(w, x)] += between[(v, w)] / num_paths
|
||||
between[(x, w)] += between[(w, v)] / num_paths
|
||||
return between
|
||||
@@ -0,0 +1,128 @@
|
||||
"""Percolation centrality measures."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.algorithms.centrality.betweenness import (
|
||||
_single_source_dijkstra_path_basic as dijkstra,
|
||||
)
|
||||
from networkx.algorithms.centrality.betweenness import (
|
||||
_single_source_shortest_path_basic as shortest_path,
|
||||
)
|
||||
|
||||
__all__ = ["percolation_centrality"]
|
||||
|
||||
|
||||
@nx._dispatchable(node_attrs="attribute", edge_attrs="weight")
|
||||
def percolation_centrality(G, attribute="percolation", states=None, weight=None):
|
||||
r"""Compute the percolation centrality for nodes.
|
||||
|
||||
Percolation centrality of a node $v$, at a given time, is defined
|
||||
as the proportion of ‘percolated paths’ that go through that node.
|
||||
|
||||
This measure quantifies relative impact of nodes based on their
|
||||
topological connectivity, as well as their percolation states.
|
||||
|
||||
Percolation states of nodes are used to depict network percolation
|
||||
scenarios (such as during infection transmission in a social network
|
||||
of individuals, spreading of computer viruses on computer networks, or
|
||||
transmission of disease over a network of towns) over time. In this
|
||||
measure usually the percolation state is expressed as a decimal
|
||||
between 0.0 and 1.0.
|
||||
|
||||
When all nodes are in the same percolated state this measure is
|
||||
equivalent to betweenness centrality.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A NetworkX graph.
|
||||
|
||||
attribute : None or string, optional (default='percolation')
|
||||
Name of the node attribute to use for percolation state, used
|
||||
if `states` is None. If a node does not have the attribute, its
state is set to the default value of 1.
If no node has the attribute, every state is 1 and the centrality
measure is equivalent to betweenness centrality.
|
||||
|
||||
states : None or dict, optional (default=None)
|
||||
Specify percolation states for the nodes, nodes as keys states
|
||||
as values.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, all edge weights are considered equal.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
The weight of an edge is treated as the length or distance between the two sides.
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
nodes : dictionary
|
||||
Dictionary of nodes with percolation centrality as the value.
|
||||
|
||||
See Also
|
||||
--------
|
||||
betweenness_centrality
|
||||
|
||||
Notes
|
||||
-----
|
||||
The algorithm is from Mahendra Piraveenan, Mikhail Prokopenko, and
Liaquat Hossain [1]_.
Pair dependencies are calculated and accumulated using [2]_.
|
||||
|
||||
For weighted graphs the edge weights must be greater than zero.
|
||||
Zero edge weights can produce an infinite number of equal length
|
||||
paths between pairs of nodes.
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Mahendra Piraveenan, Mikhail Prokopenko, Liaquat Hossain
|
||||
Percolation Centrality: Quantifying Graph-Theoretic Impact of Nodes
|
||||
during Percolation in Networks
|
||||
http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0053095
|
||||
.. [2] Ulrik Brandes:
|
||||
A Faster Algorithm for Betweenness Centrality.
|
||||
Journal of Mathematical Sociology 25(2):163-177, 2001.
|
||||
https://doi.org/10.1080/0022250X.2001.9990249
|
||||
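Examples
--------
A minimal sketch (hand-computed): when no percolation attribute is set,
every node gets the default state of 1 and the result coincides with
betweenness centrality, here for the path graph ``0-1-2-3``.

>>> G = nx.path_graph(4)
>>> {n: round(c, 4) for n, c in nx.percolation_centrality(G).items()}
{0: 0.0, 1: 0.6667, 2: 0.6667, 3: 0.0}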
"""
|
||||
percolation = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
|
||||
|
||||
nodes = G
|
||||
|
||||
if states is None:
|
||||
states = nx.get_node_attributes(nodes, attribute, default=1)
|
||||
|
||||
# sum of all percolation states
|
||||
p_sigma_x_t = 0.0
|
||||
for v in states.values():
|
||||
p_sigma_x_t += v
|
||||
|
||||
for s in nodes:
|
||||
# single source shortest paths
|
||||
if weight is None: # use BFS
|
||||
S, P, sigma, _ = shortest_path(G, s)
|
||||
else: # use Dijkstra's algorithm
|
||||
S, P, sigma, _ = dijkstra(G, s, weight)
|
||||
# accumulation
|
||||
percolation = _accumulate_percolation(
|
||||
percolation, S, P, sigma, s, states, p_sigma_x_t
|
||||
)
|
||||
|
||||
n = len(G)
|
||||
|
||||
for v in percolation:
|
||||
percolation[v] *= 1 / (n - 2)
|
||||
|
||||
return percolation
|
||||
|
||||
|
||||
def _accumulate_percolation(percolation, S, P, sigma, s, states, p_sigma_x_t):
|
||||
delta = dict.fromkeys(S, 0)
|
||||
while S:
|
||||
w = S.pop()
|
||||
coeff = (1 + delta[w]) / sigma[w]
|
||||
for v in P[w]:
|
||||
delta[v] += sigma[v] * coeff
|
||||
if w != s:
|
||||
# percolation weight
|
||||
pw_s_w = states[s] / (p_sigma_x_t - states[w])
|
||||
percolation[w] += delta[w] * pw_s_w
|
||||
return percolation
|
||||
@@ -0,0 +1,209 @@
|
||||
"""Functions for computing reaching centrality of a node or a graph."""
|
||||
|
||||
import networkx as nx
|
||||
from networkx.utils import pairwise
|
||||
|
||||
__all__ = ["global_reaching_centrality", "local_reaching_centrality"]
|
||||
|
||||
|
||||
def _average_weight(G, path, weight=None):
|
||||
"""Returns the average weight of an edge in a weighted path.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : graph
|
||||
A networkx graph.
|
||||
|
||||
path : list
|
||||
A list of vertices that define the path.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
If None, edge weights are ignored. Then the average weight of an edge
|
||||
is assumed to be the multiplicative inverse of the length of the path.
|
||||
Otherwise holds the name of the edge attribute used as weight.
|
||||
"""
|
||||
path_length = len(path) - 1
|
||||
if path_length <= 0:
|
||||
return 0
|
||||
if weight is None:
|
||||
return 1 / path_length
|
||||
total_weight = sum(G.edges[i, j][weight] for i, j in pairwise(path))
|
||||
return total_weight / path_length
|
||||
|
||||
|
||||
@nx._dispatchable(edge_attrs="weight")
|
||||
def global_reaching_centrality(G, weight=None, normalized=True):
|
||||
"""Returns the global reaching centrality of a directed graph.
|
||||
|
||||
The *global reaching centrality* of a weighted directed graph is the
|
||||
average over all nodes of the difference between the local reaching
|
||||
centrality of the node and the greatest local reaching centrality of
|
||||
any node in the graph [1]_. For more information on the local
|
||||
reaching centrality, see :func:`local_reaching_centrality`.
|
||||
Informally, the local reaching centrality is the proportion of the
|
||||
graph that is reachable from the neighbors of the node.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
G : DiGraph
|
||||
A networkx DiGraph.
|
||||
|
||||
weight : None or string, optional (default=None)
|
||||
Attribute to use for edge weights. If ``None``, each edge weight
|
||||
is assumed to be one. A higher weight implies a stronger
|
||||
connection between nodes and a *shorter* path length.
|
||||
|
||||
normalized : bool, optional (default=True)
|
||||
Whether to normalize the edge weights by the total sum of edge
|
||||
weights.
|
||||
|
||||
Returns
|
||||
-------
|
||||
h : float
|
||||
The global reaching centrality of the graph.
|
||||
|
||||
Examples
|
||||
--------
|
||||
>>> G = nx.DiGraph()
|
||||
>>> G.add_edge(1, 2)
|
||||
>>> G.add_edge(1, 3)
|
||||
>>> nx.global_reaching_centrality(G)
|
||||
1.0
|
||||
>>> G.add_edge(3, 2)
|
||||
>>> nx.global_reaching_centrality(G)
|
||||
0.75
|
||||
|
||||
See also
|
||||
--------
|
||||
local_reaching_centrality
|
||||
|
||||
References
|
||||
----------
|
||||
.. [1] Mones, Enys, Lilla Vicsek, and Tamás Vicsek.
|
||||
"Hierarchy Measure for Complex Networks."
|
||||
*PLoS ONE* 7.3 (2012): e33799.
|
||||
https://doi.org/10.1371/journal.pone.0033799
|
||||
"""
|
||||
if nx.is_negatively_weighted(G, weight=weight):
|
||||
raise nx.NetworkXError("edge weights must be positive")
|
||||
total_weight = G.size(weight=weight)
|
||||
if total_weight <= 0:
|
||||
raise nx.NetworkXError("Size of G must be positive")
|
||||
# If provided, weights must be interpreted as connection strength
|
||||
# (so higher weights are more likely to be chosen). However, the
|
||||
# shortest path algorithms in NetworkX assume the provided "weight"
|
||||
# is actually a distance (so edges with higher weight are less
|
||||
# likely to be chosen). Therefore we need to invert the weights when
|
||||
# computing shortest paths.
|
||||
#
|
||||
# If weight is None, we leave it as-is so that the shortest path
|
||||
# algorithm can use a faster, unweighted algorithm.
|
||||
if weight is not None:
|
||||
|
||||
def as_distance(u, v, d):
|
||||
return total_weight / d.get(weight, 1)
|
||||
|
||||
shortest_paths = nx.shortest_path(G, weight=as_distance)
|
||||
else:
|
||||
shortest_paths = nx.shortest_path(G)
|
||||
|
||||
centrality = local_reaching_centrality
|
||||
# TODO This can be trivially parallelized.
|
||||
lrc = [
|
||||
centrality(G, node, paths=paths, weight=weight, normalized=normalized)
|
||||
for node, paths in shortest_paths.items()
|
||||
]
|
||||
|
||||
max_lrc = max(lrc)
|
||||
return sum(max_lrc - c for c in lrc) / (len(G) - 1)
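

# Illustrative sketch (editor's addition, not part of the upstream module; the
# helper name is ours): a quick check of the hierarchy intuition behind the
# measure. In a directed star whose edges all point away from the hub, the hub
# reaches every node and the leaves reach none, so the global reaching
# centrality is exactly 1; a directed cycle has no hierarchy and scores 0.
def _example_grc_star_and_cycle():
    import networkx as nx  # local import so the sketch stays self-contained

    hub_and_spokes = nx.DiGraph([(0, leaf) for leaf in range(1, 6)])
    assert nx.global_reaching_centrality(hub_and_spokes) == 1.0
    # Every node of a directed cycle reaches every other node equally well.
    assert nx.global_reaching_centrality(nx.cycle_graph(5, nx.DiGraph)) == 0.0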


@nx._dispatchable(edge_attrs="weight")
def local_reaching_centrality(G, v, paths=None, weight=None, normalized=True):
    """Returns the local reaching centrality of a node in a directed
    graph.

    The *local reaching centrality* of a node in a directed graph is the
    proportion of other nodes reachable from that node [1]_.

    Parameters
    ----------
    G : DiGraph
        A NetworkX DiGraph.

    v : node
        A node in the directed graph `G`.

    paths : dictionary (default=None)
        If this is not `None` it must be a dictionary representation
        of single-source shortest paths, as computed by, for example,
        :func:`networkx.shortest_path` with source node `v`. Use this
        keyword argument if you intend to invoke this function many
        times but don't want the paths to be recomputed each time.

    weight : None or string, optional (default=None)
        Attribute to use for edge weights. If `None`, each edge weight
        is assumed to be one. A higher weight implies a stronger
        connection between nodes and a *shorter* path length.

    normalized : bool, optional (default=True)
        Whether to normalize the edge weights by the total sum of edge
        weights.

    Returns
    -------
    h : float
        The local reaching centrality of the node ``v`` in the graph
        ``G``.

    Examples
    --------
    >>> G = nx.DiGraph()
    >>> G.add_edges_from([(1, 2), (1, 3)])
    >>> nx.local_reaching_centrality(G, 3)
    0.0
    >>> G.add_edge(3, 2)
    >>> nx.local_reaching_centrality(G, 3)
    0.5

    See also
    --------
    global_reaching_centrality

    References
    ----------
    .. [1] Mones, Enys, Lilla Vicsek, and Tamás Vicsek.
           "Hierarchy Measure for Complex Networks."
           *PLoS ONE* 7.3 (2012): e33799.
           https://doi.org/10.1371/journal.pone.0033799
    """
    # Corner case: graph with single node containing a self-loop
    if (total_weight := G.size(weight=weight)) > 0 and len(G) == 1:
        raise nx.NetworkXError(
            "local_reaching_centrality of a single node with self-loop not well-defined"
        )
    if paths is None:
        if nx.is_negatively_weighted(G, weight=weight):
            raise nx.NetworkXError("edge weights must be positive")
        if total_weight <= 0:
            raise nx.NetworkXError("Size of G must be positive")
        if weight is not None:
            # Interpret weights as lengths.
            def as_distance(u, v, d):
                return total_weight / d.get(weight, 1)

            paths = nx.shortest_path(G, source=v, weight=as_distance)
        else:
            paths = nx.shortest_path(G, source=v)
    # If the graph is unweighted, simply return the proportion of nodes
    # reachable from the source node ``v``.
    if weight is None and G.is_directed():
        return (len(paths) - 1) / (len(G) - 1)
    if normalized and weight is not None:
        norm = G.size(weight=weight) / G.size()
    else:
        norm = 1
    # TODO This can be trivially parallelized.
    avgw = (_average_weight(G, path, weight=weight) for path in paths.values())
    sum_avg_weight = sum(avgw) / norm
    return sum_avg_weight / (len(G) - 1)
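

# Illustrative sketch (editor's addition, not in the upstream file; the helper
# name is ours): reusing a precomputed single-source shortest-path dictionary
# via the ``paths`` keyword, as the docstring suggests, so repeated calls skip
# the path computation.
def _example_lrc_with_precomputed_paths():
    import networkx as nx  # local import so the sketch stays self-contained

    G = nx.DiGraph([(1, 2), (1, 3), (3, 2)])
    paths_from_1 = nx.shortest_path(G, source=1)
    # Both calls return the same value; the second avoids recomputing paths.
    assert nx.local_reaching_centrality(G, 1) == nx.local_reaching_centrality(
        G, 1, paths=paths_from_1
    )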
@@ -0,0 +1,141 @@
"""Copyright (c) 2015 – Thomson Licensing, SAS

Redistribution and use in source and binary forms, with or without
modification, are permitted (subject to the limitations in the
disclaimer below) provided that the following conditions are met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.

* Neither the name of Thomson Licensing, or Technicolor, nor the names
of its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.

NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE
GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT
HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""

import networkx as nx
from networkx.utils import not_implemented_for

# Authors: Erwan Le Merrer (erwan.lemerrer@technicolor.com)

__all__ = ["second_order_centrality"]


@not_implemented_for("directed")
@nx._dispatchable(edge_attrs="weight")
def second_order_centrality(G, weight="weight"):
    """Compute the second order centrality for nodes of G.

    The second order centrality of a given node is the standard deviation of
    the return times to that node of a perpetual random walk on G.

    Parameters
    ----------
    G : graph
        A NetworkX connected and undirected graph.

    weight : string or None, optional (default="weight")
        The name of an edge attribute that holds the numerical value
        used as a weight. If None then each edge has weight 1.

    Returns
    -------
    nodes : dictionary
        Dictionary keyed by node with second order centrality as the value.

    Examples
    --------
    >>> G = nx.star_graph(10)
    >>> soc = nx.second_order_centrality(G)
    >>> print(sorted(soc.items(), key=lambda x: x[1])[0][0])  # pick first id
    0

    Raises
    ------
    NetworkXException
        If the graph G is empty, not connected, or has negative weights.

    See Also
    --------
    betweenness_centrality

    Notes
    -----
    Lower values of second order centrality indicate higher centrality.

    The algorithm is from Kermarrec, Le Merrer, Sericola and Trédan [1]_.

    This code implements the analytical version of the algorithm, i.e.,
    there is no simulation of a random walk process involved. The random walk
    is here unbiased (corresponding to eq 6 of the paper [1]_), thus the
    centrality values are the standard deviations for random walk return times
    on the transformed input graph G (equal in-degree at each node, obtained
    by adding self-loops).

    Complexity of this implementation, made to run locally on a single machine,
    is O(n^3), with n the size of G, which makes it viable only for small
    graphs.

    References
    ----------
    .. [1] Anne-Marie Kermarrec, Erwan Le Merrer, Bruno Sericola, Gilles Trédan
       "Second order centrality: Distributed assessment of nodes criticity in
       complex networks", Elsevier Computer Communications 34(5):619-628, 2011.
    """
    import numpy as np

    n = len(G)

    if n == 0:
        raise nx.NetworkXException("Empty graph.")
    if not nx.is_connected(G):
        raise nx.NetworkXException("Non connected graph.")
    if any(d.get(weight, 0) < 0 for u, v, d in G.edges(data=True)):
        raise nx.NetworkXException("Graph has negative edge weights.")

    # balancing G for Metropolis-Hastings random walks
    G = nx.DiGraph(G)
    in_deg = dict(G.in_degree(weight=weight))
    d_max = max(in_deg.values())
    for i, deg in in_deg.items():
        if deg < d_max:
            G.add_edge(i, i, weight=d_max - deg)

    P = nx.to_numpy_array(G)
    P /= P.sum(axis=1)[:, np.newaxis]  # to transition probability matrix

    def _Qj(P, j):
        P = P.copy()
        P[:, j] = 0
        return P

    M = np.empty([n, n])

    for i in range(n):
        M[:, i] = np.linalg.solve(
            np.identity(n) - _Qj(P, i), np.ones([n, 1])[:, 0]
        )  # eq 3

    return dict(
        zip(
            G.nodes,
            (float(np.sqrt(2 * np.sum(M[:, i]) - n * (n + 1))) for i in range(n)),
        )
    )  # eq 6
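

# Illustrative sketch (editor's addition, not in the upstream file; the helper
# name is ours): because lower values mean higher centrality, the middle node
# of a path graph should come out best, and by symmetry every node of a cycle
# should score the same.
def _example_second_order_centrality():
    import networkx as nx  # local import so the sketch stays self-contained

    path = nx.path_graph(5)  # 0 - 1 - 2 - 3 - 4
    soc = nx.second_order_centrality(path)
    assert min(soc, key=soc.get) == 2  # the middle node has the smallest value

    cycle = nx.cycle_graph(6)
    values = list(nx.second_order_centrality(cycle).values())
    assert max(values) - min(values) < 1e-8  # all nodes tie on a cycle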
@@ -0,0 +1,340 @@
"""
Subgraph centrality and communicability betweenness.
"""

import networkx as nx
from networkx.utils import not_implemented_for

__all__ = [
    "subgraph_centrality_exp",
    "subgraph_centrality",
    "communicability_betweenness_centrality",
    "estrada_index",
]


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def subgraph_centrality_exp(G):
    r"""Returns the subgraph centrality for each node of G.

    Subgraph centrality of a node `n` is the sum of weighted closed
    walks of all lengths starting and ending at node `n`. The weights
    decrease with path length. Each closed walk is associated with a
    connected subgraph ([1]_).

    Parameters
    ----------
    G : graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with subgraph centrality as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    See Also
    --------
    subgraph_centrality:
        Alternative algorithm of the subgraph centrality for each node of G.

    Notes
    -----
    This version of the algorithm exponentiates the adjacency matrix.

    The subgraph centrality of a node `u` in G can be found using
    the matrix exponential of the adjacency matrix of G [1]_,

    .. math::

        SC(u) = (e^A)_{uu}.

    References
    ----------
    .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
       "Subgraph centrality in complex networks",
       Physical Review E 71, 056103 (2005).
       https://arxiv.org/abs/cond-mat/0504730

    Examples
    --------
    (Example from [1]_)
    >>> G = nx.Graph(
    ...     [
    ...         (1, 2),
    ...         (1, 5),
    ...         (1, 8),
    ...         (2, 3),
    ...         (2, 8),
    ...         (3, 4),
    ...         (3, 6),
    ...         (4, 5),
    ...         (4, 7),
    ...         (5, 6),
    ...         (6, 7),
    ...         (7, 8),
    ...     ]
    ... )
    >>> sc = nx.subgraph_centrality_exp(G)
    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
    ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']
    """
    # alternative implementation that calculates the matrix exponential
    import scipy as sp

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[A != 0.0] = 1
    expA = sp.linalg.expm(A)
    # convert diagonal to dictionary keyed by node
    sc = dict(zip(nodelist, map(float, expA.diagonal())))
    return sc
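

# Illustrative sketch (editor's addition, not in the upstream file; the helper
# name is ours): the value returned for each node is just the corresponding
# diagonal entry of expm(A), which this check reproduces directly.
def _example_subgraph_centrality_exp_matches_expm_diagonal():
    import networkx as nx  # local imports so the sketch stays self-contained
    import numpy as np
    import scipy as sp

    G = nx.krackhardt_kite_graph()
    sc = nx.subgraph_centrality_exp(G)
    diag = sp.linalg.expm(nx.to_numpy_array(G)).diagonal()
    assert np.allclose([sc[node] for node in G], diag)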


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def subgraph_centrality(G):
    r"""Returns subgraph centrality for each node in G.

    Subgraph centrality of a node `n` is the sum of weighted closed
    walks of all lengths starting and ending at node `n`. The weights
    decrease with path length. Each closed walk is associated with a
    connected subgraph ([1]_).

    Parameters
    ----------
    G : graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with subgraph centrality as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    See Also
    --------
    subgraph_centrality_exp:
        Alternative algorithm of the subgraph centrality for each node of G.

    Notes
    -----
    This version of the algorithm computes eigenvalues and eigenvectors
    of the adjacency matrix.

    Subgraph centrality of a node `u` in G can be found using
    a spectral decomposition of the adjacency matrix [1]_,

    .. math::

        SC(u) = \sum_{j=1}^{N} (v_{j}^{u})^2 e^{\lambda_{j}},

    where `v_j` is an eigenvector of the adjacency matrix `A` of G
    corresponding to the eigenvalue `\lambda_j`.

    Examples
    --------
    (Example from [1]_)
    >>> G = nx.Graph(
    ...     [
    ...         (1, 2),
    ...         (1, 5),
    ...         (1, 8),
    ...         (2, 3),
    ...         (2, 8),
    ...         (3, 4),
    ...         (3, 6),
    ...         (4, 5),
    ...         (4, 7),
    ...         (5, 6),
    ...         (6, 7),
    ...         (7, 8),
    ...     ]
    ... )
    >>> sc = nx.subgraph_centrality(G)
    >>> print([f"{node} {sc[node]:0.2f}" for node in sorted(sc)])
    ['1 3.90', '2 3.90', '3 3.64', '4 3.71', '5 3.64', '6 3.71', '7 3.64', '8 3.90']

    References
    ----------
    .. [1] Ernesto Estrada, Juan A. Rodriguez-Velazquez,
       "Subgraph centrality in complex networks",
       Physical Review E 71, 056103 (2005).
       https://arxiv.org/abs/cond-mat/0504730

    """
    import numpy as np

    nodelist = list(G)  # ordering of nodes in matrix
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[np.nonzero(A)] = 1
    w, v = np.linalg.eigh(A)
    vsquare = np.array(v) ** 2
    expw = np.exp(w)
    xg = vsquare @ expw
    # convert vector to dictionary keyed by node
    sc = dict(zip(nodelist, map(float, xg)))
    return sc
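

# Illustrative sketch (editor's addition, not in the upstream file; the helper
# name is ours): the spectral formula above and the matrix-exponential route in
# subgraph_centrality_exp compute the same quantity, so the two public
# functions should agree to numerical precision.
def _example_subgraph_centrality_implementations_agree():
    import networkx as nx  # local imports so the sketch stays self-contained
    import numpy as np

    G = nx.petersen_graph()
    by_spectrum = nx.subgraph_centrality(G)
    by_expm = nx.subgraph_centrality_exp(G)
    assert np.allclose(
        [by_spectrum[node] for node in G], [by_expm[node] for node in G]
    )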


@not_implemented_for("directed")
@not_implemented_for("multigraph")
@nx._dispatchable
def communicability_betweenness_centrality(G):
    r"""Returns subgraph communicability for all pairs of nodes in G.

    Communicability betweenness measure makes use of the number of walks
    connecting every pair of nodes as the basis of a betweenness centrality
    measure.

    Parameters
    ----------
    G : graph

    Returns
    -------
    nodes : dictionary
        Dictionary of nodes with communicability betweenness as the value.

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    Notes
    -----
    Let `G=(V,E)` be a simple undirected graph with `n` nodes and `m` edges,
    and `A` denote the adjacency matrix of `G`.

    Let `G(r)=(V,E(r))` be the graph resulting from
    removing all edges connected to node `r` but not the node itself.

    The adjacency matrix for `G(r)` is `A+E(r)`, where `E(r)` has nonzeros
    only in row and column `r`.

    The communicability betweenness of a node `r` is [1]_

    .. math::

        \omega_{r} = \frac{1}{C}\sum_{p}\sum_{q}\frac{G_{prq}}{G_{pq}},
        p\neq q, q\neq r,

    where
    `G_{prq}=(e^{A})_{pq} - (e^{A+E(r)})_{pq}` is the number of walks
    involving node `r`,
    `G_{pq}=(e^{A})_{pq}` is the number of walks starting
    at node `p` and ending at node `q`,
    and `C=(n-1)^{2}-(n-1)` is a normalization factor equal to the
    number of terms in the sum.

    The resulting `\omega_{r}` takes values between zero and one.
    The lower bound cannot be attained for a connected
    graph, and the upper bound is attained in the star graph.

    References
    ----------
    .. [1] Ernesto Estrada, Desmond J. Higham, Naomichi Hatano,
       "Communicability Betweenness in Complex Networks"
       Physica A 388 (2009) 764-774.
       https://arxiv.org/abs/0905.4102

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> cbc = nx.communicability_betweenness_centrality(G)
    >>> print([f"{node} {cbc[node]:0.2f}" for node in sorted(cbc)])
    ['0 0.03', '1 0.45', '2 0.51', '3 0.45', '4 0.40', '5 0.19', '6 0.03']
    """
    import numpy as np
    import scipy as sp

    nodelist = list(G)  # ordering of nodes in matrix
    n = len(nodelist)
    A = nx.to_numpy_array(G, nodelist)
    # convert to 0-1 matrix
    A[np.nonzero(A)] = 1
    expA = sp.linalg.expm(A)
    mapping = dict(zip(nodelist, range(n)))
    cbc = {}
    for v in G:
        # remove row and col of node v
        i = mapping[v]
        row = A[i, :].copy()
        col = A[:, i].copy()
        A[i, :] = 0
        A[:, i] = 0
        B = (expA - sp.linalg.expm(A)) / expA
        # sum with row/col of node v and diag set to zero
        B[i, :] = 0
        B[:, i] = 0
        B -= np.diag(np.diag(B))
        cbc[v] = float(B.sum())
        # put row and col back
        A[i, :] = row
        A[:, i] = col
    # rescale when more than two nodes
    order = len(cbc)
    if order > 2:
        scale = 1.0 / ((order - 1.0) ** 2 - (order - 1.0))
        cbc = {node: value * scale for node, value in cbc.items()}
    return cbc
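

# Illustrative sketch (editor's addition, not in the upstream file; the helper
# name is ours): a quick check of the boundary claims in the Notes above. The
# hub of a star graph lies on every walk between leaves, so it attains the
# upper bound of 1, while each leaf scores strictly less.
def _example_communicability_betweenness_star():
    import networkx as nx  # local imports so the sketch stays self-contained
    import numpy as np

    star = nx.star_graph(9)  # node 0 is the hub
    cbc = nx.communicability_betweenness_centrality(star)
    assert np.isclose(cbc[0], 1.0)  # the hub attains the upper bound
    assert all(cbc[leaf] < cbc[0] for leaf in range(1, 10))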


@nx._dispatchable
def estrada_index(G):
    r"""Returns the Estrada index of the graph G.

    The Estrada Index is a topological index of folding or 3D "compactness" ([1]_).

    Parameters
    ----------
    G : graph

    Returns
    -------
    estrada index : float

    Raises
    ------
    NetworkXError
        If the graph is not undirected and simple.

    Notes
    -----
    Let `G=(V,E)` be a simple undirected graph with `n` nodes and let
    `\lambda_{1}\leq\lambda_{2}\leq\cdots\leq\lambda_{n}`
    be a non-decreasing ordering of the eigenvalues of its adjacency
    matrix `A`. The Estrada index is ([1]_, [2]_)

    .. math::

        EE(G) = \sum_{j=1}^n e^{\lambda_j}.

    References
    ----------
    .. [1] E. Estrada, "Characterization of 3D molecular structure",
       Chem. Phys. Lett. 319, 713 (2000).
       https://doi.org/10.1016/S0009-2614(00)00158-5
    .. [2] José Antonio de la Peña, Ivan Gutman, Juan Rada,
       "Estimating the Estrada index",
       Linear Algebra and its Applications 427, 1 (2007).
       https://doi.org/10.1016/j.laa.2007.06.020

    Examples
    --------
    >>> G = nx.Graph([(0, 1), (1, 2), (1, 5), (5, 4), (2, 4), (2, 3), (4, 3), (3, 6)])
    >>> ei = nx.estrada_index(G)
    >>> print(f"{ei:0.5}")
    20.55
    """
    return sum(subgraph_centrality(G).values())
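

# Illustrative sketch (editor's addition, not in the upstream file; the helper
# name is ours): since the Estrada index sums e^{lambda_j} over the spectrum,
# it equals the trace of expm(A), i.e. the sum of all subgraph centralities,
# which this check confirms on an unweighted test graph.
def _example_estrada_index_equals_trace_of_expm():
    import networkx as nx  # local imports so the sketch stays self-contained
    import numpy as np
    import scipy as sp

    G = nx.petersen_graph()
    trace_expm = np.trace(sp.linalg.expm(nx.to_numpy_array(G)))
    assert np.isclose(nx.estrada_index(G), trace_expm)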
Some files were not shown because too many files have changed in this diff.