From c3457cc0b8bf8bd8ba650f47a5f71da4ea4815db Mon Sep 17 00:00:00 2001
From: rusty1s
Date: Sat, 17 Aug 2024 03:36:35 +0200
Subject: [PATCH 1/7] update

---
 docs/source/conf.py     | 1 +
 pyg_lib/ops/__init__.py | 6 ++++--
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index 3c235ced6..3ddc258a5 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -20,6 +20,7 @@
     'sphinx.ext.mathjax',
     'sphinx.ext.napoleon',
     'sphinx.ext.viewcode',
+    'sphinx_copybutton',
     'pyg',
 ]

diff --git a/pyg_lib/ops/__init__.py b/pyg_lib/ops/__init__.py
index bdf8efd53..40ad20460 100644
--- a/pyg_lib/ops/__init__.py
+++ b/pyg_lib/ops/__init__.py
@@ -104,7 +104,8 @@ def grouped_matmul(
     r"""Performs dense-dense matrix multiplication according to groups,
     utilizing dedicated kernels that effectively parallelize over groups.

-    Example:
+    .. code-block:: python
+
         inputs = [torch.randn(5, 16), torch.randn(3, 32)]
         others = [torch.randn(16, 32), torch.randn(32, 64)]

@@ -147,7 +148,8 @@ def segment_matmul(
     the first dimension of :obj:`inputs` as given by :obj:`ptr`, utilizing
     dedicated kernels that effectively parallelize over groups.

-    Example:
+    .. code-block:: python
+
         inputs = torch.randn(8, 16)
         ptr = torch.tensor([0, 5, 8])
         other = torch.randn(2, 16, 32)

From 6089ab215397f313a209b549d4ba8bf782dee762 Mon Sep 17 00:00:00 2001
From: rusty1s
Date: Sat, 17 Aug 2024 03:41:41 +0200
Subject: [PATCH 2/7] Trigger Build

From 503b71c1de5de23be5b114ef901ce59b8f011d77 Mon Sep 17 00:00:00 2001
From: rusty1s
Date: Sat, 17 Aug 2024 03:55:02 +0200
Subject: [PATCH 3/7] update

---
 docs/source/conf.py         |  1 +
 pyg_lib/sampler/__init__.py | 10 +++++-----
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index 3ddc258a5..5be4ae2ed 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -21,6 +21,7 @@
     'sphinx.ext.napoleon',
     'sphinx.ext.viewcode',
     'sphinx_copybutton',
+    'sphinx_autodoc_typehints',
     'pyg',
 ]

diff --git a/pyg_lib/sampler/__init__.py b/pyg_lib/sampler/__init__.py
index 301701d9a..1d211526c 100644
--- a/pyg_lib/sampler/__init__.py
+++ b/pyg_lib/sampler/__init__.py
@@ -1,4 +1,4 @@
-from typing import List, Tuple, Optional, Dict
+from typing import Dict, List, Optional, Tuple

 import torch
 from torch import Tensor
@@ -34,12 +34,12 @@ def neighbor_sample(
     binary search to find neighbors that fulfill temporal constraints.

     Args:
-        rowptr (torch.Tensor): Compressed source node indices.
+        rowptr: Compressed source node indices.
         col (torch.Tensor): Target node indices.
         seed (torch.Tensor): The seed node indices.
-        num_neighbors (List[int]): The number of neighbors to sample for each
-            node in each iteration. If an entry is set to :obj:`-1`, all
-            neighbors will be included.
+        num_neighbors: The number of neighbors to sample for each node in each
+            iteration.
+            If an entry is set to :obj:`-1`, all neighbors will be included.
         node_time (torch.Tensor, optional): Timestamps for the nodes in the
             graph. If set, temporal sampling will be used such that neighbors
             are guaranteed to fulfill temporal constraints, *i.e.* sampled

From a7f9b269c0bb5cc121bbe6347b0f720847c220dc Mon Sep 17 00:00:00 2001
From: rusty1s
Date: Sat, 17 Aug 2024 05:01:42 +0200
Subject: [PATCH 4/7] update

---
 docs/source/conf.py           |  9 ++++
 pyg_lib/__init__.py           |  4 +-
 pyg_lib/home.py               |  4 +-
 pyg_lib/ops/__init__.py       | 98 ++++++++++++++---------------
 pyg_lib/partition/__init__.py | 19 +++----
 pyg_lib/sampler/__init__.py   | 97 ++++++++++++++++------------------
 6 files changed, 105 insertions(+), 126 deletions(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index 5be4ae2ed..f70ee47d2 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -1,3 +1,4 @@
+import copy
 import datetime
 import os.path as osp
 import sys
@@ -38,3 +39,11 @@
     'python': ('http://docs.python.org', None),
     'torch': ('https://pytorch.org/docs/master', None),
 }
+
+typehints_use_rtype = False
+typehints_defaults = 'comma'
+
+
+def setup(app):
+    # Do not drop type hints in signatures:
+    del app.events.listeners['autodoc-process-signature']
diff --git a/pyg_lib/__init__.py b/pyg_lib/__init__.py
index dd83a0882..f0e27b221 100644
--- a/pyg_lib/__init__.py
+++ b/pyg_lib/__init__.py
@@ -34,15 +34,15 @@ def load_library(lib_name: str) -> None:
 load_library('libpyg')

 import pyg_lib.ops  # noqa
-import pyg_lib.sampler  # noqa
 import pyg_lib.partition  # noqa
+import pyg_lib.sampler  # noqa


 def cuda_version() -> int:
     r"""Returns the CUDA version for which :obj:`pyg_lib` was compiled with.

     Returns:
-        (int): The CUDA version.
+        The CUDA version.
     """
     return torch.ops.pyg.cuda_version()

diff --git a/pyg_lib/home.py b/pyg_lib/home.py
index 0f41c8c79..1674a5856 100644
--- a/pyg_lib/home.py
+++ b/pyg_lib/home.py
@@ -15,7 +15,7 @@ def get_home_dir() -> str:
     variable :obj:`$PYG_LIB_HOME` which defaults to :obj:`"~/.cache/pyg_lib"`.

     Returns:
-        (str): The cache directory.
+        The cache directory.
     """
     if _home_dir is not None:
         return _home_dir
@@ -29,7 +29,7 @@ def set_home_dir(path: str):
     r"""Sets the cache directory used for storing all :obj:`pyg-lib` data.

     Args:
-        path (str): The path to a local folder.
+        path: The path to a local folder.
     """
     global _home_dir
     _home_dir = path
diff --git a/pyg_lib/ops/__init__.py b/pyg_lib/ops/__init__.py
index 40ad20460..ecfe43db4 100644
--- a/pyg_lib/ops/__init__.py
+++ b/pyg_lib/ops/__init__.py
@@ -117,16 +117,12 @@ def grouped_matmul(
         assert outs[1] == inputs[1] @ others[1]

     Args:
-        inputs (List[torch.Tensor]): List of left operand 2D matrices of shapes
-            :obj:`[N_i, K_i]`.
-        others (List[torch.Tensor]): List of right operand 2D matrices of
-            shapes :obj:`[K_i, M_i]`.
-        biases (List[torch.Tensor], optional): Optional bias terms to apply for
-            each element. (default: :obj:`None`)
+        inputs: List of left operand 2D matrices of shapes :obj:`[N_i, K_i]`.
+        others: List of right operand 2D matrices of shapes :obj:`[K_i, M_i]`.
+        biases: Optional bias terms to apply for each element.

     Returns:
-        List[torch.Tensor]: List of 2D output matrices of shapes
-            :obj:`[N_i, M_i]`.
+        List of 2D output matrices of shapes :obj:`[N_i, M_i]`.
     """
     # Combine inputs into a single tuple for autograd:
     outs = list(GroupedMatmul.apply(tuple(inputs + others)))
@@ -160,18 +156,14 @@ def segment_matmul(
         assert out[5:8] == inputs[5:8] @ other[1]

     Args:
-        inputs (torch.Tensor): The left operand 2D matrix of shape
-            :obj:`[N, K]`.
-        ptr (torch.Tensor): Compressed vector of shape :obj:`[B + 1]`, holding
-            the boundaries of segments. For best performance, given as a CPU
-            tensor.
-        other (torch.Tensor): The right operand 3D tensor of shape
-            :obj:`[B, K, M]`.
-        bias (torch.Tensor, optional): Optional bias term of shape
-            :obj:`[B, M]` (default: :obj:`None`)
+        inputs: The left operand 2D matrix of shape :obj:`[N, K]`.
+        ptr: Compressed vector of shape :obj:`[B + 1]`, holding the boundaries
+            of segments. For best performance, given as a CPU tensor.
+        other: The right operand 3D tensor of shape :obj:`[B, K, M]`.
+        bias: The bias term of shape :obj:`[B, M]`.

     Returns:
-        torch.Tensor: The 2D output matrix of shape :obj:`[N, M]`.
+        The 2D output matrix of shape :obj:`[N, M]`.
     """
     out = torch.ops.pyg.segment_matmul(inputs, ptr, other)
     if bias is not None:
@@ -198,15 +190,13 @@ def sampled_add(
     being more runtime and memory-efficient.

     Args:
-        left (torch.Tensor): The left tensor.
-        right (torch.Tensor): The right tensor.
-        left_index (torch.LongTensor, optional): The values to sample from the
-            :obj:`left` tensor. (default: :obj:`None`)
-        right_index (torch.LongTensor, optional): The values to sample from the
-            :obj:`right` tensor. (default: :obj:`None`)
+        left: The left tensor.
+        right: The right tensor.
+        left_index: The values to sample from the :obj:`left` tensor.
+        right_index: The values to sample from the :obj:`right` tensor.

     Returns:
-        torch.Tensor: The output tensor.
+        The output tensor.
     """
     out = torch.ops.pyg.sampled_op(left, right, left_index, right_index, "add")
     return out
@@ -230,15 +220,13 @@ def sampled_sub(
     being more runtime and memory-efficient.

     Args:
-        left (torch.Tensor): The left tensor.
-        right (torch.Tensor): The right tensor.
-        left_index (torch.LongTensor, optional): The values to sample from the
-            :obj:`left` tensor. (default: :obj:`None`)
-        right_index (torch.LongTensor, optional): The values to sample from the
-            :obj:`right` tensor. (default: :obj:`None`)
+        left: The left tensor.
+        right: The right tensor.
+        left_index: The values to sample from the :obj:`left` tensor.
+        right_index: The values to sample from the :obj:`right` tensor.

     Returns:
-        torch.Tensor: The output tensor.
+        The output tensor.
     """
     out = torch.ops.pyg.sampled_op(left, right, left_index, right_index, "sub")
     return out
@@ -262,15 +250,13 @@ def sampled_mul(
     thus being more runtime and memory-efficient.

     Args:
-        left (torch.Tensor): The left tensor.
-        right (torch.Tensor): The right tensor.
-        left_index (torch.LongTensor, optional): The values to sample from the
-            :obj:`left` tensor. (default: :obj:`None`)
-        right_index (torch.LongTensor, optional): The values to sample from the
-            :obj:`right` tensor. (default: :obj:`None`)
+        left: The left tensor.
+        right: The right tensor.
+        left_index: The values to sample from the :obj:`left` tensor.
+        right_index: The values to sample from the :obj:`right` tensor.

     Returns:
-        torch.Tensor: The output tensor.
+        The output tensor.
     """
     out = torch.ops.pyg.sampled_op(left, right, left_index, right_index, "mul")
     return out
@@ -294,15 +280,13 @@ def sampled_div(
     being more runtime and memory-efficient.

     Args:
-        left (torch.Tensor): The left tensor.
-        right (torch.Tensor): The right tensor.
-        left_index (torch.LongTensor, optional): The values to sample from the
-            :obj:`left` tensor. (default: :obj:`None`)
-        right_index (torch.LongTensor, optional): The values to sample from the
-            :obj:`right` tensor. (default: :obj:`None`)
+        left: The left tensor.
+        right: The right tensor.
+        left_index: The values to sample from the :obj:`left` tensor.
+        right_index: The values to sample from the :obj:`right` tensor.

     Returns:
-        torch.Tensor: The output tensor.
+        The output tensor.
     """
     out = torch.ops.pyg.sampled_op(left, right, left_index, right_index, "div")
     return out
@@ -323,13 +307,12 @@ def index_sort(
     device.

     Args:
-        inputs (torch.Tensor): A vector with positive integer values.
-        max_value (int, optional): The maximum value stored inside
-            :obj:`inputs`. This value can be an estimation, but needs to be
-            greater than or equal to the real maximum. (default: :obj:`None`)
+        inputs: A vector with positive integer values.
+        max_value: The maximum value stored inside :obj:`inputs`. This value
+            can be an estimation, but needs to be greater than or equal to the
+            real maximum.

     Returns:
-        Tuple[torch.LongTensor, torch.LongTensor]: A tuple containing sorted
         values and indices of the elements in the original :obj:`input`
         tensor.
     """
@@ -349,14 +332,6 @@ def softmax_csr(
     :attr:`ptr`, and then proceeds to compute the softmax individually for
     each group.

-    Args:
-        src (Tensor): The source tensor.
-        ptr (LongTensor): Groups defined by CSR representation.
-        dim (int, optional): The dimension in which to normalize.
-            (default: :obj:`0`)
-
-    :rtype: :class:`Tensor`
-
     Examples:
         >>> src = torch.randn(4, 4)
         >>> ptr = torch.tensor([0, 4])
         >>> softmax_csr(src, ptr)
         tensor([[0.0157, 0.0984, 0.1250, 0.4523],
                 [0.1453, 0.2591, 0.5907, 0.2410],
                 [0.0598, 0.2923, 0.1206, 0.0921],
                 [0.7792, 0.3502, 0.1638, 0.2145]])
+
+    Args:
+        src: The source tensor.
+        ptr: Groups defined by CSR representation.
+        dim: The dimension in which to normalize.
     """
     dim = dim + src.dim() if dim < 0 else dim
     return torch.ops.pyg.softmax_csr(src, ptr, dim)
diff --git a/pyg_lib/partition/__init__.py b/pyg_lib/partition/__init__.py
index 8c499ddd0..08bb31c72 100644
--- a/pyg_lib/partition/__init__.py
+++ b/pyg_lib/partition/__init__.py
@@ -18,19 +18,16 @@ def metis(
     `_ paper.

     Args:
-        rowptr (torch.Tensor): Compressed source node indices.
-        col (torch.Tensor): Target node indices.
-        num_partitions (int): The number of partitions.
-        node_weight (torch.Tensor, optional): Optional node weights.
-            (default: :obj:`None`)
-        edge_weight (torch.Tensor, optional): Optional edge weights.
-            (default: :obj:`None`)
-        recursive (bool, optional): If set to :obj:`True`, will use multilevel
-            recursive bisection instead of multilevel k-way partitioning.
-            (default: :obj:`False`)
+        rowptr: Compressed source node indices.
+        col: Target node indices.
+        num_partitions: The number of partitions.
+        node_weight: The node weights.
+        edge_weight: The edge weights.
+        recursive: If set to :obj:`True`, will use multilevel recursive
+            bisection instead of multilevel k-way partitioning.

     Returns:
-        torch.Tensor: A vector that assings each node to a partition.
+        A vector that assings each node to a partition.
     """
     return torch.ops.pyg.metis(rowptr, col, num_partitions, node_weight,
                                edge_weight, recursive)
diff --git a/pyg_lib/sampler/__init__.py b/pyg_lib/sampler/__init__.py
index 1d211526c..2e1eb70ee 100644
--- a/pyg_lib/sampler/__init__.py
+++ b/pyg_lib/sampler/__init__.py
@@ -35,52 +35,47 @@ def neighbor_sample(

     Args:
         rowptr: Compressed source node indices.
-        col (torch.Tensor): Target node indices.
-        seed (torch.Tensor): The seed node indices.
+        col: Target node indices.
+        seed: The seed node indices.
         num_neighbors: The number of neighbors to sample for each node in each
             iteration.
             If an entry is set to :obj:`-1`, all neighbors will be included.
-        node_time (torch.Tensor, optional): Timestamps for the nodes in the
-            graph. If set, temporal sampling will be used such that neighbors
-            are guaranteed to fulfill temporal constraints, *i.e.* sampled
+        node_time: Timestamps for the nodes in the graph.
+            If set, temporal sampling will be used such that neighbors are
+            guaranteed to fulfill temporal constraints, *i.e.* sampled
             nodes have an earlier or equal timestamp than the seed node.
             If used, the :obj:`col` vector needs to be sorted according to time
-            within individual neighborhoods. Requires :obj:`disjoint=True`.
+            within individual neighborhoods.
+            Requires :obj:`disjoint=True`.
             Only either :obj:`node_time` or :obj:`edge_time` can be specified.
-            (default: :obj:`None`)
-        edge_time (torch.Tensor, optional): Timestamps for the edges in the
-            graph. If set, temporal sampling will be used such that neighbors
-            are guaranteed to fulfill temporal constraints, *i.e.* sampled
+        edge_time: Timestamps for the edges in the graph.
+            If set, temporal sampling will be used such that neighbors are
+            guaranteed to fulfill temporal constraints, *i.e.* sampled
             edges have an earlier or equal timestamp than the seed node.
             If used, the :obj:`col` vector needs to be sorted according to time
-            within individual neighborhoods. Requires :obj:`disjoint=True`.
+            within individual neighborhoods.
+            Requires :obj:`disjoint=True`.
             Only either :obj:`node_time` or :obj:`edge_time` can be specified.
-            (default: :obj:`None`)
-        seed_time (torch.Tensor, optional): Optional values to override the
-            timestamp for seed nodes. If not set, will use timestamps in
-            :obj:`node_time` as default for seed nodes.
+        seed_time: Optional values to override the timestamp for seed nodes.
+            If not set, will use timestamps in :obj:`node_time` as default for
+            seed nodes.
             Needs to be specified in case edge-level sampling is used via
-            :obj:`edge_time`. (default: :obj:`None`)
-        edge_weight (torch.Tensor, optional): If given, will perform biased
-            sampling based on the weight of each edge. (default: :obj:`None`)
-        csc (bool, optional): If set to :obj:`True`, assumes that the graph is
-            given in CSC format :obj:`(colptr, row)`. (default: :obj:`False`)
-        replace (bool, optional): If set to :obj:`True`, will sample with
-            replacement. (default: :obj:`False`)
-        directed (bool, optional): If set to :obj:`False`, will include all
-            edges between all sampled nodes. (default: :obj:`True`)
-        disjoint (bool, optional): If set to :obj:`True` , will create disjoint
-            subgraphs for every seed node. (default: :obj:`False`)
-        temporal_strategy (string, optional): The sampling strategy when using
-            temporal sampling (:obj:`"uniform"`, :obj:`"last"`).
-            (default: :obj:`"uniform"`)
-        return_edge_id (bool, optional): If set to :obj:`False`, will not
-            return the indices of edges of the original graph.
-            (default: :obj: `True`)
+            :obj:`edge_time`.
+        edge_weight: If given, will perform biased sampling based on the weight
+            of each edge.
+        csc: If set to :obj:`True`, assumes that the graph is given in CSC
+            format :obj:`(colptr, row)`.
+        replace: If set to :obj:`True`, will sample with replacement.
+        directed: If set to :obj:`False`, will include all edges between all
+            sampled nodes.
+        disjoint: If set to :obj:`True` , will create disjoint subgraphs for
+            every seed node.
+        temporal_strategy: The sampling strategy when using temporal sampling
+            (:obj:`"uniform"`, :obj:`"last"`).
+        return_edge_id: If set to :obj:`False`, will not return the indices of
+            edges of the original graph.

     Returns:
-        (torch.Tensor, torch.Tensor, torch.Tensor, Optional[torch.Tensor],
-        List[int], List[int]): Row indices, col indices of the returned
         subtree/subgraph, as well as original node indices for all nodes
         sampled.
         In addition, may return the indices of edges of the original graph.
@@ -176,16 +171,16 @@ def subgraph(
     :obj:`(rowptr, col)`, containing only the nodes in :obj:`nodes`.

     Args:
-        rowptr (torch.Tensor): Compressed source node indices.
-        col (torch.Tensor): Target node indices.
-        nodes (torch.Tensor): Node indices of the induced subgraph.
-        return_edge_id (bool, optional): If set to :obj:`False`, will not
+        rowptr: Compressed source node indices.
+        col: Target node indices.
+        nodes: Node indices of the induced subgraph.
+        return_edge_id: If set to :obj:`False`, will not
             return the indices of edges of the original graph contained in the
-            induced subgraph. (default: :obj:`True`)
+            induced subgraph.

     Returns:
-        (torch.Tensor, torch.Tensor, Optional[torch.Tensor]): Compressed source
-            node indices and target node indices of the induced subgraph.
+        Compressed source node indices and target node indices of the induced
+        subgraph.
         In addition, may return the indices of edges of the original graph.
     """
     return torch.ops.pyg.subgraph(rowptr, col, nodes, return_edge_id)
@@ -205,19 +200,17 @@ def random_walk(
     `_ paper.

     Args:
-        rowptr (torch.Tensor): Compressed source node indices.
-        col (torch.Tensor): Target node indices.
-        seed (torch.Tensor): Seed node indices from where random walks start.
-        walk_length (int): The walk length of a random walk.
-        p (float, optional): Likelihood of immediately revisiting a node in the
-            walk. (default: :obj:`1.0`)
-        q (float, optional): Control parameter to interpolate between
-            breadth-first strategy and depth-first strategy.
-            (default: :obj:`1.0`)
+        rowptr: Compressed source node indices.
+        col: Target node indices.
+        seed: Seed node indices from where random walks start.
+        walk_length: The walk length of a random walk.
+        p: Likelihood of immediately revisiting a node in the walk.
+        q: Control parameter to interpolate between breadth-first strategy and
+            depth-first strategy.

     Returns:
-        torch.Tensor: A tensor of shape :obj:`[seed.size(0), walk_length + 1]`,
-            holding the nodes indices of each walk for each seed node.
+        A tensor of shape :obj:`[seed.size(0), walk_length + 1]`, holding the
+        nodes indices of each walk for each seed node.
     """
     return torch.ops.pyg.random_walk(rowptr, col, seed, walk_length, p, q)

From ac331297b90d94d14392b47d1a92bd02b4e6d3fa Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Sat, 17 Aug 2024 03:02:32 +0000
Subject: [PATCH 5/7] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 docs/source/conf.py         | 1 -
 pyg_lib/sampler/__init__.py | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index f70ee47d2..7a7783794 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -1,4 +1,3 @@
-import copy
 import datetime
 import os.path as osp
 import sys
diff --git a/pyg_lib/sampler/__init__.py b/pyg_lib/sampler/__init__.py
index 2e1eb70ee..11dd53b00 100644
--- a/pyg_lib/sampler/__init__.py
+++ b/pyg_lib/sampler/__init__.py
@@ -179,7 +179,7 @@ def subgraph(
             induced subgraph.

     Returns:
-        Compressed source node indices and target node indices of the induced
+        Compressed source node indices and target node indices of the induced
         subgraph.
         In addition, may return the indices of edges of the original graph.
     """
     return torch.ops.pyg.subgraph(rowptr, col, nodes, return_edge_id)

From 82aa7198270f6bc878c4c96396ec40003902684c Mon Sep 17 00:00:00 2001
From: rusty1s
Date: Sat, 17 Aug 2024 05:05:42 +0200
Subject: [PATCH 6/7] update

---
 docs/source/conf.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index f70ee47d2..b002f8d52 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -1,9 +1,9 @@
-import copy
 import datetime
 import os.path as osp
 import sys

 import pyg_sphinx_theme
+from sphinx.application import Sphinx

 import pyg_lib

@@ -44,6 +44,6 @@
 typehints_defaults = 'comma'


-def setup(app):
+def setup(app: Sphinx) -> None:
     # Do not drop type hints in signatures:
     del app.events.listeners['autodoc-process-signature']

From 5539f0dc6b486e0a9f160634d45e2a7d905c275c Mon Sep 17 00:00:00 2001
From: rusty1s
Date: Sat, 17 Aug 2024 05:07:18 +0200
Subject: [PATCH 7/7] update

---
 docs/source/conf.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/source/conf.py b/docs/source/conf.py
index b002f8d52..a32cf9c2f 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -45,5 +45,6 @@


 def setup(app: Sphinx) -> None:
+    r"""Setup sphinx application."""
     # Do not drop type hints in signatures:
     del app.events.listeners['autodoc-process-signature']
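
The series above only reshuffles docstrings and Sphinx configuration, so runtime behavior is untouched. As a quick sanity check of the reworded `neighbor_sample` entry, here is a minimal sketch with a made-up CSR graph and fan-outs; the six-element unpacking simply mirrors the tuple annotation that the patch removes from the docstring and is not guaranteed beyond that.

```python
import torch

import pyg_lib

# Toy directed graph in CSR form: 4 nodes, 5 edges (values are arbitrary).
rowptr = torch.tensor([0, 2, 3, 4, 5])  # compressed source node indices
col = torch.tensor([1, 2, 2, 3, 0])     # target node indices
seed = torch.tensor([0])                # start sampling at node 0

# Sample at most two neighbors per node over two hops:
out = pyg_lib.sampler.neighbor_sample(rowptr, col, seed, [2, 2])

# Row/col indices of the sampled subgraph, original node indices, optional
# edge indices, and per-hop node/edge counts (layout per the removed
# annotation above):
sub_row, sub_col, node_id, edge_id, nodes_per_hop, edges_per_hop = out
print(node_id)
```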
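
The `metis` and `random_walk` docstrings are rewritten above without inline examples, so a short sketch may help reviewers. The ring graph is arbitrary, and the partitioning call assumes a `pyg-lib` build with METIS support.

```python
import torch

import pyg_lib

# Undirected 4-node ring stored in CSR form (both edge directions present).
rowptr = torch.tensor([0, 2, 4, 6, 8])
col = torch.tensor([1, 3, 0, 2, 1, 3, 0, 2])

# Assign every node to one of two partitions (requires METIS support):
cluster = pyg_lib.partition.metis(rowptr, col, num_partitions=2)

# Walk three steps from every node; per the docstring, the output has shape
# [seed.size(0), walk_length + 1]:
seed = torch.arange(4)
walks = pyg_lib.sampler.random_walk(rowptr, col, seed, walk_length=3)
assert walks.size() == (4, 4)
```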
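
`index_sort` is another function whose argument list is tightened above; a minimal sketch, with an arbitrary input vector and `max_value` chosen at or above the true maximum as the docstring requires:

```python
import torch

import pyg_lib

# Sort a vector of positive integers and get the permutation back:
inputs = torch.tensor([3, 0, 2, 2, 1])
values, perm = pyg_lib.ops.index_sort(inputs, max_value=4)

# Sorted values are the original entries gathered by the returned indices.
assert torch.equal(values, inputs[perm])
```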
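
Finally, a sketch for the `sampled_*` operators touched above, using `sampled_add` with made-up shapes; reading the Args as documented, the call samples rows from each operand and adds them in one fused step, so only the output shape is asserted here.

```python
import torch

import pyg_lib

left, right = torch.randn(10, 16), torch.randn(8, 16)
left_index = torch.tensor([0, 2, 4, 6])    # rows sampled from `left`
right_index = torch.tensor([1, 3, 5, 7])   # rows sampled from `right`

# Fused form of left[left_index] + right[right_index]:
out = pyg_lib.ops.sampled_add(left, right, left_index, right_index)
assert out.size() == (4, 16)
```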