From ebd3899dd194285674508a3cf9f2d97161dc2fd4 Mon Sep 17 00:00:00 2001 From: Lucas Jeub Date: Mon, 16 Dec 2024 14:45:38 +0100 Subject: [PATCH] fix some return type warnings --- python/python/raphtory/__init__.py | 14 +- python/python/raphtory/__init__.pyi | 590 +++++------------- .../python/raphtory/algorithms/__init__.pyi | 98 +-- python/python/raphtory/graph_gen/__init__.pyi | 5 +- .../python/raphtory/graph_loader/__init__.pyi | 12 +- python/python/raphtory/graphql/__init__.pyi | 142 ++--- .../python/raphtory/node_state/__init__.pyi | 48 +- python/python/raphtory/vectors/__init__.pyi | 81 +-- raphtory/src/python/graph/algorithm_result.rs | 39 +- 9 files changed, 333 insertions(+), 696 deletions(-) diff --git a/python/python/raphtory/__init__.py b/python/python/raphtory/__init__.py index a043b3a28..462cc2eb0 100644 --- a/python/python/raphtory/__init__.py +++ b/python/python/raphtory/__init__.py @@ -1,12 +1,12 @@ -import sys +import sys as _sys from .raphtory import * -sys.modules["raphtory.node_state"] = node_state -sys.modules["raphtory.algorithms"] = algorithms -sys.modules["raphtory.graph_gen"] = graph_gen -sys.modules["raphtory.graph_loader"] = graph_loader -sys.modules["raphtory.vectors"] = vectors -sys.modules["raphtory.graphql"] = graphql +_sys.modules["raphtory.node_state"] = node_state +_sys.modules["raphtory.algorithms"] = algorithms +_sys.modules["raphtory.graph_gen"] = graph_gen +_sys.modules["raphtory.graph_loader"] = graph_loader +_sys.modules["raphtory.vectors"] = vectors +_sys.modules["raphtory.graphql"] = graphql __doc__ = raphtory.__doc__ if hasattr(raphtory, "__all__"): diff --git a/python/python/raphtory/__init__.pyi b/python/python/raphtory/__init__.pyi index 9882a820f..72307562f 100644 --- a/python/python/raphtory/__init__.pyi +++ b/python/python/raphtory/__init__.pyi @@ -7,6 +7,7 @@ # # ############################################################################### + from typing import * from raphtory import * from raphtory.algorithms import * @@ -17,38 +18,47 @@ from raphtory.typing import * from datetime import datetime from pandas import DataFrame -class AlgorithmResult(object): +class AlgorithmResult(object): + def __len__(self): """Return len(self).""" def __repr__(self): """Return repr(self).""" - def get(self, key: Any): + def get(self, key: InputNode) -> Optional[Any]: """ Returns the value corresponding to the provided key Arguments: - key: The key of type `H` for which the value is to be retrieved. + key (InputNode): The node for which the value is to be retrieved. + + Returns: + Optional[Any]: The value for the node or `None` if the value does not exist. """ - def get_all(self): + def get_all(self) -> dict[Node, Any]: """ Returns a Dict containing all the nodes (as keys) and their corresponding values (values) or none. Returns: - A dict of nodes and their values + dict[Node, Any]: A dict of nodes and their values """ - def get_all_values(self): - """Returns a a list of all values""" + def get_all_values(self) -> list[Any]: + """ + Get all values - def get_all_with_names(self): + Returns: + list[Any]: the values for each node as a list + """ + + def get_all_with_names(self) -> dict[str, Any]: """ Returns a dict with node names and values Returns: - a dict with node names and values + dict[str, Any]: a dict with node names and values """ def group_by(self): @@ -60,27 +70,37 @@ class AlgorithmResult(object): containing keys of type `H` that share the same value. 
""" - def max(self): - """Returns a tuple of the max result with its key""" + def max(self) -> Tuple[Node, Any]: + """ + Find node with maximum value + + Returns: + Tuple[Node, Any]: The node and maximum value. + """ def median(self): """Returns a tuple of the median result with its key""" - def min(self): - """Returns a tuple of the min result with its key""" + def min(self) -> Tuple[Node, Any]: + """ + Find node with minimum value + + Returns: + Tuple[Node, Any]: The node and minimum value. + """ - def sort_by_node(self, reverse: Any = True): + def sort_by_node(self, reverse: bool = True) -> list[Tuple[Node, Any]]: """ Sorts by node id in ascending or descending order. Arguments: - reverse: If `true`, sorts the result in descending order; otherwise, sorts in ascending order. Defaults to True. + reverse (bool): If `true`, sorts the result in descending order; otherwise, sorts in ascending order. Defaults to True. Returns: - A sorted list of tuples containing node names and values. + list[Tuple[Node, Any]]: A sorted list of tuples containing nodes and values. """ - def sort_by_node_name(self, reverse: bool = True): + def sort_by_node_name(self, reverse: bool = True) -> list[Tuple[Node, Any]]: """ The function `sort_by_node_name` sorts a vector of tuples containing a node and an optional value by the node name in either ascending or descending order. @@ -91,10 +111,10 @@ class AlgorithmResult(object): ascending order. Returns: - The function sort_by_node_name returns a vector of tuples. Each tuple contains a Node and value + list[Tuple[Node, Any]]: The function sort_by_node_name returns a vector of tuples. Each tuple contains a Node and value """ - def sort_by_value(self, reverse: bool = True): + def sort_by_value(self, reverse: bool = True) -> list[Tuple[Node, Any]]: """ Sorts the `AlgorithmResult` by its values in ascending or descending order. @@ -102,7 +122,7 @@ class AlgorithmResult(object): reverse (bool): If `true`, sorts the result in descending order, otherwise, sorts in ascending order. Defaults to True. Returns: - A sorted vector of tuples containing keys of type `H` and values of type `Y`. + list[Tuple[Node, Any]]: A sorted vector of tuples containing Nodes and values. """ def to_df(self) -> DataFrame: @@ -113,10 +133,7 @@ class AlgorithmResult(object): DataFrame: A `pandas.DataFrame` containing the result """ - def to_string(self): - """Returns a formatted string representation of the algorithm.""" - - def top_k(self, k: int, percentage: bool = False, reverse: bool = True): + def top_k(self, k: int, percentage: bool = False, reverse: bool = True) -> list[Tuple[Node, Any]]: """ Retrieves the top-k elements from the `AlgorithmResult` based on its values. @@ -126,13 +143,13 @@ class AlgorithmResult(object): reverse (bool): If `True`, retrieves the elements in descending order, otherwise, in ascending order. Defaults to True. Returns: - An Option containing a vector of tuples with keys of type `H` and values of type `Y`. + list[Tuple[Node, Any]]: List of tuples with keys of nodes and values of type `Y`. If percentage is true, the returned vector contains the top `k` percentage of elements. If percentage is false, the returned vector contains the top `k` elements. Returns None if the result is empty or if `k` is 0. 
""" -class ConstProperties(object): +class ConstProperties(object): """A view of constant properties of an entity""" def __contains__(self, key): @@ -206,38 +223,45 @@ class ConstProperties(object): lists the property values """ -class DiskGraphStorage(object): +class DiskGraphStorage(object): + def __repr__(self): """Return repr(self).""" - def append_node_temporal_properties(self, location, chunk_size=20000000): ... - def graph_dir(self): ... + def append_node_temporal_properties(self, location, chunk_size=20000000): + ... + + def graph_dir(self): + ... + @staticmethod - def load_from_dir(graph_dir): ... + def load_from_dir(graph_dir): + ... + @staticmethod - def load_from_pandas(graph_dir, edge_df, time_col, src_col, dst_col): ... + def load_from_pandas(graph_dir, edge_df, time_col, src_col, dst_col): + ... + @staticmethod - def load_from_parquets( - graph_dir, - layer_parquet_cols, - node_properties=None, - chunk_size=10000000, - t_props_chunk_size=10000000, - num_threads=4, - node_type_col=None, - node_id_col=None, - ): ... - def load_node_const_properties(self, location, col_names=None, chunk_size=None): ... + def load_from_parquets(graph_dir, layer_parquet_cols, node_properties=None, chunk_size=10000000, t_props_chunk_size=10000000, num_threads=4, node_type_col=None, node_id_col=None): + ... + + def load_node_const_properties(self, location, col_names=None, chunk_size=None): + ... + def merge_by_sorted_gids(self, other, graph_dir): """ Merge this graph with another `DiskGraph`. Note that both graphs should have nodes that are sorted by their global ids or the resulting graph will be nonsense! """ - def to_events(self): ... - def to_persistent(self): ... + def to_events(self): + ... + + def to_persistent(self): + ... -class Edge(object): +class Edge(object): """ PyEdge is a Python class that represents an edge in the graph. An edge is a directed connection between two nodes. @@ -722,7 +746,7 @@ class Edge(object): Optional[int] """ -class Edges(object): +class Edges(object): """A list of edges that can be iterated over.""" def __bool__(self): @@ -936,7 +960,9 @@ class Edges(object): """ - def history_counts(self): ... + def history_counts(self): + ... + def history_date_time(self): """ Returns all timestamps of edges, when an edge is added or change to an edge is made. @@ -950,7 +976,9 @@ class Edges(object): def id(self): """Returns all ids of the edges.""" - def is_active(self): ... + def is_active(self): + ... + def is_deleted(self): """Check if the edges are deleted""" @@ -1136,12 +1164,7 @@ class Edges(object): Time of edge """ - def to_df( - self, - include_property_history: bool = True, - convert_datetime: bool = False, - explode: bool = False, - ) -> DataFrame: + def to_df(self, include_property_history: bool = True, convert_datetime: bool = False, explode: bool = False) -> DataFrame: """ Converts the graph's edges into a Pandas DataFrame. @@ -1194,7 +1217,7 @@ class Edges(object): Optional[int] """ -class Graph(GraphView): +class Graph(GraphView): """ A temporal graph with event semantics. @@ -1205,7 +1228,9 @@ class Graph(GraphView): def __new__(cls, num_shards: Optional[int] = None) -> Graph: """Create and return a new object. See help(type) for accurate signature.""" - def __reduce__(self): ... + def __reduce__(self): + ... + def add_constant_properties(self, properties: PropInput) -> None: """ Adds static properties to the graph. @@ -1220,15 +1245,7 @@ class Graph(GraphView): GraphError: If the operation fails. 
""" - def add_edge( - self, - timestamp: TimeInput, - src: str | int, - dst: str | int, - properties: Optional[PropInput] = None, - layer: Optional[str] = None, - secondary_index=None, - ) -> MutableEdge: + def add_edge(self, timestamp: TimeInput, src: str|int, dst: str|int, properties: Optional[PropInput] = None, layer: Optional[str] = None, secondary_index=None) -> MutableEdge: """ Adds a new edge with the given source and destination nodes and properties to the graph. @@ -1247,14 +1264,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def add_node( - self, - timestamp: TimeInput, - id: str | int, - properties: Optional[PropInput] = None, - node_type: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> MutableNode: + def add_node(self, timestamp: TimeInput, id: str|int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableNode: """ Adds a new node with the given id and properties to the graph. @@ -1272,12 +1282,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def add_properties( - self, - timestamp: TimeInput, - properties: PropInput, - secondary_index: Optional[int] = None, - ) -> None: + def add_properties(self, timestamp: TimeInput, properties: PropInput, secondary_index: Optional[int] = None) -> None: """ Adds properties to the graph. @@ -1304,14 +1309,7 @@ class Graph(GraphView): path (str): The path to the cache file """ - def create_node( - self, - timestamp: TimeInput, - id: str | int, - properties: Optional[PropInput] = None, - node_type: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> MutableNode: + def create_node(self, timestamp: TimeInput, id: str|int, properties: Optional[PropInput] = None, node_type: Optional[str] = None, secondary_index: Optional[int] = None) -> MutableNode: """ Creates a new node with the given id and properties to the graph. It fails if the node already exists. @@ -1341,7 +1339,7 @@ class Graph(GraphView): Graph """ - def edge(self, src: str | int, dst: str | int) -> Edge: + def edge(self, src: str|int, dst: str|int) -> Edge: """ Gets the edge with the specified source and destination nodes @@ -1353,7 +1351,9 @@ class Graph(GraphView): Edge: the edge with the specified source and destination nodes, or None if the edge does not exist """ - def event_graph(self): ... + def event_graph(self): + ... + def get_all_node_types(self): """ Returns all the node types in the graph. @@ -1449,9 +1449,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def import_node_as( - self, node: Node, new_id: str | int, merge: bool = False - ) -> Node: + def import_node_as(self, node: Node, new_id: str|int, merge: bool = False) -> Node: """ Import a single node into the graph with new id. @@ -1486,9 +1484,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def import_nodes_as( - self, nodes: List[Node], new_ids: List[str | int], merge: bool = False - ) -> None: + def import_nodes_as(self, nodes: List[Node], new_ids: List[str|int], merge: bool = False) -> None: """ Import multiple nodes into the graph with new ids. 
@@ -1533,16 +1529,7 @@ class Graph(GraphView): Graph """ - def load_edge_props_from_pandas( - self, - df: DataFrame, - src: str, - dst: str, - constant_properties: List[str] = None, - shared_constant_properties: PropInput = None, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edge_props_from_pandas(self, df: DataFrame, src: str, dst: str, constant_properties: List[str] = None, shared_constant_properties: PropInput = None, layer: str = None, layer_col: str = None) -> None: """ Load edge properties from a Pandas DataFrame. @@ -1562,16 +1549,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_edge_props_from_parquet( - self, - parquet_path: str, - src: str, - dst: str, - constant_properties: List[str] = None, - shared_constant_properties: PropInput = None, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edge_props_from_parquet(self, parquet_path: str, src: str, dst: str, constant_properties: List[str] = None, shared_constant_properties: PropInput = None, layer: str = None, layer_col: str = None) -> None: """ Load edge properties from parquet file @@ -1591,18 +1569,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_pandas( - self, - df: DataFrame, - time: str, - src: str, - dst: str, - properties: List[str] = None, - constant_properties: List[str] = None, - shared_constant_properties: PropInput = None, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edges_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, properties: List[str] = None, constant_properties: List[str] = None, shared_constant_properties: PropInput = None, layer: str = None, layer_col: str = None) -> None: """ Load edges from a Pandas DataFrame into the graph. @@ -1624,18 +1591,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_parquet( - self, - parquet_path: str, - time: str, - src: str, - dst: str, - properties: List[str] = None, - constant_properties: List[str] = None, - shared_constant_properties: PropInput = None, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edges_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, properties: List[str] = None, constant_properties: List[str] = None, shared_constant_properties: PropInput = None, layer: str = None, layer_col: str = None) -> None: """ Load edges from a Parquet file into the graph. @@ -1669,15 +1625,7 @@ class Graph(GraphView): Graph """ - def load_node_props_from_pandas( - self, - df: DataFrame, - id: str, - node_type: str = None, - node_type_col: str = None, - constant_properties: List[str] = None, - shared_constant_properties: PropInput = None, - ) -> None: + def load_node_props_from_pandas(self, df: DataFrame, id: str, node_type: str = None, node_type_col: str = None, constant_properties: List[str] = None, shared_constant_properties: PropInput = None) -> None: """ Load node properties from a Pandas DataFrame. @@ -1696,15 +1644,7 @@ class Graph(GraphView): GraphError: If the operation fails. 
""" - def load_node_props_from_parquet( - self, - parquet_path: str, - id: str, - node_type: str = None, - node_type_col: str = None, - constant_properties: List[str] = None, - shared_constant_properties: PropInput = None, - ) -> None: + def load_node_props_from_parquet(self, parquet_path: str, id: str, node_type: str = None, node_type_col: str = None, constant_properties: List[str] = None, shared_constant_properties: PropInput = None) -> None: """ Load node properties from a parquet file. @@ -1723,17 +1663,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_pandas( - self, - df: DataFrame, - time: str, - id: str, - node_type: str = None, - node_type_col: str = None, - properties: List[str] = None, - constant_properties: List[str] = None, - shared_constant_properties: PropInput = None, - ) -> None: + def load_nodes_from_pandas(self, df: DataFrame, time: str, id: str, node_type: str = None, node_type_col: str = None, properties: List[str] = None, constant_properties: List[str] = None, shared_constant_properties: PropInput = None) -> None: """ Load nodes from a Pandas DataFrame into the graph. @@ -1754,17 +1684,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_parquet( - self, - parquet_path: str, - time: str, - id: str, - node_type: str = None, - node_type_col: str = None, - properties: List[str] = None, - constant_properties: List[str] = None, - shared_constant_properties: PropInput = None, - ) -> None: + def load_nodes_from_parquet(self, parquet_path: str, time: str, id: str, node_type: str = None, node_type_col: str = None, properties: List[str] = None, constant_properties: List[str] = None, shared_constant_properties: PropInput = None) -> None: """ Load nodes from a Parquet file into the graph. @@ -1785,7 +1705,7 @@ class Graph(GraphView): GraphError: If the operation fails. """ - def node(self, id: str | int) -> Node: + def node(self, id: str|int) -> Node: """ Gets the node with the specified id @@ -1826,7 +1746,9 @@ class Graph(GraphView): bytes """ - def to_disk_graph(self, graph_dir): ... + def to_disk_graph(self, graph_dir): + ... + def update_constant_properties(self, properties: PropInput) -> None: """ Updates static properties to the graph. @@ -1844,21 +1766,14 @@ class Graph(GraphView): def write_updates(self): """Persist the new updates by appending them to the cache file.""" -class GraphIndex(object): +class GraphIndex(object): """ A searchable Index for a `Graph`. This allows for fuzzy and exact searches of nodes and edges. This makes use of Tantivity internally to provide the search functionality. To create a graph index, call `graph.index()` on any `Graph` object in python. """ - def fuzzy_search_edges( - self, - query: str, - limit: int = 25, - offset: int = 0, - prefix: bool = False, - levenshtein_distance: int = 0, - ) -> list[Edge]: + def fuzzy_search_edges(self, query: str, limit: int = 25, offset: int = 0, prefix: bool = False, levenshtein_distance: int = 0) -> list[Edge]: """ Searches for edges which match the given query. This uses Tantivy's fuzzy search. @@ -1874,14 +1789,7 @@ class GraphIndex(object): list[Edge]: A list of edges which match the query. The list will be empty if no edges match the query. 
""" - def fuzzy_search_nodes( - self, - query: str, - limit: int = 25, - offset: int = 0, - prefix: bool = False, - levenshtein_distance: int = 0, - ) -> list[Node]: + def fuzzy_search_nodes(self, query: str, limit: int = 25, offset: int = 0, prefix: bool = False, levenshtein_distance: int = 0) -> list[Node]: """ Searches for nodes which match the given query. This uses Tantivy's fuzzy search. If you would like to better understand the query syntax, please visit our documentation at https://docs.raphtory.com @@ -1925,7 +1833,7 @@ class GraphIndex(object): list[Node]: A list of nodes which match the query. The list will be empty if no nodes match. """ -class GraphView(object): +class GraphView(object): """Graph view is a read-only version of a graph at a certain point in time.""" def __eq__(self, value): @@ -2443,14 +2351,7 @@ class GraphView(object): GraphView: Returns the subgraph """ - def to_networkx( - self, - explode_edges: bool = False, - include_node_properties: bool = True, - include_edge_properties: bool = True, - include_update_history: bool = True, - include_property_history: bool = True, - ): + def to_networkx(self, explode_edges: bool = False, include_node_properties: bool = True, include_edge_properties: bool = True, include_update_history: bool = True, include_property_history: bool = True): """ Returns a graph with NetworkX. @@ -2469,18 +2370,7 @@ class GraphView(object): A Networkx MultiDiGraph. """ - def to_pyvis( - self, - explode_edges=False, - edge_color="#000000", - shape=None, - node_image=None, - edge_weight=None, - edge_label=None, - colour_nodes_by_type=False, - notebook=False, - **kwargs - ): + def to_pyvis(self, explode_edges=False, edge_color='#000000', shape=None, node_image=None, edge_weight=None, edge_label=None, colour_nodes_by_type=False, notebook=False, **kwargs): """ Draw a graph with PyVis. Pyvis is a required dependency. If you intend to use this function make sure that you install Pyvis @@ -2526,17 +2416,7 @@ class GraphView(object): GraphView: The layered view """ - def vectorise( - self, - embedding: Callable[[list], list], - cache: str = None, - overwrite_cache: bool = False, - graph_template: str = None, - node_template: str = None, - edge_template: str = None, - graph_name=None, - verbose: bool = False, - ): + def vectorise(self, embedding: Callable[[list], list], cache: str = None, overwrite_cache: bool = False, graph_template: str = None, node_template: str = None, edge_template: str = None, graph_name=None, verbose: bool = False): """ Create a VectorisedGraph from the current graph @@ -2574,13 +2454,12 @@ class GraphView(object): Optional[int] """ -class MutableEdge(Edge): +class MutableEdge(Edge): + def __repr__(self): """Return repr(self).""" - def add_constant_properties( - self, properties: PropInput, layer: Optional[str] = None - ): + def add_constant_properties(self, properties: PropInput, layer: Optional[str] = None): """ Add constant properties to an edge in the graph. This function is used to add properties to an edge that remain constant and do not @@ -2591,13 +2470,7 @@ class MutableEdge(Edge): layer (str, optional): The layer you want these properties to be added on to. 
""" - def add_updates( - self, - t: TimeInput, - properties: Optional[PropInput] = None, - layer: Optional[str] = None, - secondary_index: Optional[int] = None, - ) -> None: + def add_updates(self, t: TimeInput, properties: Optional[PropInput] = None, layer: Optional[str] = None, secondary_index: Optional[int] = None) -> None: """ Add updates to an edge in the graph at a specified time. This function allows for the addition of property updates to an edge within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -2624,9 +2497,7 @@ class MutableEdge(Edge): layer (str, optional): The layer you want the deletion applied to . """ - def update_constant_properties( - self, properties: PropInput, layer: Optional[str] = None - ): + def update_constant_properties(self, properties: PropInput, layer: Optional[str] = None): """ Update constant properties of an edge in the graph overwriting existing values. This function is used to add properties to an edge that remains constant and does not @@ -2637,7 +2508,8 @@ class MutableEdge(Edge): layer (str, optional): The layer you want these properties to be added on to. """ -class MutableNode(Node): +class MutableNode(Node): + def __repr__(self): """Return repr(self).""" @@ -2651,12 +2523,7 @@ class MutableNode(Node): properties (PropInput): A dictionary of properties to be added to the node. Each key is a string representing the property name, and each value is of type Prop representing the property value. """ - def add_updates( - self, - t: TimeInput, - properties: PropInput = None, - secondary_index: Optional[int] = None, - ) -> None: + def add_updates(self, t: TimeInput, properties: PropInput = None, secondary_index: Optional[int] = None) -> None: """ Add updates to a node in the graph at a specified time. This function allows for the addition of property updates to a node within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -2692,7 +2559,7 @@ class MutableNode(Node): properties (PropInput): A dictionary of properties to be added to the node. Each key is a string representing the property name, and each value is of type Prop representing the property value. """ -class Node(object): +class Node(object): """A node (or node) in the graph.""" def __eq__(self, value): @@ -2975,7 +2842,9 @@ class Node(object): An iterator over the neighbours of this node that point into this node. """ - def is_active(self): ... + def is_active(self): + ... + def latest(self): """ Create a view of the Node including all events at the latest time. @@ -3209,7 +3078,7 @@ class Node(object): Optional[int] """ -class Nodes(object): +class Nodes(object): """A list of nodes that can be iterated over.""" def __bool__(self): @@ -3683,9 +3552,7 @@ class Nodes(object): Optional[Datetime]: The earliest datetime that this Nodes is valid or None if the Nodes is valid for all times. """ - def to_df( - self, include_property_history: bool = False, convert_datetime: bool = False - ): + def to_df(self, include_property_history: bool = False, convert_datetime: bool = False): """ Converts the graph's nodes into a Pandas DataFrame. @@ -3702,7 +3569,9 @@ class Nodes(object): If successful, this PyObject will be a Pandas DataFrame. """ - def type_filter(self, node_types): ... + def type_filter(self, node_types): + ... 
+ def valid_layers(self, names: list[str]) -> Nodes: """ Return a view of Nodes containing all layers `names` @@ -3736,13 +3605,15 @@ class Nodes(object): Optional[int] """ -class PersistentGraph(GraphView): +class PersistentGraph(GraphView): """A temporal graph that allows edges and nodes to be deleted.""" def __new__(cls) -> PersistentGraph: """Create and return a new object. See help(type) for accurate signature.""" - def __reduce__(self): ... + def __reduce__(self): + ... + def add_constant_properties(self, properties: dict) -> None: """ Adds static properties to the graph. @@ -3757,15 +3628,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def add_edge( - self, - timestamp: int, - src: str | int, - dst: str | int, - properties: dict = None, - layer: str = None, - secondary_index: Optional[int] = None, - ) -> None: + def add_edge(self, timestamp: int, src: str | int, dst: str | int, properties: dict = None, layer: str = None, secondary_index: Optional[int] = None) -> None: """ Adds a new edge with the given source and destination nodes and properties to the graph. @@ -3784,14 +3647,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def add_node( - self, - timestamp: TimeInput, - id: str | int, - properties: dict = None, - node_type: str = None, - secondary_index: Optional[int] = None, - ) -> None: + def add_node(self, timestamp: TimeInput, id: str | int, properties: dict = None, node_type: str = None, secondary_index: Optional[int] = None) -> None: """ Adds a new node with the given id and properties to the graph. @@ -3809,12 +3665,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def add_properties( - self, - timestamp: TimeInput, - properties: dict, - secondary_index: Optional[int] = None, - ) -> None: + def add_properties(self, timestamp: TimeInput, properties: dict, secondary_index: Optional[int] = None) -> None: """ Adds properties to the graph. @@ -3841,14 +3692,7 @@ class PersistentGraph(GraphView): path (str): The path to the cache file """ - def create_node( - self, - timestamp: TimeInput, - id: str | int, - properties: dict = None, - node_type: str = None, - secondary_index: Optional[int] = None, - ): + def create_node(self, timestamp: TimeInput, id: str | int, properties: dict = None, node_type: str = None, secondary_index: Optional[int] = None): """ Creates a new node with the given id and properties to the graph. It fails if the node already exists. @@ -3866,14 +3710,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def delete_edge( - self, - timestamp: int, - src: str | int, - dst: str | int, - layer: str = None, - secondary_index: Optional[int] = None, - ): + def delete_edge(self, timestamp: int, src: str | int, dst: str | int, layer: str = None, secondary_index: Optional[int] = None): """ Deletes an edge given the timestamp, src and dst nodes and layer (optional) @@ -4017,7 +3854,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def import_node_as(self, node: Node, new_id: str | int, merge: bool = False): + def import_node_as(self, node: Node, new_id: str|int, merge: bool = False): """ Import a single node into the graph with new id. @@ -4054,9 +3891,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. 
""" - def import_nodes_as( - self, nodes: List[Node], new_ids: List[str | int], merge: bool = False - ) -> None: + def import_nodes_as(self, nodes: List[Node], new_ids: List[str|int], merge: bool = False) -> None: """ Import multiple nodes into the graph with new ids. @@ -4090,15 +3925,7 @@ class PersistentGraph(GraphView): PersistentGraph """ - def load_edge_deletions_from_pandas( - self, - df: DataFrame, - time: str, - src: str, - dst: str, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edge_deletions_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, layer: str = None, layer_col: str = None) -> None: """ Load edges deletions from a Pandas DataFrame into the graph. @@ -4117,15 +3944,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edge_deletions_from_parquet( - self, - parquet_path: str, - time: str, - src: str, - dst: str, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edge_deletions_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, layer: str = None, layer_col: str = None) -> None: """ Load edges deletions from a Parquet file into the graph. @@ -4144,16 +3963,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edge_props_from_pandas( - self, - df: DataFrame, - src: str, - dst: str, - constant_properties: List[str] = None, - shared_constant_properties: dict = None, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edge_props_from_pandas(self, df: DataFrame, src: str, dst: str, constant_properties: List[str] = None, shared_constant_properties: dict = None, layer: str = None, layer_col: str = None) -> None: """ Load edge properties from a Pandas DataFrame. @@ -4173,16 +3983,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edge_props_from_parquet( - self, - parquet_path: str, - src: str, - dst: str, - constant_properties: List[str] = None, - shared_constant_properties: dict = None, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edge_props_from_parquet(self, parquet_path: str, src: str, dst: str, constant_properties: List[str] = None, shared_constant_properties: dict = None, layer: str = None, layer_col: str = None) -> None: """ Load edge properties from parquet file @@ -4202,18 +4003,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_edges_from_pandas( - self, - df: DataFrame, - time: str, - src: str, - dst: str, - properties: List[str] = None, - constant_properties: List[str] = None, - shared_constant_properties: dict = None, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edges_from_pandas(self, df: DataFrame, time: str, src: str, dst: str, properties: List[str] = None, constant_properties: List[str] = None, shared_constant_properties: dict = None, layer: str = None, layer_col: str = None) -> None: """ Load edges from a Pandas DataFrame into the graph. @@ -4235,18 +4025,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. 
""" - def load_edges_from_parquet( - self, - parquet_path: str, - time: str, - src: str, - dst: str, - properties: List[str] = None, - constant_properties: List[str] = None, - shared_constant_properties: dict = None, - layer: str = None, - layer_col: str = None, - ) -> None: + def load_edges_from_parquet(self, parquet_path: str, time: str, src: str, dst: str, properties: List[str] = None, constant_properties: List[str] = None, shared_constant_properties: dict = None, layer: str = None, layer_col: str = None) -> None: """ Load edges from a Parquet file into the graph. @@ -4280,15 +4059,7 @@ class PersistentGraph(GraphView): PersistentGraph """ - def load_node_props_from_pandas( - self, - df: DataFrame, - id: str, - node_type: str = None, - node_type_col: str = None, - constant_properties: List[str] = None, - shared_constant_properties: dict = None, - ) -> None: + def load_node_props_from_pandas(self, df: DataFrame, id: str, node_type: str = None, node_type_col: str = None, constant_properties: List[str] = None, shared_constant_properties: dict = None) -> None: """ Load node properties from a Pandas DataFrame. @@ -4307,15 +4078,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_node_props_from_parquet( - self, - parquet_path: str, - id: str, - node_type: str = None, - node_type_col: str = None, - constant_properties: List[str] = None, - shared_constant_properties: dict = None, - ) -> None: + def load_node_props_from_parquet(self, parquet_path: str, id: str, node_type: str = None, node_type_col: str = None, constant_properties: List[str] = None, shared_constant_properties: dict = None) -> None: """ Load node properties from a parquet file. @@ -4334,17 +4097,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_pandas( - self, - df: DataFrame, - time: str, - id: str, - node_type: str = None, - node_type_col: str = None, - properties: List[str] = None, - constant_properties: List[str] = None, - shared_constant_properties: dict = None, - ) -> None: + def load_nodes_from_pandas(self, df: DataFrame, time: str, id: str, node_type: str = None, node_type_col: str = None, properties: List[str] = None, constant_properties: List[str] = None, shared_constant_properties: dict = None) -> None: """ Load nodes from a Pandas DataFrame into the graph. @@ -4365,17 +4118,7 @@ class PersistentGraph(GraphView): GraphError: If the operation fails. """ - def load_nodes_from_parquet( - self, - parquet_path: str, - time: str, - id: str, - node_type: str = None, - node_type_col: str = None, - properties: List[str] = None, - constant_properties: List[str] = None, - shared_constant_properties: dict = None, - ) -> None: + def load_nodes_from_parquet(self, parquet_path: str, time: str, id: str, node_type: str = None, node_type_col: str = None, properties: List[str] = None, constant_properties: List[str] = None, shared_constant_properties: dict = None) -> None: """ Load nodes from a Parquet file into the graph. @@ -4407,7 +4150,9 @@ class PersistentGraph(GraphView): The node with the specified id, or None if the node does not exist """ - def persistent_graph(self): ... + def persistent_graph(self): + ... + def save_to_file(self, path: str): """ Saves the PersistentGraph to the given path. 
@@ -4449,7 +4194,7 @@ class PersistentGraph(GraphView): def write_updates(self): """Persist the new updates by appending them to the cache file.""" -class Prop(object): +class Prop(object): """ A reference to a property used for constructing filters @@ -4495,7 +4240,7 @@ class Prop(object): if they don't have the property """ -class Properties(object): +class Properties(object): """A view of the properties of an entity""" def __contains__(self, key): @@ -4564,19 +4309,25 @@ class Properties(object): fallback to the static property if the temporal value does not exist. """ -class PropertyFilter(object): ... +class PropertyFilter(object): + ... + + +class PyGraphEncoder(object): -class PyGraphEncoder(object): def __call__(self, *args, **kwargs): """Call self as a function.""" - def __getstate__(self): ... + def __getstate__(self): + ... + def __new__(cls) -> PyGraphEncoder: """Create and return a new object. See help(type) for accurate signature.""" - def __setstate__(self): ... + def __setstate__(self): + ... -class TemporalProp(object): +class TemporalProp(object): """A view of a temporal property""" def __eq__(self, value): @@ -4666,7 +4417,9 @@ class TemporalProp(object): (i64, Prop): A tuple containing the time and the minimum property value. """ - def ordered_dedupe(self, latest_time): ... + def ordered_dedupe(self, latest_time): + ... + def sum(self) -> Prop: """ Compute the sum of all property values. @@ -4675,14 +4428,16 @@ class TemporalProp(object): Prop: The sum of all property values. """ - def unique(self): ... + def unique(self): + ... + def value(self): """Get the latest value of the property""" def values(self): """Get the property values for each update""" -class TemporalProperties(object): +class TemporalProperties(object): """A view of the temporal properties of an entity""" def __contains__(self, key): @@ -4766,7 +4521,8 @@ class TemporalProperties(object): list[TemporalProp]: the list of property views """ -class WindowSet(object): +class WindowSet(object): + def __iter__(self): """Implement iter(self).""" diff --git a/python/python/raphtory/algorithms/__init__.pyi b/python/python/raphtory/algorithms/__init__.pyi index 84a1c21c7..c6e8bfd40 100644 --- a/python/python/raphtory/algorithms/__init__.pyi +++ b/python/python/raphtory/algorithms/__init__.pyi @@ -7,6 +7,7 @@ # # ############################################################################### + from typing import * from raphtory import * from raphtory.algorithms import * @@ -17,7 +18,7 @@ from raphtory.typing import * from datetime import datetime from pandas import DataFrame -class Matching(object): +class Matching(object): """A Matching (i.e., a set of edges that do not share any nodes)""" def __bool__(self): @@ -117,12 +118,7 @@ def average_degree(g: GraphView): float : the average degree of the nodes in the graph """ -def balance( - g: GraphView, - name: str = "weight", - direction: Direction = "both", - threads: Optional[int] = None, -) -> AlgorithmResult: +def balance(g: GraphView, name: str = "weight", direction: Direction = "both", threads: Optional[int] = None) -> AlgorithmResult: """ Sums the weights of edges in the graph based on the specified direction. @@ -142,9 +138,7 @@ def balance( """ -def betweenness_centrality( - g: GraphView, k: Optional[int] = None, normalized: bool = True -) -> AlgorithmResult: +def betweenness_centrality(g: GraphView, k: Optional[int] = None, normalized: bool = True) -> AlgorithmResult: """ Computes the betweenness centrality for nodes in a given graph. 
@@ -158,12 +152,12 @@ def betweenness_centrality( AlgorithmResult: Returns an `AlgorithmResult` containing the betweenness centrality of each node. """ -def cohesive_fruchterman_reingold( - graph, iterations=100, scale=1.0, node_start_size=1.0, cooloff_factor=0.95, dt=0.1 -): +def cohesive_fruchterman_reingold(graph, iterations=100, scale=1.0, node_start_size=1.0, cooloff_factor=0.95, dt=0.1): """Cohesive version of `fruchterman_reingold` that adds virtual edges between isolated nodes""" -def connected_components(g): ... +def connected_components(g): + ... + def degree_centrality(g: GraphView, threads: Optional[int] = None) -> AlgorithmResult: """ Computes the degree centrality of all nodes in the graph. The values are normalized @@ -178,13 +172,7 @@ def degree_centrality(g: GraphView, threads: Optional[int] = None) -> AlgorithmR AlgorithmResult: A result containing a mapping of node names to the computed sum of their associated degree centrality. """ -def dijkstra_single_source_shortest_paths( - g: GraphView, - source: InputNode, - targets: list[InputNode], - direction: Direction = "both", - weight: str = "weight", -) -> dict: +def dijkstra_single_source_shortest_paths(g: GraphView, source: InputNode, targets: list[InputNode], direction: Direction = "both", weight: str = "weight") -> dict: """ Finds the shortest paths from a single source to multiple targets in a graph. @@ -214,14 +202,7 @@ def directed_graph_density(g: GraphView): float : Directed graph density of G. """ -def fast_rp( - g: GraphView, - embedding_dim: int, - normalization_strength: float, - iter_weights: list[float], - seed: Optional[int] = None, - threads: Optional[int] = None, -) -> AlgorithmResult: +def fast_rp(g: GraphView, embedding_dim: int, normalization_strength: float, iter_weights: list[float], seed: Optional[int] = None, threads: Optional[int] = None) -> AlgorithmResult: """ Computes embedding vectors for each vertex of an undirected/bidirectional graph according to the Fast RP algorithm. Original Paper: https://doi.org/10.48550/arXiv.1908.11512 @@ -237,14 +218,7 @@ def fast_rp( AlgorithmResult: Vertices mapped to their corresponding embedding vectors """ -def fruchterman_reingold( - graph: GraphView, - iterations: int | None = 100, - scale: float | None = 1.0, - node_start_size: float | None = 1.0, - cooloff_factor: float | None = 0.95, - dt: float | None = 0.1, -): +def fruchterman_reingold(graph: GraphView, iterations: int | None = 100, scale: float | None = 1.0, node_start_size: float | None = 1.0, cooloff_factor: float | None = 0.95, dt: float | None = 0.1): """ Fruchterman Reingold layout algorithm @@ -453,12 +427,7 @@ def local_triangle_count(g: GraphView, v: InputNode): """ -def louvain( - graph: GraphView, - resolution: float = 1.0, - weight_prop: str | None = None, - tol: None | float = None, -): +def louvain(graph: GraphView, resolution: float = 1.0, weight_prop: str | None = None, tol: None | float = None): """ Louvain algorithm for community detection @@ -502,12 +471,7 @@ def max_out_degree(g: GraphView): int : value of the largest outdegree """ -def max_weight_matching( - graph: GraphView, - weight_prop: Optional[str] = None, - max_cardinality: bool = True, - verify_optimum_flag: bool = False, -) -> Matching: +def max_weight_matching(graph: GraphView, weight_prop: Optional[str] = None, max_cardinality: bool = True, verify_optimum_flag: bool = False) -> Matching: """ Compute a maximum-weighted matching in the general undirected weighted graph given by "edges". 
If `max_cardinality` is true, only @@ -597,13 +561,7 @@ def out_components(g: GraphView): AlgorithmResult : AlgorithmResult object mapping each node to an array containing the ids of all nodes within their 'out-component' """ -def pagerank( - g: GraphView, - iter_count: int = 20, - max_diff: Optional[float] = None, - use_l2_norm=True, - damping_factor=0.85, -): +def pagerank(g: GraphView, iter_count: int = 20, max_diff: Optional[float] = None, use_l2_norm=True, damping_factor=0.85): """ Pagerank -- pagerank centrality value of the nodes in a graph @@ -622,9 +580,7 @@ def pagerank( AlgorithmResult : AlgorithmResult with string keys and float values mapping node names to their pagerank value. """ -def single_source_shortest_path( - g: GraphView, source: InputNode, cutoff: Optional[int] = None -) -> AlgorithmResult: +def single_source_shortest_path(g: GraphView, source: InputNode, cutoff: Optional[int] = None) -> AlgorithmResult: """ Calculates the single source shortest paths from a given source node. @@ -651,15 +607,7 @@ def strongly_connected_components(g: GraphView): list[list[int]] : List of strongly connected nodes identified by ids """ -def temporal_SEIR( - graph: GraphView, - seeds: int | float | list[InputNode], - infection_prob: float, - initial_infection: int | str | datetime, - recovery_rate: float | None = None, - incubation_rate: float | None = None, - rng_seed: int | None = None, -) -> AlgorithmResult: +def temporal_SEIR(graph: GraphView, seeds: int | float | list[InputNode], infection_prob: float, initial_infection: int | str | datetime, recovery_rate: float | None = None, incubation_rate: float | None = None, rng_seed: int | None = None) -> AlgorithmResult: """ Simulate an SEIR dynamic on the network @@ -683,7 +631,7 @@ def temporal_SEIR( Returns: AlgorithmResult: Returns an `Infected` object for each infected node with attributes - + `infected`: the time stamp of the infection event `active`: the time stamp at which the node actively starts spreading the infection (i.e., the end of the incubation period) @@ -692,9 +640,7 @@ def temporal_SEIR( """ -def temporal_bipartite_graph_projection( - g: GraphView, delta: int, pivot_type -) -> GraphView: +def temporal_bipartite_graph_projection(g: GraphView, delta: int, pivot_type) -> GraphView: """ Projects a temporal bipartite graph into an undirected temporal graph over the pivot node type. Let G be a bipartite graph with node types A and B. Given delta > 0, the projection graph G' pivoting over type B nodes, will make a connection between nodes n1 and n2 (of type A) at time (t1 + t2)/2 if they respectively have an edge at time t1, t2 with the same node of type B in G, and |t2-t1| < delta. @@ -708,13 +654,7 @@ def temporal_bipartite_graph_projection( GraphView: Projected (unipartite) temporal graph. """ -def temporally_reachable_nodes( - g: GraphView, - max_hops: int, - start_time: int, - seed_nodes: list[InputNode], - stop_nodes: Optional[list[InputNode]] = None, -): +def temporally_reachable_nodes(g: GraphView, max_hops: int, start_time: int, seed_nodes: list[InputNode], stop_nodes: Optional[list[InputNode]] = None): """ Temporally reachable nodes -- the nodes that are reachable by a time respecting path followed out from a set of seed nodes at a starting time. 
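
For readers following the stub changes above, a minimal sketch of how the newly
annotated `AlgorithmResult` API reads in practice (illustrative only; it assumes
raphtory is installed, and the toy edges, node ids and the choice of `pagerank`
below are arbitrary examples, not taken from this patch):

    # Build a tiny event graph and run pagerank, which returns an AlgorithmResult.
    from raphtory import Graph
    from raphtory.algorithms import pagerank

    g = Graph()
    g.add_edge(1, "a", "b")
    g.add_edge(2, "b", "c")

    result = pagerank(g, iter_count=20)  # AlgorithmResult
    scores = result.get_all()            # dict[Node, Any]: value per node
    top_two = result.top_k(2)            # list[Tuple[Node, Any]], highest values first
    frame = result.to_df()               # pandas DataFrame of the result
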
diff --git a/python/python/raphtory/graph_gen/__init__.pyi b/python/python/raphtory/graph_gen/__init__.pyi index b532d6fbd..4ba279bec 100644 --- a/python/python/raphtory/graph_gen/__init__.pyi +++ b/python/python/raphtory/graph_gen/__init__.pyi @@ -7,6 +7,7 @@ # # ############################################################################### + from typing import * from raphtory import * from raphtory.algorithms import * @@ -17,9 +18,7 @@ from raphtory.typing import * from datetime import datetime from pandas import DataFrame -def ba_preferential_attachment( - g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any = None -): +def ba_preferential_attachment(g: Any, nodes_to_add: Any, edges_per_step: Any, seed: Any = None): """ Generates a graph using the preferential attachment model. diff --git a/python/python/raphtory/graph_loader/__init__.pyi b/python/python/raphtory/graph_loader/__init__.pyi index 86ec56683..ad5f0266d 100644 --- a/python/python/raphtory/graph_loader/__init__.pyi +++ b/python/python/raphtory/graph_loader/__init__.pyi @@ -7,6 +7,7 @@ # # ############################################################################### + from typing import * from raphtory import * from raphtory.algorithms import * @@ -67,7 +68,9 @@ def lotr_graph(): def lotr_graph_with_props(): """Same as `lotr_graph()` but with additional properties race and gender for some of the nodes""" -def neo4j_movie_graph(uri, username, password, database=...): ... +def neo4j_movie_graph(uri, username, password, database=...): + ... + def reddit_hyperlink_graph(timeout_seconds: Any = 600): """ Load (a subset of) Reddit hyperlinks dataset into a graph. @@ -107,5 +110,8 @@ def reddit_hyperlink_graph(timeout_seconds: Any = 600): A Graph containing the Reddit hyperlinks dataset """ -def reddit_hyperlink_graph_local(file_path): ... -def stable_coin_graph(path=None, subset=None): ... +def reddit_hyperlink_graph_local(file_path): + ... + +def stable_coin_graph(path=None, subset=None): + ... diff --git a/python/python/raphtory/graphql/__init__.pyi b/python/python/raphtory/graphql/__init__.pyi index 4852e73e9..26c16ea5b 100644 --- a/python/python/raphtory/graphql/__init__.pyi +++ b/python/python/raphtory/graphql/__init__.pyi @@ -7,6 +7,7 @@ # # ############################################################################### + from typing import * from raphtory import * from raphtory.algorithms import * @@ -17,21 +18,10 @@ from raphtory.typing import * from datetime import datetime from pandas import DataFrame -class GraphServer(object): +class GraphServer(object): """A class for defining and running a Raphtory GraphQL server""" - def __new__( - cls, - work_dir, - cache_capacity=None, - cache_tti_seconds=None, - log_level=None, - tracing=None, - otlp_agent_host=None, - otlp_agent_port=None, - otlp_tracing_service_name=None, - config_path=None, - ) -> GraphServer: + def __new__(cls, work_dir, cache_capacity=None, cache_tti_seconds=None, log_level=None, tracing=None, otlp_agent_host=None, otlp_agent_port=None, otlp_tracing_service_name=None, config_path=None) -> GraphServer: """Create and return a new object. See help(type) for accurate signature.""" def run(self, port: int = 1736, timeout_ms: int = 180000): @@ -43,14 +33,7 @@ class GraphServer(object): timeout_ms (int): Timeout for waiting for the server to start. Defaults to 180000. 
""" - def set_embeddings( - self, - cache: str, - embedding: Optional[Callable] = None, - graph_template: Optional[str] = None, - node_template: Optional[str] = None, - edge_template: Optional[str] = None, - ) -> GraphServer: + def set_embeddings(self, cache: str, embedding: Optional[Callable] = None, graph_template: Optional[str] = None, node_template: Optional[str] = None, edge_template: Optional[str] = None) -> GraphServer: """ Setup the server to vectorise graphs with a default template. @@ -81,9 +64,7 @@ class GraphServer(object): def turn_off_index(self): """Turn off index for all graphs""" - def with_global_search_function( - self, name: str, input: dict, function: Callable - ) -> GraphServer: + def with_global_search_function(self, name: str, input: dict, function: Callable) -> GraphServer: """ Register a function in the GraphQL schema for document search among all the graphs. @@ -101,13 +82,7 @@ class GraphServer(object): GraphServer: A new server object with the function registered """ - def with_vectorised_graphs( - self, - graph_names: list[str], - graph_template: Optional[str] = None, - node_template: Optional[str] = None, - edge_template: Optional[str] = None, - ) -> GraphServer: + def with_vectorised_graphs(self, graph_names: list[str], graph_template: Optional[str] = None, node_template: Optional[str] = None, edge_template: Optional[str] = None) -> GraphServer: """ Vectorise a subset of the graphs of the server. @@ -121,7 +96,7 @@ class GraphServer(object): GraphServer: A new server object containing the vectorised graphs. """ -class GraphqlGraphs(object): +class GraphqlGraphs(object): """ A class for accessing graphs hosted in a Raphtory GraphQL server and running global search for graph documents @@ -146,7 +121,7 @@ class GraphqlGraphs(object): def search_graph_documents_with_scores(self, query, limit, window): """Same as `search_graph_documents` but it also returns the scores alongside the documents""" -class RaphtoryClient(object): +class RaphtoryClient(object): """A client for handling GraphQL operations in the context of Raphtory.""" def __new__(cls, url) -> RaphtoryClient: @@ -269,13 +244,12 @@ class RaphtoryClient(object): The `data` field from the graphQL response after executing the mutation. """ -class RemoteEdge(object): +class RemoteEdge(object): + def __new__(cls, path, client, src, dst) -> RemoteEdge: """Create and return a new object. See help(type) for accurate signature.""" - def add_constant_properties( - self, properties: Dict[str, Prop], layer: Optional[str] = None - ): + def add_constant_properties(self, properties: Dict[str, Prop], layer: Optional[str] = None): """ Add constant properties to the edge within the remote graph. This function is used to add properties to an edge that remain constant and do not @@ -286,12 +260,7 @@ class RemoteEdge(object): layer (str, optional): The layer you want these properties to be added on to. """ - def add_updates( - self, - t: int | str | datetime, - properties: Optional[Dict[str, Prop]] = None, - layer: Optional[str] = None, - ): + def add_updates(self, t: int | str | datetime, properties: Optional[Dict[str, Prop]] = None, layer: Optional[str] = None): """ Add updates to an edge in the remote graph at a specified time. This function allows for the addition of property updates to an edge within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -311,9 +280,7 @@ class RemoteEdge(object): layer (str, optional): The layer you want the deletion applied to. 
""" - def update_constant_properties( - self, properties: Dict[str, Prop], layer: Optional[str] = None - ): + def update_constant_properties(self, properties: Dict[str, Prop], layer: Optional[str] = None): """ Update constant properties of an edge in the remote graph overwriting existing values. This function is used to add properties to an edge that remains constant and does not @@ -324,13 +291,13 @@ class RemoteEdge(object): layer (str, optional): The layer you want these properties to be added on to. """ -class RemoteEdgeAddition(object): - def __new__( - cls, src, dst, layer=None, constant_properties=None, updates=None - ) -> RemoteEdgeAddition: +class RemoteEdgeAddition(object): + + def __new__(cls, src, dst, layer=None, constant_properties=None, updates=None) -> RemoteEdgeAddition: """Create and return a new object. See help(type) for accurate signature.""" -class RemoteGraph(object): +class RemoteGraph(object): + def add_constant_properties(self, properties: dict): """ Adds constant properties to the remote graph. @@ -339,14 +306,7 @@ class RemoteGraph(object): properties (dict): The constant properties of the graph. """ - def add_edge( - self, - timestamp: int | str | datetime, - src: str | int, - dst: str | int, - properties: Optional[dict] = None, - layer: Optional[str] = None, - ): + def add_edge(self, timestamp: int |str | datetime, src: str | int, dst: str | int, properties: Optional[dict] = None, layer: Optional[str] = None): """ Adds a new edge with the given source and destination nodes and properties to the remote graph. @@ -369,13 +329,7 @@ class RemoteGraph(object): updates (List[RemoteEdgeAddition]): The list of updates you want to apply to the remote graph """ - def add_node( - self, - timestamp: int | str | datetime, - id: str | int, - properties: Optional[dict] = None, - node_type: Optional[str] = None, - ): + def add_node(self, timestamp: int|str|datetime, id: str|int, properties: Optional[dict] = None, node_type: Optional[str] = None): """ Adds a new node with the given id and properties to the remote graph. @@ -396,7 +350,7 @@ class RemoteGraph(object): updates (List[RemoteNodeAddition]): The list of updates you want to apply to the remote graph """ - def add_property(self, timestamp: int | str | datetime, properties: dict): + def add_property(self, timestamp: int|str|datetime, properties: dict): """ Adds properties to the remote graph. @@ -405,13 +359,7 @@ class RemoteGraph(object): properties (dict): The temporal properties of the graph. """ - def create_node( - self, - timestamp: int | str | datetime, - id: str | int, - properties: Optional[dict] = None, - node_type: Optional[str] = None, - ): + def create_node(self, timestamp: int|str|datetime, id: str|int, properties: Optional[dict] = None, node_type: Optional[str] = None): """ Create a new node with the given id and properties to the remote graph and fail if the node already exists. 
@@ -424,13 +372,7 @@ class RemoteGraph(object): RemoteNode """ - def delete_edge( - self, - timestamp: int, - src: str | int, - dst: str | int, - layer: Optional[str] = None, - ): + def delete_edge(self, timestamp: int, src: str|int, dst: str|int, layer: Optional[str] = None): """ Deletes an edge in the remote graph, given the timestamp, src and dst nodes and layer (optional) @@ -444,7 +386,7 @@ class RemoteGraph(object): RemoteEdge """ - def edge(self, src: str | int, dst: str | int): + def edge(self, src: str|int, dst: str|int): """ Gets a remote edge with the specified source and destination nodes @@ -456,7 +398,7 @@ class RemoteGraph(object): RemoteEdge """ - def node(self, id: str | int): + def node(self, id: str|int): """ Gets a remote node with the specified id @@ -475,7 +417,8 @@ class RemoteGraph(object): properties (dict): The constant properties of the graph. """ -class RemoteNode(object): +class RemoteNode(object): + def __new__(cls, path, client, id) -> RemoteNode: """Create and return a new object. See help(type) for accurate signature.""" @@ -489,9 +432,7 @@ class RemoteNode(object): properties (Dict[str, Prop]): A dictionary of properties to be added to the node. """ - def add_updates( - self, t: int | str | datetime, properties: Optional[Dict[str, Prop]] = None - ): + def add_updates(self, t: int | str | datetime, properties: Optional[Dict[str, Prop]] = None): """ Add updates to a node in the remote graph at a specified time. This function allows for the addition of property updates to a node within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -520,23 +461,30 @@ class RemoteNode(object): properties (Dict[str, Prop]): A dictionary of properties to be added to the node. """ -class RemoteNodeAddition(object): - def __new__( - cls, name, node_type=None, constant_properties=None, updates=None - ) -> RemoteNodeAddition: +class RemoteNodeAddition(object): + + def __new__(cls, name, node_type=None, constant_properties=None, updates=None) -> RemoteNodeAddition: """Create and return a new object. See help(type) for accurate signature.""" -class RemoteUpdate(object): +class RemoteUpdate(object): + def __new__(cls, time, properties=None) -> RemoteUpdate: """Create and return a new object. See help(type) for accurate signature.""" -class RunningGraphServer(object): +class RunningGraphServer(object): """A Raphtory server handler that also enables querying the server""" - def __enter__(self): ... - def __exit__(self, _exc_type, _exc_val, _exc_tb): ... - def get_client(self): ... + def __enter__(self): + ... + + def __exit__(self, _exc_type, _exc_val, _exc_tb): + ... + + def get_client(self): + ... + def stop(self): """Stop the server and wait for it to finish""" -def encode_graph(graph): ... +def encode_graph(graph): + ... 
diff --git a/python/python/raphtory/node_state/__init__.pyi b/python/python/raphtory/node_state/__init__.pyi index 27e6f9bc8..55cb5445f 100644 --- a/python/python/raphtory/node_state/__init__.pyi +++ b/python/python/raphtory/node_state/__init__.pyi @@ -7,6 +7,7 @@ # # ############################################################################### + from typing import * from raphtory import * from raphtory.algorithms import * @@ -17,7 +18,7 @@ from raphtory.typing import * from datetime import datetime from pandas import DataFrame -class DegreeView(object): +class DegreeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -469,7 +470,7 @@ class DegreeView(object): Optional[int] """ -class EarliestDateTimeView(object): +class EarliestDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -905,7 +906,7 @@ class EarliestDateTimeView(object): Optional[int] """ -class EarliestTimeView(object): +class EarliestTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -1341,7 +1342,7 @@ class EarliestTimeView(object): Optional[int] """ -class HistoryDateTimeView(object): +class HistoryDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -1777,7 +1778,7 @@ class HistoryDateTimeView(object): Optional[int] """ -class HistoryView(object): +class HistoryView(object): """A lazy view over node values""" def __eq__(self, value): @@ -2213,7 +2214,7 @@ class HistoryView(object): Optional[int] """ -class IdView(object): +class IdView(object): """A lazy view over node values""" def __eq__(self, value): @@ -2371,7 +2372,7 @@ class IdView(object): Iterator[GID] """ -class LatestDateTimeView(object): +class LatestDateTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -2807,7 +2808,7 @@ class LatestDateTimeView(object): Optional[int] """ -class LatestTimeView(object): +class LatestTimeView(object): """A lazy view over node values""" def __eq__(self, value): @@ -3243,7 +3244,7 @@ class LatestTimeView(object): Optional[int] """ -class NameView(object): +class NameView(object): """A lazy view over node values""" def __eq__(self, value): @@ -3401,7 +3402,8 @@ class NameView(object): Iterator[str] """ -class NodeStateGID(object): +class NodeStateGID(object): + def __eq__(self, value): """Return self==value.""" @@ -3541,7 +3543,8 @@ class NodeStateGID(object): Iterator[GID] """ -class NodeStateListDateTime(object): +class NodeStateListDateTime(object): + def __eq__(self, value): """Return self==value.""" @@ -3681,7 +3684,8 @@ class NodeStateListDateTime(object): Iterator[list[Datetime]] """ -class NodeStateListI64(object): +class NodeStateListI64(object): + def __eq__(self, value): """Return self==value.""" @@ -3821,7 +3825,8 @@ class NodeStateListI64(object): Iterator[list[int]] """ -class NodeStateOptionDateTime(object): +class NodeStateOptionDateTime(object): + def __eq__(self, value): """Return self==value.""" @@ -3961,7 +3966,8 @@ class NodeStateOptionDateTime(object): Iterator[Optional[Datetime]] """ -class NodeStateOptionListDateTime(object): +class NodeStateOptionListDateTime(object): + def __eq__(self, value): """Return self==value.""" @@ -4101,7 +4107,8 @@ class NodeStateOptionListDateTime(object): Iterator[Optional[list[Datetime]]] """ -class NodeStateOptionStr(object): +class NodeStateOptionStr(object): + def __eq__(self, value): """Return self==value.""" @@ -4241,7 +4248,8 @@ class NodeStateOptionStr(object): Iterator[Optional[str]] """ -class NodeStateString(object): 
+class NodeStateString(object): + def __eq__(self, value): """Return self==value.""" @@ -4381,7 +4389,8 @@ class NodeStateString(object): Iterator[str] """ -class NodeStateU64(object): +class NodeStateU64(object): + def __eq__(self, value): """Return self==value.""" @@ -4537,7 +4546,8 @@ class NodeStateU64(object): Iterator[int] """ -class NodeStateUsize(object): +class NodeStateUsize(object): + def __eq__(self, value): """Return self==value.""" @@ -4693,7 +4703,7 @@ class NodeStateUsize(object): Iterator[int] """ -class NodeTypeView(object): +class NodeTypeView(object): """A lazy view over node values""" def __eq__(self, value): diff --git a/python/python/raphtory/vectors/__init__.pyi b/python/python/raphtory/vectors/__init__.pyi index 445e6caf3..2df87ef99 100644 --- a/python/python/raphtory/vectors/__init__.pyi +++ b/python/python/raphtory/vectors/__init__.pyi @@ -7,6 +7,7 @@ # # ############################################################################### + from typing import * from raphtory import * from raphtory.algorithms import * @@ -17,7 +18,8 @@ from raphtory.typing import * from datetime import datetime from pandas import DataFrame -class Document(object): +class Document(object): + def __new__(cls, content, life=None) -> Document: """Create and return a new object. See help(type) for accurate signature.""" @@ -25,15 +27,23 @@ class Document(object): """Return repr(self).""" @property - def content(self): ... + def content(self): + ... + @property - def embedding(self): ... + def embedding(self): + ... + @property - def entity(self): ... + def entity(self): + ... + @property - def life(self): ... + def life(self): + ... + +class VectorSelection(object): -class VectorSelection(object): def add_edges(self, edges: list): """ Add all the documents associated with the `edges` to the current selection @@ -82,12 +92,7 @@ class VectorSelection(object): window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered """ - def expand_documents_by_similarity( - self, - query: str | list, - limit, - window: Optional[Tuple[int | str, int | str]] = None, - ): + def expand_documents_by_similarity(self, query: str | list, limit, window: Optional[Tuple[int | str, int | str]] = None): """ Add the top `limit` adjacent documents with higher score for `query` to the selection @@ -105,12 +110,7 @@ class VectorSelection(object): window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered """ - def expand_edges_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ): + def expand_edges_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None): """ Add the top `limit` adjacent edges with higher score for `query` to the selection @@ -122,12 +122,7 @@ class VectorSelection(object): window (Tuple[int | str, int | str], optional): the window where documents need to belong to in order to be considered """ - def expand_entities_by_similarity( - self, - query: str | list, - limit, - window: Optional[Tuple[int | str, int | str]] = None, - ): + def expand_entities_by_similarity(self, query: str | list, limit, window: Optional[Tuple[int | str, int | str]] = None): """ Add the top `limit` adjacent entities with higher score for `query` to the selection @@ -145,12 +140,7 @@ class VectorSelection(object): window (Tuple[int | str, int | str], optional): the window where documents need to belong to 
in order to be considered """ - def expand_nodes_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ): + def expand_nodes_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None): """ Add the top `limit` adjacent nodes with higher score for `query` to the selection @@ -171,13 +161,9 @@ class VectorSelection(object): def nodes(self): """Return the nodes present in the current selection""" -class VectorisedGraph(object): - def documents_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> VectorSelection: +class VectorisedGraph(object): + + def documents_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: """ Search the top scoring documents according to `query` with no more than `limit` documents @@ -190,12 +176,7 @@ class VectorisedGraph(object): VectorSelection: The vector selection resulting from the search """ - def edges_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> VectorSelection: + def edges_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: """ Search the top scoring edges according to `query` with no more than `limit` edges @@ -211,12 +192,7 @@ class VectorisedGraph(object): def empty_selection(self): """Return an empty selection of documents""" - def entities_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> VectorSelection: + def entities_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: """ Search the top scoring entities according to `query` with no more than `limit` entities @@ -229,12 +205,7 @@ class VectorisedGraph(object): VectorSelection: The vector selection resulting from the search """ - def nodes_by_similarity( - self, - query: str | list, - limit: int, - window: Optional[Tuple[int | str, int | str]] = None, - ) -> VectorSelection: + def nodes_by_similarity(self, query: str | list, limit: int, window: Optional[Tuple[int | str, int | str]] = None) -> VectorSelection: """ Search the top scoring nodes according to `query` with no more than `limit` nodes diff --git a/raphtory/src/python/graph/algorithm_result.rs b/raphtory/src/python/graph/algorithm_result.rs index 9de380102..52c08740a 100644 --- a/raphtory/src/python/graph/algorithm_result.rs +++ b/raphtory/src/python/graph/algorithm_result.rs @@ -58,7 +58,7 @@ macro_rules! py_algorithm_result_base { /// Returns a Dict containing all the nodes (as keys) and their corresponding values (values) or none. /// /// Returns: - /// A dict of nodes and their values + /// dict[Node, Any]: A dict of nodes and their values fn get_all( &self, ) -> std::collections::HashMap< @@ -68,20 +68,21 @@ macro_rules! py_algorithm_result_base { self.0.get_all() } - /// Returns a a list of all values + /// Get all values + /// + /// Returns: + /// list[Any]: the values for each node as a list fn get_all_values(&self) -> std::vec::Vec<$rustValue> { self.0.get_all_values().clone() } - /// Returns a formatted string representation of the algorithm. 
- fn to_string(&self) -> String { - self.0.repr() - } - /// Returns the value corresponding to the provided key /// /// Arguments: - /// key: The key of type `H` for which the value is to be retrieved. + /// key (InputNode): The node for which the value is to be retrieved. + /// + /// Returns: + /// Optional[Any]: The value for the node or `None` if the value does not exist. fn get(&self, key: $crate::python::utils::PyNodeRef) -> Option<$rustValue> { self.0.get(key).cloned() } @@ -89,7 +90,7 @@ macro_rules! py_algorithm_result_base { /// Returns a dict with node names and values /// /// Returns: - /// a dict with node names and values + /// dict[str, Any]: a dict with node names and values fn get_all_with_names(&self) -> std::collections::HashMap { self.0.get_all_with_names() } @@ -97,10 +98,10 @@ macro_rules! py_algorithm_result_base { /// Sorts by node id in ascending or descending order. /// /// Arguments: - /// reverse: If `true`, sorts the result in descending order; otherwise, sorts in ascending order. Defaults to True. + /// reverse (bool): If `true`, sorts the result in descending order; otherwise, sorts in ascending order. Defaults to True. /// /// Returns: - /// A sorted list of tuples containing node names and values. + /// list[Tuple[Node, Any]]: A sorted list of tuples containing nodes and values. #[pyo3(signature = (reverse=true))] fn sort_by_node( &self, @@ -160,7 +161,7 @@ macro_rules! py_algorithm_result_partial_ord { /// reverse (bool): If `true`, sorts the result in descending order, otherwise, sorts in ascending order. Defaults to True. /// /// Returns: - /// A sorted vector of tuples containing keys of type `H` and values of type `Y`. + /// list[Tuple[Node, Any]]: A sorted vector of tuples containing Nodes and values. #[pyo3(signature = (reverse=true))] fn sort_by_value( &self, @@ -181,7 +182,7 @@ macro_rules! py_algorithm_result_partial_ord { /// ascending order. /// /// Returns: - /// The function sort_by_node_name returns a vector of tuples. Each tuple contains a Node and value + /// list[Tuple[Node, Any]]: The function sort_by_node_name returns a vector of tuples. Each tuple contains a Node and value #[pyo3(signature = (reverse=true))] fn sort_by_node_name( &self, @@ -201,7 +202,7 @@ macro_rules! py_algorithm_result_partial_ord { /// reverse (bool): If `True`, retrieves the elements in descending order, otherwise, in ascending order. Defaults to True. /// /// Returns: - /// An Option containing a vector of tuples with keys of type `H` and values of type `Y`. + /// list[Tuple[Node, Any]]: List of tuples with keys of nodes and values of type `Y`. /// If percentage is true, the returned vector contains the top `k` percentage of elements. /// If percentage is false, the returned vector contains the top `k` elements. /// Returns None if the result is empty or if `k` is 0. @@ -218,7 +219,10 @@ macro_rules! py_algorithm_result_partial_ord { self.0.top_k(k, percentage, reverse) } - /// Returns a tuple of the min result with its key + /// Find node with minimum value + /// + /// Returns: + /// Tuple[Node, Any]: The node and minimum value. fn min( &self, ) -> Option<( @@ -228,7 +232,10 @@ macro_rules! py_algorithm_result_partial_ord { self.0.min() } - /// Returns a tuple of the max result with its key + /// Find node with maximum value + /// + /// Returns: + /// Tuple[Node, Any]: The node and maximum value. fn max( &self, ) -> Option<(
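Reviewer note (not part of the patch): the Returns sections added to algorithm_result.rs above now promise concrete shapes (Tuple[Node, Any], list[Tuple[Node, Any]], dict[str, Any]). The sketch below is illustrative only; `result` stands in for any AlgorithmResult produced by an algorithm, and the helper name is made up.

def check_result_shapes(result) -> None:
    """Illustrative only: mirrors the Returns annotations documented in this patch.

    Assumes `result` is a non-empty AlgorithmResult.
    """
    node, value = result.max()                    # Tuple[Node, Any]
    node, value = result.min()                    # Tuple[Node, Any]
    ordered = result.sort_by_value(reverse=True)  # list[Tuple[Node, Any]]
    named = result.get_all_with_names()           # dict[str, Any]
    top = result.top_k(3)                         # list[Tuple[Node, Any]], or None if empty or k == 0
    if top is not None:
        assert len(top) <= 3
    assert all(n.name in named for n, _ in ordered)
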