diff --git a/pometry-storage-private b/pometry-storage-private index 8ac845b0db..0d993ebd36 160000 --- a/pometry-storage-private +++ b/pometry-storage-private @@ -1 +1 @@ -Subproject commit 8ac845b0dbeee01db27ef6ecfafd22ab8ad0e262 +Subproject commit 0d993ebd36ba637903c53b61b8e868ff0af63ca0 diff --git a/python/python/raphtory/__init__.py b/python/python/raphtory/__init__.py index 494f6be6e4..a043b3a280 100644 --- a/python/python/raphtory/__init__.py +++ b/python/python/raphtory/__init__.py @@ -1,6 +1,7 @@ import sys from .raphtory import * +sys.modules["raphtory.node_state"] = node_state sys.modules["raphtory.algorithms"] = algorithms sys.modules["raphtory.graph_gen"] = graph_gen sys.modules["raphtory.graph_loader"] = graph_loader diff --git a/python/python/raphtory/__init__.pyi b/python/python/raphtory/__init__.pyi index d000b539d3..0f53ef53ec 100644 --- a/python/python/raphtory/__init__.pyi +++ b/python/python/raphtory/__init__.pyi @@ -9,7 +9,9 @@ from typing import * from raphtory import * +from raphtory.algorithms import * from raphtory.vectors import * +from raphtory.node_state import * from raphtory.graphql import * from raphtory.typing import * from datetime import datetime @@ -67,12 +69,12 @@ class AlgorithmResult(object): def min(self): """Returns a tuple of the min result with its key""" - def sort_by_node(self, reverse=True): + def sort_by_node(self, reverse: Any = True): """ Sorts by node id in ascending or descending order. Arguments: - `reverse`: If `true`, sorts the result in descending order; otherwise, sorts in ascending order. + reverse: If `true`, sorts the result in descending order; otherwise, sorts in ascending order. Defaults to True. Returns: A sorted list of tuples containing node names and values. @@ -84,7 +86,7 @@ class AlgorithmResult(object): value by the node name in either ascending or descending order. Arguments: - reverse (bool): A boolean value indicating whether the sorting should be done in reverse order or not. + reverse (bool): A boolean value indicating whether the sorting should be done in reverse order or not. Defaults to True. If reverse is true, the sorting will be done in descending order, otherwise it will be done in ascending order. @@ -97,7 +99,7 @@ class AlgorithmResult(object): Sorts the `AlgorithmResult` by its values in ascending or descending order. Arguments: - reverse (bool): If `true`, sorts the result in descending order; otherwise, sorts in ascending order. + reverse (bool): If `true`, sorts the result in descending order, otherwise, sorts in ascending order. Defaults to True. Returns: A sorted vector of tuples containing keys of type `H` and values of type `Y`. @@ -120,8 +122,8 @@ class AlgorithmResult(object): Arguments: k (int): The number of elements to retrieve. - percentage (bool): If `true`, the `k` parameter is treated as a percentage of total elements. - reverse (bool): If `true`, retrieves the elements in descending order; otherwise, in ascending order. + percentage (bool): If `True`, the `k` parameter is treated as a percentage of total elements. Defaults to False. + reverse (bool): If `True`, retrieves the elements in descending order, otherwise, in ascending order. Defaults to True. Returns: An Option containing a vector of tuples with keys of type `H` and values of type `Y`. @@ -274,7 +276,7 @@ class Edge(object): start (TimeInput): The start time of the window. Returns: - A Edge object. + Edge """ def at(self, time: TimeInput): @@ -285,7 +287,7 @@ class Edge(object): time (TimeInput): The time of the window. 
Returns: - A Edge object. + Edge """ def before(self, end: TimeInput): @@ -296,7 +298,7 @@ class Edge(object): end (TimeInput): The end time of the window. Returns: - A Edge object. + Edge """ @property @@ -359,7 +361,7 @@ class Edge(object): Gets the latest time that this Edge is valid. Returns: - The latest time that this Edge is valid or None if the Edge is valid for all times. + Optional[int]: The latest time that this Edge is valid or None if the Edge is valid for all times. """ @property @@ -368,7 +370,7 @@ class Edge(object): Gets the latest datetime that this Edge is valid Returns: - The latest datetime that this Edge is valid or None if the Edge is valid for all times. + Optional[Datetime]: The latest datetime that this Edge is valid or None if the Edge is valid for all times. """ def exclude_layer(self, name: str) -> Edge: @@ -432,8 +434,16 @@ class Edge(object): """Explodes an edge and returns all instances it had been updated as seperate edges""" def explode_layers(self): ... - def has_layer(self, name): - """Check if Edge has the layer `"name"`""" + def has_layer(self, name: str): + """ + Check if Edge has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ def history(self) -> List[int]: """ @@ -499,7 +509,7 @@ class Edge(object): Create a view of the Edge including all events at the latest time. Returns: - A Edge object. + Edge """ @property @@ -520,11 +530,14 @@ class Edge(object): int: The latest time of an edge """ - def layer(self, name) -> Edge: + def layer(self, name: str) -> Edge: """ Return a view of Edge containing the layer `"name"` Errors if the layer does not exist + Arguments: + name (str): then name of the layer. + Returns: Edge: The layered view """ @@ -594,7 +607,7 @@ class Edge(object): Arguments: end (TimeInput): the new end time of the window Returns: - A Edge object. + Edge """ def shrink_start(self, start: TimeInput): @@ -605,7 +618,7 @@ class Edge(object): start (TimeInput): the new start time of the window Returns: - A Edge object. + Edge """ def shrink_window(self, start: TimeInput, end: TimeInput): @@ -628,7 +641,7 @@ class Edge(object): time (TimeInput): The time of the window. Returns: - A Edge object. + Edge """ def snapshot_latest(self): @@ -638,7 +651,7 @@ class Edge(object): This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s Returns: - A Edge object. + Edge """ @property @@ -651,7 +664,7 @@ class Edge(object): Gets the start time for rolling and expanding windows for this Edge Returns: - The earliest time that this Edge is valid or None if the Edge is valid for all times. + Optional[int]: The earliest time that this Edge is valid or None if the Edge is valid for all times. """ @property @@ -660,7 +673,7 @@ class Edge(object): Gets the earliest datetime that this Edge is valid Returns: - The earliest datetime that this Edge is valid or None if the Edge is valid for all times. + Optional[Datetime]: The earliest datetime that this Edge is valid or None if the Edge is valid for all times. """ @property @@ -693,12 +706,17 @@ class Edge(object): end (TimeInput | None): The end time of the window (unbounded if `None`). Returns: - r A Edge object. 
+ r Edge """ @property def window_size(self): - """Get the window size (difference between start and end) for this Edge""" + """ + Get the window size (difference between start and end) for this Edge + + Returns: + Optional[int] + """ class Edges(object): """A list of edges that can be iterated over.""" @@ -723,7 +741,7 @@ class Edges(object): start (TimeInput): The start time of the window. Returns: - A Edges object. + Edges """ def at(self, time: TimeInput): @@ -734,7 +752,7 @@ class Edges(object): time (TimeInput): The time of the window. Returns: - A Edges object. + Edges """ def before(self, end: TimeInput): @@ -745,7 +763,7 @@ class Edges(object): end (TimeInput): The end time of the window. Returns: - A Edges object. + Edges """ def collect(self) -> list[Edge]: @@ -819,7 +837,7 @@ class Edges(object): Gets the latest time that this Edges is valid. Returns: - The latest time that this Edges is valid or None if the Edges is valid for all times. + Optional[int]: The latest time that this Edges is valid or None if the Edges is valid for all times. """ @property @@ -828,7 +846,7 @@ class Edges(object): Gets the latest datetime that this Edges is valid Returns: - The latest datetime that this Edges is valid or None if the Edges is valid for all times. + Optional[Datetime]: The latest datetime that this Edges is valid or None if the Edges is valid for all times. """ def exclude_layer(self, name: str) -> Edges: @@ -892,8 +910,16 @@ class Edges(object): """Explodes an edge and returns all instances it had been updated as seperate edges""" def explode_layers(self): ... - def has_layer(self, name): - """Check if Edges has the layer `"name"`""" + def has_layer(self, name: str): + """ + Check if Edges has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ def history(self): """ @@ -933,7 +959,7 @@ class Edges(object): Create a view of the Edges including all events at the latest time. Returns: - A Edges object. + Edges """ @property @@ -954,11 +980,14 @@ class Edges(object): Latest time of the edges. """ - def layer(self, name) -> Edges: + def layer(self, name: str) -> Edges: """ Return a view of Edges containing the layer `"name"` Errors if the layer does not exist + Arguments: + name (str): then name of the layer. + Returns: Edges: The layered view """ @@ -1023,7 +1052,7 @@ class Edges(object): Arguments: end (TimeInput): the new end time of the window Returns: - A Edges object. + Edges """ def shrink_start(self, start: TimeInput): @@ -1034,7 +1063,7 @@ class Edges(object): start (TimeInput): the new start time of the window Returns: - A Edges object. + Edges """ def shrink_window(self, start: TimeInput, end: TimeInput): @@ -1057,7 +1086,7 @@ class Edges(object): time (TimeInput): The time of the window. Returns: - A Edges object. + Edges """ def snapshot_latest(self): @@ -1067,7 +1096,7 @@ class Edges(object): This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s Returns: - A Edges object. + Edges """ @property @@ -1080,7 +1109,7 @@ class Edges(object): Gets the start time for rolling and expanding windows for this Edges Returns: - The earliest time that this Edges is valid or None if the Edges is valid for all times. + Optional[int]: The earliest time that this Edges is valid or None if the Edges is valid for all times. 
""" @property @@ -1089,7 +1118,7 @@ class Edges(object): Gets the earliest datetime that this Edges is valid Returns: - The earliest datetime that this Edges is valid or None if the Edges is valid for all times. + Optional[Datetime]: The earliest datetime that this Edges is valid or None if the Edges is valid for all times. """ @property @@ -1147,12 +1176,17 @@ class Edges(object): end (TimeInput | None): The end time of the window (unbounded if `None`). Returns: - r A Edges object. + r Edges """ @property def window_size(self): - """Get the window size (difference between start and end) for this Edges""" + """ + Get the window size (difference between start and end) for this Edges + + Returns: + Optional[int] + """ class Graph(GraphView): """ @@ -1162,7 +1196,7 @@ class Graph(GraphView): num_shards (int, optional): The number of locks to use in the storage to allow for multithreaded updates. """ - def __new__(self, num_shards: Optional[int] = None) -> Graph: + def __new__(cls, num_shards: Optional[int] = None) -> Graph: """Create and return a new object. See help(type) for accurate signature.""" def __reduce__(self): ... @@ -1743,7 +1777,7 @@ class GraphView(object): start (TimeInput): The start time of the window. Returns: - A GraphView object. + GraphView """ def at(self, time: TimeInput): @@ -1754,7 +1788,7 @@ class GraphView(object): time (TimeInput): The time of the window. Returns: - A GraphView object. + GraphView """ def before(self, end: TimeInput): @@ -1765,31 +1799,31 @@ class GraphView(object): end (TimeInput): The end time of the window. Returns: - A GraphView object. + GraphView """ - def count_edges(self): + def count_edges(self) -> int: """ Number of edges in the graph Returns: - the number of edges in the graph + int: the number of edges in the graph """ - def count_nodes(self): + def count_nodes(self) -> int: """ Number of nodes in the graph Returns: - the number of nodes in the graph + int: the number of nodes in the graph """ - def count_temporal_edges(self): + def count_temporal_edges(self) -> int: """ Number of edges in the graph Returns: - the number of temporal edges in the graph + int: the number of temporal edges in the graph """ def default_layer(self) -> GraphView: @@ -1805,7 +1839,7 @@ class GraphView(object): DateTime of earliest activity in the graph Returns: - the datetime of the earliest activity in the graph + Optional[Datetime]: the datetime of the earliest activity in the graph """ @property @@ -1814,10 +1848,10 @@ class GraphView(object): Timestamp of earliest activity in the graph Returns: - the timestamp of the earliest activity in the graph + Optional[int]: the timestamp of the earliest activity in the graph """ - def edge(self, src: str or int, dst: str or int): + def edge(self, src: str or int, dst: str or int) -> Optional[Edge]: """ Gets the edge with the specified source and destination nodes @@ -1826,7 +1860,7 @@ class GraphView(object): dst (str or int): the destination node id Returns: - the edge with the specified source and destination nodes, or None if the edge does not exist + Optional[Edge]: the edge with the specified source and destination nodes, or None if the edge does not exist """ @property @@ -1835,7 +1869,7 @@ class GraphView(object): Gets all edges in the graph Returns: - the edges in the graph + Edges: the edges in the graph """ @property @@ -1844,7 +1878,7 @@ class GraphView(object): Gets the latest time that this GraphView is valid. 
Returns: - The latest time that this GraphView is valid or None if the GraphView is valid for all times. + Optional[int]: The latest time that this GraphView is valid or None if the GraphView is valid for all times. """ @property @@ -1853,7 +1887,7 @@ class GraphView(object): Gets the latest datetime that this GraphView is valid Returns: - The latest datetime that this GraphView is valid or None if the GraphView is valid for all times. + Optional[Datetime]: The latest datetime that this GraphView is valid or None if the GraphView is valid for all times. """ def exclude_layer(self, name: str) -> GraphView: @@ -1880,15 +1914,15 @@ class GraphView(object): GraphView: The layered view """ - def exclude_nodes(self, nodes): + def exclude_nodes(self, nodes: list[InputNode]) -> GraphView: """ Returns a subgraph given a set of nodes that are excluded from the subgraph Arguments: - * `nodes`: set of nodes + nodes (list[InputNode]): set of nodes Returns: - GraphView - Returns the subgraph + GraphView: Returns the subgraph """ def exclude_valid_layer(self, name: str) -> GraphView: @@ -1960,25 +1994,25 @@ class GraphView(object): GraphView: The filtered view """ - def find_edges(self, properties_dict): + def find_edges(self, properties_dict) -> list[Edge]: """ Get the edges that match the properties name and value Arguments: - property_dict (dict): the properties name and value + property_dict (dict[str, Prop]): the properties name and value Returns: - the edges that match the properties name and value + list[Edge]: the edges that match the properties name and value """ - def find_nodes(self, properties_dict): + def find_nodes(self, properties_dict) -> list[Node]: """ Get the nodes that match the properties name and value Arguments: - property_dict (dict): the properties name and value + property_dict (dict[str, Prop]): the properties name and value Returns: - the nodes that match the properties name and value + list[Node]: the nodes that match the properties name and value """ - def has_edge(self, src: str or int, dst: str or int): + def has_edge(self, src: str or int, dst: str or int) -> bool: """ Returns true if the graph contains the specified edge @@ -1987,13 +2021,21 @@ class GraphView(object): dst (str or int): the destination node id Returns: - true if the graph contains the specified edge, false otherwise + bool: true if the graph contains the specified edge, false otherwise + """ + + def has_layer(self, name: str): """ + Check if GraphView has the layer `"name"` - def has_layer(self, name): - """Check if GraphView has the layer `"name"`""" + Arguments: + name (str): the name of the layer to check - def has_node(self, id: str or int): + Returns: + bool + """ + + def has_node(self, id: str or int) -> bool: """ Returns true if the graph contains the specified node @@ -2001,7 +2043,7 @@ class GraphView(object): id (str or int): the node id Returns: - true if the graph contains the specified node, false otherwise + bool: true if the graph contains the specified node, false otherwise """ def index(self): @@ -2019,7 +2061,7 @@ class GraphView(object): Create a view of the GraphView including all events at the latest time. Returns: - A GraphView object. 
+ GraphView """ @property @@ -2028,7 +2070,7 @@ class GraphView(object): DateTime of latest activity in the graph Returns: - the datetime of the latest activity in the graph + Optional[Datetime]: the datetime of the latest activity in the graph """ @property @@ -2037,14 +2079,17 @@ class GraphView(object): Timestamp of latest activity in the graph Returns: - the timestamp of the latest activity in the graph + Optional[int]: the timestamp of the latest activity in the graph """ - def layer(self, name) -> GraphView: + def layer(self, name: str) -> GraphView: """ Return a view of GraphView containing the layer `"name"` Errors if the layer does not exist + Arguments: + name (str): then name of the layer. + Returns: GraphView: The layered view """ @@ -2061,15 +2106,15 @@ class GraphView(object): GraphView: The layered view """ - def materialize(self): + def materialize(self) -> GraphView: """ Returns a 'materialized' clone of the graph view - i.e. a new graph with a copy of the data seen within the view instead of just a mask over the original graph Returns: - GraphView - Returns a graph clone + GraphView: Returns a graph clone """ - def node(self, id: str or int): + def node(self, id: str or int) -> Optional[Node]: """ Gets the node with the specified id @@ -2077,7 +2122,7 @@ class GraphView(object): id (str or int): the node id Returns: - the node with the specified id, or None if the node does not exist + Optional[Node]: the node with the specified id, or None if the node does not exist """ @property @@ -2086,7 +2131,7 @@ class GraphView(object): Gets the nodes in the graph Returns: - the nodes in the graph + Nodes: the nodes in the graph """ @property @@ -2096,7 +2141,7 @@ class GraphView(object): Returns: - HashMap - Properties paired with their names + Properties: Properties paired with their names """ def rolling(self, window: int | str, step: int | str | None = None) -> WindowSet: @@ -2121,7 +2166,7 @@ class GraphView(object): Arguments: end (TimeInput): the new end time of the window Returns: - A GraphView object. + GraphView """ def shrink_start(self, start: TimeInput): @@ -2132,7 +2177,7 @@ class GraphView(object): start (TimeInput): the new start time of the window Returns: - A GraphView object. + GraphView """ def shrink_window(self, start: TimeInput, end: TimeInput): @@ -2155,7 +2200,7 @@ class GraphView(object): time (TimeInput): The time of the window. Returns: - A GraphView object. + GraphView """ def snapshot_latest(self): @@ -2165,7 +2210,7 @@ class GraphView(object): This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s Returns: - A GraphView object. + GraphView """ @property @@ -2174,7 +2219,7 @@ class GraphView(object): Gets the start time for rolling and expanding windows for this GraphView Returns: - The earliest time that this GraphView is valid or None if the GraphView is valid for all times. + Optional[int]: The earliest time that this GraphView is valid or None if the GraphView is valid for all times. """ @property @@ -2183,29 +2228,29 @@ class GraphView(object): Gets the earliest datetime that this GraphView is valid Returns: - The earliest datetime that this GraphView is valid or None if the GraphView is valid for all times. + Optional[Datetime]: The earliest datetime that this GraphView is valid or None if the GraphView is valid for all times. 
""" - def subgraph(self, nodes): + def subgraph(self, nodes: list[InputNode]) -> GraphView: """ Returns a subgraph given a set of nodes Arguments: - * `nodes`: set of nodes + nodes (list[InputNode]): set of nodes Returns: - GraphView - Returns the subgraph + GraphView: Returns the subgraph """ - def subgraph_node_types(self, node_types): + def subgraph_node_types(self, node_types: list[str]) -> GraphView: """ Returns a subgraph filtered by node types given a set of node types Arguments: - * `node_types`: set of node types + node_types (list[str]): set of node types Returns: - GraphView - Returns the subgraph + GraphView: Returns the subgraph """ def to_networkx( @@ -2272,7 +2317,12 @@ class GraphView(object): @property def unique_layers(self): - """Return all the layer ids in the graph""" + """ + Return all the layer ids in the graph + + Returns: + list[str] + """ def valid_layers(self, names: list[str]) -> GraphView: """ @@ -2322,12 +2372,17 @@ class GraphView(object): end (TimeInput | None): The end time of the window (unbounded if `None`). Returns: - r A GraphView object. + r GraphView """ @property def window_size(self): - """Get the window size (difference between start and end) for this GraphView""" + """ + Get the window size (difference between start and end) for this GraphView + + Returns: + Optional[int] + """ class MutableEdge(Edge): def __repr__(self): @@ -2398,7 +2453,7 @@ class MutableNode(Node): properties (PropInput): A dictionary of properties to be added to the node. Each key is a string representing the property name, and each value is of type Prop representing the property value. """ - def add_updates(self, t: TimeInput, properties: PropInput = None) -> Result: + def add_updates(self, t: TimeInput, properties: PropInput = None): """ Add updates to a node in the graph at a specified time. This function allows for the addition of property updates to a node within the graph. The updates are time-stamped, meaning they are applied at the specified time. @@ -2406,9 +2461,6 @@ class MutableNode(Node): Parameters: t (TimeInput): The timestamp at which the updates should be applied. properties (PropInput): A dictionary of properties to update. Each key is a string representing the property name, and each value is of type Prop representing the property value. If None, no properties are updated. - - Returns: - Result: A result object indicating success or failure. On failure, it contains a GraphError. """ def set_node_type(self, new_type: str): @@ -2468,7 +2520,7 @@ class Node(object): start (TimeInput): The start time of the window. Returns: - A Node object. + Node """ def at(self, time: TimeInput): @@ -2479,7 +2531,7 @@ class Node(object): time (TimeInput): The time of the window. Returns: - A Node object. + Node """ def before(self, end: TimeInput): @@ -2490,7 +2542,7 @@ class Node(object): end (TimeInput): The end time of the window. Returns: - A Node object. + Node """ def default_layer(self) -> Node: @@ -2542,7 +2594,7 @@ class Node(object): Gets the latest time that this Node is valid. Returns: - The latest time that this Node is valid or None if the Node is valid for all times. + Optional[int]: The latest time that this Node is valid or None if the Node is valid for all times. """ @property @@ -2551,7 +2603,7 @@ class Node(object): Gets the latest datetime that this Node is valid Returns: - The latest datetime that this Node is valid or None if the Node is valid for all times. 
+ Optional[Datetime]: The latest datetime that this Node is valid or None if the Node is valid for all times. """ def exclude_layer(self, name: str) -> Node: @@ -2647,8 +2699,16 @@ class Node(object): Node: The filtered view """ - def has_layer(self, name): - """Check if Node has the layer `"name"`""" + def has_layer(self, name: str): + """ + Check if Node has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ def history(self) -> List[int]: """ @@ -2658,12 +2718,12 @@ class Node(object): List[int]: A list of unix timestamps of the event history of node. """ - def history_date_time(self) -> List[Datetime]: + def history_date_time(self) -> List[datetime]: """ Returns the history of a node, including node additions and changes made to node. Returns: - List[Datetime]: A list of timestamps of the event history of node. + List[datetime]: A list of timestamps of the event history of node. """ @@ -2711,7 +2771,7 @@ class Node(object): Create a view of the Node including all events at the latest time. Returns: - A Node object. + Node """ @property @@ -2735,11 +2795,14 @@ class Node(object): int: The latest time that the node exists as an integer. """ - def layer(self, name) -> Node: + def layer(self, name: str) -> Node: """ Return a view of Node containing the layer `"name"` Errors if the layer does not exist + Arguments: + name (str): then name of the layer. + Returns: Node: The layered view """ @@ -2838,7 +2901,7 @@ class Node(object): Arguments: end (TimeInput): the new end time of the window Returns: - A Node object. + Node """ def shrink_start(self, start: TimeInput): @@ -2849,7 +2912,7 @@ class Node(object): start (TimeInput): the new start time of the window Returns: - A Node object. + Node """ def shrink_window(self, start: TimeInput, end: TimeInput): @@ -2872,7 +2935,7 @@ class Node(object): time (TimeInput): The time of the window. Returns: - A Node object. + Node """ def snapshot_latest(self): @@ -2882,7 +2945,7 @@ class Node(object): This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s Returns: - A Node object. + Node """ @property @@ -2891,7 +2954,7 @@ class Node(object): Gets the start time for rolling and expanding windows for this Node Returns: - The earliest time that this Node is valid or None if the Node is valid for all times. + Optional[int]: The earliest time that this Node is valid or None if the Node is valid for all times. """ @property @@ -2900,7 +2963,7 @@ class Node(object): Gets the earliest datetime that this Node is valid Returns: - The earliest datetime that this Node is valid or None if the Node is valid for all times. + Optional[Datetime]: The earliest datetime that this Node is valid or None if the Node is valid for all times. """ def valid_layers(self, names: list[str]) -> Node: @@ -2924,12 +2987,17 @@ class Node(object): end (TimeInput | None): The end time of the window (unbounded if `None`). Returns: - r A Node object. + r Node """ @property def window_size(self): - """Get the window size (difference between start and end) for this Node""" + """ + Get the window size (difference between start and end) for this Node + + Returns: + Optional[int] + """ class Nodes(object): """A list of nodes that can be iterated over.""" @@ -2975,7 +3043,7 @@ class Nodes(object): start (TimeInput): The start time of the window. Returns: - A Nodes object. + Nodes """ def at(self, time: TimeInput): @@ -2986,7 +3054,7 @@ class Nodes(object): time (TimeInput): The time of the window. 
Returns: - A Nodes object. + Nodes """ def before(self, end: TimeInput): @@ -2997,7 +3065,7 @@ class Nodes(object): end (TimeInput): The end time of the window. Returns: - A Nodes object. + Nodes """ def collect(self) -> list[Node]: @@ -3052,7 +3120,7 @@ class Nodes(object): Gets the latest time that this Nodes is valid. Returns: - The latest time that this Nodes is valid or None if the Nodes is valid for all times. + Optional[int]: The latest time that this Nodes is valid or None if the Nodes is valid for all times. """ @property @@ -3061,7 +3129,7 @@ class Nodes(object): Gets the latest datetime that this Nodes is valid Returns: - The latest datetime that this Nodes is valid or None if the Nodes is valid for all times. + Optional[Datetime]: The latest datetime that this Nodes is valid or None if the Nodes is valid for all times. """ def exclude_layer(self, name: str) -> Nodes: @@ -3157,8 +3225,16 @@ class Nodes(object): Nodes: The filtered view """ - def has_layer(self, name): - """Check if Nodes has the layer `"name"`""" + def has_layer(self, name: str): + """ + Check if Nodes has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ def history(self): """ @@ -3215,7 +3291,7 @@ class Nodes(object): Create a view of the Nodes including all events at the latest time. Returns: - A Nodes object. + Nodes """ @property @@ -3231,11 +3307,14 @@ class Nodes(object): def latest_time(self): """Returns an iterator over the nodes latest time""" - def layer(self, name) -> Nodes: + def layer(self, name: str) -> Nodes: """ Return a view of Nodes containing the layer `"name"` Errors if the layer does not exist + Arguments: + name (str): then name of the layer. + Returns: Nodes: The layered view """ @@ -3329,7 +3408,7 @@ class Nodes(object): Arguments: end (TimeInput): the new end time of the window Returns: - A Nodes object. + Nodes """ def shrink_start(self, start: TimeInput): @@ -3340,7 +3419,7 @@ class Nodes(object): start (TimeInput): the new start time of the window Returns: - A Nodes object. + Nodes """ def shrink_window(self, start: TimeInput, end: TimeInput): @@ -3363,7 +3442,7 @@ class Nodes(object): time (TimeInput): The time of the window. Returns: - A Nodes object. + Nodes """ def snapshot_latest(self): @@ -3373,7 +3452,7 @@ class Nodes(object): This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s Returns: - A Nodes object. + Nodes """ @property @@ -3382,7 +3461,7 @@ class Nodes(object): Gets the start time for rolling and expanding windows for this Nodes Returns: - The earliest time that this Nodes is valid or None if the Nodes is valid for all times. + Optional[int]: The earliest time that this Nodes is valid or None if the Nodes is valid for all times. """ @property @@ -3391,7 +3470,7 @@ class Nodes(object): Gets the earliest datetime that this Nodes is valid Returns: - The earliest datetime that this Nodes is valid or None if the Nodes is valid for all times. + Optional[Datetime]: The earliest datetime that this Nodes is valid or None if the Nodes is valid for all times. """ def to_df( @@ -3435,17 +3514,22 @@ class Nodes(object): end (TimeInput | None): The end time of the window (unbounded if `None`). Returns: - r A Nodes object. 
+ r Nodes """ @property def window_size(self): - """Get the window size (difference between start and end) for this Nodes""" + """ + Get the window size (difference between start and end) for this Nodes + + Returns: + Optional[int] + """ class PersistentGraph(GraphView): """A temporal graph that allows edges and nodes to be deleted.""" - def __new__(self) -> PersistentGraph: + def __new__(cls) -> PersistentGraph: """Create and return a new object. See help(type) for accurate signature.""" def __reduce__(self): ... @@ -4048,7 +4132,7 @@ class Prop(object): def __ne__(self, value): """Return self!=value.""" - def __new__(self, name) -> Prop: + def __new__(cls, name) -> Prop: """Create and return a new object. See help(type) for accurate signature.""" def any(self, values): @@ -4142,7 +4226,7 @@ class PyGraphEncoder(object): """Call self as a function.""" def __getstate__(self): ... - def __new__(self) -> PyGraphEncoder: + def __new__(cls) -> PyGraphEncoder: """Create and return a new object. See help(type) for accurate signature.""" def __setstate__(self): ... diff --git a/python/python/raphtory/algorithms/__init__.pyi b/python/python/raphtory/algorithms/__init__.pyi index 9ec8c261da..c98f4dbbbc 100644 --- a/python/python/raphtory/algorithms/__init__.pyi +++ b/python/python/raphtory/algorithms/__init__.pyi @@ -9,7 +9,9 @@ from typing import * from raphtory import * +from raphtory.algorithms import * from raphtory.vectors import * +from raphtory.node_state import * from raphtory.graphql import * from raphtory.typing import * from datetime import datetime @@ -651,7 +653,7 @@ def strongly_connected_components(g: GraphView): def temporal_SEIR( graph: GraphView, - seeds: int | float | list[Node], + seeds: int | float | list[InputNode], infection_prob: float, initial_infection: int | str | datetime, recovery_rate: float | None = None, @@ -665,8 +667,8 @@ def temporal_SEIR( Arguments: graph (GraphView): the graph view - seeds (int | float | list[Node]): the seeding strategy to use for the initial infection (if `int`, choose fixed number - of nodes at random, if `float` infect each node with this probability, if `[Node]` + seeds (int | float | list[InputNode]): the seeding strategy to use for the initial infection (if `int`, choose fixed number + of nodes at random, if `float` infect each node with this probability, if `list` initially infect the specified nodes infection_prob (float): the probability for a contact between infected and susceptible nodes to lead to a transmission diff --git a/python/python/raphtory/graph_gen/__init__.pyi b/python/python/raphtory/graph_gen/__init__.pyi index 43456b0213..b532d6fbd7 100644 --- a/python/python/raphtory/graph_gen/__init__.pyi +++ b/python/python/raphtory/graph_gen/__init__.pyi @@ -9,7 +9,9 @@ from typing import * from raphtory import * +from raphtory.algorithms import * from raphtory.vectors import * +from raphtory.node_state import * from raphtory.graphql import * from raphtory.typing import * from datetime import datetime diff --git a/python/python/raphtory/graph_loader/__init__.pyi b/python/python/raphtory/graph_loader/__init__.pyi index 99a7ba12d1..f16cb055d3 100644 --- a/python/python/raphtory/graph_loader/__init__.pyi +++ b/python/python/raphtory/graph_loader/__init__.pyi @@ -9,7 +9,9 @@ from typing import * from raphtory import * +from raphtory.algorithms import * from raphtory.vectors import * +from raphtory.node_state import * from raphtory.graphql import * from raphtory.typing import * from datetime import datetime diff --git 
a/python/python/raphtory/graphql/__init__.pyi b/python/python/raphtory/graphql/__init__.pyi index c66635a92c..4852e73e93 100644 --- a/python/python/raphtory/graphql/__init__.pyi +++ b/python/python/raphtory/graphql/__init__.pyi @@ -9,7 +9,9 @@ from typing import * from raphtory import * +from raphtory.algorithms import * from raphtory.vectors import * +from raphtory.node_state import * from raphtory.graphql import * from raphtory.typing import * from datetime import datetime @@ -19,7 +21,7 @@ class GraphServer(object): """A class for defining and running a Raphtory GraphQL server""" def __new__( - self, + cls, work_dir, cache_capacity=None, cache_tti_seconds=None, @@ -147,7 +149,7 @@ class GraphqlGraphs(object): class RaphtoryClient(object): """A client for handling GraphQL operations in the context of Raphtory.""" - def __new__(self, url) -> RaphtoryClient: + def __new__(cls, url) -> RaphtoryClient: """Create and return a new object. See help(type) for accurate signature.""" def copy_graph(self, path, new_path): @@ -268,7 +270,7 @@ class RaphtoryClient(object): """ class RemoteEdge(object): - def __new__(self, path, client, src, dst) -> RemoteEdge: + def __new__(cls, path, client, src, dst) -> RemoteEdge: """Create and return a new object. See help(type) for accurate signature.""" def add_constant_properties( @@ -324,7 +326,7 @@ class RemoteEdge(object): class RemoteEdgeAddition(object): def __new__( - self, src, dst, layer=None, constant_properties=None, updates=None + cls, src, dst, layer=None, constant_properties=None, updates=None ) -> RemoteEdgeAddition: """Create and return a new object. See help(type) for accurate signature.""" @@ -474,7 +476,7 @@ class RemoteGraph(object): """ class RemoteNode(object): - def __new__(self, path, client, id) -> RemoteNode: + def __new__(cls, path, client, id) -> RemoteNode: """Create and return a new object. See help(type) for accurate signature.""" def add_constant_properties(self, properties: Dict[str, Prop]): @@ -520,12 +522,12 @@ class RemoteNode(object): class RemoteNodeAddition(object): def __new__( - self, name, node_type=None, constant_properties=None, updates=None + cls, name, node_type=None, constant_properties=None, updates=None ) -> RemoteNodeAddition: """Create and return a new object. See help(type) for accurate signature.""" class RemoteUpdate(object): - def __new__(self, time, properties=None) -> RemoteUpdate: + def __new__(cls, time, properties=None) -> RemoteUpdate: """Create and return a new object. See help(type) for accurate signature.""" class RunningGraphServer(object): diff --git a/python/python/raphtory/node_state/__init__.pyi b/python/python/raphtory/node_state/__init__.pyi new file mode 100644 index 0000000000..27e6f9bc80 --- /dev/null +++ b/python/python/raphtory/node_state/__init__.pyi @@ -0,0 +1,4852 @@ +############################################################################### +# # +# AUTOGENERATED TYPE STUB FILE # +# # +# This file was automatically generated. Do not modify it directly. # +# Any changes made here may be lost when the file is regenerated. 
# +# # +############################################################################### + +from typing import * +from raphtory import * +from raphtory.algorithms import * +from raphtory.vectors import * +from raphtory.node_state import * +from raphtory.graphql import * +from raphtory.typing import * +from datetime import datetime +from pandas import DataFrame + +class DegreeView(object): + """A lazy view over node values""" + + def __eq__(self, value): + """Return self==value.""" + + def __ge__(self, value): + """Return self>=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self DegreeView: + """ + Return a view of DegreeView containing only the default edge layer + Returns: + DegreeView: The layered view + """ + + @property + def end(self): + """ + Gets the latest time that this DegreeView is valid. + + Returns: + Optional[int]: The latest time that this DegreeView is valid or None if the DegreeView is valid for all times. + """ + + @property + def end_date_time(self): + """ + Gets the latest datetime that this DegreeView is valid + + Returns: + Optional[Datetime]: The latest datetime that this DegreeView is valid or None if the DegreeView is valid for all times. + """ + + def exclude_layer(self, name: str) -> DegreeView: + """ + Return a view of DegreeView containing all layers except the excluded `name` + Errors if any of the layers do not exist. + + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + DegreeView: The layered view + """ + + def exclude_layers(self, names: list[str]) -> DegreeView: + """ + Return a view of DegreeView containing all layers except the excluded `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + DegreeView: The layered view + """ + + def exclude_valid_layer(self, name: str) -> DegreeView: + """ + Return a view of DegreeView containing all layers except the excluded `name` + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + DegreeView: The layered view + """ + + def exclude_valid_layers(self, names: list[str]) -> DegreeView: + """ + Return a view of DegreeView containing all layers except the excluded `names` + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + DegreeView: The layered view + """ + + def expanding(self, step: int | str) -> WindowSet: + """ + Creates a `WindowSet` with the given `step` size using an expanding window. + + An expanding window is a window that grows by `step` size at each iteration. + + Arguments: + step (int | str): The step size of the window. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def has_layer(self, name: str): + """ + Check if DegreeView has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ + + def items(self): + """ + Returns: + Iterator[Tuple[Node, int]] + """ + + def latest(self): + """ + Create a view of the DegreeView including all events at the latest time. 
+ + Returns: + DegreeView + """ + + def layer(self, name: str) -> DegreeView: + """ + Return a view of DegreeView containing the layer `"name"` + Errors if the layer does not exist + + Arguments: + name (str): then name of the layer. + + Returns: + DegreeView: The layered view + """ + + def layers(self, names: list[str]) -> DegreeView: + """ + Return a view of DegreeView containing all layers `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + DegreeView: The layered view + """ + + def max(self): + """ + Return the maximum value + + Returns: + Optional[int] + """ + + def max_item(self): + """ + Return largest value and corresponding node + + Returns: + Optional[Tuple[Node, int]] + """ + + def mean(self): + """ + mean of values over all nodes + + Returns: + float + """ + + def median(self): + """ + Return the median value + + Returns: + Optional[int] + """ + + def median_item(self): + """ + Return medain value and corresponding node + + Returns: + Optional[Tuple[Node, int]] + """ + + def min(self): + """ + Return the minimum value + + Returns: + Optional[int] + """ + + def min_item(self): + """ + Return smallest value and corresponding node + + Returns: + Optional[Tuple[Node, int]] + """ + + def nodes(self): + """ + Iterate over nodes + + Returns: + Iterator[Node] + """ + + def rolling(self, window: int | str, step: int | str | None = None) -> WindowSet: + """ + Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. + + A rolling window is a window that moves forward by `step` size at each iteration. + + Arguments: + window (int | str): The size of the window. + step (int | str | None): The step size of the window. + `step` defaults to `window`. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def shrink_end(self, end: TimeInput): + """ + Set the end of the window to the smaller of `end` and `self.end()` + + Arguments: + end (TimeInput): the new end time of the window + Returns: + DegreeView + """ + + def shrink_start(self, start: TimeInput): + """ + Set the start of the window to the larger of `start` and `self.start()` + + Arguments: + start (TimeInput): the new start time of the window + + Returns: + DegreeView + """ + + def shrink_window(self, start: TimeInput, end: TimeInput): + """ + Shrink both the start and end of the window (same as calling `shrink_start` followed by `shrink_end` but more efficient) + + Arguments: + start (TimeInput): the new start time for the window + end (TimeInput): the new end time for the window + + """ + + def snapshot_at(self, time: TimeInput): + """ + Create a view of the DegreeView including all events that have not been explicitly deleted at `time`. + + This is equivalent to `before(time + 1)` for `EventGraph`s and `at(time)` for `PersitentGraph`s + + Arguments: + time (TimeInput): The time of the window. + + Returns: + DegreeView + """ + + def snapshot_latest(self): + """ + Create a view of the DegreeView including all events that have not been explicitly deleted at the latest time. + + This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s + + Returns: + DegreeView + """ + + def sorted(self, reverse: bool = False): + """ + Sort by value + + Arguments: + reverse (bool): If `True`, sort in descending order, otherwise ascending. Defaults to False. 
+ + Returns: + NodeStateUsize + """ + + def sorted_by_id(self): + """ + Sort results by node id + + Returns: + NodeStateUsize + """ + + @property + def start(self): + """ + Gets the start time for rolling and expanding windows for this DegreeView + + Returns: + Optional[int]: The earliest time that this DegreeView is valid or None if the DegreeView is valid for all times. + """ + + @property + def start_date_time(self): + """ + Gets the earliest datetime that this DegreeView is valid + + Returns: + Optional[Datetime]: The earliest datetime that this DegreeView is valid or None if the DegreeView is valid for all times. + """ + + def sum(self): + """ + sum of values over all nodes + + Returns: + int + """ + + def top_k(self, k: int): + """ + Compute the k largest values + + Arguments: + k (int): The number of values to return + + Returns: + NodeStateUsize + """ + + def valid_layers(self, names: list[str]) -> DegreeView: + """ + Return a view of DegreeView containing all layers `names` + Any layers that do not exist are ignored + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + DegreeView: The layered view + """ + + def values(self): + """ + Returns: + Iterator[int] + """ + + def window(self, start: TimeInput | None, end: TimeInput | None): + """ + Create a view of the DegreeView including all events between `start` (inclusive) and `end` (exclusive) + + Arguments: + start (TimeInput | None): The start time of the window (unbounded if `None`). + end (TimeInput | None): The end time of the window (unbounded if `None`). + + Returns: + r DegreeView + """ + + @property + def window_size(self): + """ + Get the window size (difference between start and end) for this DegreeView + + Returns: + Optional[int] + """ + +class EarliestDateTimeView(object): + """A lazy view over node values""" + + def __eq__(self, value): + """Return self==value.""" + + def __ge__(self, value): + """Return self>=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self EarliestDateTimeView: + """ + Return a view of EarliestDateTimeView containing only the default edge layer + Returns: + EarliestDateTimeView: The layered view + """ + + @property + def end(self): + """ + Gets the latest time that this EarliestDateTimeView is valid. + + Returns: + Optional[int]: The latest time that this EarliestDateTimeView is valid or None if the EarliestDateTimeView is valid for all times. + """ + + @property + def end_date_time(self): + """ + Gets the latest datetime that this EarliestDateTimeView is valid + + Returns: + Optional[Datetime]: The latest datetime that this EarliestDateTimeView is valid or None if the EarliestDateTimeView is valid for all times. + """ + + def exclude_layer(self, name: str) -> EarliestDateTimeView: + """ + Return a view of EarliestDateTimeView containing all layers except the excluded `name` + Errors if any of the layers do not exist. + + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + EarliestDateTimeView: The layered view + """ + + def exclude_layers(self, names: list[str]) -> EarliestDateTimeView: + """ + Return a view of EarliestDateTimeView containing all layers except the excluded `names` + Errors if any of the layers do not exist. 
+ + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + EarliestDateTimeView: The layered view + """ + + def exclude_valid_layer(self, name: str) -> EarliestDateTimeView: + """ + Return a view of EarliestDateTimeView containing all layers except the excluded `name` + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + EarliestDateTimeView: The layered view + """ + + def exclude_valid_layers(self, names: list[str]) -> EarliestDateTimeView: + """ + Return a view of EarliestDateTimeView containing all layers except the excluded `names` + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + EarliestDateTimeView: The layered view + """ + + def expanding(self, step: int | str) -> WindowSet: + """ + Creates a `WindowSet` with the given `step` size using an expanding window. + + An expanding window is a window that grows by `step` size at each iteration. + + Arguments: + step (int | str): The step size of the window. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def has_layer(self, name: str): + """ + Check if EarliestDateTimeView has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ + + def items(self): + """ + Returns: + Iterator[Tuple[Node, Optional[Datetime]]] + """ + + def latest(self): + """ + Create a view of the EarliestDateTimeView including all events at the latest time. + + Returns: + EarliestDateTimeView + """ + + def layer(self, name: str) -> EarliestDateTimeView: + """ + Return a view of EarliestDateTimeView containing the layer `"name"` + Errors if the layer does not exist + + Arguments: + name (str): then name of the layer. + + Returns: + EarliestDateTimeView: The layered view + """ + + def layers(self, names: list[str]) -> EarliestDateTimeView: + """ + Return a view of EarliestDateTimeView containing all layers `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + EarliestDateTimeView: The layered view + """ + + def max(self): + """ + Return the maximum value + + Returns: + Optional[Optional[Datetime]] + """ + + def max_item(self): + """ + Return largest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[Datetime]]] + """ + + def median(self): + """ + Return the median value + + Returns: + Optional[Optional[Datetime]] + """ + + def median_item(self): + """ + Return medain value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[Datetime]]] + """ + + def min(self): + """ + Return the minimum value + + Returns: + Optional[Optional[Datetime]] + """ + + def min_item(self): + """ + Return smallest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[Datetime]]] + """ + + def nodes(self): + """ + Iterate over nodes + + Returns: + Iterator[Node] + """ + + def rolling(self, window: int | str, step: int | str | None = None) -> WindowSet: + """ + Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. + + A rolling window is a window that moves forward by `step` size at each iteration. + + Arguments: + window (int | str): The size of the window. + step (int | str | None): The step size of the window. + `step` defaults to `window`. + + Returns: + WindowSet: A `WindowSet` object. 
+ """ + + def shrink_end(self, end: TimeInput): + """ + Set the end of the window to the smaller of `end` and `self.end()` + + Arguments: + end (TimeInput): the new end time of the window + Returns: + EarliestDateTimeView + """ + + def shrink_start(self, start: TimeInput): + """ + Set the start of the window to the larger of `start` and `self.start()` + + Arguments: + start (TimeInput): the new start time of the window + + Returns: + EarliestDateTimeView + """ + + def shrink_window(self, start: TimeInput, end: TimeInput): + """ + Shrink both the start and end of the window (same as calling `shrink_start` followed by `shrink_end` but more efficient) + + Arguments: + start (TimeInput): the new start time for the window + end (TimeInput): the new end time for the window + + """ + + def snapshot_at(self, time: TimeInput): + """ + Create a view of the EarliestDateTimeView including all events that have not been explicitly deleted at `time`. + + This is equivalent to `before(time + 1)` for `EventGraph`s and `at(time)` for `PersitentGraph`s + + Arguments: + time (TimeInput): The time of the window. + + Returns: + EarliestDateTimeView + """ + + def snapshot_latest(self): + """ + Create a view of the EarliestDateTimeView including all events that have not been explicitly deleted at the latest time. + + This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s + + Returns: + EarliestDateTimeView + """ + + def sorted(self, reverse: bool = False): + """ + Sort by value + + Arguments: + reverse (bool): If `True`, sort in descending order, otherwise ascending. Defaults to False. + + Returns: + NodeStateOptionDateTime + """ + + def sorted_by_id(self): + """ + Sort results by node id + + Returns: + NodeStateOptionDateTime + """ + + @property + def start(self): + """ + Gets the start time for rolling and expanding windows for this EarliestDateTimeView + + Returns: + Optional[int]: The earliest time that this EarliestDateTimeView is valid or None if the EarliestDateTimeView is valid for all times. + """ + + @property + def start_date_time(self): + """ + Gets the earliest datetime that this EarliestDateTimeView is valid + + Returns: + Optional[Datetime]: The earliest datetime that this EarliestDateTimeView is valid or None if the EarliestDateTimeView is valid for all times. + """ + + def top_k(self, k: int): + """ + Compute the k largest values + + Arguments: + k (int): The number of values to return + + Returns: + NodeStateOptionDateTime + """ + + def valid_layers(self, names: list[str]) -> EarliestDateTimeView: + """ + Return a view of EarliestDateTimeView containing all layers `names` + Any layers that do not exist are ignored + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + EarliestDateTimeView: The layered view + """ + + def values(self): + """ + Returns: + Iterator[Optional[Datetime]] + """ + + def window(self, start: TimeInput | None, end: TimeInput | None): + """ + Create a view of the EarliestDateTimeView including all events between `start` (inclusive) and `end` (exclusive) + + Arguments: + start (TimeInput | None): The start time of the window (unbounded if `None`). + end (TimeInput | None): The end time of the window (unbounded if `None`). 
+ + Returns: + r EarliestDateTimeView + """ + + @property + def window_size(self): + """ + Get the window size (difference between start and end) for this EarliestDateTimeView + + Returns: + Optional[int] + """ + +class EarliestTimeView(object): + """A lazy view over node values""" + + def __eq__(self, value): + """Return self==value.""" + + def __ge__(self, value): + """Return self>=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self EarliestTimeView: + """ + Return a view of EarliestTimeView containing only the default edge layer + Returns: + EarliestTimeView: The layered view + """ + + @property + def end(self): + """ + Gets the latest time that this EarliestTimeView is valid. + + Returns: + Optional[int]: The latest time that this EarliestTimeView is valid or None if the EarliestTimeView is valid for all times. + """ + + @property + def end_date_time(self): + """ + Gets the latest datetime that this EarliestTimeView is valid + + Returns: + Optional[Datetime]: The latest datetime that this EarliestTimeView is valid or None if the EarliestTimeView is valid for all times. + """ + + def exclude_layer(self, name: str) -> EarliestTimeView: + """ + Return a view of EarliestTimeView containing all layers except the excluded `name` + Errors if any of the layers do not exist. + + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + EarliestTimeView: The layered view + """ + + def exclude_layers(self, names: list[str]) -> EarliestTimeView: + """ + Return a view of EarliestTimeView containing all layers except the excluded `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + EarliestTimeView: The layered view + """ + + def exclude_valid_layer(self, name: str) -> EarliestTimeView: + """ + Return a view of EarliestTimeView containing all layers except the excluded `name` + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + EarliestTimeView: The layered view + """ + + def exclude_valid_layers(self, names: list[str]) -> EarliestTimeView: + """ + Return a view of EarliestTimeView containing all layers except the excluded `names` + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + EarliestTimeView: The layered view + """ + + def expanding(self, step: int | str) -> WindowSet: + """ + Creates a `WindowSet` with the given `step` size using an expanding window. + + An expanding window is a window that grows by `step` size at each iteration. + + Arguments: + step (int | str): The step size of the window. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def has_layer(self, name: str): + """ + Check if EarliestTimeView has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ + + def items(self): + """ + Returns: + Iterator[Tuple[Node, Optional[int]]] + """ + + def latest(self): + """ + Create a view of the EarliestTimeView including all events at the latest time. 
+ + Returns: + EarliestTimeView + """ + + def layer(self, name: str) -> EarliestTimeView: + """ + Return a view of EarliestTimeView containing the layer `"name"` + Errors if the layer does not exist + + Arguments: + name (str): the name of the layer. + + Returns: + EarliestTimeView: The layered view + """ + + def layers(self, names: list[str]) -> EarliestTimeView: + """ + Return a view of EarliestTimeView containing all layers `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + EarliestTimeView: The layered view + """ + + def max(self): + """ + Return the maximum value + + Returns: + Optional[Optional[int]] + """ + + def max_item(self): + """ + Return largest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[int]]] + """ + + def median(self): + """ + Return the median value + + Returns: + Optional[Optional[int]] + """ + + def median_item(self): + """ + Return median value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[int]]] + """ + + def min(self): + """ + Return the minimum value + + Returns: + Optional[Optional[int]] + """ + + def min_item(self): + """ + Return smallest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[int]]] + """ + + def nodes(self): + """ + Iterate over nodes + + Returns: + Iterator[Node] + """ + + def rolling(self, window: int | str, step: int | str | None = None) -> WindowSet: + """ + Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. + + A rolling window is a window that moves forward by `step` size at each iteration. + + Arguments: + window (int | str): The size of the window. + step (int | str | None): The step size of the window. + `step` defaults to `window`. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def shrink_end(self, end: TimeInput): + """ + Set the end of the window to the smaller of `end` and `self.end()` + + Arguments: + end (TimeInput): the new end time of the window + + Returns: + EarliestTimeView + """ + + def shrink_start(self, start: TimeInput): + """ + Set the start of the window to the larger of `start` and `self.start()` + + Arguments: + start (TimeInput): the new start time of the window + + Returns: + EarliestTimeView + """ + + def shrink_window(self, start: TimeInput, end: TimeInput): + """ + Shrink both the start and end of the window (same as calling `shrink_start` followed by `shrink_end` but more efficient) + + Arguments: + start (TimeInput): the new start time for the window + end (TimeInput): the new end time for the window + + """ + + def snapshot_at(self, time: TimeInput): + """ + Create a view of the EarliestTimeView including all events that have not been explicitly deleted at `time`. + + This is equivalent to `before(time + 1)` for `EventGraph`s and `at(time)` for `PersistentGraph`s + + Arguments: + time (TimeInput): The time of the window. + + Returns: + EarliestTimeView + """ + + def snapshot_latest(self): + """ + Create a view of the EarliestTimeView including all events that have not been explicitly deleted at the latest time. + + This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersistentGraph`s + + Returns: + EarliestTimeView + """ + + def sorted(self, reverse: bool = False): + """ + Sort by value + + Arguments: + reverse (bool): If `True`, sort in descending order, otherwise ascending. Defaults to False.
+ + Returns: + NodeStateOptionI64 + """ + + def sorted_by_id(self): + """ + Sort results by node id + + Returns: + NodeStateOptionI64 + """ + + @property + def start(self): + """ + Gets the start time for rolling and expanding windows for this EarliestTimeView + + Returns: + Optional[int]: The earliest time that this EarliestTimeView is valid or None if the EarliestTimeView is valid for all times. + """ + + @property + def start_date_time(self): + """ + Gets the earliest datetime that this EarliestTimeView is valid + + Returns: + Optional[Datetime]: The earliest datetime that this EarliestTimeView is valid or None if the EarliestTimeView is valid for all times. + """ + + def top_k(self, k: int): + """ + Compute the k largest values + + Arguments: + k (int): The number of values to return + + Returns: + NodeStateOptionI64 + """ + + def valid_layers(self, names: list[str]) -> EarliestTimeView: + """ + Return a view of EarliestTimeView containing all layers `names` + Any layers that do not exist are ignored + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + EarliestTimeView: The layered view + """ + + def values(self): + """ + Returns: + Iterator[Optional[int]] + """ + + def window(self, start: TimeInput | None, end: TimeInput | None): + """ + Create a view of the EarliestTimeView including all events between `start` (inclusive) and `end` (exclusive) + + Arguments: + start (TimeInput | None): The start time of the window (unbounded if `None`). + end (TimeInput | None): The end time of the window (unbounded if `None`). + + Returns: + r EarliestTimeView + """ + + @property + def window_size(self): + """ + Get the window size (difference between start and end) for this EarliestTimeView + + Returns: + Optional[int] + """ + +class HistoryDateTimeView(object): + """A lazy view over node values""" + + def __eq__(self, value): + """Return self==value.""" + + def __ge__(self, value): + """Return self>=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self HistoryDateTimeView: + """ + Return a view of HistoryDateTimeView containing only the default edge layer + Returns: + HistoryDateTimeView: The layered view + """ + + @property + def end(self): + """ + Gets the latest time that this HistoryDateTimeView is valid. + + Returns: + Optional[int]: The latest time that this HistoryDateTimeView is valid or None if the HistoryDateTimeView is valid for all times. + """ + + @property + def end_date_time(self): + """ + Gets the latest datetime that this HistoryDateTimeView is valid + + Returns: + Optional[Datetime]: The latest datetime that this HistoryDateTimeView is valid or None if the HistoryDateTimeView is valid for all times. + """ + + def exclude_layer(self, name: str) -> HistoryDateTimeView: + """ + Return a view of HistoryDateTimeView containing all layers except the excluded `name` + Errors if any of the layers do not exist. + + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + HistoryDateTimeView: The layered view + """ + + def exclude_layers(self, names: list[str]) -> HistoryDateTimeView: + """ + Return a view of HistoryDateTimeView containing all layers except the excluded `names` + Errors if any of the layers do not exist. 
+ + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + HistoryDateTimeView: The layered view + """ + + def exclude_valid_layer(self, name: str) -> HistoryDateTimeView: + """ + Return a view of HistoryDateTimeView containing all layers except the excluded `name` + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + HistoryDateTimeView: The layered view + """ + + def exclude_valid_layers(self, names: list[str]) -> HistoryDateTimeView: + """ + Return a view of HistoryDateTimeView containing all layers except the excluded `names` + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + HistoryDateTimeView: The layered view + """ + + def expanding(self, step: int | str) -> WindowSet: + """ + Creates a `WindowSet` with the given `step` size using an expanding window. + + An expanding window is a window that grows by `step` size at each iteration. + + Arguments: + step (int | str): The step size of the window. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def has_layer(self, name: str): + """ + Check if HistoryDateTimeView has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ + + def items(self): + """ + Returns: + Iterator[Tuple[Node, Optional[list[Datetime]]]] + """ + + def latest(self): + """ + Create a view of the HistoryDateTimeView including all events at the latest time. + + Returns: + HistoryDateTimeView + """ + + def layer(self, name: str) -> HistoryDateTimeView: + """ + Return a view of HistoryDateTimeView containing the layer `"name"` + Errors if the layer does not exist + + Arguments: + name (str): then name of the layer. + + Returns: + HistoryDateTimeView: The layered view + """ + + def layers(self, names: list[str]) -> HistoryDateTimeView: + """ + Return a view of HistoryDateTimeView containing all layers `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + HistoryDateTimeView: The layered view + """ + + def max(self): + """ + Return the maximum value + + Returns: + Optional[Optional[list[Datetime]]] + """ + + def max_item(self): + """ + Return largest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[list[Datetime]]]] + """ + + def median(self): + """ + Return the median value + + Returns: + Optional[Optional[list[Datetime]]] + """ + + def median_item(self): + """ + Return medain value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[list[Datetime]]]] + """ + + def min(self): + """ + Return the minimum value + + Returns: + Optional[Optional[list[Datetime]]] + """ + + def min_item(self): + """ + Return smallest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[list[Datetime]]]] + """ + + def nodes(self): + """ + Iterate over nodes + + Returns: + Iterator[Node] + """ + + def rolling(self, window: int | str, step: int | str | None = None) -> WindowSet: + """ + Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. + + A rolling window is a window that moves forward by `step` size at each iteration. + + Arguments: + window (int | str): The size of the window. + step (int | str | None): The step size of the window. + `step` defaults to `window`. + + Returns: + WindowSet: A `WindowSet` object. 
+ """ + + def shrink_end(self, end: TimeInput): + """ + Set the end of the window to the smaller of `end` and `self.end()` + + Arguments: + end (TimeInput): the new end time of the window + Returns: + HistoryDateTimeView + """ + + def shrink_start(self, start: TimeInput): + """ + Set the start of the window to the larger of `start` and `self.start()` + + Arguments: + start (TimeInput): the new start time of the window + + Returns: + HistoryDateTimeView + """ + + def shrink_window(self, start: TimeInput, end: TimeInput): + """ + Shrink both the start and end of the window (same as calling `shrink_start` followed by `shrink_end` but more efficient) + + Arguments: + start (TimeInput): the new start time for the window + end (TimeInput): the new end time for the window + + """ + + def snapshot_at(self, time: TimeInput): + """ + Create a view of the HistoryDateTimeView including all events that have not been explicitly deleted at `time`. + + This is equivalent to `before(time + 1)` for `EventGraph`s and `at(time)` for `PersitentGraph`s + + Arguments: + time (TimeInput): The time of the window. + + Returns: + HistoryDateTimeView + """ + + def snapshot_latest(self): + """ + Create a view of the HistoryDateTimeView including all events that have not been explicitly deleted at the latest time. + + This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s + + Returns: + HistoryDateTimeView + """ + + def sorted(self, reverse: bool = False): + """ + Sort by value + + Arguments: + reverse (bool): If `True`, sort in descending order, otherwise ascending. Defaults to False. + + Returns: + NodeStateOptionListDateTime + """ + + def sorted_by_id(self): + """ + Sort results by node id + + Returns: + NodeStateOptionListDateTime + """ + + @property + def start(self): + """ + Gets the start time for rolling and expanding windows for this HistoryDateTimeView + + Returns: + Optional[int]: The earliest time that this HistoryDateTimeView is valid or None if the HistoryDateTimeView is valid for all times. + """ + + @property + def start_date_time(self): + """ + Gets the earliest datetime that this HistoryDateTimeView is valid + + Returns: + Optional[Datetime]: The earliest datetime that this HistoryDateTimeView is valid or None if the HistoryDateTimeView is valid for all times. + """ + + def top_k(self, k: int): + """ + Compute the k largest values + + Arguments: + k (int): The number of values to return + + Returns: + NodeStateOptionListDateTime + """ + + def valid_layers(self, names: list[str]) -> HistoryDateTimeView: + """ + Return a view of HistoryDateTimeView containing all layers `names` + Any layers that do not exist are ignored + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + HistoryDateTimeView: The layered view + """ + + def values(self): + """ + Returns: + Iterator[Optional[list[Datetime]]] + """ + + def window(self, start: TimeInput | None, end: TimeInput | None): + """ + Create a view of the HistoryDateTimeView including all events between `start` (inclusive) and `end` (exclusive) + + Arguments: + start (TimeInput | None): The start time of the window (unbounded if `None`). + end (TimeInput | None): The end time of the window (unbounded if `None`). 
+ + Returns: + r HistoryDateTimeView + """ + + @property + def window_size(self): + """ + Get the window size (difference between start and end) for this HistoryDateTimeView + + Returns: + Optional[int] + """ + +class HistoryView(object): + """A lazy view over node values""" + + def __eq__(self, value): + """Return self==value.""" + + def __ge__(self, value): + """Return self>=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self HistoryView: + """ + Return a view of HistoryView containing only the default edge layer + Returns: + HistoryView: The layered view + """ + + @property + def end(self): + """ + Gets the latest time that this HistoryView is valid. + + Returns: + Optional[int]: The latest time that this HistoryView is valid or None if the HistoryView is valid for all times. + """ + + @property + def end_date_time(self): + """ + Gets the latest datetime that this HistoryView is valid + + Returns: + Optional[Datetime]: The latest datetime that this HistoryView is valid or None if the HistoryView is valid for all times. + """ + + def exclude_layer(self, name: str) -> HistoryView: + """ + Return a view of HistoryView containing all layers except the excluded `name` + Errors if any of the layers do not exist. + + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + HistoryView: The layered view + """ + + def exclude_layers(self, names: list[str]) -> HistoryView: + """ + Return a view of HistoryView containing all layers except the excluded `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + HistoryView: The layered view + """ + + def exclude_valid_layer(self, name: str) -> HistoryView: + """ + Return a view of HistoryView containing all layers except the excluded `name` + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + HistoryView: The layered view + """ + + def exclude_valid_layers(self, names: list[str]) -> HistoryView: + """ + Return a view of HistoryView containing all layers except the excluded `names` + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + HistoryView: The layered view + """ + + def expanding(self, step: int | str) -> WindowSet: + """ + Creates a `WindowSet` with the given `step` size using an expanding window. + + An expanding window is a window that grows by `step` size at each iteration. + + Arguments: + step (int | str): The step size of the window. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def has_layer(self, name: str): + """ + Check if HistoryView has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ + + def items(self): + """ + Returns: + Iterator[Tuple[Node, list[int]]] + """ + + def latest(self): + """ + Create a view of the HistoryView including all events at the latest time. + + Returns: + HistoryView + """ + + def layer(self, name: str) -> HistoryView: + """ + Return a view of HistoryView containing the layer `"name"` + Errors if the layer does not exist + + Arguments: + name (str): then name of the layer. 
+ + Returns: + HistoryView: The layered view + """ + + def layers(self, names: list[str]) -> HistoryView: + """ + Return a view of HistoryView containing all layers `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + HistoryView: The layered view + """ + + def max(self): + """ + Return the maximum value + + Returns: + Optional[list[int]] + """ + + def max_item(self): + """ + Return largest value and corresponding node + + Returns: + Optional[Tuple[Node, list[int]]] + """ + + def median(self): + """ + Return the median value + + Returns: + Optional[list[int]] + """ + + def median_item(self): + """ + Return medain value and corresponding node + + Returns: + Optional[Tuple[Node, list[int]]] + """ + + def min(self): + """ + Return the minimum value + + Returns: + Optional[list[int]] + """ + + def min_item(self): + """ + Return smallest value and corresponding node + + Returns: + Optional[Tuple[Node, list[int]]] + """ + + def nodes(self): + """ + Iterate over nodes + + Returns: + Iterator[Node] + """ + + def rolling(self, window: int | str, step: int | str | None = None) -> WindowSet: + """ + Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. + + A rolling window is a window that moves forward by `step` size at each iteration. + + Arguments: + window (int | str): The size of the window. + step (int | str | None): The step size of the window. + `step` defaults to `window`. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def shrink_end(self, end: TimeInput): + """ + Set the end of the window to the smaller of `end` and `self.end()` + + Arguments: + end (TimeInput): the new end time of the window + Returns: + HistoryView + """ + + def shrink_start(self, start: TimeInput): + """ + Set the start of the window to the larger of `start` and `self.start()` + + Arguments: + start (TimeInput): the new start time of the window + + Returns: + HistoryView + """ + + def shrink_window(self, start: TimeInput, end: TimeInput): + """ + Shrink both the start and end of the window (same as calling `shrink_start` followed by `shrink_end` but more efficient) + + Arguments: + start (TimeInput): the new start time for the window + end (TimeInput): the new end time for the window + + """ + + def snapshot_at(self, time: TimeInput): + """ + Create a view of the HistoryView including all events that have not been explicitly deleted at `time`. + + This is equivalent to `before(time + 1)` for `EventGraph`s and `at(time)` for `PersitentGraph`s + + Arguments: + time (TimeInput): The time of the window. + + Returns: + HistoryView + """ + + def snapshot_latest(self): + """ + Create a view of the HistoryView including all events that have not been explicitly deleted at the latest time. + + This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s + + Returns: + HistoryView + """ + + def sorted(self, reverse: bool = False): + """ + Sort by value + + Arguments: + reverse (bool): If `True`, sort in descending order, otherwise ascending. Defaults to False. + + Returns: + NodeStateListI64 + """ + + def sorted_by_id(self): + """ + Sort results by node id + + Returns: + NodeStateListI64 + """ + + @property + def start(self): + """ + Gets the start time for rolling and expanding windows for this HistoryView + + Returns: + Optional[int]: The earliest time that this HistoryView is valid or None if the HistoryView is valid for all times. 
+ """ + + @property + def start_date_time(self): + """ + Gets the earliest datetime that this HistoryView is valid + + Returns: + Optional[Datetime]: The earliest datetime that this HistoryView is valid or None if the HistoryView is valid for all times. + """ + + def top_k(self, k: int): + """ + Compute the k largest values + + Arguments: + k (int): The number of values to return + + Returns: + NodeStateListI64 + """ + + def valid_layers(self, names: list[str]) -> HistoryView: + """ + Return a view of HistoryView containing all layers `names` + Any layers that do not exist are ignored + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + HistoryView: The layered view + """ + + def values(self): + """ + Returns: + Iterator[list[int]] + """ + + def window(self, start: TimeInput | None, end: TimeInput | None): + """ + Create a view of the HistoryView including all events between `start` (inclusive) and `end` (exclusive) + + Arguments: + start (TimeInput | None): The start time of the window (unbounded if `None`). + end (TimeInput | None): The end time of the window (unbounded if `None`). + + Returns: + r HistoryView + """ + + @property + def window_size(self): + """ + Get the window size (difference between start and end) for this HistoryView + + Returns: + Optional[int] + """ + +class IdView(object): + """A lazy view over node values""" + + def __eq__(self, value): + """Return self==value.""" + + def __ge__(self, value): + """Return self>=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self LatestDateTimeView: + """ + Return a view of LatestDateTimeView containing only the default edge layer + Returns: + LatestDateTimeView: The layered view + """ + + @property + def end(self): + """ + Gets the latest time that this LatestDateTimeView is valid. + + Returns: + Optional[int]: The latest time that this LatestDateTimeView is valid or None if the LatestDateTimeView is valid for all times. + """ + + @property + def end_date_time(self): + """ + Gets the latest datetime that this LatestDateTimeView is valid + + Returns: + Optional[Datetime]: The latest datetime that this LatestDateTimeView is valid or None if the LatestDateTimeView is valid for all times. + """ + + def exclude_layer(self, name: str) -> LatestDateTimeView: + """ + Return a view of LatestDateTimeView containing all layers except the excluded `name` + Errors if any of the layers do not exist. + + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + LatestDateTimeView: The layered view + """ + + def exclude_layers(self, names: list[str]) -> LatestDateTimeView: + """ + Return a view of LatestDateTimeView containing all layers except the excluded `names` + Errors if any of the layers do not exist. 
+ + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + LatestDateTimeView: The layered view + """ + + def exclude_valid_layer(self, name: str) -> LatestDateTimeView: + """ + Return a view of LatestDateTimeView containing all layers except the excluded `name` + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + LatestDateTimeView: The layered view + """ + + def exclude_valid_layers(self, names: list[str]) -> LatestDateTimeView: + """ + Return a view of LatestDateTimeView containing all layers except the excluded `names` + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + LatestDateTimeView: The layered view + """ + + def expanding(self, step: int | str) -> WindowSet: + """ + Creates a `WindowSet` with the given `step` size using an expanding window. + + An expanding window is a window that grows by `step` size at each iteration. + + Arguments: + step (int | str): The step size of the window. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def has_layer(self, name: str): + """ + Check if LatestDateTimeView has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ + + def items(self): + """ + Returns: + Iterator[Tuple[Node, Optional[Datetime]]] + """ + + def latest(self): + """ + Create a view of the LatestDateTimeView including all events at the latest time. + + Returns: + LatestDateTimeView + """ + + def layer(self, name: str) -> LatestDateTimeView: + """ + Return a view of LatestDateTimeView containing the layer `"name"` + Errors if the layer does not exist + + Arguments: + name (str): then name of the layer. + + Returns: + LatestDateTimeView: The layered view + """ + + def layers(self, names: list[str]) -> LatestDateTimeView: + """ + Return a view of LatestDateTimeView containing all layers `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + LatestDateTimeView: The layered view + """ + + def max(self): + """ + Return the maximum value + + Returns: + Optional[Optional[Datetime]] + """ + + def max_item(self): + """ + Return largest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[Datetime]]] + """ + + def median(self): + """ + Return the median value + + Returns: + Optional[Optional[Datetime]] + """ + + def median_item(self): + """ + Return medain value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[Datetime]]] + """ + + def min(self): + """ + Return the minimum value + + Returns: + Optional[Optional[Datetime]] + """ + + def min_item(self): + """ + Return smallest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[Datetime]]] + """ + + def nodes(self): + """ + Iterate over nodes + + Returns: + Iterator[Node] + """ + + def rolling(self, window: int | str, step: int | str | None = None) -> WindowSet: + """ + Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. + + A rolling window is a window that moves forward by `step` size at each iteration. + + Arguments: + window (int | str): The size of the window. + step (int | str | None): The step size of the window. + `step` defaults to `window`. + + Returns: + WindowSet: A `WindowSet` object. 
+ """ + + def shrink_end(self, end: TimeInput): + """ + Set the end of the window to the smaller of `end` and `self.end()` + + Arguments: + end (TimeInput): the new end time of the window + Returns: + LatestDateTimeView + """ + + def shrink_start(self, start: TimeInput): + """ + Set the start of the window to the larger of `start` and `self.start()` + + Arguments: + start (TimeInput): the new start time of the window + + Returns: + LatestDateTimeView + """ + + def shrink_window(self, start: TimeInput, end: TimeInput): + """ + Shrink both the start and end of the window (same as calling `shrink_start` followed by `shrink_end` but more efficient) + + Arguments: + start (TimeInput): the new start time for the window + end (TimeInput): the new end time for the window + + """ + + def snapshot_at(self, time: TimeInput): + """ + Create a view of the LatestDateTimeView including all events that have not been explicitly deleted at `time`. + + This is equivalent to `before(time + 1)` for `EventGraph`s and `at(time)` for `PersitentGraph`s + + Arguments: + time (TimeInput): The time of the window. + + Returns: + LatestDateTimeView + """ + + def snapshot_latest(self): + """ + Create a view of the LatestDateTimeView including all events that have not been explicitly deleted at the latest time. + + This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s + + Returns: + LatestDateTimeView + """ + + def sorted(self, reverse: bool = False): + """ + Sort by value + + Arguments: + reverse (bool): If `True`, sort in descending order, otherwise ascending. Defaults to False. + + Returns: + NodeStateOptionDateTime + """ + + def sorted_by_id(self): + """ + Sort results by node id + + Returns: + NodeStateOptionDateTime + """ + + @property + def start(self): + """ + Gets the start time for rolling and expanding windows for this LatestDateTimeView + + Returns: + Optional[int]: The earliest time that this LatestDateTimeView is valid or None if the LatestDateTimeView is valid for all times. + """ + + @property + def start_date_time(self): + """ + Gets the earliest datetime that this LatestDateTimeView is valid + + Returns: + Optional[Datetime]: The earliest datetime that this LatestDateTimeView is valid or None if the LatestDateTimeView is valid for all times. + """ + + def top_k(self, k: int): + """ + Compute the k largest values + + Arguments: + k (int): The number of values to return + + Returns: + NodeStateOptionDateTime + """ + + def valid_layers(self, names: list[str]) -> LatestDateTimeView: + """ + Return a view of LatestDateTimeView containing all layers `names` + Any layers that do not exist are ignored + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + LatestDateTimeView: The layered view + """ + + def values(self): + """ + Returns: + Iterator[Optional[Datetime]] + """ + + def window(self, start: TimeInput | None, end: TimeInput | None): + """ + Create a view of the LatestDateTimeView including all events between `start` (inclusive) and `end` (exclusive) + + Arguments: + start (TimeInput | None): The start time of the window (unbounded if `None`). + end (TimeInput | None): The end time of the window (unbounded if `None`). 
+ + Returns: + r LatestDateTimeView + """ + + @property + def window_size(self): + """ + Get the window size (difference between start and end) for this LatestDateTimeView + + Returns: + Optional[int] + """ + +class LatestTimeView(object): + """A lazy view over node values""" + + def __eq__(self, value): + """Return self==value.""" + + def __ge__(self, value): + """Return self>=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self LatestTimeView: + """ + Return a view of LatestTimeView containing only the default edge layer + Returns: + LatestTimeView: The layered view + """ + + @property + def end(self): + """ + Gets the latest time that this LatestTimeView is valid. + + Returns: + Optional[int]: The latest time that this LatestTimeView is valid or None if the LatestTimeView is valid for all times. + """ + + @property + def end_date_time(self): + """ + Gets the latest datetime that this LatestTimeView is valid + + Returns: + Optional[Datetime]: The latest datetime that this LatestTimeView is valid or None if the LatestTimeView is valid for all times. + """ + + def exclude_layer(self, name: str) -> LatestTimeView: + """ + Return a view of LatestTimeView containing all layers except the excluded `name` + Errors if any of the layers do not exist. + + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + LatestTimeView: The layered view + """ + + def exclude_layers(self, names: list[str]) -> LatestTimeView: + """ + Return a view of LatestTimeView containing all layers except the excluded `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + LatestTimeView: The layered view + """ + + def exclude_valid_layer(self, name: str) -> LatestTimeView: + """ + Return a view of LatestTimeView containing all layers except the excluded `name` + Arguments: + name (str): layer name that is excluded for the new view + + Returns: + LatestTimeView: The layered view + """ + + def exclude_valid_layers(self, names: list[str]) -> LatestTimeView: + """ + Return a view of LatestTimeView containing all layers except the excluded `names` + Arguments: + names (list[str]): list of layer names that are excluded for the new view + + Returns: + LatestTimeView: The layered view + """ + + def expanding(self, step: int | str) -> WindowSet: + """ + Creates a `WindowSet` with the given `step` size using an expanding window. + + An expanding window is a window that grows by `step` size at each iteration. + + Arguments: + step (int | str): The step size of the window. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def has_layer(self, name: str): + """ + Check if LatestTimeView has the layer `"name"` + + Arguments: + name (str): the name of the layer to check + + Returns: + bool + """ + + def items(self): + """ + Returns: + Iterator[Tuple[Node, Optional[int]]] + """ + + def latest(self): + """ + Create a view of the LatestTimeView including all events at the latest time. + + Returns: + LatestTimeView + """ + + def layer(self, name: str) -> LatestTimeView: + """ + Return a view of LatestTimeView containing the layer `"name"` + Errors if the layer does not exist + + Arguments: + name (str): then name of the layer. 
+ + Returns: + LatestTimeView: The layered view + """ + + def layers(self, names: list[str]) -> LatestTimeView: + """ + Return a view of LatestTimeView containing all layers `names` + Errors if any of the layers do not exist. + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + LatestTimeView: The layered view + """ + + def max(self): + """ + Return the maximum value + + Returns: + Optional[Optional[int]] + """ + + def max_item(self): + """ + Return largest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[int]]] + """ + + def median(self): + """ + Return the median value + + Returns: + Optional[Optional[int]] + """ + + def median_item(self): + """ + Return medain value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[int]]] + """ + + def min(self): + """ + Return the minimum value + + Returns: + Optional[Optional[int]] + """ + + def min_item(self): + """ + Return smallest value and corresponding node + + Returns: + Optional[Tuple[Node, Optional[int]]] + """ + + def nodes(self): + """ + Iterate over nodes + + Returns: + Iterator[Node] + """ + + def rolling(self, window: int | str, step: int | str | None = None) -> WindowSet: + """ + Creates a `WindowSet` with the given `window` size and optional `step` using a rolling window. + + A rolling window is a window that moves forward by `step` size at each iteration. + + Arguments: + window (int | str): The size of the window. + step (int | str | None): The step size of the window. + `step` defaults to `window`. + + Returns: + WindowSet: A `WindowSet` object. + """ + + def shrink_end(self, end: TimeInput): + """ + Set the end of the window to the smaller of `end` and `self.end()` + + Arguments: + end (TimeInput): the new end time of the window + Returns: + LatestTimeView + """ + + def shrink_start(self, start: TimeInput): + """ + Set the start of the window to the larger of `start` and `self.start()` + + Arguments: + start (TimeInput): the new start time of the window + + Returns: + LatestTimeView + """ + + def shrink_window(self, start: TimeInput, end: TimeInput): + """ + Shrink both the start and end of the window (same as calling `shrink_start` followed by `shrink_end` but more efficient) + + Arguments: + start (TimeInput): the new start time for the window + end (TimeInput): the new end time for the window + + """ + + def snapshot_at(self, time: TimeInput): + """ + Create a view of the LatestTimeView including all events that have not been explicitly deleted at `time`. + + This is equivalent to `before(time + 1)` for `EventGraph`s and `at(time)` for `PersitentGraph`s + + Arguments: + time (TimeInput): The time of the window. + + Returns: + LatestTimeView + """ + + def snapshot_latest(self): + """ + Create a view of the LatestTimeView including all events that have not been explicitly deleted at the latest time. + + This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s + + Returns: + LatestTimeView + """ + + def sorted(self, reverse: bool = False): + """ + Sort by value + + Arguments: + reverse (bool): If `True`, sort in descending order, otherwise ascending. Defaults to False. 
+ + Returns: + NodeStateOptionI64 + """ + + def sorted_by_id(self): + """ + Sort results by node id + + Returns: + NodeStateOptionI64 + """ + + @property + def start(self): + """ + Gets the start time for rolling and expanding windows for this LatestTimeView + + Returns: + Optional[int]: The earliest time that this LatestTimeView is valid or None if the LatestTimeView is valid for all times. + """ + + @property + def start_date_time(self): + """ + Gets the earliest datetime that this LatestTimeView is valid + + Returns: + Optional[Datetime]: The earliest datetime that this LatestTimeView is valid or None if the LatestTimeView is valid for all times. + """ + + def top_k(self, k: int): + """ + Compute the k largest values + + Arguments: + k (int): The number of values to return + + Returns: + NodeStateOptionI64 + """ + + def valid_layers(self, names: list[str]) -> LatestTimeView: + """ + Return a view of LatestTimeView containing all layers `names` + Any layers that do not exist are ignored + + Arguments: + names (list[str]): list of layer names for the new view + + Returns: + LatestTimeView: The layered view + """ + + def values(self): + """ + Returns: + Iterator[Optional[int]] + """ + + def window(self, start: TimeInput | None, end: TimeInput | None): + """ + Create a view of the LatestTimeView including all events between `start` (inclusive) and `end` (exclusive) + + Arguments: + start (TimeInput | None): The start time of the window (unbounded if `None`). + end (TimeInput | None): The end time of the window (unbounded if `None`). + + Returns: + r LatestTimeView + """ + + @property + def window_size(self): + """ + Get the window size (difference between start and end) for this LatestTimeView + + Returns: + Optional[int] + """ + +class NameView(object): + """A lazy view over node values""" + + def __eq__(self, value): + """Return self==value.""" + + def __ge__(self, value): + """Return self>=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, 
value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self=value.""" + + def __getitem__(self, key): + """Return self[key].""" + + def __gt__(self, value): + """Return self>value.""" + + def __iter__(self): + """Implement iter(self).""" + + def __le__(self, value): + """Return self<=value.""" + + def __len__(self): + """Return len(self).""" + + def __lt__(self, value): + """Return self Document: + def __new__(cls, content, life=None) -> Document: """Create and return a new object. 
See help(type) for accurate signature.""" def __repr__(self): diff --git a/python/scripts/gen-stubs.py b/python/scripts/gen-stubs.py index 336535dcb7..b8b3647672 100755 --- a/python/scripts/gen-stubs.py +++ b/python/scripts/gen-stubs.py @@ -1,10 +1,11 @@ #!/usr/bin/env python3 - +import ast import inspect import logging import textwrap import types from importlib import import_module +from logging import ERROR from pathlib import Path from types import ( BuiltinFunctionType, @@ -13,15 +14,14 @@ MethodDescriptorType, ModuleType, ) +import builtins from typing import * -from raphtory import * -from raphtory.graphql import * -from raphtory.typing import * from docstring_parser import parse, DocstringStyle, DocstringParam, ParseError -from datetime import datetime -from pandas import DataFrame logger = logging.getLogger(__name__) +fn_logger = logging.getLogger(__name__) +cls_logger = logging.getLogger(__name__) + TARGET_MODULES = ["raphtory", "builtins"] TAB = " " * 4 @@ -44,13 +44,19 @@ imports = """ from typing import * from raphtory import * +from raphtory.algorithms import * from raphtory.vectors import * +from raphtory.node_state import * from raphtory.graphql import * from raphtory.typing import * from datetime import datetime from pandas import DataFrame """ +# imports for type checking +global_ns = {} +exec(imports, global_ns) + def format_type(obj) -> str: if isinstance(obj, type): @@ -65,6 +71,18 @@ def format_type(obj) -> str: return repr(obj) +class AnnotationError(Exception): + pass + + +def validate_annotation(annotation: str): + parsed = ast.parse(f"_: {annotation}") + for node in ast.walk(parsed.body[0].annotation): + if isinstance(node, ast.Name): + if node.id not in global_ns and node.id not in builtins.__dict__: + raise AnnotationError(f"Unknown type {node.id}") + + def format_param(param: inspect.Parameter) -> str: if param.kind == param.VAR_KEYWORD: name = f"**{param.name}" @@ -107,23 +125,38 @@ def same_default(doc_default: Optional[str], param_default: Any) -> bool: def clean_parameter( - param: inspect.Parameter, type_annotations: dict[str, dict[str, Any]] + param: inspect.Parameter, + type_annotations: dict[str, dict[str, Any]], ): annotations = {} if param.default is not inspect.Parameter.empty: annotations["default"] = format_type(param.default) - if param.name in type_annotations: - annotations["annotation"] = type_annotations[param.name]["annotation"] - if "default" in type_annotations[param.name]: - default_from_docs = type_annotations[param.name]["default"] - if param.default is not param.empty and param.default is not ...: - if not same_default(default_from_docs, param.default): - fn_logger.warning( - f"mismatched default value: docs={repr(default_from_docs)}, signature={param.default}" - ) + doc_annotation = type_annotations.pop(param.name, None) + if doc_annotation is not None: + annotations["annotation"] = doc_annotation["annotation"] + default_from_docs = doc_annotation.get("default", None) + if default_from_docs is not None: + if param.default is not param.empty: + if param.default is not ...: + if not same_default(default_from_docs, param.default): + fn_logger.warning( + f"mismatched default value: docs={repr(default_from_docs)}, signature={param.default}" + ) + else: + annotations["default"] = default_from_docs else: - annotations["default"] = default_from_docs + fn_logger.error( + f"parameter {param.name} has default value {repr(default_from_docs)} in documentation but no default in signature." 
+ ) + else: + if param.default is not param.empty and param.default is not None: + fn_logger.warning( + f"default value for parameter {param.name} with value {repr(param.default)} in signature is not documented" + ) + else: + if param.name not in {"self", "cls"}: + fn_logger.warning(f"missing parameter {param.name} in docs.") return param.replace(**annotations) @@ -142,6 +175,10 @@ def clean_signature( decorator = None new_params = [clean_parameter(p, type_annotations) for p in sig.parameters.values()] + for param_name, annotations in type_annotations.items(): + fn_logger.warning( + f"parameter {param_name} appears in documentation but does not exist." + ) sig = sig.replace(parameters=new_params) if return_type is not None: sig = sig.replace(return_annotation=return_type) @@ -153,17 +190,24 @@ def insert_self(signature: inspect.Signature) -> inspect.Signature: return signature.replace(parameters=[self_param, *signature.parameters.values()]) +def insert_cls(signature: inspect.Signature) -> inspect.Signature: + cls_param = inspect.Parameter("cls", kind=inspect.Parameter.POSITIONAL_OR_KEYWORD) + return signature.replace(parameters=[cls_param, *signature.parameters.values()]) + + def cls_signature(cls: type) -> Optional[inspect.Signature]: try: - return insert_self(inspect.signature(cls)) + return inspect.signature(cls) except ValueError: pass -def from_raphtory(obj) -> bool: +def from_raphtory(obj, name: str) -> bool: module = inspect.getmodule(obj) if module: - return any(module.__name__.startswith(target) for target in TARGET_MODULES) + return module.__name__ == name or any( + module.__name__.startswith(target) for target in TARGET_MODULES + ) return False @@ -184,20 +228,24 @@ def extract_param_annotation(param: DocstringParam) -> dict: else: type_val = param.type_name try: - eval(type_val) + validate_annotation(type_val) + if param.is_optional: + type_val = f"Optional[{type_val}]" + res["annotation"] = type_val except Exception as e: - raise ParseError(f"Invalid type name {type_val}: {e}") + fn_logger.error( + f"Invalid annotation {repr(type_val)} for parameter {param.arg_name}: {e}" + ) - if param.is_optional: - type_val = f"Optional[{type_val}]" - res["annotation"] = type_val if param.default is not None or param.is_optional: if param.default is not None: try: - eval(param.default) + validate_annotation(param.default) + res["default"] = param.default except Exception as e: - raise ParseError(f"Invalid default value {param.default}: {e}") - res["default"] = param.default + fn_logger.error( + f"Invalid default value {repr(param.default)} for parameter {param.arg_name}: {e}" + ) return res @@ -217,6 +265,11 @@ def extract_types( } if parse_result.returns is not None: return_type = parse_result.returns.type_name + try: + validate_annotation(return_type) + except Exception as e: + fn_logger.error(f"Invalid return type {repr(return_type)}: {e}") + return_type = None else: return_type = None return type_annotations, return_type @@ -234,8 +287,6 @@ def gen_fn( signature_overwrite: Optional[inspect.Signature] = None, docs_overwrite: Optional[str] = None, ) -> str: - global fn_logger - fn_logger = logging.getLogger(repr(function)) init_tab = TAB if is_method else "" fn_tab = TAB * 2 if is_method else TAB type_annotations, return_type = extract_types(function, docs_overwrite) @@ -247,6 +298,9 @@ def gen_fn( type_annotations, return_type, ) + if name == "__new__": + # new is special and not a class method + decorator = None fn_str = f"{init_tab}def {name}{signature}:\n{docstr}" @@ -272,15 +326,26 
@@ def gen_class(cls: type, name) -> str: contents = list(vars(cls).items()) contents.sort(key=lambda x: x[0]) entities: list[str] = [] - + global cls_logger + global fn_logger + cls_logger = logger.getChild(name) for obj_name, entity in contents: + fn_logger = cls_logger.getChild(obj_name) + if obj_name.startswith("__") and not ( + obj_name == "__init__" or obj_name == "__new__" + ): + # Cannot get doc strings for __ methods (except for __new__ which we special-case by passing in the class docstring) + fn_logger.setLevel(ERROR) entity = inspect.unwrap(entity) if obj_name == "__init__" or obj_name == "__new__": # Get __init__ signature from class info signature = cls_signature(cls) if signature is not None: if obj_name == "__new__": - signature = signature.replace(return_annotation=name) + signature = insert_cls(signature.replace(return_annotation=name)) + else: + signature = insert_self(signature) + entities.append( gen_fn( entity, @@ -304,21 +369,20 @@ def gen_class(cls: type, name) -> str: return f"class {name}{bases}: \n{docstr}\n{str_entities}" -def gen_module(module: ModuleType, name: str, path: Path) -> None: - logging.info("starting") +def gen_module(module: ModuleType, name: str, path: Path, log_path) -> None: + global logger + global fn_logger objs = list(vars(module).items()) objs.sort(key=lambda x: x[0]) - stubs: List[str] = [] modules: List[(ModuleType, str)] = [] path = path / name - global logger - logger = logging.getLogger(str(path)) - + logger = logging.getLogger(log_path) for obj_name, obj in objs: - if isinstance(obj, type) and from_raphtory(obj): + if isinstance(obj, type) and from_raphtory(obj, name): stubs.append(gen_class(obj, obj_name)) elif isinstance(obj, BuiltinFunctionType): + fn_logger = logger.getChild(obj_name) stubs.append(gen_fn(obj, obj_name)) elif isinstance(obj, ModuleType) and obj.__loader__ is None: modules.append((obj, obj_name)) @@ -328,13 +392,15 @@ def gen_module(module: ModuleType, name: str, path: Path) -> None: file = path / "__init__.pyi" file.write_text(stub_file) - for module in modules: - gen_module(*module, path) + for module, name in modules: + gen_module(module, name, path, f"{log_path}.{name}") return if __name__ == "__main__": + logger = logging.getLogger("gen_stubs") + logging.basicConfig(level=logging.INFO) raphtory = import_module("raphtory") path = Path(__file__).parent.parent / "python" - gen_module(raphtory, "raphtory", path) + gen_module(raphtory, "raphtory", path, "raphtory") diff --git a/python/src/lib.rs b/python/src/lib.rs index 753e88987f..91e010ef0a 100644 --- a/python/src/lib.rs +++ b/python/src/lib.rs @@ -1,8 +1,11 @@ extern crate core; use pyo3::prelude::*; -use raphtory_core::python::packages::base_modules::{ - add_raphtory_classes, base_algorithm_module, base_graph_gen_module, base_graph_loader_module, - base_vectors_module, +use raphtory_core::python::{ + graph::node_state::base_node_state_module, + packages::base_modules::{ + add_raphtory_classes, base_algorithm_module, base_graph_gen_module, + base_graph_loader_module, base_vectors_module, + }, }; use raphtory_graphql::python::pymodule::base_graphql_module; @@ -16,11 +19,12 @@ fn raphtory(py: Python<'_>, m: &Bound) -> PyResult<()> { let graph_loader_module = base_graph_loader_module(py)?; let graph_gen_module = base_graph_gen_module(py)?; let vectors_module = base_vectors_module(py)?; + let node_state_module = base_node_state_module(py)?; m.add_submodule(&graphql_module)?; m.add_submodule(&algorithm_module)?; m.add_submodule(&graph_loader_module)?; 
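// Descriptive note: graph_gen, vectors, and the newly built node_state module are registered below in the same way, so each appears as a submodule of the top-level `raphtory` extension module.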
m.add_submodule(&graph_gen_module)?; m.add_submodule(&vectors_module)?; - + m.add_submodule(&node_state_module)?; Ok(()) } diff --git a/python/tests/graphql/test_graph_nodes_edges_property_filter.py b/python/tests/graphql/test_graph_nodes_edges_property_filter.py new file mode 100644 index 0000000000..d68b449182 --- /dev/null +++ b/python/tests/graphql/test_graph_nodes_edges_property_filter.py @@ -0,0 +1,2238 @@ +import tempfile + +import pytest + +from raphtory.graphql import GraphServer +from raphtory import Graph, PersistentGraph +import json +import re + +PORT = 1737 + + +def create_test_graph(g): + g.add_node(1, "a", properties={"prop1": 60, "prop2": 31.3, "prop3": "abc123", "prop4": True, "prop5": [1, 2, 3]}) + g.add_node(1, "b", properties={"prop1": 10, "prop2": 31.3, "prop3": "abc223", "prop4": False}) + g.add_node(1, "c", properties={"prop1": 20, "prop2": 31.3, "prop3": "abc333", "prop4": True, "prop5": [5, 6, 7]}) + g.add_node(1, "d", properties={"prop1": 30, "prop2": 31.3, "prop3": "abc444", "prop4": False}) + g.add_edge( + 2, + "a", + "d", + properties={"eprop1": 60, "eprop2": 0.4, "eprop3": "xyz123", "eprop4": True, "eprop5": [1, 2, 3]}, + ) + g.add_edge( + 2, + "b", + "d", + properties={"eprop1": 10, "eprop2": 1.7, "eprop3": "xyz123", "eprop4": True, "eprop5": [3, 4, 5]}, + ) + g.add_edge( + 2, + "c", + "d", + properties={"eprop1": 30, "eprop2": 6.4, "eprop3": "xyz123", "eprop4": False, "eprop5": [10]}, + ) + return g + + +def run_graphql_test(query, expected_output, graph): + create_test_graph(graph) + tmp_work_dir = tempfile.mkdtemp() + with GraphServer(tmp_work_dir).start(PORT) as server: + client = server.get_client() + client.send_graph(path="g", graph=graph) + + response = client.query(query) + + # Convert response to a dictionary if needed and compare + response_dict = json.loads(response) if isinstance(response, str) else response + assert response_dict == expected_output + + +def run_graphql_error_test(query, expected_error_message, graph): + create_test_graph(graph) + tmp_work_dir = tempfile.mkdtemp() + with GraphServer(tmp_work_dir).start(PORT) as server: + client = server.get_client() + client.send_graph(path="g", graph=graph) + + with pytest.raises(Exception) as excinfo: + client.query(query) + + full_error_message = str(excinfo.value) + match = re.search(r'"message":"(.*?)"', full_error_message) + error_message = match.group(1) if match else "" + + assert error_message == expected_error_message, f"Expected '{expected_error_message}', but got '{error_message}'" + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_equal(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop5", + condition: { + operator: EQUAL, + value: [1, 2, 3] + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "a"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop5", + condition: { + operator: EQUAL + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for Equal operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def 
test_graph_node_property_filter_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop5", + condition: { + operator: EQUAL, + value: 1 + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop5: expected List but actual type is I64" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_not_equal(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop4", + condition: { + operator: NOT_EQUAL, + value: true + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "b"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_not_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop4", + condition: { + operator: NOT_EQUAL + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for NotEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_not_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop4", + condition: { + operator: NOT_EQUAL, + value: 1 + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop4: expected Bool but actual type is I64" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_greater_than_or_equal(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN_OR_EQUAL, + value: 60 + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "a"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_greater_than_or_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN_OR_EQUAL + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for GreaterThanOrEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_greater_than_or_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN_OR_EQUAL, + value: true + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop1: expected I64 but actual type is Bool" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_less_than_or_equal(graph): + query = """ + query { + graph(path: 
"g") { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN_OR_EQUAL, + value: 30 + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "b"}, + {"name": "c"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_less_than_or_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN_OR_EQUAL + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for LessThanOrEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_less_than_or_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN_OR_EQUAL, + value: "shivam" + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop1: expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_greater_than(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN, + value: 30 + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "a"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_greater_than_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for GreaterThan operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_greater_than_type_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN, + value: "shivam" + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop1: expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_less_than(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN, + value: 30 + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "b"}, + {"name": "c"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_less_than_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + 
operator: LESS_THAN + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for LessThan operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_less_than_type_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN, + value: "shivam" + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop1: expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_is_none(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop5", + condition: { + operator: IS_NONE + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "b"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_is_some(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop5", + condition: { + operator: IS_SOME + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "a"}, + {"name": "c"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_any(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: ANY, + value: [10, 30, 50, 70] + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "b"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_any_empty_list(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: ANY, + value: [] + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_any_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: ANY, + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a list for Any operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_any_type_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: ANY, + value: "shivam" + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a list for Any operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, 
PersistentGraph]) +def test_graph_node_property_filter_not_any(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: NOT_ANY, + value: [10, 30, 50, 70] + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodeFilter": { + "nodes": { + "list": [ + {"name": "a"}, + {"name": "c"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_not_any_empty_list(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: NOT_ANY, + value: [] + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "a"}, + {"name": "b"}, + {"name": "c"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_not_any_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: NOT_ANY, + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a list for NotAny operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_node_property_filter_not_any_type_error(graph): + query = """ + query { + graph(path: "g") { + nodeFilter( + property: "prop1", + condition: { + operator: NOT_ANY, + value: "shivam" + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a list for NotAny operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_equal(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: EQUAL, + value: [1, 2, 3] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "a"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_equal_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: EQUAL, + value: [1, 2, 3] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: EQUAL + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a value for Equal operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_equal_type_error(graph): + query = """ + 
query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: EQUAL, + value: 1 + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property eprop5: expected List but actual type is I64" + run_graphql_error_test(query, expected_error_message, graph()) + + +def test_graph_edge_property_filter_equal_type_error_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: EQUAL, + value: 1 + } + ) { + nodes { + list { + name + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_not_equal(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop4", + condition: { + operator: NOT_EQUAL, + value: true + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "c"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_not_equal_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop4", + condition: { + operator: NOT_EQUAL, + value: true + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_not_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop4", + condition: { + operator: NOT_EQUAL + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a value for NotEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_not_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop4", + condition: { + operator: NOT_EQUAL, + value: 1 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property eprop4: expected Bool but actual type is I64" + run_graphql_error_test(query, expected_error_message, graph()) + + +def test_graph_edge_property_filter_not_equal_type_error_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop4", + condition: { + operator: NOT_EQUAL, + value: 1 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_greater_than_or_equal(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: 
"eprop1", + condition: { + operator: GREATER_THAN_OR_EQUAL, + value: 60 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "a"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_greater_than_or_equal_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN_OR_EQUAL, + value: 60 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_greater_than_or_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN_OR_EQUAL + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a value for GreaterThanOrEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_greater_than_or_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN_OR_EQUAL, + value: true + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property eprop1: expected I64 but actual type is Bool" + run_graphql_error_test(query, expected_error_message, graph()) + + +def test_graph_edge_property_filter_greater_than_or_equal_type_error_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN_OR_EQUAL, + value: true + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_less_than_or_equal(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN_OR_EQUAL, + value: 30 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "b"}, + "dst": {"name": "d"} + }, + { + "src": {"name": "c"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_less_than_or_equal_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN_OR_EQUAL, + value: 30 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + 
+@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_less_than_or_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN_OR_EQUAL + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a value for LessThanOrEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_less_than_or_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN_OR_EQUAL, + value: "shivam" + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property eprop1: expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +def test_graph_edge_property_filter_less_than_or_equal_type_error_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN_OR_EQUAL, + value: "shivam" + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_greater_than(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN, + value: 30 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "a"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_greater_than_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN, + value: 30 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_greater_than_no_value_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a value for GreaterThan operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_greater_than_type_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN, + value: "shivam" + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property eprop1: 
expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +def test_graph_edge_property_filter_greater_than_type_error_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: GREATER_THAN, + value: "shivam" + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_less_than(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN, + value: 30 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "b"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_less_than_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN, + value: 30 + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_less_than_no_value_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a value for LessThan operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_less_than_type_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN, + value: "shivam" + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property eprop1: expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +def test_graph_edge_property_filter_less_than_type_error_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: LESS_THAN, + value: "shivam" + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_is_none(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: IS_NONE + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [] + } + } + } + } + 
run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_is_none_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: IS_NONE + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_is_some(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: IS_SOME + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "a"}, + "dst": {"name": "d"} + }, + { + "src": {"name": "b"}, + "dst": {"name": "d"} + }, + { + "src": {"name": "c"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_is_some_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop5", + condition: { + operator: IS_SOME + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_any(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: ANY, + value: [10, 20, 30] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "b"}, + "dst": {"name": "d"} + }, + { + "src": {"name": "c"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_any_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: ANY, + value: [10, 20, 30] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_any_empty_list(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: ANY, + value: [] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_any_empty_list_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: ANY, + value: [] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + 
run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_any_no_value_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "prop1", + condition: { + operator: ANY, + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a list for Any operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_any_type_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "prop1", + condition: { + operator: ANY, + value: "shivam" + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a list for Any operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_not_any(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: NOT_ANY, + value: [10, 20, 30] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "a"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_not_any_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: NOT_ANY, + value: [10, 20, 30] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +# Edge property filter is not supported yet for PersistentGraph +@pytest.mark.parametrize("graph", [Graph]) +def test_graph_edge_property_filter_not_any_empty_list(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: NOT_ANY, + value: [] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_output = { + "graph": { + "edgeFilter": { + "edges": { + "list": [ + { + "src": {"name": "a"}, + "dst": {"name": "d"} + }, + { + "src": {"name": "b"}, + "dst": {"name": "d"} + }, + { + "src": {"name": "c"}, + "dst": {"name": "d"} + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +def test_graph_edge_property_filter_not_any_empty_list_persistent_graph(): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: NOT_ANY, + value: [] + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Property filtering not implemented on PersistentGraph yet" + run_graphql_error_test(query, expected_error_message, PersistentGraph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_not_any_no_value_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: NOT_ANY, + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a list for NotAny 
operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_graph_edge_property_filter_not_any_type_error(graph): + query = """ + query { + graph(path: "g") { + edgeFilter( + property: "eprop1", + condition: { + operator: NOT_ANY, + value: "shivam" + } + ) { + edges { + list { + src{name} + dst{name} + } + } + } + } + } + """ + expected_error_message = "Expected a list for NotAny operator" + run_graphql_error_test(query, expected_error_message, graph()) diff --git a/python/tests/graphql/test_nodes_property_filter.py b/python/tests/graphql/test_nodes_property_filter.py new file mode 100644 index 0000000000..af6e844026 --- /dev/null +++ b/python/tests/graphql/test_nodes_property_filter.py @@ -0,0 +1,888 @@ +import tempfile + +import pytest + +from raphtory.graphql import GraphServer +from raphtory import Graph, PersistentGraph +import json +import re + +PORT = 1737 + + +def create_test_graph(g): + g.add_node(1, "a", properties={"prop1": 60, "prop2": 31.3, "prop3": "abc123", "prop4": True, "prop5": [1, 2, 3]}) + g.add_node(1, "b", properties={"prop1": 10, "prop2": 31.3, "prop3": "abc223", "prop4": False}) + g.add_node(1, "c", properties={"prop1": 20, "prop2": 31.3, "prop3": "abc333", "prop4": True, "prop5": [5, 6, 7]}) + g.add_node(1, "d", properties={"prop1": 30, "prop2": 31.3, "prop3": "abc444", "prop4": False}) + g.add_edge( + 2, + "a", + "d", + properties={"eprop1": 60, "eprop2": 0.4, "eprop3": "xyz123", "eprop4": True, "eprop5": [1, 2, 3]}, + ) + g.add_edge( + 2, + "b", + "d", + properties={"eprop1": 10, "eprop2": 1.7, "eprop3": "xyz123", "eprop4": True, "eprop5": [3, 4, 5]}, + ) + g.add_edge( + 2, + "c", + "d", + properties={"eprop1": 30, "eprop2": 6.4, "eprop3": "xyz123", "eprop4": False, "eprop5": [10]}, + ) + return g + + +def run_graphql_test(query, expected_output, graph): + create_test_graph(graph) + tmp_work_dir = tempfile.mkdtemp() + with GraphServer(tmp_work_dir).start(PORT) as server: + client = server.get_client() + client.send_graph(path="g", graph=graph) + + response = client.query(query) + + # Convert response to a dictionary if needed and compare + response_dict = json.loads(response) if isinstance(response, str) else response + assert response_dict == expected_output + + +def run_graphql_error_test(query, expected_error_message, graph): + create_test_graph(graph) + tmp_work_dir = tempfile.mkdtemp() + with GraphServer(tmp_work_dir).start(PORT) as server: + client = server.get_client() + client.send_graph(path="g", graph=graph) + + with pytest.raises(Exception) as excinfo: + client.query(query) + + full_error_message = str(excinfo.value) + match = re.search(r'"message":"(.*?)"', full_error_message) + error_message = match.group(1) if match else "" + + assert error_message == expected_error_message, f"Expected '{expected_error_message}', but got '{error_message}'" + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_equal(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop5", + condition: { + operator: EQUAL, + value: [1, 2, 3] + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + { + "name": "a" + } + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_equal_no_value_error(graph): + query = """ + query { + 
graph(path: "g") { + nodes { + nodeFilter( + property: "prop5", + condition: { + operator: EQUAL + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for Equal operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop5", + condition: { + operator: EQUAL, + value: 1 + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop5: expected List but actual type is I64" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_not_equal(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop4", + condition: { + operator: NOT_EQUAL, + value: true + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "b"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_not_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop4", + condition: { + operator: NOT_EQUAL + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for NotEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_not_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop4", + condition: { + operator: NOT_EQUAL, + value: 1 + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop4: expected Bool but actual type is I64" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_greater_than_or_equal(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN_OR_EQUAL, + value: 60 + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "a"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_greater_than_or_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN_OR_EQUAL + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for GreaterThanOrEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_greater_than_or_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN_OR_EQUAL, + value: true + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType 
Error: Wrong type for property prop1: expected I64 but actual type is Bool" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_less_than_or_equal(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN_OR_EQUAL, + value: 30 + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "b"}, + {"name": "c"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_less_than_or_equal_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN_OR_EQUAL + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for LessThanOrEqual operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_less_than_or_equal_type_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN_OR_EQUAL, + value: "shivam" + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop1: expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_greater_than(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN, + value: 30 + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "a"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_greater_than_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for GreaterThan operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_greater_than_type_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: GREATER_THAN, + value: "shivam" + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop1: expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_less_than(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN, + value: 30 + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "b"}, + {"name": "c"} + ] + } + } + } + } + run_graphql_test(query, 
expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_less_than_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a value for LessThan operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_less_than_type_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: LESS_THAN, + value: "shivam" + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "PropertyType Error: Wrong type for property prop1: expected I64 but actual type is Str" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_is_none(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop5", + condition: { + operator: IS_NONE + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "b"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_is_some(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop5", + condition: { + operator: IS_SOME + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "a"}, + {"name": "c"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_any(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: ANY, + value: [10, 30, 50, 70] + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "b"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_any_empty_list(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: ANY, + value: [] + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_any_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: ANY, + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a list for Any operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_any_type_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: ANY, + value: "shivam" + } + ) { + 
list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a list for Any operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_not_any(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: NOT_ANY, + value: [10, 30, 50, 70] + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "a"}, + {"name": "c"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_not_any_empty_list(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: NOT_ANY, + value: [] + } + ) { + list { + name + } + } + } + } + } + """ + expected_output = { + "graph": { + "nodes": { + "nodeFilter": { + "list": [ + {"name": "a"}, + {"name": "b"}, + {"name": "c"}, + {"name": "d"} + ] + } + } + } + } + run_graphql_test(query, expected_output, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_not_any_no_value_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: NOT_ANY, + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a list for NotAny operator" + run_graphql_error_test(query, expected_error_message, graph()) + + +@pytest.mark.parametrize("graph", [Graph, PersistentGraph]) +def test_node_property_filter_not_any_type_error(graph): + query = """ + query { + graph(path: "g") { + nodes { + nodeFilter( + property: "prop1", + condition: { + operator: NOT_ANY, + value: "shivam" + } + ) { + list { + name + } + } + } + } + } + """ + expected_error_message = "Expected a list for NotAny operator" + run_graphql_error_test(query, expected_error_message, graph()) diff --git a/python/tests/test_graphdb/test_graphdb_imports.py b/python/tests/test_graphdb/test_graphdb_imports.py index 836771562b..793817d033 100644 --- a/python/tests/test_graphdb/test_graphdb_imports.py +++ b/python/tests/test_graphdb/test_graphdb_imports.py @@ -19,6 +19,13 @@ def test_import_into_graph(): assert res.properties.get("temp") == True assert res.properties.constant.get("con") == 11 + gg = Graph() + gg.add_node(1, "B") + with pytest.raises(Exception) as excinfo: + gg.import_nodes([g_a, g_b]) + assert "Nodes already exist" in str(excinfo.value) + assert gg.node("A") is None + gg = Graph() gg.import_nodes([g_a, g_b]) assert len(gg.nodes) == 2 @@ -35,11 +42,19 @@ def test_import_into_graph(): assert res.properties.as_dict() == props e_c_d = g.add_edge(4, "C", "D") + gg = Graph() gg.import_edges([e_a_b, e_c_d]) assert len(gg.nodes) == 4 assert len(gg.edges) == 2 + gg = Graph() + gg.add_edge(1, "C", "D") + with pytest.raises(Exception) as excinfo: + gg.import_edges([e_a_b, e_c_d]) + assert "Edges already exist" in str(excinfo.value) + assert gg.edge("A", "B") is None + def test_import_with_int(): g = Graph() @@ -56,3 +71,247 @@ def test_import_with_int(): g2.import_node(g.node(1)) g2.import_nodes([g.node(2), g.node(3)]) assert g2.count_nodes() == g.count_nodes() + + +def test_import_node_as(): + g = Graph() + a = g.add_node(1, "A") + b = g.add_node(1, "B", {"temp": True}) + b.add_constant_properties({"con": 11}) + + gg = Graph() + res = 
gg.import_node_as(a, "X") + assert res.name == "X" + assert res.history().tolist() == [1] + + gg.add_node(1, "Y") + + with pytest.raises(Exception) as excinfo: + gg.import_node_as(b, "Y") + + assert "Node already exists" in str(excinfo.value) + + assert gg.nodes.name == ["X", "Y"] + y = gg.node("Y") + assert y.name == "Y" + assert y.history().tolist() == [1] + assert y.properties.get("temp") is None + assert y.properties.constant.get("con") is None + + +def test_import_node_as_merge(): + g = Graph() + a = g.add_node(1, "A") + b = g.add_node(1, "B", {"temp": True}) + b.add_constant_properties({"con": 11}) + + gg = Graph() + res = gg.import_node_as(a, "X") + assert res.name == "X" + assert res.history().tolist() == [1] + + gg.add_node(1, "Y") + gg.import_node_as(b, "Y", True) + + assert gg.nodes.name == ["X", "Y"] + y = gg.node("Y") + assert y.name == "Y" + assert y.history().tolist() == [1] + assert y.properties.get("temp") == True + assert y.properties.constant.get("con") == 11 + + +def test_import_nodes_as(): + g = Graph() + a = g.add_node(1, "A") + b = g.add_node(1, "B", {"temp": True}) + b.add_constant_properties({"con": 11}) + + gg = Graph() + gg.add_node(1, "Y") + + with pytest.raises(Exception) as excinfo: + gg.import_nodes_as([a, b], ["X", "Y"]) + + assert "Nodes already exist" in str(excinfo.value) + + assert gg.node("X") == None + + assert sorted(gg.nodes.name) == ["Y"] + y = gg.node("Y") + assert y.name == "Y" + assert y.history().tolist() == [1] + assert y.properties.get("temp") is None + assert y.properties.constant.get("con") is None + + +def test_import_nodes_as_merge(): + g = Graph() + a = g.add_node(1, "A") + b = g.add_node(1, "B", {"temp": True}) + b.add_constant_properties({"con": 11}) + + gg = Graph() + gg.add_node(1, "Y") + gg.import_nodes_as([a, b], ["X", "Y"], True) + + assert sorted(gg.nodes.name) == ["X", "Y"] + x = gg.node("X") + assert x.name == "X" + assert x.history().tolist() == [1] + + y = gg.node("Y") + assert y.name == "Y" + assert y.history().tolist() == [1] + assert y.properties.get("temp") == True + assert y.properties.constant.get("con") == 11 + + +def test_import_edge_as(): + g = Graph() + a = g.add_node(1, "A") + b = g.add_node(1, "B", {"temp": True}) + b.add_constant_properties({"con": 11}) + + e_a_b = g.add_edge(2, "A", "B", {"e_temp": True}) + e_b_c = g.add_edge(2, "B", "C", {"e_temp": True}) + + gg = Graph() + gg.add_edge(1, "X", "Y") + + gg.import_edge_as(e_b_c, ("Y", "Z")) + + with pytest.raises(Exception) as excinfo: + gg.import_edge_as(e_a_b, ("X", "Y")) + assert "Edge already exists" in str(excinfo.value) + + assert sorted(gg.nodes.name) == ["X", "Y", "Z"] + x = gg.node("X") + assert x.name == "X" + assert x.history().tolist() == [1] + + y = gg.node("Y") + assert y.name == "Y" + assert y.history().tolist() == [1, 2] + assert y.properties.get("temp") is None + assert y.properties.constant.get("con") is None + + e = gg.edge("X", "Y") + assert e.properties.get("e_temp") is None + + +def test_import_edge_as_merge(): + g = Graph() + a = g.add_node(1, "A") + b = g.add_node(1, "B", {"temp": True}) + b.add_constant_properties({"con": 11}) + + e_a_b = g.add_edge(2, "A", "B", {"e_temp": True}) + + gg = Graph() + gg.add_edge(3, "X", "Y") + gg.import_edge_as(e_a_b, ("X", "Y"), True) + + assert sorted(gg.nodes.name) == ["X", "Y"] + x = gg.node("X") + assert x.name == "X" + print(x.history()) + assert x.history().tolist() == [2, 3] + + y = gg.node("Y") + assert y.name == "Y" + assert y.history().tolist() == [2, 3] + assert y.properties.get("temp") is None + 
assert y.properties.constant.get("con") is None + + e = gg.edge("X", "Y") + assert e.properties.get("e_temp") == True + + +def test_import_edges_as(): + g = Graph() + a = g.add_node(1, "A") + b = g.add_node(1, "B", {"temp": True}) + b.add_constant_properties({"con": 11}) + c = g.add_node(1, "C") + + e_a_b = g.add_edge(2, "A", "B", {"e_temp": True}) + e_b_c = g.add_edge(2, "B", "C") + + gg = Graph() + gg.add_edge(1, "Y", "Z") + + with pytest.raises(Exception) as excinfo: + gg.import_edges_as([e_a_b, e_b_c], [("X", "Y"), ("Y", "Z")]) + assert "Edges already exist" in str(excinfo.value) + + assert sorted(gg.nodes.name) == ["Y", "Z"] + + y = gg.node("Y") + assert y.name == "Y" + assert y.history().tolist() == [1] + assert y.properties.get("temp") is None + assert y.properties.constant.get("con") is None + + z = gg.node("Z") + assert z.name == "Z" + assert z.history().tolist() == [1] + + +def test_import_edges_as_merge(): + g = Graph() + a = g.add_node(1, "A") + b = g.add_node(1, "B", {"temp": True}) + b.add_constant_properties({"con": 11}) + c = g.add_node(1, "C") + + e_a_b = g.add_edge(2, "A", "B", {"e_temp": True}) + e_b_c = g.add_edge(2, "B", "C") + + gg = Graph() + gg.add_edge(3, "Y", "Z") + gg.import_edges_as([e_a_b, e_b_c], [("X", "Y"), ("Y", "Z")], True) + + assert sorted(gg.nodes.name) == ["X", "Y", "Z"] + + x = gg.node("X") + assert x.name == "X" + assert x.history().tolist() == [2] + + y = gg.node("Y") + assert y.name == "Y" + assert y.history().tolist() == [2, 3] + assert y.properties.get("temp") is None + assert y.properties.constant.get("con") is None + + z = gg.node("Z") + assert z.name == "Z" + assert z.history().tolist() == [2, 3] + + +def test_import_edges(): + g = Graph() + g.add_node(1, 1) + g.add_node(1, 2) + g.add_node(1, 3) + g.add_edge(1, 4, 5) + g.add_edge(1, 6, 7) + g.add_edge(1, 8, 9) + g2 = Graph() + g2.import_edges(g.edges) + assert g2.count_edges() == 3 + assert g.edges.id == g2.edges.id + + +def test_import_edges_iterator(): + g = Graph() + g.add_node(1, 1) + g.add_node(1, 2) + g.add_node(1, 3) + g.add_edge(1, 4, 5) + g.add_edge(1, 6, 7) + g.add_edge(1, 8, 9) + g2 = Graph() + g2.import_edges(iter(g.edges)) + assert g2.count_edges() == 3 + assert g.edges.id == g2.edges.id diff --git a/python/tests/test_graphdb/test_node_state.py b/python/tests/test_graphdb/test_node_state.py new file mode 100644 index 0000000000..a8015ff770 --- /dev/null +++ b/python/tests/test_graphdb/test_node_state.py @@ -0,0 +1,26 @@ +from raphtory import Graph + + +def test_degree_window(): + g = Graph() + g.add_edge(0, 1, 2) + g.add_edge(1, 1, 3) + g.add_edge(2, 1, 4) + + degs = g.nodes.out_degree() + assert degs == [3, 0, 0, 0] + assert degs.before(1) == [1, 0, 0, 0] + assert degs[1] == 3 + assert degs.before(1)[1] == 1 + + +def test_degree_layer(): + g = Graph() + g.add_edge(0, 1, 2, layer="1") + g.add_edge(0, 1, 3, layer="2") + g.add_edge(0, 1, 4, layer="2") + + degs = g.nodes.out_degree() + assert degs == [3, 0, 0, 0] + assert degs.layers(["1"]) == [1, 0, 0, 0] + assert degs.layers(["2"]) == [2, 0, 0, 0] diff --git a/raphtory-api/src/core/storage/timeindex.rs b/raphtory-api/src/core/storage/timeindex.rs index 7b6d214dd4..c96e0002ca 100644 --- a/raphtory-api/src/core/storage/timeindex.rs +++ b/raphtory-api/src/core/storage/timeindex.rs @@ -23,6 +23,62 @@ pub trait AsTime: fmt::Debug + Copy + Ord + Eq + Send + Sync + 'static { fn new(t: i64, s: usize) -> Self; } +pub trait TimeIndexIntoOps: Sized { + type IndexType: AsTime; + type RangeType: TimeIndexIntoOps; + + fn into_range(self, w: 
Range) -> Self::RangeType; + + fn into_range_t(self, w: Range) -> Self::RangeType { + self.into_range(Self::IndexType::range(w)) + } + + fn into_iter(self) -> impl Iterator + Send; + + fn into_iter_t(self) -> impl Iterator + Send { + self.into_iter().map(|time| time.t()) + } +} + +pub trait TimeIndexOps: Send + Sync { + type IndexType: AsTime; + type RangeType<'a>: TimeIndexOps + 'a + where + Self: 'a; + + fn active(&self, w: Range) -> bool; + + fn active_t(&self, w: Range) -> bool { + self.active(Self::IndexType::range(w)) + } + + fn range(&self, w: Range) -> Self::RangeType<'_>; + + fn range_t(&self, w: Range) -> Self::RangeType<'_> { + self.range(Self::IndexType::range(w)) + } + + fn first_t(&self) -> Option { + self.first().map(|ti| ti.t()) + } + + fn first(&self) -> Option; + + fn last_t(&self) -> Option { + self.last().map(|ti| ti.t()) + } + + fn last(&self) -> Option; + + fn iter(&self) -> Box + Send + '_>; + + fn iter_t(&self) -> Box + Send + '_> { + Box::new(self.iter().map(|time| time.t())) + } + + fn len(&self) -> usize; +} + impl From for TimeIndexEntry { fn from(value: i64) -> Self { Self::start(value) diff --git a/raphtory-api/src/iter.rs b/raphtory-api/src/iter.rs new file mode 100644 index 0000000000..840687fa0e --- /dev/null +++ b/raphtory-api/src/iter.rs @@ -0,0 +1,12 @@ +pub type BoxedIter = Box + Send>; +pub type BoxedLIter<'a, T> = Box + Send + 'a>; + +pub trait IntoDynBoxed<'a, T> { + fn into_dyn_boxed(self) -> BoxedLIter<'a, T>; +} + +impl<'a, T, I: Iterator + Send + 'a> IntoDynBoxed<'a, T> for I { + fn into_dyn_boxed(self) -> BoxedLIter<'a, T> { + Box::new(self) + } +} diff --git a/raphtory-api/src/lib.rs b/raphtory-api/src/lib.rs index c90b7d8bb9..a5bb02383c 100644 --- a/raphtory-api/src/lib.rs +++ b/raphtory-api/src/lib.rs @@ -3,3 +3,5 @@ pub mod compute; pub mod core; #[cfg(feature = "python")] pub mod python; + +pub mod iter; diff --git a/raphtory-cypher/src/executor/table_provider/edge.rs b/raphtory-cypher/src/executor/table_provider/edge.rs index a980874c63..3312b2e005 100644 --- a/raphtory-cypher/src/executor/table_provider/edge.rs +++ b/raphtory-cypher/src/executor/table_provider/edge.rs @@ -54,7 +54,7 @@ impl EdgeListTableProvider { .as_ref() .layer(layer_id) .edges_storage() - .time() + .time_col() .values() .len(); @@ -186,7 +186,7 @@ fn produce_record_batch( let layer = graph.as_ref().layer(layer_id); let edges = layer.edges_storage(); - let chunked_lists_ts = edges.time(); + let chunked_lists_ts = edges.time_col(); let offsets = chunked_lists_ts.offsets(); // FIXME: potentially implement into_iter_chunks() for chunked arrays to avoid having to collect these chunks, if it turns out to be a problem let time_values_chunks = chunked_lists_ts @@ -257,7 +257,7 @@ fn produce_record_batch( let column_ids = layer .edges_storage() - .data_type() + .prop_dtypes() .iter() .enumerate() .skip(1) // first one is supposed to be time diff --git a/raphtory-cypher/src/transpiler/mod.rs b/raphtory-cypher/src/transpiler/mod.rs index 2b129d4847..ea9099edc1 100644 --- a/raphtory-cypher/src/transpiler/mod.rs +++ b/raphtory-cypher/src/transpiler/mod.rs @@ -262,7 +262,7 @@ fn scan_edges_as_sql_cte( // TODO: this needs to match the schema from EdgeListTableProvider fn full_layer_fields(graph: &DiskGraphStorage, layer_id: usize) -> Option { - let dt = graph.as_ref().layer(layer_id).edges_props_data_type(); + let dt = graph.as_ref().layer(layer_id).edges_props_data_type()?; let arr_dt: arrow_schema::DataType = dt.clone().into(); match arr_dt { arrow_schema::DataType::Struct(fields) 
=> { diff --git a/raphtory-graphql/src/model/graph/graph.rs b/raphtory-graphql/src/model/graph/graph.rs index 9eba75dbdf..4d414d8cd9 100644 --- a/raphtory-graphql/src/model/graph/graph.rs +++ b/raphtory-graphql/src/model/graph/graph.rs @@ -3,6 +3,7 @@ use crate::{ model::{ graph::{ edge::Edge, edges::GqlEdges, node::Node, nodes::GqlNodes, property::GqlProperties, + FilterCondition, Operator, }, plugins::graph_algorithm_plugin::GraphAlgorithmPlugin, schema::graph_schema::GraphSchema, @@ -486,4 +487,330 @@ impl GqlGraph { other_g.write_updates()?; Ok(true) } + + async fn node_filter( + &self, + property: String, + condition: FilterCondition, + ) -> Result { + match condition.operator { + Operator::Equal => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_nodes(PropertyFilter::eq(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "Equal".into(), + )) + } + } + Operator::NotEqual => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_nodes(PropertyFilter::ne(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "NotEqual".into(), + )) + } + } + Operator::GreaterThanOrEqual => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_nodes(PropertyFilter::ge(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "GreaterThanOrEqual".into(), + )) + } + } + Operator::LessThanOrEqual => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_nodes(PropertyFilter::le(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "LessThanOrEqual".into(), + )) + } + } + Operator::GreaterThan => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_nodes(PropertyFilter::gt(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "GreaterThan".into(), + )) + } + } + Operator::LessThan => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_nodes(PropertyFilter::lt(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "LessThan".into(), + )) + } + } + Operator::IsNone => { + let filtered_graph = self.graph.filter_nodes(PropertyFilter::is_none(property))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } + Operator::IsSome => { + let filtered_graph = self.graph.filter_nodes(PropertyFilter::is_some(property))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } + Operator::Any => { + if let Some(Prop::List(list)) = condition.value.map(|v| v.0) { + let prop_values: Vec = list.iter().cloned().collect(); + let filtered_graph = self + .graph + 
.filter_nodes(PropertyFilter::any(property, prop_values))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "list".into(), + "Any".into(), + )) + } + } + Operator::NotAny => { + if let Some(Prop::List(list)) = condition.value.map(|v| v.0) { + let prop_values: Vec = list.iter().cloned().collect(); + let filtered_graph = self + .graph + .filter_nodes(PropertyFilter::not_any(property, prop_values))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "list".into(), + "NotAny".into(), + )) + } + } + } + } + + async fn edge_filter( + &self, + property: String, + condition: FilterCondition, + ) -> Result { + match condition.operator { + Operator::Equal => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_edges(PropertyFilter::eq(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "Equal".into(), + )) + } + } + Operator::NotEqual => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_edges(PropertyFilter::ne(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "NotEqual".into(), + )) + } + } + Operator::GreaterThanOrEqual => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_edges(PropertyFilter::ge(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "GreaterThanOrEqual".into(), + )) + } + } + Operator::LessThanOrEqual => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_edges(PropertyFilter::le(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "LessThanOrEqual".into(), + )) + } + } + Operator::GreaterThan => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_edges(PropertyFilter::gt(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "GreaterThan".into(), + )) + } + } + Operator::LessThan => { + if let Some(value) = condition.value { + let filtered_graph = self + .graph + .filter_edges(PropertyFilter::lt(property, value.0))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "LessThan".into(), + )) + } + } + Operator::IsNone => { + let filtered_graph = self.graph.filter_edges(PropertyFilter::is_none(property))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } + Operator::IsSome => { + let filtered_graph = self.graph.filter_edges(PropertyFilter::is_some(property))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } + Operator::Any => { + if 
let Some(Prop::List(list)) = condition.value.map(|v| v.0) { + let prop_values: Vec = list.iter().cloned().collect(); + let filtered_graph = self + .graph + .filter_edges(PropertyFilter::any(property, prop_values))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "list".into(), + "Any".into(), + )) + } + } + Operator::NotAny => { + if let Some(Prop::List(list)) = condition.value.map(|v| v.0) { + let prop_values: Vec = list.iter().cloned().collect(); + let filtered_graph = self + .graph + .filter_edges(PropertyFilter::not_any(property, prop_values))?; + Ok(GqlGraph::new( + self.path.clone(), + filtered_graph.into_dynamic(), + self.index.clone(), + )) + } else { + Err(GraphError::ExpectedValueForOperator( + "list".into(), + "NotAny".into(), + )) + } + } + } + } } diff --git a/raphtory-graphql/src/model/graph/mod.rs b/raphtory-graphql/src/model/graph/mod.rs index 4c65a1b6c9..7b332b3a41 100644 --- a/raphtory-graphql/src/model/graph/mod.rs +++ b/raphtory-graphql/src/model/graph/mod.rs @@ -1,3 +1,6 @@ +use crate::model::graph::property::GqlPropValue; +use dynamic_graphql::{Enum, InputObject}; + pub(crate) mod edge; mod edges; pub(crate) mod graph; @@ -8,3 +11,23 @@ mod nodes; mod path_from_node; pub(crate) mod property; pub(crate) mod vectorised_graph; + +#[derive(InputObject, Debug)] +pub struct FilterCondition { + pub operator: Operator, + pub value: Option, +} + +#[derive(Enum, Copy, Clone, Debug)] +pub enum Operator { + Equal, + NotEqual, + GreaterThanOrEqual, + LessThanOrEqual, + GreaterThan, + LessThan, + IsNone, + IsSome, + Any, + NotAny, +} diff --git a/raphtory-graphql/src/model/graph/mutable_graph.rs b/raphtory-graphql/src/model/graph/mutable_graph.rs index fb991316b7..8ba3eebccf 100644 --- a/raphtory-graphql/src/model/graph/mutable_graph.rs +++ b/raphtory-graphql/src/model/graph/mutable_graph.rs @@ -137,7 +137,7 @@ impl GqlMutableGraph { .add_constant_properties(as_properties(constant_props))?; } if let Ok(node) = self.get_node_view(name) { - node.update_embeddings().await; // FIXME: ideally this should call the embedding function just once!! + let _ = node.update_embeddings().await; // FIXME: ideally this should call the embedding function just once!! } } self.graph.write_updates()?; @@ -166,7 +166,7 @@ impl GqlMutableGraph { as_properties(properties.unwrap_or(vec![])), layer.as_str(), )?; - edge.update_embeddings().await; + let _ = edge.update_embeddings().await; self.graph.write_updates()?; Ok(edge.into()) } @@ -192,7 +192,7 @@ impl GqlMutableGraph { .add_constant_properties(as_properties(constant_props), layer)?; } if let Ok(edge) = self.get_edge_view(src, dst) { - edge.update_embeddings().await; // FIXME: ideally this should call the embedding function just once!! + let _ = edge.update_embeddings().await; // FIXME: ideally this should call the embedding function just once!! 
} } self.graph.write_updates()?; @@ -209,7 +209,7 @@ impl GqlMutableGraph { layer: Option, ) -> Result { let edge = self.graph.delete_edge(time, src, dst, layer.as_str())?; - edge.update_embeddings().await; + let _ = edge.update_embeddings().await; self.graph.write_updates()?; Ok(edge.into()) } @@ -253,7 +253,8 @@ impl GqlMutableGraph { impl GqlMutableGraph { async fn update_graph_embeddings(&self) { - self.graph + let _ = self + .graph .update_graph_embeddings(Some(self.path.get_original_path_str().to_owned())) .await; } @@ -308,7 +309,7 @@ impl GqlMutableNode { ) -> Result { self.node .add_constant_properties(as_properties(properties))?; - self.node.update_embeddings().await; + let _ = self.node.update_embeddings().await; self.node.graph.write_updates()?; Ok(true) } @@ -316,7 +317,7 @@ impl GqlMutableNode { /// Set the node type (errors if the node already has a non-default type) async fn set_node_type(&self, new_type: String) -> Result { self.node.set_node_type(&new_type)?; - self.node.update_embeddings().await; + let _ = self.node.update_embeddings().await; self.node.graph.write_updates()?; Ok(true) } @@ -328,7 +329,7 @@ impl GqlMutableNode { ) -> Result { self.node .update_constant_properties(as_properties(properties))?; - self.node.update_embeddings().await; + let _ = self.node.update_embeddings().await; self.node.graph.write_updates()?; Ok(true) } @@ -341,7 +342,7 @@ impl GqlMutableNode { ) -> Result { self.node .add_updates(time, as_properties(properties.unwrap_or(vec![])))?; - self.node.update_embeddings().await; + let _ = self.node.update_embeddings().await; self.node.graph.write_updates()?; Ok(true) } @@ -383,7 +384,7 @@ impl GqlMutableEdge { /// Mark the edge as deleted at time `time` async fn delete(&self, time: i64, layer: Option) -> Result { self.edge.delete(time, layer.as_str())?; - self.edge.update_embeddings().await; + let _ = self.edge.update_embeddings().await; self.edge.graph.write_updates()?; Ok(true) } @@ -399,7 +400,7 @@ impl GqlMutableEdge { ) -> Result { self.edge .add_constant_properties(as_properties(properties), layer.as_str())?; - self.edge.update_embeddings().await; + let _ = self.edge.update_embeddings().await; self.edge.graph.write_updates()?; Ok(true) } @@ -415,7 +416,7 @@ impl GqlMutableEdge { ) -> Result { self.edge .update_constant_properties(as_properties(properties), layer.as_str())?; - self.edge.update_embeddings().await; + let _ = self.edge.update_embeddings().await; self.edge.graph.write_updates()?; Ok(true) } @@ -435,7 +436,7 @@ impl GqlMutableEdge { as_properties(properties.unwrap_or(vec![])), layer.as_str(), )?; - self.edge.update_embeddings().await; + let _ = self.edge.update_embeddings().await; self.edge.graph.write_updates()?; Ok(true) } diff --git a/raphtory-graphql/src/model/graph/nodes.rs b/raphtory-graphql/src/model/graph/nodes.rs index 471c6fac1d..3adcf29dc6 100644 --- a/raphtory-graphql/src/model/graph/nodes.rs +++ b/raphtory-graphql/src/model/graph/nodes.rs @@ -1,8 +1,9 @@ -use crate::model::graph::node::Node; -use dynamic_graphql::{ResolvedObject, ResolvedObjectFields}; +use crate::model::graph::{node::Node, property::GqlPropValue, FilterCondition, Operator}; +use dynamic_graphql::{Enum, InputObject, ResolvedObject, ResolvedObjectFields}; use raphtory::{ + core::utils::errors::GraphError, db::{api::view::DynamicGraph, graph::nodes::Nodes}, - prelude::*, + prelude::{GraphViewOps, *}, }; #[derive(ResolvedObject)] @@ -93,6 +94,129 @@ impl GqlNodes { self.update(self.nn.type_filter(&node_types)) } + async fn node_filter( + &self, + 
property: String, + condition: FilterCondition, + ) -> Result { + match condition.operator { + Operator::Equal => { + if let Some(value) = condition.value { + let filtered_nodes = self + .nn + .filter_nodes(PropertyFilter::eq(property, value.0))?; + Ok(self.update(filtered_nodes)) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "Equal".into(), + )) + } + } + Operator::NotEqual => { + if let Some(value) = condition.value { + let filtered_nodes = self + .nn + .filter_nodes(PropertyFilter::ne(property, value.0))?; + Ok(self.update(filtered_nodes)) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "NotEqual".into(), + )) + } + } + Operator::GreaterThanOrEqual => { + if let Some(value) = condition.value { + let filtered_nodes = self + .nn + .filter_nodes(PropertyFilter::ge(property, value.0))?; + Ok(self.update(filtered_nodes)) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "GreaterThanOrEqual".into(), + )) + } + } + Operator::LessThanOrEqual => { + if let Some(value) = condition.value { + let filtered_nodes = self + .nn + .filter_nodes(PropertyFilter::le(property, value.0))?; + Ok(self.update(filtered_nodes)) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "LessThanOrEqual".into(), + )) + } + } + Operator::GreaterThan => { + if let Some(value) = condition.value { + let filtered_nodes = self + .nn + .filter_nodes(PropertyFilter::gt(property, value.0))?; + Ok(self.update(filtered_nodes)) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "GreaterThan".into(), + )) + } + } + Operator::LessThan => { + if let Some(value) = condition.value { + let filtered_nodes = self + .nn + .filter_nodes(PropertyFilter::lt(property, value.0))?; + Ok(self.update(filtered_nodes)) + } else { + Err(GraphError::ExpectedValueForOperator( + "value".into(), + "LessThan".into(), + )) + } + } + Operator::IsNone => { + let filtered_nodes = self.nn.filter_nodes(PropertyFilter::is_none(property))?; + Ok(self.update(filtered_nodes)) + } + Operator::IsSome => { + let filtered_nodes = self.nn.filter_nodes(PropertyFilter::is_some(property))?; + Ok(self.update(filtered_nodes)) + } + Operator::Any => { + if let Some(Prop::List(list)) = condition.value.map(|v| v.0) { + let prop_values: Vec = list.iter().cloned().collect(); + let filtered_nodes = self + .nn + .filter_nodes(PropertyFilter::any(property, prop_values))?; + Ok(self.update(filtered_nodes)) + } else { + Err(GraphError::ExpectedValueForOperator( + "list".into(), + "Any".into(), + )) + } + } + Operator::NotAny => { + if let Some(Prop::List(list)) = condition.value.map(|v| v.0) { + let prop_values: Vec = list.iter().cloned().collect(); + let filtered_nodes = self + .nn + .filter_nodes(PropertyFilter::not_any(property, prop_values))?; + Ok(self.update(filtered_nodes)) + } else { + Err(GraphError::ExpectedValueForOperator( + "list".into(), + "NotAny".into(), + )) + } + } + } + } + //////////////////////// //// TIME QUERIES ////// //////////////////////// diff --git a/raphtory/Cargo.toml b/raphtory/Cargo.toml index b21b95e78b..246d42176e 100644 --- a/raphtory/Cargo.toml +++ b/raphtory/Cargo.toml @@ -40,7 +40,7 @@ quad-rand = { workspace = true } serde_json = { workspace = true } ouroboros = { workspace = true } either = { workspace = true } -kdam = { workspace = true } +kdam = { workspace = true, optional = true} bytemuck = { workspace = true } tracing = { workspace = true } diff --git 
a/raphtory/src/algorithms/motifs/temporal_rich_club_coefficient.rs b/raphtory/src/algorithms/motifs/temporal_rich_club_coefficient.rs index 999c3bd6a0..4c5103c322 100644 --- a/raphtory/src/algorithms/motifs/temporal_rich_club_coefficient.rs +++ b/raphtory/src/algorithms/motifs/temporal_rich_club_coefficient.rs @@ -56,7 +56,7 @@ where .collect(); if s_k.len() <= 1 { - return 0.0 as f64; + return 0.0f64; } let temp_rich_club_val = SlidingWindows::new(views.into_iter(), window_size) @@ -93,7 +93,7 @@ where let poss_edges = (s_k.len() * (s_k.len() - 1)) / 2; return (edges.len() as f64) / (poss_edges as f64); } - None => return 0 as f64, + None => return 0f64, } } diff --git a/raphtory/src/core/entities/properties/graph_meta.rs b/raphtory/src/core/entities/properties/graph_meta.rs index e40837ff47..57abbcf780 100644 --- a/raphtory/src/core/entities/properties/graph_meta.rs +++ b/raphtory/src/core/entities/properties/graph_meta.rs @@ -11,7 +11,9 @@ use raphtory_api::core::storage::{ FxDashMap, }; use serde::{Deserialize, Serialize}; -use std::ops::{Deref, DerefMut}; +#[cfg(feature = "proto")] +use std::ops::Deref; +use std::ops::DerefMut; #[derive(Serialize, Deserialize, Debug)] pub struct GraphMeta { diff --git a/raphtory/src/core/mod.rs b/raphtory/src/core/mod.rs index 52611058f1..58205c3010 100644 --- a/raphtory/src/core/mod.rs +++ b/raphtory/src/core/mod.rs @@ -32,7 +32,6 @@ use chrono::{DateTime, NaiveDateTime, Utc}; use itertools::Itertools; use raphtory_api::core::storage::arc_str::ArcStr; use serde::{Deserialize, Serialize}; -use serde_json::Value; use std::{ cmp::Ordering, collections::HashMap, @@ -171,39 +170,6 @@ impl PartialOrd for Prop { } impl Prop { - pub fn to_json(&self) -> Value { - match self { - Prop::Str(value) => Value::String(value.to_string()), - Prop::U8(value) => Value::Number((*value).into()), - Prop::U16(value) => Value::Number((*value).into()), - Prop::I32(value) => Value::Number((*value).into()), - Prop::I64(value) => Value::Number((*value).into()), - Prop::U32(value) => Value::Number((*value).into()), - Prop::U64(value) => Value::Number((*value).into()), - Prop::F32(value) => Value::Number(serde_json::Number::from_f64(*value as f64).unwrap()), - Prop::F64(value) => Value::Number(serde_json::Number::from_f64(*value).unwrap()), - Prop::Bool(value) => Value::Bool(*value), - Prop::List(value) => { - let vec: Vec = value.iter().map(|v| v.to_json()).collect(); - Value::Array(vec) - } - Prop::Map(value) => { - let map: serde_json::Map = value - .iter() - .map(|(k, v)| (k.to_string(), v.to_json())) - .collect(); - Value::Object(map) - } - Prop::DTime(value) => Value::String(value.to_string()), - Prop::NDTime(value) => Value::String(value.to_string()), - Prop::Graph(_) => Value::String("Graph cannot be converted to JSON".to_string()), - Prop::PersistentGraph(_) => { - Value::String("Persistent Graph cannot be converted to JSON".to_string()) - } - Prop::Document(DocumentInput { content, .. }) => Value::String(content.to_owned()), // TODO: return Value::Object ?? 
- } - } - pub fn dtype(&self) -> PropType { match self { Prop::Str(_) => PropType::Str, diff --git a/raphtory/src/core/storage/timeindex.rs b/raphtory/src/core/storage/timeindex.rs index fe58e28693..e79e71c247 100644 --- a/raphtory/src/core/storage/timeindex.rs +++ b/raphtory/src/core/storage/timeindex.rs @@ -281,62 +281,6 @@ impl<'a, T: AsTime, Ops: TimeIndexOps, V: AsRef> + Send } } -pub trait TimeIndexOps: Send + Sync { - type IndexType: AsTime; - type RangeType<'a>: TimeIndexOps + 'a - where - Self: 'a; - - fn active(&self, w: Range) -> bool; - - fn active_t(&self, w: Range) -> bool { - self.active(Self::IndexType::range(w)) - } - - fn range(&self, w: Range) -> Self::RangeType<'_>; - - fn range_t(&self, w: Range) -> Self::RangeType<'_> { - self.range(Self::IndexType::range(w)) - } - - fn first_t(&self) -> Option { - self.first().map(|ti| ti.t()) - } - - fn first(&self) -> Option; - - fn last_t(&self) -> Option { - self.last().map(|ti| ti.t()) - } - - fn last(&self) -> Option; - - fn iter(&self) -> Box + Send + '_>; - - fn iter_t(&self) -> Box + Send + '_> { - Box::new(self.iter().map(|time| time.t())) - } - - fn len(&self) -> usize; -} - -pub trait TimeIndexIntoOps: Sized { - type IndexType: AsTime; - type RangeType: TimeIndexIntoOps; - - fn into_range(self, w: Range) -> Self::RangeType; - - fn into_range_t(self, w: Range) -> Self::RangeType { - self.into_range(Self::IndexType::range(w)) - } - - fn into_iter(self) -> impl Iterator + Send; - - fn into_iter_t(self) -> impl Iterator + Send { - self.into_iter().map(|time| time.t()) - } -} - impl TimeIndexOps for TimeIndex { type IndexType = T; type RangeType<'a> diff --git a/raphtory/src/core/utils/errors.rs b/raphtory/src/core/utils/errors.rs index 0cd3b1b51a..dd3dd3238b 100644 --- a/raphtory/src/core/utils/errors.rs +++ b/raphtory/src/core/utils/errors.rs @@ -137,9 +137,15 @@ pub enum GraphError { #[error("Node already exists with ID {0:?}")] NodeExistsError(GID), + #[error("Nodes already exist with IDs: {0:?}")] + NodesExistError(Vec), + #[error("Edge already exists for nodes {0:?} {1:?}")] EdgeExistsError(GID, GID), + #[error("Edges already exist with IDs: {0:?}")] + EdgesExistError(Vec<(GID, GID)>), + #[error("Node {0} does not exist")] NodeMissingError(GID), @@ -278,6 +284,9 @@ pub enum GraphError { SystemTimeError(#[from] SystemTimeError), #[error("Property filtering not implemented on PersistentGraph yet")] PropertyFilteringNotImplemented, + + #[error("Expected a {0} for {1} operator")] + ExpectedValueForOperator(String, String), } impl GraphError { diff --git a/raphtory/src/db/api/mutation/import_ops.rs b/raphtory/src/db/api/mutation/import_ops.rs index 6d6f2a9430..5608e97108 100644 --- a/raphtory/src/db/api/mutation/import_ops.rs +++ b/raphtory/src/db/api/mutation/import_ops.rs @@ -1,8 +1,12 @@ -use std::borrow::Borrow; +use raphtory_api::core::entities::{GID, VID}; +use std::{borrow::Borrow, fmt::Debug}; use crate::{ core::{ - entities::LayerIds, + entities::{ + nodes::node_ref::{AsNodeRef, NodeRef}, + LayerIds, + }, utils::errors::{ GraphError, GraphError::{EdgeExistsError, NodeExistsError}, @@ -32,14 +36,12 @@ pub trait ImportOps: { /// Imports a single node into the graph. /// - /// This function takes a reference to a node and an optional boolean flag `force`. - /// If `force` is `Some(false)` or `None`, the function will return an error if the node already exists in the graph. - /// If `force` is `Some(true)`, the function will overwrite the existing node in the graph. 
- /// /// # Arguments /// /// * `node` - A reference to the node to be imported. - /// * `force` - An optional boolean flag. If `Some(true)`, the function will overwrite the existing node. + /// * `merge` - An optional boolean flag. + /// If `merge` is `false`, the function will return an error if the imported node already exists in the graph. + /// If `merge` is `true`, the function merges the histories of the imported node and the existing node (in the graph). /// /// # Returns /// @@ -47,19 +49,42 @@ pub trait ImportOps: fn import_node<'a, GHH: GraphViewOps<'a>, GH: GraphViewOps<'a>>( &self, node: &NodeView, - force: bool, + merge: bool, ) -> Result, GraphError>; - /// Imports multiple nodes into the graph. + /// Imports a single node into the graph. /// - /// This function takes a vector of references to nodes and an optional boolean flag `force`. - /// If `force` is `Some(false)` or `None`, the function will return an error if any of the nodes already exist in the graph. - /// If `force` is `Some(true)`, the function will overwrite the existing nodes in the graph. + /// # Arguments + /// + /// * `node` - A reference to the node to be imported. + /// * `new_id` - The new node id. + /// * `merge` - An optional boolean flag. + /// If `merge` is `false`, the function will return an error if the imported node already exists in the graph. + /// If `merge` is `true`, the function merges the histories of the imported node and the existing node (in the graph). + /// + /// # Returns + /// + /// A `Result` which is `Ok` if the node was successfully imported, and `Err` otherwise. + fn import_node_as< + 'a, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, + >( + &self, + node: &NodeView, + new_id: V, + merge: bool, + ) -> Result, GraphError>; + + /// Imports multiple nodes into the graph. /// /// # Arguments /// /// * `nodes` - A vector of references to the nodes to be imported. - /// * `force` - An optional boolean flag. If `Some(true)`, the function will overwrite the existing nodes. + /// * `merge` - An optional boolean flag. + /// If `merge` is `false`, the function will return an error if any of the imported nodes already exists in the graph. + /// If `merge` is `true`, the function merges the histories of the imported nodes and the existing nodes (in the graph). /// /// # Returns /// @@ -67,19 +92,42 @@ pub trait ImportOps: fn import_nodes<'a, GHH: GraphViewOps<'a>, GH: GraphViewOps<'a>>( &self, nodes: impl IntoIterator>>, - force: bool, + merge: bool, ) -> Result<(), GraphError>; - /// Imports a single edge into the graph. + /// Imports multiple nodes into the graph. + /// + /// # Arguments + /// + /// * `nodes` - A vector of references to the nodes to be imported. + /// * `new_ids` - A list of node IDs to use for the imported nodes. + /// * `merge` - An optional boolean flag. + /// If `merge` is `false`, the function will return an error if any of the imported nodes already exists in the graph. + /// If `merge` is `true`, the function merges the histories of the imported nodes and the existing nodes (in the graph). + /// + /// # Returns /// - /// This function takes a reference to an edge and an optional boolean flag `force`. - /// If `force` is `Some(false)` or `None`, the function will return an error if the edge already exists in the graph. - /// If `force` is `Some(true)`, the function will overwrite the existing edge in the graph. + /// A `Result` which is `Ok` if the nodes were successfully imported, and `Err` otherwise. 
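+ /// + /// # Example + /// + /// A minimal sketch of the intended call shape (not compiled; assumes `g` and `gg` are existing `Graph`s and that `g` contains nodes "A" and "B"): + /// + /// ```ignore + /// // import "A" and "B" from `g` into `gg` under the new ids "X" and "Y", + /// // merging with any existing nodes instead of returning an error + /// gg.import_nodes_as([g.node("A").unwrap(), g.node("B").unwrap()], ["X", "Y"], true)?; + /// ```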
+ fn import_nodes_as< + 'a, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, + >( + &self, + nodes: impl IntoIterator>>, + new_ids: impl IntoIterator, + merge: bool, + ) -> Result<(), GraphError>; + + /// Imports a single edge into the graph. /// /// # Arguments /// /// * `edge` - A reference to the edge to be imported. - /// * `force` - An optional boolean flag. If `Some(true)`, the function will overwrite the existing edge. + /// * `merge` - An optional boolean flag. + /// If `merge` is `false`, the function will return an error if the imported edge already exists in the graph. + /// If `merge` is `true`, the function merges the histories of the imported edge and the existing edge (in the graph). /// /// # Returns /// @@ -87,19 +135,42 @@ pub trait ImportOps: fn import_edge<'a, GHH: GraphViewOps<'a>, GH: GraphViewOps<'a>>( &self, edge: &EdgeView, - force: bool, + merge: bool, ) -> Result, GraphError>; - /// Imports multiple edges into the graph. + /// Imports a single edge into the graph. + /// + /// # Arguments /// - /// This function takes a vector of references to edges and an optional boolean flag `force`. - /// If `force` is `Some(false)` or `None`, the function will return an error if any of the edges already exist in the graph. - /// If `force` is `Some(true)`, the function will overwrite the existing edges in the graph. + /// * `edge` - A reference to the edge to be imported. + /// * `new_id` - The ID of the new edge. It's a tuple of the source and destination node ids. + /// * `merge` - An optional boolean flag. + /// If `merge` is `false`, the function will return an error if the imported edge already exists in the graph. + /// If `merge` is `true`, the function merges the histories of the imported edge and the existing edge (in the graph). + /// + /// # Returns + /// + /// A `Result` which is `Ok` if the edge was successfully imported, and `Err` otherwise. + fn import_edge_as< + 'a, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, + >( + &self, + edge: &EdgeView, + new_id: (V, V), + merge: bool, + ) -> Result, GraphError>; + + /// Imports multiple edges into the graph. /// /// # Arguments /// /// * `edges` - A vector of references to the edges to be imported. - /// * `force` - An optional boolean flag. If `Some(true)`, the function will overwrite the existing edges. + /// * `merge` - An optional boolean flag. + /// If `merge` is `false`, the function will return an error if any of the imported edges already exists in the graph. + /// If `merge` is `true`, the function merges the histories of the imported edges and the existing edges (in the graph). /// /// # Returns /// @@ -107,7 +178,32 @@ pub trait ImportOps: fn import_edges<'a, GHH: GraphViewOps<'a>, GH: GraphViewOps<'a>>( &self, edges: impl IntoIterator>>, - force: bool, + merge: bool, + ) -> Result<(), GraphError>; + + /// Imports multiple edges into the graph. + /// + /// # Arguments + /// + /// * `edges` - A vector of references to the edges to be imported. + /// * `new_ids` - The IDs of the new edges. It's a vector of tuples of the source and destination node ids. + /// * `merge` - An optional boolean flag. + /// If `merge` is `false`, the function will return an error if any of the imported edges already exists in the graph. + /// If `merge` is `true`, the function merges the histories of the imported edges and the existing edges (in the graph). 
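+ /// + /// A minimal sketch of the intended call shape (not compiled; assumes `gg` is a `Graph` and `e_a_b`, `e_b_c` are `EdgeView`s taken from another graph): + /// + /// ```ignore + /// // re-key the imported edges onto new endpoints, merging with any existing edges + /// gg.import_edges_as([e_a_b, e_b_c], [("X", "Y"), ("Y", "Z")], true)?; + /// ```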
+ /// + /// # Returns + /// + /// A `Result` which is `Ok` if the edges were successfully imported, and `Err` otherwise. + fn import_edges_as< + 'a, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, + >( + &self, + edges: impl IntoIterator>>, + new_ids: impl IntoIterator, + merge: bool, ) -> Result<(), GraphError>; } @@ -122,56 +218,54 @@ impl< fn import_node<'a, GHH: GraphViewOps<'a>, GH: GraphViewOps<'a>>( &self, node: &NodeView, - force: bool, + merge: bool, ) -> Result, GraphError> { - if !force && self.node(node.id()).is_some() { - return Err(NodeExistsError(node.id())); - } - let node_internal = match node.node_type().as_str() { - None => self.resolve_node(node.id())?.inner(), - Some(node_type) => { - let (node_internal, _) = self.resolve_node_and_type(node.id(), node_type)?.inner(); - node_internal.inner() - } - }; - - for h in node.history() { - let t = time_from_input(self, h)?; - self.internal_add_node(t, node_internal, &[])?; - } - for (name, prop_view) in node.properties().temporal().iter() { - let old_prop_id = node - .graph - .node_meta() - .temporal_prop_meta() - .get_id(&name) - .unwrap(); - let dtype = node - .graph - .node_meta() - .temporal_prop_meta() - .get_dtype(old_prop_id) - .unwrap(); - let new_prop_id = self.resolve_node_property(&name, dtype, false)?.inner(); - for (h, prop) in prop_view.iter() { - let t = time_from_input(self, h)?; - self.internal_add_node(t, node_internal, &[(new_prop_id, prop)])?; - } - } - self.node(node.id()) - .expect("node added") - .add_constant_properties(node.properties().constant())?; + import_node_internal(&self, node, node.id(), merge) + } - Ok(self.node(node.id()).unwrap()) + fn import_node_as< + 'a, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, + >( + &self, + node: &NodeView, + new_id: V, + merge: bool, + ) -> Result, GraphError> { + import_node_internal(&self, node, new_id, merge) } fn import_nodes<'a, GHH: GraphViewOps<'a>, GH: GraphViewOps<'a>>( &self, nodes: impl IntoIterator>>, - force: bool, + merge: bool, ) -> Result<(), GraphError> { - for node in nodes { - self.import_node(node.borrow(), force)?; + let nodes: Vec<_> = nodes.into_iter().collect(); + let new_ids: Vec = nodes.iter().map(|n| n.borrow().id()).collect(); + check_existing_nodes(self, &new_ids, merge)?; + for node in &nodes { + self.import_node(node.borrow(), merge)?; + } + Ok(()) + } + + fn import_nodes_as< + 'a, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, + >( + &self, + nodes: impl IntoIterator>>, + new_ids: impl IntoIterator, + merge: bool, + ) -> Result<(), GraphError> { + let new_ids: Vec = new_ids.into_iter().collect(); + check_existing_nodes(self, &new_ids, merge)?; + for (node, new_node_id) in nodes.into_iter().zip(new_ids.into_iter()) { + self.import_node_as(node.borrow(), new_node_id, merge)?; } Ok(()) } @@ -179,62 +273,241 @@ impl< fn import_edge<'a, GHH: GraphViewOps<'a>, GH: GraphViewOps<'a>>( &self, edge: &EdgeView, - force: bool, + merge: bool, ) -> Result, GraphError> { - // make sure we preserve all layers even if they are empty - // skip default layer - for layer in edge.graph.unique_layers().skip(1) { - self.resolve_layer(Some(&layer))?; - } - if !force && self.has_edge(edge.src().id(), edge.dst().id()) { - return Err(EdgeExistsError(edge.src().id(), edge.dst().id())); - } - // Add edges first so we definitely have all associated nodes (important in case of persistent edges) - // FIXME: this needs to be verified - for ee in 
edge.explode_layers() { - let layer_id = ee.edge.layer().expect("exploded layers"); - let layer_ids = LayerIds::One(layer_id); - let layer_name = self.get_layer_name(layer_id); - let layer_name: Option<&str> = if layer_id == 0 { - None - } else { - Some(&layer_name) - }; - for ee in ee.explode() { - self.add_edge( - ee.time().expect("exploded edge"), - ee.src().id(), - ee.dst().id(), - ee.properties().temporal().collect_properties(), - layer_name, - )?; - } - - if self.include_deletions() { - for t in edge.graph.edge_deletion_history(edge.edge, &layer_ids) { - let ti = time_from_input(self, t.t())?; - let src_id = self.resolve_node(edge.src().id())?.inner(); - let dst_id = self.resolve_node(edge.dst().id())?.inner(); - let layer = self.resolve_layer(layer_name)?.inner(); - self.internal_delete_edge(ti, src_id, dst_id, layer)?; - } - } + import_edge_internal(&self, edge, edge.src().id(), edge.dst().id(), merge) + } - self.edge(ee.src().id(), ee.dst().id()) - .expect("edge added") - .add_constant_properties(ee.properties().constant(), layer_name)?; - } - Ok(self.edge(edge.src().id(), edge.dst().id()).unwrap()) + fn import_edge_as< + 'a, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, + >( + &self, + edge: &EdgeView, + new_id: (V, V), + merge: bool, + ) -> Result, GraphError> { + import_edge_internal(&self, edge, new_id.0, new_id.1, merge) } fn import_edges<'a, GHH: GraphViewOps<'a>, GH: GraphViewOps<'a>>( &self, edges: impl IntoIterator>>, - force: bool, + merge: bool, ) -> Result<(), GraphError> { + let edges: Vec<_> = edges.into_iter().collect(); + let new_ids: Vec<(GID, GID)> = edges.iter().map(|e| e.borrow().id()).collect(); + check_existing_edges(self, &new_ids, merge)?; for edge in edges { - self.import_edge(edge.borrow(), force)?; + self.import_edge(edge.borrow(), merge)?; + } + Ok(()) + } + + fn import_edges_as< + 'a, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, + >( + &self, + edges: impl IntoIterator>>, + new_ids: impl IntoIterator, + merge: bool, + ) -> Result<(), GraphError> { + let new_ids: Vec<(V, V)> = new_ids.into_iter().collect(); + check_existing_edges(self, &new_ids, merge)?; + for (new_id, edge) in new_ids.into_iter().zip(edges) { + self.import_edge_as(edge.borrow(), new_id, merge)?; } Ok(()) } } + +fn import_node_internal< + 'a, + G: StaticGraphViewOps + + InternalAdditionOps + + InternalDeletionOps + + InternalPropertyAdditionOps + + InternalMaterialize, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, +>( + graph: &G, + node: &NodeView, + id: V, + merge: bool, +) -> Result, GraphError> { + if !merge { + if let Some(existing_node) = graph.node(&id) { + return Err(NodeExistsError(existing_node.id())); + } + } + + let node_internal = match node.node_type().as_str() { + None => graph.resolve_node(&id)?.inner(), + Some(node_type) => { + let (node_internal, _) = graph.resolve_node_and_type(&id, node_type)?.inner(); + node_internal.inner() + } + }; + + for h in node.history() { + let t = time_from_input(graph, h)?; + graph.internal_add_node(t, node_internal, &[])?; + } + + for (name, prop_view) in node.properties().temporal().iter() { + let old_prop_id = node + .graph + .node_meta() + .temporal_prop_meta() + .get_id(&name) + .unwrap(); + let dtype = node + .graph + .node_meta() + .temporal_prop_meta() + .get_dtype(old_prop_id) + .unwrap(); + let new_prop_id = graph.resolve_node_property(&name, dtype, false)?.inner(); + for (h, prop) in prop_view.iter() { + let t = 
time_from_input(graph, h)?; + graph.internal_add_node(t, node_internal, &[(new_prop_id, prop)])?; + } + } + + graph + .node(&id) + .expect("node added") + .add_constant_properties(node.properties().constant())?; + + Ok(graph.node(&id).unwrap()) +} + +fn import_edge_internal< + 'a, + G: StaticGraphViewOps + + InternalAdditionOps + + InternalDeletionOps + + InternalPropertyAdditionOps + + InternalMaterialize, + GHH: GraphViewOps<'a>, + GH: GraphViewOps<'a>, + V: AsNodeRef + Clone + Debug, +>( + graph: &G, + edge: &EdgeView, + src_id: V, + dst_id: V, + merge: bool, +) -> Result, GraphError> { + // Preserve all layers even if they are empty (except the default layer) + for layer in edge.graph.unique_layers().skip(1) { + graph.resolve_layer(Some(&layer))?; + } + + if !merge && graph.has_edge(&src_id, &dst_id) { + if let Some(existing_edge) = graph.edge(&src_id, &dst_id) { + return Err(EdgeExistsError( + existing_edge.src().id(), + existing_edge.dst().id(), + )); + } + } + + // Add edges first to ensure associated nodes are present + for ee in edge.explode_layers() { + let layer_id = ee.edge.layer().expect("exploded layers"); + let layer_ids = LayerIds::One(layer_id); + let layer_name = graph.get_layer_name(layer_id); + let layer_name: Option<&str> = if layer_id == 0 { + None + } else { + Some(&layer_name) + }; + + for ee in ee.explode() { + graph.add_edge( + ee.time().expect("exploded edge"), + &src_id, + &dst_id, + ee.properties().temporal().collect_properties(), + layer_name, + )?; + } + + if graph.include_deletions() { + for t in edge.graph.edge_deletion_history(edge.edge, &layer_ids) { + let ti = time_from_input(graph, t.t())?; + let src_node = graph.resolve_node(&src_id)?.inner(); + let dst_node = graph.resolve_node(&dst_id)?.inner(); + let layer = graph.resolve_layer(layer_name)?.inner(); + graph.internal_delete_edge(ti, src_node, dst_node, layer)?; + } + } + + graph + .edge(&src_id, &dst_id) + .expect("edge added") + .add_constant_properties(ee.properties().constant(), layer_name)?; + } + + Ok(graph.edge(&src_id, &dst_id).unwrap()) +} + +fn check_existing_nodes< + G: StaticGraphViewOps + + InternalAdditionOps + + InternalDeletionOps + + InternalPropertyAdditionOps + + InternalMaterialize, + V: AsNodeRef, +>( + graph: &G, + ids: &[V], + merge: bool, +) -> Result<(), GraphError> { + if !merge { + let mut existing_nodes = vec![]; + for id in ids { + if let Some(node) = graph.node(id) { + existing_nodes.push(node.id()); + } + } + if !existing_nodes.is_empty() { + return Err(GraphError::NodesExistError(existing_nodes)); + } + } + Ok(()) +} + +fn check_existing_edges< + G: StaticGraphViewOps + + InternalAdditionOps + + InternalDeletionOps + + InternalPropertyAdditionOps + + InternalMaterialize, + V: AsNodeRef + Clone + Debug, +>( + graph: &G, + new_ids: &[(V, V)], + merge: bool, +) -> Result<(), GraphError> { + if !merge { + let mut existing_edges = vec![]; + for (src, dst) in new_ids { + if let Some(existing_edge) = graph.edge(src, dst) { + existing_edges.push((existing_edge.src().id(), existing_edge.dst().id())); + } + } + if !existing_edges.is_empty() { + return Err(GraphError::EdgesExistError(existing_edges)); + } + } + Ok(()) +} diff --git a/raphtory/src/db/api/state/lazy_node_state.rs b/raphtory/src/db/api/state/lazy_node_state.rs index ebaced8da8..65a1f0fdf6 100644 --- a/raphtory/src/db/api/state/lazy_node_state.rs +++ b/raphtory/src/db/api/state/lazy_node_state.rs @@ -2,166 +2,167 @@ use crate::{ core::entities::{nodes::node_ref::AsNodeRef, VID}, db::{ api::{ - state::{NodeState, 
NodeStateOps}, - storage::graph::{nodes::node_storage_ops::NodeStorageOps, storage_ops::GraphStorage}, - view::{internal::NodeList, IntoDynBoxed}, + state::{ + ops::{node::NodeOp, NodeOpFilter}, + NodeState, NodeStateOps, + }, + view::{ + internal::{NodeList, OneHopFilter}, + BoxedLIter, IntoDynBoxed, + }, }, - graph::node::NodeView, + graph::{node::NodeView, nodes::Nodes}, }, - prelude::GraphViewOps, + prelude::*, }; use rayon::prelude::*; -use std::{marker::PhantomData, sync::Arc}; +use std::fmt::{Debug, Formatter}; #[derive(Clone)] -pub struct LazyNodeState<'graph, V, G, GH = G> { - op: Arc V + Send + Sync + 'graph>, - base_graph: G, - graph: GH, - node_types_filter: Option>, - _marker: PhantomData<&'graph ()>, +pub struct LazyNodeState<'graph, Op, G, GH = G> { + nodes: Nodes<'graph, G, GH>, + pub(crate) op: Op, } -impl< - 'graph, - G: GraphViewOps<'graph>, - GH: GraphViewOps<'graph>, - V: Clone + Send + Sync + 'graph, - > LazyNodeState<'graph, V, G, GH> +impl<'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>, Op: NodeOp + 'graph> Debug + for LazyNodeState<'graph, Op, G, GH> +where + Op::Output: Debug, { - pub(crate) fn new( - base_graph: G, - graph: GH, - node_types_filter: Option>, - op: impl Fn(&GraphStorage, &GH, VID) -> V + Send + Sync + 'graph, - ) -> Self { - let op = Arc::new(op); - Self { - op, - base_graph, - graph, - node_types_filter, - _marker: Default::default(), - } + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_list().entries(self.values()).finish() } +} - fn apply(&self, cg: &GraphStorage, g: &GH, vid: VID) -> V { - (self.op)(cg, g, vid) - } +impl<'graph, Op: NodeOpFilter<'graph>, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>> + OneHopFilter<'graph> for LazyNodeState<'graph, Op, G, GH> +{ + type BaseGraph = G; + type FilteredGraph = Op::Graph; + type Filtered + 'graph> = + LazyNodeState<'graph, Op::Filtered, G, GH>; - pub fn compute(&self) -> NodeState<'graph, V, G, GH> { - let cg = self.graph.core_graph().lock(); - if self.graph.nodes_filtered() || self.node_types_filter.is_some() { - let keys: Vec<_> = cg - .nodes_par(&self.graph, self.node_types_filter.as_ref()) - .collect(); - let mut values = Vec::with_capacity(keys.len()); - keys.par_iter() - .map(|vid| self.apply(&cg, &self.graph, *vid)) - .collect_into_vec(&mut values); - NodeState::new( - self.base_graph.clone(), - self.graph.clone(), - values, - Some(keys.into()), - ) - } else { - let n = cg.nodes().len(); - let mut values = Vec::with_capacity(n); - (0..n) - .into_par_iter() - .map(|i| self.apply(&cg, &self.graph, VID(i))) - .collect_into_vec(&mut values); - NodeState::new(self.base_graph.clone(), self.graph.clone(), values, None) - } + fn current_filter(&self) -> &Self::FilteredGraph { + self.op.graph() } - pub fn collect>(&self) -> C { - self.par_values().collect() + fn base_graph(&self) -> &Self::BaseGraph { + self.nodes.base_graph() } - pub fn collect_vec(&self) -> Vec { - self.collect() + fn one_hop_filtered + 'graph>( + &self, + filtered_graph: GHH, + ) -> Self::Filtered { + LazyNodeState { + nodes: self.nodes.clone(), + op: self.op.filtered(filtered_graph), + } } } -impl<'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>, V: 'graph> IntoIterator - for LazyNodeState<'graph, V, G, GH> +impl<'graph, Op: NodeOp + 'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>> IntoIterator + for LazyNodeState<'graph, Op, G, GH> { - type Item = V; - type IntoIter = Box + Send + 'graph>; + type Item = Op::Output; + type IntoIter = BoxedLIter<'graph, Self::Item>; fn 
into_iter(self) -> Self::IntoIter { - let cg = self.graph.core_graph().lock(); - let graph = self.graph; - let op = self.op; - cg.clone() - .into_nodes_iter(graph.clone(), self.node_types_filter) - .map(move |v| op(&cg, &graph, v)) - .into_dyn_boxed() + self.into_values().into_dyn_boxed() } } -impl< - 'graph, - G: GraphViewOps<'graph>, - GH: GraphViewOps<'graph>, - V: Clone + Send + Sync + 'graph, - > NodeStateOps<'graph> for LazyNodeState<'graph, V, G, GH> +impl<'graph, Op: NodeOp + 'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>> + LazyNodeState<'graph, Op, G, GH> +{ + pub(crate) fn new(op: Op, nodes: Nodes<'graph, G, GH>) -> Self { + Self { nodes, op } + } + + pub fn collect>(&self) -> C { + self.par_values().collect() + } + + pub fn collect_vec(&self) -> Vec { + self.collect() + } + + pub fn compute(&self) -> NodeState<'graph, Op::Output, G, GH> { + if self.nodes.is_filtered() { + let (keys, values): (Vec<_>, Vec<_>) = self + .par_iter() + .map(|(node, value)| (node.node, value)) + .unzip(); + NodeState::new( + self.nodes.base_graph.clone(), + self.nodes.graph.clone(), + values, + Some(keys.into()), + ) + } else { + let values = self.collect_vec(); + NodeState::new( + self.nodes.base_graph.clone(), + self.nodes.graph.clone(), + values, + None, + ) + } + } +} + +impl<'graph, Op: NodeOp + 'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>> + NodeStateOps<'graph> for LazyNodeState<'graph, Op, G, GH> { type Graph = GH; type BaseGraph = G; type Value<'a> - = V + = Op::Output where 'graph: 'a, Self: 'a; - type OwnedValue = V; + type OwnedValue = Op::Output; fn graph(&self) -> &Self::Graph { - &self.graph + &self.nodes.graph } fn base_graph(&self) -> &Self::BaseGraph { - &self.base_graph + &self.nodes.base_graph } fn values<'a>(&'a self) -> impl Iterator> + 'a where 'graph: 'a, { - let cg = self.graph.core_graph().lock(); - cg.clone() - .into_nodes_iter(&self.graph, self.node_types_filter.clone()) - .map(move |vid| self.apply(&cg, &self.graph, vid)) + let storage = self.graph().core_graph().lock(); + self.nodes + .iter_refs() + .map(move |vid| self.op.apply(&storage, vid)) } fn par_values<'a>(&'a self) -> impl ParallelIterator> + 'a where 'graph: 'a, { - let cg = self.graph.core_graph().lock(); - cg.clone() - .into_nodes_par(&self.graph, self.node_types_filter.clone()) - .map(move |vid| self.apply(&cg, &self.graph, vid)) + let storage = self.graph().core_graph().lock(); + self.nodes + .par_iter_refs() + .map(move |vid| self.op.apply(&storage, vid)) } fn into_values(self) -> impl Iterator + 'graph { - let cg = self.graph.core_graph().lock(); - let graph = self.graph.clone(); - let op = self.op; - cg.clone() - .into_nodes_iter(self.graph, self.node_types_filter) - .map(move |n| op(&cg, &graph, n)) + let storage = self.graph().core_graph().lock(); + self.nodes + .iter_refs() + .map(move |vid| self.op.apply(&storage, vid)) } fn into_par_values(self) -> impl ParallelIterator + 'graph { - let cg = self.graph.core_graph().lock(); - let graph = self.graph.clone(); - let op = self.op; - cg.clone() - .into_nodes_par(self.graph, self.node_types_filter) - .map(move |n| op(&cg, &graph, n)) + let storage = self.graph().core_graph().lock(); + self.nodes + .par_iter_refs() + .map(move |vid| self.op.apply(&storage, vid)) } fn iter<'a>( @@ -175,15 +176,10 @@ impl< where 'graph: 'a, { - let cg = self.graph.core_graph().lock(); - cg.clone() - .into_nodes_iter(self.graph.clone(), self.node_types_filter.clone()) - .map(move |n| { - ( - NodeView::new_one_hop_filtered(&self.base_graph, &self.graph, 
n), - (self.op)(&cg, &self.graph, n), - ) - }) + let storage = self.graph().core_graph().lock(); + self.nodes + .iter() + .map(move |node| (node, self.op.apply(&storage, node.node))) } fn par_iter<'a>( @@ -197,25 +193,20 @@ impl< where 'graph: 'a, { - let cg = self.graph.core_graph().lock(); - cg.clone() - .into_nodes_par(self.graph.clone(), self.node_types_filter.clone()) - .map(move |n| { - ( - NodeView::new_one_hop_filtered(&self.base_graph, &self.graph, n), - (self.op)(&cg, &self.graph, n), - ) - }) + let storage = self.graph().core_graph().lock(); + self.nodes + .par_iter() + .map(move |node| (node, self.op.apply(&storage, node.node))) } fn get_by_index( &self, index: usize, ) -> Option<(NodeView<&Self::BaseGraph, &Self::Graph>, Self::Value<'_>)> { - if self.graph.nodes_filtered() { + if self.graph().nodes_filtered() { self.iter().nth(index) } else { - let vid = match self.graph.node_list() { + let vid = match self.graph().node_list() { NodeList::All { num_nodes } => { if index < num_nodes { VID(index) @@ -225,31 +216,66 @@ impl< } NodeList::List { nodes } => nodes.key(index)?, }; - let cg = self.graph.core_graph(); + let cg = self.graph().core_graph(); Some(( - NodeView::new_one_hop_filtered(&self.base_graph, &self.graph, vid), - (self.op)(cg, &self.graph, vid), + NodeView::new_one_hop_filtered(self.base_graph(), self.graph(), vid), + self.op.apply(cg, vid), )) } } fn get_by_node(&self, node: N) -> Option> { - let vid = self.graph.internalise_node(node.as_node_ref())?; - if !self.graph.has_node(vid) { - return None; - } - if let Some(type_filter) = self.node_types_filter.as_ref() { - let core_node_entry = &self.graph.core_node_entry(vid); - if !type_filter[core_node_entry.node_type_id()] { - return None; - } - } - - let cg = self.graph.core_graph(); - Some(self.apply(cg, &self.graph, vid)) + let node = (&self.graph()).node(node); + node.map(|node| self.op.apply(self.graph().core_graph(), node.node)) } fn len(&self) -> usize { - self.graph.count_nodes() + self.nodes.len() + } +} + +#[cfg(test)] +mod test { + use crate::{ + db::api::{ + state::{ + lazy_node_state::LazyNodeState, + ops::node::{Degree, NodeOp}, + }, + view::{internal::CoreGraphOps, IntoDynamic}, + }, + prelude::*, + }; + use raphtory_api::core::{entities::VID, Direction}; + use std::sync::Arc; + + struct TestWrapper(Op); + #[test] + fn test_compile() { + let g = Graph::new(); + g.add_edge(0, 0, 1, NO_PROPS, None).unwrap(); + let deg = g.nodes().degree(); + + assert_eq!(deg.collect_vec(), [1, 1]); + assert_eq!(deg.after(1).collect_vec(), [0, 0]); + + let g_dyn = g.clone().into_dynamic(); + + let deg = Degree { + graph: g_dyn, + dir: Direction::BOTH, + }; + let arc_deg: Arc> = Arc::new(deg); + + let node_state_dyn = LazyNodeState { + nodes: g.nodes(), + op: arc_deg.clone(), + }; + + let dyn_deg: Vec<_> = node_state_dyn.values().collect(); + assert_eq!(dyn_deg, [1, 1]); + assert_eq!(arc_deg.apply(g.core_graph(), VID(0)), 1); + + let _test_struct = TestWrapper(arc_deg); } } diff --git a/raphtory/src/db/api/state/mod.rs b/raphtory/src/db/api/state/mod.rs index 886b4a4888..15230f0493 100644 --- a/raphtory/src/db/api/state/mod.rs +++ b/raphtory/src/db/api/state/mod.rs @@ -1,11 +1,13 @@ mod group_by; mod lazy_node_state; mod node_state; -mod ops; -mod ord_ops; +mod node_state_ops; +mod node_state_ord_ops; +pub(crate) mod ops; pub use lazy_node_state::LazyNodeState; pub(crate) use node_state::Index; pub use node_state::NodeState; -pub use ops::NodeStateOps; -pub use ord_ops::{AsOrderedNodeStateOps, OrderedNodeStateOps}; +pub use 
node_state_ops::NodeStateOps; +pub use node_state_ord_ops::{AsOrderedNodeStateOps, OrderedNodeStateOps}; +pub use ops::node::NodeOp; diff --git a/raphtory/src/db/api/state/node_state.rs b/raphtory/src/db/api/state/node_state.rs index d5d6d76da2..cb158adaf9 100644 --- a/raphtory/src/db/api/state/node_state.rs +++ b/raphtory/src/db/api/state/node_state.rs @@ -1,7 +1,7 @@ use crate::{ core::entities::{nodes::node_ref::AsNodeRef, VID}, db::{ - api::{state::ops::NodeStateOps, view::IntoDynBoxed}, + api::{state::node_state_ops::NodeStateOps, view::IntoDynBoxed}, graph::node::NodeView, }, prelude::GraphViewOps, diff --git a/raphtory/src/db/api/state/ops.rs b/raphtory/src/db/api/state/node_state_ops.rs similarity index 98% rename from raphtory/src/db/api/state/ops.rs rename to raphtory/src/db/api/state/node_state_ops.rs index b1fc2381bd..732722beb7 100644 --- a/raphtory/src/db/api/state/ops.rs +++ b/raphtory/src/db/api/state/node_state_ops.rs @@ -1,7 +1,7 @@ use crate::{ core::entities::nodes::node_ref::AsNodeRef, db::{ - api::state::{node_state::NodeState, ord_ops, Index}, + api::state::{node_state::NodeState, node_state_ord_ops, Index}, graph::node::NodeView, }, prelude::{GraphViewOps, NodeViewOps}, @@ -157,7 +157,7 @@ pub trait NodeStateOps<'graph>: IntoIterator { cmp: F, k: usize, ) -> NodeState<'graph, Self::OwnedValue, Self::BaseGraph, Self::Graph> { - let values = ord_ops::par_top_k( + let values = node_state_ord_ops::par_top_k( self.par_iter(), |(_, v1), (_, v2)| cmp(v1.borrow(), v2.borrow()), k, diff --git a/raphtory/src/db/api/state/ord_ops.rs b/raphtory/src/db/api/state/node_state_ord_ops.rs similarity index 98% rename from raphtory/src/db/api/state/ord_ops.rs rename to raphtory/src/db/api/state/node_state_ord_ops.rs index afa2537e62..24c2dab52a 100644 --- a/raphtory/src/db/api/state/ord_ops.rs +++ b/raphtory/src/db/api/state/node_state_ord_ops.rs @@ -1,5 +1,5 @@ use crate::db::{ - api::state::{node_state::NodeState, ops::NodeStateOps}, + api::state::{node_state::NodeState, node_state_ops::NodeStateOps}, graph::node::NodeView, }; use num_traits::float::FloatCore; @@ -329,7 +329,7 @@ where #[cfg(test)] mod test { - use crate::db::api::state::ord_ops::par_top_k; + use crate::db::api::state::node_state_ord_ops::par_top_k; #[test] fn test_top_k() { diff --git a/raphtory/src/db/api/state/ops/history.rs b/raphtory/src/db/api/state/ops/history.rs new file mode 100644 index 0000000000..1904f03cb3 --- /dev/null +++ b/raphtory/src/db/api/state/ops/history.rs @@ -0,0 +1,101 @@ +use crate::{ + db::api::{ + state::{ops::NodeOpFilter, NodeOp}, + storage::graph::storage_ops::GraphStorage, + }, + prelude::GraphViewOps, +}; +use raphtory_api::core::entities::VID; + +#[derive(Debug, Clone)] +pub struct EarliestTime { + pub(crate) graph: G, +} + +impl<'graph, G: GraphViewOps<'graph>> NodeOp for EarliestTime { + type Output = Option; + + fn apply(&self, _storage: &GraphStorage, node: VID) -> Self::Output { + self.graph.node_earliest_time(node) + } +} + +impl<'graph, G: GraphViewOps<'graph>> NodeOpFilter<'graph> for EarliestTime { + type Graph = G; + type Filtered + 'graph> = EarliestTime; + + fn graph(&self) -> &Self::Graph { + &self.graph + } + + fn filtered + 'graph>( + &self, + filtered_graph: GH, + ) -> Self::Filtered { + EarliestTime { + graph: filtered_graph, + } + } +} + +#[derive(Debug, Clone)] +pub struct LatestTime { + pub(crate) graph: G, +} + +impl<'graph, G: GraphViewOps<'graph>> NodeOp for LatestTime { + type Output = Option; + + fn apply(&self, _storage: &GraphStorage, node: VID) -> 
Self::Output { + self.graph.node_latest_time(node) + } +} + +impl<'graph, G: GraphViewOps<'graph>> NodeOpFilter<'graph> for LatestTime { + type Graph = G; + type Filtered + 'graph> = LatestTime; + + fn graph(&self) -> &Self::Graph { + &self.graph + } + + fn filtered + 'graph>( + &self, + filtered_graph: GH, + ) -> Self::Filtered { + LatestTime { + graph: filtered_graph, + } + } +} + +#[derive(Debug, Clone)] +pub struct History { + pub(crate) graph: G, +} + +impl<'graph, G: GraphViewOps<'graph>> NodeOp for History { + type Output = Vec; + + fn apply(&self, _storage: &GraphStorage, node: VID) -> Self::Output { + self.graph.node_history(node) + } +} + +impl<'graph, G: GraphViewOps<'graph>> NodeOpFilter<'graph> for History { + type Graph = G; + type Filtered + 'graph> = History; + + fn graph(&self) -> &Self::Graph { + &self.graph + } + + fn filtered + 'graph>( + &self, + filtered_graph: GH, + ) -> Self::Filtered { + History { + graph: filtered_graph, + } + } +} diff --git a/raphtory/src/db/api/state/ops/mod.rs b/raphtory/src/db/api/state/ops/mod.rs new file mode 100644 index 0000000000..9677afc538 --- /dev/null +++ b/raphtory/src/db/api/state/ops/mod.rs @@ -0,0 +1,7 @@ +pub(crate) mod history; +pub(crate) mod node; +mod properties; + +pub use history::*; +pub use node::*; +pub use properties::*; diff --git a/raphtory/src/db/api/state/ops/node.rs b/raphtory/src/db/api/state/ops/node.rs new file mode 100644 index 0000000000..3fce15671d --- /dev/null +++ b/raphtory/src/db/api/state/ops/node.rs @@ -0,0 +1,146 @@ +use crate::{ + db::api::{storage::graph::storage_ops::GraphStorage, view::internal::CoreGraphOps}, + prelude::GraphViewOps, +}; +use raphtory_api::core::{ + entities::{GID, VID}, + storage::arc_str::ArcStr, + Direction, +}; +use std::{ops::Deref, sync::Arc}; + +pub trait NodeOp: Send + Sync { + type Output: Clone + Send + Sync; + fn apply(&self, storage: &GraphStorage, node: VID) -> Self::Output; + + fn map(self, map: fn(Self::Output) -> V) -> Map + where + Self: Sized, + { + Map { op: self, map } + } +} + +// Cannot use OneHopFilter because there is no way to specify the bound on Output +pub trait NodeOpFilter<'graph>: NodeOp + 'graph { + type Graph: GraphViewOps<'graph>; + type Filtered>: NodeOp + + NodeOpFilter<'graph, Graph = G> + + 'graph; + + fn graph(&self) -> &Self::Graph; + + fn filtered>(&self, graph: G) -> Self::Filtered; +} + +#[derive(Debug, Clone, Copy)] +pub struct Name; + +impl NodeOp for Name { + type Output = String; + + fn apply(&self, storage: &GraphStorage, node: VID) -> Self::Output { + storage.node_name(node) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct Id; + +impl NodeOp for Id { + type Output = GID; + + fn apply(&self, storage: &GraphStorage, node: VID) -> Self::Output { + storage.node_id(node) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct Type; +impl NodeOp for Type { + type Output = Option; + + fn apply(&self, storage: &GraphStorage, node: VID) -> Self::Output { + storage.node_type(node) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct TypeId; +impl NodeOp for TypeId { + type Output = usize; + + fn apply(&self, storage: &GraphStorage, node: VID) -> Self::Output { + storage.node_type_id(node) + } +} + +#[derive(Debug, Clone)] +pub struct Degree { + pub(crate) graph: G, + pub(crate) dir: Direction, +} + +impl<'graph, G: GraphViewOps<'graph>> NodeOp for Degree { + type Output = usize; + + fn apply(&self, storage: &GraphStorage, node: VID) -> usize { + storage.node_degree(node, self.dir, &self.graph) + } +} + +impl<'graph, G: GraphViewOps<'graph>> 
NodeOpFilter<'graph> for Degree { + type Graph = G; + type Filtered + 'graph> = Degree; + + fn graph(&self) -> &Self::Graph { + &self.graph + } + + fn filtered + 'graph>( + &self, + filtered_graph: GH, + ) -> Self::Filtered { + Degree { + graph: filtered_graph, + dir: self.dir, + } + } +} + +impl NodeOp for Arc> { + type Output = V; + fn apply(&self, storage: &GraphStorage, node: VID) -> V { + self.deref().apply(storage, node) + } +} + +#[derive(Debug, Copy, Clone)] +pub struct Map { + op: Op, + map: fn(Op::Output) -> V, +} + +impl NodeOp for Map { + type Output = V; + + fn apply(&self, storage: &GraphStorage, node: VID) -> Self::Output { + (self.map)(self.op.apply(storage, node)) + } +} + +impl<'graph, Op: NodeOpFilter<'graph>, V: Clone + Send + Sync + 'graph> NodeOpFilter<'graph> + for Map +{ + type Graph = Op::Graph; + type Filtered> = Map, V>; + + fn graph(&self) -> &Self::Graph { + self.op.graph() + } + + fn filtered>(&self, graph: G) -> Self::Filtered { + let op = self.op.filtered(graph); + Map { op, map: self.map } + } +} diff --git a/raphtory/src/db/api/state/ops/properties.rs b/raphtory/src/db/api/state/ops/properties.rs new file mode 100644 index 0000000000..458b8d5e91 --- /dev/null +++ b/raphtory/src/db/api/state/ops/properties.rs @@ -0,0 +1,21 @@ +use crate::{ + db::{ + api::{properties::Properties, state::NodeOp, storage::graph::storage_ops::GraphStorage}, + graph::node::NodeView, + }, + prelude::GraphViewOps, +}; +use raphtory_api::core::entities::VID; + +#[derive(Debug, Clone)] +pub struct GetProperties { + pub(crate) graph: G, +} + +impl<'graph, G: GraphViewOps<'graph>> NodeOp for GetProperties { + type Output = Properties>; + + fn apply(&self, _storage: &GraphStorage, node: VID) -> Self::Output { + Properties::new(NodeView::new_internal(self.graph.clone(), node)) + } +} diff --git a/raphtory/src/db/api/storage/graph/nodes/node_entry.rs b/raphtory/src/db/api/storage/graph/nodes/node_entry.rs index efc5c11c8d..690aaf3d57 100644 --- a/raphtory/src/db/api/storage/graph/nodes/node_entry.rs +++ b/raphtory/src/db/api/storage/graph/nodes/node_entry.rs @@ -69,7 +69,7 @@ impl<'b> NodeStorageEntry<'b> { self, layers: &LayerIds, dir: Direction, - ) -> impl Iterator + 'b { + ) -> impl Iterator + use<'b, '_> { match self { NodeStorageEntry::Mem(entry) => { StorageVariants::Mem(GenLockedIter::from(entry, |entry| { diff --git a/raphtory/src/db/api/view/internal/into_dynamic.rs b/raphtory/src/db/api/view/internal/into_dynamic.rs index 0994542059..3fde32c8c3 100644 --- a/raphtory/src/db/api/view/internal/into_dynamic.rs +++ b/raphtory/src/db/api/view/internal/into_dynamic.rs @@ -1,5 +1,5 @@ use crate::db::api::view::{ - internal::{DynamicGraph, Static}, + internal::{DynamicGraph, OneHopFilter, Static}, StaticGraphViewOps, }; @@ -18,3 +18,14 @@ impl IntoDynamic for DynamicGraph { self } } + +pub trait IntoDynHop: OneHopFilter<'static, FilteredGraph: IntoDynamic> { + fn into_dyn_hop(self) -> Self::Filtered; +} + +impl> IntoDynHop for T { + fn into_dyn_hop(self) -> Self::Filtered { + let graph = self.current_filter().clone().into_dynamic(); + self.one_hop_filtered(graph) + } +} diff --git a/raphtory/src/db/api/view/internal/mod.rs b/raphtory/src/db/api/view/internal/mod.rs index 744cbfc44f..f4d263e078 100644 --- a/raphtory/src/db/api/view/internal/mod.rs +++ b/raphtory/src/db/api/view/internal/mod.rs @@ -27,7 +27,7 @@ pub use core_ops::*; pub use edge_filter_ops::*; pub use filter_ops::*; pub use inherit::Base; -pub use into_dynamic::IntoDynamic; +pub use into_dynamic::{IntoDynHop, 
IntoDynamic}; pub use layer_ops::{DelegateLayerOps, InheritLayerOps, InternalLayerOps}; pub use list_ops::*; pub use materialize::*; diff --git a/raphtory/src/db/api/view/mod.rs b/raphtory/src/db/api/view/mod.rs index 8456aa491a..7d0c5cd18a 100644 --- a/raphtory/src/db/api/view/mod.rs +++ b/raphtory/src/db/api/view/mod.rs @@ -18,7 +18,8 @@ pub use edge_property_filter::EdgePropertyFilterOps; pub use exploded_edge_property_filter::ExplodedEdgePropertyFilterOps; pub use graph::*; pub use internal::{ - Base, BoxableGraphView, DynamicGraph, InheritViewOps, IntoDynamic, MaterializedGraph, + Base, BoxableGraphView, DynamicGraph, InheritViewOps, IntoDynHop, IntoDynamic, + MaterializedGraph, }; pub use layer::*; pub(crate) use node::BaseNodeViewOps; @@ -27,15 +28,4 @@ pub use node_property_filter::NodePropertyFilterOps; pub use reset_filter::*; pub use time::*; -pub type BoxedIter = Box + Send>; -pub type BoxedLIter<'a, T> = Box + Send + 'a>; - -pub trait IntoDynBoxed<'a, T> { - fn into_dyn_boxed(self) -> BoxedLIter<'a, T>; -} - -impl<'a, T, I: Iterator + Send + 'a> IntoDynBoxed<'a, T> for I { - fn into_dyn_boxed(self) -> BoxedLIter<'a, T> { - Box::new(self) - } -} +pub use raphtory_api::iter::{BoxedIter, BoxedLIter, IntoDynBoxed}; diff --git a/raphtory/src/db/api/view/node.rs b/raphtory/src/db/api/view/node.rs index aba41b55f0..6097ae2942 100644 --- a/raphtory/src/db/api/view/node.rs +++ b/raphtory/src/db/api/view/node.rs @@ -5,41 +5,31 @@ use crate::{ Direction, }, db::api::{ - properties::{internal::PropertiesOps, Properties}, - storage::graph::{nodes::node_storage_ops::NodeStorageOps, storage_ops::GraphStorage}, - view::{ - internal::{CoreGraphOps, OneHopFilter, TimeSemantics}, - reset_filter::ResetFilter, - TimeOps, - }, + properties::internal::PropertiesOps, + state::{ops, NodeOp}, + storage::graph::storage_ops::GraphStorage, + view::{internal::OneHopFilter, reset_filter::ResetFilter, TimeOps}, }, prelude::{EdgeViewOps, GraphViewOps, LayerOps}, }; use chrono::{DateTime, Utc}; -use raphtory_api::core::{entities::GID, storage::arc_str::ArcStr}; pub trait BaseNodeViewOps<'graph>: Clone + TimeOps<'graph> + LayerOps<'graph> { type BaseGraph: GraphViewOps<'graph>; type Graph: GraphViewOps<'graph>; - type ValueType: 'graph + type ValueType: 'graph where - T: 'graph; + Op: NodeOp + 'graph, + Op::Output: 'graph; type PropType: PropertiesOps + Clone + 'graph; type PathType: NodeViewOps<'graph, BaseGraph = Self::BaseGraph, Graph = Self::BaseGraph> + 'graph; type Edges: EdgeViewOps<'graph, Graph = Self::Graph, BaseGraph = Self::BaseGraph> + 'graph; - fn map< - O: Clone + Send + Sync + 'graph, - F: Fn(&GraphStorage, &Self::Graph, VID) -> O + Send + Sync + Clone + 'graph, - >( - &self, - op: F, - ) -> Self::ValueType; - - fn as_props(&self) -> Self::ValueType>; + fn graph(&self) -> &Self::Graph; + fn map(&self, op: F) -> Self::ValueType; fn map_edges< I: Iterator + Send + 'graph, F: Fn(&GraphStorage, &Self::Graph, VID) -> I + Send + Sync + Clone + 'graph, @@ -61,73 +51,79 @@ pub trait BaseNodeViewOps<'graph>: Clone + TimeOps<'graph> + LayerOps<'graph> { pub trait NodeViewOps<'graph>: Clone + TimeOps<'graph> + LayerOps<'graph> { type BaseGraph: GraphViewOps<'graph>; type Graph: GraphViewOps<'graph>; - type ValueType: 'graph + type ValueType: 'graph where - T: 'graph; + T: 'graph, + T::Output: 'graph; type PathType: NodeViewOps<'graph, BaseGraph = Self::BaseGraph, Graph = Self::BaseGraph> + 'graph; - type PropType: PropertiesOps + Clone + 'graph; type Edges: EdgeViewOps<'graph, Graph = Self::Graph, 
BaseGraph = Self::BaseGraph> + 'graph; /// Get the numeric id of the node - fn id(&self) -> Self::ValueType; + fn id(&self) -> Self::ValueType; /// Get the name of this node if a user has set one otherwise it returns the ID. /// /// Returns: /// /// The name of the node if one exists, otherwise the ID as a string. - fn name(&self) -> Self::ValueType; + fn name(&self) -> Self::ValueType; /// Returns the type of node - fn node_type(&self) -> Self::ValueType>; - fn node_type_id(&self) -> Self::ValueType; + fn node_type(&self) -> Self::ValueType; + fn node_type_id(&self) -> Self::ValueType; /// Get the timestamp for the earliest activity of the node - fn earliest_time(&self) -> Self::ValueType>; + fn earliest_time(&self) -> Self::ValueType>; - fn earliest_date_time(&self) -> Self::ValueType>>; + fn earliest_date_time( + &self, + ) -> Self::ValueType, Option>>>; /// Get the timestamp for the latest activity of the node - fn latest_time(&self) -> Self::ValueType>; + fn latest_time(&self) -> Self::ValueType>; - fn latest_date_time(&self) -> Self::ValueType>>; + fn latest_date_time( + &self, + ) -> Self::ValueType, Option>>>; /// Gets the history of the node (time that the node was added and times when changes were made to the node) - fn history(&self) -> Self::ValueType>; + fn history(&self) -> Self::ValueType>; /// Gets the history of the node (time that the node was added and times when changes were made to the node) as `DateTime` objects if parseable - fn history_date_time(&self) -> Self::ValueType>>>; + fn history_date_time( + &self, + ) -> Self::ValueType, Option>>>>; //Returns true if the node has any updates within the current window, otherwise false - fn is_active(&self) -> Self::ValueType; + fn is_active(&self) -> Self::ValueType, bool>>; /// Get a view of the temporal properties of this node. /// /// Returns: /// /// A view with the names of the properties as keys and the property values as values. - fn properties(&self) -> Self::ValueType>; + fn properties(&self) -> Self::ValueType>; /// Get the degree of this node (i.e., the number of edges that are incident to it). /// /// Returns: /// /// The degree of this node. - fn degree(&self) -> Self::ValueType; + fn degree(&self) -> Self::ValueType>; /// Get the in-degree of this node (i.e., the number of edges that point into it). /// /// Returns: /// /// The in-degree of this node. - fn in_degree(&self) -> Self::ValueType; + fn in_degree(&self) -> Self::ValueType>; /// Get the out-degree of this node (i.e., the number of edges that point out of it). /// /// Returns: /// /// The out-degree of this node. - fn out_degree(&self) -> Self::ValueType; + fn out_degree(&self) -> Self::ValueType>; /// Get the edges that are incident to this node. 
/// @@ -175,80 +171,123 @@ pub trait NodeViewOps<'graph>: Clone + TimeOps<'graph> + LayerOps<'graph> { impl<'graph, V: BaseNodeViewOps<'graph> + 'graph> NodeViewOps<'graph> for V { type BaseGraph = V::BaseGraph; type Graph = V::Graph; - type ValueType = V::ValueType; + type ValueType + = V::ValueType + where + T::Output: 'graph; type PathType = V::PathType; - type PropType = V::PropType; type Edges = V::Edges; #[inline] - fn id(&self) -> Self::ValueType { - self.map(|cg, _g, v| cg.node_entry(v).id().into()) + fn id(&self) -> Self::ValueType { + self.map(ops::Id) } #[inline] - fn name(&self) -> Self::ValueType { - self.map(|_cg, g, v| g.node_name(v)) + fn name(&self) -> Self::ValueType { + self.map(ops::Name) } #[inline] - fn node_type(&self) -> Self::ValueType> { - self.map(|_cg, g, v| g.node_type(v)) + fn node_type(&self) -> Self::ValueType { + self.map(ops::Type) } #[inline] - fn node_type_id(&self) -> Self::ValueType { - self.map(|_cg, g, v| g.node_type_id(v)) + fn node_type_id(&self) -> Self::ValueType { + self.map(ops::TypeId) } #[inline] - fn earliest_time(&self) -> Self::ValueType> { - self.map(|_cg, g, v| g.node_earliest_time(v)) + fn earliest_time(&self) -> Self::ValueType> { + let op = ops::EarliestTime { + graph: self.graph().clone(), + }; + self.map(op) } #[inline] - fn earliest_date_time(&self) -> Self::ValueType>> { - self.map(|_cg, g, v| g.node_earliest_time(v)?.dt()) + fn earliest_date_time( + &self, + ) -> Self::ValueType, Option>>> { + let op = ops::EarliestTime { + graph: self.graph().clone(), + } + .map(|t| t.and_then(|t| t.dt())); + self.map(op) } #[inline] - fn latest_time(&self) -> Self::ValueType> { - self.map(|_cg, g, v| g.node_latest_time(v)) + fn latest_time(&self) -> Self::ValueType> { + let op = ops::LatestTime { + graph: self.graph().clone(), + }; + self.map(op) } #[inline] - fn latest_date_time(&self) -> Self::ValueType>> { - self.map(|_cg, g, v| g.node_latest_time(v)?.dt()) + fn latest_date_time( + &self, + ) -> Self::ValueType, Option>>> { + let op = ops::LatestTime { + graph: self.graph().clone(), + } + .map(|t| t.and_then(|t| t.dt())); + self.map(op) } #[inline] - fn history(&self) -> Self::ValueType> { - self.map(|_cg, g, v| g.node_history(v)) + fn history(&self) -> Self::ValueType> { + let op = ops::History { + graph: self.graph().clone(), + }; + self.map(op) } #[inline] - fn history_date_time(&self) -> Self::ValueType>>> { - self.map(|_cg, g, v| { - g.node_history(v) - .iter() - .map(|t| t.dt()) - .collect::>>() - }) + fn history_date_time( + &self, + ) -> Self::ValueType, Option>>>> { + let op = ops::History { + graph: self.graph().clone(), + } + .map(|h| h.into_iter().map(|t| t.dt()).collect()); + self.map(op) } - fn is_active(&self) -> Self::ValueType { - self.map(|_cg, g, v| !g.node_history(v).is_empty()) + fn is_active(&self) -> Self::ValueType, bool>> { + let op = ops::History { + graph: self.graph().clone(), + } + .map(|h| !h.is_empty()); + self.map(op) } #[inline] - fn properties(&self) -> Self::ValueType> { - self.as_props() + fn properties(&self) -> Self::ValueType> { + let op = ops::GetProperties { + graph: self.graph().clone(), + }; + self.map(op) } #[inline] - fn degree(&self) -> Self::ValueType { - self.map(|cg, g, v| cg.node_degree(v, Direction::BOTH, g)) + fn degree(&self) -> Self::ValueType> { + let op = ops::Degree { + graph: self.graph().clone(), + dir: Direction::BOTH, + }; + self.map(op) } #[inline] - fn in_degree(&self) -> Self::ValueType { - self.map(|cg, g, v| cg.node_degree(v, Direction::IN, g)) + fn in_degree(&self) -> 
Self::ValueType> { + let op = ops::Degree { + graph: self.graph().clone(), + dir: Direction::IN, + }; + self.map(op) } #[inline] - fn out_degree(&self) -> Self::ValueType { - self.map(|cg, g, v| cg.node_degree(v, Direction::OUT, g)) + fn out_degree(&self) -> Self::ValueType> { + let op = ops::Degree { + graph: self.graph().clone(), + dir: Direction::OUT, + }; + self.map(op) } #[inline] fn edges(&self) -> Self::Edges { diff --git a/raphtory/src/db/graph/graph.rs b/raphtory/src/db/graph/graph.rs index 49f83120ea..d7f74952f5 100644 --- a/raphtory/src/db/graph/graph.rs +++ b/raphtory/src/db/graph/graph.rs @@ -417,7 +417,10 @@ mod db_tests { algorithms::components::weakly_connected_components, core::{ utils::{ - errors::GraphError, + errors::{ + GraphError, + GraphError::{EdgeExistsError, NodeExistsError, NodesExistError}, + }, time::{error::ParseTimeError, TryIntoTime}, }, Prop, @@ -608,73 +611,6 @@ mod db_tests { .all(|&(_, src, dst)| g.edge(src, dst).is_some()) } - #[test] - fn prop_json_test() { - let g = Graph::new(); - let _ = g.add_node(0, "A", NO_PROPS, None).unwrap(); - let _ = g.add_node(0, "B", NO_PROPS, None).unwrap(); - let e = g.add_edge(0, "A", "B", NO_PROPS, None).unwrap(); - e.add_constant_properties(vec![("aprop".to_string(), Prop::Bool(true))], None) - .unwrap(); - let ee = g.add_edge(0, "A", "B", NO_PROPS, Some("LAYERA")).unwrap(); - ee.add_constant_properties( - vec![("aprop".to_string(), Prop::Bool(false))], - Some("LAYERA"), - ) - .unwrap(); - let json_res = g - .edge("A", "B") - .unwrap() - .properties() - .constant() - .get("aprop") - .unwrap() - .to_json(); - let json_as_map = json_res.as_object().unwrap(); - assert_eq!(json_as_map.len(), 2); - assert_eq!(json_as_map.get("LAYERA"), Some(&Value::Bool(false))); - assert_eq!(json_as_map.get("_default"), Some(&Value::Bool(true))); - - let eee = g.add_edge(0, "A", "B", NO_PROPS, Some("LAYERB")).unwrap(); - let v: Vec = vec![Prop::Bool(true), Prop::Bool(false), Prop::U64(0)]; - eee.add_constant_properties( - vec![("bprop".to_string(), Prop::List(Arc::new(v)))], - Some("LAYERB"), - ) - .unwrap(); - let json_res = g - .edge("A", "B") - .unwrap() - .properties() - .constant() - .get("bprop") - .unwrap() - .to_json(); - let list_res = json_res.as_object().unwrap().get("LAYERB").unwrap(); - assert_eq!(list_res.as_array().unwrap().len(), 3); - - let eeee = g.add_edge(0, "A", "B", NO_PROPS, Some("LAYERC")).unwrap(); - let v: HashMap = HashMap::from([ - (ArcStr::from("H".to_string()), Prop::Bool(false)), - (ArcStr::from("Y".to_string()), Prop::U64(0)), - ]); - eeee.add_constant_properties( - vec![("mymap".to_string(), Prop::Map(Arc::new(v)))], - Some("LAYERC"), - ) - .unwrap(); - let json_res = g - .edge("A", "B") - .unwrap() - .properties() - .constant() - .get("mymap") - .unwrap() - .to_json(); - let map_res = json_res.as_object().unwrap().get("LAYERC").unwrap(); - assert_eq!(map_res.as_object().unwrap().len(), 2); - } - #[test] fn import_from_another_graph() { let g = Graph::new(); @@ -696,6 +632,24 @@ mod db_tests { Prop::I64(11) ); + let gg = Graph::new(); + gg.add_node(1, "B", NO_PROPS, None).unwrap(); + let res = gg.import_nodes(vec![&g_a, &g_b], false); + match res { + Err(NodesExistError(ids)) => { + assert_eq!( + ids.into_iter() + .map(|id| id.to_string()) + .collect::>(), + vec!["B"], + ); + } + Err(e) => panic!("Unexpected error: {:?}", e), + Ok(_) => panic!("Expected error but got Ok"), + } + + assert_eq!(gg.node("A"), None); + let gg = Graph::new(); let _ = gg.import_nodes(vec![&g_a, &g_b], false).unwrap(); 
assert_eq!(gg.nodes().name().collect_vec(), vec!["A", "B"]); @@ -721,9 +675,348 @@ mod db_tests { assert_eq!(res.properties().as_vec(), e_a_b_p.properties().as_vec()); let e_c_d = g.add_edge(4, "C", "D", NO_PROPS, None).unwrap(); + let gg = Graph::new(); let _ = gg.import_edges(vec![&e_a_b, &e_c_d], false).unwrap(); assert_eq!(gg.edges().len(), 2); + + let gg = Graph::new(); + let res = gg.add_edge(1, "C", "D", NO_PROPS, None); + let res = gg.import_edges(vec![&e_a_b, &e_c_d], false); + match res { + Err(GraphError::EdgesExistError(duplicates)) => { + assert_eq!( + duplicates + .into_iter() + .map(|(x, y)| (x.to_string(), y.to_string())) + .collect::>(), + vec![("C".to_string(), "D".to_string())] + ); + } + Err(e) => panic!("Unexpected error: {:?}", e), + Ok(_) => panic!("Expected error but got Ok"), + } + assert_eq!(gg.edge("A", "B"), None); + } + + #[test] + fn import_node_as() { + let g = Graph::new(); + let g_a = g.add_node(0, "A", NO_PROPS, None).unwrap(); + let g_b = g + .add_node(1, "B", vec![("temp".to_string(), Prop::Bool(true))], None) + .unwrap(); + let _ = g_b.add_constant_properties(vec![("con".to_string(), Prop::I64(11))]); + + let gg = Graph::new(); + let res = gg.import_node_as(&g_a, "X", false).unwrap(); + assert_eq!(res.name(), "X"); + assert_eq!(res.history(), vec![0]); + + let _ = gg.add_node(1, "Y", NO_PROPS, None).unwrap(); + let res = gg.import_node_as(&g_b, "Y", false); + match res { + Err(NodeExistsError(id)) => { + assert_eq!(id.to_string(), "Y"); + } + Err(e) => panic!("Unexpected error: {:?}", e), + Ok(_) => panic!("Expected error but got Ok"), + } + + let mut nodes = gg.nodes().name().collect_vec(); + nodes.sort(); + assert_eq!(nodes, vec!["X", "Y"]); // Nodes up until first failure are imported + let y = gg.node("Y").unwrap(); + + assert_eq!(y.name(), "Y"); + assert_eq!(y.history(), vec![1]); + assert_eq!(y.properties().get("temp"), None); + assert_eq!(y.properties().constant().get("con"), None); + } + + #[test] + fn import_node_as_merge() { + let g = Graph::new(); + let g_a = g.add_node(0, "A", NO_PROPS, None).unwrap(); + let g_b = g + .add_node(1, "B", vec![("temp".to_string(), Prop::Bool(true))], None) + .unwrap(); + let _ = g_b.add_constant_properties(vec![("con".to_string(), Prop::I64(11))]); + + let gg = Graph::new(); + gg.add_node(1, "Y", NO_PROPS, None).unwrap(); + + let res = gg.import_node_as(&g_a, "X", false).unwrap(); + assert_eq!(res.name(), "X"); + assert_eq!(res.history(), vec![0]); + + let res = gg.import_node_as(&g_b, "Y", true).unwrap(); + assert_eq!(res.name(), "Y"); + assert_eq!(res.history(), vec![1]); + assert_eq!(res.properties().get("temp").unwrap(), Prop::Bool(true)); + assert_eq!( + res.properties().constant().get("con").unwrap(), + Prop::I64(11) + ); + } + + #[test] + fn import_nodes_as() { + let g = Graph::new(); + let g_a = g.add_node(0, "A", NO_PROPS, None).unwrap(); + let g_b = g + .add_node(1, "B", vec![("temp".to_string(), Prop::Bool(true))], None) + .unwrap(); + let _ = g_b.add_constant_properties(vec![("con".to_string(), Prop::I64(11))]); + let g_c = g.add_node(0, "C", NO_PROPS, None).unwrap(); + + let gg = Graph::new(); + gg.add_node(1, "Q", NO_PROPS, None).unwrap(); + gg.add_node(1, "R", NO_PROPS, None).unwrap(); + let res = gg.import_nodes_as(vec![&g_a, &g_b, &g_c], vec!["P", "Q", "R"], false); + match res { + Err(NodesExistError(ids)) => { + assert_eq!( + ids.into_iter() + .map(|id| id.to_string()) + .collect::>(), + vec!["Q", "R"], + ); + } + Err(e) => panic!("Unexpected error: {:?}", e), + Ok(_) => panic!("Expected 
error but got Ok"), + } + let mut nodes = gg.nodes().name().collect_vec(); + nodes.sort(); + assert_eq!(nodes, vec!["Q", "R"]); // Nodes up until first failure are imported + let y = gg.node("Q").unwrap(); + assert_eq!(y.name(), "Q"); + assert_eq!(y.history(), vec![1]); + assert_eq!(y.properties().get("temp"), None); + assert_eq!(y.properties().constant().get("con"), None); + } + + #[test] + fn import_nodes_as_merge() { + let g = Graph::new(); + let g_a = g.add_node(0, "A", NO_PROPS, None).unwrap(); + let g_b = g + .add_node(1, "B", vec![("temp".to_string(), Prop::Bool(true))], None) + .unwrap(); + let _ = g_b.add_constant_properties(vec![("con".to_string(), Prop::I64(11))]); + + let gg = Graph::new(); + gg.add_node(1, "Q", NO_PROPS, None).unwrap(); + let _ = gg + .import_nodes_as(vec![&g_a, &g_b], vec!["P", "Q"], true) + .unwrap(); + let mut nodes = gg.nodes().name().collect_vec(); + nodes.sort(); + assert_eq!(nodes, vec!["P", "Q"]); + let y = gg.node("Q").unwrap(); + assert_eq!(y.name(), "Q"); + assert_eq!(y.history(), vec![1]); + assert_eq!(y.properties().get("temp").unwrap(), Prop::Bool(true)); + assert_eq!(y.properties().constant().get("con").unwrap(), Prop::I64(11)); + } + + #[test] + fn import_edge_as() { + let g = Graph::new(); + let g_a = g.add_node(0, "A", NO_PROPS, None).unwrap(); + let g_b = g + .add_node(1, "B", vec![("temp".to_string(), Prop::Bool(true))], None) + .unwrap(); + let _ = g_b.add_constant_properties(vec![("con".to_string(), Prop::I64(11))]); + let e_a_b = g + .add_edge( + 2, + "A", + "B", + vec![("e_temp".to_string(), Prop::Bool(false))], + None, + ) + .unwrap(); + let e_b_c = g + .add_edge( + 2, + "B", + "C", + vec![("e_temp".to_string(), Prop::Bool(false))], + None, + ) + .unwrap(); + + let gg = Graph::new(); + let e = gg.add_edge(1, "X", "Y", NO_PROPS, None).unwrap(); + let res = gg.import_edge_as(&e_b_c, ("Y", "Z"), false); + let res = gg.import_edge_as(&e_a_b, ("X", "Y"), false); + match res { + Err(EdgeExistsError(src_id, dst_id)) => { + assert_eq!(src_id.to_string(), "X"); + assert_eq!(dst_id.to_string(), "Y"); + } + Err(e) => panic!("Unexpected error: {:?}", e), + Ok(_) => panic!("Expected error but got Ok"), + } + let mut nodes = gg.nodes().name().collect_vec(); + nodes.sort(); + assert_eq!(nodes, vec!["X", "Y", "Z"]); + let x = gg.node("X").unwrap(); + assert_eq!(x.name(), "X"); + assert_eq!(x.history(), vec![1]); + let y = gg.node("Y").unwrap(); + assert_eq!(y.name(), "Y"); + assert_eq!(y.history(), vec![1, 2]); + assert_eq!(y.properties().get("temp"), None); + assert_eq!(y.properties().constant().get("con"), None); + + let e_src = gg.edge("X", "Y").unwrap().src().name(); + let e_dst = gg.edge("X", "Y").unwrap().dst().name(); + assert_eq!(e_src, "X"); + assert_eq!(e_dst, "Y"); + + let props = gg.edge("X", "Y").unwrap().properties().as_vec(); + assert_eq!(props, vec![]); + } + + #[test] + fn import_edge_as_merge() { + let g = Graph::new(); + let g_a = g.add_node(0, "A", NO_PROPS, None).unwrap(); + let g_b = g + .add_node(1, "B", vec![("temp".to_string(), Prop::Bool(true))], None) + .unwrap(); + let _ = g_b.add_constant_properties(vec![("con".to_string(), Prop::I64(11))]); + let e_a_b = g + .add_edge( + 2, + "A", + "B", + vec![("e_temp".to_string(), Prop::Bool(false))], + None, + ) + .unwrap(); + + let gg = Graph::new(); + let _ = gg.add_edge(3, "X", "Y", NO_PROPS, None).unwrap(); + let res = gg.import_edge_as(&e_a_b, ("X", "Y"), true).unwrap(); + assert_eq!(res.src().name(), "X"); + assert_eq!(res.dst().name(), "Y"); + 
assert_eq!(res.properties().as_vec(), e_a_b.properties().as_vec()); + let mut nodes = gg.nodes().name().collect_vec(); + nodes.sort(); + assert_eq!(nodes, vec!["X", "Y"]); + let x = gg.node("X").unwrap(); + assert_eq!(x.name(), "X"); + assert_eq!(x.history(), vec![2, 3]); + let y = gg.node("Y").unwrap(); + assert_eq!(y.name(), "Y"); + assert_eq!(y.history(), vec![2, 3]); + assert_eq!(y.properties().get("temp"), None); + assert_eq!(y.properties().constant().get("con"), None); + } + + #[test] + fn import_edges_as() { + let g = Graph::new(); + let g_a = g.add_node(0, "A", NO_PROPS, None).unwrap(); + let g_b = g + .add_node(1, "B", vec![("temp".to_string(), Prop::Bool(true))], None) + .unwrap(); + let _ = g_b.add_constant_properties(vec![("con".to_string(), Prop::I64(11))]); + let g_c = g.add_node(0, "C", NO_PROPS, None).unwrap(); + let e_a_b = g + .add_edge( + 2, + "A", + "B", + vec![("e_temp".to_string(), Prop::Bool(false))], + None, + ) + .unwrap(); + let e_b_c = g.add_edge(2, "B", "C", NO_PROPS, None).unwrap(); + + let gg = Graph::new(); + let e = gg.add_edge(1, "Y", "Z", NO_PROPS, None).unwrap(); + let res = gg.import_edges_as([&e_a_b, &e_b_c], [("X", "Y"), ("Y", "Z")], false); + match res { + Err(GraphError::EdgesExistError(duplicates)) => { + assert_eq!( + duplicates + .into_iter() + .map(|(x, y)| (x.to_string(), y.to_string())) + .collect::>(), + vec![("Y".to_string(), "Z".to_string())] + ); + } + Err(e) => panic!("Unexpected error: {:?}", e), + Ok(_) => panic!("Expected error but got Ok"), + } + let mut nodes = gg.nodes().name().collect_vec(); + nodes.sort(); + assert_eq!(nodes, vec!["Y", "Z"]); + let y = gg.node("Y").unwrap(); + assert_eq!(y.name(), "Y"); + assert_eq!(y.history(), vec![1]); + assert_eq!(y.properties().get("temp"), None); + assert_eq!(y.properties().constant().get("con"), None); + let x = gg.node("Z").unwrap(); + assert_eq!(x.name(), "Z"); + assert_eq!(x.history(), vec![1]); + + assert!(gg.edge("X", "Y").is_none()); + + let e_y_z = gg.edge("Y", "Z").unwrap(); + assert_eq!( + (e_y_z.src().name().as_str(), e_y_z.dst().name().as_str()), + ("Y", "Z") + ); + + let props = e_y_z.properties().as_vec(); + assert_eq!(props, vec![]); + } + + #[test] + fn import_edges_as_merge() { + let g = Graph::new(); + let g_a = g.add_node(0, "A", NO_PROPS, None).unwrap(); + let g_b = g + .add_node(1, "B", vec![("temp".to_string(), Prop::Bool(true))], None) + .unwrap(); + let _ = g_b.add_constant_properties(vec![("con".to_string(), Prop::I64(11))]); + let e_a_b = g + .add_edge( + 2, + "A", + "B", + vec![("e_temp".to_string(), Prop::Bool(false))], + None, + ) + .unwrap(); + + let gg = Graph::new(); + let _ = gg.add_edge(3, "X", "Y", NO_PROPS, None).unwrap(); + let res = gg.import_edges_as([&e_a_b], [("X", "Y")], true).unwrap(); + + let e_x_y = gg.edge("X", "Y").unwrap(); + assert_eq!( + (e_x_y.src().name().as_str(), e_x_y.dst().name().as_str()), + ("X", "Y") + ); + assert_eq!(e_x_y.properties().get("e_temp").unwrap(), Prop::Bool(false)); + + let mut nodes = gg.nodes().name().collect_vec(); + nodes.sort(); + assert_eq!(nodes, vec!["X", "Y"]); + let x = gg.node("X").unwrap(); + assert_eq!(x.name(), "X"); + assert_eq!(x.history(), vec![2, 3]); + let y = gg.node("Y").unwrap(); + assert_eq!(y.name(), "Y"); + assert_eq!(y.history(), vec![2, 3]); + assert_eq!(y.properties().get("temp"), None); + assert_eq!(y.properties().constant().get("con"), None); } #[test] diff --git a/raphtory/src/db/graph/node.rs b/raphtory/src/db/graph/node.rs index 933d1fa252..1887c7d214 100644 --- 
a/raphtory/src/db/graph/node.rs +++ b/raphtory/src/db/graph/node.rs @@ -11,9 +11,8 @@ use crate::{ internal::{InternalAdditionOps, InternalPropertyAdditionOps}, time_from_input, CollectProperties, TryIntoInputTime, }, - properties::{ - internal::{ConstPropertiesOps, TemporalPropertiesOps, TemporalPropertyViewOps}, - Properties, + properties::internal::{ + ConstPropertiesOps, TemporalPropertiesOps, TemporalPropertyViewOps, }, view::{ internal::{CoreGraphOps, OneHopFilter, Static, TimeSemantics}, @@ -27,7 +26,10 @@ use crate::{ use crate::{ core::{entities::nodes::node_ref::AsNodeRef, storage::timeindex::AsTime, PropType}, - db::{api::storage::graph::storage_ops::GraphStorage, graph::edges::Edges}, + db::{ + api::{state::NodeOp, storage::graph::storage_ops::GraphStorage}, + graph::edges::Edges, + }, }; use chrono::{DateTime, Utc}; use raphtory_api::core::storage::arc_str::ArcStr; @@ -308,23 +310,21 @@ impl<'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>> BaseNodeViewOps< type BaseGraph = G; type Graph = GH; type ValueType - = T + = T::Output where - T: 'graph; + T: NodeOp + 'graph, + T::Output: 'graph; type PropType = Self; type PathType = PathFromNode<'graph, G, G>; type Edges = Edges<'graph, G, GH>; - fn map O>( - &self, - op: F, - ) -> Self::ValueType { - let cg = self.graph.core_graph(); - op(cg, &self.graph, self.node) + fn graph(&self) -> &Self::Graph { + &self.graph } - fn as_props(&self) -> Self::ValueType> { - Properties::new(self.clone()) + fn map(&self, op: F) -> Self::ValueType { + let cg = self.graph.core_graph(); + op.apply(cg, self.node) } fn map_edges< diff --git a/raphtory/src/db/graph/nodes.rs b/raphtory/src/db/graph/nodes.rs index 7a415c26ff..52905cf1dd 100644 --- a/raphtory/src/db/graph/nodes.rs +++ b/raphtory/src/db/graph/nodes.rs @@ -2,7 +2,6 @@ use crate::{ core::entities::{edges::edge_ref::EdgeRef, nodes::node_ref::AsNodeRef, VID}, db::{ api::{ - properties::Properties, state::LazyNodeState, storage::graph::storage_ops::GraphStorage, view::{ @@ -15,9 +14,13 @@ use crate::{ prelude::*, }; -use crate::db::graph::create_node_type_filter; +use crate::db::{api::state::NodeOp, graph::create_node_type_filter}; use rayon::iter::ParallelIterator; -use std::{marker::PhantomData, sync::Arc}; +use std::{ + fmt::{Debug, Formatter}, + marker::PhantomData, + sync::Arc, +}; #[derive(Clone)] pub struct Nodes<'graph, G, GH = G> { @@ -27,6 +30,14 @@ pub struct Nodes<'graph, G, GH = G> { _marker: PhantomData<&'graph ()>, } +impl<'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph> + Debug> Debug + for Nodes<'graph, G, GH> +{ + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_list().entries(self.iter()).finish() + } +} + impl<'graph, G, GH> From> for Nodes<'graph, DynamicGraph, DynamicGraph> where G: GraphViewOps<'graph> + IntoDynamic, @@ -90,6 +101,12 @@ where } } + pub(crate) fn par_iter_refs(&self) -> impl ParallelIterator + 'graph { + let g = self.graph.core_graph().lock(); + let node_types_filter = self.node_types_filter.clone(); + g.into_nodes_par(self.graph.clone(), node_types_filter) + } + #[inline] pub(crate) fn iter_refs(&self) -> impl Iterator + 'graph { let g = self.graph.core_graph().lock(); @@ -169,6 +186,10 @@ where pub fn get_temporal_prop_id(&self, prop_name: &str) -> Option { self.graph.node_meta().get_prop_id(prop_name, false) } + + pub fn is_filtered(&self) -> bool { + self.node_types_filter.is_some() || self.graph.nodes_filtered() + } } impl<'graph, G, GH> BaseNodeViewOps<'graph> for Nodes<'graph, G, GH> @@ -178,25 +199,20 @@ where { 
type BaseGraph = G; type Graph = GH; - type ValueType = LazyNodeState<'graph, T, G, GH>; + type ValueType = LazyNodeState<'graph, T, G, GH>; type PropType = NodeView; type PathType = PathFromGraph<'graph, G, G>; type Edges = NestedEdges<'graph, G, GH>; - fn map< - O: Clone + Send + Sync + 'graph, - F: Fn(&GraphStorage, &Self::Graph, VID) -> O + Send + Sync + 'graph, - >( - &self, - op: F, - ) -> Self::ValueType { - let g = self.graph.clone(); - let bg = self.base_graph.clone(); - LazyNodeState::new(bg, g, self.node_types_filter.clone(), op) + fn graph(&self) -> &Self::Graph { + &self.graph } - fn as_props(&self) -> Self::ValueType> { - self.map(|_cg, g, v| Properties::new(NodeView::new_internal(g.clone(), v))) + fn map(&self, op: F) -> Self::ValueType + where + ::Output: 'graph, + { + LazyNodeState::new(op, self.clone()) } fn map_edges< diff --git a/raphtory/src/db/graph/path.rs b/raphtory/src/db/graph/path.rs index bdb7e0d1b4..d897aae924 100644 --- a/raphtory/src/db/graph/path.rs +++ b/raphtory/src/db/graph/path.rs @@ -2,7 +2,7 @@ use crate::{ core::entities::{edges::edge_ref::EdgeRef, VID}, db::{ api::{ - properties::Properties, + state::NodeOp, storage::graph::storage_ops::GraphStorage, view::{ internal::OneHopFilter, BaseNodeViewOps, BoxedLIter, DynamicGraph, IntoDynBoxed, @@ -147,30 +147,30 @@ impl<'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>> BaseNodeViewOps< { type BaseGraph = G; type Graph = GH; - type ValueType = BoxedLIter<'graph, BoxedLIter<'graph, T>>; + type ValueType = BoxedLIter<'graph, BoxedLIter<'graph, T::Output>>; type PropType = NodeView; type PathType = PathFromGraph<'graph, G, G>; type Edges = NestedEdges<'graph, G, GH>; - fn map O + Send + Clone + 'graph>( - &self, - op: F, - ) -> Self::ValueType { - let graph = self.graph.clone(); + fn graph(&self) -> &Self::Graph { + &self.graph + } + + fn map(&self, op: F) -> Self::ValueType + where + ::Output: 'graph, + { + let storage = self.graph.core_graph().lock(); self.iter_refs() .map(move |it| { - let graph = graph.clone(); let op = op.clone(); - it.map(move |node| op(graph.core_graph(), &graph, node)) + let storage = storage.clone(); + it.map(move |node| op.apply(&storage, node)) .into_dyn_boxed() }) .into_dyn_boxed() } - fn as_props(&self) -> Self::ValueType> { - self.map(|_cg, g, v| Properties::new(NodeView::new_internal(g.clone(), v))) - } - fn map_edges< I: Iterator + Send + 'graph, F: Fn(&GraphStorage, &Self::Graph, VID) -> I + Send + Sync + Clone + 'graph, @@ -396,24 +396,21 @@ impl<'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>> BaseNodeViewOps< { type BaseGraph = G; type Graph = GH; - type ValueType = BoxedLIter<'graph, T>; + type ValueType = BoxedLIter<'graph, T::Output>; type PropType = NodeView; type PathType = PathFromNode<'graph, G, G>; type Edges = Edges<'graph, G, GH>; - fn map O + Send + 'graph>( - &self, - op: F, - ) -> Self::ValueType { - let graph = self.graph.clone(); - Box::new( - self.iter_refs() - .map(move |node| op(graph.core_graph(), &graph, node)), - ) + fn graph(&self) -> &Self::Graph { + &self.graph } - fn as_props(&self) -> Self::ValueType> { - self.map(|_cg, g, v| Properties::new(NodeView::new_internal(g.clone(), v))) + fn map(&self, op: F) -> Self::ValueType + where + ::Output: 'graph, + { + let storage = self.graph.core_graph().lock(); + Box::new(self.iter_refs().map(move |node| op.apply(&storage, node))) } fn map_edges< diff --git a/raphtory/src/db/task/node/eval_node.rs b/raphtory/src/db/task/node/eval_node.rs index a76cee0698..6cb3cc396e 100644 --- 
a/raphtory/src/db/task/node/eval_node.rs +++ b/raphtory/src/db/task/node/eval_node.rs @@ -10,7 +10,7 @@ use crate::{ }, db::{ api::{ - properties::Properties, + state::NodeOp, storage::graph::storage_ops::GraphStorage, view::{internal::OneHopFilter, BaseNodeViewOps, BoxedLIter, IntoDynBoxed}, }, @@ -347,25 +347,21 @@ impl< { type BaseGraph = &'graph G; type Graph = GH; - type ValueType = Box + 'graph>; + type ValueType = Box + 'graph>; type PropType = NodeView; type PathType = EvalPathFromNode<'graph, 'a, G, &'graph G, CS, S>; type Edges = EvalEdges<'graph, 'a, G, GH, CS, S>; - fn map< - O: Clone + Send + Sync + 'graph, - F: Fn(&GraphStorage, &Self::Graph, VID) -> O + Send + Sync + Clone + 'graph, - >( - &self, - op: F, - ) -> Self::ValueType { - let graph = self.graph.clone(); - let storage = self.base_graph.storage; - Box::new(self.iter_refs().map(move |node| op(storage, &graph, node))) + fn graph(&self) -> &Self::Graph { + &self.graph } - fn as_props(&self) -> Self::ValueType> { - self.map(|_cg, g, v| Properties::new(NodeView::new_internal(g.clone(), v))) + fn map(&self, op: F) -> Self::ValueType + where + ::Output: 'graph, + { + let storage = self.base_graph.storage; + Box::new(self.iter_refs().map(move |node| op.apply(storage, node))) } fn map_edges< @@ -495,26 +491,23 @@ impl< { type BaseGraph = &'graph G; type Graph = GH; - type ValueType - = T + type ValueType + = T::Output where T: 'graph; type PropType = NodeView; type PathType = EvalPathFromNode<'graph, 'a, G, &'graph G, CS, S>; type Edges = EvalEdges<'graph, 'a, G, GH, CS, S>; - fn map< - O: Clone + Send + Sync + 'graph, - F: Fn(&GraphStorage, &Self::Graph, VID) -> O + Send + Sync + Clone + 'graph, - >( - &self, - op: F, - ) -> Self::ValueType { - op(self.eval_graph.storage, &self.graph, self.node) + fn graph(&self) -> &Self::Graph { + &self.graph } - fn as_props(&self) -> Self::ValueType> { - Properties::new(NodeView::new_internal(self.graph.clone(), self.node)) + fn map(&self, op: F) -> Self::ValueType + where + ::Output: 'graph, + { + op.apply(self.eval_graph.storage, self.node) } fn map_edges< diff --git a/raphtory/src/disk_graph/graph_impl/edge_storage_ops.rs b/raphtory/src/disk_graph/graph_impl/edge_storage_ops.rs index d8e8388e03..f0c124ae72 100644 --- a/raphtory/src/disk_graph/graph_impl/edge_storage_ops.rs +++ b/raphtory/src/disk_graph/graph_impl/edge_storage_ops.rs @@ -9,7 +9,6 @@ use crate::{ tprop_storage_ops::TPropOps, variants::layer_variants::LayerVariants, }, - disk_graph::graph_impl::tprops::read_tprop_column, }; use pometry_storage::{edge::Edge, tprops::DiskTProp}; use raphtory_api::core::{entities::edges::edge_ref::EdgeRef, storage::timeindex::TimeIndexEntry}; @@ -110,11 +109,12 @@ impl<'a> EdgeStorageOps<'a> for Edge<'a> { fn temporal_prop_layer(self, layer_id: usize, prop_id: usize) -> impl TPropOps<'a> + Sync + 'a { self.graph() .localize_edge_prop_id(layer_id, prop_id) - .and_then(|local_id| { - self.temporal_prop_layer_inner(layer_id, local_id) - .map(|field| (field, local_id)) + .map(|prop_id| { + self.graph() + .layer(layer_id) + .edges_storage() + .prop(self.eid(), prop_id) }) - .and_then(|(field, prop_id)| read_tprop_column(self, prop_id, layer_id, field)) .unwrap_or(DiskTProp::empty()) } diff --git a/raphtory/src/disk_graph/graph_impl/mod.rs b/raphtory/src/disk_graph/graph_impl/mod.rs index 7b9dd344bc..6a909aa574 100644 --- a/raphtory/src/disk_graph/graph_impl/mod.rs +++ b/raphtory/src/disk_graph/graph_impl/mod.rs @@ -5,7 +5,6 @@ use crate::{core::utils::errors::GraphError, 
disk_graph::DiskGraphStorage, prelu mod edge_storage_ops; mod interop; pub mod prop_conversion; -mod time_index_into_ops; pub mod tprops; #[derive(Debug)] diff --git a/raphtory/src/disk_graph/graph_impl/time_index_into_ops.rs b/raphtory/src/disk_graph/graph_impl/time_index_into_ops.rs deleted file mode 100644 index 74f35a4995..0000000000 --- a/raphtory/src/disk_graph/graph_impl/time_index_into_ops.rs +++ /dev/null @@ -1,176 +0,0 @@ -use crate::{ - core::storage::timeindex::{TimeIndexIntoOps, TimeIndexOps}, - db::api::view::IntoDynBoxed, -}; -use pometry_storage::{ - prelude::{ArrayOps, BaseArrayOps}, - timestamps::TimeStamps, -}; -use raphtory_api::core::storage::timeindex::TimeIndexEntry; -use std::ops::Range; - -impl<'a> TimeIndexIntoOps for TimeStamps<'a, TimeIndexEntry> { - type IndexType = TimeIndexEntry; - - type RangeType = Self; - - fn into_range(self, w: Range) -> Self { - let start = self.position(&w.start); - let end = self.position(&w.end); - let (timestamps, sec_index) = self.into_inner(); - TimeStamps::new( - timestamps.sliced(start..end), - sec_index.map(|sec_index| sec_index.sliced(start..end)), - ) - } - - #[allow(refining_impl_trait)] - fn into_iter(self) -> impl Iterator + Send + 'static { - let (timestamps, sec_index) = self.into_inner(); - let sec_iter: Box + Send> = sec_index - .map(|v| v.into_owned().map(|i| i as usize).into_dyn_boxed()) - .unwrap_or(self.timestamps().range().clone().into_dyn_boxed()); - timestamps - .into_owned() - .zip(sec_iter) - .map(|(t, s)| TimeIndexEntry(t, s)) - } -} -impl<'a> TimeIndexIntoOps for TimeStamps<'a, i64> { - type IndexType = i64; - - type RangeType = Self; - - fn into_range(self, w: Range) -> Self { - let start = self.timestamps().partition_point(|i| i < w.start); - let end = self.timestamps().partition_point(|i| i < w.end); - let (timestamps, _) = self.into_inner(); - TimeStamps::new(timestamps.sliced(start..end), None) - } - fn into_iter(self) -> impl Iterator + Send { - let (timestamps, _) = self.into_inner(); - timestamps - } -} - -impl<'a> TimeIndexOps for TimeStamps<'a, TimeIndexEntry> { - type IndexType = TimeIndexEntry; - type RangeType<'b> - = TimeStamps<'b, TimeIndexEntry> - where - Self: 'b; - - fn len(&self) -> usize { - self.timestamps().len() - } - - fn active(&self, w: Range) -> bool { - let i = self.position(&w.start); - i < self.timestamps().len() && self.get(i) < w.end - } - - fn range(&self, w: Range) -> Self::RangeType<'_> { - let start = self.position(&w.start); - let end = self.position(&w.end); - TimeStamps::new( - self.timestamps().sliced(start..end), - self.sec_index() - .map(|sec_index| sec_index.sliced(start..end)), - ) - } - - fn first_t(&self) -> Option { - (self.timestamps().len() > 0).then(|| self.timestamps().get(0)) - } - - fn first(&self) -> Option { - if self.timestamps().len() == 0 { - return None; - } - let t = self.timestamps().get(0); - let sec = self.sec_index().as_ref().map(|arr| arr.get(0)).unwrap_or(0); - - Some(TimeIndexEntry::new(t, sec as usize)) - } - - fn last_t(&self) -> Option { - (self.timestamps().len() > 0).then(|| self.timestamps().get(self.timestamps().len() - 1)) - } - - fn last(&self) -> Option { - if self.timestamps().len() == 0 { - return None; - } - let last_idx = self.timestamps().len() - 1; - - let t = self.timestamps().get(last_idx); - let sec = self - .sec_index() - .as_ref() - .map(|arr| arr.get(last_idx)) - .unwrap_or(0); - - Some(TimeIndexEntry::new(t, sec as usize)) - } - - fn iter(&self) -> Box + Send + 'a> { - let sec_iter = self - .sec_index() - .map(|v| 
v.map(|i| i as usize).into_dyn_boxed()) - .unwrap_or(self.timestamps().range().clone().into_dyn_boxed()); - Box::new( - self.timestamps() - .into_iter() - .zip(sec_iter) - .map(|(t, s)| TimeIndexEntry(t, s)), - ) - } -} -impl<'a> TimeIndexOps for TimeStamps<'a, i64> { - type IndexType = i64; - type RangeType<'b> - = TimeStamps<'b, i64> - where - Self: 'b; - - fn len(&self) -> usize { - self.timestamps().len() - } - fn active(&self, w: Range) -> bool { - let i = self.timestamps().insertion_point(w.start); - i < self.timestamps().len() && self.timestamps().get(i) < w.end - } - - fn range(&self, w: Range) -> Self::RangeType<'_> { - let start = self.timestamps().partition_point(|i| i < w.start); - let end = self.timestamps().partition_point(|i| i < w.end); - TimeStamps::new( - self.timestamps().sliced(start..end), - self.sec_index() - .map(|sec_index| sec_index.sliced(start..end)), - ) - } - - fn first(&self) -> Option { - if self.timestamps().len() == 0 { - return None; - } - let t = self.timestamps().get(0); - Some(t) - } - - fn last(&self) -> Option { - if self.timestamps().len() == 0 { - return None; - } - - let last_idx = self.timestamps().len() - 1; - - let t = self.timestamps().get(last_idx); - Some(t) - } - - fn iter(&self) -> Box + Send + '_> { - Box::new(self.timestamps().into_iter()) - } -} diff --git a/raphtory/src/disk_graph/graph_impl/tprops.rs b/raphtory/src/disk_graph/graph_impl/tprops.rs index 492360e9d3..9591c98174 100644 --- a/raphtory/src/disk_graph/graph_impl/tprops.rs +++ b/raphtory/src/disk_graph/graph_impl/tprops.rs @@ -1,8 +1,5 @@ use crate::{ - arrow2::{ - datatypes::{ArrowDataType as DataType, Field}, - types::{NativeType, Offset}, - }, + arrow2::types::{NativeType, Offset}, core::storage::timeindex::TimeIndexIntoOps, db::api::{storage::graph::tprop_storage_ops::TPropOps, view::IntoDynBoxed}, prelude::Prop, @@ -10,9 +7,7 @@ use crate::{ use polars_arrow::array::Array; use pometry_storage::{ chunked_array::{bool_col::ChunkedBoolCol, col::ChunkedPrimitiveCol, utf8_col::StringCol}, - edge::Edge, prelude::{ArrayOps, BaseArrayOps}, - timestamps::TimeStamps, tprops::{DiskTProp, EmptyTProp, TPropColumn}, }; use raphtory_api::core::storage::timeindex::TimeIndexEntry; @@ -187,55 +182,6 @@ impl<'a, I: Offset> TPropOps<'a> for TPropColumn<'a, StringCol<'a, I>, TimeIndex } } -fn new_tprop_column( - edge: Edge, - id: usize, - layer_id: usize, -) -> Option, TimeIndexEntry>> -where - Prop: From, -{ - let props = edge.prop_values::(id, layer_id)?; - let timestamps = TimeStamps::new(edge.timestamp_slice(layer_id), None); - Some(TPropColumn::new(props, timestamps)) -} - -pub fn read_tprop_column( - edge: Edge, - id: usize, - layer_id: usize, - field: Field, -) -> Option> { - match field.data_type() { - DataType::Boolean => { - let props = edge.prop_bool_values(id, layer_id)?; - let timestamps = TimeStamps::new(edge.timestamp_slice(layer_id), None); - Some(DiskTProp::Bool(TPropColumn::new(props, timestamps))) - } - DataType::Int64 => new_tprop_column::(edge, id, layer_id).map(DiskTProp::I64), - DataType::Int32 => new_tprop_column::(edge, id, layer_id).map(DiskTProp::I32), - DataType::UInt32 => new_tprop_column::(edge, id, layer_id).map(DiskTProp::U32), - DataType::UInt64 => new_tprop_column::(edge, id, layer_id).map(DiskTProp::U64), - DataType::Float32 => new_tprop_column::(edge, id, layer_id).map(DiskTProp::F32), - DataType::Float64 => new_tprop_column::(edge, id, layer_id).map(DiskTProp::F64), - DataType::Utf8 => { - let props = edge.prop_str_values::(id, layer_id)?; - let timestamps 
= TimeStamps::new(edge.timestamp_slice(layer_id), None); - Some(DiskTProp::Str32(TPropColumn::new(props, timestamps))) - } - DataType::LargeUtf8 => { - let props = edge.prop_str_values::(id, layer_id)?; - let timestamps = TimeStamps::new(edge.timestamp_slice(layer_id), None); - Some(DiskTProp::Str64(TPropColumn::new(props, timestamps))) - } - DataType::Date64 => new_tprop_column::(edge, id, layer_id).map(DiskTProp::I64), - DataType::Timestamp(_, _) => { - new_tprop_column::(edge, id, layer_id).map(DiskTProp::I64) - } - _ => todo!(), - } -} - impl<'a> TPropOps<'a> for EmptyTProp { fn last_before(&self, _t: TimeIndexEntry) -> Option<(TimeIndexEntry, Prop)> { None diff --git a/raphtory/src/disk_graph/storage_interface/node.rs b/raphtory/src/disk_graph/storage_interface/node.rs index cf99884f03..29706e57a2 100644 --- a/raphtory/src/disk_graph/storage_interface/node.rs +++ b/raphtory/src/disk_graph/storage_interface/node.rs @@ -15,7 +15,7 @@ use crate::{ }; use itertools::Itertools; use polars_arrow::datatypes::ArrowDataType; -use pometry_storage::{graph::TemporalGraph, timestamps::TimeStamps, GidRef}; +use pometry_storage::{graph::TemporalGraph, timestamps::TimeStamps, tprops::DiskTProp, GidRef}; use rayon::iter::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; use std::{borrow::Cow, iter, sync::Arc}; @@ -35,11 +35,14 @@ impl<'a> DiskNode<'a> { } } - pub fn temporal_node_prop_ids(self) -> Box + 'a> { - match &self.graph.node_properties().temporal_props { - Some(props) => Box::new(props.prop_dtypes().iter().enumerate().map(|(i, _)| i)), - None => Box::new(std::iter::empty()), - } + pub fn temporal_node_prop_ids(self) -> impl Iterator + 'a { + self.graph + .prop_mapping() + .nodes() + .into_iter() + .enumerate() + .filter(|(_, exists)| exists.is_some()) + .map(|(id, _)| id) } pub(crate) fn new(graph: &'a TemporalGraph, vid: VID) -> Self { @@ -176,13 +179,14 @@ impl<'a> DiskNode<'a> { .collect::>(), }; - if let Some(props) = &self.graph.node_properties().temporal_props { + for props in self.graph.node_properties().temporal_props() { let timestamps = props.timestamps::(self.vid); if timestamps.len() > 0 { let ts = timestamps.times(); additions.push(ts); } } + NodeAdditions::Col(additions) } } @@ -239,11 +243,16 @@ impl<'a> NodeStorageOps<'a> for DiskNode<'a> { fn tprop(self, prop_id: usize) -> impl TPropOps<'a> { self.graph - .node_properties() - .temporal_props - .as_ref() - .unwrap() - .prop(self.vid, prop_id) + .prop_mapping() + .localise_node_prop_id(prop_id) + .and_then(|(layer, local_prop_id)| { + self.graph + .node_properties() + .temporal_props() + .get(layer) + .map(|t_props| t_props.prop(self.vid, local_prop_id)) + }) + .unwrap_or(DiskTProp::empty()) } fn prop(self, prop_id: usize) -> Option { @@ -268,7 +277,7 @@ impl<'a> NodeStorageOps<'a> for DiskNode<'a> { self, layers: &LayerIds, dir: Direction, - ) -> Box + Send + 'a> { + ) -> impl Iterator + Send + 'a { //FIXME: something is capturing the &LayerIds lifetime when using impl Iterator Box::new(match dir { Direction::OUT => DirectionVariants::Out(self.out_edges(layers)), diff --git a/raphtory/src/io/arrow/df_loaders.rs b/raphtory/src/io/arrow/df_loaders.rs index 80ec595780..1a54ddf88c 100644 --- a/raphtory/src/io/arrow/df_loaders.rs +++ b/raphtory/src/io/arrow/df_loaders.rs @@ -15,6 +15,7 @@ use crate::{ serialise::incremental::InternalCache, }; use bytemuck::checked::cast_slice_mut; +#[cfg(feature = "python")] use kdam::{Bar, BarBuilder, BarExt}; use raphtory_api::{ atomic_extra::atomic_usize_from_mut_slice, @@ 
-27,6 +28,7 @@ use raphtory_api::{ use rayon::prelude::*; use std::{collections::HashMap, sync::atomic::Ordering}; +#[cfg(feature = "python")] fn build_progress_bar(des: String, num_rows: usize) -> Result { BarBuilder::default() .desc(des) @@ -414,6 +416,7 @@ pub(crate) fn load_edge_deletions_from_df< None }; let layer_index = layer_index.transpose()?; + #[cfg(feature = "python")] let mut pb = build_progress_bar("Loading edge deletions".to_string(), df_view.num_rows)?; let mut start_idx = graph.reserve_event_ids(df_view.num_rows)?; @@ -436,6 +439,7 @@ pub(crate) fn load_edge_deletions_from_df< graph.delete_edge((time, start_idx + idx), src, dst, layer)?; Ok::<(), GraphError>(()) })?; + #[cfg(feature = "python")] let _ = pb.update(df.len()); start_idx += df.len(); } @@ -480,6 +484,7 @@ pub(crate) fn load_node_props_from_df< .collect::, GraphError>>()?, None => vec![], }; + #[cfg(feature = "python")] let mut pb = build_progress_bar("Loading node properties".to_string(), df_view.num_rows)?; for chunk in df_view.chunks { let df = chunk?; @@ -512,6 +517,7 @@ pub(crate) fn load_node_props_from_df< } Ok::<(), GraphError>(()) })?; + #[cfg(feature = "python")] let _ = pb.update(df.len()); } Ok(()) @@ -543,6 +549,7 @@ pub(crate) fn load_edges_props_from_df< None }; let layer_index = layer_index.transpose()?; + #[cfg(feature = "python")] let mut pb = build_progress_bar("Loading edge properties".to_string(), df_view.num_rows)?; let shared_constant_properties = match shared_constant_properties { None => { @@ -596,6 +603,7 @@ pub(crate) fn load_edges_props_from_df< } Ok::<(), GraphError>(()) })?; + #[cfg(feature = "python")] let _ = pb.update(df.len()); } Ok(()) diff --git a/raphtory/src/python/algorithm/max_weight_matching.rs b/raphtory/src/python/algorithm/max_weight_matching.rs index ef7d8f7889..12eda6669a 100644 --- a/raphtory/src/python/algorithm/max_weight_matching.rs +++ b/raphtory/src/python/algorithm/max_weight_matching.rs @@ -13,7 +13,7 @@ use crate::{ use pyo3::prelude::*; /// A Matching (i.e., a set of edges that do not share any nodes) -#[pyclass(frozen, name = "Matching")] +#[pyclass(frozen, name = "Matching", module = "raphtory.algorithms")] pub struct PyMatching { inner: Matching, } diff --git a/raphtory/src/python/graph/algorithm_result.rs b/raphtory/src/python/graph/algorithm_result.rs index 7dfb276f43..2d55e18794 100644 --- a/raphtory/src/python/graph/algorithm_result.rs +++ b/raphtory/src/python/graph/algorithm_result.rs @@ -23,7 +23,7 @@ impl Repr for AlgorithmResultRs { - #[pyclass] + #[pyclass(module = "raphtory", frozen)] pub struct $objectName( $crate::algorithms::algorithm_result::AlgorithmResult< $rustGraph, @@ -93,7 +93,7 @@ macro_rules! py_algorithm_result_base { /// Sorts by node id in ascending or descending order. /// /// Arguments: - /// `reverse`: If `true`, sorts the result in descending order; otherwise, sorts in ascending order. + /// reverse: If `true`, sorts the result in descending order; otherwise, sorts in ascending order. Defaults to True. /// /// Returns: /// A sorted list of tuples containing node names and values. @@ -156,7 +156,7 @@ macro_rules! py_algorithm_result_partial_ord { /// Sorts the `AlgorithmResult` by its values in ascending or descending order. /// /// Arguments: - /// reverse (bool): If `true`, sorts the result in descending order; otherwise, sorts in ascending order. + /// reverse (bool): If `true`, sorts the result in descending order, otherwise, sorts in ascending order. Defaults to True. 
/// /// Returns: /// A sorted vector of tuples containing keys of type `H` and values of type `Y`. @@ -175,7 +175,7 @@ macro_rules! py_algorithm_result_partial_ord { /// value by the node name in either ascending or descending order. /// /// Arguments: - /// reverse (bool): A boolean value indicating whether the sorting should be done in reverse order or not. + /// reverse (bool): A boolean value indicating whether the sorting should be done in reverse order or not. Defaults to True. /// If reverse is true, the sorting will be done in descending order, otherwise it will be done in /// ascending order. /// @@ -196,8 +196,8 @@ macro_rules! py_algorithm_result_partial_ord { /// /// Arguments: /// k (int): The number of elements to retrieve. - /// percentage (bool): If `true`, the `k` parameter is treated as a percentage of total elements. - /// reverse (bool): If `true`, retrieves the elements in descending order; otherwise, in ascending order. + /// percentage (bool): If `True`, the `k` parameter is treated as a percentage of total elements. Defaults to False. + /// reverse (bool): If `True`, retrieves the elements in descending order, otherwise, in ascending order. Defaults to True. /// /// Returns: /// An Option containing a vector of tuples with keys of type `H` and values of type `Y`. diff --git a/raphtory/src/python/graph/disk_graph.rs b/raphtory/src/python/graph/disk_graph.rs index 70b9f7b084..4cb1f9fa64 100644 --- a/raphtory/src/python/graph/disk_graph.rs +++ b/raphtory/src/python/graph/disk_graph.rs @@ -14,7 +14,7 @@ use crate::{ python::{graph::graph::PyGraph, types::repr::StructReprBuilder}, }; use itertools::Itertools; -use pometry_storage::graph::load_node_const_properties; +use pometry_storage::graph::{load_node_const_properties, TemporalGraph}; use pyo3::{exceptions::PyRuntimeError, prelude::*, pybacked::PyBackedStr, types::PyDict}; use std::{ ops::Deref, @@ -262,6 +262,19 @@ impl PyDiskGraph { Self::load_from_dir(self.graph_dir().to_path_buf()) } + #[pyo3(signature = (location, chunk_size=20_000_000))] + pub fn append_node_temporal_properties( + &self, + location: &str, + chunk_size: usize, + ) -> Result { + let path = PathBuf::from_str(location).unwrap(); + let chunks = read_struct_arrays(&path, None)?; + let mut graph = TemporalGraph::new(self.graph.inner().graph_dir())?; + graph.load_temporal_node_props_from_chunks(chunks, chunk_size, false)?; + Self::load_from_dir(self.graph_dir().to_path_buf()) + } + /// Merge this graph with another `DiskGraph`. Note that both graphs should have nodes that are /// sorted by their global ids or the resulting graph will be nonsense! fn merge_by_sorted_gids( diff --git a/raphtory/src/python/graph/edge.rs b/raphtory/src/python/graph/edge.rs index d0399c2c61..277b528375 100644 --- a/raphtory/src/python/graph/edge.rs +++ b/raphtory/src/python/graph/edge.rs @@ -32,13 +32,13 @@ use std::{ /// PyEdge is a Python class that represents an edge in the graph. /// An edge is a directed connection between two nodes. 
-#[pyclass(name = "Edge", subclass)] +#[pyclass(name = "Edge", subclass, module = "raphtory", frozen)] #[derive(Clone)] pub struct PyEdge { pub(crate) edge: EdgeView, } -#[pyclass(name="MutableEdge", extends=PyEdge)] +#[pyclass(name="MutableEdge", extends=PyEdge, module="raphtory", frozen)] pub struct PyMutableEdge { edge: EdgeView, } diff --git a/raphtory/src/python/graph/edges.rs b/raphtory/src/python/graph/edges.rs index 429ae9ceae..6ee19100ba 100644 --- a/raphtory/src/python/graph/edges.rs +++ b/raphtory/src/python/graph/edges.rs @@ -24,7 +24,7 @@ use crate::{ }, utils::{ export::{create_row, extract_properties, get_column_names_from_props}, - NumpyArray, PyGenericIterable, PyTime, + NumpyArray, PyGenericIterable, }, }, }; @@ -34,7 +34,7 @@ use rayon::{iter::IntoParallelIterator, prelude::*}; use std::collections::HashMap; /// A list of edges that can be iterated over. -#[pyclass(name = "Edges")] +#[pyclass(name = "Edges", module = "raphtory", frozen)] pub struct PyEdges { edges: Edges<'static, DynamicGraph>, } diff --git a/raphtory/src/python/graph/graph.rs b/raphtory/src/python/graph/graph.rs index 5f48490171..d7b7495c7b 100644 --- a/raphtory/src/python/graph/graph.rs +++ b/raphtory/src/python/graph/graph.rs @@ -17,6 +17,7 @@ use crate::{ edge::PyEdge, graph_with_deletions::PyPersistentGraph, io::pandas_loaders::*, node::PyNode, views::graph_view::PyGraphView, }, + types::iterable::FromIterable, utils::{PyNodeRef, PyTime}, }, serialise::{StableDecode, StableEncode}, @@ -34,7 +35,7 @@ use std::{ /// Arguments: /// num_shards (int, optional): The number of locks to use in the storage to allow for multithreaded updates. #[derive(Clone)] -#[pyclass(name = "Graph", extends = PyGraphView, module = "raphtory")] +#[pyclass(name = "Graph", extends = PyGraphView, module = "raphtory", frozen)] pub struct PyGraph { pub graph: Graph, } @@ -118,7 +119,7 @@ impl PyGraph { } } -#[pyclass(module = "raphtory")] +#[pyclass(module = "raphtory", frozen)] pub struct PyGraphEncoder; #[pymethods] @@ -131,7 +132,7 @@ impl PyGraphEncoder { fn __call__(&self, bytes: Vec) -> Result { MaterializedGraph::decode_from_bytes(&bytes) } - fn __setstate__(&mut self) {} + fn __setstate__(&self) {} fn __getstate__(&self) {} } @@ -176,8 +177,12 @@ impl PyGraph { /// id (str|int): The id of the node. /// properties (PropInput, optional): The properties of the node. /// node_type (str, optional): The optional string which will be used as a node type + /// /// Returns: - /// MutableNode: The added node + /// MutableNode: The added node. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (timestamp, id, properties = None, node_type = None))] pub fn add_node( &self, @@ -197,8 +202,12 @@ impl PyGraph { /// id (str|int): The id of the node. /// properties (PropInput, optional): The properties of the node. /// node_type (str, optional): The optional string which will be used as a node type + /// /// Returns: - /// MutableNode: The created node + /// MutableNode: The created node. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (timestamp, id, properties = None, node_type = None))] pub fn create_node( &self, @@ -216,6 +225,12 @@ impl PyGraph { /// Arguments: /// timestamp (TimeInput): The timestamp of the temporal property. /// properties (PropInput): The temporal properties of the graph. + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. 
pub fn add_property( &self, timestamp: PyTime, @@ -228,6 +243,12 @@ impl PyGraph { /// /// Arguments: /// properties (PropInput): The static properties of the graph. + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. pub fn add_constant_properties( &self, properties: HashMap, @@ -240,6 +261,11 @@ impl PyGraph { /// Arguments: /// properties (PropInput): The static properties of the graph. /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. pub fn update_constant_properties( &self, properties: HashMap, @@ -257,7 +283,10 @@ impl PyGraph { /// layer (str, optional): The layer of the edge. /// /// Returns: - /// MutableEdge: The added edge + /// MutableEdge: The added edge. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (timestamp, src, dst, properties = None, layer = None))] pub fn add_edge( &self, @@ -273,74 +302,182 @@ impl PyGraph { /// Import a single node into the graph. /// - /// This function takes a PyNode object and an optional boolean flag. If the flag is set to true, - /// the function will force the import of the node even if it already exists in the graph. - /// /// Arguments: /// node (Node): A Node object representing the node to be imported. - /// force (bool): An optional boolean flag indicating whether to force the import of the node. + /// merge (bool): An optional boolean flag. + /// If merge is false, the function will return an error if the imported node already exists in the graph. + /// If merge is true, the function merges the histories of the imported node and the existing node (in the graph). /// /// Returns: - /// Node: A Result object which is Ok if the node was successfully imported, and Err otherwise. - #[pyo3(signature = (node, force = false))] + /// Node: A node object if the node was successfully imported. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (node, merge = false))] pub fn import_node( &self, node: PyNode, - force: bool, + merge: bool, + ) -> Result, GraphError> { + self.graph.import_node(&node.node, merge) + } + + /// Import a single node into the graph with new id. + /// + /// Arguments: + /// node (Node): A Node object representing the node to be imported. + /// new_id (str|int): The new node id. + /// merge (bool): An optional boolean flag. + /// If merge is false, the function will return an error if the imported node already exists in the graph. + /// If merge is true, the function merges the histories of the imported node and the existing node (in the graph). + /// + /// Returns: + /// Node: A node object if the node was successfully imported. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (node, new_id, merge = false))] + pub fn import_node_as( + &self, + node: PyNode, + new_id: GID, + merge: bool, ) -> Result, GraphError> { - self.graph.import_node(&node.node, force) + self.graph.import_node_as(&node.node, new_id, merge) } /// Import multiple nodes into the graph. /// - /// This function takes a vector of PyNode objects and an optional boolean flag. If the flag is set to true, - /// the function will force the import of the nodes even if they already exist in the graph. + /// Arguments: + /// nodes (List[Node]): A vector of Node objects representing the nodes to be imported. 
+ /// merge (bool): An optional boolean flag. + /// If merge is false, the function will return an error if any of the imported nodes already exists in the graph. + /// If merge is true, the function merges the histories of the imported nodes and the existing nodes (in the graph). + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (nodes, merge = false))] + pub fn import_nodes(&self, nodes: FromIterable, merge: bool) -> Result<(), GraphError> { + let node_views = nodes.iter().map(|node| &node.node); + self.graph.import_nodes(node_views, merge) + } + + /// Import multiple nodes into the graph with new ids. /// /// Arguments: + /// nodes (List[Node]): A vector of Node objects representing the nodes to be imported. + /// new_ids (List[str|int]): A list of node IDs to use for the imported nodes. + /// merge (bool): An optional boolean flag. + /// If merge is false, the function will return an error if any of the imported nodes already exists in the graph. + /// If merge is true, the function merges the histories of the imported nodes and the existing nodes (in the graph). /// - /// nodes (List[Node]): A vector of PyNode objects representing the nodes to be imported. - /// force (bool): An optional boolean flag indicating whether to force the import of the nodes. + /// Returns: + /// None: This function does not return a value, if the operation is successful. /// - #[pyo3(signature = (nodes, force = false))] - pub fn import_nodes(&self, nodes: Vec, force: bool) -> Result<(), GraphError> { + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (nodes, new_ids, merge = false))] + pub fn import_nodes_as( + &self, + nodes: Vec, + new_ids: Vec, + merge: bool, + ) -> Result<(), GraphError> { let node_views = nodes.iter().map(|node| &node.node); - self.graph.import_nodes(node_views, force) + self.graph.import_nodes_as(node_views, new_ids, merge) } /// Import a single edge into the graph. /// - /// This function takes a PyEdge object and an optional boolean flag. If the flag is set to true, - /// the function will force the import of the edge even if it already exists in the graph. - /// /// Arguments: - /// - /// edge (Edge): A PyEdge object representing the edge to be imported. - /// force (bool): An optional boolean flag indicating whether to force the import of the edge. + /// edge (Edge): A Edge object representing the edge to be imported. + /// merge (bool): An optional boolean flag. + /// If merge is false, the function will return an error if the imported edge already exists in the graph. + /// If merge is true, the function merges the histories of the imported edge and the existing edge (in the graph). /// /// Returns: - /// Edge: A Result object which is Ok if the edge was successfully imported, and Err otherwise. - #[pyo3(signature = (edge, force = false))] + /// EdgeView: An EdgeView object if the edge was successfully imported. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (edge, merge = false))] pub fn import_edge( &self, edge: PyEdge, - force: bool, + merge: bool, ) -> Result, GraphError> { - self.graph.import_edge(&edge.edge, force) + self.graph.import_edge(&edge.edge, merge) } - /// Import multiple edges into the graph. + /// Import a single edge into the graph with new id. /// - /// This function takes a vector of PyEdge objects and an optional boolean flag. 
If the flag is set to true, - /// the function will force the import of the edges even if they already exist in the graph. + /// Arguments: + /// edge (Edge): A Edge object representing the edge to be imported. + /// new_id (tuple) : The ID of the new edge. It's a tuple of the source and destination node ids. + /// merge (bool): An optional boolean flag. + /// If merge is false, the function will return an error if the imported edge already exists in the graph. + /// If merge is true, the function merges the histories of the imported edge and the existing edge (in the graph). + /// + /// Returns: + /// EdgeView: An EdgeView object if the edge was successfully imported. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (edge, new_id, merge = false))] + pub fn import_edge_as( + &self, + edge: PyEdge, + new_id: (GID, GID), + merge: bool, + ) -> Result, GraphError> { + self.graph.import_edge_as(&edge.edge, new_id, merge) + } + + /// Import multiple edges into the graph. /// /// Arguments: + /// edges (List[Edge]): A list of Edge objects representing the edges to be imported. + /// merge (bool): An optional boolean flag. + /// If merge is false, the function will return an error if any of the imported edges already exists in the graph. + /// If merge is true, the function merges the histories of the imported edges and the existing edges (in the graph). + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (edges, merge = false))] + pub fn import_edges(&self, edges: FromIterable, merge: bool) -> Result<(), GraphError> { + let edge_views = edges.iter().map(|edge| &edge.edge); + self.graph.import_edges(edge_views, merge) + } + + /// Import multiple edges into the graph with new ids. /// + /// Arguments: /// edges (List[Edge]): A list of Edge objects representing the edges to be imported. - /// force (bool): An optional boolean flag indicating whether to force the import of the edges. - #[pyo3(signature = (edges, force = false))] - pub fn import_edges(&self, edges: Vec, force: bool) -> Result<(), GraphError> { + /// new_ids (List[tuple]) - The IDs of the new edges. It's a vector of tuples of the source and destination node ids. + /// merge (bool): An optional boolean flag. + /// If merge is false, the function will return an error if any of the imported edges already exists in the graph. + /// If merge is true, the function merges the histories of the imported edges and the existing edges (in the graph). + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (edges, new_ids, merge = false))] + pub fn import_edges_as( + &self, + edges: Vec, + new_ids: Vec<(GID, GID)>, + merge: bool, + ) -> Result<(), GraphError> { let edge_views = edges.iter().map(|edge| &edge.edge); - self.graph.import_edges(edge_views, force) + self.graph.import_edges_as(edge_views, new_ids, merge) } //FIXME: This is reimplemented here to get mutable views. If we switch the underlying graph to enum dispatch, this won't be necessary! 
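As a rough usage sketch of the import API reworked above (the `force` flag is replaced by `merge`, and new `*_as` variants accept replacement ids), the snippet below is illustrative only: the graph names and sample ids are invented, not taken from this diff.

    from raphtory import Graph

    # Source graph with a node and an edge to copy from.
    g1 = Graph()
    g1.add_node(1, "alice", node_type="person")
    g1.add_edge(2, "alice", "bob")

    g2 = Graph()
    # merge=False (the default) raises GraphError if the imported entity already
    # exists in g2; merge=True merges the imported history into the existing entity.
    g2.import_node(g1.node("alice"), merge=True)
    g2.import_edge(g1.edge("alice", "bob"), merge=True)

    # The *_as variants import under new ids: a single id for nodes,
    # a (src, dst) tuple for edges.
    g2.import_node_as(g1.node("alice"), "alice_copy")
    g2.import_edge_as(g1.edge("alice", "bob"), ("alice_copy", "bob_copy"))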
@@ -350,7 +487,7 @@ impl PyGraph { /// id (str|int): the node id /// /// Returns: - /// Node: the node with the specified id, or None if the node does not exist + /// Node: The node object with the specified id, or None if the node does not exist pub fn node(&self, id: PyNodeRef) -> Option> { self.graph.node(id) } @@ -413,6 +550,12 @@ impl PyGraph { /// properties (List[str]): List of node property column names. Defaults to None. (optional) /// constant_properties (List[str]): List of constant node property column names. Defaults to None. (optional) /// shared_constant_properties (PropInput): A dictionary of constant properties that will be added to every node. Defaults to None. (optional) + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3( signature = (df,time, id, node_type = None, node_type_col = None, properties = None, constant_properties = None, shared_constant_properties = None) )] @@ -453,6 +596,12 @@ impl PyGraph { /// properties (List[str]): List of node property column names. Defaults to None. (optional) /// constant_properties (List[str]): List of constant node property column names. Defaults to None. (optional) /// shared_constant_properties (PropInput): A dictionary of constant properties that will be added to every node. Defaults to None. (optional) + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3( signature = (parquet_path, time, id, node_type = None, node_type_col = None, properties = None, constant_properties = None, shared_constant_properties = None) )] @@ -494,6 +643,12 @@ impl PyGraph { /// shared_constant_properties (PropInput): A dictionary of constant properties that will be added to every edge. Defaults to None. (optional) /// layer (str): A constant value to use as the layer for all edges (optional) Defaults to None. (cannot be used in combination with layer_col) /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. (cannot be used in combination with layer) + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3( signature = (df, time, src, dst, properties = None, constant_properties = None, shared_constant_properties = None, layer = None, layer_col = None) )] @@ -537,6 +692,12 @@ impl PyGraph { /// shared_constant_properties (PropInput): A dictionary of constant properties that will be added to every edge. Defaults to None. (optional) /// layer (str): A constant value to use as the layer for all edges (optional) Defaults to None. (cannot be used in combination with layer_col) /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. (cannot be used in combination with layer) + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3( signature = (parquet_path, time, src, dst, properties = None, constant_properties = None, shared_constant_properties = None, layer = None, layer_col = None) )] @@ -577,6 +738,12 @@ impl PyGraph { /// node_type_col (str): The node type col name in dataframe (optional) Defaults to None. 
(cannot be used in combination with node_type) /// constant_properties (List[str]): List of constant node property column names. Defaults to None. (optional) /// shared_constant_properties (PropInput): A dictionary of constant properties that will be added to every node. Defaults to None. (optional) + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (df, id, node_type=None, node_type_col=None, constant_properties = None, shared_constant_properties = None))] fn load_node_props_from_pandas( &self, @@ -608,6 +775,12 @@ impl PyGraph { /// node_type_col (str): The node type col name in dataframe (optional) Defaults to None. (cannot be used in combination with node_type) /// constant_properties (List[str]): List of constant node property column names. Defaults to None. (optional) /// shared_constant_properties (PropInput): A dictionary of constant properties that will be added to every node. Defaults to None. (optional) + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (parquet_path, id, node_type=None,node_type_col=None, constant_properties = None, shared_constant_properties = None))] fn load_node_props_from_parquet( &self, @@ -640,6 +813,12 @@ impl PyGraph { /// shared_constant_properties (PropInput): A dictionary of constant properties that will be added to every edge. Defaults to None. (optional) /// layer (str): The edge layer name (optional) Defaults to None. /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3( signature = (df, src, dst, constant_properties = None, shared_constant_properties = None, layer = None, layer_col = None) )] @@ -676,6 +855,12 @@ impl PyGraph { /// shared_constant_properties (PropInput): A dictionary of constant properties that will be added to every edge. Defaults to None. (optional) /// layer (str): The edge layer name (optional) Defaults to None. /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3( signature = (parquet_path, src, dst, constant_properties = None, shared_constant_properties = None, layer = None, layer_col = None) )] diff --git a/raphtory/src/python/graph/graph_with_deletions.rs b/raphtory/src/python/graph/graph_with_deletions.rs index e3f78469b1..1ea4aeb186 100644 --- a/raphtory/src/python/graph/graph_with_deletions.rs +++ b/raphtory/src/python/graph/graph_with_deletions.rs @@ -36,7 +36,7 @@ use std::{ /// A temporal graph that allows edges and nodes to be deleted. #[derive(Clone)] -#[pyclass(name = "PersistentGraph", extends = PyGraphView, frozen)] +#[pyclass(name = "PersistentGraph", extends = PyGraphView, frozen, module="raphtory")] pub struct PyPersistentGraph { pub(crate) graph: PersistentGraph, } @@ -118,7 +118,10 @@ impl PyPersistentGraph { /// node_type (str) : The optional string which will be used as a node type /// /// Returns: - /// None + /// None: This function does not return a value, if the operation is successful. 
+ /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (timestamp, id, properties = None, node_type = None))] pub fn add_node( &self, @@ -141,6 +144,9 @@ impl PyPersistentGraph { /// /// Returns: /// MutableNode + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (timestamp, id, properties = None, node_type = None))] pub fn create_node( &self, @@ -160,7 +166,10 @@ impl PyPersistentGraph { /// properties (dict): The temporal properties of the graph. /// /// Returns: - /// None + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. pub fn add_property( &self, timestamp: PyTime, @@ -175,7 +184,10 @@ impl PyPersistentGraph { /// properties (dict): The static properties of the graph. /// /// Returns: - /// None + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. pub fn add_constant_properties( &self, properties: HashMap, @@ -189,7 +201,10 @@ impl PyPersistentGraph { /// properties (dict): The static properties of the graph. /// /// Returns: - /// None + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. pub fn update_constant_properties( &self, properties: HashMap, @@ -207,7 +222,10 @@ impl PyPersistentGraph { /// layer (str): The layer of the edge. /// /// Returns: - /// None + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (timestamp, src, dst, properties = None, layer = None))] pub fn add_edge( &self, @@ -231,6 +249,9 @@ impl PyPersistentGraph { /// /// Returns: /// The deleted edge + /// + /// Raises: + /// GraphError: If the operation fails. #[pyo3(signature = (timestamp, src, dst, layer=None))] pub fn delete_edge( &self, @@ -249,7 +270,7 @@ impl PyPersistentGraph { /// id (str | int): the node id /// /// Returns: - /// the node with the specified id, or None if the node does not exist + /// The node with the specified id, or None if the node does not exist pub fn node(&self, id: PyNodeRef) -> Option> { self.graph.node(id) } @@ -262,7 +283,7 @@ impl PyPersistentGraph { /// dst (str | int): the destination node id /// /// Returns: - /// the edge with the specified source and destination nodes, or None if the edge does not exist + /// The edge with the specified source and destination nodes, or None if the edge does not exist #[pyo3(signature = (src, dst))] pub fn edge( &self, @@ -274,75 +295,189 @@ impl PyPersistentGraph { /// Import a single node into the graph. /// - /// This function takes a PyNode object and an optional boolean flag. If the flag is set to true, - /// the function will force the import of the node even if it already exists in the graph. + /// This function takes a node object and an optional boolean flag. If the flag is set to true, + /// the function will merge the import of the node even if it already exists in the graph. /// /// Arguments: - /// node (Node): A PyNode object representing the node to be imported. - /// force (bool): An optional boolean flag indicating whether to force the import of the node. + /// node (Node): A node object representing the node to be imported. + /// merge (bool): An optional boolean flag indicating whether to merge the import of the node. Defaults to False. 
/// /// Returns: - /// Result, GraphError> - A Result object which is Ok if the node was successfully imported, and Err otherwise. - #[pyo3(signature = (node, force = false))] + /// NodeView: A nodeview object if the node was successfully imported, and an error otherwise. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (node, merge = false))] pub fn import_node( &self, node: PyNode, - force: bool, + merge: bool, ) -> Result, GraphError> { - self.graph.import_node(&node.node, force) + self.graph.import_node(&node.node, merge) + } + + /// Import a single node into the graph with new id. + /// + /// This function takes a node object, a new node id and an optional boolean flag. If the flag is set to true, + /// the function will merge the import of the node even if it already exists in the graph. + /// + /// Arguments: + /// node (Node): A node object representing the node to be imported. + /// new_id (str|int): The new node id. + /// merge (bool): An optional boolean flag indicating whether to merge the import of the node. Defaults to False. + /// + /// Returns: + /// NodeView: A nodeview object if the node was successfully imported, and an error otherwise. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (node, new_id, merge = false))] + pub fn import_node_as( + &self, + node: PyNode, + new_id: GID, + merge: bool, + ) -> Result, GraphError> { + self.graph.import_node_as(&node.node, new_id, merge) } /// Import multiple nodes into the graph. /// - /// This function takes a vector of PyNode objects and an optional boolean flag. If the flag is set to true, - /// the function will force the import of the nodes even if they already exist in the graph. + /// This function takes a vector of node objects and an optional boolean flag. If the flag is set to true, + /// the function will merge the import of the nodes even if they already exist in the graph. /// /// Arguments: + /// nodes (List[Node]): A vector of node objects representing the nodes to be imported. + /// merge (bool): An optional boolean flag indicating whether to merge the import of the nodes. Defaults to False. /// - /// nodes (List[Node]): A vector of PyNode objects representing the nodes to be imported. - /// force (bool): An optional boolean flag indicating whether to force the import of the nodes. + /// Returns: + /// None: This function does not return a value, if the operation is successful. /// - #[pyo3(signature = (nodes, force = false))] - pub fn import_nodes(&self, nodes: Vec, force: bool) -> Result<(), GraphError> { + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (nodes, merge = false))] + pub fn import_nodes(&self, nodes: Vec, merge: bool) -> Result<(), GraphError> { let node_views = nodes.iter().map(|node| &node.node); - self.graph.import_nodes(node_views, force) + self.graph.import_nodes(node_views, merge) } - /// Import a single edge into the graph. + /// Import multiple nodes into the graph with new ids. /// - /// This function takes a PyEdge object and an optional boolean flag. If the flag is set to true, - /// the function will force the import of the edge even if it already exists in the graph. + /// This function takes a vector of node objects, a list of new node ids and an optional boolean flag. If the flag is set to true, + /// the function will merge the import of the nodes even if they already exist in the graph. 
/// /// Arguments: + /// nodes (List[Node]): A vector of node objects representing the nodes to be imported. + /// new_ids (List[str|int]): A list of node IDs to use for the imported nodes. + /// merge (bool): An optional boolean flag indicating whether to merge the import of the nodes. Defaults to False. /// - /// edge (Edge): A PyEdge object representing the edge to be imported. - /// force (bool): An optional boolean flag indicating whether to force the import of the edge. + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (nodes, new_ids, merge = false))] + pub fn import_nodes_as( + &self, + nodes: Vec, + new_ids: Vec, + merge: bool, + ) -> Result<(), GraphError> { + let node_views = nodes.iter().map(|node| &node.node); + self.graph.import_nodes_as(node_views, new_ids, merge) + } + + /// Import a single edge into the graph. + /// + /// This function takes an edge object and an optional boolean flag. If the flag is set to true, + /// the function will merge the import of the edge even if it already exists in the graph. + /// + /// Arguments: + /// edge (Edge): An edge object representing the edge to be imported. + /// merge (bool): An optional boolean flag indicating whether to merge the import of the edge. Defaults to False. /// /// Returns: /// Edge: The imported edge. - #[pyo3(signature = (edge, force = false))] + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (edge, merge = false))] pub fn import_edge( &self, edge: PyEdge, - force: bool, + merge: bool, + ) -> Result, GraphError> { + self.graph.import_edge(&edge.edge, merge) + } + + /// Import a single edge into the graph with new id. + /// + /// This function takes a edge object, a new edge id and an optional boolean flag. If the flag is set to true, + /// the function will merge the import of the edge even if it already exists in the graph. + /// + /// Arguments: + /// edge (Edge): A edge object representing the edge to be imported. + /// new_id (tuple) : The ID of the new edge. It's a tuple of the source and destination node ids. + /// merge (bool): An optional boolean flag indicating whether to merge the import of the edge. Defaults to False. + /// + /// Returns: + /// Edge: The imported edge. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (edge, new_id, merge = false))] + pub fn import_edge_as( + &self, + edge: PyEdge, + new_id: (GID, GID), + merge: bool, ) -> Result, GraphError> { - self.graph.import_edge(&edge.edge, force) + self.graph.import_edge_as(&edge.edge, new_id, merge) } /// Import multiple edges into the graph. /// - /// This function takes a vector of PyEdge objects and an optional boolean flag. If the flag is set to true, - /// the function will force the import of the edges even if they already exist in the graph. + /// This function takes a vector of edge objects and an optional boolean flag. If the flag is set to true, + /// the function will merge the import of the edges even if they already exist in the graph. /// /// Arguments: + /// edges (List[Edge]): A vector of edge objects representing the edges to be imported. + /// merge (bool): An optional boolean flag indicating whether to merge the import of the edges. Defaults to False. /// - /// edges (List[Edge]): A vector of PyEdge objects representing the edges to be imported. 
- /// force (bool): An optional boolean flag indicating whether to force the import of the edges. + /// Returns: + /// None: This function does not return a value, if the operation is successful. /// - #[pyo3(signature = (edges, force = false))] - pub fn import_edges(&self, edges: Vec, force: bool) -> Result<(), GraphError> { + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (edges, merge = false))] + pub fn import_edges(&self, edges: Vec, merge: bool) -> Result<(), GraphError> { let edge_views = edges.iter().map(|edge| &edge.edge); - self.graph.import_edges(edge_views, force) + self.graph.import_edges(edge_views, merge) + } + + /// Import multiple edges into the graph with new ids. + /// + /// This function takes a vector of edge objects, a list of new edge ids and an optional boolean flag. If the flag is set to true, + /// the function will merge the import of the edges even if they already exist in the graph. + /// + /// Arguments: + /// edges (List[Edge]): A vector of edge objects representing the edges to be imported. + /// merge (bool): An optional boolean flag indicating whether to merge the import of the edges. Defaults to False. + /// + /// Returns: + /// None: This function does not return a value, if the operation is successful. + /// + /// Raises: + /// GraphError: If the operation fails. + #[pyo3(signature = (edges, new_ids, merge = false))] + pub fn import_edges_as( + &self, + edges: Vec, + new_ids: Vec<(GID, GID)>, + merge: bool, + ) -> Result<(), GraphError> { + let edge_views = edges.iter().map(|edge| &edge.edge); + self.graph.import_edges_as(edge_views, new_ids, merge) } //****** Saving And Loading ******// @@ -377,8 +512,9 @@ impl PyPersistentGraph { /// properties (List[str]): List of node property column names. Defaults to None. (optional) /// constant_properties (List[str]): List of constant node property column names. Defaults to None. (optional) /// shared_constant_properties (dict): A dictionary of constant properties that will be added to every node. Defaults to None. (optional) + /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. @@ -420,8 +556,9 @@ impl PyPersistentGraph { /// properties (List[str]): List of node property column names. Defaults to None. (optional) /// constant_properties (List[str]): List of constant node property column names. Defaults to None. (optional) /// shared_constant_properties (dict): A dictionary of constant properties that will be added to every node. Defaults to None. (optional) + /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. @@ -464,8 +601,9 @@ impl PyPersistentGraph { /// shared_constant_properties (dict): A dictionary of constant properties that will be added to every edge. Defaults to None. (optional) /// layer (str): A constant value to use as the layer for all edges (optional) Defaults to None. (cannot be used in combination with layer_col) /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. (cannot be used in combination with layer) + /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. 
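The hunks above and below normalise the Returns/Raises sections of the pandas and parquet bulk loaders. A minimal, hedged sketch of the pandas path on a PersistentGraph follows; the column names and sample data are invented, and the loader method name (`load_edges_from_pandas`) is assumed from the documented parameter lists rather than spelled out verbatim in these hunks.

    import pandas as pd
    from raphtory import PersistentGraph

    edges = pd.DataFrame({
        "time": [1, 2, 3],
        "src": ["a", "a", "b"],
        "dst": ["b", "c", "c"],
        "weight": [1.0, 2.0, 0.5],
        "channel": ["email", "email", "chat"],
    })

    g = PersistentGraph()
    # Returns None on success and raises GraphError on failure, as documented above.
    g.load_edges_from_pandas(
        edges, time="time", src="src", dst="dst",
        properties=["weight"], layer_col="channel",
    )
    # Deletions are separate timestamped events on a PersistentGraph.
    g.delete_edge(4, "a", "b", layer="email")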
@@ -510,8 +648,9 @@ impl PyPersistentGraph { /// shared_constant_properties (dict): A dictionary of constant properties that will be added to every edge. Defaults to None. (optional) /// layer (str): A constant value to use as the layer for all edges (optional) Defaults to None. (cannot be used in combination with layer_col) /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. (cannot be used in combination with layer) + /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. @@ -553,8 +692,9 @@ impl PyPersistentGraph { /// dst (str): The column name for the destination node ids. /// layer (str): A constant value to use as the layer for all edges (optional) Defaults to None. (cannot be used in combination with layer_col) /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. (cannot be used in combination with layer) + /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. @@ -580,8 +720,9 @@ impl PyPersistentGraph { /// time (str): The column name for the update timestamps. /// layer (str): A constant value to use as the layer for all edges (optional) Defaults to None. (cannot be used in combination with layer_col) /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. (cannot be used in combination with layer) + /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. @@ -617,7 +758,7 @@ impl PyPersistentGraph { /// shared_constant_properties (dict): A dictionary of constant properties that will be added to every node. Defaults to None. (optional) /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. @@ -654,7 +795,7 @@ impl PyPersistentGraph { /// shared_constant_properties (dict): A dictionary of constant properties that will be added to every node. Defaults to None. (optional) /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. @@ -692,7 +833,7 @@ impl PyPersistentGraph { /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. @@ -732,7 +873,7 @@ impl PyPersistentGraph { /// layer_col (str): The edge layer col name in dataframe (optional) Defaults to None. /// /// Returns: - /// None: If the operation is successful. + /// None: This function does not return a value, if the operation is successful. /// /// Raises: /// GraphError: If the operation fails. diff --git a/raphtory/src/python/graph/index.rs b/raphtory/src/python/graph/index.rs index e06f253081..0e9b6d83e7 100644 --- a/raphtory/src/python/graph/index.rs +++ b/raphtory/src/python/graph/index.rs @@ -24,7 +24,7 @@ impl PyGraphView { /// A searchable Index for a `Graph`. 
This allows for fuzzy and exact searches of nodes and edges. /// This makes use of Tantivity internally to provide the search functionality. /// To create a graph index, call `graph.index()` on any `Graph` object in python. -#[pyclass] +#[pyclass(frozen, module = "raphtory")] pub struct GraphIndex { graph: IndexedGraph, } diff --git a/raphtory/src/python/graph/mod.rs b/raphtory/src/python/graph/mod.rs index 008b470860..39bc5381a3 100644 --- a/raphtory/src/python/graph/mod.rs +++ b/raphtory/src/python/graph/mod.rs @@ -10,5 +10,6 @@ pub mod edges; pub mod index; pub mod io; pub mod node; +pub mod node_state; pub mod properties; pub mod views; diff --git a/raphtory/src/python/graph/node.rs b/raphtory/src/python/graph/node.rs index 7d03edc09e..9bd3201997 100644 --- a/raphtory/src/python/graph/node.rs +++ b/raphtory/src/python/graph/node.rs @@ -10,7 +10,7 @@ use crate::{ db::{ api::{ properties::Properties, - state::{LazyNodeState, NodeStateOps}, + state::{ops, LazyNodeState, NodeStateOps}, view::{ internal::{CoreGraphOps, DynamicGraph, Immutable, IntoDynamic, MaterializedGraph}, *, @@ -61,7 +61,7 @@ use rayon::{iter::IntoParallelIterator, prelude::*}; use std::collections::HashMap; /// A node (or node) in the graph. -#[pyclass(name = "Node", subclass)] +#[pyclass(name = "Node", subclass, module = "raphtory", frozen)] #[derive(Clone)] pub struct PyNode { pub node: NodeView, @@ -239,7 +239,7 @@ impl PyNode { /// Returns the history of a node, including node additions and changes made to node. /// /// Returns: - /// List[Datetime]: A list of timestamps of the event history of node. + /// List[datetime]: A list of timestamps of the event history of node. /// pub fn history_date_time(&self) -> Option>> { self.node.history_date_time() @@ -295,7 +295,7 @@ impl<'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>> Repr for NodeVie } } -#[pyclass(name = "MutableNode", extends = PyNode)] +#[pyclass(name = "MutableNode", extends = PyNode, module="raphtory", frozen)] pub struct PyMutableNode { node: NodeView, } @@ -373,9 +373,6 @@ impl PyMutableNode { /// Parameters: /// t (TimeInput): The timestamp at which the updates should be applied. /// properties (PropInput): A dictionary of properties to update. Each key is a string representing the property name, and each value is of type Prop representing the property value. If None, no properties are updated. - /// - /// Returns: - /// Result: A result object indicating success or failure. On failure, it contains a GraphError. #[pyo3(signature = (t, properties=None))] pub fn add_updates( &self, @@ -423,7 +420,7 @@ impl PyMutableNode { } /// A list of nodes that can be iterated over. 
-#[pyclass(name = "Nodes")] +#[pyclass(name = "Nodes", module = "raphtory", frozen)] pub struct PyNodes { pub(crate) nodes: Nodes<'static, DynamicGraph, DynamicGraph>, } @@ -502,19 +499,21 @@ impl PyNodes { /// Returns an iterator over the nodes ids #[getter] - fn id(&self) -> LazyNodeState<'static, GID, DynamicGraph, DynamicGraph> { + fn id(&self) -> LazyNodeState<'static, ops::Id, DynamicGraph, DynamicGraph> { self.nodes.id() } /// Returns an iterator over the nodes name #[getter] - fn name(&self) -> LazyNodeState<'static, String, DynamicGraph, DynamicGraph> { + fn name(&self) -> LazyNodeState<'static, ops::Name, DynamicGraph, DynamicGraph> { self.nodes.name() } /// Returns an iterator over the nodes earliest time #[getter] - fn earliest_time(&self) -> LazyNodeState<'static, Option, DynamicGraph, DynamicGraph> { + fn earliest_time( + &self, + ) -> LazyNodeState<'static, ops::EarliestTime, DynamicGraph, DynamicGraph> { self.nodes.earliest_time() } @@ -523,13 +522,19 @@ impl PyNodes { /// Returns: /// Earliest time of the nodes. #[getter] - fn earliest_date_time(&self) -> LazyNodeState<'static, Option>, DynamicGraph> { + fn earliest_date_time( + &self, + ) -> LazyNodeState< + 'static, + ops::Map, Option>>, + DynamicGraph, + > { self.nodes.earliest_date_time() } /// Returns an iterator over the nodes latest time #[getter] - fn latest_time(&self) -> LazyNodeState<'static, Option, DynamicGraph> { + fn latest_time(&self) -> LazyNodeState<'static, ops::LatestTime, DynamicGraph> { self.nodes.latest_time() } @@ -538,7 +543,13 @@ impl PyNodes { /// Returns: /// Latest date time of the nodes. #[getter] - fn latest_date_time(&self) -> LazyNodeState<'static, Option>, DynamicGraph> { + fn latest_date_time( + &self, + ) -> LazyNodeState< + 'static, + ops::Map, Option>>, + DynamicGraph, + > { self.nodes.latest_date_time() } @@ -547,13 +558,13 @@ impl PyNodes { /// Returns: /// A list of unix timestamps. 
/// - fn history(&self) -> LazyNodeState<'static, Vec, DynamicGraph> { + fn history(&self) -> LazyNodeState<'static, ops::History, DynamicGraph> { self.nodes.history() } /// Returns the type of node #[getter] - fn node_type(&self) -> LazyNodeState<'static, Option, DynamicGraph> { + fn node_type(&self) -> LazyNodeState<'static, ops::Type, DynamicGraph> { self.nodes.node_type() } @@ -564,7 +575,11 @@ impl PyNodes { /// fn history_date_time( &self, - ) -> LazyNodeState<'static, Option>>, DynamicGraph> { + ) -> LazyNodeState< + 'static, + ops::Map, Option>>>, + DynamicGraph, + > { self.nodes.history_date_time() } @@ -582,7 +597,7 @@ impl PyNodes { /// /// Returns: /// An iterator of the number of edges of the nodes - fn degree(&self) -> LazyNodeState<'static, usize, DynamicGraph> { + fn degree(&self) -> LazyNodeState<'static, ops::Degree, DynamicGraph> { self.nodes.degree() } @@ -590,7 +605,7 @@ impl PyNodes { /// /// Returns: /// An iterator of the number of in edges of the nodes - fn in_degree(&self) -> LazyNodeState<'static, usize, DynamicGraph> { + fn in_degree(&self) -> LazyNodeState<'static, ops::Degree, DynamicGraph> { self.nodes.in_degree() } @@ -598,7 +613,7 @@ impl PyNodes { /// /// Returns: /// An iterator of the number of out edges of the nodes - fn out_degree(&self) -> LazyNodeState<'static, usize, DynamicGraph> { + fn out_degree(&self) -> LazyNodeState<'static, ops::Degree, DynamicGraph> { self.nodes.out_degree() } diff --git a/raphtory/src/python/graph/node_state.rs b/raphtory/src/python/graph/node_state.rs new file mode 100644 index 0000000000..f2162b30ce --- /dev/null +++ b/raphtory/src/python/graph/node_state.rs @@ -0,0 +1,505 @@ +use pyo3::prelude::*; + +use crate::{ + add_classes, + core::entities::nodes::node_ref::{AsNodeRef, NodeRef}, + db::{ + api::{ + state::{ops, LazyNodeState, NodeOp, NodeState, NodeStateOps, OrderedNodeStateOps}, + view::{ + internal::Static, DynamicGraph, GraphViewOps, IntoDynHop, IntoDynamic, + StaticGraphViewOps, + }, + }, + graph::node::NodeView, + }, + prelude::*, + py_borrowing_iter, + python::{ + types::{repr::Repr, wrappers::iterators::PyBorrowingIterator}, + utils::PyNodeRef, + }, +}; +use chrono::{DateTime, Utc}; +use pyo3::{ + exceptions::{PyKeyError, PyTypeError}, + types::PyNotImplemented, +}; +use raphtory_api::core::{entities::GID, storage::arc_str::ArcStr}; +use std::{collections::HashMap, sync::Arc}; + +macro_rules! 
impl_node_state_ops { + ($name:ident, $value:ty, $inner_t:ty, $to_owned:expr, $computed:literal, $py_value:literal) => { + impl $name { + pub fn iter(&self) -> impl Iterator + '_ { + self.inner.values().map($to_owned) + } + } + + #[pymethods] + impl $name { + fn __len__(&self) -> usize { + self.inner.len() + } + + /// Iterate over nodes + /// + /// Returns: + /// Iterator[Node] + fn nodes(&self) -> PyBorrowingIterator { + py_borrowing_iter!(self.inner.clone(), $inner_t, |inner| { + inner.nodes().map(|n| n.cloned()) + }) + } + + fn __iter__(&self) -> PyBorrowingIterator { + py_borrowing_iter!(self.inner.clone(), $inner_t, |inner| inner + .values() + .map($to_owned)) + } + + fn __getitem__(&self, node: PyNodeRef) -> PyResult<$value> { + let node = node.as_node_ref(); + self.inner + .get_by_node(node) + .map($to_owned) + .ok_or_else(|| match node { + NodeRef::External(id) => { + PyKeyError::new_err(format!("Missing value for node with id {id}")) + } + NodeRef::Internal(vid) => { + let node = self.inner.graph().node(vid); + match node { + Some(node) => { + PyKeyError::new_err(format!("Missing value {}", node.repr())) + } + None => PyTypeError::new_err("Invalid node reference"), + } + } + }) + } + + /// Returns: + #[doc = concat!(" Iterator[Tuple[Node, ", $py_value, "]]")] + fn items(&self) -> PyBorrowingIterator { + py_borrowing_iter!(self.inner.clone(), $inner_t, |inner| inner + .iter() + .map(|(n, v)| (n.cloned(), ($to_owned)(v)))) + } + + /// Returns: + #[doc = concat!(" Iterator[",$py_value, "]")] + fn values(&self) -> PyBorrowingIterator { + self.__iter__() + } + + /// Sort results by node id + /// + /// Returns: + #[doc = concat!(" ", $computed)] + fn sorted_by_id(&self) -> NodeState<'static, $value, DynamicGraph> { + self.inner.sort_by_id() + } + + fn __repr__(&self) -> String { + self.inner.repr() + } + } + }; +} + +macro_rules! impl_node_state_ord_ops { + ($name:ident, $value:ty, $to_owned:expr, $computed:literal, $py_value:literal) => { + #[pymethods] + impl $name { + /// Sort by value + /// + /// Arguments: + /// reverse (bool): If `True`, sort in descending order, otherwise ascending. Defaults to False. 
+ /// + /// Returns: + #[doc = concat!(" ", $computed)] + #[pyo3(signature = (reverse = false))] + fn sorted(&self, reverse: bool) -> NodeState<'static, $value, DynamicGraph> { + self.inner.sort_by_values(reverse) + } + + /// Compute the k largest values + /// + /// Arguments: + /// k (int): The number of values to return + /// + /// Returns: + #[doc = concat!(" ", $computed)] + fn top_k(&self, k: usize) -> NodeState<'static, $value, DynamicGraph> { + self.inner.top_k(k) + } + + /// Compute the k smallest values + /// + /// Arguments: + /// k (int): The number of values to return + /// + /// Returns: + #[doc = concat!(" ", $computed)] + fn bottom_k(&self, k: usize) -> NodeState<'static, $value, DynamicGraph> { + self.inner.bottom_k(k) + } + + /// Return smallest value and corresponding node + /// + /// Returns: + #[doc = concat!(" Optional[Tuple[Node, ", $py_value,"]]")] + fn min_item(&self) -> Option<(NodeView, $value)> { + self.inner + .min_item() + .map(|(n, v)| (n.cloned(), ($to_owned)(v))) + } + + /// Return the minimum value + /// + /// Returns: + #[doc = concat!(" Optional[", $py_value, "]")] + fn min(&self) -> Option<$value> { + self.inner.min().map($to_owned) + } + + /// Return largest value and corresponding node + /// + /// Returns: + #[doc = concat!(" Optional[Tuple[Node, ", $py_value,"]]")] + fn max_item(&self) -> Option<(NodeView, $value)> { + self.inner + .max_item() + .map(|(n, v)| (n.cloned(), ($to_owned)(v))) + } + + /// Return the maximum value + /// + /// Returns: + #[doc = concat!(" Optional[", $py_value, "]")] + fn max(&self) -> Option<$value> { + self.inner.max().map($to_owned) + } + + /// Return the median value + /// + /// Returns: + #[doc = concat!(" Optional[", $py_value, "]")] + fn median(&self) -> Option<$value> { + self.inner.median().map($to_owned) + } + + /// Return medain value and corresponding node + /// + /// Returns: + #[doc = concat!(" Optional[Tuple[Node, ", $py_value,"]]")] + fn median_item(&self) -> Option<(NodeView, $value)> { + self.inner + .median_item() + .map(|(n, v)| (n.cloned(), ($to_owned)(v))) + } + + fn __eq__<'py>(&self, other: &Bound<'py, PyAny>, py: Python<'py>) -> PyObject { + if let Ok(other) = other.downcast::() { + let other = Bound::borrow(other); + return self.inner.values().eq(other.inner.values()).into_py(py); + } else if let Ok(other) = other.extract::>() { + return self + .inner + .values() + .map($to_owned) + .eq(other.into_iter()) + .into_py(py); + } else if let Ok(other) = other.extract::>() { + return (self.inner.len() == other.len() + && other.into_iter().all(|(node, value)| { + self.inner.get_by_node(node).map($to_owned) == Some(value) + })) + .into_py(py); + } + PyNotImplemented::get_bound(py).into_py(py) + } + } + }; +} + +macro_rules! impl_node_state_num_ops { + ($name:ident, $value:ty, $py_value:literal) => { + #[pymethods] + impl $name { + /// sum of values over all nodes + /// + /// Returns: + #[doc= concat!(" ", $py_value)] + fn sum(&self) -> $value { + self.inner.sum() + } + + /// mean of values over all nodes + /// + /// Returns: + /// float + fn mean(&self) -> f64 { + self.inner.mean() + } + } + }; +} + +macro_rules! 
impl_lazy_node_state { + ($name:ident<$op:ty>, $computed:literal, $py_value:literal) => { + /// A lazy view over node values + #[pyclass(module = "raphtory.node_state", frozen)] + pub struct $name { + inner: LazyNodeState<'static, $op, DynamicGraph, DynamicGraph>, + } + + #[pymethods] + impl $name { + /// Compute all values and return the result as a node view + /// + /// Returns: + #[doc = concat!(" ", $computed)] + fn compute( + &self, + ) -> NodeState<'static, <$op as NodeOp>::Output, DynamicGraph, DynamicGraph> { + self.inner.compute() + } + + /// Compute all values and return the result as a list + /// + /// Returns + #[doc = concat!(" list[", $py_value, "]")] + fn collect(&self) -> Vec<<$op as NodeOp>::Output> { + self.inner.collect() + } + } + + impl_node_state_ops!( + $name, + <$op as NodeOp>::Output, + LazyNodeState<'static, $op, DynamicGraph, DynamicGraph>, + |v: <$op as NodeOp>::Output| v, + $computed, + $py_value + ); + + impl From> for $name { + fn from(inner: LazyNodeState<'static, $op, DynamicGraph, DynamicGraph>) -> Self { + $name { inner } + } + } + + impl pyo3::IntoPy for LazyNodeState<'static, $op, DynamicGraph, DynamicGraph> { + fn into_py(self, py: Python<'_>) -> PyObject { + $name::from(self).into_py(py) + } + } + }; +} + +macro_rules! impl_node_state { + ($name:ident<$value:ty>, $computed:literal, $py_value:literal) => { + #[pyclass(module = "raphtory.node_state", frozen)] + pub struct $name { + inner: Arc>, + } + + impl_node_state_ops!( + $name, + $value, + Arc>, + |v: &$value| v.clone(), + $computed, + $py_value + ); + + impl From> for $name { + fn from(inner: NodeState<'static, $value, DynamicGraph, DynamicGraph>) -> Self { + $name { + inner: inner.into(), + } + } + } + + impl pyo3::IntoPy for NodeState<'static, $value, DynamicGraph, DynamicGraph> { + fn into_py(self, py: Python<'_>) -> PyObject { + $name::from(self).into_py(py) + } + } + }; +} + +macro_rules! impl_lazy_node_state_ord { + ($name:ident<$value:ty>, $computed:literal, $py_value:literal) => { + impl_lazy_node_state!($name<$value>, $computed, $py_value); + impl_node_state_ord_ops!( + $name, + <$value as NodeOp>::Output, + |v: <$value as NodeOp>::Output| v, + $computed, + $py_value + ); + }; +} + +macro_rules! impl_node_state_ord { + ($name:ident<$value:ty>, $computed:literal, $py_value:literal) => { + impl_node_state!($name<$value>, $computed, $py_value); + impl_node_state_ord_ops!($name, $value, |v: &$value| v.clone(), $computed, $py_value); + }; +} + +macro_rules! impl_lazy_node_state_num { + ($name:ident<$value:ty>, $computed:literal, $py_value:literal) => { + impl_lazy_node_state_ord!($name<$value>, $computed, $py_value); + impl_node_state_num_ops!($name, <$value as NodeOp>::Output, $py_value); + }; +} + +macro_rules! impl_node_state_num { + ($name:ident<$value:ty>, $computed:literal, $py_value:literal) => { + impl_node_state_ord!($name<$value>, $computed, $py_value); + impl_node_state_num_ops!($name, $value, $py_value); + }; +} + +macro_rules! 
impl_one_hop { + ($name:ident<$($path:ident)::+>, $py_name:literal) => { + impl IntoPy + for LazyNodeState<'static, $($path)::+, DynamicGraph, DynamicGraph> + { + fn into_py(self, py: Python<'_>) -> PyObject { + self.into_dyn_hop().into_py(py) + } + } + + impl_timeops!($name, inner, LazyNodeState<'static, $($path)::+, DynamicGraph>, $py_name); + impl_layerops!($name, inner, LazyNodeState<'static, $($path)::+, DynamicGraph>, $py_name); + } + } + +impl_lazy_node_state_num!( + DegreeView>, + "NodeStateUsize", + "int" +); +impl_one_hop!(DegreeView, "DegreeView"); + +impl_node_state_num!(NodeStateUsize, "NodeStateUsize", "int"); + +impl_node_state_num!(NodeStateU64, "NodeStateU64", "int"); + +impl_lazy_node_state_ord!(IdView, "NodeStateGID", "GID"); +impl_node_state_ord!(NodeStateGID, "NodeStateGID", "GID"); + +impl_lazy_node_state_ord!( + EarliestTimeView>, + "NodeStateOptionI64", + "Optional[int]" +); +impl_one_hop!(EarliestTimeView, "EarliestTimeView"); +impl_lazy_node_state_ord!( + LatestTimeView>, + "NodeStateOptionI64", + "Optional[int]" +); +impl_one_hop!(LatestTimeView, "LatestTimeView"); +impl_node_state_ord!( + NodeStateOptionI64>, + "NodeStateOptionI64", + "Optional[int]" +); + +impl_lazy_node_state_ord!(NameView, "NodeStateString", "str"); +impl_node_state_ord!(NodeStateString, "NodeStateString", "str"); + +type EarliestDateTime = ops::Map, Option>>; +impl_lazy_node_state_ord!( + EarliestDateTimeView>, + "NodeStateOptionDateTime", + "Optional[Datetime]" +); +impl_one_hop!( + EarliestDateTimeView, + "EarliestDateTimeView" +); + +type LatestDateTime = ops::Map, Option>>; +impl_lazy_node_state_ord!( + LatestDateTimeView, Option>>>, + "NodeStateOptionDateTime", + "Optional[Datetime]" +); +impl_one_hop!(LatestDateTimeView, "LatestDateTimeView"); +impl_node_state_ord!( + NodeStateOptionDateTime>>, + "NodeStateOptionDateTime", + "Optional[Datetime]" +); + +impl_lazy_node_state_ord!( + HistoryView>, + "NodeStateListI64", + "list[int]" +); +impl_one_hop!(HistoryView, "HistoryView"); +impl_node_state_ord!(NodeStateListI64>, "NodeStateListI64", "list[int]"); + +type HistoryDateTime = ops::Map, Option>>>; +impl_lazy_node_state_ord!( + HistoryDateTimeView>, + "NodeStateOptionListDateTime", + "Optional[list[Datetime]]" +); +impl_one_hop!(HistoryDateTimeView, "HistoryDateTimeView"); +impl_node_state_ord!( + NodeStateOptionListDateTime>>>, + "NodeStateOptionListDateTime", + "Optional[list[Datetime]]" +); + +impl_lazy_node_state_ord!( + NodeTypeView, + "NodeStateOptionStr", + "Optional[str]" +); +impl_node_state_ord!( + NodeStateOptionStr>, + "NodeStateOptionStr", + "Optional[str]" +); + +impl_node_state_ord!( + NodeStateListDateTime>>, + "NodeStateListDateTime", + "list[Datetime]" +); + +pub fn base_node_state_module(py: Python<'_>) -> PyResult> { + let m = PyModule::new_bound(py, "node_state")?; + add_classes!( + &m, + DegreeView, + NodeStateUsize, + NodeStateU64, + IdView, + NodeStateGID, + EarliestTimeView, + LatestTimeView, + NameView, + NodeStateString, + EarliestDateTimeView, + LatestDateTimeView, + NodeStateOptionDateTime, + HistoryView, + NodeStateListI64, + HistoryDateTimeView, + NodeStateOptionListDateTime, + NodeTypeView, + NodeStateOptionStr, + NodeStateListDateTime + ); + Ok(m) +} diff --git a/raphtory/src/python/graph/properties/constant_props.rs b/raphtory/src/python/graph/properties/constant_props.rs index 622208b195..d177a0e22a 100644 --- a/raphtory/src/python/graph/properties/constant_props.rs +++ b/raphtory/src/python/graph/properties/constant_props.rs @@ -33,7 +33,7 @@ impl<'a, 
P: PropertiesOps> Repr for ConstProperties<'a, P> { } /// A view of constant properties of an entity -#[pyclass(name = "ConstProperties")] +#[pyclass(name = "ConstProperties", module = "raphtory", frozen)] pub struct PyConstProperties { props: DynConstProperties, } diff --git a/raphtory/src/python/graph/properties/props.rs b/raphtory/src/python/graph/properties/props.rs index 4082bc625b..6c586753ce 100644 --- a/raphtory/src/python/graph/properties/props.rs +++ b/raphtory/src/python/graph/properties/props.rs @@ -76,7 +76,7 @@ impl From for PyPropsComp { } /// A view of the properties of an entity -#[pyclass(name = "Properties")] +#[pyclass(name = "Properties", module = "raphtory", frozen)] pub struct PyProperties { props: DynProperties, } diff --git a/raphtory/src/python/graph/properties/temporal_props.rs b/raphtory/src/python/graph/properties/temporal_props.rs index 67bd03a912..181e536d72 100644 --- a/raphtory/src/python/graph/properties/temporal_props.rs +++ b/raphtory/src/python/graph/properties/temporal_props.rs @@ -75,7 +75,7 @@ impl<'source> FromPyObject<'source> for PyTemporalPropsCmp { } /// A view of the temporal properties of an entity -#[pyclass(name = "TemporalProperties")] +#[pyclass(name = "TemporalProperties", module = "raphtory", frozen)] pub struct PyTemporalProperties { props: DynTemporalProperties, } @@ -180,7 +180,7 @@ impl PyTemporalProperties { } /// A view of a temporal property -#[pyclass(name = "TemporalProp")] +#[pyclass(name = "TemporalProp", module = "raphtory", frozen)] pub struct PyTemporalProp { prop: DynTemporalProperty, } diff --git a/raphtory/src/python/graph/views/graph_view.rs b/raphtory/src/python/graph/views/graph_view.rs index 7a08c43e1f..47096dd30f 100644 --- a/raphtory/src/python/graph/views/graph_view.rs +++ b/raphtory/src/python/graph/views/graph_view.rs @@ -36,7 +36,7 @@ use crate::{ repr::{Repr, StructReprBuilder}, wrappers::prop::PyPropertyFilter, }, - utils::{PyNodeRef, PyTime}, + utils::PyNodeRef, }, }; use chrono::prelude::*; @@ -67,7 +67,7 @@ impl<'source> FromPyObject<'source> for DynamicGraph { } /// Graph view is a read-only version of a graph at a certain point in time. 
-#[pyclass(name = "GraphView", frozen, subclass)] +#[pyclass(name = "GraphView", frozen, subclass, module = "raphtory")] #[derive(Clone)] #[repr(C)] pub struct PyGraphView { @@ -136,6 +136,9 @@ impl IntoPy #[pymethods] impl PyGraphView { /// Return all the layer ids in the graph + /// + /// Returns: + /// list[str] #[getter] pub fn unique_layers(&self) -> Vec { self.graph.unique_layers().collect() @@ -146,7 +149,7 @@ impl PyGraphView { /// Timestamp of earliest activity in the graph /// /// Returns: - /// the timestamp of the earliest activity in the graph + /// Optional[int]: the timestamp of the earliest activity in the graph #[getter] pub fn earliest_time(&self) -> Option { self.graph.earliest_time() @@ -155,7 +158,7 @@ impl PyGraphView { /// DateTime of earliest activity in the graph /// /// Returns: - /// the datetime of the earliest activity in the graph + /// Optional[Datetime]: the datetime of the earliest activity in the graph #[getter] pub fn earliest_date_time(&self) -> Option> { self.graph.earliest_date_time() @@ -164,7 +167,7 @@ impl PyGraphView { /// Timestamp of latest activity in the graph /// /// Returns: - /// the timestamp of the latest activity in the graph + /// Optional[int]: the timestamp of the latest activity in the graph #[getter] pub fn latest_time(&self) -> Option { self.graph.latest_time() @@ -173,7 +176,7 @@ impl PyGraphView { /// DateTime of latest activity in the graph /// /// Returns: - /// the datetime of the latest activity in the graph + /// Optional[Datetime]: the datetime of the latest activity in the graph #[getter] pub fn latest_date_time(&self) -> Option> { self.graph.latest_date_time() @@ -182,7 +185,7 @@ impl PyGraphView { /// Number of edges in the graph /// /// Returns: - /// the number of edges in the graph + /// int: the number of edges in the graph pub fn count_edges(&self) -> usize { self.graph.count_edges() } @@ -190,7 +193,7 @@ impl PyGraphView { /// Number of edges in the graph /// /// Returns: - /// the number of temporal edges in the graph + /// int: the number of temporal edges in the graph pub fn count_temporal_edges(&self) -> usize { self.graph.count_temporal_edges() } @@ -198,7 +201,7 @@ impl PyGraphView { /// Number of nodes in the graph /// /// Returns: - /// the number of nodes in the graph + /// int: the number of nodes in the graph pub fn count_nodes(&self) -> usize { self.graph.count_nodes() } @@ -209,7 +212,7 @@ impl PyGraphView { /// id (str or int): the node id /// /// Returns: - /// true if the graph contains the specified node, false otherwise + /// bool: true if the graph contains the specified node, false otherwise pub fn has_node(&self, id: PyNodeRef) -> bool { self.graph.has_node(id) } @@ -221,7 +224,7 @@ impl PyGraphView { /// dst (str or int): the destination node id /// /// Returns: - /// true if the graph contains the specified edge, false otherwise + /// bool: true if the graph contains the specified edge, false otherwise #[pyo3(signature = (src, dst))] pub fn has_edge(&self, src: PyNodeRef, dst: PyNodeRef) -> bool { self.graph.has_edge(src, dst) @@ -235,16 +238,16 @@ impl PyGraphView { /// id (str or int): the node id /// /// Returns: - /// the node with the specified id, or None if the node does not exist + /// Optional[Node]: the node with the specified id, or None if the node does not exist pub fn node(&self, id: PyNodeRef) -> Option> { self.graph.node(id) } /// Get the nodes that match the properties name and value /// Arguments: - /// property_dict (dict): the properties name and value + /// property_dict 
(dict[str, Prop]): the properties name and value /// Returns: - /// the nodes that match the properties name and value + /// list[Node]: the nodes that match the properties name and value #[pyo3(signature = (properties_dict))] pub fn find_nodes(&self, properties_dict: HashMap) -> Vec { let iter = self.nodes().into_iter().par_bridge(); @@ -268,7 +271,7 @@ impl PyGraphView { /// Gets the nodes in the graph /// /// Returns: - /// the nodes in the graph + /// Nodes: the nodes in the graph #[getter] pub fn nodes(&self) -> Nodes<'static, DynamicGraph> { self.graph.nodes() @@ -281,7 +284,7 @@ impl PyGraphView { /// dst (str or int): the destination node id /// /// Returns: - /// the edge with the specified source and destination nodes, or None if the edge does not exist + /// Optional[Edge]: the edge with the specified source and destination nodes, or None if the edge does not exist #[pyo3(signature = (src, dst))] pub fn edge( &self, @@ -293,9 +296,9 @@ impl PyGraphView { /// Get the edges that match the properties name and value /// Arguments: - /// property_dict (dict): the properties name and value + /// property_dict (dict[str, Prop]): the properties name and value /// Returns: - /// the edges that match the properties name and value + /// list[Edge]: the edges that match the properties name and value #[pyo3(signature = (properties_dict))] pub fn find_edges(&self, properties_dict: HashMap) -> Vec { let iter = self.edges().into_iter().par_bridge(); @@ -319,7 +322,7 @@ impl PyGraphView { /// Gets all edges in the graph /// /// Returns: - /// the edges in the graph + /// Edges: the edges in the graph #[getter] pub fn edges(&self) -> Edges<'static, DynamicGraph> { self.graph.edges() @@ -329,7 +332,7 @@ impl PyGraphView { /// /// /// Returns: - /// HashMap - Properties paired with their names + /// Properties: Properties paired with their names #[getter] fn properties(&self) -> Properties { self.graph.properties() @@ -338,10 +341,10 @@ impl PyGraphView { /// Returns a subgraph given a set of nodes /// /// Arguments: - /// * `nodes`: set of nodes + /// nodes (list[InputNode]): set of nodes /// /// Returns: - /// GraphView - Returns the subgraph + /// GraphView: Returns the subgraph fn subgraph(&self, nodes: Vec) -> NodeSubgraph { self.graph.subgraph(nodes) } @@ -349,10 +352,10 @@ impl PyGraphView { /// Returns a subgraph filtered by node types given a set of node types /// /// Arguments: - /// * `node_types`: set of node types + /// node_types (list[str]): set of node types /// /// Returns: - /// GraphView - Returns the subgraph + /// GraphView: Returns the subgraph fn subgraph_node_types(&self, node_types: Vec) -> TypeFilteredSubgraph { self.graph.subgraph_node_types(node_types) } @@ -360,10 +363,10 @@ impl PyGraphView { /// Returns a subgraph given a set of nodes that are excluded from the subgraph /// /// Arguments: - /// * `nodes`: set of nodes + /// nodes (list[InputNode]): set of nodes /// /// Returns: - /// GraphView - Returns the subgraph + /// GraphView: Returns the subgraph fn exclude_nodes(&self, nodes: Vec) -> NodeSubgraph { self.graph.exclude_nodes(nodes) } @@ -371,7 +374,7 @@ impl PyGraphView { /// Returns a 'materialized' clone of the graph view - i.e. 
a new graph with a copy of the data seen within the view instead of just a mask over the original graph /// /// Returns: - /// GraphView - Returns a graph clone + /// GraphView: Returns a graph clone fn materialize(&self) -> Result { self.graph.materialize() } diff --git a/raphtory/src/python/packages/algorithms.rs b/raphtory/src/python/packages/algorithms.rs index eed3251aea..963fd6112b 100644 --- a/raphtory/src/python/packages/algorithms.rs +++ b/raphtory/src/python/packages/algorithms.rs @@ -705,8 +705,8 @@ pub fn label_propagation( /// /// Arguments: /// graph (GraphView): the graph view -/// seeds (int | float | list[Node]): the seeding strategy to use for the initial infection (if `int`, choose fixed number -/// of nodes at random, if `float` infect each node with this probability, if `[Node]` +/// seeds (int | float | list[InputNode]): the seeding strategy to use for the initial infection (if `int`, choose fixed number +/// of nodes at random, if `float` infect each node with this probability, if `list` /// initially infect the specified nodes /// infection_prob (float): the probability for a contact between infected and susceptible nodes to lead /// to a transmission diff --git a/raphtory/src/python/packages/base_modules.rs b/raphtory/src/python/packages/base_modules.rs index 404c69d2ae..9422667dae 100644 --- a/raphtory/src/python/packages/base_modules.rs +++ b/raphtory/src/python/packages/base_modules.rs @@ -144,3 +144,5 @@ pub fn base_vectors_module(py: Python<'_>) -> Result, PyErr> { vectors_module.add_class::()?; Ok(vectors_module) } + +pub use crate::python::graph::node_state::base_node_state_module; diff --git a/raphtory/src/python/packages/vectors.rs b/raphtory/src/python/packages/vectors.rs index 84604d9bbc..40fd2c4d18 100644 --- a/raphtory/src/python/packages/vectors.rs +++ b/raphtory/src/python/packages/vectors.rs @@ -1,12 +1,6 @@ use crate::{ - core::{ - utils::{errors::GraphError, time::IntoTime}, - DocumentInput, Lifespan, Prop, - }, - db::api::{ - properties::{internal::PropertiesOps, Properties}, - view::{MaterializedGraph, StaticGraphViewOps}, - }, + core::utils::{errors::GraphError, time::IntoTime}, + db::api::view::{MaterializedGraph, StaticGraphViewOps}, prelude::{EdgeViewOps, GraphViewOps, NodeViewOps}, python::{ graph::{edge::PyEdge, node::PyNode, views::graph_view::PyGraphView}, @@ -21,7 +15,6 @@ use crate::{ Document, Embedding, EmbeddingFunction, EmbeddingResult, }, }; -use chrono::DateTime; use futures_util::future::BoxFuture; use itertools::Itertools; use pyo3::{ @@ -70,17 +63,6 @@ impl<'source> FromPyObject<'source> for PyQuery { } } -fn format_time(millis: i64) -> String { - if millis == 0 { - "unknown time".to_owned() - } else { - match DateTime::from_timestamp_millis(millis) { - Some(time) => time.naive_utc().format("%Y-%m-%d %H:%M:%S").to_string(), - None => "unknown time".to_owned(), - } - } -} - impl PyDocument { pub fn extract_rust_document(&self, py: Python) -> Result { if let (Some(entity), Some(embedding)) = (&self.entity, &self.embedding) { @@ -173,68 +155,6 @@ pub fn into_py_document( } } -/// This funtions ignores the time history of temporal props if their type is Document and they have a life different than Lifespan::Inherited -fn get_documents_from_props( - properties: Properties

, - name: &str, -) -> Box> { - let prop = properties.temporal().get(name); - - match prop { - Some(prop) => { - let props = prop.into_iter(); - let docs = props - .map(|(time, prop)| prop_to_docs(&prop, Lifespan::event(time)).collect_vec()) - .flatten(); - Box::new(docs) - } - None => match properties.get(name) { - Some(prop) => Box::new( - prop_to_docs(&prop, Lifespan::Inherited) - .collect_vec() - .into_iter(), - ), - _ => Box::new(std::iter::empty()), - }, - } -} - -impl Lifespan { - fn overwrite_inherited(&self, default_lifespan: Lifespan) -> Self { - match self { - Lifespan::Inherited => default_lifespan, - other => other.clone(), - } - } -} - -fn prop_to_docs( - prop: &Prop, - default_lifespan: Lifespan, -) -> Box + '_> { - match prop { - Prop::List(docs) => Box::new( - docs.iter() - .map(move |prop| prop_to_docs(prop, default_lifespan)) - .flatten(), - ), - Prop::Map(doc_map) => Box::new( - doc_map - .values() - .map(move |prop| prop_to_docs(prop, default_lifespan)) - .flatten(), - ), - Prop::Document(document) => Box::new(std::iter::once(DocumentInput { - life: document.life.overwrite_inherited(default_lifespan), - ..document.clone() - })), - prop => Box::new(std::iter::once(DocumentInput { - content: prop.to_string(), - life: default_lifespan, - })), - } -} - #[pymethods] impl PyGraphView { /// Create a VectorisedGraph from the current graph diff --git a/raphtory/src/python/types/iterable.rs b/raphtory/src/python/types/iterable.rs index 9f73ffea8b..9ac5b6fa9c 100644 --- a/raphtory/src/python/types/iterable.rs +++ b/raphtory/src/python/types/iterable.rs @@ -2,8 +2,12 @@ use crate::{ db::api::view::BoxedIter, python::types::repr::{iterator_repr, Repr}, }; -use pyo3::{IntoPy, PyObject}; -use std::{marker::PhantomData, sync::Arc}; +use pyo3::prelude::*; +use std::{ + marker::PhantomData, + ops::{Deref, DerefMut}, + sync::Arc, +}; pub struct Iterable + From + Repr> { pub name: &'static str, @@ -113,3 +117,31 @@ impl + From + Repr> Repr for NestedIterable(Vec); +impl Deref for FromIterable { + type Target = [T]; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DerefMut for FromIterable { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +impl<'py, T: FromPyObject<'py>> FromPyObject<'py> for FromIterable { + fn extract_bound(ob: &Bound<'py, PyAny>) -> PyResult { + let len = ob.len().unwrap_or(0); + let mut vec = Vec::::with_capacity(len); + { + for value in ob.iter()? { + vec.push(value?.extract()?) + } + } + Ok(Self(vec)) + } +} diff --git a/raphtory/src/python/types/macros/trait_impl/layerops.rs b/raphtory/src/python/types/macros/trait_impl/layerops.rs index 74acea9e68..83f677a937 100644 --- a/raphtory/src/python/types/macros/trait_impl/layerops.rs +++ b/raphtory/src/python/types/macros/trait_impl/layerops.rs @@ -20,6 +20,9 @@ macro_rules! impl_layerops { #[doc = concat!(" Return a view of ", $name, r#" containing the layer `"name"`"#)] /// Errors if the layer does not exist /// + /// Arguments: + /// name (str): then name of the layer. + /// /// Returns: #[doc = concat!(" ", $name, ": The layered view")] fn layer( @@ -58,6 +61,12 @@ macro_rules! 
impl_layerops { } #[doc = concat!(" Check if ", $name, r#" has the layer `"name"`"#)] + /// + /// Arguments: + /// name (str): the name of the layer to check + /// + /// Returns: + /// bool fn has_layer( &self, name: &str, diff --git a/raphtory/src/python/types/macros/trait_impl/mod.rs b/raphtory/src/python/types/macros/trait_impl/mod.rs index 89c1a8f03e..e5dba46e7a 100644 --- a/raphtory/src/python/types/macros/trait_impl/mod.rs +++ b/raphtory/src/python/types/macros/trait_impl/mod.rs @@ -15,8 +15,6 @@ mod repr; #[macro_use] mod iterable_mixin; -mod node_state; - #[macro_use] mod serialise; diff --git a/raphtory/src/python/types/macros/trait_impl/node_state.rs b/raphtory/src/python/types/macros/trait_impl/node_state.rs deleted file mode 100644 index fe804a59f9..0000000000 --- a/raphtory/src/python/types/macros/trait_impl/node_state.rs +++ /dev/null @@ -1,302 +0,0 @@ -use crate::{ - core::entities::nodes::node_ref::{AsNodeRef, NodeRef}, - db::{ - api::{ - state::{LazyNodeState, NodeState, NodeStateOps, OrderedNodeStateOps}, - view::{DynamicGraph, GraphViewOps}, - }, - graph::node::NodeView, - }, - py_borrowing_iter, - python::{ - types::{repr::Repr, wrappers::iterators::PyBorrowingIterator}, - utils::PyNodeRef, - }, -}; -use chrono::{DateTime, Utc}; -use pyo3::{ - exceptions::{PyKeyError, PyTypeError}, - prelude::*, - types::PyNotImplemented, -}; -use raphtory_api::core::{entities::GID, storage::arc_str::ArcStr}; -use std::{collections::HashMap, sync::Arc}; - -macro_rules! impl_node_state_ops { - ($name:ident<$value:ty>, $inner_t:ty, $to_owned:expr) => { - impl $name { - pub fn iter(&self) -> impl Iterator + '_ { - self.inner.values().map($to_owned) - } - } - - #[pymethods] - impl $name { - fn __len__(&self) -> usize { - self.inner.len() - } - - fn nodes(&self) -> PyBorrowingIterator { - py_borrowing_iter!(self.inner.clone(), $inner_t, |inner| { - inner.nodes().map(|n| n.cloned()) - }) - } - - fn __iter__(&self) -> PyBorrowingIterator { - py_borrowing_iter!(self.inner.clone(), $inner_t, |inner| inner - .values() - .map($to_owned)) - } - - fn __getitem__(&self, node: PyNodeRef) -> PyResult<$value> { - let node = node.as_node_ref(); - self.inner - .get_by_node(node) - .map($to_owned) - .ok_or_else(|| match node { - NodeRef::External(id) => { - PyKeyError::new_err(format!("Missing value for node with id {id}")) - } - NodeRef::Internal(vid) => { - let node = self.inner.graph().node(vid); - match node { - Some(node) => { - PyKeyError::new_err(format!("Missing value {}", node.repr())) - } - None => PyTypeError::new_err("Invalid node reference"), - } - } - }) - } - - fn items(&self) -> PyBorrowingIterator { - py_borrowing_iter!(self.inner.clone(), $inner_t, |inner| inner - .iter() - .map(|(n, v)| (n.cloned(), ($to_owned)(v)))) - } - - fn values(&self) -> PyBorrowingIterator { - self.__iter__() - } - - fn sorted_by_id(&self) -> NodeState<'static, $value, DynamicGraph> { - self.inner.sort_by_id() - } - - fn __repr__(&self) -> String { - self.inner.repr() - } - } - }; -} - -macro_rules! 
impl_node_state_ord_ops { - ($name:ident<$value:ty>, $to_owned:expr) => { - #[pymethods] - impl $name { - #[pyo3(signature = (reverse = false))] - fn sorted(&self, reverse: bool) -> NodeState<'static, $value, DynamicGraph> { - self.inner.sort_by_values(reverse) - } - - fn top_k(&self, k: usize) -> NodeState<'static, $value, DynamicGraph> { - self.inner.top_k(k) - } - - fn bottom_k(&self, k: usize) -> NodeState<'static, $value, DynamicGraph> { - self.inner.bottom_k(k) - } - - fn min_item(&self) -> Option<(NodeView, $value)> { - self.inner - .min_item() - .map(|(n, v)| (n.cloned(), ($to_owned)(v))) - } - - fn min(&self) -> Option<$value> { - self.inner.min().map($to_owned) - } - - fn max_item(&self) -> Option<(NodeView, $value)> { - self.inner - .max_item() - .map(|(n, v)| (n.cloned(), ($to_owned)(v))) - } - - fn max(&self) -> Option<$value> { - self.inner.max().map($to_owned) - } - - fn median(&self) -> Option<$value> { - self.inner.median().map($to_owned) - } - - fn median_item(&self) -> Option<(NodeView, $value)> { - self.inner - .median_item() - .map(|(n, v)| (n.cloned(), ($to_owned)(v))) - } - - fn __eq__<'py>(&self, other: &Bound<'py, PyAny>, py: Python<'py>) -> PyObject { - if let Ok(other) = other.downcast::() { - let other = Bound::borrow(other); - return self.inner.values().eq(other.inner.values()).into_py(py); - } else if let Ok(other) = other.extract::>() { - return self - .inner - .values() - .map($to_owned) - .eq(other.into_iter()) - .into_py(py); - } else if let Ok(other) = other.extract::>() { - return (self.inner.len() == other.len() - && other.into_iter().all(|(node, value)| { - self.inner.get_by_node(node).map($to_owned) == Some(value) - })) - .into_py(py); - } - PyNotImplemented::get_bound(py).into_py(py) - } - } - }; -} - -macro_rules! impl_node_state_num_ops { - ($name:ident<$value:ty>) => { - #[pymethods] - impl $name { - fn sum(&self) -> $value { - self.inner.sum() - } - - fn mean(&self) -> f64 { - self.inner.mean() - } - } - }; -} - -macro_rules! impl_lazy_node_state { - ($name:ident<$value:ty>) => { - #[pyclass] - pub struct $name { - inner: LazyNodeState<'static, $value, DynamicGraph, DynamicGraph>, - } - - #[pymethods] - impl $name { - fn compute(&self) -> NodeState<'static, $value, DynamicGraph, DynamicGraph> { - self.inner.compute() - } - - fn collect(&self) -> Vec<$value> { - self.inner.collect() - } - } - - impl_node_state_ops!( - $name<$value>, - LazyNodeState<'static, $value, DynamicGraph, DynamicGraph>, - |v: $value| v - ); - - impl From> for $name { - fn from(inner: LazyNodeState<'static, $value, DynamicGraph, DynamicGraph>) -> Self { - $name { inner } - } - } - - impl pyo3::IntoPy for LazyNodeState<'static, $value, DynamicGraph, DynamicGraph> { - fn into_py(self, py: Python<'_>) -> PyObject { - $name::from(self).into_py(py) - } - } - }; -} - -macro_rules! impl_node_state { - ($name:ident<$value:ty>) => { - #[pyclass] - pub struct $name { - inner: Arc>, - } - - impl_node_state_ops!( - $name<$value>, - Arc>, - |v: &$value| v.clone() - ); - - impl From> for $name { - fn from(inner: NodeState<'static, $value, DynamicGraph, DynamicGraph>) -> Self { - $name { - inner: inner.into(), - } - } - } - - impl pyo3::IntoPy for NodeState<'static, $value, DynamicGraph, DynamicGraph> { - fn into_py(self, py: Python<'_>) -> PyObject { - $name::from(self).into_py(py) - } - } - }; -} - -macro_rules! 
impl_lazy_node_state_ord { - ($name:ident<$value:ty>) => { - impl_lazy_node_state!($name<$value>); - impl_node_state_ord_ops!($name<$value>, |v: $value| v); - }; -} - -macro_rules! impl_node_state_ord { - ($name:ident<$value:ty>) => { - impl_node_state!($name<$value>); - impl_node_state_ord_ops!($name<$value>, |v: &$value| v.clone()); - }; -} - -macro_rules! impl_lazy_node_state_num { - ($name:ident<$value:ty>) => { - impl_lazy_node_state_ord!($name<$value>); - impl_node_state_num_ops!($name<$value>); - }; -} - -macro_rules! impl_node_state_num { - ($name:ident<$value:ty>) => { - impl_node_state_ord!($name<$value>); - impl_node_state_num_ops!($name<$value>); - }; -} - -impl_lazy_node_state_num!(LazyNodeStateUsize); -impl_node_state_num!(NodeStateUsize); - -impl_lazy_node_state_num!(LazyNodeStateU64); -impl_node_state_num!(NodeStateU64); - -impl_node_state_ord!(NodeStateGID); -impl_lazy_node_state_ord!(LazyNodeStateGID); - -impl_lazy_node_state_ord!(LazyNodeStateOptionI64>); -impl_node_state_ord!(NodeStateOptionI64>); - -impl_lazy_node_state_ord!(LazyNodeStateString); -impl_node_state_ord!(NodeStateString); - -impl_lazy_node_state_ord!(LazyNodeStateOptionDateTime>>); -impl_node_state_ord!(NodeStateOptionDateTime>>); - -impl_lazy_node_state_ord!(LazyNodeStateListI64>); -impl_node_state_ord!(NodeStateListI64>); - -impl_lazy_node_state_ord!(LazyNodeStateOptionListDateTime>>>); -impl_node_state_ord!(NodeStateOptionListDateTime>>>); - -impl_lazy_node_state_ord!(LazyNodeStateOptionStr>); -impl_node_state_ord!(NodeStateOptionStr>); - -impl_lazy_node_state_ord!(LazyNodeStateListDateTime>>); -impl_node_state_ord!(NodeStateListDateTime>>); diff --git a/raphtory/src/python/types/macros/trait_impl/timeops.rs b/raphtory/src/python/types/macros/trait_impl/timeops.rs index 143583fb03..b4969e7630 100644 --- a/raphtory/src/python/types/macros/trait_impl/timeops.rs +++ b/raphtory/src/python/types/macros/trait_impl/timeops.rs @@ -13,7 +13,7 @@ macro_rules! impl_timeops { #[doc = concat!(r" Gets the start time for rolling and expanding windows for this ", $name)] /// /// Returns: - #[doc = concat!(r" The earliest time that this ", $name, r" is valid or None if the ", $name, r" is valid for all times.")] + #[doc = concat!(r" Optional[int]: The earliest time that this ", $name, r" is valid or None if the ", $name, r" is valid for all times.")] #[getter] pub fn start(&self) -> Option { self.$field.start() @@ -22,7 +22,7 @@ macro_rules! impl_timeops { #[doc = concat!(r" Gets the earliest datetime that this ", $name, r" is valid")] /// /// Returns: - #[doc = concat!(r" The earliest datetime that this ", $name, r" is valid or None if the ", $name, r" is valid for all times.")] + #[doc = concat!(r" Optional[Datetime]: The earliest datetime that this ", $name, r" is valid or None if the ", $name, r" is valid for all times.")] #[getter] pub fn start_date_time(&self) -> Option> { self.$field.start_date_time() @@ -31,7 +31,7 @@ macro_rules! impl_timeops { #[doc = concat!(r" Gets the latest time that this ", $name, r" is valid.")] /// /// Returns: - #[doc = concat!(" The latest time that this ", $name, r" is valid or None if the ", $name, r" is valid for all times.")] + #[doc = concat!(" Optional[int]: The latest time that this ", $name, r" is valid or None if the ", $name, r" is valid for all times.")] #[getter] pub fn end(&self) -> Option { self.$field.end() @@ -40,13 +40,16 @@ macro_rules! 
impl_timeops { #[doc = concat!(r" Gets the latest datetime that this ", $name, r" is valid")] /// /// Returns: - #[doc = concat!(r" The latest datetime that this ", $name, r" is valid or None if the ", $name, r" is valid for all times.")] + #[doc = concat!(r" Optional[Datetime]: The latest datetime that this ", $name, r" is valid or None if the ", $name, r" is valid for all times.")] #[getter] pub fn end_date_time(&self) -> Option> { self.$field.end_date_time() } #[doc = concat!(r" Get the window size (difference between start and end) for this ", $name)] + /// + /// Returns: + /// Optional[int] #[getter] pub fn window_size(&self) -> Option { self.$field.window_size() @@ -92,11 +95,11 @@ macro_rules! impl_timeops { /// end (TimeInput | None): The end time of the window (unbounded if `None`). /// /// Returns: - #[doc = concat!("r A ", $name, " object.")] + #[doc = concat!("r ", $name)] pub fn window( &self, - start: PyTime, - end: PyTime, + start: $crate::python::utils::PyTime, + end: $crate::python::utils::PyTime, ) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field .window(start, end) @@ -108,15 +111,15 @@ macro_rules! impl_timeops { /// time (TimeInput): The time of the window. /// /// Returns: - #[doc = concat!(r" A ", $name, r" object.")] - pub fn at(&self, time: PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { + #[doc = concat!(r" ", $name)] + pub fn at(&self, time: $crate::python::utils::PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.at(time) } #[doc = concat!(r" Create a view of the ", $name, r" including all events at the latest time.")] /// /// Returns: - #[doc = concat!(r" A ", $name, r" object.")] + #[doc = concat!(r" ", $name)] pub fn latest(&self) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.latest() } @@ -129,8 +132,8 @@ macro_rules! impl_timeops { /// time (TimeInput): The time of the window. /// /// Returns: - #[doc = concat!(r" A ", $name, r" object.")] - pub fn snapshot_at(&self, time: PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { + #[doc = concat!(r" ", $name)] + pub fn snapshot_at(&self, time: $crate::python::utils::PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.snapshot_at(time) } @@ -139,7 +142,7 @@ macro_rules! impl_timeops { /// This is equivalent to a no-op for `EventGraph`s and `latest()` for `PersitentGraph`s /// /// Returns: - #[doc = concat!(r" A ", $name, r" object.")] + #[doc = concat!(r" ", $name)] pub fn snapshot_latest(&self) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.snapshot_latest() } @@ -150,8 +153,8 @@ macro_rules! impl_timeops { /// end (TimeInput): The end time of the window. /// /// Returns: - #[doc = concat!(r" A ", $name, r" object.")] - pub fn before(&self, end: PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { + #[doc = concat!(r" ", $name)] + pub fn before(&self, end: $crate::python::utils::PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.before(end) } @@ -161,8 +164,8 @@ macro_rules! impl_timeops { /// start (TimeInput): The start time of the window. /// /// Returns: - #[doc = concat!(r" A ", $name, r" object.")] - pub fn after(&self, start: PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { + #[doc = concat!(r" ", $name)] + pub fn after(&self, start: $crate::python::utils::PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.after(start) } @@ -172,8 +175,8 @@ macro_rules! 
impl_timeops { /// start (TimeInput): the new start time of the window /// /// Returns: - #[doc = concat!(r" A ", $name, r" object.")] - pub fn shrink_start(&self, start: PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { + #[doc = concat!(r" ", $name)] + pub fn shrink_start(&self, start: $crate::python::utils::PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.shrink_start(start) } @@ -182,8 +185,8 @@ macro_rules! impl_timeops { /// Arguments: /// end (TimeInput): the new end time of the window /// Returns: - #[doc = concat!(r" A ", $name, r" object.")] - fn shrink_end(&self, end: PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { + #[doc = concat!(r" ", $name)] + fn shrink_end(&self, end: $crate::python::utils::PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.shrink_end(end) } @@ -193,7 +196,7 @@ macro_rules! impl_timeops { /// start (TimeInput): the new start time for the window /// end (TimeInput): the new end time for the window /// - fn shrink_window(&self, start: PyTime, end: PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { + fn shrink_window(&self, start: $crate::python::utils::PyTime, end: $crate::python::utils::PyTime) -> <$base_type as TimeOps<'static>>::WindowedViewType { self.$field.shrink_window(start, end) } } diff --git a/raphtory/src/python/types/repr.rs b/raphtory/src/python/types/repr.rs index 16900697d1..f113350484 100644 --- a/raphtory/src/python/types/repr.rs +++ b/raphtory/src/python/types/repr.rs @@ -1,6 +1,6 @@ use crate::{ core::storage::locked_view::LockedView, - db::api::state::{LazyNodeState, NodeState}, + db::api::state::{LazyNodeState, NodeOp, NodeState}, prelude::{GraphViewOps, NodeStateOps, NodeViewOps}, }; use chrono::{DateTime, NaiveDateTime, TimeZone}; @@ -221,12 +221,10 @@ impl<'a, R: Repr> Repr for &'a R { } } -impl< - 'graph, - G: GraphViewOps<'graph>, - GH: GraphViewOps<'graph>, - V: Repr + Clone + Send + Sync + 'graph, - > Repr for LazyNodeState<'graph, V, G, GH> +impl<'graph, G: GraphViewOps<'graph>, GH: GraphViewOps<'graph>, Op: NodeOp + 'graph> Repr + for LazyNodeState<'graph, Op, G, GH> +where + Op::Output: Repr + Send + Sync + 'graph, { fn repr(&self) -> String { StructReprBuilder::new("LazyNodeState") diff --git a/raphtory/src/python/types/wrappers/prop.rs b/raphtory/src/python/types/wrappers/prop.rs index 61a790d81a..d449154e68 100644 --- a/raphtory/src/python/types/wrappers/prop.rs +++ b/raphtory/src/python/types/wrappers/prop.rs @@ -131,7 +131,7 @@ impl Repr for Prop { pub type PropValue = Option; pub type PropHistItems = Vec<(i64, Prop)>; -#[pyclass(frozen, name = "PropertyFilter")] +#[pyclass(frozen, name = "PropertyFilter", module = "raphtory")] #[derive(Clone)] pub struct PyPropertyFilter(PropertyFilter); @@ -186,7 +186,7 @@ impl InternalNodePropertyFilterOps for PyPropertyFilter { /// property value (these filters always exclude entities that do not /// have the property) or use one of the methods to construct /// other kinds of filters. 
-#[pyclass(frozen, name = "Prop")] +#[pyclass(frozen, name = "Prop", module = "raphtory")] #[derive(Clone)] pub struct PyPropertyRef { name: String, diff --git a/raphtory/src/python/utils/mod.rs b/raphtory/src/python/utils/mod.rs index 59811d87fb..32553f95de 100644 --- a/raphtory/src/python/utils/mod.rs +++ b/raphtory/src/python/utils/mod.rs @@ -238,7 +238,7 @@ where } } -#[pyclass(name = "WindowSet")] +#[pyclass(name = "WindowSet", module = "raphtory", frozen)] pub struct PyWindowSet { window_set: Box, } diff --git a/raphtory/src/serialise/incremental.rs b/raphtory/src/serialise/incremental.rs index f5a92558c0..5e17929084 100644 --- a/raphtory/src/serialise/incremental.rs +++ b/raphtory/src/serialise/incremental.rs @@ -232,7 +232,7 @@ impl GraphWriter { } } -pub(crate) trait InternalCache { +pub trait InternalCache { /// Initialise the cache by pointing it at a proto file. /// Future updates will be appended to the cache. fn init_cache(&self, path: &GraphFolder) -> Result<(), GraphError>; diff --git a/raphtory/src/vectors/vector_selection.rs b/raphtory/src/vectors/vector_selection.rs index 371a386e6e..ea41b4cc89 100644 --- a/raphtory/src/vectors/vector_selection.rs +++ b/raphtory/src/vectors/vector_selection.rs @@ -2,7 +2,6 @@ use itertools::{chain, Itertools}; use std::{ collections::{HashMap, HashSet}, ops::Deref, - usize, }; use crate::{ diff --git a/scripts/activate_private_storage.py b/scripts/activate_private_storage.py index 319c5fcd09..bea1370ebe 100755 --- a/scripts/activate_private_storage.py +++ b/scripts/activate_private_storage.py @@ -14,7 +14,7 @@ if "#[private-storage]" in line: next_line = lines[i + 1] if next_line.strip().startswith("#") and "pometry-storage" in next_line: - lines[i + 1] = re.sub(r"#\s*", "", next_line, 1) + lines[i + 1] = re.sub(r"#\s*", "", next_line, count=1) if "#[public-storage]" in line: next_line = lines[i + 1] if next_line.strip().startswith("pometry-storage"):
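A minimal usage sketch of the Python surface touched by the hunks above, assuming a wheel built from this branch and that the module produced by `base_node_state_module` is exposed as `raphtory.node_state`. Method names (`compute`, `collect`, `sorted`, `top_k`, `sum`, `mean`) are taken from the node-state macros in this diff and may not match released versions; everything else (`Graph`, `add_edge`, `nodes.degree`, `window`, `at`, `unique_layers`, `has_layer`) is existing raphtory API.

```python
# Usage sketch only -- assumes a wheel built from this branch.
# Names marked below as coming from this diff are assumptions, not released API.
from raphtory import Graph, node_state  # node_state: module assumed to be exposed on the package

g = Graph()
g.add_edge(1, "a", "b")
g.add_edge(2, "b", "c")
g.add_edge(3, "c", "a")

# Degree is exposed as a lazy node state (DegreeView in this diff) rather than a plain list.
degrees = g.nodes.degree()
print(degrees.sum(), degrees.mean())   # numeric reductions (impl_node_state_num_ops)
print(degrees.top_k(2))                # ordered reductions (impl_node_state_ord_ops)

eager = degrees.compute()              # materialise the lazy state into a NodeState
print(isinstance(eager, node_state.NodeStateUsize))  # class registered in base_node_state_module

# The time views documented by impl_timeops return the same view type, so calls chain.
w = g.window(1, 3)
print(w.count_nodes(), w.count_edges())
print(g.at(2).earliest_time, g.at(2).latest_time)

# impl_layerops now documents the `name` argument and bool return of has_layer.
e = g.edge("a", "b")
print(e.has_layer(g.unique_layers[0]))
```

As far as the call patterns above are concerned, the `module = "raphtory"` and `frozen` additions to the property, view, and window pyclasses should only change class metadata and internal borrow rules, not behaviour.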