Merged

Changes from 15 commits
43 commits
06dbc2e
redesign, refactor property filters
shivamka1 Oct 2, 2025
61145c9
takes a reference
shivamka1 Oct 6, 2025
261385b
rework validation
shivamka1 Oct 7, 2025
4281d4e
fix arbitrary list, fix tests
shivamka1 Oct 8, 2025
fdce926
merge python traits, fix tests, add more validations
shivamka1 Oct 8, 2025
6de1a5c
rework gql filtering apis, fix tests
shivamka1 Oct 14, 2025
560fc18
impl nodes select filtering in gql
shivamka1 Oct 21, 2025
bf8c67a
change semantics of filters in gql, add missing filter apis in edges,…
shivamka1 Oct 21, 2025
3c17197
add more edge filter tests
shivamka1 Oct 22, 2025
10224c4
add filtering to path from node, add tests
shivamka1 Oct 22, 2025
fd1253a
impl window filter
shivamka1 Oct 26, 2025
520ad5c
impl window filter in python, add tests
shivamka1 Oct 27, 2025
c4f388b
impl gql window filter, add tests
shivamka1 Oct 28, 2025
3251d27
ref
shivamka1 Oct 28, 2025
ff7ff9d
impl edge node filtering, add few tests
shivamka1 Oct 29, 2025
95a4549
rid redundant code
shivamka1 Oct 31, 2025
edd66b7
fix call to filter nodes
shivamka1 Oct 31, 2025
a6ca33a
rid dead code
shivamka1 Oct 31, 2025
9242e98
Integrating edge endpoint filtering mechanism into Python using the s…
arienandalibi Nov 1, 2025
5902056
Added src/dst endpoint filtering support for exploded edge filters in…
arienandalibi Nov 3, 2025
0ab738a
Added src/dst endpoint filtering support for exploded edge filters in…
arienandalibi Nov 4, 2025
277ce21
Added tests from previous branch, some of them fail
arienandalibi Nov 4, 2025
64ac8eb
Fixed DynFilterOps implementations for EndpointWrapper<T> types. Endp…
arienandalibi Nov 4, 2025
a377f00
Changed many impls to be blanket implementations using traits, especi…
arienandalibi Nov 5, 2025
a6493dc
Merge branch 'master_filter' into features/edge-srcdst
shivamka1 Nov 10, 2025
2807c1c
rid dead code
shivamka1 Nov 10, 2025
cd13d2f
nodeops suggestions from lucas
shivamka1 Nov 11, 2025
ef8eefb
start fixing some apis
ljeub-pometry Nov 12, 2025
afd2252
fixed most of the compilation errors
ljeub-pometry Nov 12, 2025
d825be9
fix all the python problems except for actually implementing the pyth…
ljeub-pometry Nov 12, 2025
908be83
finish ref, fix tests
shivamka1 Nov 19, 2025
adf8d75
redone
shivamka1 Nov 20, 2025
b3c767c
fix
shivamka1 Nov 20, 2025
8822288
more changes
shivamka1 Nov 20, 2025
0865b17
start fixing infinite trait bound recursion
ljeub-pometry Nov 20, 2025
76c817a
rid filtered graphs
shivamka1 Nov 20, 2025
e8eb639
rid nodetypefilteredgraph
shivamka1 Nov 20, 2025
bda5e77
add review suggestions
shivamka1 Nov 20, 2025
51e97c3
fix infinite type recursion
ljeub-pometry Nov 21, 2025
dfa0d37
fmt, fix recursion issue in search
shivamka1 Nov 23, 2025
0edc36f
impl py
shivamka1 Nov 25, 2025
2c976c8
Do not rely on Wrap for the trait bounds in the builder API as the co…
ljeub-pometry Nov 25, 2025
1a6fc74
fix gql, tests
shivamka1 Nov 25, 2025
56 changes: 0 additions & 56 deletions python/python/raphtory/__init__.pyi
@@ -49,7 +49,6 @@ __all__ = [
"IndexSpec",
"Prop",
"version",
"DiskGraphStorage",
"graphql",
"algorithms",
"graph_loader",
@@ -1376,17 +1375,6 @@ class Graph(GraphView):
MutableNode: The node object with the specified id, or None if the node does not exist
"""

def persist_as_disk_graph(self, graph_dir: str | PathLike) -> DiskGraphStorage:
"""
save graph in disk_graph format and memory map the result

Arguments:
graph_dir (str | PathLike): folder where the graph will be saved

Returns:
DiskGraphStorage: the persisted graph storage
"""

def persistent_graph(self) -> PersistentGraph:
"""
View graph with persistent semantics
@@ -1424,17 +1412,6 @@ class Graph(GraphView):
bytes:
"""

def to_disk_graph(self, graph_dir: str | PathLike) -> Graph:
"""
Persist graph on disk

Arguments:
graph_dir (str | PathLike): the folder where the graph will be persisted

Returns:
Graph: a view of the persisted graph
"""

def to_parquet(self, graph_dir: str | PathLike):
"""
Persist graph to parquet files
@@ -2209,7 +2186,6 @@ class PersistentGraph(GraphView):
bytes:
"""

def to_disk_graph(self, graph_dir): ...
def update_metadata(self, metadata: dict) -> None:
"""
Updates metadata of the graph.
@@ -6142,35 +6118,3 @@ class Prop(object):
def u8(value): ...

def version(): ...

class DiskGraphStorage(object):
def __repr__(self):
"""Return repr(self)."""

def append_node_temporal_properties(self, location, chunk_size=20000000): ...
def graph_dir(self): ...
@staticmethod
def load_from_dir(graph_dir): ...
@staticmethod
def load_from_pandas(graph_dir, edge_df, time_col, src_col, dst_col): ...
@staticmethod
def load_from_parquets(
graph_dir,
layer_parquet_cols,
node_properties=None,
chunk_size=10000000,
t_props_chunk_size=10000000,
num_threads=4,
node_type_col=None,
node_id_col=None,
): ...
def load_node_metadata(self, location, col_names=None, chunk_size=None): ...
def load_node_types(self, location, col_name, chunk_size=None): ...
def merge_by_sorted_gids(self, other, graph_dir):
"""
Merge this graph with another `DiskGraph`. Note that both graphs should have nodes that are
sorted by their global ids or the resulting graph will be nonsense!
"""

def to_events(self): ...
def to_persistent(self): ...
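Note on the removal above: with DiskGraphStorage, persist_as_disk_graph and to_disk_graph dropped from the stubs, to_parquet (still present in the Graph stub) remains the on-disk persistence entry point exposed here. A minimal sketch of that remaining path, assuming the usual Graph/add_edge constructors; the output directory is made up for the example, and no loader is shown because this diff does not confirm one.

from pathlib import Path
from raphtory import Graph

g = Graph()
g.add_edge(1, "N1", "N2", properties={"p1": 1})
g.add_edge(2, "N2", "N3", properties={"p1": 2})

# Persist the graph as parquet files; the on-disk layout is an
# implementation detail not visible in this stub diff.
out_dir = Path("/tmp/raphtory_graph_parquet")  # hypothetical location
g.to_parquet(out_dir)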
3 changes: 0 additions & 3 deletions python/python/raphtory/algorithms/__init__.pyi
@@ -70,7 +70,6 @@ __all__ = [
"max_weight_matching",
"Matching",
"Infected",
"connected_components",
]

def dijkstra_single_source_shortest_paths(
@@ -894,5 +893,3 @@ class Infected(object):
Returns:
int:
"""

def connected_components(graph): ...
63 changes: 22 additions & 41 deletions python/python/raphtory/filter/__init__.pyi
@@ -32,7 +32,9 @@ __all__ = [
"ExplodedEdge",
"Property",
"Metadata",
"TemporalPropertyFilterBuilder",
"NodeWindow",
"EdgeWindow",
"ExplodedEdgeWindow",
]

class FilterExpr(object):
@@ -75,17 +77,20 @@ class PropertyFilterOps(object):
def avg(self): ...
def contains(self, value): ...
def ends_with(self, value): ...
def first(self): ...
def fuzzy_search(self, prop_value, levenshtein_distance, prefix_match): ...
def is_in(self, values): ...
def is_none(self): ...
def is_not_in(self, values): ...
def is_some(self): ...
def last(self): ...
def len(self): ...
def max(self): ...
def min(self): ...
def not_contains(self, value): ...
def starts_with(self, value): ...
def sum(self): ...
def temporal(self): ...

class Node(object):
@staticmethod
@@ -119,6 +124,8 @@ class Node(object):

@staticmethod
def property(name): ...
@staticmethod
def window(py_start, py_end): ...

class EdgeFilterOp(object):
def __eq__(self, value):
@@ -160,56 +167,30 @@ class Edge(object):
def property(name): ...
@staticmethod
def src(): ...
@staticmethod
def window(py_start, py_end): ...

class ExplodedEdge(object):
@staticmethod
def metadata(name): ...
@staticmethod
def property(name): ...
@staticmethod
def window(py_start, py_end): ...

class Property(PropertyFilterOps):
"""
Construct a property filter

Arguments:
name (str): the name of the property to filter
"""

def temporal(self): ...

class Metadata(PropertyFilterOps):
"""
Construct a metadata filter

Arguments:
name (str): the name of the property to filter
"""

class TemporalPropertyFilterBuilder(object):
    def __eq__(self, value):
        """Return self==value."""

    def __ge__(self, value):
        """Return self>=value."""

    def __gt__(self, value):
        """Return self>value."""

    def __le__(self, value):
        """Return self<=value."""

    def __lt__(self, value):
        """Return self<value."""

    def __ne__(self, value):
        """Return self!=value."""

    def all(self): ...
    def any(self): ...
    def avg(self): ...
    def first(self): ...
    def latest(self): ...
    def len(self): ...
    def max(self): ...
    def min(self): ...
    def sum(self): ...

class Metadata(PropertyFilterOps): ...

class NodeWindow(object):
    def metadata(self, name): ...
    def property(self, name): ...

class EdgeWindow(object):
    def metadata(self, name): ...
    def property(self, name): ...

class ExplodedEdgeWindow(object):
    def metadata(self, name): ...
    def property(self, name): ...
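The window builders above take over from the removed TemporalPropertyFilterBuilder: Node.window(start, end), Edge.window(start, end) and ExplodedEdge.window(start, end) each return a NodeWindow/EdgeWindow/ExplodedEdgeWindow whose property and metadata methods scope the filter to that time range. A rough usage sketch against these stubs; only the method names come from the diff, while the import of the filter module, the half-open window convention and the expected results in the comments are assumptions.

from raphtory import Graph, filter

g = Graph()
g.add_node(1, "N1", properties={"p1": 1})
g.add_node(5, "N1", properties={"p1": 2})
g.add_node(1, "N2", properties={"p1": 2})
g.add_edge(1, "N1", "N2", properties={"p1": 1})

# Compare p1 only against values recorded inside the window [1, 3),
# assumed half-open like other Raphtory windows.
node_expr = filter.Node.window(1, 3).property("p1") == 1
print(sorted(g.filter(node_expr).nodes.id))  # expected: ["N1"]

# The same builder shape exists for edges (and exploded edges).
edge_expr = filter.Edge.window(1, 3).property("p1") == 1
print(sorted(g.filter(edge_expr).edges.id))  # expected: [("N1", "N2")]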
@@ -96,9 +96,9 @@ def check(graph):


@with_disk_variants(init_edges_graph, variants=["graph", "event_disk_graph"])
def test_temporal_latest_semantics():
def test_temporal_last_semantics():
def check(graph):
filter_expr = filter.Edge.property("p1").temporal().latest() == 1
filter_expr = filter.Edge.property("p1").temporal().last() == 1
result_ids = sorted(graph.filter(filter_expr).edges.id)
expected_ids = sorted(
[("N1", "N2"), ("N3", "N4"), ("N4", "N5"), ("N6", "N7"), ("N7", "N8")]
@@ -112,9 +112,9 @@ def check(graph)
init_fn=combined([init_edges_graph, init_graph_for_secondary_indexes]),
variants=["graph", "event_disk_graph"],
)
def test_temporal_latest_semantics_for_secondary_indexes3():
def test_temporal_last_semantics_for_secondary_indexes3():
def check(graph):
filter_expr = filter.Edge.property("p1").temporal().latest() == 1
filter_expr = filter.Edge.property("p1").temporal().last() == 1
result_ids = sorted(graph.filter(filter_expr).edges.id)
expected_ids = sorted(
[
@@ -56,9 +56,9 @@ def check(graph):


@with_disk_variants(init_nodes_graph)
def test_temporal_latest_semantics():
def test_temporal_last_semantics():
def check(graph):
filter_expr = filter.Node.property("p1").temporal().latest() == 1
filter_expr = filter.Node.property("p1").temporal().last() == 1
result_ids = sorted(graph.filter(filter_expr).nodes.id)
expected_ids = sorted(["N1", "N3", "N4", "N6", "N7"])
assert result_ids == expected_ids
@@ -69,9 +69,9 @@
@with_disk_variants(
init_fn=combined([init_nodes_graph, init_graph_for_secondary_indexes]),
)
def test_temporal_latest_semantics_for_secondary_indexes():
def test_temporal_last_semantics_for_secondary_indexes():
def check(graph):
filter_expr = filter.Node.property("p1").temporal().latest() == 1
filter_expr = filter.Node.property("p1").temporal().last() == 1
result_ids = sorted(graph.filter(filter_expr).nodes.id)
expected_ids = sorted(["N1", "N16", "N3", "N4", "N6", "N7"])
assert result_ids == expected_ids
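The renamed tests pin down what temporal().last() means: the comparison is applied only to the most recent value of the temporal property, not to every historical value. A small self-contained illustration of that reading, using the same filter.Node.property(...).temporal().last() chain as the tests; the node ids and property values here are made up for the example.

from raphtory import Graph, filter

g = Graph()
# N1: p1 is 2 at t=1, then 1 at t=2 -> last value is 1, so N1 matches.
g.add_node(1, "N1", properties={"p1": 2})
g.add_node(2, "N1", properties={"p1": 1})
# N2: p1 is 1 at t=1, then 2 at t=2 -> last value is 2, so N2 does not match.
g.add_node(1, "N2", properties={"p1": 1})
g.add_node(2, "N2", properties={"p1": 2})

filter_expr = filter.Node.property("p1").temporal().last() == 1
assert sorted(g.filter(filter_expr).nodes.id) == ["N1"]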