|
12 | 12 | import tensorstore as ts |
13 | 13 | import zstandard as zstd |
14 | 14 | from graph_tool import Graph |
| 15 | +from cachetools import LRUCache |
15 | 16 |
|
16 | 17 | from pychunkedgraph.graph import types |
17 | 18 | from pychunkedgraph.graph.chunks.utils import ( |
|
21 | 22 | from pychunkedgraph.graph.utils import basetypes |
22 | 23 |
|
23 | 24 | from ..utils import basetypes |
| 25 | +from ..utils.generic import get_parents_at_timestamp |
24 | 26 |
|
25 | 27 |
|
26 | 28 | _edge_type_fileds = ("in_chunk", "between_chunk", "cross_chunk") |
|
39 | 41 | ] |
40 | 42 | ) |
41 | 43 | ZSTD_EDGE_COMPRESSION = 17 |
| 44 | +PARENTS_CACHE = LRUCache(64 * 1024) |
42 | 45 |
|
43 | 46 |
|
44 | 47 | class Edges: |
@@ -341,6 +344,23 @@ def _filter(node): |
341 | 344 | chunks_map[node_b] = np.concatenate(chunks_map[node_b]) |
342 | 345 | return int(mlayer), _filter(node_a), _filter(node_b) |
343 | 346 |
|
def _populate_parents_cache(children: np.ndarray):
    """
    Ensure parent lookups for ``children`` are present in the module-level
    ``PARENTS_CACHE``, fetching from the backend only for IDs not cached.

    Touching an existing entry via ``PARENTS_CACHE[child]`` refreshes its
    LRU position so entries needed soon are not evicted.
    NOTE(review): assumes `cg` (chunkedgraph client) is in the enclosing
    scope and `cg.get_parents(..., current=False)` yields, per child, an
    iterable of ``(parent, timestamp)`` pairs — matches the original code.
    """
    # No `global` needed: PARENTS_CACHE is only mutated, never rebound.
    not_cached = []
    for child in children:
        try:
            # Touch to reset the LRU index; these will be needed soon.
            _ = PARENTS_CACHE[child]
        except KeyError:
            not_cached.append(child)

    if not not_cached:
        # Everything already cached; skip a needless backend call.
        return

    all_parents = cg.get_parents(not_cached, current=False)
    for child, parents in zip(not_cached, all_parents):
        # Map timestamp -> parent ID for each freshly fetched child.
        # Loop var renamed from `ts` to avoid shadowing the tensorstore
        # module alias imported at the top of the file.
        PARENTS_CACHE[child] = {
            timestamp: parent for parent, timestamp in parents
        }
| 363 | + |
344 | 364 | def _get_new_edge(edge, parent_ts, padding): |
345 | 365 | """ |
346 | 366 | Attempts to find new edge(s) for the stale `edge`. |
@@ -371,7 +391,13 @@ def _get_new_edge(edge, parent_ts, padding): |
371 | 391 | if np.any(mask): |
372 | 392 | parents_a = _edges[mask][:, 0] |
373 | 393 | children_b = cg.get_children(_edges[mask][:, 1], flatten=True) |
374 | | - parents_b = np.unique(cg.get_parents(children_b, time_stamp=parent_ts)) |
| 394 | + # parents_b = np.unique(cg.get_parents(children_b, time_stamp=parent_ts)) |
| 395 | + _populate_parents_cache(children_b) |
| 396 | + _parents_b, missing = get_parents_at_timestamp( |
| 397 | + children_b, PARENTS_CACHE, time_stamp=parent_ts, unique=True |
| 398 | + ) |
| 399 | + _parents_b_missing = np.unique(cg.get_parents(missing, time_stamp=parent_ts)) |
| 400 | + parents_b = np.concatenate([_parents_b, _parents_b_missing]) |
375 | 401 | _cx_edges_d = cg.get_cross_chunk_edges(parents_b, time_stamp=parent_ts) |
376 | 402 | parents_b = [] |
377 | 403 | for _node, _edges_d in _cx_edges_d.items(): |
|
0 commit comments