Skip to content

Commit ed9edcb

Browse files
committed
Update compute_hash_for_node_and_parents recursive base cases to perform the already-computed check first.
1 parent 0c3e7db commit ed9edcb

File tree

1 file changed

+44
-39
lines changed

1 file changed

+44
-39
lines changed

src/core/model/pipeline.rs

Lines changed: 44 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -208,51 +208,56 @@ impl Pipeline {
208208
}
209209

210210
/// Compute the hash for each node in the graph which is defined as the hash of its kernel + the hashes of its parents
211-
pub(crate) fn compute_hash_for_node_and_parents(
211+
pub(crate) fn compute_hash_for_node_and_parents<'g>(
212212
node_idx: NodeIndex,
213213
input_spec: &HashMap<String, Vec<NodeURI>>,
214-
graph: &mut graph::Graph<PipelineNode, ()>,
215-
) {
216-
// Collect parent indices first to avoid borrowing issues
217-
let parent_indices: Vec<NodeIndex> = graph.neighbors_directed(node_idx, Incoming).collect();
214+
graph: &'g mut graph::Graph<PipelineNode, ()>,
215+
) -> &'g str {
216+
if graph[node_idx].hash.is_empty() {
217+
// Collect parent indices first to avoid borrowing issues
218+
let parent_indices: Vec<NodeIndex> =
219+
graph.neighbors_directed(node_idx, Incoming).collect();
218220

219-
// Sort the parent hashes to ensure consistent ordering
220-
let mut parent_hashes: Vec<String> = if parent_indices.is_empty() {
221-
// This is a root node (it has no parents), thus we will need to use the input_spec to generate a unique hash for the node
222-
// Find all the input keys that map to this node
223-
let input_keys = input_spec.iter().filter_map(|(input_key, node_uris)| {
224-
node_uris.iter().find_map(|node_uri| {
225-
(node_uri.node_id == graph[node_idx].label).then(|| input_key.clone())
226-
})
227-
});
228-
229-
input_keys.collect()
230-
} else {
231-
parent_indices
232-
.into_iter()
233-
.map(|parent_idx| {
234-
// Check if hash has been computed for this node, if not trigger computation
235-
if graph[parent_idx].hash.is_empty() {
236-
// Recursive call to compute the parent's hash
237-
Self::compute_hash_for_node_and_parents(parent_idx, input_spec, graph);
238-
}
239-
graph[parent_idx].hash.clone()
240-
})
241-
.collect()
242-
};
221+
// Sort the parent hashes to ensure consistent ordering
222+
let mut parent_hashes: Vec<String> = if parent_indices.is_empty() {
223+
// This is a root node (it has no parents), thus we will need to use the input_spec to generate a unique hash for the node
224+
// Find all the input keys that map to this node
225+
input_spec
226+
.iter()
227+
.filter_map(|(input_key, node_uris)| {
228+
node_uris.iter().find_map(|node_uri| {
229+
(node_uri.node_id == graph[node_idx].label).then(|| input_key.clone())
230+
})
231+
})
232+
.collect()
233+
} else {
234+
parent_indices
235+
.into_iter()
236+
.map(|parent_idx| {
237+
// Check if hash has been computed for this node, if not trigger computation
238+
Self::compute_hash_for_node_and_parents(parent_idx, input_spec, graph)
239+
.to_owned()
240+
})
241+
.collect()
242+
};
243243

244-
parent_hashes.sort();
244+
parent_hashes.sort();
245245

246-
// Combine the node's kernel hash + the parent_hashes by concatenation only if there are parent hashes, else it is just the kernel hash
247-
if parent_hashes.is_empty() {
248-
} else {
249-
let hash_for_node = format!(
250-
"{}{}",
251-
&graph[node_idx].kernel.get_hash(),
252-
parent_hashes.into_iter().join("")
253-
);
254-
graph[node_idx].hash = hash_buffer(hash_for_node.as_bytes());
246+
// Combine the node's kernel hash + the parent_hashes by concatenation only if there are parent hashes, else it is just the kernel hash
247+
if parent_hashes.is_empty() {
248+
let kernel_hash = graph[node_idx].kernel.get_hash().to_owned();
249+
graph[node_idx].hash.clone_from(&kernel_hash);
250+
} else {
251+
let hash_for_node = format!(
252+
"{}{}",
253+
&graph[node_idx].kernel.get_hash(),
254+
parent_hashes.into_iter().join("")
255+
);
256+
graph[node_idx].hash = hash_buffer(hash_for_node.as_bytes());
257+
}
255258
}
259+
260+
&graph[node_idx].hash
256261
}
257262
}
258263

0 commit comments

Comments
 (0)