Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/core/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,7 @@ impl fmt::Debug for OrcaError {
| Kind::IncompletePacket { backtrace, .. }
| Kind::InvalidFilepath { backtrace, .. }
| Kind::MissingInfo { backtrace, .. }
| Kind::PodFailed { backtrace, .. }
| Kind::BollardError { backtrace, .. }
| Kind::ChronoParseError { backtrace, .. }
| Kind::DOTError { backtrace, .. }
Expand Down
136 changes: 133 additions & 3 deletions src/core/orchestrator/agent.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,11 @@
use crate::{
    core::pipeline::{NodeInfo, NodeState, Payload},
    uniffi::{
        error::{OrcaError, Result, selector},
        model::{packet::Packet, pipeline::PipelineJob},
        orchestrator::agent::{Agent, AgentClient},
        pipeline::{PipelineRun, PipelineStatus},
    },
};
use chrono::{DateTime, Utc};
use futures_util::future::FutureExt as _;
use std::collections::HashSet;
use std::sync::Mutex;
Expand Down Expand Up @@ -87,6 +92,131 @@
/// Publishes `message` on the `"log"` feed with no additional metadata.
///
/// Thin convenience wrapper over [`Self::publish`]; any delivery error is
/// propagated to the caller unchanged.
pub(crate) async fn log(&self, message: &str) -> Result<()> {
    self.publish("log", BTreeMap::new(), message).await
}
#[expect(
clippy::excessive_nesting,
clippy::indexing_slicing,
clippy::expect_used,
clippy::cast_sign_loss,
clippy::significant_drop_tightening,
reason = "debug"
)]
pub(crate) fn new_pipeline_run(&self, pipeline_job: &Arc<PipelineJob>) -> PipelineRun {
let pipeline_run = PipelineRun {
pipeline_job: Arc::clone(pipeline_job),
created: Utc::now().timestamp() as u64,
terminated: Arc::new(Mutex::new(None)),

Check failure on line 107 in src/core/orchestrator/agent.rs

View workflow job for this annotation

GitHub Actions / rust-syntax-style-format-and-integration

failed to resolve: use of undeclared type `Mutex`
status: Arc::new(Mutex::new(PipelineStatus::Running)),

Check failure on line 108 in src/core/orchestrator/agent.rs

View workflow job for this annotation

GitHub Actions / rust-syntax-style-format-and-integration

failed to resolve: use of undeclared type `Mutex`
state: Arc::new(Mutex::new(HashMap::new())),

Check failure on line 109 in src/core/orchestrator/agent.rs

View workflow job for this annotation

GitHub Actions / rust-syntax-style-format-and-integration

failed to resolve: use of undeclared type `Mutex`
services: TaskTracker::new(),
};
pipeline_run.services.spawn({
let agent_client = Arc::new(self.clone());
let inner_pipeline_run = pipeline_run.clone();
async move {
let pipeline_result = agent_client
.get_pipeline_result(inner_pipeline_run.clone().into())
.await?;
let mut terminated = inner_pipeline_run.terminated.lock().expect("debug");
*terminated = Some(pipeline_result.terminated);
let mut status = inner_pipeline_run.status.lock().expect("debug");
*status = pipeline_result.status;
Ok::<_, OrcaError>(())
}
});
pipeline_run.services.spawn({
let agent_client = Arc::new(self.clone());
let pipeline_hash = pipeline_job.hash.clone();
let state = Arc::clone(&pipeline_run.state);
let pipeline_nodes = pipeline_job
.pipeline
.graph
.node_weights()
.map(|node| node.name.clone())
.collect::<HashSet<_>>();

Check failure on line 135 in src/core/orchestrator/agent.rs

View workflow job for this annotation

GitHub Actions / rust-syntax-style-format-and-integration

cannot find type `HashSet` in this scope
async move {
let subscriber = agent_client
.session
.declare_subscriber(&format!(
"group/{}/status/pipeline_job/{}/**",
&agent_client.group, &pipeline_hash
))
.await
.context(selector::AgentCommunicationFailure {})?;
// let tracker = TaskTracker::new();
loop {
let sample = subscriber
.recv_async()
.await
.context(selector::AgentCommunicationFailure {})?;
// todo: can remove need for this by updating RE_AGENT_KEY_EXPR
let subtopics = sample.key_expr().as_str().split('/').collect::<Vec<_>>();
let (feed_type, source) = (subtopics[5], subtopics[6]);
let payload = serde_json::from_slice::<Payload<Packet, ()>>(
&sample.payload().to_bytes(),
)?;
if feed_type == "output" {
let mut inner_state = state.lock().expect("debug");
if let Some(node_info) = inner_state.get_mut(source) {
let node_state = match &payload {
Payload::Cancelled => NodeState::Cancelled,
Payload::Failed(error_msg) => {
NodeState::Failed(error_msg.to_owned())
}
Payload::End(()) => NodeState::Completed,
Payload::Stream(_) => node_info.state.clone(),
};
*node_info = NodeInfo {
state: node_state,
completed_packets: node_info.completed_packets
+ u32::from(matches!(payload, Payload::Stream(_))),
};
} else {
let node_state = match &payload {
Payload::Cancelled => NodeState::Cancelled,
Payload::Failed(error_msg) => {
NodeState::Failed(error_msg.to_owned())
}
Payload::End(()) => NodeState::Completed,
Payload::Stream(_) => NodeState::Active,
};
inner_state.insert(
source.to_owned(),
NodeInfo {
state: node_state,
completed_packets: u32::from(matches!(
payload,
Payload::Stream(_)
)),
},
);
}
} else if feed_type == "input" {
let mut inner_state = state.lock().expect("debug");
if !inner_state.contains_key(source) {
inner_state.insert(
source.to_owned(),
NodeInfo {
state: NodeState::Active,
completed_packets: 0,
},
);
}
}
let inner_state = state.lock().expect("debug");
if inner_state.keys().cloned().collect::<HashSet<_>>() == pipeline_nodes

Check failure on line 206 in src/core/orchestrator/agent.rs

View workflow job for this annotation

GitHub Actions / rust-syntax-style-format-and-integration

cannot find type `HashSet` in this scope
&& !inner_state.values().any(|v| {
matches!(v.state, NodeState::Idle)
|| matches!(v.state, NodeState::Active)
})
{
break;
}
}
Ok::<_, OrcaError>(())
}
});
pipeline_run
}
}

#[expect(
Expand Down
Loading
Loading