diff --git a/Cargo.lock b/Cargo.lock
index fab3cb8..9c5402e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1626,6 +1626,7 @@ dependencies = [
  "base64 0.22.1",
  "bytes",
  "futures-core",
+ "futures-util",
  "http",
  "http-body",
  "http-body-util",
@@ -1645,12 +1646,14 @@ dependencies = [
  "sync_wrapper",
  "tokio",
  "tokio-rustls",
+ "tokio-util",
  "tower",
  "tower-http",
  "tower-service",
  "url",
  "wasm-bindgen",
  "wasm-bindgen-futures",
+ "wasm-streams",
  "web-sys",
  "webpki-roots",
 ]
@@ -2551,6 +2554,19 @@ dependencies = [
  "unicode-ident",
 ]
 
+[[package]]
+name = "wasm-streams"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65"
+dependencies = [
+ "futures-util",
+ "js-sys",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+]
+
 [[package]]
 name = "web-sys"
 version = "0.3.82"
diff --git a/Cargo.toml b/Cargo.toml
index 5678d7c..5b6cadc 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -6,7 +6,7 @@ edition = "2024"
 [dependencies]
 async-graphql = { version = "7.0.17", features = ["uuid"]}
 tokio = { version = "1", features = ["full"]}
-reqwest = { version = "0.12.15", features = ["json", "rustls-tls"], default-features = false }
+reqwest = { version = "0.12.15", features = ["json", "rustls-tls", "stream"], default-features = false }
 serde_json = "1.0.143"
 serde = { version = "1.0.219", features = ["derive"] }
 axum = "0.8.4"
diff --git a/src/clients.rs b/src/clients.rs
index a08346b..6fe22bd 100644
--- a/src/clients.rs
+++ b/src/clients.rs
@@ -1,3 +1,4 @@
+use std::borrow::Cow;
 use std::fmt;
 
 use axum::http::HeaderMap;
@@ -5,13 +6,13 @@ use httpmock::MockServer;
 use reqwest::{Client, Url};
 use serde::de::DeserializeOwned;
-use tracing::{info, instrument};
-use uuid::Uuid;
+use tracing::{debug, info, instrument};
 
-use crate::model::{app, array, container, event_stream, run, table};
+use crate::model::{app, node, table};
 
 pub type ClientResult = Result;
 
+#[derive(Clone)]
 pub struct TiledClient {
     client: Client,
     address: Url,
 }
 
 impl TiledClient {
     pub fn new(address: Url) -> Self {
+        if address.cannot_be_a_base() {
+            // Panicking is not great but if we've got this far, nothing else is going to work so
+            // bail out early.
+            panic!("Invalid tiled URL");
+        }
         Self {
             client: Client::new(),
             address,
         }
@@ -29,9 +35,8 @@ impl TiledClient {
         &self,
         endpoint: &str,
         headers: Option<HeaderMap>,
-        query_params: Option<&[(&str, &str)]>,
+        query_params: Option<&[(&str, Cow<'_, str>)]>,
     ) -> ClientResult {
-        info!("Requesting from tiled: {}", endpoint);
         let url = self.address.join(endpoint)?;
 
         let mut request = match headers {
@@ -39,7 +44,7 @@
             None => self.client.get(url),
         };
         if let Some(params) = query_params {
-            request = request.query(params);
+            request = request.query(&params);
         }
 
         info!("Querying: {request:?}");
@@ -50,84 +55,59 @@
     pub async fn app_metadata(&self) -> ClientResult {
         self.request("/api/v1/", None, None).await
     }
-    pub async fn run_metadata(&self, id: Uuid) -> ClientResult {
-        self.request(&format!("/api/v1/metadata/{id}"), None, None)
-            .await
-    }
-    pub async fn event_stream_metadata(
+    pub async fn search(
         &self,
-        id: Uuid,
-        stream: String,
-    ) -> ClientResult {
-        self.request(&format!("/api/v1/metadata/{id}/{stream}"), None, None)
+        path: &str,
+        query: &[(&str, Cow<'_, str>)],
+    ) -> ClientResult<node::Root> {
+        self.request(&format!("api/v1/search/{}", path), None, Some(query))
             .await
     }
-    pub async fn array_metadata(
-        &self,
-        id: Uuid,
-        stream: String,
-        array: String,
-    ) -> ClientResult {
-        self.request(
-            &format!("/api/v1/metadata/{id}/{stream}/{array}"),
-            None,
-            Some(&[("include_data_sources", "true")]),
-        )
-        .await
-    }
-    pub async fn table_metadata(
-        &self,
-        id: Uuid,
-        stream: String,
-        table: String,
-    ) -> ClientResult {
-        self.request(
-            &format!("/api/v1/metadata/{id}/{stream}/{table}"),
-            None,
-            Some(&[("include_data_sources", "true")]),
-        )
-        .await
-    }
     pub async fn table_full(
         &self,
-        id: Uuid,
-        stream: String,
-        table: String,
+        path: &str,
+        columns: Option<Vec<String>>,
     ) -> ClientResult {
         let mut headers = HeaderMap::new();
         headers.insert("accept", "application/json".parse().unwrap());
+        let query = columns.map(|columns| {
+            columns
+                .into_iter()
+                .map(|col| ("column", col.into()))
+                .collect::<Vec<_>>()
+        });
         self.request(
-            &format!("/api/v1/table/full/{id}/{stream}/{table}"),
+            &format!("/api/v1/table/full/{}", path),
             Some(headers),
-            None,
+            query.as_deref(),
         )
         .await
     }
-    pub async fn search_root(&self) -> ClientResult {
-        self.request("/api/v1/search/", None, None).await
-    }
-    pub async fn search_run_container(
-        &self,
-        id: Uuid,
-    ) -> ClientResult {
-        self.request(&format!("/api/v1/search/{id}"), None, None)
-            .await
-    }
-    pub async fn container_full(
-        &self,
-        id: Uuid,
-        stream: Option,
-    ) -> ClientResult {
-        let mut headers = HeaderMap::new();
-        headers.insert("accept", "application/json".parse().unwrap());
-        let endpoint = match stream {
-            Some(stream) => &format!("/api/v1/container/full/{id}/{stream}"),
-            None => &format!("/api/v1/container/full/{id}"),
-        };
+    pub(crate) async fn download(
+        &self,
+        run: String,
+        stream: String,
+        det: String,
+        id: u32,
+    ) -> reqwest::Result<reqwest::Response> {
+        let mut url = self
+            .address
+            .join("/api/v1/asset/bytes")
+            .expect("Base address was cannot_be_a_base");
+        url.path_segments_mut()
+            .expect("Base address was cannot_be_a_base")
+            .push(&run)
+            .push(&stream)
+            .push(&det);
 
-        self.request(endpoint, Some(headers), None).await
+        debug!("Downloading id={id} from {url}");
+        self.client
+            .get(url)
+            .query(&[("id", &id.to_string())])
+            .send()
+            .await
     }
 
     /// Create a new client for the given mock server
diff --git a/src/download.rs b/src/download.rs
new file mode 100644
index 0000000..31414ac
--- /dev/null
+++ b/src/download.rs
@@ -0,0 +1,83 @@
+use axum::body::Body;
+use axum::extract::{Path, State};
+use axum::http::{HeaderMap, StatusCode};
+use serde_json::{Value, json};
+use tracing::{error, info};
+
+use crate::clients::TiledClient;
+
+const FORWARDED_HEADERS: [&str; 4] = [
+    "content-disposition",
+    "content-type",
+    "content-length",
+    "last-modified",
+];
+
+pub async fn download(
+    State(client): State<TiledClient>,
+    Path((run, stream, det, id)): Path<(String, String, String, u32)>,
+) -> (StatusCode, HeaderMap, Body) {
+    info!("Downloading {run}/{stream}/{det}/{id}");
+    let req = client.download(run, stream, det, id).await;
+    forward_download_response(req).await
+}
+
+async fn forward_download_response(
+    response: Result<reqwest::Response, reqwest::Error>,
+) -> (StatusCode, HeaderMap, Body) {
+    match response {
+        Ok(mut resp) => match resp.status().as_u16() {
+            200..300 => {
+                let mut headers = HeaderMap::new();
+                for key in FORWARDED_HEADERS {
+                    if let Some(value) = resp.headers_mut().remove(key) {
+                        headers.insert(key, value);
+                    }
+                }
+                let stream = Body::from_stream(resp.bytes_stream());
+                (StatusCode::OK, headers, stream)
+            },
+            400..500 => (
+                // Probably permission error or non-existent file - forward error to client
+                resp.status(),
+                HeaderMap::new(),
+                Body::from_stream(resp.bytes_stream())
+            ),
+            100..200 | // ??? check tiled?
+            300..400 | // should have followed a redirect
+            0..100 | (600..) | // who needs standards anyway
+            500..600 => {
+                let status = resp.status().as_u16();
+                let content = resp.text().await.unwrap_or_else(|e| format!("Unable to read error response: {e}"));
+                (
+                    // Whatever we got back, it wasn't what we expected so blame it on tiled
+                    StatusCode::SERVICE_UNAVAILABLE,
+                    HeaderMap::new(),
+                    json!({
+                        "detail": "Unexpected response from tiled",
+                        "status": status,
+                        // Try to parse response as json before giving up and passing a string
+                        "response": serde_json::from_str(&content)
+                            .unwrap_or(Value::String(content))
+                    }).to_string().into()
+                )
+            }
+        },
+        Err(err) => {
+            error!("Error sending request to tiled: {err}");
+            let (status, message) = if err.is_connect() {
+                (
+                    StatusCode::SERVICE_UNAVAILABLE,
+                    "Could not connect to tiled",
+                )
+            } else {
+                (
+                    StatusCode::INTERNAL_SERVER_ERROR,
+                    "Error making request to tiled",
+                )
+            };
+
+            (status, HeaderMap::new(), message.into())
+        }
+    }
+}
diff --git a/src/main.rs b/src/main.rs
index 3bb1acb..ec9428a 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -1,5 +1,3 @@
-use std::error;
-
 use async_graphql::{EmptyMutation, EmptySubscription, Schema};
 use axum::http::StatusCode;
 use axum::response::{Html, IntoResponse};
@@ -9,6 +7,7 @@ use axum::{Extension, Router};
 mod cli;
 mod clients;
 mod config;
+mod download;
 mod handlers;
 mod model;
 #[cfg(test)]
@@ -25,7 +24,7 @@ use crate::handlers::{graphiql_handler, graphql_handler};
 use crate::model::TiledQuery;
 
 #[tokio::main]
-async fn main() -> Result<(), Box<dyn error::Error>> {
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
     let subscriber = tracing_subscriber::FmtSubscriber::new();
     tracing::subscriber::set_global_default(subscriber)?;
 
@@ -45,14 +44,18 @@
     }
 }
 
-async fn serve(config: GlazedConfig) -> Result<(), Box<dyn error::Error>> {
+async fn serve(config: GlazedConfig) -> Result<(), Box<dyn std::error::Error>> {
+    let client = TiledClient::new(config.tiled_client.address);
     let schema = Schema::build(TiledQuery, EmptyMutation, EmptySubscription)
-        .data(TiledClient::new(config.tiled_client.address))
+        .data(config.bind_address)
+        .data(client.clone())
         .finish();
 
     let app = Router::new()
         .route("/graphql", post(graphql_handler).get(graphql_get_warning))
         .route("/graphiql", get(graphiql_handler))
+        .route("/asset/{run}/{stream}/{det}/{id}", get(download::download))
+        .with_state(client)
         .fallback((
             StatusCode::NOT_FOUND,
             Html(include_str!("../static/404.html")),
diff --git a/src/model.rs b/src/model.rs
index b09f567..f3df4f9 100644
--- a/src/model.rs
+++ b/src/model.rs
@@ -6,11 +6,15 @@
 pub(crate) mod node;
 pub(crate) mod run;
 pub(crate) mod table;
 
-use async_graphql::{Context, Object, Result};
-use tracing::instrument;
-use uuid::Uuid;
+use std::collections::HashMap;
+use std::net::SocketAddr;
+
+use async_graphql::{Context, Object, Result, Union};
+use serde_json::Value;
+use tracing::{info, instrument};
 
 use crate::clients::TiledClient;
+use crate::model::node::NodeAttributes;
 
 pub(crate) struct TiledQuery;
 
@@ -20,84 +24,176 @@ impl TiledQuery {
     async fn app_metadata(&self, ctx: &Context<'_>) -> Result {
         Ok(ctx.data::<TiledClient>()?.app_metadata().await?)
     }
-    #[instrument(skip(self, ctx))]
-    async fn run_metadata(&self, ctx: &Context<'_>, id: Uuid) -> Result {
-        Ok(ctx.data::<TiledClient>()?.run_metadata(id).await?)
+
+    async fn instrument_session(&self, name: String) -> InstrumentSession {
+        InstrumentSession { name }
     }
-    #[instrument(skip(self, ctx))]
-    async fn event_stream_metadata(
-        &self,
-        ctx: &Context<'_>,
-        id: Uuid,
-        stream: String,
-    ) -> Result {
-        Ok(ctx
-            .data::<TiledClient>()?
-            .event_stream_metadata(id, stream)
-            .await?)
+}
+
+struct InstrumentSession {
+    name: String,
+}
+
+#[Object]
+impl InstrumentSession {
+    async fn name(&self) -> &str {
+        &self.name
     }
-    #[instrument(skip(self, ctx))]
-    async fn array_metadata(
-        &self,
-        ctx: &Context<'_>,
-        id: Uuid,
-        stream: String,
-        array: String,
-    ) -> Result {
-        Ok(ctx
+    async fn runs(&self, ctx: &Context<'_>) -> Result<Vec<Run>> {
+        let root = ctx
             .data::<TiledClient>()?
-            .array_metadata(id, stream, array)
-            .await?)
+            .search(
+                "",
+                &[
+                    (
+                        "filter[eq][condition][key]",
+                        "start.instrument_session".into(),
+                    ),
+                    (
+                        "filter[eq][condition][value]",
+                        format!(r#""{}""#, self.name).into(),
+                    ),
+                    ("include_data_sources", "true".into()),
+                ],
+            )
+            .await?;
+        Ok(root.data.into_iter().map(|d| Run { data: d }).collect())
     }
-    #[instrument(skip(self, ctx))]
-    async fn table_metadata(
-        &self,
-        ctx: &Context<'_>,
-        id: Uuid,
-        stream: String,
-        table: String,
-    ) -> Result {
-        Ok(ctx
-            .data::<TiledClient>()?
-            .table_metadata(id, stream, table)
-            .await?)
+}
+
+#[derive(Union)]
+enum RunData<'run> {
+    Array(ArrayData<'run>),
+    Internal(TableData),
+}
+
+struct ArrayData<'run> {
+    run: &'run Run,
+    id: String,
+    stream: String,
+    attrs: node::Attributes<HashMap<String, Value>, array::ArrayStructure>,
+}
+
+#[Object]
+impl<'run> ArrayData<'run> {
+    async fn name(&self) -> &str {
+        &self.id
+    }
+    async fn files<'ad>(&'ad self) -> Vec<Asset<'ad>> {
+        self.attrs
+            .data_sources
+            .as_deref()
+            .unwrap_or_default()
+            .iter()
+            .flat_map(|source| source.assets.iter())
+            .map(|a| Asset {
+                data: self,
+                asset: a,
+            })
+            .collect()
     }
-    #[instrument(skip(self, ctx))]
-    async fn table_full(
-        &self,
-        ctx: &Context<'_>,
-        id: Uuid,
-        stream: String,
-        table: String,
-    ) -> Result {
-        Ok(ctx
-            .data::<TiledClient>()?
-            .table_full(id, stream, table)
-            .await?)
+}
+
+struct Asset<'a> {
+    asset: &'a node::Asset,
+    data: &'a ArrayData<'a>,
+}
+
+#[Object]
+impl Asset<'_> {
+    async fn file(&self) -> &str {
+        &self.asset.data_uri
+    }
+    async fn download(&self, ctx: &Context<'_>) -> Option<String> {
+        let id = self.asset.id?;
+        let base = ctx.data::<SocketAddr>().ok()?;
+        Some(format!(
+            "{}/asset/{}/{}/{}/{}",
+            base, self.data.run.data.id, self.data.stream, self.data.id, id
+        ))
     }
-    #[instrument(skip(self, ctx))]
-    async fn search_root(&self, ctx: &Context<'_>) -> Result {
-        Ok(ctx.data::<TiledClient>()?.search_root().await?)
+}
+
+struct TableData {
+    id: String,
+    attrs: node::Attributes<HashMap<String, Value>, table::TableStructure>,
+}
+
+#[Object]
+impl TableData {
+    async fn name(&self) -> &str {
+        &self.id
    }
-    #[instrument(skip(self, ctx))]
-    async fn search_run_container(
-        &self,
-        ctx: &Context<'_>,
-        id: Uuid,
-    ) -> Result {
-        Ok(ctx.data::<TiledClient>()?.search_run_container(id).await?)
+    async fn columns(&self) -> &[String] {
+        &self.attrs.structure.columns
     }
-    #[instrument(skip(self, ctx))]
-    async fn container_full(
+    async fn data(
         &self,
         ctx: &Context<'_>,
-        id: Uuid,
-        stream: Option,
-    ) -> Result {
-        Ok(ctx
-            .data::<TiledClient>()?
-            .container_full(id, stream)
-            .await?)
+        columns: Option<Vec<String>>,
+    ) -> Result<HashMap<String, Vec<Value>>> {
+        let client = ctx.data::<TiledClient>()?;
+        let p = self
+            .attrs
+            .ancestors
+            .iter()
+            .chain(vec![&self.id])
+            .map(|s| s.as_str())
+            .collect::<Vec<_>>()
+            .join("/");
+        info!("path: {:?}", p);
+
+        let table_data = client.table_full(&p, columns).await?;
+        Ok(table_data)
+    }
+}
+
+struct Run {
+    data: node::Data,
+}
+
+#[Object]
+impl Run {
+    async fn scan_number(&self) -> Option {
+        if let NodeAttributes::Container(attr) = &self.data.attributes {
+            attr.metadata.start_doc().map(|sd| sd.scan_id)
+        } else {
+            None
+        }
+    }
+    async fn id(&self) -> &str {
+        &self.data.id
+    }
+    async fn data(&self, ctx: &Context<'_>) -> Result<Vec<RunData<'_>>> {
+        let client = ctx.data::<TiledClient>()?;
+        let run_data = client
+            .search(&self.data.id, &[("include_data_sources", "true".into())])
+            .await?;
+        let mut sources = Vec::new();
+        for stream in run_data.data {
+            let stream_data = client
+                .search(
+                    &format!("{}/{}", self.data.id, stream.id),
+                    &[("include_data_sources", "true".into())],
+                )
+                .await?;
+            for dataset in stream_data.data {
+                match dataset.attributes {
+                    NodeAttributes::Array(attrs) => sources.push(RunData::Array(ArrayData {
+                        run: self,
+                        stream: stream.id.clone(),
+                        id: dataset.id,
+                        attrs,
+                    })),
+                    NodeAttributes::Table(attrs) => sources.push(RunData::Internal(TableData {
+                        id: dataset.id,
+                        attrs,
+                    })),
+                    NodeAttributes::Container(_) => {}
+                }
+            }
+        }
+        Ok(sources)
     }
 }
 
@@ -105,7 +201,6 @@
 mod tests {
     use async_graphql::{EmptyMutation, EmptySubscription, Schema, value};
     use httpmock::MockServer;
-    use uuid::Uuid;
 
     use crate::TiledQuery;
     use crate::clients::TiledClient;
@@ -133,154 +228,4 @@
         assert_eq!(response.errors, &[]);
         mock.assert();
     }
-    #[tokio::test]
-    async fn run_metadata() {
-        let id = Uuid::parse_str("5d8f5c3e-0e00-4c5c-816d-70b4b0f41498").unwrap();
-        let server = MockServer::start();
-        let mock = server
-            .mock_async(|when, then| {
-                when.method("GET").path(format!("/api/v1/metadata/{id}"));
-                then.status(200)
-                    .body_from_file("resources/metadata_run.json");
-            })
-            .await;
-        let schema = build_schema(&server.base_url());
-        let query = r#"{ runMetadata(id: "5d8f5c3e-0e00-4c5c-816d-70b4b0f41498") {data {id}}}"#;
-        let response = schema.execute(query).await;
-        let exp = value! ({
-            "runMetadata": { "data": {"id": "5d8f5c3e-0e00-4c5c-816d-70b4b0f41498"}}
-        });
-
-        assert_eq!(response.data, exp);
-        assert_eq!(response.errors, &[]);
-        mock.assert();
-    }
-    #[tokio::test]
-    async fn array_metadata() {
-        let id = Uuid::parse_str("4866611f-e6d9-4517-bedf-fc5526df57ad").unwrap();
-        let stream = "primary";
-        let array = "det";
-        let server = MockServer::start();
-        let mock = server
-            .mock_async(|when, then| {
-                when.method("GET")
-                    .path(format!("/api/v1/metadata/{id}/{stream}/{array}"));
-                then.status(200)
-                    .body_from_file("resources/metadata_array.json");
-            })
-            .await;
-        let schema = build_schema(&server.base_url());
-        let query = r#"{ arrayMetadata(id:"4866611f-e6d9-4517-bedf-fc5526df57ad", stream:"primary", array:"det") {data {id}}}"#;
-        let response = schema.execute(query).await;
-        let exp = value! ({
-            "arrayMetadata": { "data": {"id": "det"}}
-        });
-
-        assert_eq!(response.data, exp);
-        assert_eq!(response.errors, &[]);
-        mock.assert();
-    }
-    #[tokio::test]
-    async fn table_metadata() {
-        let id = Uuid::parse_str("4866611f-e6d9-4517-bedf-fc5526df57ad").unwrap();
-        let stream = "primary";
-        let table = "internal";
-        let server = MockServer::start();
-        let mock = server
-            .mock_async(|when, then| {
-                when.method("GET")
-                    .path(format!("/api/v1/metadata/{id}/{stream}/{table}"));
-                then.status(200)
-                    .body_from_file("resources/metadata_table.json");
-            })
-            .await;
-        let schema = build_schema(&server.base_url());
-        let query = r#"{ tableMetadata(id:"4866611f-e6d9-4517-bedf-fc5526df57ad", stream:"primary", table:"internal") {data {id}}}"#;
-        let response = schema.execute(query).await;
-        let exp = value! ({
-            "tableMetadata": { "data": {"id": "internal"}}
-        });
-
-        assert_eq!(response.data, exp);
-        assert_eq!(response.errors, &[]);
-        mock.assert();
-    }
-    #[tokio::test]
-    async fn search_root() {
-        let server = MockServer::start();
-        let mock = server
-            .mock_async(|when, then| {
-                when.method("GET").path("/api/v1/search/");
-                then.status(200)
-                    .body_from_file("resources/search_root.json");
-            })
-            .await;
-        let schema = build_schema(&server.base_url());
-        let query = r#"{ searchRoot {data{id}}}"#;
-        let response = schema.execute(query).await;
-        let exp = value! ({
-            "searchRoot": { "data": [
-                {"id": "4866611f-e6d9-4517-bedf-fc5526df57ad"},
-                {"id": "1e37c0ed-e87e-470d-be18-9d7f62f69127"},
-            ]
-            }
-        });
-
-        assert_eq!(response.data, exp);
-        assert_eq!(response.errors, &[]);
-        mock.assert();
-    }
-    #[tokio::test]
-    async fn search_run_container() {
-        let id = Uuid::parse_str("5d8f5c3e-0e00-4c5c-816d-70b4b0f41498").unwrap();
-        let server = MockServer::start();
-        let mock = server
-            .mock_async(|when, then| {
-                when.method("GET").path(format!("/api/v1/search/{id}"));
-                then.status(200)
-                    .body_from_file("resources/search_run_container.json");
-            })
-            .await;
-        let schema = build_schema(&server.base_url());
-        let query =
-            r#"{searchRunContainer(id: "5d8f5c3e-0e00-4c5c-816d-70b4b0f41498") {data {id}}}"#;
-        let response = schema.execute(query).await;
-        let exp = value! ({
-            "searchRunContainer": { "data": [{"id": "primary"}]}
-        });
-
-        assert_eq!(response.data, exp);
-        assert_eq!(response.errors, &[]);
-        mock.assert();
-    }
-    #[tokio::test]
-    async fn container_full() {
-        let id = Uuid::parse_str("5d8f5c3e-0e00-4c5c-816d-70b4b0f41498").unwrap();
-        let server = MockServer::start();
-        let mock = server
-            .mock_async(|when, then| {
-                when.method("GET")
-                    .path(format!("/api/v1/container/full/{id}"))
-                    .header("accept", "application/json");
-                then.status(200)
-                    .body_from_file("resources/container_snippet.json");
-            })
-            .await;
-        let schema = build_schema(&server.base_url());
-        let query = r#"{containerFull(id: "5d8f5c3e-0e00-4c5c-816d-70b4b0f41498"){contents}}"#;
-        let response = schema.execute(query).await;
-        let exp = value! ({
-            "containerFull": {
-                "contents": {
-                    "primary": {
-                        "contents": {},
-                        "metadata": {}
-                    }
-                }
-            }
-        });
-        assert_eq!(response.data, exp);
-        assert_eq!(response.errors, &[]);
-        mock.assert();
-    }
 }
diff --git a/src/model/array.rs b/src/model/array.rs
index bdd8019..a5ae805 100644
--- a/src/model/array.rs
+++ b/src/model/array.rs
@@ -1,39 +1,7 @@
-use std::collections::HashMap;
-
 use async_graphql::SimpleObject;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 
-use crate::model::node;
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct ArrayMetadataRoot {
-    pub data: ArrayData,
-    pub error: Value,
-    pub links: Option,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct ArrayData {
-    pub id: String,
-    pub attributes: ArrayAttributes,
-    pub links: ArrayLinks,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct ArrayAttributes {
-    pub ancestors: Vec,
-    pub structure_family: String,
-    pub specs: Option>,
-    pub metadata: HashMap,
-    pub structure: ArrayStructure,
-    pub access_blob: Value,
-    pub sorting: Value,
-    pub data_sources: Option>,
-}
-
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
 pub struct ArrayStructure {
     data_type: DataType,
@@ -50,22 +18,3 @@ pub struct DataType {
     itemsize: i64,
     dt_units: Value,
 }
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct ArrayLinks {
-    #[serde(rename = "self")]
-    #[graphql(name = "self")]
-    pub full: Option,
-    pub block: Option,
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::model::array;
-    use crate::test_utils::assert_readable_as;
-
-    #[test]
-    fn array_metadata() {
-        assert_readable_as::("resources/metadata_array.json");
-    }
-}
diff --git a/src/model/container.rs b/src/model/container.rs
index 6d4be75..d0915a8 100644
--- a/src/model/container.rs
+++ b/src/model/container.rs
@@ -1,27 +1,29 @@
-use async_graphql::SimpleObject;
+use async_graphql::{SimpleObject, Union};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct Container {
-    pub contents: Value,
-    pub metadata: Value,
-}
+use crate::model::event_stream;
+use crate::model::run::{self, Start};
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct ContainerStructure {
-    pub contents: Value,
-    pub count: i64,
+#[derive(Union, Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "lowercase", untagged)]
+pub enum ContainerMetadata {
+    Run(Box<run::RunMetadata>),
+    EventStream(event_stream::EventStreamMetadata),
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct Specs {
-    pub name: String,
-    pub version: Option,
+impl ContainerMetadata {
+    pub fn start_doc(&self) -> Option<&Start> {
+        if let ContainerMetadata::Run(run) = self {
+            Some(&run.start)
+        } else {
+            None
+        }
+    }
 }
 
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct Sorting {
-    pub key: String,
-    pub direction: i64,
+pub struct ContainerStructure {
+    pub contents: Value,
+    pub count: i64,
 }
diff --git a/src/model/event_stream.rs b/src/model/event_stream.rs
index 57d3096..6583f91 100644
--- a/src/model/event_stream.rs
+++ b/src/model/event_stream.rs
@@ -5,44 +5,6 @@ use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use uuid::Uuid;
 
-use crate::model::{container, node};
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct EventStreamRoot {
-    pub data: Vec,
-    pub error: Value,
-    pub links: Option,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct EventStreamMetadataRoot {
-    pub data: EventStreamData,
-    pub error: Value,
-    pub links: Option,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct EventStreamData {
-    pub id: String,
-    pub attributes: EventStreamContainerAttributes,
-    pub links: node::Links,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct EventStreamContainerAttributes {
-    pub ancestors: Vec,
-    pub structure_family: String,
-    pub specs: Vec,
-    pub metadata: EventStreamMetadata,
-    pub structure: container::ContainerStructure,
-    pub access_blob: Value,
-    pub sorting: Option>,
-    pub data_sources: Option>,
-}
-
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
 pub struct EventStreamMetadata {
     configuration: HashMap>,
@@ -54,21 +16,11 @@ pub struct EventStreamMetadata {
 #[cfg(test)]
 mod tests {
-    use crate::model::{container, event_stream};
+    use crate::model::node;
     use crate::test_utils::assert_readable_as;
 
-    #[test]
-    fn event_stream_metadata() {
-        assert_readable_as::(
-            "resources/metadata_event_stream.json",
-        );
-    }
     #[test]
     fn search_run_container_for_event_stream_containers() {
-        assert_readable_as::("resources/search_run_container.json");
-    }
-    #[test]
-    fn container_full() {
-        assert_readable_as::("resources/container_event_stream.json");
+        assert_readable_as::<node::Root>("resources/search_run_container.json");
     }
 }
diff --git a/src/model/node.rs b/src/model/node.rs
index aee5d53..3d493f2 100644
--- a/src/model/node.rs
+++ b/src/model/node.rs
@@ -1,29 +1,61 @@
 use std::collections::HashMap;
 
-use async_graphql::{Enum, SimpleObject, Union};
+use async_graphql::{Enum, SimpleObject};
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 
 use crate::model::{array, container, table};
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct Links {
-    #[serde(rename = "self")]
-    #[graphql(name = "self")]
-    pub self_field: String,
-    pub documentation: Option,
-    pub first: Option,
-    pub last: Option,
-    pub next: Option,
-    pub prev: Option,
-    pub search: Option,
-    pub full: Option,
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct Root {
+    pub data: Vec<Data>,
+    pub error: Value,
+    pub links: Option<Links>,
+    pub meta: Value,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct DataSource {
-    #[serde(flatten)]
-    pub structure: Structure,
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct Data {
+    pub id: String,
+    pub attributes: NodeAttributes,
+    pub links: Links,
+    pub meta: Value,
+}
+
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[serde(tag = "structure_family", rename_all = "lowercase")]
+pub enum NodeAttributes {
+    Container(Attributes<container::ContainerMetadata, container::ContainerStructure>),
+    Array(Attributes<HashMap<String, Value>, array::ArrayStructure>),
+    Table(Attributes<HashMap<String, Value>, table::TableStructure>),
+}
+
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct Attributes<Meta, S> {
+    pub ancestors: Vec<String>,
+    pub specs: Vec<Spec>,
+    pub metadata: Meta,
+    pub structure: S,
+    pub access_blob: Value,
+    pub sorting: Option<Vec<Sorting>>,
+    pub data_sources: Option<Vec<DataSource<S>>>,
+}
+
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct Spec {
+    pub name: String,
+    pub version: Option,
+}
+
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct Sorting {
+    pub key: String,
+    pub direction: i64,
+}
+
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct DataSource<S> {
+    pub structure: S,
     pub id: Option,
     pub mimetype: Option,
     pub parameters: HashMap,
@@ -31,20 +63,6 @@
     management: Management,
 }
 
-#[derive(Union, Debug, Clone, PartialEq, Serialize, Deserialize)]
-#[serde(
-    rename_all = "lowercase",
-    tag = "structure_family",
-    content = "structure"
-)]
-pub enum Structure {
-    Array(array::ArrayStructure),
-    //Awkward(AwkwardSructure),
-    Container(container::ContainerStructure),
-    //Sparse(SparseStructure),
-    Table(table::TableStructure),
-}
-
 #[derive(Enum, Debug, Copy, Clone, Eq, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "lowercase")]
 pub enum Management {
@@ -56,9 +74,25 @@
 
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
 pub struct Asset {
-    data_uri: String,
+    pub data_uri: String,
     is_directory: bool,
     parameter: Option,
     num: Option,
-    id: Option,
+    pub id: Option,
+}
+
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
+pub struct Links {
+    #[serde(rename = "self")]
+    #[graphql(name = "self")]
+    pub self_field: String,
+    pub documentation: Option,
+    pub first: Option,
+    pub last: Option,
+    pub next: Option,
+    pub prev: Option,
+    pub search: Option,
+    pub full: Option,
+    pub block: Option,
+    pub partition: Option,
 }
diff --git a/src/model/run.rs b/src/model/run.rs
index c3aa919..7489c2e 100644
--- a/src/model/run.rs
+++ b/src/model/run.rs
@@ -5,44 +5,6 @@ use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use uuid::Uuid;
 
-use crate::model::{container, node};
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct RunRoot {
-    pub data: Vec,
-    pub error: Value,
-    pub links: Option,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct RunMetadataRoot {
-    pub data: RunData,
-    pub error: Value,
-    pub links: Option,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct RunData {
-    pub id: Uuid,
-    pub attributes: RunContainerAttributes,
-    pub links: node::Links,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct RunContainerAttributes {
-    pub ancestors: Vec,
-    pub structure_family: String,
-    pub specs: Vec,
-    pub metadata: RunMetadata,
-    pub structure: container::ContainerStructure,
-    pub access_blob: Value,
-    pub sorting: Vec,
-    pub data_sources: Option>,
-}
-
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
 pub struct RunMetadata {
     pub start: Start,
@@ -110,19 +72,11 @@ pub struct Stop {
 #[cfg(test)]
 mod tests {
-    use crate::model::{container, run};
+    use crate::model::node;
     use crate::test_utils::assert_readable_as;
 
-    #[test]
-    fn run_metadata() {
-        assert_readable_as::("resources/metadata_run.json");
-    }
     #[test]
     fn search_root_for_run_containers() {
-        assert_readable_as::("resources/search_root.json");
-    }
-    #[test]
-    fn container_full() {
-        assert_readable_as::("resources/container_run.json");
+        assert_readable_as::<node::Root>("resources/search_root.json");
     }
 }
diff --git a/src/model/table.rs b/src/model/table.rs
index 41d46fd..d93df05 100644
--- a/src/model/table.rs
+++ b/src/model/table.rs
@@ -4,66 +4,12 @@ use async_graphql::SimpleObject;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 
-use crate::model::node;
-
 pub type Table = HashMap<String, Vec<Value>>;
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct TableMetadataRoot {
-    pub data: TableData,
-    pub error: Value,
-    pub links: Option,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct TableData {
-    pub id: String,
-    pub attributes: TableAttributes,
-    pub links: TableLinks,
-    pub meta: Value,
-}
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct TableAttributes {
-    pub ancestors: Vec,
-    pub structure_family: String,
-    pub specs: Option>,
-    pub metadata: HashMap,
-    pub structure: TableStructure,
-    pub access_blob: Value,
-    pub sorting: Value,
-    pub data_sources: Option>,
-}
-
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
 pub struct TableStructure {
     pub arrow_schema: String,
     pub npartitions: i64,
-    pub columns: Vec,
+    pub columns: Vec<String>,
     pub resizable: bool,
 }
-
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, SimpleObject)]
-pub struct TableLinks {
-    #[serde(rename = "self")]
-    #[graphql(name = "self")]
-    pub self_field: String,
-    pub full: Option,
-    pub partition: Option,
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::model::table;
-    use crate::test_utils::assert_readable_as;
-
-    #[test]
-    fn table_metadata() {
-        assert_readable_as::("resources/metadata_table.json");
-    }
-    #[test]
-    fn table_full() {
-        assert_readable_as::("resources/table_full.json");
-    }
-}
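A minimal sketch of how the reworked schema can be queried once this lands. The field names follow from the resolvers above (instrumentSession, runs, scanNumber, data, files, download); the session name "cm12345-6" is made up for illustration:

    {
      instrumentSession(name: "cm12345-6") {
        runs {
          id
          scanNumber
          data {
            ... on ArrayData {
              name
              files {
                file
                download
              }
            }
            ... on TableData {
              name
              columns
            }
          }
        }
      }
    }

Each download value points at the new axum route, /asset/{run}/{stream}/{det}/{id}, which proxies the bytes from tiled via TiledClient::download.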