
Commit c0f2d65

server/counts: Split names and counts into parallel arrays
1 parent 828777e

1 file changed: src/server/counts.rs (+28 -23)

--- a/src/server/counts.rs
+++ b/src/server/counts.rs
@@ -1,4 +1,4 @@
-use std::{collections::HashMap, num::ParseIntError};
+use std::num::ParseIntError;
 
 use axum::{
     extract::{Query, State},
@@ -28,22 +28,29 @@ struct IndexQuery {
 
 #[derive(Serialize)]
 #[serde(untagged)]
-enum Counts {
-    Normalized(HashMap<String, f64>),
-    Raw(HashMap<String, i32>),
+enum Values {
+    Normalized(Vec<f64>),
+    Raw(Vec<i32>),
 }
 
 #[derive(Serialize)]
 struct Run {
     id: i32,
-    counts: Counts,
+    values: Values,
 }
 
 #[derive(Serialize)]
-struct IndexBody {
+#[serde(rename_all = "camelCase")]
+struct Counts {
+    feature_names: Vec<String>,
     runs: Vec<Run>,
 }
 
+#[derive(Serialize)]
+struct IndexBody {
+    counts: Counts,
+}
+
 pub fn router() -> Router<Context> {
     Router::new().route("/counts", get(index))
 }
@@ -123,15 +130,9 @@ async fn index(
                 .into_iter()
                 .zip(normalized_counts.axis_iter(Axis(0)))
             {
-                let counts = feature_names
-                    .iter()
-                    .zip(row)
-                    .map(|(name, count)| (name.clone(), *count))
-                    .collect();
-
                 runs.push(Run {
                     id,
-                    counts: Counts::Normalized(counts),
+                    values: Values::Normalized(row.to_vec()),
                 });
             }
         } else {
@@ -147,7 +148,7 @@ async fn index(
                     .map(|(name, count)| (name.clone(), *count))
                     .collect();
 
-                let normalized_counts = match normalization_method {
+                let normalized_counts_map = match normalization_method {
                     Normalize::Fpkm => {
                         crate::counts::normalization::fpkm::calculate_fpkms(&features, &counts)
                             .unwrap()
@@ -159,28 +160,32 @@ async fn index(
                     }
                 };
 
+                let normalized_counts = feature_names
+                    .iter()
+                    .map(|name| normalized_counts_map[name])
+                    .collect();
+
                 runs.push(Run {
                     id,
-                    counts: Counts::Normalized(normalized_counts),
+                    values: Values::Normalized(normalized_counts),
                 })
             }
         }
    } else {
        let chunks = counts.chunks_exact(feature_names.len());

        for (id, chunk) in run_ids.into_iter().zip(chunks) {
-            let counts = feature_names
-                .iter()
-                .zip(chunk)
-                .map(|(name, count)| (name.clone(), *count))
-                .collect();
-
            runs.push(Run {
                id,
-                counts: Counts::Raw(counts),
+                values: Values::Raw(chunk.to_vec()),
            });
        }
    }

-    Ok(Json(IndexBody { runs }))
+    Ok(Json(IndexBody {
+        counts: Counts {
+            feature_names,
+            runs,
+        },
+    }))
 }
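
For context: the old body serialized each run's counts as a map from feature name to value, repeating every feature name in every run; the new body sends the names once and gives each run a positionally parallel values array. Below is a minimal sketch of how the new shape serializes. It is not part of the commit: it assumes serde (with the derive feature) and serde_json as dependencies, copies the struct definitions from the diff above, and uses made-up feature names and counts.

use serde::Serialize;

#[derive(Serialize)]
#[serde(untagged)]
enum Values {
    Normalized(Vec<f64>),
    Raw(Vec<i32>),
}

#[derive(Serialize)]
struct Run {
    id: i32,
    values: Values,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct Counts {
    feature_names: Vec<String>,
    runs: Vec<Run>,
}

#[derive(Serialize)]
struct IndexBody {
    counts: Counts,
}

fn main() {
    // Made-up two-feature payload; one raw and one normalized run are
    // mixed here only to show both enum variants.
    let body = IndexBody {
        counts: Counts {
            feature_names: vec!["feature_a".into(), "feature_b".into()],
            runs: vec![
                Run { id: 1, values: Values::Raw(vec![8, 13]) },
                Run { id: 2, values: Values::Normalized(vec![0.25, 0.75]) },
            ],
        },
    };

    // `untagged` drops the enum wrapper (each run's values is a bare array)
    // and `rename_all = "camelCase"` turns `feature_names` into `featureNames`,
    // so this prints (wrapped here for readability):
    // {"counts":{"featureNames":["feature_a","feature_b"],
    //  "runs":[{"id":1,"values":[8,13]},{"id":2,"values":[0.25,0.75]}]}}
    println!("{}", serde_json::to_string(&body).unwrap());
}

A consumer re-pairs names with numbers by index: the i-th element of a run's values belongs to featureNames[i]. Because Values is untagged, raw and normalized runs are distinguished by how the request was made, not by the JSON itself.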
