forked from tracel-ai/burn
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathinference.rs
More file actions
56 lines (47 loc) · 1.76 KB
/
inference.rs
File metadata and controls
56 lines (47 loc) · 1.76 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
use burn::{
data::{dataloader::batcher::Batcher, dataset::Dataset},
module::Module,
record::{NoStdTrainingRecorder, Recorder},
tensor::backend::Backend,
};
use rgb::RGB8;
use textplots::{Chart, ColorPlot, Shape};
use crate::{
dataset::{HousingBatcher, HousingDataset, HousingDistrictItem},
model::{RegressionModelConfig, RegressionModelRecord},
};
/// Runs inference with a trained regression model and plots the results.
///
/// Loads the model record produced by training from `artifact_dir`, runs the
/// model over a sample of up to 1000 items from the housing test split,
/// renders a terminal scatter plot of predicted vs. expected median house
/// values, and prints one example prediction/target pair.
///
/// # Panics
/// Panics if no trained model record exists under `artifact_dir` (run the
/// training step first).
pub fn infer<B: Backend>(artifact_dir: &str, device: B::Device) {
    // Load the trained weights saved by the training run.
    let record: RegressionModelRecord<B> = NoStdTrainingRecorder::new()
        .load(format!("{artifact_dir}/model").into(), &device)
        .expect("Trained model should exist; run train first");
    let model = RegressionModelConfig::new()
        .init(&device)
        .load_record(record);

    // Use a sample of up to 1000 items from the test split.
    let dataset = HousingDataset::test();
    let items: Vec<HousingDistrictItem> = dataset.iter().take(1000).collect();

    let batcher = HousingBatcher::new(device);
    // `items` is not used again below, so move it into the batcher instead
    // of cloning the whole Vec (avoids a redundant allocation + copy).
    let batch = batcher.batch(items);

    let predicted = model.forward(batch.inputs);
    let targets = batch.targets;

    // Flatten the (N, 1) prediction tensor to rank 1 and pair each predicted
    // value with its expected target for plotting.
    let predicted = predicted.squeeze::<1>(1).into_data();
    let expected = targets.into_data();
    let points = predicted
        .iter::<f32>()
        .zip(expected.iter::<f32>())
        .collect::<Vec<_>>();

    println!("Predicted vs. Expected Median House Value (in 100,000$)");
    Chart::new_with_y_range(120, 60, 0., 5., 0., 5.)
        .linecolorplot(
            &Shape::Points(&points),
            RGB8 {
                r: 255,
                g: 85,
                b: 85,
            },
        )
        .display();

    // Print a single numeric value as an example. Guard with `first()` so an
    // empty test split doesn't panic on an out-of-bounds index.
    if let Some((pred, exp)) = points.first() {
        println!("Predicted {pred} Expected {exp}");
    }
}