-
Notifications
You must be signed in to change notification settings - Fork 60
Expand file tree
/
Copy pathCargo.toml
More file actions
48 lines (37 loc) · 1.15 KB
/
Cargo.toml
File metadata and controls
48 lines (37 loc) · 1.15 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
[package]
# Cargo convention: `name` then `version` first, remaining keys
# alphabetical, `description` last.
name = "minilm-burn"
version = "0.1.0"
authors = ["Dilshod Tadjibaev (@antimora)"]
edition = "2024"
license = "MIT OR Apache-2.0"
description = "MiniLM sentence transformer with Burn"

[features]
default = ["pretrained"]
# Enables downloading pretrained weights (pulls in burn's network
# support plus the optional `dirs` and `hf-hub` dependencies).
pretrained = ["burn/network", "dep:dirs", "dep:hf-hub"]

# Backend selection (alphabetical) — each maps to one burn backend.
cuda = ["burn/cuda"]
ndarray = ["burn/ndarray"]
tch-cpu = ["burn/tch"]
tch-gpu = ["burn/tch"]
wgpu = ["burn/wgpu"]

[dependencies]
# Sorted alphabetically per Cargo convention.
burn = { version = "0.20.0", default-features = false, features = ["std"] }
burn-store = { version = "0.20.0", features = ["std", "safetensors"] }
# `dirs` and `hf-hub` are optional: only enabled via the `pretrained`
# feature (`dep:dirs`, `dep:hf-hub`) for HuggingFace model download.
dirs = { version = "6.0.0", optional = true }
hf-hub = { version = "0.4.3", optional = true }
# Serialization (no_std-friendly: default features off, `alloc` on).
serde = { version = "1.0", default-features = false, features = ["derive", "alloc"] }
serde_json = "1.0"
# Tokenizer; `onig` selects its regex backend — NOTE(review): confirm
# against the tokenizers crate's feature docs.
tokenizers = { version = "0.19.1", default-features = false, features = ["onig"] }

[dev-dependencies]
# Dev-only tooling: `clap` presumably for example CLI parsing,
# `criterion` as the benchmark harness (see `[[bench]] harness = false`).
clap = { version = "4.5", features = ["derive"] }
criterion = "0.5"
[[example]]
# The example needs a concrete backend in addition to `pretrained`
# (which is on by default), e.g. `--features ndarray`.
name = "inference"
required-features = ["pretrained", "ndarray"]
[[bench]]
name = "inference"
# Disable libtest's built-in harness so a custom one (Criterion,
# per dev-dependencies) can supply `main`.
harness = false
required-features = ["pretrained"]