# Author: Julian Wanner
# Email: [email protected]
# Version: 1.0.0
# License: MIT
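# MLproject definition for the containerized training run of the
# root-tissue-segmentation pipeline (an mlf-core-based MLflow project).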
name: root-tissue-segmentation
#conda_env: environment.yml
docker_env:
  image: ghcr.io/qbic-pipelines/root_tissue_segmentation:1.0.0
  volumes: ["${PWD}/data:/data"]
  run_params: [["--gpus", "all"], ["--ipc", "host"]]
  environment: [["MLF_CORE_DOCKER_RUN", "TRUE"], ["CUBLAS_WORKSPACE_CONFIG", ":4096:8"]]
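# volumes mounts ./data into the container; environment sets variables inside it
# (CUBLAS_WORKSPACE_CONFIG=:4096:8 makes cuBLAS kernels deterministic on CUDA >= 10.2).
# run_params forwards extra flags to `docker run`: --gpus all exposes the host GPUs,
# and --ipc host shares the host IPC namespace, which PyTorch DataLoader workers need
# for shared memory. Note: run_params appears to be an mlf-core extension; stock
# MLflow docker_env documents only image, volumes, and environment.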
entry_points:
  main:
    parameters:
      max_epochs: {type: int, default: 74}
      gpus: {type: int, default: 2}
      accelerator: {type: str, default: "dp"}
      training-batch-size: {type: int, default: 10}
      test-batch-size: {type: int, default: 60}
      lr: {type: float, default: 0.0005739979509018617}
      weight-decay: {type: float, default: 0.08843850663784153}
      gamma-factor: {type: float, default: 2.8074708243593878}
      model: {type: str, default: "u2net"}
      epsilon: {type: float, default: 2.1335101419972938e-14}
      alpha-0: {type: float, default: 0.39228916176765655}
      alpha-1: {type: float, default: 0.4476337434213018}
      alpha-2: {type: float, default: 0.8902483094365905}
      alpha-3: {type: float, default: 0.6695418278351652}
      alpha-4: {type: float, default: 0.9382751049017035}
      general-seed: {type: int, default: 0}
      pytorch-seed: {type: int, default: 0}
      log-interval: {type: int, default: 100}
    command: |
      python root_tissue_segmentation/root_tissue_segmentation.py \
        --max_epochs {max_epochs} \
        --gpus {gpus} \
        --accelerator {accelerator} \
        --training-batch-size {training-batch-size} \
        --test-batch-size {test-batch-size} \
        --lr {lr} \
        --model {model} \
        --weight-decay {weight-decay} \
        --gamma-factor {gamma-factor} \
        --epsilon {epsilon} \
        --alpha-0 {alpha-0} \
        --alpha-1 {alpha-1} \
        --alpha-2 {alpha-2} \
        --alpha-3 {alpha-3} \
        --alpha-4 {alpha-4} \
        --general-seed {general-seed} \
        --pytorch-seed {pytorch-seed} \
        --log-interval {log-interval}
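# Example invocation (a minimal sketch; assumes MLflow and the NVIDIA Container
# Toolkit are installed, so `docker run --gpus all` works on the host):
#   mlflow run . -P max_epochs=74 -P gpus=2 -P model=u2net
# Any parameter declared above can be overridden with additional -P key=value pairs.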