forked from ray-project/kuberay
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathray-data-image-resize.yaml
More file actions
62 lines (61 loc) · 1.74 KB
/
ray-data-image-resize.yaml
File metadata and controls
62 lines (61 loc) · 1.74 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
---
# RayJob manifest for the ray-data-image-resize performance test.
# {{.Name}} and {{.Image}} are Go-template placeholders filled in by the
# test harness before this document is applied; they are quoted so the
# rendered value is always a valid YAML string scalar.
apiVersion: ray.io/v1
kind: RayJob
metadata:
  name: "{{.Name}}"
  labels:
    perf-test: ray-data-image-resize
spec:
  # Tear the RayCluster down once the job reaches a terminal state.
  shutdownAfterJobFinishes: true
  entrypoint: python ray_data_image_resize.py
  # Custom submitter pod: first try to re-attach to an existing submission
  # (so a restarted submitter doesn't double-submit), and only submit the
  # job fresh if there is nothing to follow.
  submitterPodTemplate:
    spec:
      restartPolicy: Never
      containers:
        - name: submitter-job
          image: "{{.Image}}"
          command:
            - "sh"
            - "-c"
          args:
            - |
              #!/bin/sh
              ray job logs $RAY_JOB_SUBMISSION_ID --address=http://$RAY_DASHBOARD_ADDRESS --follow || \
              ray job submit --address=http://$RAY_DASHBOARD_ADDRESS --submission-id=$RAY_JOB_SUBMISSION_ID --runtime-env-json '{"env_vars":{"BUCKET_NAME":"ray-images","BUCKET_PREFIX":"images"}}' -- python ray_data_image_resize.py
          resources:
            requests:
              cpu: "10m"
  rayClusterSpec:
    rayVersion: "2.52.0"
    headGroupSpec:
      rayStartParams:
        disable-usage-stats: "true"
      template:
        spec:
          containers:
            - name: ray-head
              image: "{{.Image}}"
              ports:
                - containerPort: 6379
                  name: gcs-server
                - containerPort: 8265
                  name: dashboard
                - containerPort: 10001
                  name: client
              resources:
                requests:
                  cpu: "100m"
                  memory: "2Gi"
    workerGroupSpecs:
      - replicas: 2
        minReplicas: 1
        maxReplicas: 5
        groupName: worker-group
        template:
          spec:
            containers:
              - name: ray-worker
                image: "{{.Image}}"
                resources:
                  requests:
                    cpu: "100m"
                    memory: "2Gi"