forked from ray-project/kuberay
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathray-cluster-resource-isolation-with-overrides.gke.yaml
More file actions
61 lines (61 loc) · 1.69 KB
/
ray-cluster-resource-isolation-with-overrides.gke.yaml
File metadata and controls
61 lines (61 loc) · 1.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
---
# RayCluster with Ray resource isolation enabled on GKE.
# Each group reserves CPU/memory for Ray system processes (via cgroups) so that
# user workloads cannot starve the Ray head/worker system components.
# The GKE writable-cgroups annotation + nodeSelector are required for Ray to
# manage cgroups inside the container.
apiVersion: ray.io/v1
kind: RayCluster
metadata:
  name: raycluster-resource-isolation
spec:
  rayVersion: "2.52.0"
  headGroupSpec:
    rayStartParams:
      enable-resource-isolation: "true" # Set --enable-resource-isolation flag in `ray start`
      system-reserved-cpu: "2.0"
      system-reserved-memory: "4000000000" # 4GB
    template:
      metadata:
        annotations:
          # Grants the `ray-head` container a writable cgroup filesystem on GKE.
          node.gke.io/enable-writable-cgroups.ray-head: "true"
      spec:
        containers:
          - name: ray-head
            image: rayproject/ray:2.52.0
            resources:
              limits:
                cpu: "4"
                memory: "8Gi"
              requests:
                cpu: "4"
                memory: "8Gi"
            ports:
              - containerPort: 6379
                name: gcs-server
              - containerPort: 8265
                name: dashboard
              - containerPort: 10001
                name: client
        nodeSelector:
          # Schedule only onto nodes that allow writable cgroups.
          node.gke.io/enable-writable-cgroups: "true"
  workerGroupSpecs:
    - replicas: 1
      minReplicas: 1
      maxReplicas: 5
      groupName: workergroup
      rayStartParams:
        enable-resource-isolation: "true" # Set --enable-resource-isolation flag in `ray start`
        system-reserved-cpu: "1.0"
        system-reserved-memory: "2000000000" # 2GB
      template:
        metadata:
          annotations:
            # Grants the `ray-worker` container a writable cgroup filesystem on GKE.
            node.gke.io/enable-writable-cgroups.ray-worker: "true"
        spec:
          containers:
            - name: ray-worker
              image: rayproject/ray:2.52.0
              resources:
                limits:
                  cpu: "2"
                  memory: "8Gi"
                requests:
                  cpu: "2"
                  memory: "8Gi"
          nodeSelector:
            # Schedule only onto nodes that allow writable cgroups.
            node.gke.io/enable-writable-cgroups: "true"