# values-scenario-airflow-operator.yaml
#
# Preset: Airflow + Spark Operator
# Source: scripts/test-e2e-airflow-operator.sh (Spark 4.1)
# Components: Airflow + MinIO + Spark Operator
# Note: the Spark Operator is deployed separately, in its own operator namespace
# This preset targets the spark-standalone chart, which bundles Airflow
# For Spark 4.1, use charts/spark-3.5/charts/spark-standalone
# Spark Operator deployment (separate chart):
#   helm install spark-operator charts/spark-operator -n <operator-namespace> \
#     --set sparkJobNamespace=<job-namespace> \
#     --set image.repository=kubeflow/spark-operator \
#     --set image.tag=v1beta2-1.6.2-3.5.0 \
#     --set crds.create=false \
#     --set webhook.enable=true
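#
# Example install of this preset (illustrative sketch only; the release name,
# namespace placeholder, and values-file path are assumptions, not part of the chart):
#   helm install spark-standalone charts/spark-3.5/charts/spark-standalone \
#     -n <job-namespace> \
#     -f values-scenario-airflow-operator.yaml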
airflow:
  enabled: true
  fernetKey: "9_jzOiAmnzfASdT81H2Epx6R56z3XQP9N8vr3W76wro="
  kubernetesExecutor:
    deleteWorkerPods: false
  kubernetes:
    deleteWorkerPods: false
  scheduler:
    resources:
      requests:
        cpu: "100m"
        memory: "256Mi"
      limits:
        cpu: "500m"
        memory: "1Gi"
  webserver:
    resources:
      requests:
        cpu: "100m"
        memory: "256Mi"
      limits:
        cpu: "500m"
        memory: "1Gi"

minio:
  enabled: true
  persistence:
    enabled: false

mlflow:
  enabled: false

hiveMetastore:
  enabled: false

historyServer:
  enabled: false

sparkMaster:
  enabled: false

sparkWorker:
  enabled: false

shuffleService:
  enabled: false

ingress:
  enabled: false

security:
  podSecurityStandards: false
# DAG files for Spark Operator
# See: charts/spark-3.5/charts/spark-standalone/files/airflow/dags/spark_operator_e2e.py
# Required Airflow variables:
# - spark_image: spark-custom:4.1.0
# - spark_namespace: <namespace>
# - spark_version: 4.1.0
# - spark_eventlog_dir: s3a://spark-logs/events
# - spark_operator_serviceaccount: spark-operator-spark-operator
# - s3_endpoint: http://minio.<namespace>.svc.cluster.local:9000
# - s3_access_key: <from secret>
# - s3_secret_key: <from secret>
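#
# A minimal sketch (assumed workflow, not defined by this chart) of seeding these
# variables with the Airflow CLI, e.g. via kubectl exec into the scheduler pod;
# placeholders must be replaced with values from your deployment:
#   airflow variables set spark_image spark-custom:4.1.0
#   airflow variables set spark_namespace <namespace>
#   airflow variables set spark_version 4.1.0
#   airflow variables set spark_eventlog_dir s3a://spark-logs/events
#   airflow variables set spark_operator_serviceaccount spark-operator-spark-operator
#   airflow variables set s3_endpoint http://minio.<namespace>.svc.cluster.local:9000
#   airflow variables set s3_access_key <from secret>
#   airflow variables set s3_secret_key <from secret>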