-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathjupyter-connect-k8s-iceberg-4.1.1.yaml
More file actions
100 lines (87 loc) · 2.05 KB
/
jupyter-connect-k8s-iceberg-4.1.1.yaml
File metadata and controls
100 lines (87 loc) · 2.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
---
# Preset: Jupyter + Spark Connect + Iceberg (smoke test - local mode)
# Source: Spark 4.1.0 local mode with Iceberg libs
# Components: Connect + Jupyter (Hive Metastore disabled for smoke test)
#
# NOTE(review): indentation was lost in transit and has been reconstructed
# from Helm values conventions — confirm nesting against the chart's
# values schema (in particular which keys belong under `spark-base`).
# NOTE(review): filename says 4.1.1 but both image tags pin "4.1.0" —
# confirm which version is intended.

# Chart-wide settings shared by all subcharts.
global:
  s3:
    enabled: false
    endpoint: "http://minio:9000"
    accessKey: "minioadmin"
    secretKey: "minioadmin"
    # Path-style access is required by MinIO-compatible endpoints.
    pathStyleAccess: true
    sslEnabled: false

# Base-infrastructure subchart: all optional backends disabled for the
# smoke test; only RBAC objects are created.
spark-base:
  minio:
    enabled: false
  postgresql:
    enabled: false
  rbac:
    create: true
    serviceAccountName: "spark-41"
  core:
    enabled: false
  hiveMetastore:
    enabled: false

# Spark Connect server running Spark in local mode (no k8s executors).
connect:
  replicas: 1
  backendMode: local
  image:
    repository: ghcr.io/fall-out-bug/spark-k8s-spark-custom
    tag: "4.1.0"
    pullPolicy: IfNotPresent
  resources:
    requests:
      # Zero CPU request: schedulable anywhere; limits still cap usage.
      cpu: "0"
      memory: "512Mi"
    limits:
      cpu: "500m"
      memory: "2Gi"
  sparkConf:
    # Iceberg with Hadoop catalog (no Hive Metastore needed)
    "spark.sql.extensions": "org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions"
    "spark.sql.catalog.spark_catalog": "org.apache.iceberg.spark.SparkSessionCatalog"
    "spark.sql.catalog.spark_catalog.type": "hadoop"
    "spark.sql.catalog.local": "org.apache.iceberg.spark.SparkCatalog"
    "spark.sql.catalog.local.type": "hadoop"
    # Local-filesystem warehouse — ephemeral, suitable only for smoke tests.
    "spark.sql.catalog.local.warehouse": "/tmp/warehouse/iceberg"
    "spark.sql.iceberg.vectorization.enabled": "true"
  driver:
    host: ""
    port: 7078
  eventLog:
    enabled: false

# Jupyter frontend, pointed at the Connect service via SPARK_CONNECT_URL.
jupyter:
  enabled: true
  image:
    repository: ghcr.io/fall-out-bug/spark-k8s-jupyter-spark
    tag: "4.1.0"
    pullPolicy: IfNotPresent
  service:
    type: ClusterIP
    port: 8888
  env:
    # gRPC endpoint of the Spark Connect service (default port 15002).
    SPARK_CONNECT_URL: "sc://spark-connect:15002"
  resources:
    requests:
      cpu: "0"
      memory: "256Mi"
    limits:
      cpu: "500m"
      memory: "1Gi"
  persistence:
    enabled: false

# Everything below is switched off for the smoke test.
hiveMetastore:
  enabled: false
  initSchema: false

historyServer:
  enabled: false

ingress:
  enabled: false

security:
  podSecurityStandards: false

sparkOperator:
  enabled: false

celeborn:
  enabled: false

monitoring:
  serviceMonitor:
    enabled: false