Skip to content

Commit dfe70b3

Browse files
authored
Merge pull request #76 from shruthis4/sparkApplicationRBAC
Add OpenShift RBAC checks and smoke tests; update cluster role permissions
2 parents 120fde0 + 094e239 commit dfe70b3

File tree

8 files changed

+197
-2
lines changed

8 files changed

+197
-2
lines changed
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
# CI workflow: ScheduledSparkApplication RBAC preflight + smoke test on Kind.
name: ScheduledSpark Smoke

on:
  push:
    branches:
      - main
    paths:
      - 'examples/openshift/**'
      - 'config/**'
      - '.github/workflows/scheduledspark-smoke.yaml'
  pull_request:
    paths:
      - 'examples/openshift/**'
      - 'config/**'
      - '.github/workflows/scheduledspark-smoke.yaml'
  workflow_dispatch: {}

# Read-only token: this job only needs to check out the repository.
permissions:
  contents: read

jobs:
  smoke:
    runs-on: ubuntu-latest
    timeout-minutes: 20
    steps:
      - name: Checkout
        uses: actions/checkout@v5

      - name: Setup Kind cluster
        uses: ./.github/actions/kind-cluster-setup

      - name: Install oc
        # Third-party action pinned to a commit SHA for supply-chain safety.
        uses: redhat-actions/oc-installer@35b60c3f9757ae4301521556e1b75ff6f59f8d7c
        with:
          oc_version: 'latest'

      - name: Install Spark Operator (keep installed)
        # CLEANUP=false so the operator stays installed for the steps below.
        run: CLEANUP=false make -C examples/openshift operator-install

      - name: RBAC preflight
        env:
          NAMESPACE: spark-operator
          CONTROLLER_SA: spark-operator-controller
        run: make -C examples/openshift scheduledspark-rbac-check

      - name: ScheduledSparkApplication smoke
        env:
          NAMESPACE: spark-operator
          SCHED_NAME: rbac-scheduled-smoke
          TIMEOUT_SECONDS: "180"
        run: make -C examples/openshift scheduledspark-smoke
52+

config/rbac/clusterrole.yaml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,9 @@ rules:
4040
- apiGroups: [sparkoperator.k8s.io]
4141
resources: [scheduledsparkapplications]
4242
verbs: [get, list, watch]
43+
- apiGroups: [sparkoperator.k8s.io]
44+
resources: [scheduledsparkapplications/finalizers]
45+
verbs: [update, patch]
4346
- apiGroups: [sparkoperator.k8s.io]
4447
resources: [sparkconnects]
4548
verbs: [get, list, watch]

examples/openshift/Makefile

Lines changed: 26 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,10 @@ export PATH := $(LOCALBIN):$(PATH)
1717
# Configuration
1818
KIND_CLUSTER_NAME ?= spark-operator
1919
CLEANUP ?= true
20+
NAMESPACE ?= spark-operator
21+
CONTROLLER_SA ?= spark-operator-controller
22+
SCHED_NAME ?= rbac-scheduled-smoke
23+
TIMEOUT_SECONDS ?= 180
2024

2125
##@ General
2226

@@ -84,11 +88,32 @@ e2e-kustomize-test: ## Run Go e2e tests using Kustomize manifests for operator i
8488
@echo "Running Go e2e tests with Kustomize installation..."
8589
cd $(REPO_ROOT) && INSTALL_METHOD=kustomize go test ./examples/openshift/tests/e2e/ -v -ginkgo.v -timeout 30m
8690

91+
.PHONY: scheduledspark-rbac-check
scheduledspark-rbac-check: ## RBAC preflight for ScheduledSparkApplication.
	@echo "Running RBAC preflight for ScheduledSparkApplication..."
	chmod +x $(TESTS_DIR)/check-scheduledspark-rbac.sh
	NAMESPACE=$(NAMESPACE) CONTROLLER_SA=$(CONTROLLER_SA) \
		$(TESTS_DIR)/check-scheduledspark-rbac.sh

.PHONY: scheduledspark-smoke
scheduledspark-smoke: ## ScheduledSparkApplication smoke test.
	@echo "Running ScheduledSparkApplication smoke test..."
	chmod +x $(TESTS_DIR)/test-scheduledspark-smoke.sh
	NAMESPACE=$(NAMESPACE) SCHED_NAME=$(SCHED_NAME) TIMEOUT_SECONDS=$(TIMEOUT_SECONDS) \
		$(TESTS_DIR)/test-scheduledspark-smoke.sh

.PHONY: scheduledspark-verify
scheduledspark-verify: ## RBAC + ScheduledSparkApplication combined verification.
	$(MAKE) scheduledspark-rbac-check
	$(MAKE) scheduledspark-smoke

# NOTE(review): the merged version invoked test-docling-spark twice (once with
# CLEANUP=false, then again without) — the slowest test ran twice. It now runs
# exactly once, last, without CLEANUP=false so the final step performs cleanup.
.PHONY: test-all
test-all: ## Run all tests (operator-install, spark-pi, scheduledspark, docling).
	@echo "Running all OpenShift/KIND tests..."
	$(MAKE) operator-install CLEANUP=false
	$(MAKE) test-spark-pi CLEANUP=false
	$(MAKE) scheduledspark-verify CLEANUP=false
	$(MAKE) test-docling-spark
93118

94119
##@ Image Build/Load (CI and local)
Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
#!/usr/bin/env bash
set -euo pipefail

# RBAC preflight for ScheduledSparkApplication: verifies the controller
# ServiceAccount holds the permissions the operator needs before running the
# smoke test, and fails with an explicit message naming the missing verb/resource.
#
# Env (override as needed):
#   NAMESPACE      (default: redhat-ods-applications)
#   CONTROLLER_SA  (default: spark-operator-controller)

NAMESPACE="${NAMESPACE:-redhat-ods-applications}"
CONTROLLER_SA="${CONTROLLER_SA:-spark-operator-controller}"

fail() { echo "ERROR: $*" >&2; exit 1; }

# check VERB RESOURCE [SUBRESOURCE]
# Runs `oc auth can-i` impersonating the controller SA and fails on "no".
check() {
  local verb="$1" res="$2" sub="${3:-}"
  local cmd=(oc auth can-i "$verb" "$res" --as="system:serviceaccount:${NAMESPACE}:${CONTROLLER_SA}" -n "$NAMESPACE")
  [[ -n "$sub" ]] && cmd+=(--subresource="$sub")
  # `oc auth can-i` exits non-zero when the answer is "no"; without `|| true`
  # `set -e` would abort right here and the fail() diagnostic below would
  # never be printed.
  local out
  out="$("${cmd[@]}" 2>/dev/null || true)"
  echo "can-i $verb $res${sub:+/$sub} => $out"
  [[ "$out" == "yes" ]] || fail "Missing RBAC: $verb $res${sub:+/$sub}"
}

echo "RBAC preflight for $NAMESPACE/$CONTROLLER_SA"
# Matches the clusterrole additions: finalizer update/patch on the scheduled CR,
# plus the ability to create child apps and update their status.
check update scheduledsparkapplications finalizers
check patch scheduledsparkapplications finalizers
check create sparkapplications
check update sparkapplications status
echo "RBAC OK"
29+
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
# Minimal ScheduledSparkApplication used by the smoke test.
# The ${...} placeholders are filled in by envsubst in
# test-scheduledspark-smoke.sh before the manifest is applied.
apiVersion: sparkoperator.k8s.io/v1beta2
kind: ScheduledSparkApplication
metadata:
  name: ${SCHED_NAME}
  namespace: ${NAMESPACE}
spec:
  schedule: "@every 1m"
  timeZone: "UTC"
  concurrencyPolicy: Forbid
  # Keep only one run of history each way — this object is throwaway.
  successfulRunHistoryLimit: 1
  failedRunHistoryLimit: 1
  template:
    type: Scala
    mode: cluster
    image: ${SPARK_IMAGE}
    imagePullPolicy: IfNotPresent
    mainClass: org.apache.spark.examples.SparkPi
    mainApplicationFile: local:///opt/spark/examples/jars/spark-examples_2.12-3.5.7.jar
    arguments: ["10"]
    sparkVersion: "3.5.7"
    restartPolicy:
      type: Never
    driver:
      cores: 1
      memory: "512m"
      serviceAccount: spark-operator-spark
      securityContext: {}
    executor:
      cores: 1
      instances: 1
      memory: "512m"
      securityContext: {}
24+
File renamed without changes.
Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
#!/usr/bin/env bash
set -euo pipefail

# ScheduledSparkApplication smoke test (no RBAC checks here):
# applies a scheduled app, waits for the controller to spawn a child
# SparkApplication, and dumps diagnostics on timeout.
#
# Env (override as needed):
#   NAMESPACE         (default: redhat-ods-applications)
#   SCHED_NAME        (default: rbac-scheduled-smoke)
#   TIMEOUT_SECONDS   (default: 180)
#   SPARK_IMAGE       (default: quay.io/ssankepe/spark-openshift:3.5.7)
#   APP_YAML          (default: <script dir>/manifests/scheduledspark-smoke-app.yaml)
#   CONTROLLER_DEPLOY (default: spark-operator-controller) — operator
#                     deployment whose logs are dumped on timeout

NAMESPACE="${NAMESPACE:-redhat-ods-applications}"
SCHED_NAME="${SCHED_NAME:-rbac-scheduled-smoke}"
TIMEOUT_SECONDS="${TIMEOUT_SECONDS:-180}"
SPARK_IMAGE="${SPARK_IMAGE:-quay.io/ssankepe/spark-openshift:3.5.7}"
CONTROLLER_DEPLOY="${CONTROLLER_DEPLOY:-spark-operator-controller}"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
APP_YAML="${APP_YAML:-$SCRIPT_DIR/manifests/scheduledspark-smoke-app.yaml}"

# Render the manifest with envsubst and apply it.
apply_sched() {
  if [[ ! -f "$APP_YAML" ]]; then
    echo "ScheduledSparkApplication YAML not found: $APP_YAML" >&2
    exit 1
  fi
  # envsubst ships with gettext and may be absent on minimal runners;
  # fail with a clear message rather than a cryptic command-not-found.
  command -v envsubst >/dev/null 2>&1 || {
    echo "envsubst not found (install gettext)" >&2
    exit 1
  }
  export NAMESPACE SCHED_NAME SPARK_IMAGE
  # Restrict substitution to the variables we own so any other ${...}
  # in the manifest passes through untouched (bare envsubst replaces
  # every environment variable it finds).
  envsubst '${NAMESPACE} ${SCHED_NAME} ${SPARK_IMAGE}' < "$APP_YAML" | oc apply -f -
}

cleanup() {
  oc delete scheduledsparkapplication "${SCHED_NAME}" -n "${NAMESPACE}" --ignore-not-found || true
}
trap cleanup EXIT

echo "Apply ScheduledSparkApplication ${SCHED_NAME}"
# Ensure we always test a fresh object, not stale status from a prior run.
oc delete scheduledsparkapplication "${SCHED_NAME}" -n "${NAMESPACE}" --ignore-not-found || true
apply_sched

echo "Wait for child SparkApplication (<= ${TIMEOUT_SECONDS}s)"
start=$(date +%s)
while true; do
  # lastRun* status fields are set once the controller spawns a child app.
  child="$(oc get scheduledsparkapplication "${SCHED_NAME}" -n "${NAMESPACE}" \
    -o jsonpath='{.status.lastRunName}' 2>/dev/null || true)"
  if [[ -n "$child" ]] && oc get sparkapplication "$child" -n "${NAMESPACE}" >/dev/null 2>&1; then
    echo "Spawned: $child"
    break
  fi
  if (( $(date +%s) - start > TIMEOUT_SECONDS )); then
    echo "Timed out after ${TIMEOUT_SECONDS}s"
    oc describe scheduledsparkapplication "${SCHED_NAME}" -n "${NAMESPACE}" || true
    oc logs "deploy/${CONTROLLER_DEPLOY}" -n "${NAMESPACE}" --since=5m || true
    exit 1
  fi
  sleep 5
done

oc get scheduledsparkapplication "${SCHED_NAME}" -n "${NAMESPACE}" \
  -o jsonpath='{.status.lastRun}{" "}{.status.lastRunName}{" "}{.status.scheduleState}{"\n"}' || true
oc get sparkapplication "$child" -n "${NAMESPACE}" -o wide || true
echo "Smoke test succeeded"
62+

examples/openshift/tests/test-spark-pi.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,7 @@ echo " Image: $SPARK_IMAGE"
106106
kubectl delete sparkapplication "$APP_NAME" -n "$APP_NAMESPACE" --ignore-not-found 2>/dev/null || true
107107

108108
# Apply the SparkApplication from YAML file (using envsubst for variable substitution)
109-
APP_YAML="${APP_YAML:-$SCRIPT_DIR/spark-pi-app.yaml}"
109+
APP_YAML="${APP_YAML:-$SCRIPT_DIR/manifests/spark-pi-app.yaml}"
110110
if [ ! -f "$APP_YAML" ]; then
111111
fail "SparkApplication YAML not found: $APP_YAML"
112112
fi

0 commit comments

Comments
 (0)