|
| 1 | +#!/usr/bin/env python3 |
| 2 | +# Copyright 2025 The Kubeflow Authors. |
| 3 | +# |
| 4 | +# Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | +# you may not use this file except in compliance with the License. |
| 6 | +# You may obtain a copy of the License at |
| 7 | +# |
| 8 | +# http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | +# |
| 10 | +# Unless required by applicable law or agreed to in writing, software |
| 11 | +# distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | +# See the License for the specific language governing permissions and |
| 14 | +# limitations under the License. |
| 15 | + |
| 16 | +"""E2E Test: Connect to Existing SparkConnect Session (two-client pattern). |
| 17 | +
|
| 18 | +This example demonstrates the "bring your own server" use case where: |
| 19 | +1. A setup client creates a SparkConnect server |
| 20 | +2. A test client connects to the existing server via base_url |
| 21 | +
|
| 22 | +This validates the connect(base_url="sc://...") codepath which bypasses |
| 23 | +session creation and directly connects to an existing Spark Connect server. |
| 24 | +
|
| 25 | +Usage: |
| 26 | + # Run in-cluster only (via K8s Job): |
| 27 | + SPARK_E2E_RUN_IN_CLUSTER=1 python examples/spark/connect_existing_session.py |
| 28 | +""" |
| 29 | + |
| 30 | +import os |
| 31 | +import sys |
| 32 | +import uuid |
| 33 | + |
| 34 | +from kubeflow.common.types import KubernetesBackendConfig |
| 35 | +from kubeflow.spark import Name, SparkClient |
| 36 | +from kubeflow.spark.backends.kubernetes.utils import build_service_url |
| 37 | + |
| 38 | + |
def _backend_config():
    """Build the Kubernetes backend config for this E2E run.

    The target namespace is taken from the SPARK_TEST_NAMESPACE environment
    variable (set in CI), falling back to "spark-test".
    """
    namespace = os.environ.get("SPARK_TEST_NAMESPACE", "spark-test")
    return KubernetesBackendConfig(namespace=namespace)
| 42 | + |
| 43 | + |
| 44 | +def _unique_session_name() -> str: |
| 45 | + """Generate unique session name to avoid conflicts in E2E runs.""" |
| 46 | + return f"connect-existing-{uuid.uuid4().hex[:8]}" |
| 47 | + |
| 48 | + |
def test_connect_to_existing_session():
    """Test connect(base_url=...) with two clients.

    Two-client pattern:
        - Setup client: creates a SparkConnect server, then stops its local
          SparkSession (the server keeps running in the cluster).
        - Test client: connects to the existing server via base_url.

    Raises:
        AssertionError: If the validation query returns the wrong count.
        Exception: Any client/connection failure is propagated to the caller
            after best-effort cleanup in the finally block.
    """
    print("=" * 70)
    print("E2E: Connect to Existing SparkConnect Session")
    print("=" * 70)

    session_name = _unique_session_name()
    setup_client = None
    # Track the setup SparkSession so a failure between connect() and stop()
    # in Phase 1 does not leak it (the original only cleaned up test_spark).
    setup_spark = None
    test_spark = None

    try:
        # Phase 1: Setup client creates SparkConnect server
        print("\n[Phase 1] Creating SparkConnect server...")
        setup_client = SparkClient(backend_config=_backend_config())
        setup_spark = setup_client.connect(options=[Name(session_name)], timeout=180)

        info = setup_client.get_session(session_name)
        service_url = build_service_url(info)
        print(f" Session: {session_name}")
        print(f" URL: {service_url}")

        setup_spark.stop()
        setup_spark = None  # stopped cleanly; nothing left for finally to do
        print(" Setup SparkSession stopped (server still running)")

        # Phase 2: Test client connects via base_url — this is the codepath
        # under test: it must bypass session creation entirely.
        print("\n[Phase 2] Connecting via base_url...")
        test_client = SparkClient(backend_config=_backend_config())
        test_spark = test_client.connect(base_url=service_url)
        print(" Connected successfully!")

        # Phase 3: Validate with Spark operations
        print("\n[Phase 3] Validating...")
        count = test_spark.range(100).count()
        print(f" spark.range(100).count() = {count}")
        # Raise explicitly instead of `assert`, which is stripped under -O.
        if count != 100:
            raise AssertionError(f"Expected 100, got {count}")

        print("\n[SUCCESS] connect(base_url=...) works correctly!")

    finally:
        # Phase 4: Cleanup — best-effort; failures are reported, not raised,
        # so they never mask the original exception.
        print("\n[Phase 4] Cleanup...")
        if setup_spark:
            # Phase 1 failed after connect() but before stop(); stop it now.
            try:
                setup_spark.stop()
            except Exception as e:
                print(f" Warning: {e}")
        if test_spark:
            try:
                test_spark.stop()
            except Exception as e:
                print(f" Warning: {e}")
        if setup_client:
            try:
                setup_client.delete_session(session_name)
                print(f" Deleted {session_name}")
            except Exception as e:
                print(f" Warning: {e}")
| 106 | + |
| 107 | + |
def main():
    """Entry point: run the E2E test, exiting 0 on success and 1 on failure.

    Skips (exit 0) unless SPARK_E2E_RUN_IN_CLUSTER=1, since the server's
    service URL is only reachable from inside the cluster.
    """
    in_cluster = os.environ.get("SPARK_E2E_RUN_IN_CLUSTER")
    if in_cluster != "1":
        print("SKIP: Requires in-cluster execution (SPARK_E2E_RUN_IN_CLUSTER=1)")
        sys.exit(0)

    try:
        test_connect_to_existing_session()
    except Exception as e:
        print(f"\nFailed: {e}")
        sys.exit(1)
    sys.exit(0)
| 120 | + |
| 121 | + |
# Allow direct execution as a script (e.g. from the E2E Kubernetes Job).
if __name__ == "__main__":
    main()
0 commit comments