
Commit f616284

removed dependency

committed
1 parent 59de6fe · commit f616284

2 files changed: +0 additions, -19 deletions

pyproject.toml

Lines changed: 0 additions & 1 deletion
@@ -77,7 +77,6 @@ dependencies = [
     "pytest-timeout~=2.3.1",
     "pytest-xdist~=3.5.0",
     "ruff~=0.3.4",
-    "databricks-connect~=15.4.3",
 ]

 # store virtual env as the child of this folder. Helps VSCode (and PyCharm) to run better

tests/integration/fixtures/test_connect.py

Lines changed: 0 additions & 18 deletions
@@ -1,6 +1,5 @@
 import os
 from pytest import fixture
-from databricks.connect import DatabricksSession


 @fixture
@@ -26,23 +25,6 @@ def test_databricks_connect_serverless(serverless_env, ws, spark):
     assert not creator  # serverless clusters don't have assigned creator


-def test_databricks_connect_serverless_set_cluster_id(serverless_env, ws, request):
-    # get the initial spark session to retrieve serverless cluster id
-    spark_serverless = DatabricksSession.builder.serverless(True).getOrCreate()
-    cluster_id = spark_serverless.conf.get("spark.databricks.clusterUsageTags.clusterId")
-    ws.config.serverless_compute_id = cluster_id
-
-    # get a new spark session with the serverless cluster id set
-    spark_serverless_new = request.getfixturevalue("spark")
-
-    rows = spark_serverless_new.sql("SELECT 1").collect()
-    assert rows[0][0] == 1
-
-    assert spark_serverless_new.conf.get("spark.databricks.clusterUsageTags.clusterId") == cluster_id
-    creator = get_cluster_creator(spark_serverless_new, ws)
-    assert not creator  # serverless clusters don't have assigned creator
-
-
 def get_cluster_creator(spark, ws):
     """
     Get the creator of the cluster that the Spark session is connected to.
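
For reference, the deleted test exercised the serverless session API provided by the removed databricks-connect dependency. Below is a minimal sketch of that usage, assuming databricks-connect is installed separately now that this commit drops it from pyproject.toml:

# Minimal sketch, assuming databricks-connect is installed separately
# (this commit removes it from the project's declared dependencies).
from databricks.connect import DatabricksSession

# Build a serverless Spark session and read its cluster id back from the
# clusterUsageTags Spark conf, as the removed test did.
spark = DatabricksSession.builder.serverless(True).getOrCreate()
cluster_id = spark.conf.get("spark.databricks.clusterUsageTags.clusterId")

# Sanity-check the session with a trivial query.
rows = spark.sql("SELECT 1").collect()
assert rows[0][0] == 1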
