@@ -33,17 +33,13 @@ def spark_serverless_cluster_id(ws):
def test_databricks_connect(debug_env_bugfix, ws, spark):
    """Smoke-test a Databricks Connect session on a classic (non-serverless) cluster.

    Runs a trivial SQL query to prove the session is live, then asserts the
    attached cluster is NOT serverless (classic clusters have an assigned
    creator, which is what ``is_serverless_cluster`` keys off).
    """
    rows = spark.sql("SELECT 1").collect()
    assert rows[0][0] == 1
    assert not is_serverless_cluster(spark, ws)


4038
def test_databricks_connect_serverless(serverless_env, ws, spark):
    """Smoke-test a Databricks Connect session on a serverless cluster.

    Runs a trivial SQL query to prove the session is live, then asserts the
    attached cluster IS serverless (serverless clusters have no assigned
    creator — see ``is_serverless_cluster``).
    """
    rows = spark.sql("SELECT 1").collect()
    assert rows[0][0] == 1
    assert is_serverless_cluster(spark, ws)


4844
4945def test_databricks_connect_serverless_set_cluster_id (ws , spark_serverless_cluster_id , spark ):
@@ -52,15 +48,13 @@ def test_databricks_connect_serverless_set_cluster_id(ws, spark_serverless_clust
5248
5349 cluster_id = spark .conf .get ("spark.databricks.clusterUsageTags.clusterId" )
5450 assert spark_serverless_cluster_id == cluster_id
55-
56- creator = get_cluster_creator (spark , ws )
57- assert not creator # serverless clusters don't have assigned creator
51+ assert is_serverless_cluster (spark , ws )
5852
5953
def is_serverless_cluster(spark: SparkSession, ws: WorkspaceClient) -> bool:
    """
    Check if the cluster the Spark session is connected to is serverless.

    The cluster id is read from the session conf
    (``spark.databricks.clusterUsageTags.clusterId``) and looked up via the
    workspace Clusters API. Serverless clusters are Databricks-managed and
    therefore have no ``creator_user_name`` assigned, so an empty/absent
    creator is used as the serverless indicator.

    NOTE(review): this is a heuristic — it assumes every non-serverless
    cluster reachable here has a creator set; confirm against the workspace.
    """
    cluster_id = spark.conf.get("spark.databricks.clusterUsageTags.clusterId")
    creator = ws.clusters.get(cluster_id).creator_user_name
    return not creator  # serverless clusters don't have assigned creator
0 commit comments