@@ -255,6 +255,7 @@ class SubdocIntegrationTest extends SparkOperationalSimpleTest {
255255
256256 initialDataWithNested.write
257257 .format("couchbase.kv")
258+ .option(KeyValueOptions.Timeout, "30s")
258259 .option(KeyValueOptions.Bucket, testResources.bucketName)
259260 .option(KeyValueOptions.Scope, testResources.scopeName)
260261 .option(KeyValueOptions.Collection, TestCollection)
@@ -265,6 +266,7 @@ class SubdocIntegrationTest extends SparkOperationalSimpleTest {
265266 // Read the documents with CAS values included
266267 val docsWithCas = spark.read
267268 .format("couchbase.query")
269+ .option(KeyValueOptions .Timeout , " 30s" )
268270 .option(QueryOptions.Scope, testResources.scopeName)
269271 .option(QueryOptions.Collection, TestCollection)
270272 .option(QueryOptions.OutputCas, "true")
@@ -276,17 +278,18 @@ class SubdocIntegrationTest extends SparkOperationalSimpleTest {
276278 .select(
277279 docsWithCas("__META_ID"),
278280 docsWithCas("__META_CAS"),
279- docsWithCas("name"),
280- docsWithCas("age"),
281- docsWithCas("role")
281+ docsWithCas("profile.name"),
282+ docsWithCas("profile.age"),
283+ docsWithCas("profile.role")
282284 )
283- .withColumnRenamed("name", "replace:profile.name")
284- .withColumnRenamed("age", "replace:profile.age")
285- .withColumnRenamed("role", "replace:profile.role")
285+ .withColumnRenamed("profile.name", "replace:profile.name")
286+ .withColumnRenamed("profile.age", "replace:profile.age")
287+ .withColumnRenamed("profile.role", "replace:profile.role")
286288
287289 // Write the transformed data back using subdoc replace with CAS
288290 transformedData.write
289291 .format("couchbase.kv")
292+ .option(KeyValueOptions.Timeout, "30s")
290293 .option(KeyValueOptions.Bucket, testResources.bucketName)
291294 .option(KeyValueOptions.Scope, testResources.scopeName)
292295 .option(KeyValueOptions.Collection, TestCollection)
0 commit comments