
Commit c6589c0

Fix AWS credentials forwarding
The GitHub Action we use in our `tests-aws.yml` workflow exposes the AWS credentials by setting environment variables. We copy them into the Spark containers.
1 parent 5cd9770 commit c6589c0

File tree: 8 files changed, +29 / -33 lines


CONTRIBUTING.md

Lines changed: 8 additions & 2 deletions

```diff
@@ -28,13 +28,19 @@ Tests are implemented in the `tests` sbt submodule. They simulate the submission
 sbt testOnly com.scylladb.migrator.BasicMigrationTest
 ~~~
 
-Or, to run the tests that access AWS, first configure your AWS credentials with `aws configure`, and then:
+Or, to run the tests that access AWS:
 
 ~~~ sh
-AWS_REGION=us-east-1 \
+export AWS_REGION=us-east-1
+export AWS_ACCESS_KEY_ID=xxx
+export AWS_SECRET_ACCESS_KEY=yyy
+export AWS_SESSION_TOKEN=zzz
+docker compose -f docker-compose-tests.yml up -d
 sbt "testOnly -- --include-categories=com.scylladb.migrator.AWS"
 ~~~
 
+Replace `xxx`, `yyy`, and `zzz` with your actual credentials.
+
 4. Ultimately, stop the Docker containers
 
 ~~~ sh
```
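Note that all four variables must be exported in the shell before `docker compose` starts the containers, otherwise Compose forwards empty values. As a hypothetical pre-flight check (not part of the repository), something like this could fail fast when a variable is missing:

```scala
// Hypothetical helper, not in the repository: fail fast if the AWS tests
// would run without the credentials they need in the environment.
object CheckAwsEnv extends App {
  val required =
    List("AWS_REGION", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN")
  val missing = required.filterNot(sys.env.contains)
  require(missing.isEmpty, s"Missing environment variables: ${missing.mkString(", ")}")
  println("All AWS environment variables are set.")
}
```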

build.sbt

Lines changed: 1 addition & 1 deletion

```diff
@@ -50,7 +50,7 @@ lazy val migrator = (project in file("migrator")).enablePlugins(BuildInfoPlugin)
       "software.amazon.awssdk" % "sts" % awsSdkVersion,
       "com.scylladb" %% "spark-scylladb-connector" % "4.0.0",
       "com.github.jnr" % "jnr-posix" % "3.1.19", // Needed by the Spark ScyllaDB connector
-      "com.scylladb.alternator" % "emr-dynamodb-hadoop" % "5.7.1",
+      "com.scylladb.alternator" % "emr-dynamodb-hadoop" % "5.8.0",
       "com.scylladb.alternator" % "load-balancing" % "1.0.0",
       "io.circe" %% "circe-generic" % "0.14.7",
       "io.circe" %% "circe-parser" % "0.14.7",
```

docker-compose-tests.yml

Lines changed: 8 additions & 1 deletion

```diff
@@ -53,6 +53,10 @@ services:
     command: master
     environment:
       SPARK_PUBLIC_DNS: localhost
+      AWS_REGION: ${AWS_REGION}
+      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
+      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
+      AWS_SESSION_TOKEN: ${AWS_SESSION_TOKEN}
     expose:
       - 5005
     ports:
@@ -63,7 +67,6 @@ services:
       - ./migrator/target/scala-2.13:/jars
       - ./tests/src/test/configurations:/app/configurations
       - ./tests/docker/spark-master:/app/savepoints
-      - ./tests/docker/aws-profile:/root/.aws
       - ./tests/docker/parquet:/app/parquet
 
   spark-worker:
@@ -74,6 +77,10 @@ services:
       SPARK_WORKER_MEMORY: 4G
       SPARK_WORKER_WEBUI_PORT: 8081
       SPARK_PUBLIC_DNS: localhost
+      AWS_REGION: ${AWS_REGION}
+      AWS_ACCESS_KEY_ID: ${AWS_ACCESS_KEY_ID}
+      AWS_SECRET_ACCESS_KEY: ${AWS_SECRET_ACCESS_KEY}
+      AWS_SESSION_TOKEN: ${AWS_SESSION_TOKEN}
     expose:
       - 5006
     ports:
```
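The `${AWS_…}` references are interpolated by Docker Compose from the host shell, so the variables exported per CONTRIBUTING.md (or set by the GitHub Action) become visible inside the Spark master and worker containers. There the AWS SDK picks them up through its environment credentials provider; a minimal sketch of that resolution step, assuming the AWS SDK v2 is on the classpath:

```scala
import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider

// The environment provider reads AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
// and (if present) AWS_SESSION_TOKEN; it throws if the variables are absent.
val credentials = EnvironmentVariableCredentialsProvider.create().resolveCredentials()
println(s"Resolved credentials for access key ${credentials.accessKeyId().take(4)}...")
```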

tests/docker/aws-profile/.gitignore

Lines changed: 0 additions & 4 deletions
This file was deleted.

tests/src/test/configurations/dynamodb-to-alternator-streaming-skip-snapshot.yaml

Lines changed: 2 additions & 2 deletions

```diff
@@ -1,11 +1,11 @@
 source:
   type: dynamodb
-  table: StreamedItemsSkipSnapshotTest
+  table: migrator-StreamedItemsSkipSnapshotTest
   region: us-east-1
 
 target:
   type: dynamodb
-  table: StreamedItemsSkipSnapshotTest
+  table: migrator-StreamedItemsSkipSnapshotTest
   region: dummy
   endpoint:
     host: http://scylla
```

tests/src/test/configurations/dynamodb-to-alternator-streaming.yaml

Lines changed: 2 additions & 2 deletions

```diff
@@ -1,11 +1,11 @@
 source:
   type: dynamodb
-  table: StreamedItemsTest
+  table: migrator-StreamedItemsTest
   region: us-east-1
 
 target:
   type: dynamodb
-  table: StreamedItemsTest
+  table: migrator-StreamedItemsTest
   region: dummy
   endpoint:
     host: http://scylla
```

tests/src/test/scala/com/scylladb/migrator/alternator/MigratorSuite.scala

Lines changed: 1 addition & 14 deletions

```diff
@@ -198,20 +198,7 @@ abstract class MigratorSuiteWithAWS extends MigratorSuite {
   lazy val sourceDDb: Fixture[DynamoDbClient] = new Fixture[DynamoDbClient]("sourceDDb") {
     private var client: DynamoDbClient = null
     def apply(): DynamoDbClient = client
-    override def beforeAll(): Unit = {
-      // Provision the AWS credentials on the Spark nodes via a Docker volume
-      val localAwsCredentials =
-        Paths.get(sys.props("user.home"), ".aws", "credentials")
-          .toAbsolutePath
-      (s"cp ${localAwsCredentials} docker/aws-profile/credentials").!!
-
-      val region = Region.of(sys.env("AWS_REGION"))
-      client =
-        DynamoDbClient
-          .builder()
-          .region(region)
-          .build()
-    }
+    override def beforeAll(): Unit = client = DynamoDbClient.create()
     override def afterAll(): Unit = client.close()
   }
```
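This simplification works because `DynamoDbClient.create()` relies on the SDK's default region and credential provider chains, which read the same environment variables that are now forwarded to the containers. Roughly, and only as a sketch of the SDK's documented defaults rather than the repository's code, it is equivalent to:

```scala
import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider
import software.amazon.awssdk.regions.providers.DefaultAwsRegionProviderChain
import software.amazon.awssdk.services.dynamodb.DynamoDbClient

// Sketch of what DynamoDbClient.create() resolves: the default chains look
// up AWS_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and
// AWS_SESSION_TOKEN (among other sources), so neither explicit configuration
// nor the ~/.aws credentials volume is needed anymore.
val client: DynamoDbClient =
  DynamoDbClient
    .builder()
    .region(new DefaultAwsRegionProviderChain().getRegion)
    .credentialsProvider(DefaultCredentialsProvider.create())
    .build()
```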

tests/src/test/scala/com/scylladb/migrator/alternator/StreamedItemsTest.scala

Lines changed: 7 additions & 7 deletions

```diff
@@ -10,9 +10,9 @@ import scala.util.chaining.scalaUtilChainingOps
 
 class StreamedItemsTest extends MigratorSuiteWithAWS {
 
-  override val munitTimeout: Duration = 120.seconds
+  override val munitTimeout: Duration = 300.seconds
 
-  withTable("StreamedItemsTest").test("Stream changes") { tableName =>
+  withTable("migrator-StreamedItemsTest").test("Stream changes") { tableName =>
     val configFileName = "dynamodb-to-alternator-streaming.yaml"
 
     // Populate the source table
@@ -27,7 +27,7 @@ class StreamedItemsTest extends MigratorSuiteWithAWS {
     submitSparkJobProcess(configFileName, "com.scylladb.migrator.Migrator")
       .run(ProcessLogger { log =>
         sparkLogs ++= log
-        // println(log) // Uncomment to see the logs
+        println(log)
       })
 
     awaitAtMost(60.seconds) {
@@ -53,7 +53,7 @@ class StreamedItemsTest extends MigratorSuiteWithAWS {
     sourceDDb().putItem(PutItemRequest.builder().tableName(tableName).item(item2Data.asJava).build())
 
     // Check that the added item has also been migrated
-    awaitAtMost(60.seconds) {
+    awaitAtMost(120.seconds) {
       targetAlternator()
         .getItem(GetItemRequest.builder().tableName(tableName).key(keys2.asJava).build())
         .tap { itemResult =>
@@ -71,7 +71,7 @@ class StreamedItemsTest extends MigratorSuiteWithAWS {
     deleteStreamTable(tableName)
   }
 
-  withTable("StreamedItemsSkipSnapshotTest").test("Stream changes but skip initial snapshot") { tableName =>
+  withTable("migrator-StreamedItemsSkipSnapshotTest").test("Stream changes but skip initial snapshot") { tableName =>
     val configFileName = "dynamodb-to-alternator-streaming-skip-snapshot.yaml"
 
     // Populate the source table
@@ -86,7 +86,7 @@ class StreamedItemsTest extends MigratorSuiteWithAWS {
     submitSparkJobProcess(configFileName, "com.scylladb.migrator.Migrator")
       .run(ProcessLogger { (log: String) =>
         sparkLogs ++= log
-        // println(log) // Uncomment to see the logs
+        println(log)
       })
 
     // Wait for the changes to start being streamed
@@ -104,7 +104,7 @@ class StreamedItemsTest extends MigratorSuiteWithAWS {
     sourceDDb().putItem(PutItemRequest.builder().tableName(tableName).item(item2Data.asJava).build())
 
     // Check that only the second item has been migrated
-    awaitAtMost(60.seconds) {
+    awaitAtMost(120.seconds) {
       targetAlternator()
         .getItem(GetItemRequest.builder().tableName(tableName).key(keys2.asJava).build())
         .tap { itemResult =>
```
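The longer `awaitAtMost` budgets account for DynamoDB Streams propagation delays. The helper's implementation is not part of this diff; a hypothetical polling helper in the same spirit could look like:

```scala
import scala.concurrent.duration._

// Hypothetical sketch (the suite's actual helper may differ): re-run the
// assertion block until it stops throwing or the deadline expires, polling
// once per second. Once time runs out, the latest failure propagates.
def awaitAtMost[A](timeout: FiniteDuration)(assertion: => A): A = {
  val deadline = timeout.fromNow
  def loop(): A =
    try assertion
    catch {
      case _: Throwable if deadline.hasTimeLeft() =>
        Thread.sleep(1000)
        loop()
    }
  loop()
}
```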
