
Commit 9bfa583

[Snowflake Connector] Add test to verify JDBC tasks produce expected SQL.
1 parent 1ad83f2

2 files changed, +54 −0 lines


dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnectorTest.java

Lines changed: 41 additions & 0 deletions
@@ -16,11 +16,23 @@
  */
 package com.google.edwmigration.dumper.application.dumper.connector.snowflake;
 
+import com.google.common.collect.ImmutableMap;
+import com.google.common.io.Resources;
+import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
 import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
 import com.google.edwmigration.dumper.application.dumper.connector.MetadataConnector;
+import com.google.edwmigration.dumper.application.dumper.task.JdbcSelectTask;
+import com.google.edwmigration.dumper.application.dumper.task.Task;
+import com.google.edwmigration.dumper.plugin.lib.dumper.spi.CoreMetadataDumpFormat;
 import com.google.edwmigration.dumper.plugin.lib.dumper.spi.SnowflakeMetadataDumpFormat;
 import com.google.edwmigration.dumper.test.TestUtils;
 import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import javax.annotation.Nonnull;
 import org.junit.Assert;
 import org.junit.Assume;
@@ -124,4 +136,33 @@ public void testDatabaseNameFailure() {
 
     Assert.assertTrue(exception.getMessage().startsWith("Database name not found"));
   }
+
+  @Test
+  public void connector_generatesExpectedSql() throws IOException {
+    Map<String, String> actualSqls = collectSqlStatements();
+    TaskSqlMap expectedSqls =
+        CoreMetadataDumpFormat.MAPPER.readValue(
+            Resources.toString(
+                Resources.getResource("connector/snowflake/jdbc-tasks-sql.yaml"),
+                StandardCharsets.UTF_8),
+            TaskSqlMap.class);
+
+    Assert.assertEquals(expectedSqls.size(), actualSqls.size());
+    Assert.assertEquals(expectedSqls.keySet(), actualSqls.keySet());
+    for (String name : expectedSqls.keySet()) {
+      Assert.assertEquals(expectedSqls.get(name), actualSqls.get(name));
+    }
+  }
+
+  private static Map<String, String> collectSqlStatements() throws IOException {
+    List<Task<?>> tasks = new ArrayList<>();
+    SnowflakeMetadataConnector connector = new SnowflakeMetadataConnector();
+    connector.addTasksTo(tasks, new ConnectorArguments("--connector", connector.getName()));
+    return tasks.stream()
+        .filter(t -> t instanceof JdbcSelectTask)
+        .map(t -> (JdbcSelectTask) t)
+        .collect(ImmutableMap.toImmutableMap(Task::getName, JdbcSelectTask::getSql));
+  }
+
+  static class TaskSqlMap extends HashMap<String, String> {}
 }
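
When the connector's SQL changes, the golden file has to be regenerated to match. A minimal sketch of how that could be done, reusing the same calls the test makes; the RegenerateGoldenFile class is hypothetical, not part of this commit, and is assumed to live in the same package as SnowflakeMetadataConnector:

// Hypothetical helper, not part of this commit: prints each JDBC task's
// name and SQL as "name: sql" lines in the golden file's layout.
import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
import com.google.edwmigration.dumper.application.dumper.task.JdbcSelectTask;
import com.google.edwmigration.dumper.application.dumper.task.Task;
import java.util.ArrayList;
import java.util.List;

public class RegenerateGoldenFile {
  public static void main(String[] args) throws Exception {
    List<Task<?>> tasks = new ArrayList<>();
    SnowflakeMetadataConnector connector = new SnowflakeMetadataConnector();
    // Same call the test uses to materialize the connector's task list.
    connector.addTasksTo(tasks, new ConnectorArguments("--connector", connector.getName()));
    for (Task<?> task : tasks) {
      if (task instanceof JdbcSelectTask) {
        JdbcSelectTask jdbcTask = (JdbcSelectTask) task;
        // One "name: sql" pair per line, matching jdbc-tasks-sql.yaml.
        System.out.printf("%s: \"%s\"%n", jdbcTask.getName(), jdbcTask.getSql());
      }
    }
  }
}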
dumper/app/src/test/resources/connector/snowflake/jdbc-tasks-sql.yaml

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+databases-au.csv: "SELECT database_name, database_owner FROM SNOWFLAKE.ACCOUNT_USAGE.DATABASES WHERE DELETED IS NULL"
+databases.csv: "SELECT database_name, database_owner FROM INFORMATION_SCHEMA.DATABASES"
+schemata-au.csv: "SELECT catalog_name, schema_name FROM SNOWFLAKE.ACCOUNT_USAGE.SCHEMATA WHERE DELETED IS NULL"
+schemata.csv: "SELECT catalog_name, schema_name FROM INFORMATION_SCHEMA.SCHEMATA"
+tables-au.csv: "SELECT table_catalog, table_schema, table_name, table_type, row_count, bytes, clustering_key FROM SNOWFLAKE.ACCOUNT_USAGE.TABLES WHERE DELETED IS NULL"
+tables.csv: "SELECT table_catalog, table_schema, table_name, table_type, row_count, bytes, clustering_key FROM INFORMATION_SCHEMA.TABLES"
+external_tables.csv: "SELECT table_catalog, table_schema, table_name, location, file_format_name, file_format_type FROM INFORMATION_SCHEMA.EXTERNAL_TABLES WHERE table_schema != 'INFORMATION_SCHEMA'"
+columns-au.csv: "SELECT table_catalog, table_schema, table_name, ordinal_position, column_name, data_type FROM SNOWFLAKE.ACCOUNT_USAGE.COLUMNS WHERE DELETED IS NULL"
+columns.csv: "SELECT table_catalog, table_schema, table_name, ordinal_position, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS"
+views-au.csv: "SELECT table_catalog, table_schema, table_name, view_definition FROM SNOWFLAKE.ACCOUNT_USAGE.VIEWS WHERE DELETED IS NULL"
+views.csv: "SELECT table_catalog, table_schema, table_name, view_definition FROM INFORMATION_SCHEMA.VIEWS"
+functions-au.csv: "SELECT function_schema, function_name, data_type, argument_signature FROM SNOWFLAKE.ACCOUNT_USAGE.FUNCTIONS WHERE DELETED IS NULL"
+functions.csv: "SELECT function_schema, function_name, data_type, argument_signature FROM INFORMATION_SCHEMA.FUNCTIONS"
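
The golden file maps each task's output file name to the SQL it is expected to run; the -au variants query SNOWFLAKE.ACCOUNT_USAGE (filtering out deleted objects with WHERE DELETED IS NULL), while the plain variants query INFORMATION_SCHEMA. For reference, a minimal standalone sketch of deserializing this file with a plain Jackson YAML mapper, assuming jackson-dataformat-yaml is on the classpath; the GoldenFileDemo class is hypothetical, and the test itself uses the project's CoreMetadataDumpFormat.MAPPER instead:

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;

public class GoldenFileDemo {
  // Mirrors the test's TaskSqlMap: a plain String-to-String map.
  static class TaskSqlMap extends HashMap<String, String> {}

  public static void main(String[] args) throws IOException {
    ObjectMapper yaml = new ObjectMapper(new YAMLFactory());
    // Load the golden file from the test classpath.
    try (InputStream in =
        GoldenFileDemo.class.getResourceAsStream("/connector/snowflake/jdbc-tasks-sql.yaml")) {
      Map<String, String> expected = yaml.readValue(in, TaskSqlMap.class);
      expected.forEach((file, sql) -> System.out.println(file + " -> " + sql));
    }
  }
}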
