@@ -16,11 +16,23 @@
*/
package com.google.edwmigration.dumper.application.dumper.connector.snowflake;

import com.google.common.collect.ImmutableMap;
import com.google.common.io.Resources;
import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
import com.google.edwmigration.dumper.application.dumper.connector.MetadataConnector;
import com.google.edwmigration.dumper.application.dumper.task.JdbcSelectTask;
import com.google.edwmigration.dumper.application.dumper.task.Task;
import com.google.edwmigration.dumper.plugin.lib.dumper.spi.CoreMetadataDumpFormat;
import com.google.edwmigration.dumper.plugin.lib.dumper.spi.SnowflakeMetadataDumpFormat;
import com.google.edwmigration.dumper.test.TestUtils;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nonnull;
import org.junit.Assert;
import org.junit.Assume;
@@ -124,4 +136,33 @@ public void testDatabaseNameFailure() {

Assert.assertTrue(exception.getMessage().startsWith("Database name not found"));
}

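/** Verifies that every JDBC task generated by the connector emits exactly the SQL pinned in the YAML resource. */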
@Test
public void connector_generatesExpectedSql() throws IOException {
Map<String, String> actualSqls = collectSqlStatements();
TaskSqlMap expectedSqls =
CoreMetadataDumpFormat.MAPPER.readValue(
Resources.toString(
Resources.getResource("connector/snowflake/jdbc-tasks-sql.yaml"),
StandardCharsets.UTF_8),
TaskSqlMap.class);

Assert.assertEquals(expectedSqls.size(), actualSqls.size());
Assert.assertEquals(expectedSqls.keySet(), actualSqls.keySet());
for (String name : expectedSqls.keySet()) {
Assert.assertEquals(expectedSqls.get(name), actualSqls.get(name));
}
}

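/** Plans the connector's tasks and maps each JDBC task's name (the output CSV file) to its SQL text. */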
private static Map<String, String> collectSqlStatements() throws IOException {
List<Task<?>> tasks = new ArrayList<>();
SnowflakeMetadataConnector connector = new SnowflakeMetadataConnector();
connector.addTasksTo(tasks, new ConnectorArguments("--connector", connector.getName()));
return tasks.stream()
.filter(t -> t instanceof JdbcSelectTask)
.map(t -> (JdbcSelectTask) t)
.collect(ImmutableMap.toImmutableMap(Task::getName, JdbcSelectTask::getSql));
}

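/** Concrete map subtype so MAPPER.readValue can bind the YAML without a generic TypeReference. */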
static class TaskSqlMap extends HashMap<String, String> {}
}
connector/snowflake/jdbc-tasks-sql.yaml (new file)
@@ -0,0 +1,13 @@
databases-au.csv: "SELECT database_name, database_owner FROM SNOWFLAKE.ACCOUNT_USAGE.DATABASES WHERE DELETED IS NULL"
databases.csv: "SELECT database_name, database_owner FROM INFORMATION_SCHEMA.DATABASES"
schemata-au.csv: "SELECT catalog_name, schema_name FROM SNOWFLAKE.ACCOUNT_USAGE.SCHEMATA WHERE DELETED IS NULL"
schemata.csv: "SELECT catalog_name, schema_name FROM INFORMATION_SCHEMA.SCHEMATA"
tables-au.csv: "SELECT table_catalog, table_schema, table_name, table_type, row_count, bytes, clustering_key FROM SNOWFLAKE.ACCOUNT_USAGE.TABLES WHERE DELETED IS NULL"
tables.csv: "SELECT table_catalog, table_schema, table_name, table_type, row_count, bytes, clustering_key FROM INFORMATION_SCHEMA.TABLES"
external_tables.csv: "SELECT table_catalog, table_schema, table_name, location, file_format_name, file_format_type FROM INFORMATION_SCHEMA.EXTERNAL_TABLES WHERE table_schema != 'INFORMATION_SCHEMA'"
columns-au.csv: "SELECT table_catalog, table_schema, table_name, ordinal_position, column_name, data_type FROM SNOWFLAKE.ACCOUNT_USAGE.COLUMNS WHERE DELETED IS NULL"
columns.csv: "SELECT table_catalog, table_schema, table_name, ordinal_position, column_name, data_type FROM INFORMATION_SCHEMA.COLUMNS"
views-au.csv: "SELECT table_catalog, table_schema, table_name, view_definition FROM SNOWFLAKE.ACCOUNT_USAGE.VIEWS WHERE DELETED IS NULL"
views.csv: "SELECT table_catalog, table_schema, table_name, view_definition FROM INFORMATION_SCHEMA.VIEWS"
functions-au.csv: "SELECT function_schema, function_name, data_type, argument_signature FROM SNOWFLAKE.ACCOUNT_USAGE.FUNCTIONS WHERE DELETED IS NULL"
functions.csv: "SELECT function_schema, function_name, data_type, argument_signature FROM INFORMATION_SCHEMA.FUNCTIONS"