diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/ConnectorArguments.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/ConnectorArguments.java index a981845ca..c285d1580 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/ConnectorArguments.java +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/ConnectorArguments.java @@ -18,7 +18,6 @@ import static com.google.common.base.MoreObjects.firstNonNull; import static com.google.common.collect.ImmutableList.toImmutableList; -import static com.google.edwmigration.dumper.application.dumper.utils.OptionalUtils.optionallyWhen; import static java.time.temporal.ChronoUnit.DAYS; import static java.time.temporal.ChronoUnit.HOURS; import static java.util.Arrays.stream; @@ -27,7 +26,6 @@ import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects.ToStringHelper; import com.google.common.base.Predicates; -import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.edwmigration.dumper.application.dumper.ZonedParser.DayOffset; import com.google.edwmigration.dumper.application.dumper.connector.Connector; @@ -302,6 +300,14 @@ public class ConnectorArguments extends DefaultArguments { private final OptionSpec optionOutputContinue = parser.accepts("continue", "Continues writing a previous output file."); + /** + * (Deprecated) earliest timestamp of logs to extract. + * + *
<p>
If the user specifies an earliest start time there will be extraneous empty dump files + * because we always iterate over the full 7 trailing days; maybe it's worth preventing that in + * the future. To do that, we should require getQueryLogEarliestTimestamp() to parse and return an + * ISO instant, not a database-server-specific format. + */ @Deprecated private final OptionSpec optionQueryLogEarliestTimestamp = parser @@ -590,10 +596,19 @@ public class ConnectorArguments extends DefaultArguments { private ConnectorProperties connectorProperties; - private final PasswordReader passwordReader = new PasswordReader(); + private final PasswordReader passwordReader; public ConnectorArguments(@Nonnull String... args) throws IOException { + this(Arrays.asList(args), new PasswordReader()); + } + + private ConnectorArguments(@Nonnull List args, @Nonnull PasswordReader passwordReader) { super(args); + this.passwordReader = passwordReader; + } + + public static ConnectorArguments create(@Nonnull List args) { + return new ConnectorArguments(args, new PasswordReader()); } @Override @@ -751,10 +766,6 @@ public boolean isAssessment() { return getOptions().has(optionAssessment); } - private Optional optionAsOptional(OptionSpec spec) { - return optionallyWhen(getOptions().has(spec), () -> getOptions().valueOf(spec)); - } - @Nonnull public Predicate getSchemaPredicate() { return toPredicate(getSchemata()); @@ -785,7 +796,11 @@ public String getUserOrFail() { */ @Nonnull public Optional getPasswordIfFlagProvided() { - return optionallyWhen(getOptions().has(optionPass), this::getPasswordOrPrompt); + if (getOptions().has(optionPass)) { + return Optional.of(getPasswordOrPrompt()); + } else { + return Optional.empty(); + } } @Nonnull @@ -832,7 +847,15 @@ public List getConfiguration() { } public Optional getOutputFile() { - return optionAsOptional(optionOutput).filter(file -> !Strings.isNullOrEmpty(file)); + if (!getOptions().has(optionOutput)) { + return Optional.empty(); + } + String file = getOptions().valueOf(optionOutput); + if (file == null || file.isEmpty()) { + return Optional.empty(); + } else { + return Optional.of(file); + } } public boolean isOutputContinue() { diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/DefaultArguments.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/DefaultArguments.java index c87a4df9a..23c150ccb 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/DefaultArguments.java +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/DefaultArguments.java @@ -23,6 +23,7 @@ import java.io.PrintStream; import java.util.Arrays; import java.util.Comparator; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; @@ -36,15 +37,10 @@ import joptsimple.ValueConversionException; import joptsimple.ValueConverter; import org.anarres.jdiagnostics.ProductMetadata; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** @author shevek */ public class DefaultArguments { - @SuppressWarnings("UnusedVariable") - private static final Logger logger = LoggerFactory.getLogger(DefaultArguments.class); - public static class BooleanValueConverter implements ValueConverter { private final String[] V_TRUE = {"true", "t", "yes", "y", "1"}; @@ -96,9 +92,8 @@ public String valuePattern() { private final String[] args; private OptionSet options; - @SuppressWarnings("EI_EXPOSE_REP2") - public DefaultArguments(@Nonnull String[] args) { - 
this.args = args; + DefaultArguments(@Nonnull List args) { + this.args = args.toArray(new String[0]); } @Nonnull diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/AbstractSnowflakeConnector.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/AbstractSnowflakeConnector.java index a4236692f..4bd1dc439 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/AbstractSnowflakeConnector.java +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/AbstractSnowflakeConnector.java @@ -16,9 +16,11 @@ */ package com.google.edwmigration.dumper.application.dumper.connector.snowflake; +import static com.google.common.base.CaseFormat.UPPER_CAMEL; +import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE; + import com.google.common.base.CharMatcher; import com.google.common.base.Joiner; -import com.google.common.collect.ImmutableList; import com.google.edwmigration.dumper.application.dumper.ConnectorArguments; import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException; import com.google.edwmigration.dumper.application.dumper.annotations.RespectsArgumentDriver; @@ -34,16 +36,12 @@ import com.google.edwmigration.dumper.application.dumper.connector.Connector; import com.google.edwmigration.dumper.application.dumper.handle.Handle; import com.google.edwmigration.dumper.application.dumper.handle.JdbcHandle; -import com.google.edwmigration.dumper.application.dumper.task.AbstractJdbcTask; -import com.google.edwmigration.dumper.application.dumper.task.Summary; -import com.google.edwmigration.dumper.application.dumper.task.Task; import java.sql.Driver; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import java.util.Properties; import javax.annotation.Nonnull; -import javax.annotation.Nullable; import javax.sql.DataSource; import org.apache.commons.lang3.StringUtils; import org.springframework.jdbc.core.JdbcTemplate; @@ -103,35 +101,14 @@ public Handle open(@Nonnull ConnectorArguments arguments) @Override public final void validate(@Nonnull ConnectorArguments arguments) { - ArrayList messages = new ArrayList<>(); - MetadataDumperUsageException exception = null; - if (arguments.isPasswordFlagProvided() && arguments.isPrivateKeyFileProvided()) { String inconsistentAuth = "Private key authentication method can't be used together with user password. " + "If the private key file is encrypted, please use --" + ConnectorArguments.OPT_PRIVATE_KEY_PASSWORD + " to specify the key password."; - messages.add(inconsistentAuth); - exception = new MetadataDumperUsageException(inconsistentAuth, messages); - } - - boolean hasDatabases = !arguments.getDatabases().isEmpty(); - if (arguments.isAssessment() - && hasDatabases - && arguments.getConnectorName().toLowerCase().equals("snowflake")) { - String unsupportedFilter = - "Trying to filter by database with the --" - + ConnectorArguments.OPT_ASSESSMENT - + " flag. This is unsupported in Assessment. 
Remove either the --" - + ConnectorArguments.OPT_ASSESSMENT - + " or the --" - + ConnectorArguments.OPT_DATABASE - + " flag."; - messages.add(unsupportedFilter); - exception = new MetadataDumperUsageException(unsupportedFilter, messages); + throw new MetadataDumperUsageException(inconsistentAuth); } - removeDuplicateMessageAndThrow(exception); validateForConnector(arguments); } @@ -144,15 +121,6 @@ public final void validate(@Nonnull ConnectorArguments arguments) { */ protected abstract void validateForConnector(@Nonnull ConnectorArguments arguments); - private static void removeDuplicateMessageAndThrow( - @Nullable MetadataDumperUsageException exception) { - if (exception != null) { - List messages = exception.getMessages(); - messages.remove(messages.size() - 1); - throw exception; - } - } - private DataSource createUserPasswordDataSource(@Nonnull ConnectorArguments arguments, String url) throws SQLException { Driver driver = @@ -205,23 +173,6 @@ private String getUrlFromArguments(@Nonnull ConnectorArguments arguments) { return buf.toString(); } - final ImmutableList> getSqlTasks( - @Nonnull SnowflakeInput inputSource, - @Nonnull Class> header, - @Nonnull String format, - @Nonnull AbstractJdbcTask
<Summary>
schemaTask, - @Nonnull AbstractJdbcTask usageTask) { - switch (inputSource) { - case USAGE_THEN_SCHEMA_SOURCE: - return ImmutableList.of(usageTask, schemaTask.onlyIfFailed(usageTask)); - case SCHEMA_ONLY_SOURCE: - return ImmutableList.of(schemaTask); - case USAGE_ONLY_SOURCE: - return ImmutableList.of(usageTask); - } - throw new AssertionError(); - } - private void setCurrentDatabase(@Nonnull String databaseName, @Nonnull JdbcTemplate jdbcTemplate) throws MetadataDumperUsageException { String currentDatabase = @@ -259,4 +210,9 @@ static String describeAsDelegate(Connector connector, String baseName) { String details = String.format("%8s[same options as '%s']\n", "", baseName); return summary + details; } + + static String columnOf(Enum enumValue) { + String name = enumValue.name(); + return UPPER_CAMEL.to(UPPER_UNDERSCORE, name); + } } diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeAccountUsageMetadataConnector.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeAccountUsageMetadataConnector.java index cf4417b26..0575a28e5 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeAccountUsageMetadataConnector.java +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeAccountUsageMetadataConnector.java @@ -16,15 +16,20 @@ */ package com.google.edwmigration.dumper.application.dumper.connector.snowflake; +import static com.google.edwmigration.dumper.application.dumper.ConnectorArguments.OPT_ASSESSMENT; import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeInput.USAGE_ONLY_SOURCE; import com.google.auto.service.AutoService; +import com.google.edwmigration.dumper.application.dumper.ConnectorArguments; +import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException; import com.google.edwmigration.dumper.application.dumper.connector.Connector; import java.io.IOException; import javax.annotation.Nonnull; +import javax.annotation.ParametersAreNonnullByDefault; /** @author shevek */ @AutoService(Connector.class) +@ParametersAreNonnullByDefault public class SnowflakeAccountUsageMetadataConnector extends SnowflakeMetadataConnector { public SnowflakeAccountUsageMetadataConnector() { @@ -38,7 +43,15 @@ public String getDescription() { } @Override - public void printHelp(@Nonnull Appendable out) throws IOException { + public void printHelp(Appendable out) throws IOException { out.append(AbstractSnowflakeConnector.describeAsDelegate(this, "snowflake")); } + + @Override + public final void validateForConnector(ConnectorArguments arguments) { + if (arguments.isAssessment()) { + String message = String.format("The --%s flag is not supported.", OPT_ASSESSMENT); + throw new MetadataDumperUsageException(message); + } + } } diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInformationSchemaMetadataConnector.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInformationSchemaMetadataConnector.java index b8453b457..8bd166cc4 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInformationSchemaMetadataConnector.java +++ 
b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInformationSchemaMetadataConnector.java @@ -16,15 +16,20 @@ */ package com.google.edwmigration.dumper.application.dumper.connector.snowflake; +import static com.google.edwmigration.dumper.application.dumper.ConnectorArguments.OPT_ASSESSMENT; import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeInput.SCHEMA_ONLY_SOURCE; import com.google.auto.service.AutoService; +import com.google.edwmigration.dumper.application.dumper.ConnectorArguments; +import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException; import com.google.edwmigration.dumper.application.dumper.connector.Connector; import java.io.IOException; import javax.annotation.Nonnull; +import javax.annotation.ParametersAreNonnullByDefault; /** @author shevek */ @AutoService(Connector.class) +@ParametersAreNonnullByDefault public class SnowflakeInformationSchemaMetadataConnector extends SnowflakeMetadataConnector { public SnowflakeInformationSchemaMetadataConnector() { @@ -41,4 +46,12 @@ public String getDescription() { public void printHelp(@Nonnull Appendable out) throws IOException { out.append(AbstractSnowflakeConnector.describeAsDelegate(this, "snowflake")); } + + @Override + public final void validateForConnector(ConnectorArguments arguments) { + if (arguments.isAssessment()) { + String message = String.format("The --%s flag is not supported.", OPT_ASSESSMENT); + throw new MetadataDumperUsageException(message); + } + } } diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInput.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInput.java index c5ccb99f4..fe439a9fd 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInput.java +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInput.java @@ -16,6 +16,12 @@ */ package com.google.edwmigration.dumper.application.dumper.connector.snowflake; +import com.google.common.collect.ImmutableList; +import com.google.edwmigration.dumper.application.dumper.task.AbstractJdbcTask; +import com.google.edwmigration.dumper.application.dumper.task.Task; +import javax.annotation.Nonnull; +import javax.annotation.ParametersAreNonnullByDefault; + /** * Represents a strategy of getting Snowflake data. * @@ -29,11 +35,33 @@ * https://docs.snowflake.net/manuals/user-guide/data-share-consumers.html You must: GRANT IMPORTED * PRIVILEGES ON DATABASE snowflake TO ROLE ; */ +@ParametersAreNonnullByDefault enum SnowflakeInput { /** Get data from ACCOUNT_USAGE contents, with a fallback to INFORMATION_SCHEMA. 
*/ - USAGE_THEN_SCHEMA_SOURCE, + USAGE_THEN_SCHEMA_SOURCE { + @Override + @Nonnull + ImmutableList> sqlTasks(AbstractJdbcTask schemaTask, AbstractJdbcTask usageTask) { + return ImmutableList.of(usageTask, schemaTask.onlyIfFailed(usageTask)); + } + }, /** Get data relying only on the contents of INFORMATION_SCHEMA */ - SCHEMA_ONLY_SOURCE, + SCHEMA_ONLY_SOURCE { + @Override + @Nonnull + ImmutableList> sqlTasks(AbstractJdbcTask schemaTask, AbstractJdbcTask usageTask) { + return ImmutableList.of(schemaTask); + } + }, /** Get data relying only on the contents of ACCOUNT_USAGE */ - USAGE_ONLY_SOURCE; + USAGE_ONLY_SOURCE { + @Override + @Nonnull + ImmutableList> sqlTasks(AbstractJdbcTask schemaTask, AbstractJdbcTask usageTask) { + return ImmutableList.of(usageTask); + } + }; + + @Nonnull + abstract ImmutableList> sqlTasks(AbstractJdbcTask schema, AbstractJdbcTask usage); } diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnector.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnector.java index 2b802689f..6fc1f2d54 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnector.java +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnector.java @@ -16,12 +16,15 @@ */ package com.google.edwmigration.dumper.application.dumper.connector.snowflake; +import static com.google.common.collect.ImmutableList.toImmutableList; import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeInput.SCHEMA_ONLY_SOURCE; import static com.google.edwmigration.dumper.application.dumper.utils.ArchiveNameUtil.getEntryFileNameWithTimestamp; +import static java.util.Arrays.stream; +import static java.util.stream.Collectors.joining; import static org.apache.commons.lang3.StringUtils.isBlank; import com.google.auto.service.AutoService; -import com.google.common.base.CaseFormat; +import com.google.common.collect.ImmutableList; import com.google.edwmigration.dumper.application.dumper.ConnectorArguments; import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException; import com.google.edwmigration.dumper.application.dumper.annotations.RespectsArgumentAssessment; @@ -45,9 +48,7 @@ import java.time.Duration; import java.time.ZoneOffset; import java.time.format.DateTimeFormatter; -import java.util.Arrays; import java.util.List; -import java.util.stream.Collectors; import javax.annotation.CheckForNull; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -82,63 +83,6 @@ public SnowflakeLogsConnector() { this("snowflake-logs", SCHEMA_ONLY_SOURCE); } - public enum SnowflakeLogConnectorProperties implements ConnectorProperty { - OVERRIDE_QUERY("snowflake.logs.query", "Custom query for log dump."), - OVERRIDE_WHERE( - "snowflake.logs.where", "Custom where condition to append to query for log dump."), - - WAREHOUSE_EVENTS_HISTORY_OVERRIDE_QUERY( - "snowflake.warehouse_events_history.query", - "Custom query for warehouse events history dump"), - AUTOMATIC_CLUSTERING_HISTORY_OVERRIDE_QUERY( - "snowflake.automatic_clustering_history.query", - "Custom query for automatic clustering history dump"), - COPY_HISTORY_OVERRIDE_QUERY( - "snowflake.copy_history.query", "Custom query for copy history dump"), - DATABASE_REPLICATION_USAGE_HISTORY_OVERRIDE_QUERY( - 
"snowflake.database_replication_usage_history.query", - "Custom query for database replication usage history dump"), - LOGIN_HISTORY_OVERRIDE_QUERY( - "snowflake.login_history.query", "Custom query for login history dump"), - METERING_DAILY_HISTORY_OVERRIDE_QUERY( - "snowflake.metering_daily_history.query", "Custom query for metering daily history dump"), - PIPE_USAGE_HISTORY_OVERRIDE_QUERY( - "snowflake.pipe_usage_history.query", "Custom query for pipe usage history dump"), - QUERY_ACCELERATION_HISTORY_OVERRIDE_QUERY( - "snowflake.query_acceleration_history.query", - "Custom query for query acceleration history dump"), - REPLICATION_GROUP_USAGE_HISTORY_OVERRIDE_QUERY( - "snowflake.replication_group_usage_history.query", - "Custom query for replication group usage history dump"), - SERVERLESS_TASK_HISTORY_OVERRIDE_QUERY( - "snowflake.serverless_task_history.query", "Custom query for serverless task history dump"), - TASK_HISTORY_OVERRIDE_QUERY( - "snowflake.task_history.query", "Custom query for task history dump"), - WAREHOUSE_LOAD_HISTORY_OVERRIDE_QUERY( - "snowflake.warehouse_load_history.query", "Custom query for warehouse load history dump"), - WAREHOUSE_METERING_HISTORY_OVERRIDE_QUERY( - "snowflake.warehouse_metering_history.query", - "Custom query for warehouse metering history dump"); - - private final String name; - private final String description; - - SnowflakeLogConnectorProperties(String name, String description) { - this.name = name; - this.description = description; - } - - @Nonnull - public String getName() { - return name; - } - - @Nonnull - public String getDescription() { - return description; - } - } - private static class TaskDescription { private final String zipPrefix; private final String unformattedQuery; @@ -170,16 +114,16 @@ protected final void validateForConnector(@Nonnull ConnectorArguments arguments) } } - @Nonnull @Override - public Class> getConnectorProperties() { - return SnowflakeLogConnectorProperties.class; + @Nonnull + public String getDescription() { + return "Dumps logs from Snowflake."; } @Override @Nonnull - public String getDescription() { - return "Dumps logs from Snowflake."; + public ImmutableList getPropertyConstants() { + return SnowflakeLogsConnectorProperty.getConstants(); } private String newQueryFormat(@Nonnull ConnectorArguments arguments) @@ -199,36 +143,28 @@ private String newQueryFormat(@Nonnull ConnectorArguments arguments) private String createQueryFromAccountUsage(ConnectorArguments arguments) throws MetadataDumperUsageException { - String overrideQuery = getOverrideQuery(arguments); - if (overrideQuery != null) return overrideQuery; - - String overrideWhere = getOverrideWhere(arguments); - @SuppressWarnings("OrphanedFormatString") - StringBuilder queryBuilder = - new StringBuilder( - "SELECT database_name, \n" - + "schema_name, \n" - + "user_name, \n" - + "warehouse_name, \n" - + "query_id, \n" - + "session_id, \n" - + "query_type, \n" - + "execution_status, \n" - + "error_code, \n" - + "start_time, \n" - + "end_time, \n" - + "total_elapsed_time, \n" - + "bytes_scanned, \n" - + "rows_produced, \n" - + "credits_used_cloud_services, \n" - + "query_text \n" - + "FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY\n" - + "WHERE end_time >= to_timestamp_ltz('%s')\n" - + "AND end_time <= to_timestamp_ltz('%s')\n"); - queryBuilder.append(earliestTimestamp(arguments)); - if (overrideWhere != null) queryBuilder.append(" AND ").append(overrideWhere); - return queryBuilder.toString().replace('\n', ' '); + String baseQuery = + "SELECT database_name, 
\n" + + "schema_name, \n" + + "user_name, \n" + + "warehouse_name, \n" + + "query_id, \n" + + "session_id, \n" + + "query_type, \n" + + "execution_status, \n" + + "error_code, \n" + + "start_time, \n" + + "end_time, \n" + + "total_elapsed_time, \n" + + "bytes_scanned, \n" + + "rows_produced, \n" + + "credits_used_cloud_services, \n" + + "query_text \n" + + "FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY\n" + + "WHERE end_time >= to_timestamp_ltz('%s')\n" + + "AND end_time <= to_timestamp_ltz('%s')\n"; + return addOverridesToQuery(arguments, baseQuery).replace('\n', ' '); } private String createQueryFromInformationSchema(ConnectorArguments arguments) @@ -236,109 +172,104 @@ private String createQueryFromInformationSchema(ConnectorArguments arguments) // Docref: https://docs.snowflake.net/manuals/sql-reference/functions/query_history.html // Per the docref, Snowflake only retains/returns seven trailing days of logs in // INFORMATION_SCHEMA. - String overrideQuery = getOverrideQuery(arguments); - if (overrideQuery != null) return overrideQuery; - @SuppressWarnings("OrphanedFormatString") - StringBuilder queryBuilder = - new StringBuilder( - "SELECT database_name, \n" - + "schema_name, \n" - + "user_name, \n" - + "warehouse_name, \n" - + "query_id, \n" - + "session_id, \n" - + "query_type, \n" - + "execution_status, \n" - + "error_code, \n" - + "start_time, \n" - + "end_time, \n" - + "total_elapsed_time, \n" - + "bytes_scanned, \n" - + "rows_produced, \n" - + "credits_used_cloud_services, \n" - + "query_text \n" - + "FROM table(INFORMATION_SCHEMA.QUERY_HISTORY(\n" - + "result_limit=>10000\n" - // maximum range of 7 trailing days. - + ",end_time_range_start=>to_timestamp_ltz('%s')\n" - + ",end_time_range_end=>to_timestamp_ltz('%s')\n" - // It makes leter formatting easier if we always have a 'WHERE'. - + ")) WHERE 1=1\n"); - // if the user specifies an earliest start time there will be extraneous empty dump files - // because we always iterate over the full 7 trailing days; maybe it's worth - // preventing that in the future. To do that, we should require getQueryLogEarliestTimestamp() - // to parse and return an ISO instant, not a database-server-specific format. - queryBuilder.append(earliestTimestamp(arguments)); - - String overrideWhere = getOverrideWhere(arguments); - if (overrideWhere != null) { - queryBuilder.append(" AND " + overrideWhere); - } - return queryBuilder.toString().replace('\n', ' '); + String baseQuery = + "SELECT database_name, \n" + + "schema_name, \n" + + "user_name, \n" + + "warehouse_name, \n" + + "query_id, \n" + + "session_id, \n" + + "query_type, \n" + + "execution_status, \n" + + "error_code, \n" + + "start_time, \n" + + "end_time, \n" + + "total_elapsed_time, \n" + + "bytes_scanned, \n" + + "rows_produced, \n" + + "credits_used_cloud_services, \n" + + "query_text \n" + + "FROM table(INFORMATION_SCHEMA.QUERY_HISTORY(\n" + + "result_limit=>10000\n" + // maximum range of 7 trailing days. + + ",end_time_range_start=>to_timestamp_ltz('%s')\n" + + ",end_time_range_end=>to_timestamp_ltz('%s')\n" + // It makes later formatting easier if we always have a 'WHERE'. 
+ + ")) WHERE 1=1\n"; + return addOverridesToQuery(arguments, baseQuery).replace('\n', ' '); } private String createExtendedQueryFromAccountUsage(ConnectorArguments arguments) throws MetadataDumperUsageException { - String overrideQuery = getOverrideQuery(arguments); - if (overrideQuery != null) return overrideQuery; - - String overrideWhere = getOverrideWhere(arguments); - @SuppressWarnings("OrphanedFormatString") - StringBuilder queryBuilder = - new StringBuilder( - "SELECT query_id, \n" - + "query_text, \n" - + "database_name, \n" - + "schema_name, \n" - + "query_type, \n" - + "session_id, \n" - + "user_name, \n" - + "warehouse_name, \n" - + "cluster_number, \n" - + "query_tag, \n" - + "execution_status, \n" - + "error_code, \n" - + "error_message, \n" - + "start_time, \n" - + "end_time, \n" - + "bytes_scanned, \n" - + "percentage_scanned_from_cache, \n" - + "bytes_written, \n" - + "rows_produced, \n" - + "rows_inserted, \n" - + "rows_updated, \n" - + "rows_deleted, \n" - + "rows_unloaded, \n" - + "bytes_deleted, \n" - + "partitions_scanned, \n" - + "partitions_total, \n" - + "bytes_spilled_to_local_storage, \n" - + "bytes_spilled_to_remote_storage, \n" - + "bytes_sent_over_the_network, \n" - + "total_elapsed_time, \n" - + "compilation_time, \n" - + "execution_time, \n" - + "queued_provisioning_time, \n" - + "queued_repair_time, \n" - + "queued_overload_time, \n" - + "transaction_blocked_time, \n" - + "list_external_files_time, \n" - + "credits_used_cloud_services, \n" - + "query_load_percent, \n" - + "query_acceleration_bytes_scanned, \n" - + "query_acceleration_partitions_scanned, \n" - + "child_queries_wait_time, \n" - + "transaction_id \n" - + "FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY\n" - + "WHERE end_time >= to_timestamp_ltz('%s')\n" - + "AND end_time <= to_timestamp_ltz('%s')\n" - + "AND is_client_generated_statement = FALSE\n"); + String baseQuery = + "SELECT query_id, \n" + + "query_text, \n" + + "database_name, \n" + + "schema_name, \n" + + "query_type, \n" + + "session_id, \n" + + "user_name, \n" + + "warehouse_name, \n" + + "cluster_number, \n" + + "query_tag, \n" + + "execution_status, \n" + + "error_code, \n" + + "error_message, \n" + + "start_time, \n" + + "end_time, \n" + + "bytes_scanned, \n" + + "percentage_scanned_from_cache, \n" + + "bytes_written, \n" + + "rows_produced, \n" + + "rows_inserted, \n" + + "rows_updated, \n" + + "rows_deleted, \n" + + "rows_unloaded, \n" + + "bytes_deleted, \n" + + "partitions_scanned, \n" + + "partitions_total, \n" + + "bytes_spilled_to_local_storage, \n" + + "bytes_spilled_to_remote_storage, \n" + + "bytes_sent_over_the_network, \n" + + "total_elapsed_time, \n" + + "compilation_time, \n" + + "execution_time, \n" + + "queued_provisioning_time, \n" + + "queued_repair_time, \n" + + "queued_overload_time, \n" + + "transaction_blocked_time, \n" + + "list_external_files_time, \n" + + "credits_used_cloud_services, \n" + + "query_load_percent, \n" + + "query_acceleration_bytes_scanned, \n" + + "query_acceleration_partitions_scanned, \n" + + "child_queries_wait_time, \n" + + "transaction_id \n" + + "FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY\n" + + "WHERE end_time >= to_timestamp_ltz('%s')\n" + + "AND end_time <= to_timestamp_ltz('%s')\n" + + "AND is_client_generated_statement = FALSE\n"; + return addOverridesToQuery(arguments, baseQuery).replace('\n', ' '); + } + + @Nonnull + static String addOverridesToQuery( + @Nonnull ConnectorArguments arguments, @Nonnull String baseQuery) { + String overrideQuery = getOverrideQuery(arguments); + if 
(overrideQuery != null) { + return overrideQuery; + } + StringBuilder queryBuilder = new StringBuilder(baseQuery); queryBuilder.append(earliestTimestamp(arguments)); - if (overrideWhere != null) queryBuilder.append(" AND ").append(overrideWhere); - return queryBuilder.toString().replace('\n', ' '); + + String overrideWhere = arguments.getDefinition(SnowflakeLogsConnectorProperty.OVERRIDE_WHERE); + if (overrideWhere != null) { + queryBuilder.append(" AND " + overrideWhere); + } + return queryBuilder.toString(); } @Nonnull @@ -352,9 +283,9 @@ static String earliestTimestamp(@Nonnull ConnectorArguments arguments) { } @CheckForNull - private String getOverrideQuery(@Nonnull ConnectorArguments arguments) + private static String getOverrideQuery(@Nonnull ConnectorArguments arguments) throws MetadataDumperUsageException { - String overrideQuery = arguments.getDefinition(SnowflakeLogConnectorProperties.OVERRIDE_QUERY); + String overrideQuery = arguments.getDefinition(SnowflakeLogsConnectorProperty.OVERRIDE_QUERY); if (overrideQuery != null) { if (StringUtils.countMatches(overrideQuery, "%s") != 2) throw new MetadataDumperUsageException( @@ -365,12 +296,6 @@ private String getOverrideQuery(@Nonnull ConnectorArguments arguments) return null; } - @CheckForNull - private String getOverrideWhere(@Nonnull ConnectorArguments arguments) - throws MetadataDumperUsageException { - return arguments.getDefinition(SnowflakeLogConnectorProperties.OVERRIDE_WHERE); - } - @Override public final void addTasksTo( @Nonnull List> out, @Nonnull ConnectorArguments arguments) @@ -403,7 +328,16 @@ public final void addTasksTo( QueryHistoryExtendedFormat.Header.class); queryLogIntervals.forEach(interval -> addJdbcTask(out, interval, queryHistoryTask)); - List timeSeriesTasks = createTimeSeriesTasks(arguments); + List timeSeriesTasks = + TimeSeriesView.valuesInOrder.stream() + .map( + item -> { + String override = arguments.getDefinition(item.property); + String prefix = formatPrefix(item.headerClass, item.viewName()); + String query = overrideableQuery(override, prefix, item.column.value); + return new TaskDescription(item.zipPrefix, query, item.headerClass); + }) + .collect(toImmutableList()); Duration duration = Duration.ofDays(1); ZonedIntervalIterableGenerator.forConnectorArguments( arguments, duration, IntervalExpander.createBasedOnDuration(duration)) @@ -441,160 +375,133 @@ static String overrideableQuery( } } - private String parseColumnsFromHeader(Class> headerClass) { - return Arrays.stream(headerClass.getEnumConstants()) - .map(Enum::name) - .map(name -> CaseFormat.UPPER_CAMEL.to(CaseFormat.UPPER_UNDERSCORE, name)) - .collect(Collectors.joining(", ")); + enum TimeSeriesColumn { + COMPLETED_TIME("COMPLETED_TIME"), + END_TIME("END_TIME"), + EVENT_TIMESTAMP("EVENT_TIMESTAMP"), + LAST_LOAD_TIME("LAST_LOAD_TIME"), + TIMESTAMP("TIMESTAMP"), + USAGE_DATE("USAGE_DATE"); + + String value; + + TimeSeriesColumn(String value) { + this.value = value; + } } - private List createTimeSeriesTasks(ConnectorArguments arguments) { - String queryPrefix = "SELECT %s FROM SNOWFLAKE.ACCOUNT_USAGE.%s"; - return Arrays.asList( - new TaskDescription( - WarehouseEventsHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.WAREHOUSE_EVENTS_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(WarehouseEventsHistoryFormat.Header.class), - "WAREHOUSE_EVENTS_HISTORY"), - "TIMESTAMP"), - WarehouseEventsHistoryFormat.Header.class), - new TaskDescription( - 
AutomaticClusteringHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.AUTOMATIC_CLUSTERING_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(AutomaticClusteringHistoryFormat.Header.class), - "AUTOMATIC_CLUSTERING_HISTORY"), - "END_TIME"), - AutomaticClusteringHistoryFormat.Header.class), - new TaskDescription( - CopyHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.COPY_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(CopyHistoryFormat.Header.class), - "COPY_HISTORY"), - "LAST_LOAD_TIME"), - CopyHistoryFormat.Header.class), - new TaskDescription( - DatabaseReplicationUsageHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties - .DATABASE_REPLICATION_USAGE_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(DatabaseReplicationUsageHistoryFormat.Header.class), - "DATABASE_REPLICATION_USAGE_HISTORY"), - "END_TIME"), - DatabaseReplicationUsageHistoryFormat.Header.class), - new TaskDescription( - LoginHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.LOGIN_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(LoginHistoryFormat.Header.class), - "LOGIN_HISTORY"), - "EVENT_TIMESTAMP"), - LoginHistoryFormat.Header.class), - new TaskDescription( - MeteringDailyHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.METERING_DAILY_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(MeteringDailyHistoryFormat.Header.class), - "METERING_DAILY_HISTORY"), - "USAGE_DATE"), - MeteringDailyHistoryFormat.Header.class), - new TaskDescription( - PipeUsageHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.PIPE_USAGE_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(PipeUsageHistoryFormat.Header.class), - "PIPE_USAGE_HISTORY"), - "END_TIME"), - PipeUsageHistoryFormat.Header.class), - new TaskDescription( - QueryAccelerationHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.QUERY_ACCELERATION_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(QueryAccelerationHistoryFormat.Header.class), - "QUERY_ACCELERATION_HISTORY"), - "END_TIME"), - QueryAccelerationHistoryFormat.Header.class, - TaskCategory.OPTIONAL), - new TaskDescription( - ReplicationGroupUsageHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.REPLICATION_GROUP_USAGE_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(ReplicationGroupUsageHistoryFormat.Header.class), - "REPLICATION_GROUP_USAGE_HISTORY"), - "END_TIME"), - ReplicationGroupUsageHistoryFormat.Header.class), - new TaskDescription( - ServerlessTaskHistoryFormat.ZIP_ENTRY_PREFIX, - overrideableQuery( - arguments.getDefinition( - SnowflakeLogConnectorProperties.SERVERLESS_TASK_HISTORY_OVERRIDE_QUERY), - String.format( - queryPrefix, - parseColumnsFromHeader(ServerlessTaskHistoryFormat.Header.class), - "SERVERLESS_TASK_HISTORY"), - "END_TIME"), - ServerlessTaskHistoryFormat.Header.class), - new TaskDescription( - TaskHistoryFormat.ZIP_ENTRY_PREFIX, - 
overrideableQuery(
-                arguments.getDefinition(
-                    SnowflakeLogConnectorProperties.TASK_HISTORY_OVERRIDE_QUERY),
-                String.format(
-                    queryPrefix,
-                    parseColumnsFromHeader(TaskHistoryFormat.Header.class),
-                    "TASK_HISTORY"),
-                "COMPLETED_TIME"),
-            TaskHistoryFormat.Header.class),
-        new TaskDescription(
-            WarehouseLoadHistoryFormat.ZIP_ENTRY_PREFIX,
-            overrideableQuery(
-                arguments.getDefinition(
-                    SnowflakeLogConnectorProperties.WAREHOUSE_LOAD_HISTORY_OVERRIDE_QUERY),
-                String.format(
-                    queryPrefix,
-                    parseColumnsFromHeader(WarehouseLoadHistoryFormat.Header.class),
-                    "WAREHOUSE_LOAD_HISTORY"),
-                "END_TIME"),
-            WarehouseLoadHistoryFormat.Header.class),
-        new TaskDescription(
-            WarehouseMeteringHistoryFormat.ZIP_ENTRY_PREFIX,
-            overrideableQuery(
-                arguments.getDefinition(
-                    SnowflakeLogConnectorProperties.WAREHOUSE_METERING_HISTORY_OVERRIDE_QUERY),
-                String.format(
-                    queryPrefix,
-                    parseColumnsFromHeader(WarehouseMeteringHistoryFormat.Header.class),
-                    "WAREHOUSE_METERING_HISTORY"),
-                "END_TIME"),
-            WarehouseMeteringHistoryFormat.Header.class));
+  enum TimeSeriesView {
+    WAREHOUSE_EVENTS_HISTORY(
+        WarehouseEventsHistoryFormat.Header.class,
+        WarehouseEventsHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.TIMESTAMP,
+        SnowflakeLogsConnectorProperty.WAREHOUSE_EVENTS_HISTORY_OVERRIDE_QUERY),
+    AUTOMATIC_CLUSTERING_HISTORY(
+        AutomaticClusteringHistoryFormat.Header.class,
+        AutomaticClusteringHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.END_TIME,
+        SnowflakeLogsConnectorProperty.AUTOMATIC_CLUSTERING_HISTORY_OVERRIDE_QUERY),
+    COPY_HISTORY(
+        CopyHistoryFormat.Header.class,
+        CopyHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.LAST_LOAD_TIME,
+        SnowflakeLogsConnectorProperty.COPY_HISTORY_OVERRIDE_QUERY),
+    DATABASE_REPLICATION_USAGE_HISTORY(
+        DatabaseReplicationUsageHistoryFormat.Header.class,
+        DatabaseReplicationUsageHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.END_TIME,
+        SnowflakeLogsConnectorProperty.DATABASE_REPLICATION_USAGE_HISTORY_OVERRIDE_QUERY),
+    LOGIN_HISTORY(
+        LoginHistoryFormat.Header.class,
+        LoginHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.EVENT_TIMESTAMP,
+        SnowflakeLogsConnectorProperty.LOGIN_HISTORY_OVERRIDE_QUERY),
+    METERING_DAILY_HISTORY(
+        MeteringDailyHistoryFormat.Header.class,
+        MeteringDailyHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.USAGE_DATE,
+        SnowflakeLogsConnectorProperty.METERING_DAILY_HISTORY_OVERRIDE_QUERY),
+    PIPE_USAGE_HISTORY(
+        PipeUsageHistoryFormat.Header.class,
+        PipeUsageHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.END_TIME,
+        SnowflakeLogsConnectorProperty.PIPE_USAGE_HISTORY_OVERRIDE_QUERY),
+    QUERY_ACCELERATION_HISTORY(
+        QueryAccelerationHistoryFormat.Header.class,
+        QueryAccelerationHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.END_TIME,
+        SnowflakeLogsConnectorProperty.QUERY_ACCELERATION_HISTORY_OVERRIDE_QUERY),
+    REPLICATION_GROUP_USAGE_HISTORY(
+        ReplicationGroupUsageHistoryFormat.Header.class,
+        ReplicationGroupUsageHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.END_TIME,
+        SnowflakeLogsConnectorProperty.REPLICATION_GROUP_USAGE_HISTORY_OVERRIDE_QUERY),
+    SERVERLESS_TASK_HISTORY(
+        ServerlessTaskHistoryFormat.Header.class,
+        ServerlessTaskHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.END_TIME,
+        SnowflakeLogsConnectorProperty.SERVERLESS_TASK_HISTORY_OVERRIDE_QUERY),
+    TASK_HISTORY(
+        TaskHistoryFormat.Header.class,
+        TaskHistoryFormat.ZIP_ENTRY_PREFIX,
+        TimeSeriesColumn.COMPLETED_TIME,
+        SnowflakeLogsConnectorProperty.TASK_HISTORY_OVERRIDE_QUERY),
+    WAREHOUSE_LOAD_HISTORY(
+
WarehouseLoadHistoryFormat.Header.class, + WarehouseLoadHistoryFormat.ZIP_ENTRY_PREFIX, + TimeSeriesColumn.END_TIME, + SnowflakeLogsConnectorProperty.WAREHOUSE_LOAD_HISTORY_OVERRIDE_QUERY), + WAREHOUSE_METERING_HISTORY( + WarehouseMeteringHistoryFormat.Header.class, + WarehouseMeteringHistoryFormat.ZIP_ENTRY_PREFIX, + TimeSeriesColumn.END_TIME, + SnowflakeLogsConnectorProperty.WAREHOUSE_METERING_HISTORY_OVERRIDE_QUERY); + + final Class> headerClass; + final ConnectorProperty property; + final String queryPrefix; + final String zipPrefix; + final TimeSeriesColumn column; + + TimeSeriesView( + Class> headerClass, + String zipPrefix, + TimeSeriesColumn column, + SnowflakeLogsConnectorProperty property) { + this.headerClass = headerClass; + this.property = property; + this.queryPrefix = formatPrefix(headerClass, name()); + this.zipPrefix = zipPrefix; + this.column = column; + } + + static final ImmutableList valuesInOrder = + ImmutableList.of( + WAREHOUSE_EVENTS_HISTORY, + AUTOMATIC_CLUSTERING_HISTORY, + COPY_HISTORY, + DATABASE_REPLICATION_USAGE_HISTORY, + LOGIN_HISTORY, + METERING_DAILY_HISTORY, + PIPE_USAGE_HISTORY, + QUERY_ACCELERATION_HISTORY, + REPLICATION_GROUP_USAGE_HISTORY, + SERVERLESS_TASK_HISTORY, + TASK_HISTORY, + WAREHOUSE_LOAD_HISTORY, + WAREHOUSE_METERING_HISTORY); + + String viewName() { + return name(); + } + } + + @Nonnull + static String formatPrefix(@Nonnull Class> enumClass, @Nonnull String view) { + String selectList = + stream(enumClass.getEnumConstants()) + .map(AbstractSnowflakeConnector::columnOf) + .collect(joining(", ")); + return String.format("SELECT %s FROM SNOWFLAKE.ACCOUNT_USAGE.%s", selectList, view); } } diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnectorProperty.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnectorProperty.java new file mode 100644 index 000000000..b239d1b0c --- /dev/null +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnectorProperty.java @@ -0,0 +1,83 @@ +/* + * Copyright 2022-2025 Google LLC + * Copyright 2013-2021 CompilerWorks + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.edwmigration.dumper.application.dumper.connector.snowflake; + +import static com.google.common.collect.ImmutableList.toImmutableList; +import static java.util.Arrays.stream; + +import com.google.common.collect.ImmutableList; +import com.google.edwmigration.dumper.application.dumper.connector.ConnectorProperty; +import javax.annotation.Nonnull; + +enum SnowflakeLogsConnectorProperty implements ConnectorProperty { + /* Basic overrides. */ + OVERRIDE_QUERY("snowflake.logs.query", "Custom query for log dump."), + OVERRIDE_WHERE("snowflake.logs.where", "Custom where condition to append to query for log dump."), + + /* Time series custom queries. 
*/ + WAREHOUSE_EVENTS_HISTORY_OVERRIDE_QUERY( + "snowflake.warehouse_events_history.query", "Custom query for warehouse events history dump"), + AUTOMATIC_CLUSTERING_HISTORY_OVERRIDE_QUERY( + "snowflake.automatic_clustering_history.query", + "Custom query for automatic clustering history dump"), + COPY_HISTORY_OVERRIDE_QUERY("snowflake.copy_history.query", "Custom query for copy history dump"), + DATABASE_REPLICATION_USAGE_HISTORY_OVERRIDE_QUERY( + "snowflake.database_replication_usage_history.query", + "Custom query for database replication usage history dump"), + LOGIN_HISTORY_OVERRIDE_QUERY( + "snowflake.login_history.query", "Custom query for login history dump"), + METERING_DAILY_HISTORY_OVERRIDE_QUERY( + "snowflake.metering_daily_history.query", "Custom query for metering daily history dump"), + PIPE_USAGE_HISTORY_OVERRIDE_QUERY( + "snowflake.pipe_usage_history.query", "Custom query for pipe usage history dump"), + QUERY_ACCELERATION_HISTORY_OVERRIDE_QUERY( + "snowflake.query_acceleration_history.query", + "Custom query for query acceleration history dump"), + REPLICATION_GROUP_USAGE_HISTORY_OVERRIDE_QUERY( + "snowflake.replication_group_usage_history.query", + "Custom query for replication group usage history dump"), + SERVERLESS_TASK_HISTORY_OVERRIDE_QUERY( + "snowflake.serverless_task_history.query", "Custom query for serverless task history dump"), + TASK_HISTORY_OVERRIDE_QUERY("snowflake.task_history.query", "Custom query for task history dump"), + WAREHOUSE_LOAD_HISTORY_OVERRIDE_QUERY( + "snowflake.warehouse_load_history.query", "Custom query for warehouse load history dump"), + WAREHOUSE_METERING_HISTORY_OVERRIDE_QUERY( + "snowflake.warehouse_metering_history.query", + "Custom query for warehouse metering history dump"); + + private final String name; + private final String description; + + SnowflakeLogsConnectorProperty(String name, String description) { + this.name = name; + this.description = description; + } + + static ImmutableList getConstants() { + return stream(values()).collect(toImmutableList()); + } + + @Nonnull + public String getName() { + return name; + } + + @Nonnull + public String getDescription() { + return description; + } +} diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnector.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnector.java index 5dd1845b6..7fe4f7a4e 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnector.java +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnector.java @@ -23,6 +23,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.edwmigration.dumper.application.dumper.ConnectorArguments; +import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException; import com.google.edwmigration.dumper.application.dumper.annotations.RespectsArgumentAssessment; import com.google.edwmigration.dumper.application.dumper.annotations.RespectsArgumentDatabaseForConnection; import com.google.edwmigration.dumper.application.dumper.annotations.RespectsArgumentDatabasePredicate; @@ -43,8 +44,6 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; /** * A connector to 
Snowflake databases. @@ -58,9 +57,6 @@ public class SnowflakeMetadataConnector extends AbstractSnowflakeConnector implements MetadataConnector, SnowflakeMetadataDumpFormat { - @SuppressWarnings("UnusedVariable") - private static final Logger logger = LoggerFactory.getLogger(SnowflakeMetadataConnector.class); - private static final String ACCOUNT_USAGE_SCHEMA_NAME = "SNOWFLAKE.ACCOUNT_USAGE"; private static final String ACCOUNT_USAGE_WHERE_CONDITION = "DELETED IS NULL"; private static final String EMPTY_WHERE_CONDITION = ""; @@ -132,7 +128,20 @@ public Iterable getPropertyConstants() { } @Override - protected final void validateForConnector(@Nonnull ConnectorArguments arguments) {} + protected void validateForConnector(@Nonnull ConnectorArguments arguments) { + boolean hasDatabases = !arguments.getDatabases().isEmpty(); + if (arguments.isAssessment() && hasDatabases) { + String unsupportedFilter = + "Trying to filter by database with the --" + + ConnectorArguments.OPT_ASSESSMENT + + " flag. This is unsupported in Assessment. Remove either the --" + + ConnectorArguments.OPT_ASSESSMENT + + " or the --" + + ConnectorArguments.OPT_DATABASE + + " flag."; + throw new MetadataDumperUsageException(unsupportedFilter); + } + } private void addSqlTasksWithInfoSchemaFallback( @Nonnull List> out, @@ -162,9 +171,7 @@ private void addSqlTasksWithInfoSchemaFallback( if (isAssessment) { out.add(usageTask); } else { - ImmutableList> tasks = - getSqlTasks(inputSource, header, format, schemaTask, usageTask); - out.addAll(tasks); + out.addAll(inputSource.sqlTasks(schemaTask, usageTask)); } } diff --git a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakePlanner.java b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakePlanner.java index ca2bee523..1f21f3a00 100644 --- a/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakePlanner.java +++ b/dumper/app/src/main/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakePlanner.java @@ -61,12 +61,21 @@ private enum Format { private static final String TIME_PREDICATE = "timestamp > CURRENT_TIMESTAMP(0) - INTERVAL '14 days'"; + private final ImmutableList assessmentQueries = + ImmutableList.of( + AssessmentQuery.createMetricsSelect(Format.TABLE_STORAGE_METRICS, UPPER_UNDERSCORE), + AssessmentQuery.createShow("WAREHOUSES", Format.WAREHOUSES, LOWER_UNDERSCORE), + SHOW_EXTERNAL_TABLES, + AssessmentQuery.createShow("FUNCTIONS", Format.FUNCTION_INFO, LOWER_UNDERSCORE)); + + private final ImmutableList liteAssessmentQueries = + ImmutableList.of( + AssessmentQuery.createShow("WAREHOUSES", Format.WAREHOUSES, LOWER_UNDERSCORE), + AssessmentQuery.createShow("EXTERNAL TABLES", Format.EXTERNAL_TABLES, LOWER_UNDERSCORE), + AssessmentQuery.createShow("FUNCTIONS", Format.FUNCTION_INFO, LOWER_UNDERSCORE)); + ImmutableList generateAssessmentQueries() { - return ImmutableList.of( - AssessmentQuery.createMetricsSelect(Format.TABLE_STORAGE_METRICS, UPPER_UNDERSCORE), - AssessmentQuery.createShow("WAREHOUSES", Format.WAREHOUSES, LOWER_UNDERSCORE), - SHOW_EXTERNAL_TABLES, - AssessmentQuery.createShow("FUNCTIONS", Format.FUNCTION_INFO, LOWER_UNDERSCORE)); + return assessmentQueries; } ImmutableList> generateLiteSpecificQueries() { @@ -94,18 +103,12 @@ ImmutableList> generateLiteSpecificQueries() { builder.add( new JdbcSelectTask(TablesFormat.AU_ZIP_ENTRY_NAME, tables) 
.withHeaderClass(TablesFormat.Header.class)); - builder.add(proceduresTask()); - builder.add(reportDateRangeTask()); - builder.add(eventStateTask()); - builder.add(warehouseEventsHistoryTask()); - builder.add(warehouseMeteringTask()); - builder.add(storageMetricsLiteTask()); - - ImmutableList liteAssessmentQueries = - ImmutableList.of( - AssessmentQuery.createShow("WAREHOUSES", Format.WAREHOUSES, LOWER_UNDERSCORE), - AssessmentQuery.createShow("EXTERNAL TABLES", Format.EXTERNAL_TABLES, LOWER_UNDERSCORE), - AssessmentQuery.createShow("FUNCTIONS", Format.FUNCTION_INFO, LOWER_UNDERSCORE)); + builder.add(procedures.toTask()); + builder.add(reportDateRange.toTask()); + builder.add(eventState.toTask()); + builder.add(warehouseEventsHistory.toTask()); + builder.add(warehouseMetering.toTask()); + builder.add(storageMetrics.toTask()); for (AssessmentQuery item : liteAssessmentQueries) { String query = String.format(item.formatString, view, /* an empty WHERE clause */ ""); @@ -153,33 +156,61 @@ String substitute(String schema, String whereCondition) { } } - Task eventStateTask() { + private static final class LiteTaskData { + + final String csvFile; + final String query; + final ImmutableList header; + + LiteTaskData(String csv, String query, ImmutableList header) { + this.csvFile = csv; + this.query = query; + this.header = header; + } + + Task toTask() { + return new LiteTimeSeriesTask(csvFile, query, header); + } + } + + private static LiteTaskData eventState = initEventState(); + private static LiteTaskData procedures = initProcedures(); + private static LiteTaskData reportDateRange = initReportDateRange(); + private static LiteTaskData storageMetrics = initStorageMetrics(); + private static LiteTaskData warehouseEventsHistory = initWarehouseEventsHistory(); + private static LiteTaskData warehouseMetering = initWarehouseMetering(); + + private static String buildQuery(String selectList, String view, String predicate) { + return String.format("SELECT %s FROM %s WHERE %s", selectList, view, predicate); + } + + private static LiteTaskData initEventState() { String query = "SELECT event_state, count(*)" + " FROM SNOWFLAKE.ACCOUNT_USAGE.WAREHOUSE_EVENTS_HISTORY" + " WHERE event_name LIKE '%CLUSTER%' GROUP BY ALL"; ImmutableList header = ImmutableList.of("EventState", "Count"); - return new LiteTimeSeriesTask("event_state.csv", query, header); + return new LiteTaskData("event_state.csv", query, header); } - Task proceduresTask() { + private static LiteTaskData initProcedures() { String view = "SNOWFLAKE.ACCOUNT_USAGE.PROCEDURES"; String query = String.format( "SELECT procedure_language, procedure_owner, count(1) FROM %s GROUP BY ALL", view); ImmutableList header = ImmutableList.of("Language", "Owner", "Count"); - return new LiteTimeSeriesTask("procedures.csv", query, header); + return new LiteTaskData("procedures.csv", query, header); } - Task reportDateRangeTask() { + private static LiteTaskData initReportDateRange() { String selectList = "min(timestamp), max(timestamp)"; String view = "SNOWFLAKE.ACCOUNT_USAGE.WAREHOUSE_EVENTS_HISTORY"; String query = buildQuery(selectList, view, TIME_PREDICATE); ImmutableList header = ImmutableList.of("StartTime", "EndTime"); - return new LiteTimeSeriesTask("report_date_range.csv", query, header); + return new LiteTaskData("report_date_range.csv", query, header); } - Task storageMetricsLiteTask() { + private static LiteTaskData initStorageMetrics() { String selectList = String.join( ", ", @@ -228,14 +259,10 @@ Task storageMetricsLiteTask() { "SchemaDropped", 
"Comment", "Deleted"); - return new LiteTimeSeriesTask("table_storage_metrics-au.csv", query, header); - } - - private static String buildQuery(String selectList, String view, String predicate) { - return String.format("SELECT %s FROM %s WHERE %s", selectList, view, predicate); + return new LiteTaskData("table_storage_metrics-au.csv", query, header); } - Task warehouseEventsHistoryTask() { + private static LiteTaskData initWarehouseEventsHistory() { String selectList = String.join( ", ", @@ -259,10 +286,10 @@ Task warehouseEventsHistoryTask() { "EventReason", "EventState", "QueryId"); - return new LiteTimeSeriesTask("warehouse_events_lite.csv", query, header); + return new LiteTaskData("warehouse_events_lite.csv", query, header); } - Task warehouseMeteringTask() { + private static LiteTaskData initWarehouseMetering() { String selectList = String.join( ", ", @@ -284,6 +311,6 @@ Task warehouseMeteringTask() { "WarehouseName", "CreditsUsedCompute", "CreditsUsedCloudServices"); - return new LiteTimeSeriesTask("warehouse_metering_lite.csv", query, header); + return new LiteTaskData("warehouse_metering_lite.csv", query, header); } } diff --git a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/AbstractSnowflakeConnectorTest.java b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/AbstractSnowflakeConnectorTest.java index 2dffb1f75..d06a527b3 100644 --- a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/AbstractSnowflakeConnectorTest.java +++ b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/AbstractSnowflakeConnectorTest.java @@ -16,7 +16,7 @@ */ package com.google.edwmigration.dumper.application.dumper.connector.snowflake; -import static java.util.Arrays.copyOf; +import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; @@ -25,7 +25,7 @@ import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException; import com.google.edwmigration.dumper.application.dumper.connector.AbstractConnectorTest; import java.io.IOException; -import java.util.stream.IntStream; +import java.util.ArrayList; import org.junit.Assert; import org.junit.Test; import org.junit.experimental.theories.Theories; @@ -119,6 +119,18 @@ public void validate_mixedPrivateKeyAndPassword_fail() throws IOException { "Private key authentication method can't be used together with user password")); } + enum TestEnum { + SomeValue + } + + @Test + public void columnOf_success() { + + String columnName = AbstractSnowflakeConnector.columnOf(TestEnum.SomeValue); + + assertEquals("SOME_VALUE", columnName); + } + @Test public void validate_assessmentEnabledWithDatabaseFilter_throwsUsageException() throws IOException { @@ -146,13 +158,10 @@ public void checkJnaInClasspath_success() { } private static ConnectorArguments makeArguments(String... 
   private static ConnectorArguments makeArguments(String... extraArguments) {
-    try {
-      String[] arguments = copyOf(extraArguments, extraArguments.length + ARGS.size());
-      IntStream.range(0, ARGS.size())
-          .forEach(el -> arguments[el + extraArguments.length] = ARGS.get(el));
-      return new ConnectorArguments(arguments);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
+    ArrayList<String> arguments = new ArrayList<>(ARGS);
+    for (String item : extraArguments) {
+      arguments.add(item);
     }
+    return ConnectorArguments.create(arguments);
   }
 }
diff --git a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeAccountUsageMetadataConnectorTest.java b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeAccountUsageMetadataConnectorTest.java
index afd03efcf..c3e4937ec 100644
--- a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeAccountUsageMetadataConnectorTest.java
+++ b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeAccountUsageMetadataConnectorTest.java
@@ -16,6 +16,11 @@
  */
 package com.google.edwmigration.dumper.application.dumper.connector.snowflake;

+import static org.junit.Assert.assertThrows;
+
+import com.google.common.collect.ImmutableList;
+import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
+import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
 import com.google.edwmigration.dumper.application.dumper.connector.MetadataConnector;
 import com.google.edwmigration.dumper.plugin.lib.dumper.spi.SnowflakeMetadataDumpFormat;
 import com.google.edwmigration.dumper.test.TestUtils;
@@ -65,4 +70,22 @@ public void testExecution() throws Exception {
         SnowflakeMetadataDumpFormat.FunctionInfoFormat.AU_ZIP_ENTRY_NAME,
         SnowflakeMetadataDumpFormat.WarehousesFormat.AU_ZIP_ENTRY_NAME);
   }
+
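+  // The connector name alone is enough for validate() to pass.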
+  @Test
+  public void validate_success() {
+    ConnectorArguments arguments =
+        ConnectorArguments.create(
+            ImmutableList.of("--connector", "snowflake-account-usage-metadata"));
+
+    connector.validate(arguments);
+  }
+
+  @Test
+  public void validate_assessmentFlagProvided_throwsException() {
+    ConnectorArguments arguments =
+        ConnectorArguments.create(
+            ImmutableList.of("--connector", "snowflake-account-usage-metadata", "--assessment"));
+
+    assertThrows(MetadataDumperUsageException.class, () -> connector.validate(arguments));
+  }
 }
diff --git a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInformationSchemaMetadataConnectorTest.java b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInformationSchemaMetadataConnectorTest.java
index c906976dd..092d906b3 100644
--- a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInformationSchemaMetadataConnectorTest.java
+++ b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeInformationSchemaMetadataConnectorTest.java
@@ -16,6 +16,11 @@
  */
 package com.google.edwmigration.dumper.application.dumper.connector.snowflake;

+import static org.junit.Assert.assertThrows;
+
+import com.google.common.collect.ImmutableList;
+import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
+import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
 import com.google.edwmigration.dumper.application.dumper.connector.MetadataConnector;
 import com.google.edwmigration.dumper.plugin.lib.dumper.spi.SnowflakeMetadataDumpFormat;
 import com.google.edwmigration.dumper.test.TestUtils;
@@ -59,4 +64,23 @@ public void testExecution() throws Exception {
     validator.run(outputFile);
   }
+
+  @Test
+  public void validate_success() {
+    ConnectorArguments arguments =
+        ConnectorArguments.create(
+            ImmutableList.of("--connector", "snowflake-information-schema-metadata"));
+
+    connector.validate(arguments);
+  }
+
+  @Test
+  public void validate_assessmentFlagProvided_throwsException() {
+    ConnectorArguments arguments =
+        ConnectorArguments.create(
+            ImmutableList.of(
+                "--connector", "snowflake-information-schema-metadata", "--assessment"));
+
+    assertThrows(MetadataDumperUsageException.class, () -> connector.validate(arguments));
+  }
 }
diff --git a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLiteConnectorTest.java b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLiteConnectorTest.java
index fb956838e..d1fe29034 100644
--- a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLiteConnectorTest.java
+++ b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLiteConnectorTest.java
@@ -18,6 +18,7 @@

 import static org.junit.Assert.assertThrows;

+import com.google.common.collect.ImmutableList;
 import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
 import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
 import org.junit.Test;
@@ -28,8 +29,9 @@ public class SnowflakeLiteConnectorTest {

   @Test
-  public void validate_noAssessmentFlag_throwsUsageException() throws Exception {
-    ConnectorArguments noFlagArguments = new ConnectorArguments("--connector", "snowflake-lite");
+  public void validate_noAssessmentFlag_throwsUsageException() {
+    ConnectorArguments noFlagArguments =
+        ConnectorArguments.create(ImmutableList.of("--connector", "snowflake-lite"));
     SnowflakeLiteConnector connector = new SnowflakeLiteConnector();

     assertThrows(MetadataDumperUsageException.class, () -> connector.validate(noFlagArguments));
diff --git a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnectorTest.java b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnectorTest.java
index 620a4c113..e956b932e 100644
--- a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnectorTest.java
+++ b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeLogsConnectorTest.java
@@ -17,24 +17,29 @@
 package com.google.edwmigration.dumper.application.dumper.connector.snowflake;

 import static com.google.common.base.Predicates.alwaysTrue;
+import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeLogsConnector.TimeSeriesView.valuesInOrder;
+import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeLogsConnector.addOverridesToQuery;
 import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeLogsConnector.earliestTimestamp;
+import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeLogsConnector.formatPrefix;
 import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeLogsConnector.overrideableQuery;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;

+import com.google.common.collect.ImmutableList;
 import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
 import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
+import com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeLogsConnector.TimeSeriesView;
 import com.google.edwmigration.dumper.test.TestUtils;
 import java.io.File;
-import java.io.IOException;
-import org.junit.Assert;
 import org.junit.Test;
+import org.junit.experimental.theories.Theories;
+import org.junit.experimental.theories.Theory;
 import org.junit.runner.RunWith;
-import org.junit.runners.JUnit4;

 /** @author shevek */
-@RunWith(JUnit4.class)
+@RunWith(Theories.class)
 public class SnowflakeLogsConnectorTest {

   @Test
@@ -55,8 +60,61 @@ void test(File output) throws Exception {
   }

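+  // addOverridesToQuery treats a -Dsnowflake.logs.query override as a format string: its %s
+  // placeholders take the query time range (see the to_timestamp_ltz bounds below), and a value
+  // with no format specifiers is rejected, as the tests that follow show.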
   @Test
-  public void earliestTimestamp_notProvided_emptyResult() throws IOException {
-    ConnectorArguments arguments = new ConnectorArguments("--connector", "snowflake-logs");
+  public void addOverridesToQuery_fullQueryOverride_success() {
+    String override =
+        "SELECT * FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY\n"
+            + "WHERE end_time >= to_timestamp_ltz('%s')\n"
+            + "AND end_time <= to_timestamp_ltz('%s')\n";
+    String property = String.format("-Dsnowflake.logs.query=%s", override);
+    ConnectorArguments arguments =
+        ConnectorArguments.create(ImmutableList.of("--connector", "snowflake-logs", property));
+
+    String result = addOverridesToQuery(arguments, "SELECT 1");
+
+    assertEquals(override, result);
+  }
+
+  @Test
+  public void addOverridesToQuery_fullQueryOverrideWithBadValue_throwsException() {
+    String override = "text_with_no_format_specifiers";
+    String property = String.format("-Dsnowflake.logs.query=%s", override);
+    ConnectorArguments arguments =
+        ConnectorArguments.create(ImmutableList.of("--connector", "snowflake-logs", property));
+
+    assertThrows(
+        MetadataDumperUsageException.class, () -> addOverridesToQuery(arguments, "SELECT 1"));
+  }
+
+  @Test
+  public void addOverridesToQuery_noOverrides_nothingChanges() {
+    String originalQuery = "SELECT * FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY WHERE 1=1\n";
+    ConnectorArguments arguments =
+        ConnectorArguments.create(ImmutableList.of("--connector", "snowflake-logs"));
+
+    String result = addOverridesToQuery(arguments, originalQuery);
+
+    assertEquals(originalQuery, result);
+  }
+
+  @Test
+  public void addOverridesToQuery_whereOverride_success() {
+    String originalQuery = "SELECT * FROM SNOWFLAKE.ACCOUNT_USAGE.QUERY_HISTORY WHERE 1=1\n";
+    String override = "rows_inserted > 0";
+    String property = String.format("-Dsnowflake.logs.where=%s", override);
+    ConnectorArguments arguments =
+        ConnectorArguments.create(ImmutableList.of("--connector", "snowflake-logs", property));
+
+    String result = addOverridesToQuery(arguments, originalQuery);
+
+    assertTrue(result, result.contains(originalQuery));
+    assertTrue(result, result.contains("AND"));
+    assertTrue(result, result.contains(override));
+  }
+
+  @Test
+  public void earliestTimestamp_notProvided_emptyResult() {
+    ConnectorArguments arguments =
+        ConnectorArguments.create(ImmutableList.of("--connector", "snowflake-logs"));

     String result = earliestTimestamp(arguments);
@@ -64,13 +122,14 @@ public void earliestTimestamp_notProvided_emptyResult() throws IOException {
   }

   @Test
-  public void earliestTimestamp_provided_resultMatches() throws IOException {
+  public void earliestTimestamp_provided_resultMatches() {
     ConnectorArguments arguments =
-        new ConnectorArguments(
-            "--connector",
-            "snowflake-logs",
-            "--" + ConnectorArguments.OPT_QUERY_LOG_EARLIEST_TIMESTAMP,
-            "2024-03-21");
+        ConnectorArguments.create(
+            ImmutableList.of(
+                "--connector",
+                "snowflake-logs",
+                "--" + ConnectorArguments.OPT_QUERY_LOG_EARLIEST_TIMESTAMP,
+                "2024-03-21"));

     String result = earliestTimestamp(arguments);
@@ -79,8 +138,22 @@ public void earliestTimestamp_provided_resultMatches() throws IOException {
     assertTrue(result, result.endsWith("\n"));
   }

+  enum TestEnum {
+    FirstValue,
+    SecondValue
+  }
+
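+  // formatPrefix turns each enum constant into an UPPER_UNDERSCORE column name and builds a
+  // SELECT over the named SNOWFLAKE.ACCOUNT_USAGE view, as the expected string below shows.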
+  @Test
+  public void formatPrefix_success() {
+    String result = formatPrefix(TestEnum.class, "TASK_HISTORY");
+
+    assertEquals(
+        "SELECT FIRST_VALUE, SECOND_VALUE FROM SNOWFLAKE.ACCOUNT_USAGE.TASK_HISTORY", result);
+  }
+
   @Test
-  public void overrideableQuery_overrideAbsent_defaultUsed() throws IOException {
+  public void overrideableQuery_overrideAbsent_defaultUsed() {
     String defaultSql = "SELECT event_name, query_id FROM WAREHOUSE_EVENTS_HISTORY";

     String result = overrideableQuery(null, defaultSql, "timestamp");
@@ -89,7 +162,7 @@ public void overrideableQuery_overrideAbsent_defaultUsed() throws IOException {
   }

   @Test
-  public void overrideableQuery_overrideEmpty_resultEmpty() throws IOException {
+  public void overrideableQuery_overrideEmpty_resultEmpty() {
     String defaultSql = "SELECT event_name, query_id FROM WAREHOUSE_EVENTS_HISTORY";
     String override = "";
@@ -99,7 +172,7 @@ public void overrideableQuery_overrideEmpty_resultEmpty() throws IOException {
   }

   @Test
-  public void overrideableQuery_overridePresent_defaultIgnored() throws IOException {
+  public void overrideableQuery_overridePresent_defaultIgnored() {
     String defaultSql = "SELECT event_name, query_id FROM WAREHOUSE_EVENTS_HISTORY";
     String override = "SELECT query_id FROM WAREHOUSE_EVENTS_HISTORY";
@@ -109,16 +182,28 @@ public void overrideableQuery_overridePresent_defaultIgnored() throws IOException {
   }

   @Test
-  public void validate_unsupportedOption_throwsException() throws IOException {
+  public void validate_unsupportedOption_throwsException() {
     SnowflakeLogsConnector connector = new SnowflakeLogsConnector();
     ConnectorArguments arguments =
-        new ConnectorArguments(
-            "--connector",
-            "snowflake-logs",
-            "--assessment",
-            "--" + ConnectorArguments.OPT_QUERY_LOG_EARLIEST_TIMESTAMP,
-            "2024");
-
-    Assert.assertThrows(MetadataDumperUsageException.class, () -> connector.validate(arguments));
+        ConnectorArguments.create(
+            ImmutableList.of(
+                "--connector",
+                "snowflake-logs",
+                "--assessment",
+                "--" + ConnectorArguments.OPT_QUERY_LOG_EARLIEST_TIMESTAMP,
+                "2024"));
+
+    assertThrows(MetadataDumperUsageException.class, () -> connector.validate(arguments));
+  }
+
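+  // The Theories runner invokes this once per TimeSeriesView constant; together with the size
+  // check below, it verifies that valuesInOrder contains exactly the enum's values.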
+  @Theory
+  public void valuesInOrder_allValuesPresent(TimeSeriesView view) {
+    assertTrue(valuesInOrder.contains(view));
+  }
+
+  @Test
+  public void valuesInOrder_sizeMatches() {
+    assertEquals(TimeSeriesView.values().length, valuesInOrder.size());
   }
 }
diff --git a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnectorTest.java b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnectorTest.java
index 673b868a8..ab4159290 100644
--- a/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnectorTest.java
+++ b/dumper/app/src/test/java/com/google/edwmigration/dumper/application/dumper/connector/snowflake/SnowflakeMetadataConnectorTest.java
@@ -42,7 +42,6 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import javax.annotation.Nonnull;
-import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.junit.Assume;
 import org.junit.Test;
@@ -249,8 +248,12 @@ private static ImmutableMultimap<String, String> collectSqlStatementsAsMultimap(
       String... extraArgs) throws IOException {
     List<Task<?>> tasks = new ArrayList<>();
     SnowflakeMetadataConnector connector = new SnowflakeMetadataConnector();
-    String[] args = ArrayUtils.addAll(new String[] {"--connector", connector.getName()}, extraArgs);
-    connector.addTasksTo(tasks, new ConnectorArguments(args));
+    ImmutableList<String> standardArgs = ImmutableList.of("--connector", connector.getName());
+    ArrayList<String> args = new ArrayList<>(standardArgs);
+    for (String item : extraArgs) {
+      args.add(item);
+    }
+    connector.addTasksTo(tasks, ConnectorArguments.create(args));
     ImmutableMultimap.Builder<String, String> builder = ImmutableMultimap.builder();
     tasks.stream()
         .filter(t -> t instanceof JdbcSelectTask)