Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@

import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.edwmigration.dumper.application.dumper.utils.OptionalUtils.optionallyWhen;
import static java.time.temporal.ChronoUnit.DAYS;
import static java.time.temporal.ChronoUnit.HOURS;
import static java.util.Arrays.stream;
Expand All @@ -27,7 +26,6 @@
import com.google.common.base.MoreObjects;
import com.google.common.base.MoreObjects.ToStringHelper;
import com.google.common.base.Predicates;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.edwmigration.dumper.application.dumper.ZonedParser.DayOffset;
import com.google.edwmigration.dumper.application.dumper.connector.Connector;
Expand Down Expand Up @@ -302,6 +300,14 @@ public class ConnectorArguments extends DefaultArguments {
private final OptionSpec<Void> optionOutputContinue =
parser.accepts("continue", "Continues writing a previous output file.");

/**
* (Deprecated) earliest timestamp of logs to extract.
*
* <p>If the user specifies an earliest start time there will be extraneous empty dump files
* because we always iterate over the full 7 trailing days; maybe it's worth preventing that in
* the future. To do that, we should require getQueryLogEarliestTimestamp() to parse and return an
* ISO instant, not a database-server-specific format.
*/
@Deprecated
private final OptionSpec<String> optionQueryLogEarliestTimestamp =
parser
Expand Down Expand Up @@ -590,10 +596,19 @@ public class ConnectorArguments extends DefaultArguments {

private ConnectorProperties connectorProperties;

private final PasswordReader passwordReader = new PasswordReader();
private final PasswordReader passwordReader;

/**
 * Creates arguments from raw command-line tokens, using a freshly constructed
 * {@link PasswordReader} for interactive prompting.
 *
 * @param args raw command-line arguments, as passed to {@code main}
 * @throws IOException declared in the signature for compatibility with existing callers;
 *     not thrown by this implementation (the delegate constructor does not throw it)
 */
public ConnectorArguments(@Nonnull String... args) throws IOException {
this(Arrays.asList(args), new PasswordReader());
}

/**
 * Canonical constructor; every other creation path delegates here.
 *
 * @param args command-line arguments to parse, forwarded to the superclass
 * @param passwordReader reader stored for later use (presumably when a password must be
 *     prompted for interactively — confirm against getPasswordOrPrompt)
 */
private ConnectorArguments(@Nonnull List<String> args, @Nonnull PasswordReader passwordReader) {
super(args);
this.passwordReader = passwordReader;
}

/**
 * Static factory: builds a {@code ConnectorArguments} over the given argument list with the
 * default interactive {@link PasswordReader}.
 *
 * @param args command-line arguments to parse
 * @return a new instance wrapping {@code args}
 */
public static ConnectorArguments create(@Nonnull List<String> args) {
PasswordReader defaultReader = new PasswordReader();
return new ConnectorArguments(args, defaultReader);
}

@Override
Expand Down Expand Up @@ -751,10 +766,6 @@ public boolean isAssessment() {
return getOptions().has(optionAssessment);
}

private <T> Optional<T> optionAsOptional(OptionSpec<T> spec) {
return optionallyWhen(getOptions().has(spec), () -> getOptions().valueOf(spec));
}

@Nonnull
public Predicate<String> getSchemaPredicate() {
return toPredicate(getSchemata());
Expand Down Expand Up @@ -785,7 +796,11 @@ public String getUserOrFail() {
*/
/**
 * Returns the password only when the password flag was given on the command line.
 *
 * @return the password (prompting the user if the flag was supplied without a value),
 *     or {@link Optional#empty()} when the flag is absent
 */
@Nonnull
public Optional<String> getPasswordIfFlagProvided() {
  // Fix: a leftover pre-refactor line calling the removed optionallyWhen() helper was
  // interleaved with the new implementation, leaving two bodies; keep only the new one.
  if (getOptions().has(optionPass)) {
    return Optional.of(getPasswordOrPrompt());
  }
  return Optional.empty();
}

@Nonnull
Expand Down Expand Up @@ -832,7 +847,15 @@ public List<String> getConfiguration() {
}

/**
 * Returns the value of the output option, treating a missing flag, a {@code null} value, or an
 * empty string all as "no output file specified".
 *
 * @return the output file name, or {@link Optional#empty()} when none was usable
 */
public Optional<String> getOutputFile() {
  // Fix: a leftover pre-refactor line (optionAsOptional/Strings.isNullOrEmpty) was interleaved
  // with the new implementation; keep only the new explicit form.
  if (!getOptions().has(optionOutput)) {
    return Optional.empty();
  }
  String file = getOptions().valueOf(optionOutput);
  if (file == null || file.isEmpty()) {
    return Optional.empty();
  }
  return Optional.of(file);
}

public boolean isOutputContinue() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import java.io.PrintStream;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
Expand All @@ -36,15 +37,10 @@
import joptsimple.ValueConversionException;
import joptsimple.ValueConverter;
import org.anarres.jdiagnostics.ProductMetadata;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/** @author shevek */
public class DefaultArguments {

@SuppressWarnings("UnusedVariable")
private static final Logger logger = LoggerFactory.getLogger(DefaultArguments.class);

public static class BooleanValueConverter implements ValueConverter<Boolean> {

private final String[] V_TRUE = {"true", "t", "yes", "y", "1"};
Expand Down Expand Up @@ -96,9 +92,8 @@ public String valuePattern() {
private final String[] args;
private OptionSet options;

/**
 * Creates the argument holder from a list of command-line tokens.
 *
 * <p>The list is copied into a private array, so later mutation of the caller's list has no
 * effect on this instance (this also removes the need for the old EI_EXPOSE_REP2 suppression
 * that the previous array-taking constructor carried).
 *
 * @param args command-line arguments; defensively copied
 */
DefaultArguments(@Nonnull List<String> args) {
  this.args = args.toArray(new String[0]);
}

@Nonnull
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,9 +16,11 @@
*/
package com.google.edwmigration.dumper.application.dumper.connector.snowflake;

import static com.google.common.base.CaseFormat.UPPER_CAMEL;
import static com.google.common.base.CaseFormat.UPPER_UNDERSCORE;

import com.google.common.base.CharMatcher;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
import com.google.edwmigration.dumper.application.dumper.annotations.RespectsArgumentDriver;
Expand All @@ -34,16 +36,12 @@
import com.google.edwmigration.dumper.application.dumper.connector.Connector;
import com.google.edwmigration.dumper.application.dumper.handle.Handle;
import com.google.edwmigration.dumper.application.dumper.handle.JdbcHandle;
import com.google.edwmigration.dumper.application.dumper.task.AbstractJdbcTask;
import com.google.edwmigration.dumper.application.dumper.task.Summary;
import com.google.edwmigration.dumper.application.dumper.task.Task;
import java.sql.Driver;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.sql.DataSource;
import org.apache.commons.lang3.StringUtils;
import org.springframework.jdbc.core.JdbcTemplate;
Expand Down Expand Up @@ -103,35 +101,14 @@ public Handle open(@Nonnull ConnectorArguments arguments)

/**
 * Validates arguments common to all Snowflake connectors, then delegates connector-specific
 * checks to {@link #validateForConnector}.
 *
 * <p>Fix: the previous text interleaved removed pre-refactor lines (the {@code messages} list,
 * the accumulated {@code exception}, and the database-filter check that moved into the
 * subclasses' {@code validateForConnector}) with the new body, leaving references to undefined
 * locals. Only the new logic is kept.
 *
 * @param arguments parsed command-line arguments
 * @throws MetadataDumperUsageException when both password and private-key authentication are
 *     requested at once
 */
@Override
public final void validate(@Nonnull ConnectorArguments arguments) {
  if (arguments.isPasswordFlagProvided() && arguments.isPrivateKeyFileProvided()) {
    String inconsistentAuth =
        "Private key authentication method can't be used together with user password. "
            + "If the private key file is encrypted, please use --"
            + ConnectorArguments.OPT_PRIVATE_KEY_PASSWORD
            + " to specify the key password.";
    throw new MetadataDumperUsageException(inconsistentAuth);
  }
  validateForConnector(arguments);
}

Expand All @@ -144,15 +121,6 @@ public final void validate(@Nonnull ConnectorArguments arguments) {
*/
protected abstract void validateForConnector(@Nonnull ConnectorArguments arguments);

private static void removeDuplicateMessageAndThrow(
@Nullable MetadataDumperUsageException exception) {
if (exception != null) {
List<String> messages = exception.getMessages();
messages.remove(messages.size() - 1);
throw exception;
}
}

private DataSource createUserPasswordDataSource(@Nonnull ConnectorArguments arguments, String url)
throws SQLException {
Driver driver =
Expand Down Expand Up @@ -205,23 +173,6 @@ private String getUrlFromArguments(@Nonnull ConnectorArguments arguments) {
return buf.toString();
}

final ImmutableList<Task<?>> getSqlTasks(
@Nonnull SnowflakeInput inputSource,
@Nonnull Class<? extends Enum<?>> header,
@Nonnull String format,
@Nonnull AbstractJdbcTask<Summary> schemaTask,
@Nonnull AbstractJdbcTask<Summary> usageTask) {
switch (inputSource) {
case USAGE_THEN_SCHEMA_SOURCE:
return ImmutableList.of(usageTask, schemaTask.onlyIfFailed(usageTask));
case SCHEMA_ONLY_SOURCE:
return ImmutableList.of(schemaTask);
case USAGE_ONLY_SOURCE:
return ImmutableList.of(usageTask);
}
throw new AssertionError();
}

private void setCurrentDatabase(@Nonnull String databaseName, @Nonnull JdbcTemplate jdbcTemplate)
throws MetadataDumperUsageException {
String currentDatabase =
Expand Down Expand Up @@ -259,4 +210,9 @@ static String describeAsDelegate(Connector connector, String baseName) {
String details = String.format("%8s[same options as '%s']\n", "", baseName);
return summary + details;
}

/**
 * Derives a column name from an enum constant by converting its UpperCamel name to
 * UPPER_UNDERSCORE form via Guava's {@code CaseFormat}.
 *
 * @param enumValue the enum constant to convert
 * @return the UPPER_UNDERSCORE rendering of the constant's name
 */
static String columnOf(Enum<?> enumValue) {
  return UPPER_CAMEL.to(UPPER_UNDERSCORE, enumValue.name());
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,20 @@
*/
package com.google.edwmigration.dumper.application.dumper.connector.snowflake;

import static com.google.edwmigration.dumper.application.dumper.ConnectorArguments.OPT_ASSESSMENT;
import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeInput.USAGE_ONLY_SOURCE;

import com.google.auto.service.AutoService;
import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
import com.google.edwmigration.dumper.application.dumper.connector.Connector;
import java.io.IOException;
import javax.annotation.Nonnull;
import javax.annotation.ParametersAreNonnullByDefault;

/** @author shevek */
@AutoService(Connector.class)
@ParametersAreNonnullByDefault
public class SnowflakeAccountUsageMetadataConnector extends SnowflakeMetadataConnector {

public SnowflakeAccountUsageMetadataConnector() {
Expand All @@ -38,7 +43,15 @@ public String getDescription() {
}

@Override
public void printHelp(@Nonnull Appendable out) throws IOException {
public void printHelp(Appendable out) throws IOException {
out.append(AbstractSnowflakeConnector.describeAsDelegate(this, "snowflake"));
}

/**
 * Rejects the assessment flag, which this account-usage connector does not support.
 *
 * @param arguments parsed command-line arguments
 * @throws MetadataDumperUsageException when the assessment flag is present
 */
@Override
public final void validateForConnector(ConnectorArguments arguments) {
  if (!arguments.isAssessment()) {
    return;
  }
  throw new MetadataDumperUsageException(
      String.format("The --%s flag is not supported.", OPT_ASSESSMENT));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,20 @@
*/
package com.google.edwmigration.dumper.application.dumper.connector.snowflake;

import static com.google.edwmigration.dumper.application.dumper.ConnectorArguments.OPT_ASSESSMENT;
import static com.google.edwmigration.dumper.application.dumper.connector.snowflake.SnowflakeInput.SCHEMA_ONLY_SOURCE;

import com.google.auto.service.AutoService;
import com.google.edwmigration.dumper.application.dumper.ConnectorArguments;
import com.google.edwmigration.dumper.application.dumper.MetadataDumperUsageException;
import com.google.edwmigration.dumper.application.dumper.connector.Connector;
import java.io.IOException;
import javax.annotation.Nonnull;
import javax.annotation.ParametersAreNonnullByDefault;

/** @author shevek */
@AutoService(Connector.class)
@ParametersAreNonnullByDefault
public class SnowflakeInformationSchemaMetadataConnector extends SnowflakeMetadataConnector {

public SnowflakeInformationSchemaMetadataConnector() {
Expand All @@ -41,4 +46,12 @@ public String getDescription() {
/** Prints help by delegating to the base 'snowflake' connector's option summary. */
public void printHelp(@Nonnull Appendable out) throws IOException {
out.append(AbstractSnowflakeConnector.describeAsDelegate(this, "snowflake"));
}

/**
 * Rejects the assessment flag, which this information-schema connector does not support.
 *
 * @param arguments parsed command-line arguments
 * @throws MetadataDumperUsageException when the assessment flag is present
 */
@Override
public final void validateForConnector(ConnectorArguments arguments) {
  if (!arguments.isAssessment()) {
    return;
  }
  throw new MetadataDumperUsageException(
      String.format("The --%s flag is not supported.", OPT_ASSESSMENT));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,12 @@
*/
package com.google.edwmigration.dumper.application.dumper.connector.snowflake;

import com.google.common.collect.ImmutableList;
import com.google.edwmigration.dumper.application.dumper.task.AbstractJdbcTask;
import com.google.edwmigration.dumper.application.dumper.task.Task;
import javax.annotation.Nonnull;
import javax.annotation.ParametersAreNonnullByDefault;

/**
* Represents a strategy of getting Snowflake data.
*
Expand All @@ -29,11 +35,33 @@
* https://docs.snowflake.net/manuals/user-guide/data-share-consumers.html You must: GRANT IMPORTED
* PRIVILEGES ON DATABASE snowflake TO ROLE <SOMETHING>;
*/
@ParametersAreNonnullByDefault
enum SnowflakeInput {
/** Get data from ACCOUNT_USAGE contents, with a fallback to INFORMATION_SCHEMA. */
USAGE_THEN_SCHEMA_SOURCE,
USAGE_THEN_SCHEMA_SOURCE {
@Override
@Nonnull
ImmutableList<Task<?>> sqlTasks(AbstractJdbcTask<?> schemaTask, AbstractJdbcTask<?> usageTask) {
return ImmutableList.of(usageTask, schemaTask.onlyIfFailed(usageTask));
}
},
/** Get data relying only on the contents of INFORMATION_SCHEMA */
SCHEMA_ONLY_SOURCE,
SCHEMA_ONLY_SOURCE {
@Override
@Nonnull
ImmutableList<Task<?>> sqlTasks(AbstractJdbcTask<?> schemaTask, AbstractJdbcTask<?> usageTask) {
return ImmutableList.of(schemaTask);
}
},
/** Get data relying only on the contents of ACCOUNT_USAGE */
USAGE_ONLY_SOURCE;
USAGE_ONLY_SOURCE {
@Override
@Nonnull
ImmutableList<Task<?>> sqlTasks(AbstractJdbcTask<?> schemaTask, AbstractJdbcTask<?> usageTask) {
return ImmutableList.of(usageTask);
}
};

@Nonnull
abstract ImmutableList<Task<?>> sqlTasks(AbstractJdbcTask<?> schema, AbstractJdbcTask<?> usage);
}
Loading
Loading