Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
<properties>
<otp.serialization.version.id>55</otp.serialization.version.id>
<!-- Lib versions - keep list sorted on property name -->
<aws.sdk.version>2.15.0</aws.sdk.version>
<geotools.version>27.0</geotools.version>
<google.dagger.version>2.42</google.dagger.version>
<jackson.version>2.13.3</jackson.version>
Expand Down Expand Up @@ -463,6 +464,14 @@

<dependencyManagement>
<dependencies>
<dependency>
<!-- This makes sure all S3 libraries are using compatible versions. -->
<groupId>software.amazon.awssdk</groupId>
<artifactId>bom</artifactId>
<version>${aws.sdk.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<!-- This makes sure all Google libraries are using compatible versions. -->
<groupId>com.google.cloud</groupId>
Expand Down Expand Up @@ -720,6 +729,12 @@
<version>${jackson.version}</version>
</dependency>

<!-- Amazon AWS sdk -->
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>s3</artifactId>
</dependency>

<!--
Google library imports
The version is imported using google libraries-bom, see dep.management
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
package org.opentripplanner.ext.datastore.aws;

import org.opentripplanner.datastore.api.DataSource;
import org.opentripplanner.datastore.api.FileType;
import software.amazon.awssdk.services.s3.S3Client;

/**
 * Base class for all AWS S3 backed data sources. It holds the S3 client, the S3 object
 * reference and the OTP file type, and implements the {@link DataSource} identity methods
 * ({@code name}, {@code path}, {@code type}) on top of them.
 */
abstract class AbstractAwsDataSource implements DataSource {

  private final S3Client s3Client;
  private final S3Object object;
  private final FileType type;

  AbstractAwsDataSource(S3Client client, S3Object s3Object, FileType fileType) {
    this.s3Client = client;
    this.object = s3Object;
    this.type = fileType;
  }

  @Override
  public final String name() {
    return object.name();
  }

  @Override
  public final String path() {
    return object.toUriString();
  }

  @Override
  public final FileType type() {
    return type;
  }

  @Override
  public final String toString() {
    return type + " " + path();
  }

  /** The S3 client shared by all sources created by the same repository. */
  S3Client s3Client() {
    return s3Client;
  }

  /** The S3 object (bucket + name) this source wraps. */
  S3Object object() {
    return object;
  }

  /** Convenience accessor for the bucket part of the wrapped S3 object. */
  String bucketName() {
    return object.bucket();
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package org.opentripplanner.ext.datastore.aws;

import dagger.Module;
import dagger.Provides;
import javax.annotation.Nullable;
import javax.inject.Singleton;
import org.opentripplanner.datastore.api.AmazonS3DSRepository;
import org.opentripplanner.datastore.api.OtpDataStoreConfig;
import org.opentripplanner.datastore.base.DataSourceRepository;
import org.opentripplanner.util.OTPFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Module
public class AwsDataSourceModule {

  private static final Logger LOG = LoggerFactory.getLogger(AwsDataSourceModule.class);

  /**
   * Provide the Amazon AWS S3 data source repository, or {@code null} when the
   * {@link OTPFeature#AmazonAwsS3Storage} feature is turned off.
   * <p>
   * Note: the method was previously misnamed {@code provideGoogleStorageDataSourceRepository},
   * a copy-paste leftover from the Google Cloud Storage module.
   */
  @Provides
  @Singleton
  @Nullable
  @AmazonS3DSRepository
  DataSourceRepository provideAwsS3DataSourceRepository(OtpDataStoreConfig c) {
    if (OTPFeature.AmazonAwsS3Storage.isOff()) {
      return null;
    }
    // Nothing is "detected" here - the repository is created unconditionally when
    // the feature flag is on, so the log message only states that.
    LOG.info("Amazon AWS S3 Storage Support enabled.");
    return new AwsDataSourceRepository(c.s3Region(), c.s3CredentialsProfile());
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
package org.opentripplanner.ext.datastore.aws;

import static org.opentripplanner.ext.datastore.aws.S3Object.S3_SCHEMA_PREFIX;

import java.net.URI;
import javax.annotation.Nonnull;
import org.opentripplanner.datastore.api.CompositeDataSource;
import org.opentripplanner.datastore.api.DataSource;
import org.opentripplanner.datastore.api.FileType;
import org.opentripplanner.datastore.base.DataSourceRepository;
import org.opentripplanner.datastore.base.ZipStreamDataSourceDecorator;
import org.opentripplanner.util.lang.StringUtils;
import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;

/**
 * This data source repository provides access to in-/out- data stored in Amazon AWS S3,
 * resolving {@code s3://} URIs into readable and writable {@link DataSource}s.
 */
public class AwsDataSourceRepository implements DataSourceRepository {

  /** Configured profile name used to request the DEFAULT AWS credentials profile. */
  private static final String DEFAULT_CREDENTIALS_PROFILE = "DEFAULT";

  private final Region region;

  private final String credentialsProfile;

  /** Created lazily in {@link #open()}. */
  private S3Client s3Client;

  public AwsDataSourceRepository(String region, String credentialsProfile) {
    this.region = Region.of(region);
    this.credentialsProfile = credentialsProfile;
  }

  @Override
  public String description() {
    // Used to say "Google Cloud Storage" - a copy-paste error from the GCS repository.
    return "Amazon AWS S3 Storage";
  }

  @Override
  public void open() {
    this.s3Client = connectToStorage();
  }

  /**
   * Return a data source for the given URI, or {@code null} if the URI scheme is not
   * {@code s3} (another repository may then handle it).
   */
  @Override
  public DataSource findSource(@Nonnull URI uri, @Nonnull FileType type) {
    if (skipUri(uri)) {
      return null;
    }
    S3Object object = S3Object.toS3Object(uri);
    return createSource(object, type);
  }

  @Override
  public CompositeDataSource findCompositeSource(@Nonnull URI uri, @Nonnull FileType type) {
    if (skipUri(uri)) {
      return null;
    }
    return createCompositeSource(S3Object.toS3Object(uri), type);
  }

  /* private methods */

  /** True if the URI does not use the S3 schema and should be ignored by this repository. */
  private static boolean skipUri(URI uri) {
    return !S3_SCHEMA_PREFIX.equals(uri.getScheme());
  }

  private DataSource createSource(S3Object object, FileType type) {
    /* TODO Check if the object exist in S3 */
    boolean exist = true;

    if (exist) {
      return new AwsFileDataSource(s3Client, object, type);
    } else {
      return new AwsOutFileDataSource(s3Client, object, type);
    }
  }

  private CompositeDataSource createCompositeSource(S3Object object, FileType type) {
    if (object.isRoot()) {
      return new AwsDirectoryDataSource(s3Client, object, type);
    }

    if (object.name().endsWith(".zip")) {
      /* TODO Check if the zip object exist in S3 */
      boolean exist = true;

      // A zip source must already exist - throw if it is NOT found. The original
      // condition was inverted (`if (exist) throw ...`), which would reject every
      // existing zip archive.
      if (!exist) {
        throw new IllegalArgumentException(type.text() + " not found: " + object.toUriString());
      }
      DataSource s3Source = new AwsFileDataSource(s3Client, object, type);
      return new ZipStreamDataSourceDecorator(s3Source);
    }
    return new AwsDirectoryDataSource(s3Client, object, type);
  }

  /**
   * Build the S3 client for the configured region. If a credentials profile is set, use it;
   * the special profile name {@code DEFAULT} selects the SDK default profile.
   */
  private S3Client connectToStorage() {
    var builder = S3Client.builder().region(region);

    if (StringUtils.hasValue(credentialsProfile)) {
      if (DEFAULT_CREDENTIALS_PROFILE.equals(credentialsProfile)) {
        builder.credentialsProvider(ProfileCredentialsProvider.create());
      } else {
        builder.credentialsProvider(ProfileCredentialsProvider.create(credentialsProfile));
      }
    }
    return builder.build();
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
package org.opentripplanner.ext.datastore.aws;

import java.util.Collection;
import java.util.List;
import org.opentripplanner.datastore.api.CompositeDataSource;
import org.opentripplanner.datastore.api.DataSource;
import org.opentripplanner.datastore.api.FileType;
import software.amazon.awssdk.services.s3.S3Client;

/**
 * This is an adapter to simulate a file directory in an Amazon AWS S3 bucket. Files created
 * using an instance of this class will have a common namespace (key prefix). It only supports
 * creating new output sources; it can not yet be used to list files with the common namespace
 * (directory path) - see the TODOs below.
 */
public class AwsDirectoryDataSource extends AbstractAwsDataSource implements CompositeDataSource {

  AwsDirectoryDataSource(S3Client s3Client, S3Object object, FileType type) {
    super(s3Client, object, type);
  }

  @Override
  public boolean exists() {
    // TODO Not implemented - falls back to the default DataSource behavior.
    return super.exists();
  }

  @Override
  public Collection<DataSource> content() {
    // TODO Listing the objects under this prefix is not implemented; always empty for now.
    return List.of();
  }

  /**
   * Resolve a child entry in this "directory". Returns a readable file source when the
   * object exists, otherwise a writable out-file source for a new object.
   */
  @Override
  public DataSource entry(String name) {
    S3Object child = object().child(name);

    // TODO Replace placeholder with a real S3 existence check; currently every entry
    //      is treated as an existing file.
    boolean exists = true;
    // If file exist
    if (exists) {
      return new AwsFileDataSource(s3Client(), child, type());
    }
    // New file
    return new AwsOutFileDataSource(s3Client(), child, type());
  }

  @Override
  public void delete() {
    // TODO Deleting the objects under this prefix is not implemented.
  }

  @Override
  public void close() {}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
package org.opentripplanner.ext.datastore.aws;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.GZIPInputStream;
import org.opentripplanner.datastore.api.DataSource;
import org.opentripplanner.datastore.api.FileType;
import org.opentripplanner.datastore.file.DirectoryDataSource;
import org.opentripplanner.datastore.file.ZipFileDataSource;
import software.amazon.awssdk.services.s3.S3Client;

/**
 * This class is a wrapper around an EXISTING Amazon AWS S3 object. It can be read and
 * overwritten.
 * <p>
 * Reading compressed objects is supported. The only format supported is gzip (extension .gz).
 */
class AwsFileDataSource extends AbstractAwsDataSource implements DataSource {

  /**
   * Create a data source wrapper around an S3 object. This wrapper handles GZIP(.gz)
   * compressed files as well as normal files. It does not handle directories
   * ({@link DirectoryDataSource}) or zip-files {@link ZipFileDataSource} which contain
   * multiple files.
   */
  AwsFileDataSource(S3Client s3Client, S3Object object, FileType type) {
    super(s3Client, object, type);
  }

  @Override
  public long size() {
    // TODO Fetch the object size from S3; falls back to the default for now.
    return super.size();
  }

  @Override
  public long lastModified() {
    // TODO Fetch the last-modified timestamp from S3; falls back to the default for now.
    return super.lastModified();
  }

  @Override
  public boolean exists() {
    // TODO Check object existence in S3; falls back to the default for now.
    return super.exists();
  }

  @Override
  public boolean isWritable() {
    return true;
  }

  @Override
  public InputStream asInputStream() {
    // TODO Download the object content from S3 and assign the raw stream here.
    InputStream in = null;

    // Fail fast while unimplemented. The original code returned null (for plain files)
    // or passed null to GZIPInputStream (for .gz files, causing an obscure NPE).
    if (in == null) {
      throw new UnsupportedOperationException("Reading from S3 is not implemented yet: " + path());
    }

    // We support both gzip and unzipped files when reading.
    if (object().name().endsWith(".gz")) {
      try {
        return new GZIPInputStream(in);
      } catch (IOException e) {
        throw new IllegalStateException(e.getLocalizedMessage(), e);
      }
    } else {
      return in;
    }
  }

  @Override
  public OutputStream asOutputStream() {
    // TODO Upload to S3 is not implemented yet; callers currently receive null.
    return null;
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package org.opentripplanner.ext.datastore.aws;

import java.io.OutputStream;
import org.opentripplanner.datastore.api.DataSource;
import org.opentripplanner.datastore.api.FileType;
import org.opentripplanner.datastore.file.DirectoryDataSource;
import org.opentripplanner.datastore.file.ZipFileDataSource;
import software.amazon.awssdk.services.s3.S3Client;

class AwsOutFileDataSource extends AbstractAwsDataSource implements DataSource {

  /**
   * Create a data source wrapper around a NEW S3 object used for writing output. It does not
   * handle directories({@link DirectoryDataSource}) or zip-files {@link ZipFileDataSource}
   * which contain multiple files.
   */
  AwsOutFileDataSource(S3Client s3Client, S3Object object, FileType type) {
    super(s3Client, object, type);
  }

  @Override
  public boolean exists() {
    // This source always represents an object that is not stored in S3 yet.
    return false;
  }

  @Override
  public OutputStream asOutputStream() {
    // Delegate the upload to AwsOutputStream (defined elsewhere in this package).
    return new AwsOutputStream(s3Client(), object()).open();
  }
}
Loading