diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 00000000..2579abb7
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,2 @@
+# Format source code according to checkstyle configuration
+a5da2cad2e1f73c18df90bbfbe5d06f2bbd3357e
diff --git a/build.gradle b/build.gradle
index 9bbbaee6..78270ef6 100644
--- a/build.gradle
+++ b/build.gradle
@@ -2,6 +2,7 @@ plugins {
id 'java-library'
id 'eclipse'
id 'maven-publish'
+ id 'checkstyle'
}
group = 'com.glencoesoftware.omero'
@@ -87,3 +88,7 @@ jar {
)
}
}
+
+checkstyle {
+ toolVersion = "10.26.1"
+}
diff --git a/config/checkstyle/checkstyle.xml b/config/checkstyle/checkstyle.xml
new file mode 100644
index 00000000..b10ab4ca
--- /dev/null
+++ b/config/checkstyle/checkstyle.xml
@@ -0,0 +1,470 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/main/java/com/glencoesoftware/omero/zarr/OmeroAmazonS3ClientFactory.java b/src/main/java/com/glencoesoftware/omero/zarr/OmeroAmazonS3ClientFactory.java
index 9a85a081..f078056b 100644
--- a/src/main/java/com/glencoesoftware/omero/zarr/OmeroAmazonS3ClientFactory.java
+++ b/src/main/java/com/glencoesoftware/omero/zarr/OmeroAmazonS3ClientFactory.java
@@ -15,14 +15,8 @@
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
-package com.glencoesoftware.omero.zarr;
-import java.net.URI;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import org.slf4j.LoggerFactory;
+package com.glencoesoftware.omero.zarr;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSCredentialsProviderChain;
@@ -36,7 +30,15 @@
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.util.AwsHostNameUtils;
import com.upplication.s3fs.AmazonS3ClientFactory;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import org.slf4j.LoggerFactory;
+/**
+ * Subclass which maps a URI into a set of credentials to use for the client.
+ */
public class OmeroAmazonS3ClientFactory extends AmazonS3ClientFactory {
private static final org.slf4j.Logger log =
@@ -48,11 +50,11 @@ public class OmeroAmazonS3ClientFactory extends AmazonS3ClientFactory {
protected AWSCredentialsProvider getCredentialsProvider(Properties props) {
// If AWS Environment or System Properties are set, throw an exception
// so users will know they are not supported
- if (System.getenv("AWS_ACCESS_KEY_ID") != null ||
- System.getenv("AWS_SECRET_ACCESS_KEY") != null ||
- System.getenv("AWS_SESSION_TOKEN") != null ||
- System.getProperty("aws.accessKeyId") != null ||
- System.getProperty("aws.secretAccessKey") != null) {
+ if (System.getenv("AWS_ACCESS_KEY_ID") != null
+ || System.getenv("AWS_SECRET_ACCESS_KEY") != null
+ || System.getenv("AWS_SESSION_TOKEN") != null
+ || System.getProperty("aws.accessKeyId") != null
+ || System.getProperty("aws.secretAccessKey") != null) {
throw new RuntimeException("AWS credentials supplied by environment variables"
+ " or Java system properties are not supported."
+ " Please use either named profiles or instance"
@@ -77,6 +79,7 @@ protected AWSCredentialsProvider getCredentialsProvider(Properties props) {
/**
* Retrieves the bucket name from a given URI.
+ *
* @param uri The URI to handle
* @return The bucket name
*/
@@ -90,6 +93,7 @@ private String getBucketFromUri(URI uri) {
/**
* Retrieves the region from a given URI.
+ *
* @param uri The URI to handle
* @return The region
*/
@@ -103,6 +107,7 @@ private String getRegionFromUri(URI uri) {
/**
* Retrieves the endpoint from a given URI.
+ *
* @param uri The URI to handle
* @return The endpoint
*/
@@ -112,7 +117,7 @@ public String getEndPointFromUri(URI uri) {
@Override
public synchronized AmazonS3 getAmazonS3(URI uri, Properties props) {
- //Check if we have a S3 client for this bucket
+ // Check if we have a S3 client for this bucket
String bucket = getBucketFromUri(uri);
if (bucketClientMap.containsKey(bucket)) {
log.info("Found bucket " + bucket);
@@ -120,11 +125,12 @@ public synchronized AmazonS3 getAmazonS3(URI uri, Properties props) {
}
log.info("Creating client for bucket " + bucket);
AmazonS3 client = AmazonS3ClientBuilder.standard()
- .withCredentials(getCredentialsProvider(props))
- .withClientConfiguration(getClientConfiguration(props))
- .withMetricsCollector(getRequestMetricsCollector(props))
- .withEndpointConfiguration(new EndpointConfiguration(getEndPointFromUri(uri), getRegionFromUri(uri)))
- .build();
+ .withCredentials(getCredentialsProvider(props))
+ .withClientConfiguration(getClientConfiguration(props))
+ .withMetricsCollector(getRequestMetricsCollector(props))
+ .withEndpointConfiguration(
+ new EndpointConfiguration(getEndPointFromUri(uri), getRegionFromUri(uri)))
+ .build();
bucketClientMap.put(bucket, client);
return client;
}
diff --git a/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3FileSystem.java b/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3FileSystem.java
index c4f02d1a..2af84c49 100644
--- a/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3FileSystem.java
+++ b/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3FileSystem.java
@@ -15,16 +15,18 @@
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
-package com.glencoesoftware.omero.zarr;
-import java.io.IOException;
+package com.glencoesoftware.omero.zarr;
import com.amazonaws.services.s3.AmazonS3;
import com.upplication.s3fs.S3FileSystem;
import com.upplication.s3fs.S3FileSystemProvider;
+import java.io.IOException;
+/** Subclass of S3FileSystem with performance optimizations. */
public class OmeroS3FileSystem extends S3FileSystem {
+ /** Default constructor. */
public OmeroS3FileSystem(S3FileSystemProvider provider, String key,
AmazonS3 client, String endpoint) {
super(provider, key, client, endpoint);
@@ -32,11 +34,11 @@ public OmeroS3FileSystem(S3FileSystemProvider provider, String key,
@Override
public void close() throws IOException {
- //No-op
+ // No-op
}
@Override
public boolean isOpen() {
- return true; //Not possible to be closed
+ return true; // Not possible to be closed
}
}
diff --git a/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3ReadOnlySeekableByteChannel.java b/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3ReadOnlySeekableByteChannel.java
index 8dbc1671..8fd8e1c3 100644
--- a/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3ReadOnlySeekableByteChannel.java
+++ b/src/main/java/com/glencoesoftware/omero/zarr/OmeroS3ReadOnlySeekableByteChannel.java
@@ -15,9 +15,14 @@
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
-package com.glencoesoftware.omero.zarr;
+package com.glencoesoftware.omero.zarr;
+import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.S3Object;
+import com.amazonaws.services.s3.model.S3ObjectInputStream;
+import com.upplication.s3fs.S3Path;
+import com.upplication.s3fs.S3ReadOnlySeekableByteChannel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
@@ -32,14 +37,8 @@
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
-
import org.perf4j.slf4j.Slf4JStopWatch;
-import com.amazonaws.services.s3.model.GetObjectRequest;
-import com.amazonaws.services.s3.model.S3Object;
-import com.amazonaws.services.s3.model.S3ObjectInputStream;
-import com.upplication.s3fs.S3Path;
-import com.upplication.s3fs.S3ReadOnlySeekableByteChannel;
/**
* Overridden, hybrid version of the implementation from
@@ -61,15 +60,15 @@ public class OmeroS3ReadOnlySeekableByteChannel implements SeekableByteChannel {
* entire object in full from S3 without checks for length during
* object construction.
*/
- public OmeroS3ReadOnlySeekableByteChannel(S3Path path, Set<? extends OpenOption> options) throws IOException {
+ public OmeroS3ReadOnlySeekableByteChannel(S3Path path, Set<? extends OpenOption> options)
+ throws IOException {
this.options = Collections.unmodifiableSet(new HashSet<>(options));
- if (
- this.options.contains(StandardOpenOption.WRITE) ||
- this.options.contains(StandardOpenOption.CREATE) ||
- this.options.contains(StandardOpenOption.CREATE_NEW) ||
- this.options.contains(StandardOpenOption.APPEND)
- ) {
+ if (this.options.contains(StandardOpenOption.WRITE)
+ || this.options.contains(StandardOpenOption.CREATE)
+ || this.options.contains(StandardOpenOption.CREATE_NEW)
+ || this.options.contains(StandardOpenOption.APPEND)
+ ) {
throw new ReadOnlyFileSystemException();
}
@@ -92,10 +91,10 @@ public OmeroS3ReadOnlySeekableByteChannel(S3Path path, Set<? extends OpenOption>
// the stream closed as quickly as possible. See
// https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/model/S3Object.html#getObjectContent--
try (S3ObjectInputStream s3Stream = s3Object.getObjectContent()) {
- byte[] read_buf = new byte[1024*1024];
- int read_len = 0;
- while ((read_len = s3Stream.read(read_buf)) > 0) {
- outputStream.write(read_buf, 0, read_len);
+ byte[] readBuf = new byte[1024 * 1024];
+ int readLen = 0;
+ while ((readLen = s3Stream.read(readBuf)) > 0) {
+ outputStream.write(readBuf, 0, readLen);
}
}
this.data = outputStream.toByteArray();
@@ -124,7 +123,9 @@ public boolean isOpen() {
* private visibility.
*/
@Override
- public long position() { return position; }
+ public long position() {
+ return position;
+ }
/**
* Overridden, hybrid version of the implementation from
@@ -133,8 +134,7 @@ public boolean isOpen() {
*/
@Override
public SeekableByteChannel position(long targetPosition)
- throws IOException
- {
+ throws IOException {
throw new UnsupportedOperationException();
}
diff --git a/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelBuffer.java b/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelBuffer.java
index 4ff6b989..4cd2bee7 100644
--- a/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelBuffer.java
+++ b/src/main/java/com/glencoesoftware/omero/zarr/ZarrPixelBuffer.java
@@ -18,6 +18,10 @@
package com.glencoesoftware.omero.zarr;
+import com.bc.zarr.DataType;
+import com.bc.zarr.ZarrArray;
+import com.github.benmanes.caffeine.cache.AsyncLoadingCache;
+import com.github.benmanes.caffeine.cache.Caffeine;
import java.awt.Dimension;
import java.io.IOException;
import java.nio.BufferOverflowException;
@@ -31,22 +35,18 @@
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.stream.IntStream;
-
-import org.slf4j.LoggerFactory;
-
-import com.bc.zarr.DataType;
-import com.bc.zarr.ZarrArray;
-import com.github.benmanes.caffeine.cache.AsyncLoadingCache;
-import com.github.benmanes.caffeine.cache.Caffeine;
-
import loci.formats.FormatTools;
import ome.io.nio.DimensionsOutOfBoundsException;
import ome.io.nio.PixelBuffer;
import ome.io.nio.RomioPixelBuffer;
import ome.model.core.Pixels;
import ome.util.PixelData;
+import org.slf4j.LoggerFactory;
import ucar.ma2.InvalidRangeException;
+/**
+ * Subclass of ome.io.nio.PixelBuffer handling OME-Zarr data.
+ **/
public class ZarrPixelBuffer implements PixelBuffer {
private static final org.slf4j.Logger log =
@@ -55,25 +55,25 @@ public class ZarrPixelBuffer implements PixelBuffer {
/** Reference to the pixels. */
private final Pixels pixels;
- /** Root of the OME-NGFF multiscale we are operating on */
+ /** Root of the OME-NGFF multiscale we are operating on. */
private final Path root;
- /** Requested resolution level */
+ /** Requested resolution level. */
private int resolutionLevel;
- /** Total number of resolution levels */
+ /** Total number of resolution levels. */
private final int resolutionLevels;
- /** Max Plane Width */
+ /** Max Plane Width. */
private final Integer maxPlaneWidth;
- /** Max Plane Height */
+ /** Max Plane Height. */
private final Integer maxPlaneHeight;
- /** Zarr attributes present on the root group */
+ /** Zarr attributes present on the root group. */
private final Map<String, Object> rootGroupAttributes;
- /** Zarr array corresponding to the current resolution level */
+ /** Zarr array corresponding to the current resolution level. */
private ZarrArray array;
/**
@@ -82,34 +82,32 @@ public class ZarrPixelBuffer implements PixelBuffer {
*/
private Map zIndexMap;
- /** { resolutionLevel, z, c, t, x, y, w, h } vs. tile byte array cache */
+ /** { resolutionLevel, z, c, t, x, y, w, h } vs. tile byte array cache. */
private final AsyncLoadingCache<List<Integer>, byte[]> tileCache;
- /** Whether or not the Zarr is on S3 or similar */
+ /** Whether or not the Zarr is on S3 or similar. */
private final boolean isRemote;
- /** Root path vs. metadata cache */
+ /** Root path vs. metadata cache. */
private final
AsyncLoadingCache<Path, Map<String, Object>> zarrMetadataCache;
- /** Array path vs. ZarrArray cache */
+ /** Array path vs. ZarrArray cache. */
private final AsyncLoadingCache<Path, ZarrArray> zarrArrayCache;
- /** Supported axes, X and Y are essential */
+ /** Supported axes, X and Y are essential. */
public enum Axis {
X, Y, Z, C, T;
}
- /** Maps axes to their corresponding indexes */
+ /** Maps axes to their corresponding indexes. */
private Map<Axis, Integer> axesOrder;
/**
- * Default constructor
+ * Default constructor.
+ *
* @param pixels Pixels metadata for the pixel buffer
* @param root The root of this buffer
- * @param maxTileLength Maximum tile length that can be used during
- * read operations
- * @throws IOException
*/
public ZarrPixelBuffer(Pixels pixels, Path root, Integer maxPlaneWidth,
Integer maxPlaneHeight,
@@ -121,10 +119,10 @@ public ZarrPixelBuffer(Pixels pixels, Path root, Integer maxPlaneWidth,
this.root = root;
this.zarrMetadataCache = zarrMetadataCache;
this.zarrArrayCache = zarrArrayCache;
- this.isRemote = root.toString().startsWith("s3://")? true : false;
+ this.isRemote = root.toString().startsWith("s3://") ? true : false;
try {
rootGroupAttributes = this.zarrMetadataCache.get(this.root).get();
- } catch (ExecutionException|InterruptedException e) {
+ } catch (ExecutionException | InterruptedException e) {
throw new IOException(e);
}
if (!rootGroupAttributes.containsKey("multiscales")) {
@@ -162,6 +160,7 @@ public ZarrPixelBuffer(Pixels pixels, Path root, Integer maxPlaneWidth,
/**
* Get Bio-Formats/OMERO pixels type for buffer.
+ *
* @return See above.
*/
public int getPixelsType() {
@@ -191,11 +190,11 @@ public int getPixelsType() {
/**
* Calculates the pixel length of a given NumPy like "shape".
+ *
* @param shape the NumPy like "shape" to calculate the length of
* @return See above
- * @see
- * <a href="https://numpy.org/doc/stable/reference/generated/numpy.shape.html">numpy.shape documentation</a>
+ * @see <a href="https://numpy.org/doc/stable/reference/generated/numpy.shape.html">numpy.shape
+ * documentation</a>
*/
private long length(int[] shape) {
return IntStream.of(shape)
@@ -269,7 +268,7 @@ private void read(byte[] buffer, int[] shape, int[] offset)
default:
throw new IllegalArgumentException(
"Data type " + dataType + " not supported");
- }
+ }
}
} catch (InvalidRangeException e) {
log.error("Error reading Zarr data", e);
@@ -294,8 +293,8 @@ private PixelData toPixelData(byte[] buffer) {
/**
* Retrieves the array chunk sizes of all subresolutions of this multiscale
* buffer.
+ *
* @return See above.
- * @throws IOException
*/
public int[][] getChunks() throws IOException {
List