From 22067cb91537813fa5ee805a446cb0eca7a8523d Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 30 Jan 2026 19:16:57 +0100 Subject: [PATCH 01/55] refactor(spi): redesign `ExportDataContext` with an immutable builder pattern and add validation tests Replaces the mutable `ExportDataContext` implementation with an immutable design using a builder pattern, adds input validation for `offset` and `length`, and includes corresponding unit tests for negative values. --- .../io/gdcc/spi/export/ExportDataContext.java | 254 +++++++++++++++--- .../spi/export/ExportDataContextTest.java | 25 ++ 2 files changed, 248 insertions(+), 31 deletions(-) create mode 100644 src/test/java/io/gdcc/spi/export/ExportDataContextTest.java diff --git a/src/main/java/io/gdcc/spi/export/ExportDataContext.java b/src/main/java/io/gdcc/spi/export/ExportDataContext.java index 9478d39..c7ce79b 100644 --- a/src/main/java/io/gdcc/spi/export/ExportDataContext.java +++ b/src/main/java/io/gdcc/spi/export/ExportDataContext.java @@ -1,48 +1,169 @@ package io.gdcc.spi.export; +import java.util.Objects; + /** - * - * @author landreev * Provides an optional mechanism for defining various data retrieval options * for the export subsystem in a way that should allow us adding support for * more options going forward with minimal or no changes to the already * implemented export plugins. */ -public class ExportDataContext { - private boolean datasetMetadataOnly = false; - private boolean publicFilesOnly = false; - private Integer offset = null; - private Integer length = null; +public final class ExportDataContext { - private ExportDataContext() { - - } + private final boolean datasetMetadataOnly; + private final boolean publicFilesOnly; + private final int offset; + private final int length; - public static ExportDataContext context() { - ExportDataContext context = new ExportDataContext(); - return context; - } + /** + * Default context with no special options. 
+ */ + private static final ExportDataContext DEFAULT = builder().build(); - public ExportDataContext withDatasetMetadataOnly() { - this.datasetMetadataOnly = true; - return this; + private ExportDataContext(Builder builder) { + this.datasetMetadataOnly = builder.datasetMetadataOnly; + this.publicFilesOnly = builder.publicFilesOnly; + this.offset = builder.offset; + this.length = builder.length; } - public ExportDataContext withPublicFilesOnly() { - this.publicFilesOnly = true; - return this; + /** + * Returns a builder for creating new contexts. + */ + public static Builder builder() { + return new Builder(); } - public ExportDataContext withOffset(Integer offset) { - this.offset = offset; - return this; + /** + * Returns a default context with no special options. + */ + public static ExportDataContext defaults() { + return DEFAULT; } - public ExportDataContext withLength(Integer length) { - this.length = length; - return this; + /** + * Builder for ExportDataContext. + */ + public static class Builder { + private int offset = 0; // default: no offset = beginning + private int length = 0; // default: no length = no limit + private boolean datasetMetadataOnly = false; + private boolean publicFilesOnly = false; + + private Builder() { + // Hiding constructor to enforce use of static factory method + } + + /** + * Excludes file-level metadata from the export. + */ + public Builder datasetMetadataOnly() { + this.datasetMetadataOnly = true; + return this; + } + + /** + * Sets whether to exclude file-level metadata. + */ + public Builder datasetMetadataOnly(boolean value) { + this.datasetMetadataOnly = value; + return this; + } + + /** + * Includes only public (non-restricted, non-embargoed) files. + */ + public Builder publicFilesOnly() { + this.publicFilesOnly = true; + return this; + } + + /** + * Sets whether to include only public files. 
+ */ + public Builder publicFilesOnly(boolean value) { + this.publicFilesOnly = value; + return this; + } + + /** + * Sets the starting position for results (0-based). + * + * @param offset zero-based starting position (must be >= 0) + * @return this builder + */ + public Builder offset(int offset) { + this.offset = offset; + return this; + } + + /** + * Sets the maximum number of results to return. + * + * @param length maximum number of items (0 = unlimited, must be >= 0) + * @return this builder + */ + public Builder length(int length) { + this.length = length; + return this; + } + + /** + * Convenience method to set both offset and length together. + * + * @param offset zero-based starting position (must be >= 0) + * @param length maximum number of items (0 = unlimited, must be >= 0) + * @return this builder + * @apiNote Pagination is primarily intended for retrieving specific subsets, + * not for iterating through large datasets. For full exports of + * large datasets, consider using streaming methods if available. + */ + public Builder pagination(int offset, int length) { + this.offset = offset; + this.length = length; + return this; + } + + /** + * Builds an immutable ExportDataContext. + * + * @return validated context + * @throws IllegalArgumentException if validation fails + */ + public ExportDataContext build() { + // Validate business rules + if (offset < 0) { + throw new IllegalArgumentException( + "offset must be non-negative, got: " + offset + ); + } + + if (length < 0) { + throw new IllegalArgumentException( + "length must be non-negative (0 = unlimited), got: " + length + ); + } + + return new ExportDataContext(this); + } + + /** + * Copies the properties from the given {@link ExportDataContext} instance into a new {@code Builder}. 
+ * + * @param source the {@code ExportDataContext} instance from which to copy properties + * @return a new {@code Builder} instance with properties copied from the provided context + */ + public Builder from(ExportDataContext source) { + return new Builder() + .datasetMetadataOnly(source.datasetMetadataOnly) + .publicFilesOnly(source.publicFilesOnly) + .offset(source.offset) + .length(source.length); + } } + // Getters + public boolean isDatasetMetadataOnly() { return datasetMetadataOnly; } @@ -51,11 +172,82 @@ public boolean isPublicFilesOnly() { return publicFilesOnly; } - public Integer getOffset() { - return offset; + /** + * Returns the starting offset for results. + * + * @return zero-based offset (0 = start from beginning) + */ + public int getOffset() { + return offset; } - public Integer getLength() { - return length; - } + /** + * Returns the maximum number of results to return. + * + * @return maximum length (0 = unlimited) + */ + public int getLength() { + return length; + } + + /** + * @return true if a non-zero offset is configured + */ + public boolean hasOffset() { + return offset > 0; + } + + /** + * @return true if length is limited (non-zero) + */ + public boolean hasLengthLimit() { + return length > 0; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ExportDataContext that = (ExportDataContext) o; + return datasetMetadataOnly == that.datasetMetadataOnly && + publicFilesOnly == that.publicFilesOnly && + offset == that.offset && + length == that.length; + } + + @Override + public int hashCode() { + return Objects.hash(datasetMetadataOnly, publicFilesOnly, offset, length); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("ExportDataContext{"); + boolean hasContent = false; + + if (datasetMetadataOnly) { + sb.append("datasetMetadataOnly"); + hasContent = true; + } + + if (publicFilesOnly) { + if (hasContent) 
sb.append(", "); + sb.append("publicFilesOnly"); + hasContent = true; + } + + if (offset > 0 || length > 0) { + if (hasContent) sb.append(", "); + sb.append("offset=").append(offset); + sb.append(", length=").append(length == 0 ? "unlimited" : length); + hasContent = true; + } + + if (!hasContent) { + sb.append("defaults"); + } + + sb.append("}"); + return sb.toString(); + } } diff --git a/src/test/java/io/gdcc/spi/export/ExportDataContextTest.java b/src/test/java/io/gdcc/spi/export/ExportDataContextTest.java new file mode 100644 index 0000000..f663c3f --- /dev/null +++ b/src/test/java/io/gdcc/spi/export/ExportDataContextTest.java @@ -0,0 +1,25 @@ +package io.gdcc.spi.export; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class ExportDataContextTest { + + @Test + void testBuildThrowsExceptionForNegativeOffset() { + // Given + var builder = ExportDataContext.builder().offset(-5); + // When & Then + assertThrows(IllegalArgumentException.class, builder::build); + } + + @Test + void testBuildThrowsExceptionForNegativeLength() { + // Given + var builder = ExportDataContext.builder().length(-5); + // When & Then + assertThrows(IllegalArgumentException.class, builder::build); + } + +} \ No newline at end of file From d45126c9d33fd7871b0555792298c5a3774a8eee Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 30 Jan 2026 20:59:07 +0100 Subject: [PATCH 02/55] refactor(spi): change `ExportException` to extend `RuntimeException` instead of `IOException` We need a way to throw an unchecked exception from the core back to the plugin. IOException is a checked exception. With unchecked exceptions, we don't need to declare them on methods (which we didn't so far) and it makes more sense to describe them in the Javadocs for the data provider. 
--- src/main/java/io/gdcc/spi/export/ExportException.java | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/main/java/io/gdcc/spi/export/ExportException.java b/src/main/java/io/gdcc/spi/export/ExportException.java index c816a60..57a6a1b 100644 --- a/src/main/java/io/gdcc/spi/export/ExportException.java +++ b/src/main/java/io/gdcc/spi/export/ExportException.java @@ -1,8 +1,6 @@ package io.gdcc.spi.export; -import java.io.IOException; - -public class ExportException extends IOException { +public class ExportException extends RuntimeException { public ExportException(String message) { super(message); } From d18d8e14542f6a912c4e32a6e1b9ccd1ad5d37eb Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 30 Jan 2026 21:21:03 +0100 Subject: [PATCH 03/55] refactor(spi): enhance `ExportDataProvider` with context-aware data retrieval methods and deprecations Revamp `ExportDataProvider` to provide context-sensitive methods for dataset metadata retrieval, improve documentation with detailed annotations, and deprecate older, less flexible methods. This keeps the old contract alive but encourages upgrading to the new style. The no-args methods can be removed at a later point in time. 
--- .../gdcc/spi/export/ExportDataProvider.java | 331 ++++++++++++------ 1 file changed, 233 insertions(+), 98 deletions(-) diff --git a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index 4197d97..8820db2 100644 --- a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -1,123 +1,258 @@ -package io.gdcc.spi.export; -import java.io.InputStream; -import java.util.Optional; +package io.gdcc.spi.export; import jakarta.json.JsonArray; import jakarta.json.JsonObject; +import java.io.InputStream; +import java.util.Optional; + /** - * Provides all the metadata Dataverse has about a given dataset that can then - * be used by an @see Exporter to create a new metadata export format. - * + * Provides dataset metadata that can be used by an {@link Exporter} to create + * new metadata export formats. + *

+ * This interface offers multiple methods for retrieving dataset metadata in various + * formats and levels of detail. Exporters should choose the method that best fits + * their needs, considering the completeness of metadata and performance implications. + * + *

Implementation Guide

+ * Implementers must override the context-accepting versions of all data retrieval + * methods. No-argument convenience methods are provided as default implementations + * for backward compatibility but are deprecated and will be removed in a future version. + * + *

Context Handling

+ * Implementations should respect context options where applicable. + * Not all methods support all context options - see individual method documentation for details. + * All methods require a non-null {@link ExportDataContext}. + * Passing null will result in a {@link NullPointerException}. + * Callers should use {@link ExportDataContext#defaults()} instead of passing null. + * + * @see Exporter + * @see ExportDataContext */ public interface ExportDataProvider { - + /** - * @return - dataset metadata in the standard Dataverse JSON format used in the - * API and available as the JSON metadata export via the user interface. - * @apiNote - there is no JSON schema defining this output, but the format is - * well documented in the Dataverse online guides. This, and the - * OAI_ORE export are the only two that provide 'complete' - * dataset-level metadata along with basic file metadata for each file - * in the dataset. - * @param context - supplies optional parameters. Needs to support - * context.isDatasetMetadataOnly(). In a situation where we - * need to generate a format like DC that has no use for the - * file-level metadata, it makes sense to skip retrieving and - * formatting it, since there can be a very large number of - * files in a dataset. + * Returns complete dataset metadata in Dataverse's standard JSON format. + *

+ * This format includes comprehensive dataset-level metadata along with basic + * metadata for each file in the dataset. It is the same JSON format used in + * the Dataverse API and available as a metadata export option in the UI. + * + * @param context configuration for data retrieval + * @return dataset metadata in Dataverse JSON format + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if context is null + * @since 2.1.0 + * @apiNote While no formal JSON schema exists for this format, it is well-documented + * in the Dataverse guides. Along with OAI_ORE, this is one of only two export + * formats that provide complete dataset and file metadata. + * @implNote Implementations must respect the {@code datasetMetadataOnly} flag. + * When true, file-level metadata should be excluded to optimize performance + * for datasets with large numbers of files. Other context options + * (publicFilesOnly, offset, length) do not apply and should be ignored. */ - JsonObject getDatasetJson(ExportDataContext... context); - + JsonObject getDatasetJson(ExportDataContext context); + /** - * - * @return - dataset metadata in the JSON-LD based OAI_ORE format used in - * Dataverse's archival bag export mechanism and as available in the - * user interface and by API. - * @apiNote - THis, and the JSON format are the only two that provide complete - * dataset-level metadata along with basic file metadata for each file - * in the dataset. - * @param context - supplies optional parameters. + * Returns complete dataset metadata using default options. + * + * @return dataset metadata in Dataverse JSON format + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetJson(ExportDataContext)} instead. */ - JsonObject getDatasetORE(ExportDataContext... 
context); - + @Deprecated(since = "2.1.0", forRemoval = true) + default JsonObject getDatasetJson() { + return getDatasetJson(ExportDataContext.defaults()); + } + /** - * Dataverse is capable of extracting DDI-centric metadata from tabular - * datafiles. This detailed metadata, which is only available for successfully - * "ingested" tabular files, is not included in the output of any other methods - * in this interface. - * - * @return - a JSONArray with one entry per ingested tabular dataset file. - * @apiNote - there is no JSON schema available for this output and the format - * is not well documented. Implementers may wish to expore the @see - * edu.harvard.iq.dataverse.export.DDIExporter and the @see - * edu.harvard.iq.dataverse.util.json.JSONPrinter classes where this - * output is used/generated (respectively). - * @param context - supplies optional parameters. + * Returns dataset metadata in JSON-LD-based OAI-ORE format. + *

+ * OAI-ORE (Open Archives Initiative Object Reuse and Exchange) provides a structured way to describe + * aggregations of web resources. This format is used in Dataverse's archival bag export mechanism + * and available via UI and API. + * + * @param context configuration for data retrieval + * @return dataset metadata in OAI_ORE format + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if context is null + * @since 2.1.0 + * @apiNote Along with the standard JSON format, this is one of only two export + * formats that provide complete dataset-level metadata along with basic + * file metadata for each file in the dataset. + * @implNote Implementations must respect the {@code datasetMetadataOnly} flag. + * Other context options do not apply and should be ignored. */ - JsonArray getDatasetFileDetails(ExportDataContext... context); - + JsonObject getDatasetORE(ExportDataContext context); + /** - * Similar to the above, but - * a) retrieves the information for the ingested/tabular data files _only_ - * b) provides an option for retrieving this stuff in batches - * c) provides an option for skipping restricted/embargoed etc. files. - * Intended for datasets with massive numbers of tabular files and datavariables. - * @param context - supplies optional parameters. - * current (2.1.0) known use cases: - * context.isPublicFilesOnly(); - * context.getOffset(); - * context.getLength(); - * @return json array containing the datafile/filemetadata->datatable->datavariable metadata - * @throws ExportException + * Returns dataset metadata in OAI-ORE format using default options. + * + * @return dataset metadata in OAI-ORE format + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetORE(ExportDataContext)} instead. */ - JsonArray getTabularDataDetails(ExportDataContext ... 
context) throws ExportException; + @Deprecated(since = "2.1.0", forRemoval = true) + default JsonObject getDatasetORE() { + return getDatasetORE(ExportDataContext.defaults()); + } /** - * - * @return - the subset of metadata conforming to the schema.org standard as - * available in the user interface and as included as header metadata in - * dataset pages (for use by search engines) - * @apiNote - as this metadata export is not complete, it should only be used as - * a starting point for an Exporter if it simplifies your exporter - * relative to using the JSON or OAI_ORE exports. - * @param context - supplies optional parameters. + * Returns detailed metadata for all files in the dataset. + *

+ * For tabular files that have been successfully ingested, this includes + * DDI-centric metadata extracted during the ingest process. This detailed + * metadata is not available through other methods in this interface. + * + * @param context configuration for data retrieval + * @return JSON array with one entry per dataset file (both tabular and non-tabular) + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if context is null + * @since 2.1.0 + * @apiNote No formal JSON schema is available for this output. The format is not + * extensively documented; implementers may wish to examine the DDIExporter + * and JSONPrinter classes in the Dataverse codebase for usage examples. + * @implNote Implementations should respect both {@code datasetMetadataOnly} and + * {@code publicFilesOnly} flags. Pagination options do not apply and + * should be ignored. */ - JsonObject getDatasetSchemaDotOrg(ExportDataContext... context); - + JsonArray getDatasetFileDetails(ExportDataContext context); + /** - * - * @return - the subset of metadata conforming to the DataCite standard as - * available in the Dataverse user interface and as sent to DataCite when DataCite DOIs are used. - * @apiNote - as this metadata export is not complete, it should only be used as - * a starting point for an Exporter if it simplifies your exporter - * relative to using the JSON or OAI_ORE exports. - * @param context - supplies optional parameters. + * Returns detailed metadata for all files using default options. + * + * @return JSON array with one entry per dataset file + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetFileDetails(ExportDataContext)} instead. */ - String getDataCiteXml(ExportDataContext... 
context); - + @Deprecated(since = "2.1.0", forRemoval = true) + default JsonArray getDatasetFileDetails() { + return getDatasetFileDetails(ExportDataContext.defaults()); + } + /** - * If an Exporter has specified a prerequisite format name via the - * getPrerequisiteFormatName() method, it can call this method to retrieve - * metadata in that format. - * - * @return - metadata in the specified prerequisite format (if available from - * another internal or added Exporter) as an Optional - * @apiNote - This functionality is intended as way to easily generate alternate - * formats of the ~same metadata, e.g. to support download as XML, - * HTML, PDF for a specific metadata standard (e.g. DDI). It can be - * particularly useful, reative to starting from the output of one of - * the getDataset* methods above, if there are existing libraries that - * can convert between these formats. Note that, since Exporters can be - * replaced, relying on this method could cause your Exporter to - * malfunction, e.g. if you depend on format "ddi" and a third party - * Exporter is configured to replace the internal ddi Exporter in - * Dataverse. - * @param context - supplies optional parameters. + * Returns detailed metadata for tabular files only, with support for filtering and pagination. + *

+ * This method is specifically designed for datasets with large numbers of tabular + * files and data variables. It provides access to the complete hierarchy of + * datafile → filemetadata → datatable → datavariable metadata. + * + * @param context configuration for data retrieval + * @return JSON array containing metadata for tabular files only + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if context is null + * @since 2.1.0 + * @apiNote Pagination is intended for retrieving specific subsets, not for iterating + * through large result sets. For complete exports, call once without pagination + * or iterate by checking for empty results. + * @implNote Implementations should respect {@code publicFilesOnly} to filter restricted + * or embargoed files. Pagination via {@code offset} and {@code length} should + * be supported where feasible. The {@code datasetMetadataOnly} flag does not + * apply and should be ignored. */ - default Optional getPrerequisiteInputStream(ExportDataContext... context) { + JsonArray getTabularDataDetails(ExportDataContext context); + + /** + * Returns dataset metadata conforming to the schema.org standard. + *

+ * This metadata subset is used in dataset page headers to improve discoverability by search engines. + * It provides structured data markup (JSON-LD) following the schema.org vocabulary. + * + * @param context configuration for data retrieval + * @return dataset metadata in schema.org format + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if context is null + * @since 2.1.0 + * @apiNote This metadata export is not complete. It should only be used as a starting + * point for an Exporter if it simplifies implementation compared to using + * the complete JSON or OAI_ORE exports. + * @implNote All context options are ignored by this method. + */ + JsonObject getDatasetSchemaDotOrg(ExportDataContext context); + + /** + * Returns dataset metadata in schema.org format using default options. + * + * @return dataset metadata in schema.org format + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetSchemaDotOrg(ExportDataContext)} instead. + */ + @Deprecated(since = "2.1.0", forRemoval = true) + default JsonObject getDatasetSchemaDotOrg() { + return getDatasetSchemaDotOrg(ExportDataContext.defaults()); + } + + /** + * Returns dataset metadata conforming to the DataCite standard as XML. + *

+ * This is the same metadata format sent to DataCite when DataCite DOIs are used. + * It provides citation metadata following the DataCite Metadata Schema. + * + * @param context configuration for data retrieval + * @return dataset metadata as DataCite XML string + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if context is null + * @since 2.1.0 + * @apiNote This metadata export is not complete. It should only be used as a starting + * point for an Exporter if it simplifies implementation compared to using + * the complete JSON or OAI_ORE exports. + * @implNote All context options are ignored by this method. + */ + String getDataCiteXml(ExportDataContext context); + + /** + * Returns dataset metadata in DataCite XML format using default options. + * + * @return dataset metadata as DataCite XML string + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDataCiteXml(ExportDataContext)} instead. + */ + @Deprecated(since = "2.1.0", forRemoval = true) + default String getDataCiteXml() { + return getDataCiteXml(ExportDataContext.defaults()); + } + + /** + * Returns metadata in the format specified by an Exporter's prerequisite. + *

+ * Some Exporters transform metadata from one standard format to another (e.g., + * DDI XML to DDI HTML). Such Exporters declare a prerequisite format via + * {@link Exporter#getPrerequisiteFormatName()}, and this method provides access + * to that prerequisite metadata. + * + * @param context configuration passed to the prerequisite exporter + * @return metadata in the prerequisite format, or empty if no prerequisite is configured + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if context is null + * @since 2.1.0 + * @apiNote This is useful for creating alternate representations of the same metadata + * (e.g., XML, HTML, PDF versions of a standard like DDI), especially when + * conversion libraries exist. Note that if a third-party Exporter replaces + * the internal exporter you depend on, this method may return unexpected results. + * @implNote The default implementation returns empty. Override only if your provider + * supports prerequisite format chaining. The prerequisite exporter receives + * the same context as specified in this call. + */ + default Optional getPrerequisiteInputStream(ExportDataContext context) { return Optional.empty(); } - - } + + /** + * Returns metadata in the prerequisite format using default options. + * + * @return metadata in the prerequisite format, or empty if no prerequisite is configured + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getPrerequisiteInputStream(ExportDataContext)} instead. 
+ */ + @Deprecated(since = "2.1.0", forRemoval = true) + default Optional getPrerequisiteInputStream() { + return getPrerequisiteInputStream(ExportDataContext.defaults()); + } +} From 0947cc0043c60ebfe148df425b7be3cb9f5a32f0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 30 Jan 2026 21:59:26 +0100 Subject: [PATCH 04/55] chore(deps): add JUnit Jupiter dependency for unit testing --- pom.xml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pom.xml b/pom.xml index 031e27d..c88a136 100644 --- a/pom.xml +++ b/pom.xml @@ -44,6 +44,13 @@ provided + + + + org.junit.jupiter + junit-jupiter + test + From ece68aef1b9c8929b65510100dfcb5ec9670b567 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Feb 2026 19:42:00 +0100 Subject: [PATCH 05/55] feat(core): introduce plugin loader, core-provider and plugin contracts Add foundational plugin infrastructure, including `PluginLoader`, `Plugin`, `CoreProvider`, and related exception handling (`LoaderException`, `LoaderProblem`). Setup API for plugin discovery, API level compatibility checks, and isolated class loading. Includes initial implementation of `TestPlugin` and `TestProvider` with unit tests. 
--- .../gdcc/spi/core/loader/LoaderException.java | 17 ++ .../gdcc/spi/core/loader/LoaderProblem.java | 33 +++ .../io/gdcc/spi/core/loader/PluginLoader.java | 211 ++++++++++++++++++ .../io/gdcc/spi/core/loader/PluginSource.java | 101 +++++++++ .../io/gdcc/spi/core/plugin/CoreProvider.java | 20 ++ .../java/io/gdcc/spi/core/plugin/Plugin.java | 66 ++++++ .../spi/core/loader/PluginLoaderTest.java | 84 +++++++ .../io/gdcc/spi/core/test/TestPlugin.java | 28 +++ .../io/gdcc/spi/core/test/TestProvider.java | 13 ++ .../services/io.gdcc.spi.core.plugin.Plugin | 1 + 10 files changed, 574 insertions(+) create mode 100644 src/main/java/io/gdcc/spi/core/loader/LoaderException.java create mode 100644 src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java create mode 100644 src/main/java/io/gdcc/spi/core/loader/PluginLoader.java create mode 100644 src/main/java/io/gdcc/spi/core/loader/PluginSource.java create mode 100644 src/main/java/io/gdcc/spi/core/plugin/CoreProvider.java create mode 100644 src/main/java/io/gdcc/spi/core/plugin/Plugin.java create mode 100644 src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java create mode 100644 src/test/java/io/gdcc/spi/core/test/TestPlugin.java create mode 100644 src/test/java/io/gdcc/spi/core/test/TestProvider.java create mode 100644 src/test/resources/META-INF/services/io.gdcc.spi.core.plugin.Plugin diff --git a/src/main/java/io/gdcc/spi/core/loader/LoaderException.java b/src/main/java/io/gdcc/spi/core/loader/LoaderException.java new file mode 100644 index 0000000..9975004 --- /dev/null +++ b/src/main/java/io/gdcc/spi/core/loader/LoaderException.java @@ -0,0 +1,17 @@ +package io.gdcc.spi.core.loader; + +import java.util.List; + +public class LoaderException extends RuntimeException { + + private final List problems; + + public LoaderException(List problems) { + super("Multiple problems have been detected by the loader, accessible from getProblems()."); + this.problems = List.copyOf(problems); + } + + public List getProblems() { + 
return problems; + } +} diff --git a/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java b/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java new file mode 100644 index 0000000..89199cd --- /dev/null +++ b/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java @@ -0,0 +1,33 @@ +package io.gdcc.spi.core.loader; + +import java.nio.file.Path; +import java.util.Set; + +public sealed interface LoaderProblem permits + LoaderProblem.SourceFailure, + LoaderProblem.LocationFailure, + LoaderProblem.DuplicateSources { + + String message(); + + record SourceFailure(Throwable cause) implements LoaderProblem { + @Override + public String message() { + return cause.getClass().getSimpleName() + ": " + cause.getMessage(); + } + } + + record LocationFailure(Path location, Throwable cause) implements LoaderProblem { + @Override + public String message() { + return "Loading from " + location + " failed: " + cause.getMessage(); + } + } + + record DuplicateSources(Set duplicateGroup) implements LoaderProblem { + @Override + public String message() { + return "Duplicate plugin sources detected: " + duplicateGroup; + } + } +} diff --git a/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java b/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java new file mode 100644 index 0000000..43f5fd6 --- /dev/null +++ b/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java @@ -0,0 +1,211 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.plugin.Plugin; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.AccessDeniedException; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.LinkOption; +import java.nio.file.NotDirectoryException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.ServiceLoader; +import 
java.util.Set; +import java.util.regex.PatternSyntaxException; + +/** + * Loads plugins of a specified type from JAR files in a given directory using the Java ServiceLoader mechanism. + *

+ * Each plugin must implement the {@link Plugin} interface and provide a non-null, non-blank identity via the + * {@link Plugin#identity()} method. Plugins are loaded from individual JAR files, each loaded in its own + * {@link URLClassLoader}, enabling isolated class loading for plugin dependencies. + *

+ *

+ * This class supports custom ClassLoader hierarchies to accommodate complex deployment environments, and + * aggregates errors encountered during plugin discovery and loading into a single {@link LoaderException} + * if no plugins are successfully loaded. + *

+ *

+ * It supports one-time classloading, but no reloading of changed JARs at runtime. + * An application restart is required to pick up changes to plugin JARs. + *

+ *
+ * @param <T> the type of plugin to load, constrained to implement the {@link Plugin} interface
+ */
+public class PluginLoader<T extends Plugin> {
+
+    private final Class<T> pluginClass;
+    private final ClassLoader parentClassLoader;
+
+    /**
+     * Constructs a new PluginLoader that will load plugins of the specified type {@code T}.
+     * The parent ClassLoader is set to the current thread's context ClassLoader, which allows
+     * plugins to access classes and resources on the core's classpath.
+     *
+     * @param pluginClass the Class object representing the plugin type {@code T} to load
+     */
+    public PluginLoader(Class<T> pluginClass) {
+        this(pluginClass, Thread.currentThread().getContextClassLoader());
+    }
+
+    /**
+     * Constructs a new PluginLoader that will load plugins of the specified type {@code T}.
+     *
+     * @param pluginClass the Class object representing the plugin type {@code T} to load
+     * @param parentClassLoader the ClassLoader to be used as the parent for class loading of plugins
+     */
+    public PluginLoader(Class<T> pluginClass, ClassLoader parentClassLoader) {
+        this.pluginClass = pluginClass;
+        this.parentClassLoader = parentClassLoader;
+    }
+
+    /**
+     *

Loads all plugins of type {@code T} from JAR files located in the specified directory. + * Each JAR file is loaded using a dedicated {@link URLClassLoader}, and plugins are + * discovered via the Java {@link ServiceLoader} mechanism (META-INF/services/package.plus.service.ClassName file). + * + *

For each discovered plugin, its {@link Plugin#identity()} must be non-null and non-blank; + * otherwise, it is skipped and an error is recorded. + *

+ * + * @param pluginJarsLocation the directory containing JAR files to scan for plugins + * @return a map, linking {@link PluginSource} metadata to the corresponding plugin instance + * @throws LoaderException if one or more errors occur during loading, if no plugins + * could be successfully loaded, or if there are any duplicates. + * Note: The exception may contain multiple causes, each associated with a specific file or failure point + */ + public Map load(Path pluginJarsLocation) { + // Find and load plugins from JAR sources. + // "jar:!/" is the syntax required to scan a complete JAR file for classes + Map plugins = load(findSources(pluginJarsLocation)); + + // Make sure there are no duplicate plugins + List problems = new ArrayList<>(); + Set> duplicateGroups = PluginSource.groupDuplicates(plugins.keySet()); + if (!duplicateGroups.isEmpty()) { + duplicateGroups.forEach(group -> problems.add(new LoaderProblem.DuplicateSources(group))); + } + + // TODO: add verification logic for API levels and provider services + + if (problems.isEmpty()) { + return plugins; + } else { + throw new LoaderException(problems); + } + } + + /** + * Locates plugin files within the given directory (but not subdirectories) and constructs + * corresponding URL arrays for class loading. + * + * @param pluginsLocation the root directory path to search for plugins + * @return a map where each key is a path to a root classpath (a JAR file or directory) and the corresponding value is + * a single-element array containing the generated URL for that location. 
+ * Note: for JARs, the URL is of the form "jar:!/" as required by {@code URLClassLoader} + * @throws LoaderException if one or more errors occur during directory scanning or URL construction + * and no valid mappings could be produced; the exception may contain multiple causes + * each associated with a specific file or failure point + */ + Map findSources(Path pluginsLocation) { + // Collect as many problems as possible before throwing an exception + List problems = new ArrayList<>(); + Map classRoots = new HashMap<>(); + + // Find all JAR files at the given location (ignoring potential subdirectories) + try (DirectoryStream stream = Files.newDirectoryStream(pluginsLocation, "*.jar")) { + // Using the foreach loop here to enable catching the URI/URL exceptions + for (Path path : stream) { + try { + // The URL[] is necessary as classloaders can deal with multiple locations at once. + classRoots.put(path, new URL[]{pathToUrl(path, "jar", "!/")}); + // This is not likely to happen, as we construct the URL from a valid path only + } catch (MalformedURLException e) { + problems.add(new LoaderProblem.LocationFailure(path, e)); + } + } + + // In addition: put the directory itself to enable loading exploded archives (mostly useful for testing). + // The location must be a browsable directory as otherwise exceptions would have been raised. + classRoots.put(pluginsLocation, new URL[]{pathToUrl(pluginsLocation, "", "")}); + + // NotDirectoryException | AccessDeniedException is a subset of IOException and covers these cases. + } catch (PatternSyntaxException | IOException e) { + problems.add(new LoaderProblem.SourceFailure(e)); + } + + if (problems.isEmpty()) { + return classRoots; + } + throw new LoaderException(problems); + } + + /** + * Loads plugins of type {@code T} from the specified mapping of locations to JAR URLs. 
+ * Each location is processed by creating a dedicated {@link URLClassLoader}, and plugins are + * discovered via the Java {@link ServiceLoader} mechanism using the configured plugin class. + * + * For each discovered plugin, its {@link Plugin#identity()} must be non-null and non-blank; + * otherwise, it is skipped and an error is recorded. + * + * The returned map's keys describe the source of each loaded plugin via {@link PluginSource}, + * associating the plugin's logical identity, class name, and JAR file location. It is the + * caller's responsibility to verify no duplicates (by class name or identity) exist before + * handing the plugins to the core. + * + * @param sources a mapping from (JAR) file paths to their corresponding URLs used for class loading + * @return a map from {@link PluginSource} metadata to the corresponding plugin instance + * @throws LoaderException if one or more errors occur during loading and no plugins + * could be successfully loaded; the exception may contain multiple causes, + * each associated with a specific JAR file or failure point + */ + Map load(Map sources) { + List problems = new ArrayList<>(); + Map loadedPlugins = new HashMap<>(); + + // Create URLClassLoader for each file and load the plugin + sources.forEach((location, sourceUrl) -> { + try (URLClassLoader classLoader = URLClassLoader.newInstance(sourceUrl, this.parentClassLoader)) { + // Load all plugins that can be found within the source for type T + ServiceLoader loader = ServiceLoader.load(this.pluginClass, classLoader); + + // Iterate over all found plugins and add to the plugin map, including source information + loader.forEach(plugin -> { + String identity = plugin.identity(); + if (identity == null || identity.isBlank()) { + problems.add(new LoaderProblem.LocationFailure(location, new IllegalArgumentException(plugin.getClass().getCanonicalName() + "'s identity cannot be null or blank"))); + return; + } + + // Save the plugin source metadata and put the loaded 
plugin into the map + PluginSource source = new PluginSource(location, plugin.getClass().getCanonicalName(), identity); + loadedPlugins.put(source, plugin); + }); + } catch (IOException | NoSuchMethodError e) { + problems.add(new LoaderProblem.LocationFailure(location, e)); + } + }); + + if (problems.isEmpty()) { + return loadedPlugins; + } + throw new LoaderException(problems); + } + + static URL pathToUrl(Path path, String urlPrefix, String urlSuffix) throws MalformedURLException { + return new URL( + (urlPrefix == null || urlPrefix.isBlank() ? "" : urlPrefix + ":" ) + + path.toUri().toURL() + + ( urlSuffix == null || urlSuffix.isBlank() ? "" : urlSuffix ) + ); + } + +} diff --git a/src/main/java/io/gdcc/spi/core/loader/PluginSource.java b/src/main/java/io/gdcc/spi/core/loader/PluginSource.java new file mode 100644 index 0000000..86a5039 --- /dev/null +++ b/src/main/java/io/gdcc/spi/core/loader/PluginSource.java @@ -0,0 +1,101 @@ +package io.gdcc.spi.core.loader; + +import java.nio.file.Path; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Objects; +import java.util.Set; + +/** + * This record captures metadata about where a plugin was discovered and loaded from, associating + * 1) the plugin's logical identity (as returned by {@link io.gdcc.spi.core.plugin.Plugin#identity()}), + * 2) the plugin's class name, and + * 3) the JAR file path containing the plugin implementation. + */ +public record PluginSource(Path location, String className, String identity) { + + @Override + public String toString() { + return String.format("%s: className=%s, identity=%s", location, className, identity); + } + + /** + * Checks if this PluginSource is a duplicate of another based on ANY of: + * - Same class name + * - Same normalized identity (case-insensitive, separators removed) + * + * "Same location" doesn't count as a duplicate to enable loading multiple plugins from the same location. 
+ * + * While {@link #equals(Object)} checks for strict logical equality (important for {@code Set} or {@code Map}), + * this method is targeted at detecting logical duplicates (that may have different locations) but share + * other identifying characteristics. + */ + public boolean isDuplicateOf(PluginSource other) { + if (other == null) return false; + return Objects.equals(className, other.className) || + Objects.equals(normalizeIdentity(identity), normalizeIdentity(other.identity)); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + + PluginSource that = (PluginSource) obj; + + return Objects.equals(location, that.location) && + Objects.equals(className, that.className) && + Objects.equals(normalizeIdentity(identity), normalizeIdentity(that.identity)); + } + + /** + * Normalizes the given identity string for comparison purposes by converting it to lowercase + * and removing all occurrences of the characters "/\-_:.#~*" which are commonly used to separate words. + * + * @param identity the identity string to normalize + * @return the normalized identity string, or null if the input is null + */ + private String normalizeIdentity(String identity) { + if (identity == null) return null; + return identity.toLowerCase().replaceAll("[/\\\\_\\-:.#~*]+", ""); + } + + @Override + public int hashCode() { + return Objects.hash(location, className, normalizeIdentity(identity)); + } + + /** + * Groups duplicate {@link PluginSource} instances together based on shared identifying characteristics. + * Two sources are considered duplicates if they have the same class name or the same normalized identity + * (case-insensitive, with common separator characters removed), as determined by {@link PluginSource#isDuplicateOf(PluginSource)}. + * Sources at the same location are NOT considered duplicates, allowing multiple plugins to be loaded from the same file. 
+ * + * @param sources the set of plugin sources to group; may be empty or contain duplicates + * @return a set of disjoint groups, where each group is a set of mutually duplicate sources + */ + public static Set> groupDuplicates(Set sources) { + Set> groups = new LinkedHashSet<>(); + // Create a copy of the sources as a working set + Set remaining = new HashSet<>(sources); + + // Iterate over all remaining sources + while (!remaining.isEmpty()) { + PluginSource source = remaining.iterator().next(); + Set group = new LinkedHashSet<>(); + + // Check against every remaining source if this is a new duplicate + for (PluginSource candidate : remaining) { + if (source.isDuplicateOf(candidate)) { + group.add(candidate); + } + } + + // Add the group to the result set and remove any found duplicates from the working set (so they are not checked again) + groups.add(group); + remaining.removeAll(group); + } + + return groups; + } +} diff --git a/src/main/java/io/gdcc/spi/core/plugin/CoreProvider.java b/src/main/java/io/gdcc/spi/core/plugin/CoreProvider.java new file mode 100644 index 0000000..9c2697f --- /dev/null +++ b/src/main/java/io/gdcc/spi/core/plugin/CoreProvider.java @@ -0,0 +1,20 @@ +package io.gdcc.spi.core.plugin; + +public interface CoreProvider { + + /** + * Plugins require accessibility to core functionality, which {@link CoreProvider}s offer to them. + * Any provider implementation in the core requires building against a specific version of the API contract. + * To avoid plugins asking the wrong questions or the wrong way, the plugin loader will need to check + * that the core provider implementations use the same API contract level as the plugins. + * + * @apiNote This method must be overridden by any provider interface extending this interface and return an API level + * inlined at compile-time of the interface. 
+ * @return the API level the core uses + * @throws UnsupportedOperationException when the provider interface does not override the method + */ + static int apiLevel() { + throw new UnsupportedOperationException("Provider must override apiLevel()"); + } + +} diff --git a/src/main/java/io/gdcc/spi/core/plugin/Plugin.java b/src/main/java/io/gdcc/spi/core/plugin/Plugin.java new file mode 100644 index 0000000..df98f0e --- /dev/null +++ b/src/main/java/io/gdcc/spi/core/plugin/Plugin.java @@ -0,0 +1,66 @@ +package io.gdcc.spi.core.plugin; + +import java.util.Set; + +public interface Plugin { + + /** + * Returns the unique, machine-readable identifier for this plugin. + * This will be the primary key within the core to identify a specific plugin implementation. + * + * @return the plugin's identity string, which must be non-null, non-blank, and URL compatible. + * @implSpec This method must be overridden by any plugin implementation and return a non-null, non-blank, + * URL-compatible string. No plugin interface may provide a default implementation. + */ + String identity(); + + /** + * Returns the plugin API level that this plugin has been built against to the core system. + * This represents the version of the plugin contract that the plugin implementation + * adheres to, used by the core loader to ensure compatibility between the plugin + * and the core system. + * + * @return the API level provided by this plugin + * @implSpec This method must be overridden by any plugin implementation and return an API level + * inlined at compile-time of the interface. A plugin interface may provide a default implementation, + * but must be aware this has to be dropped once the initial API level needs to be increased due + * to a breaking change. + * @implNote Inlining the API level at build time requires this method body to return the primitive constant + * from the plugin interface. 
If the plugin interface uses an {@code int API_LEVEL}
+     * (always {@code static final} constants in interfaces), example code would look like this:
+     * {@code return PutPluginInterfaceNameHere.API_LEVEL; }
+     */
+    int providedPluginApiLevel();
+
+    /**
+     * A plugin interacts with the core using any number of providers.
+     * When a plugin is built, it is linked against a specific version of the {@link CoreProvider} contracts.
+     * At loading time, the core must ensure that the plugin will use the same API contract level as the provider
+     * implementations in the core expect.
+     * A loader uses this method to determine the specific provider API level used at build-time of the plugin.
+     *
+     * @return the required API level for the given provider class the plugin expects the core to support
+     * @throws IllegalArgumentException when a given provider class is not needed or unknown to the plugin
+     * @implSpec This method must be overridden by any plugin implementation and return an API level
+     * inlined at compile-time of the interface. A plugin interface may provide a default implementation,
+     * but must be aware this has to be dropped once the initial API level needs to be increased due
+     * to a breaking change.
+     */
+    int requiredProviderApiLevel(Class<? extends CoreProvider> providerClass);
+
+    /**
+     * Returns the set of {@link CoreProvider} interfaces that this plugin expects to be available
+     * at runtime for interacting with the core system.
+     * The core uses this information to validate compatibility and ensure all required provider contracts
+     * are present and match the API level used during plugin compilation.
+     * See also {@link #requiredProviderApiLevel(Class)} for details on API level compatibility.
+     *
+     * @return a non-null set of {@link CoreProvider} interface classes expected by the plugin
+     * @implSpec This method must be overridden by any plugin implementation and return a set of
+     * {@link CoreProvider} interface classes that the plugin expects to be available at runtime. 
+ * The set may be empty if no providers are in use. The plugin interface may provide a + * default implementation, but must drop it once a breaking change for this method appears. + */ + Set> expectedProviders(); + +} diff --git a/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java b/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java new file mode 100644 index 0000000..c3c7c6f --- /dev/null +++ b/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java @@ -0,0 +1,84 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.plugin.Plugin; +import io.gdcc.spi.core.test.TestPlugin; +import org.junit.jupiter.api.Test; + +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class PluginLoaderTest { + + @Test + void findSources_HappyPath() { + // Given + Class sut = TestPlugin.class; + PluginLoader loader = new PluginLoader<>(sut); + Path directory = Path.of("target/test-classes/" + sut.getPackageName().replaceAll("\\.", "/")); + + // When + Map sources = assertDoesNotThrow(() -> loader.findSources(directory)); + + // Then + assertFalse(sources.isEmpty()); + assertTrue(sources.containsKey(directory)); + } + + @Test + void findSources_NoSuchFile() { + // Given + Class sut = TestPlugin.class; + PluginLoader loader = new PluginLoader<>(sut); + Path directory = Path.of("nosuchdir"); + + // When + LoaderException ex = assertThrows(LoaderException.class, () -> loader.findSources(directory)); + + // Then + assertEquals("NoSuchFileException: nosuchdir", ex.getProblems().get(0).message()); + } + + @Test + void findSources_NoDirectory() { + // Given + Class sut 
= TestPlugin.class; + PluginLoader loader = new PluginLoader<>(sut); + Path notDirectory = Path.of("target/test-classes/" + sut.getCanonicalName().replaceAll("\\.", "/") + ".class"); + + // When + LoaderException ex = assertThrows(LoaderException.class, () -> loader.findSources(notDirectory)); + + // Then + assertEquals("NotDirectoryException: " + notDirectory, ex.getProblems().get(0).message()); + } + + @Test + void load() throws MalformedURLException { + // Given + Class sut = Plugin.class; + PluginLoader loader = new PluginLoader<>(sut); + Path rootClassPath = Path.of("target/test-classes/"); + Map sources = Map.of(rootClassPath, new URL[]{PluginLoader.pathToUrl(rootClassPath, null, null)}); + + // When + Map plugins = loader.load(sources); + + // Then + assertFalse(plugins.isEmpty()); + List pluginSources = plugins.keySet().stream().toList(); + assertEquals(1, pluginSources.size()); + PluginSource source = pluginSources.get(0); + assertEquals(rootClassPath, source.location()); + assertEquals(TestPlugin.class.getName(), source.className()); + assertEquals(new TestPlugin().identity(), source.identity()); + } +} \ No newline at end of file diff --git a/src/test/java/io/gdcc/spi/core/test/TestPlugin.java b/src/test/java/io/gdcc/spi/core/test/TestPlugin.java new file mode 100644 index 0000000..132c6a4 --- /dev/null +++ b/src/test/java/io/gdcc/spi/core/test/TestPlugin.java @@ -0,0 +1,28 @@ +package io.gdcc.spi.core.test; + +import io.gdcc.spi.core.plugin.CoreProvider; +import io.gdcc.spi.core.plugin.Plugin; + +import java.util.Set; + +public class TestPlugin implements Plugin { + @Override + public String identity() { + return "test"; + } + + @Override + public int providedPluginApiLevel() { + return 1; + } + + @Override + public int requiredProviderApiLevel(Class providerClass) { + return -1; + } + + @Override + public Set> expectedProviders() { + return Set.of(TestProvider.class); + } +} diff --git a/src/test/java/io/gdcc/spi/core/test/TestProvider.java 
b/src/test/java/io/gdcc/spi/core/test/TestProvider.java new file mode 100644 index 0000000..bf06221 --- /dev/null +++ b/src/test/java/io/gdcc/spi/core/test/TestProvider.java @@ -0,0 +1,13 @@ +package io.gdcc.spi.core.test; + +import io.gdcc.spi.core.plugin.CoreProvider; + +public interface TestProvider extends CoreProvider { + + int API_LEVEL = 100; + + static int apiLevel() { + return API_LEVEL; + } + +} diff --git a/src/test/resources/META-INF/services/io.gdcc.spi.core.plugin.Plugin b/src/test/resources/META-INF/services/io.gdcc.spi.core.plugin.Plugin new file mode 100644 index 0000000..680a3ba --- /dev/null +++ b/src/test/resources/META-INF/services/io.gdcc.spi.core.plugin.Plugin @@ -0,0 +1 @@ +io.gdcc.spi.core.test.TestPlugin \ No newline at end of file From 9d81d16cc999f04051e3a62dce8ad38749f4c667 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Feb 2026 20:41:07 +0100 Subject: [PATCH 06/55] chore(build): add `jdk.version` POM property for Java 17 compilation and compatibility target This defaults to 11 per GDCC Maven Parent --- pom.xml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pom.xml b/pom.xml index c88a136..92f2159 100644 --- a/pom.xml +++ b/pom.xml @@ -31,6 +31,10 @@ + + 17 + + jakarta.json From 0c8a94518a098f30ff9a5002bd0cc9f844cc9eb6 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Feb 2026 20:41:52 +0100 Subject: [PATCH 07/55] refactor(spi): add `@Override` to `getMediaType` in `XMLExporter` for clarity and adherence to interfaces --- src/main/java/io/gdcc/spi/export/XMLExporter.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/io/gdcc/spi/export/XMLExporter.java b/src/main/java/io/gdcc/spi/export/XMLExporter.java index 3c3fa35..910dcd0 100644 --- a/src/main/java/io/gdcc/spi/export/XMLExporter.java +++ b/src/main/java/io/gdcc/spi/export/XMLExporter.java @@ -31,7 +31,8 @@ public interface XMLExporter extends Exporter { /** * @return - should always be MediaType.APPLICATION_XML */ - 
public default String getMediaType() { + @Override + default String getMediaType() { return MediaType.APPLICATION_XML; - }; + } } From d734ac5fb3dfbaf2cc6cfbe0d687f3def97f444d Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 2 Mar 2026 15:14:52 +0100 Subject: [PATCH 08/55] style(export): rename ExportDataContext to DatasetExportQuery, better reflecting the semantics in the name --- ...taContext.java => DatasetExportQuery.java} | 0 .../spi/export/ExportDataContextTest.java | 25 ------------------- 2 files changed, 25 deletions(-) rename src/main/java/io/gdcc/spi/export/{ExportDataContext.java => DatasetExportQuery.java} (100%) delete mode 100644 src/test/java/io/gdcc/spi/export/ExportDataContextTest.java diff --git a/src/main/java/io/gdcc/spi/export/ExportDataContext.java b/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/ExportDataContext.java rename to src/main/java/io/gdcc/spi/export/DatasetExportQuery.java diff --git a/src/test/java/io/gdcc/spi/export/ExportDataContextTest.java b/src/test/java/io/gdcc/spi/export/ExportDataContextTest.java deleted file mode 100644 index f663c3f..0000000 --- a/src/test/java/io/gdcc/spi/export/ExportDataContextTest.java +++ /dev/null @@ -1,25 +0,0 @@ -package io.gdcc.spi.export; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.*; - -class ExportDataContextTest { - - @Test - void testBuildThrowsExceptionForNegativeOffset() { - // Given - var builder = ExportDataContext.builder().offset(-5); - // When & Then - assertThrows(IllegalArgumentException.class, builder::build); - } - - @Test - void testBuildThrowsExceptionForNegativeLength() { - // Given - var builder = ExportDataContext.builder().length(-5); - // When & Then - assertThrows(IllegalArgumentException.class, builder::build); - } - -} \ No newline at end of file From 7359acc89090d356b5d04a297cd112ce0fd43dec Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: 
Mon, 2 Mar 2026 15:17:18 +0100 Subject: [PATCH 09/55] feat(export): add export query predicates and builders for dataset and file metadata shaping Introduce `DatasetMetadataPredicates`, `FileMetadataPredicates`, and their respective builders (`DatasetExportQuery`, `FileExportQuery`) to provide flexible and conflict-validated metadata shaping options for export operations. Includes unit tests for predicate conflict validation. --- .../gdcc/spi/export/DatasetExportQuery.java | 265 +++++++----------- .../spi/export/DatasetMetadataPredicates.java | 76 +++++ .../io/gdcc/spi/export/FileExportQuery.java | 162 +++++++++++ .../spi/export/FileMetadataPredicates.java | 84 ++++++ .../export/FileMetadataPredicatesTest.java | 32 +++ 5 files changed, 463 insertions(+), 156 deletions(-) create mode 100644 src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java create mode 100644 src/main/java/io/gdcc/spi/export/FileExportQuery.java create mode 100644 src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java create mode 100644 src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java diff --git a/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java b/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java index c7ce79b..6fd4b48 100644 --- a/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java +++ b/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java @@ -1,252 +1,205 @@ package io.gdcc.spi.export; +import java.util.Collection; +import java.util.HashSet; import java.util.Objects; +import java.util.Optional; +import java.util.Set; /** - * Provides an optional mechanism for defining various data retrieval options - * for the export subsystem in a way that should allow us adding support for - * more options going forward with minimal or no changes to the already - * implemented export plugins. + * Defines what dataset metadata to retrieve and at what level of detail + * for dataset-oriented export operations. + *

+ * This is a pure data-shape specification: it answers which aspects of a dataset + * should be included in an export, and optionally how file metadata nested within + * that dataset should be shaped. It deliberately does not address which + * datasets to operate on (that is a selection concern at a higher level), nor + * how much data to retrieve per call — pagination is a separate, + * orthogonal concern expressed via a {@code PageRequest} at the method level. + *

+ * File metadata shaping is optional: if no {@link FileExportQuery} is provided, + * methods that include file metadata will apply their own defaults. Methods that + * do not return file metadata will ignore any nested {@link FileExportQuery}. + *

+ * Instances are immutable and must be constructed via {@link #builder()}. + * Use {@link #defaults()} for the standard query with no special filtering. + * + * @see FileExportQuery + * @see DatasetMetadataPredicates */ -public final class ExportDataContext { +public final class DatasetExportQuery { - private final boolean datasetMetadataOnly; - private final boolean publicFilesOnly; - private final int offset; - private final int length; + private final Set datasetPredicates; + private final FileExportQuery fileQuery; /** - * Default context with no special options. + * Default query, including all dataset metadata and applying file metadata defaults. */ - private static final ExportDataContext DEFAULT = builder().build(); + private static final DatasetExportQuery DEFAULT = builder().build(); - private ExportDataContext(Builder builder) { - this.datasetMetadataOnly = builder.datasetMetadataOnly; - this.publicFilesOnly = builder.publicFilesOnly; - this.offset = builder.offset; - this.length = builder.length; + private DatasetExportQuery(Builder builder) { + this.datasetPredicates = Set.copyOf(builder.datasetPredicates); + this.fileQuery = builder.fileQuery; } /** - * Returns a builder for creating new contexts. + * Returns a builder for creating new queries. + * + * @return a new {@link Builder} instance */ public static Builder builder() { return new Builder(); } /** - * Returns a default context with no special options. + * Returns the default query, which includes all dataset metadata with no special + * filtering, and defers file metadata shaping to method-level defaults. + * + * @return the shared default {@link DatasetExportQuery} instance */ - public static ExportDataContext defaults() { + public static DatasetExportQuery defaults() { return DEFAULT; } /** - * Builder for ExportDataContext. + * Builder for {@link DatasetExportQuery}. + *

+ * Obtain an instance via {@link DatasetExportQuery#builder()} or + * {@link Builder#from(DatasetExportQuery)} to derive a new query from an existing one. */ public static class Builder { - private int offset = 0; // default: no offset = beginning - private int length = 0; // default: no length = no limit - private boolean datasetMetadataOnly = false; - private boolean publicFilesOnly = false; + private final Set datasetPredicates = new HashSet<>(); + private FileExportQuery fileQuery = null; private Builder() { - // Hiding constructor to enforce use of static factory method - } - - /** - * Excludes file-level metadata from the export. - */ - public Builder datasetMetadataOnly() { - this.datasetMetadataOnly = true; - return this; - } - - /** - * Sets whether to exclude file-level metadata. - */ - public Builder datasetMetadataOnly(boolean value) { - this.datasetMetadataOnly = value; - return this; + // Hiding constructor to enforce use of the static factory method } /** - * Includes only public (non-restricted, non-embargoed) files. - */ - public Builder publicFilesOnly() { - this.publicFilesOnly = true; - return this; - } - - /** - * Sets whether to include only public files. + * Sets the dataset metadata predicates, replacing any previously set predicates. + * + * @param predicates the dataset metadata predicates to set + * @return this builder instance */ - public Builder publicFilesOnly(boolean value) { - this.publicFilesOnly = value; + public Builder datasetPredicates(DatasetMetadataPredicates... predicates) { + this.datasetPredicates.clear(); + this.datasetPredicates.addAll(Set.of(predicates)); return this; } /** - * Sets the starting position for results (0-based). + * Sets the dataset metadata predicates, replacing any previously set predicates. 
* - * @param offset zero-based starting position (must be >= 0) - * @return this builder + * @param predicates the dataset metadata predicates to set + * @return this builder instance */ - public Builder offset(int offset) { - this.offset = offset; + public Builder datasetPredicates(Collection predicates) { + this.datasetPredicates.clear(); + this.datasetPredicates.addAll(predicates); return this; } /** - * Sets the maximum number of results to return. + * Adds a dataset metadata predicate to the builder's collection of predicates. * - * @param length maximum number of items (0 = unlimited, must be >= 0) - * @return this builder + * @param predicate the dataset metadata predicate to add + * @return this builder instance */ - public Builder length(int length) { - this.length = length; + public Builder addDatasetPredicate(DatasetMetadataPredicates predicate) { + this.datasetPredicates.add(predicate); return this; } /** - * Convenience method to set both offset and length together. + * Sets the {@link FileExportQuery} to use for shaping file metadata nested + * within this dataset query. Replaces any previously set file query. + *

+ * If not set, methods that include file metadata will apply their own defaults. * - * @param offset zero-based starting position (must be >= 0) - * @param length maximum number of items (0 = unlimited, must be >= 0) - * @return this builder - * @apiNote Pagination is primarily intended for retrieving specific subsets, - * not for iterating through large datasets. For full exports of - * large datasets, consider using streaming methods if available. + * @param fileQuery the file export query to compose into this dataset query + * @return this builder instance */ - public Builder pagination(int offset, int length) { - this.offset = offset; - this.length = length; + public Builder fileQuery(FileExportQuery fileQuery) { + this.fileQuery = fileQuery; return this; } /** - * Builds an immutable ExportDataContext. + * Builds an immutable {@link DatasetExportQuery}. * - * @return validated context - * @throws IllegalArgumentException if validation fails + * @return a new, validated {@link DatasetExportQuery} + * @throws IllegalArgumentException if the predicate combination is invalid, + * e.g. due to conflicting predicates */ - public ExportDataContext build() { - // Validate business rules - if (offset < 0) { - throw new IllegalArgumentException( - "offset must be non-negative, got: " + offset - ); - } - - if (length < 0) { - throw new IllegalArgumentException( - "length must be non-negative (0 = unlimited), got: " + length - ); - } - - return new ExportDataContext(this); + public DatasetExportQuery build() { + return new DatasetExportQuery(this); } /** - * Copies the properties from the given {@link ExportDataContext} instance into a new {@code Builder}. + * Creates a new {@link Builder} pre-populated with the state of the given query, + * useful for deriving a modified copy without altering the original. 
* - * @param source the {@code ExportDataContext} instance from which to copy properties - * @return a new {@code Builder} instance with properties copied from the provided context + * @param source the {@link DatasetExportQuery} instance to copy from + * @return a new {@code Builder} with the same predicates and file query as {@code source} */ - public Builder from(ExportDataContext source) { + public Builder from(DatasetExportQuery source) { return new Builder() - .datasetMetadataOnly(source.datasetMetadataOnly) - .publicFilesOnly(source.publicFilesOnly) - .offset(source.offset) - .length(source.length); + .datasetPredicates(source.datasetPredicates) + .fileQuery(source.fileQuery); } } // Getters - public boolean isDatasetMetadataOnly() { - return datasetMetadataOnly; - } - - public boolean isPublicFilesOnly() { - return publicFilesOnly; - } - /** - * Returns the starting offset for results. + * Returns the dataset metadata predicates that control which aspects of the dataset + * are included in the export. * - * @return zero-based offset (0 = start from beginning) + * @return an unmodifiable set of {@link DatasetMetadataPredicates}; never {@code null} */ - public int getOffset() { - return offset; + public Set getDatasetPredicates() { + return datasetPredicates; } /** - * Returns the maximum number of results to return. + * Returns the optional {@link FileExportQuery} that controls how file metadata + * nested within this dataset export should be shaped. + *

+ * An empty {@link Optional} means no explicit file query was specified; methods + * that include file metadata will apply their own defaults in that case. * - * @return maximum length (0 = unlimited) - */ - public int getLength() { - return length; - } - - /** - * @return true if a non-zero offset is configured - */ - public boolean hasOffset() { - return offset > 0; - } - - /** - * @return true if length is limited (non-zero) + * @return an {@link Optional} containing the file export query, or empty if not set */ - public boolean hasLengthLimit() { - return length > 0; + public Optional getFileQuery() { + return Optional.ofNullable(fileQuery); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ExportDataContext that = (ExportDataContext) o; - return datasetMetadataOnly == that.datasetMetadataOnly && - publicFilesOnly == that.publicFilesOnly && - offset == that.offset && - length == that.length; + DatasetExportQuery that = (DatasetExportQuery) o; + return datasetPredicates.equals(that.datasetPredicates) + && Objects.equals(fileQuery, that.fileQuery); } @Override public int hashCode() { - return Objects.hash(datasetMetadataOnly, publicFilesOnly, offset, length); + return Objects.hash(datasetPredicates, fileQuery); } @Override public String toString() { - StringBuilder sb = new StringBuilder("ExportDataContext{"); - boolean hasContent = false; + StringBuilder sb = new StringBuilder("DatasetExportQuery{"); - if (datasetMetadataOnly) { - sb.append("datasetMetadataOnly"); - hasContent = true; + if (!datasetPredicates.isEmpty()) { + sb.append("datasetPredicates=").append(datasetPredicates).append(", "); } - - if (publicFilesOnly) { - if (hasContent) sb.append(", "); - sb.append("publicFilesOnly"); - hasContent = true; + if (fileQuery != null) { + sb.append("fileQuery=").append(fileQuery); + } else { + sb.append("fileQuery="); } - - if (offset > 0 || length > 0) { - if (hasContent) 
sb.append(", "); - sb.append("offset=").append(offset); - sb.append(", length=").append(length == 0 ? "unlimited" : length); - hasContent = true; - } - - if (!hasContent) { - sb.append("defaults"); - } - sb.append("}"); return sb.toString(); } diff --git a/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java b/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java new file mode 100644 index 0000000..74456e4 --- /dev/null +++ b/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java @@ -0,0 +1,76 @@ +package io.gdcc.spi.export; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * Predicates for controlling which dataset metadata is included in an export + * and at what level of detail. + *

+ * These predicates are used in a {@link DatasetExportQuery} to shape dataset-level + * retrieval. They are exclusively concerned with dataset-level concerns — file-level + * filtering is handled separately by {@link FileMetadataPredicates} via + * {@link FileExportQuery}. + *

+ * Predicates may conflict with each other; use {@link #checkConflicts(Set)} to + * validate a combination before use. + * + * @see DatasetExportQuery + * @see FileMetadataPredicates + */ +public enum DatasetMetadataPredicates { + // NOTE: We can only define backward conflicts, as forward conflicts would lead + // to circular dependencies disallowed by the Java compiler. + + // Placeholder — dataset-level predicates to be added here as requirements emerge. + // Examples of future candidates: + // PUBLISHED_DATASETS_ONLY — restrict to published versions + // DRAFT_INCLUDED — include draft versions + // METADATA_BLOCKS_ONLY — exclude file metadata entirely + ; + + final Set conflicts; + + DatasetMetadataPredicates(DatasetMetadataPredicates... predicates) { + this.conflicts = Set.of(predicates); + } + + /** + * Returns {@code true} if this predicate conflicts with the given predicate. + * + * @param p the predicate to check against; {@code null} is safe and returns {@code false} + * @return {@code true} if a conflict exists, {@code false} otherwise + */ + public boolean conflictsWith(DatasetMetadataPredicates p) { + if (p == null) { + return false; + } + return conflicts.contains(p); + } + + /** + * Checks for conflicts among the given set of dataset metadata predicates. + * A predicate is considered conflicting if it has a conflict relationship with + * any other predicate in the set. 
+ * + * @param predicates the set of predicates to check for conflicts + * @return an unmodifiable set of predicates from the input that conflict with at + * least one other predicate; empty if no conflicts exist + */ + @SuppressWarnings("java:S2259") + public static Set checkConflicts(Set predicates) { + Set foundConflicts = new HashSet<>(); + + for (DatasetMetadataPredicates predicate : predicates) { + for (DatasetMetadataPredicates compare : predicates) { + if (predicate.conflictsWith(compare) || compare.conflictsWith(predicate)) { + foundConflicts.add(predicate); + foundConflicts.add(compare); + } + } + } + + return Collections.unmodifiableSet(foundConflicts); + } +} \ No newline at end of file diff --git a/src/main/java/io/gdcc/spi/export/FileExportQuery.java b/src/main/java/io/gdcc/spi/export/FileExportQuery.java new file mode 100644 index 0000000..5ebcacd --- /dev/null +++ b/src/main/java/io/gdcc/spi/export/FileExportQuery.java @@ -0,0 +1,162 @@ +package io.gdcc.spi.export; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +import static io.gdcc.spi.export.FileMetadataPredicates.*; + +/** + * Defines what file metadata to retrieve and at what level of detail + * for file-oriented export operations. + *

+ * This is a pure data-shape specification: it answers which files should be included + * and how much detail about them should be fetched. It deliberately does not address + * how much data to retrieve per call — pagination is a separate, + * orthogonal concern expressed via a {@code PageRequest} at the method level. + *

+ * A {@code FileExportQuery} may be used standalone in file-centric export methods, + * or composed inside a {@code DatasetExportQuery} to specify how file metadata + * should be shaped within a dataset export. + *

+ * Instances are immutable and must be constructed via {@link #builder()}. + * Use {@link #defaults()} for the standard all-files query with no special filtering. + * + * @see FileMetadataPredicates + */ +public final class FileExportQuery { + + private final Set filePredicates; + + /** + * Default query with no special options. + */ + private static final FileExportQuery DEFAULT = builder().addFilePredicate(ALL_FILES).build(); + + private FileExportQuery(Builder builder) { + this.filePredicates = builder.filePredicates; + } + + /** + * Returns a builder for creating new queries. + */ + public static Builder builder() { + return new Builder(); + } + + /** + * Returns a default query, which includes all files without filtering or detail restrictions. + */ + public static FileExportQuery defaults() { + return DEFAULT; + } + + /** + * Builder for {@link FileExportQuery}. + *

+ * Obtain an instance via {@link FileExportQuery#builder()} or + * {@link Builder#from(FileExportQuery)} to derive a new query from an existing one. + */ + public static class Builder { + private final Set filePredicates = new HashSet<>(); + + private Builder() { + // Hiding constructor to enforce use of the static factory method + } + + /** + * Sets the file metadata predicates, replacing any previously set predicates. + * + * @param predicates the file metadata predicates to set + * @return this builder instance + */ + public Builder filePredicates(FileMetadataPredicates... predicates) { + this.filePredicates.clear(); + this.filePredicates.addAll(Set.of(predicates)); + return this; + } + + /** + * Sets the file metadata predicates, replacing any previously set predicates. + * + * @param predicates the file metadata predicates to set + * @return this builder instance + */ + public Builder filePredicates(Collection predicates) { + this.filePredicates.clear(); + this.filePredicates.addAll(predicates); + return this; + } + + /** + * Adds a file metadata predicate to the builder's collection of predicates. + * + * @param predicate the file metadata predicate to add + * @return this builder instance + */ + public Builder addFilePredicate(FileMetadataPredicates predicate) { + this.filePredicates.add(predicate); + return this; + } + + /** + * Builds an immutable {@link FileExportQuery}. + * + * @return validated context + * @throws IllegalArgumentException if validation fails + */ + public FileExportQuery build() { + return new FileExportQuery(this); + } + + /** + * Copies the properties from the given {@link FileExportQuery} instance into a new {@code Builder}. 
+ * + * @param source the {@code FileExportQuery} instance from which to copy properties + * @return a new {@code Builder} instance with properties copied from the provided query + */ + public Builder from(FileExportQuery source) { + return new Builder() + .filePredicates(source.filePredicates); + } + } + + // Getters + + /** + * Returns the file metadata predicates that control which files are included + * and what level of detail is fetched for each. + * + * @return an unmodifiable set of {@link FileMetadataPredicates}; never {@code null} + */ + public Set getFilePredicates() { + return Collections.unmodifiableSet(filePredicates); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileExportQuery that = (FileExportQuery) o; + return filePredicates.equals(that.filePredicates); + } + + @Override + public int hashCode() { + return Objects.hash(filePredicates); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("FileExportQuery{"); + + if (!filePredicates.isEmpty()) { + sb.append("filePredicates=").append(filePredicates); + } + + sb.append("}"); + return sb.toString(); + } +} diff --git a/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java b/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java new file mode 100644 index 0000000..81f6a65 --- /dev/null +++ b/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java @@ -0,0 +1,84 @@ +package io.gdcc.spi.export; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * Enum representing predicates for filtering file metadata during export operations. + * Each predicate defines criteria for including or excluding specific types of files. + * Conflicts between predicates are explicitly defined to prevent ambiguous or contradictory filtering rules. + * Any predicates should follow the pattern (SKIP|ONLY|INCLUDE)_[ADJECTIVE]_[OBJECT]. 
+ */ +public enum FileMetadataPredicates { + // NOTE: We can only define backward conflicts, as forward conflicts would lead + // to circular dependencies disallowed by the Java compiler. + + /** + * Includes metadata for all files without restriction. + * Conflicts with any other predicate selecting files. + */ + ALL_FILES(), + /** + * Excludes metadata for all files. + * Conflicts with any other file selecting predicate. + */ + SKIP_FILES(ALL_FILES), + /** + * Only include files with public visibility. + * Conflicts with {@link #ALL_FILES} and {@link #SKIP_FILES}. + */ + ONLY_PUBLIC_FILES(ALL_FILES, SKIP_FILES), + /** + * Only include tabular data files. + * Conflicts with {@link #ALL_FILES} and {@link #SKIP_FILES}. + */ + ONLY_TABULAR_FILES(ALL_FILES, SKIP_FILES), + /** + * For tabular data files, control if variable details are included or not. + * (That can be huge and heterogeneous data with slow DB queries!) + * It has no conflicting predicates, as it is about detail inclusion, not file selection. + */ + INCLUDE_TABULAR_DATA_VARIABLES() + ; + + final Set conflicts; + + FileMetadataPredicates(FileMetadataPredicates... predicates) { + this.conflicts = Set.of(predicates); + } + + public boolean conflictsWith(FileMetadataPredicates p) { + if (p == null) { + return false; + } + return conflicts.contains(p); + } + + /** + * Checks for conflicts among the given set of export file predicates. + * A predicate is considered conflicting if it has a conflict relationship with + * any other predicate defined in the {@link FileMetadataPredicates} enum. 
+ * + * @param predicates the set of predicates to check for conflicts + * @return an unmodifiable set of predicates from the input that conflict with at least one other predicate (empty if no conflict) + */ + @SuppressWarnings("java:S2259") + public static Set checkConflicts(Set predicates) { + Set foundConflicts = new HashSet<>(); + + // Iterate via O(n^2) through all predicates to check any existing predicate for a conflict. + // This way, a forward check is enough, as we iterate through the cartesian product. + for (FileMetadataPredicates predicate : predicates) { + for (FileMetadataPredicates compare : predicates) { + if (predicate.conflictsWith(compare) || compare.conflictsWith(predicate)) { + foundConflicts.add(predicate); + foundConflicts.add(compare); + } + } + } + + return Collections.unmodifiableSet(foundConflicts); + } + +} diff --git a/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java b/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java new file mode 100644 index 0000000..4157b95 --- /dev/null +++ b/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java @@ -0,0 +1,32 @@ +package io.gdcc.spi.export; + +import org.junit.jupiter.api.Test; + +import java.util.Set; + +import static io.gdcc.spi.export.FileMetadataPredicates.*; +import static org.junit.jupiter.api.Assertions.*; + +class FileMetadataPredicatesTest { + + @Test + void checkForwardAndBackwardConflicts_All_None() { + Set predicates = Set.of(ALL_FILES, SKIP_FILES); + Set conflicts = FileMetadataPredicates.checkConflicts(predicates); + assertEquals(2, conflicts.size(), conflicts::toString); + } + + @Test + void checkForwardAndBackwardConflicts_All_SthElse() { + Set predicates = Set.of(ALL_FILES, ONLY_PUBLIC_FILES); + Set conflicts = FileMetadataPredicates.checkConflicts(predicates); + assertEquals(2, conflicts.size(), conflicts::toString); + } + + @Test + void checkNoConflicts_Public_Tabular() { + Set predicates = Set.of(ONLY_PUBLIC_FILES, ONLY_TABULAR_FILES); 
+ Set conflicts = FileMetadataPredicates.checkConflicts(predicates); + assertTrue(conflicts.isEmpty(), conflicts::toString); + } +} \ No newline at end of file From b2118720931e5d6cdaa3f80d9baf24463e6dbd66 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 2 Mar 2026 15:17:53 +0100 Subject: [PATCH 10/55] feat(export): add `PageRequest` for pagination support with validation Introduce the `PageRequest` class to define pagination parameters for data retrieval. Includes methods for creating paged and unpaged requests, validation for offset and limit values, and utility methods. Add corresponding unit tests to ensure behavior correctness. --- .../java/io/gdcc/spi/export/PageRequest.java | 67 +++++++++++++++++++ .../io/gdcc/spi/export/PageRequestTest.java | 64 ++++++++++++++++++ 2 files changed, 131 insertions(+) create mode 100644 src/main/java/io/gdcc/spi/export/PageRequest.java create mode 100644 src/test/java/io/gdcc/spi/export/PageRequestTest.java diff --git a/src/main/java/io/gdcc/spi/export/PageRequest.java b/src/main/java/io/gdcc/spi/export/PageRequest.java new file mode 100644 index 0000000..335e8ed --- /dev/null +++ b/src/main/java/io/gdcc/spi/export/PageRequest.java @@ -0,0 +1,67 @@ +package io.gdcc.spi.export; + +import java.util.Objects; + +/** + * Defines pagination parameters for data retrieval methods that return + * potentially large collections of results. + * + *

Use {@link #unpaged()} for requests that should return all results in a single batch.

+ */ +public final class PageRequest { + + private static final PageRequest UNPAGED = new PageRequest(0, Integer.MAX_VALUE); + + private final int offset; + private final int limit; + + private PageRequest(int offset, int limit) { + if (offset < 0) throw new IllegalArgumentException("Offset must be >= 0, was: " + offset); + if (limit < 1) throw new IllegalArgumentException("Limit must be >= 1, was: " + limit); + this.offset = offset; + this.limit = limit; + } + + /** + * Creates a page request with the given offset and limit. + * + * @param offset zero-based index of the first result to return + * @param limit maximum number of results to return + * @return a new PageRequest + */ + public static PageRequest of(int offset, int limit) { + return new PageRequest(offset, limit); + } + + /** + * Returns a request for all results (no pagination). + */ + public static PageRequest unpaged() { + return UNPAGED; + } + + public int getOffset() { return offset; } + public int getLimit() { return limit; } + + public boolean isPaged() { return !this.equals(UNPAGED); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PageRequest that = (PageRequest) o; + return offset == that.offset && limit == that.limit; + } + + @Override + public int hashCode() { + return Objects.hash(offset, limit); + } + + @Override + public String toString() { + return isPaged() + ? 
"PageRequest{offset=" + offset + ", limit=" + limit + "}" + : "PageRequest{unpaged}"; + } +} diff --git a/src/test/java/io/gdcc/spi/export/PageRequestTest.java b/src/test/java/io/gdcc/spi/export/PageRequestTest.java new file mode 100644 index 0000000..917625d --- /dev/null +++ b/src/test/java/io/gdcc/spi/export/PageRequestTest.java @@ -0,0 +1,64 @@ +package io.gdcc.spi.export; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class PageRequestTest { + + /** + * Tests for the `of` method in the `PageRequest` class. + * The `of` method is responsible for creating a new `PageRequest` object with the given offset and limit values. + * This test class ensures various scenarios for valid and invalid inputs are handled correctly. + */ + + @Test + void testOf_createsValidPageRequest() { + // Arrange & Act + PageRequest pageRequest = PageRequest.of(10, 20); + + // Assert + assertEquals(10, pageRequest.getOffset()); + assertEquals(20, pageRequest.getLimit()); + assertTrue(pageRequest.isPaged()); + } + + @Test + void testOf_withZeroOffsetAndValidLimit() { + // Arrange & Act + PageRequest pageRequest = PageRequest.of(0, 5); + + // Assert + assertEquals(0, pageRequest.getOffset()); + assertEquals(5, pageRequest.getLimit()); + assertTrue(pageRequest.isPaged()); + } + + @Test + void testOf_throwsExceptionForNegativeOffset() { + // Arrange & Act & Assert + IllegalArgumentException exception = assertThrows( + IllegalArgumentException.class, + () -> PageRequest.of(-1, 10) + ); + assertEquals("Offset must be >= 0, was: -1", exception.getMessage()); + } + + @Test + void testOf_throwsExceptionForZeroOrNegativeLimit() { + // Arrange & Act & Assert + IllegalArgumentException exception1 = assertThrows( + IllegalArgumentException.class, + () -> PageRequest.of(5, 0) + ); + assertEquals("Limit must be >= 1, was: 0", 
exception1.getMessage()); + + IllegalArgumentException exception2 = assertThrows( + IllegalArgumentException.class, + () -> PageRequest.of(5, -1) + ); + assertEquals("Limit must be >= 1, was: -1", exception2.getMessage()); + } +} \ No newline at end of file From ef777fe5fb6a89a43e4e9759680aa2828557904b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 2 Mar 2026 15:36:52 +0100 Subject: [PATCH 11/55] refactor(export): replace `ExportDataContext` with `DatasetExportQuery` and `FileExportQuery` Refactor `ExportDataProvider` to use `DatasetExportQuery` and `FileExportQuery` for metadata retrieval. Update affected method signatures, improve documentation, and introduce overloads for better flexibility and stream handling. Keep deprecation of methods with no parameters. --- .../gdcc/spi/export/ExportDataProvider.java | 124 +++++++++++------- 1 file changed, 80 insertions(+), 44 deletions(-) diff --git a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index 8820db2..495ea1c 100644 --- a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -3,9 +3,11 @@ import jakarta.json.JsonArray; import jakarta.json.JsonObject; +import org.w3c.dom.Document; import java.io.InputStream; import java.util.Optional; +import java.util.stream.Stream; /** * Provides dataset metadata that can be used by an {@link Exporter} to create @@ -23,12 +25,13 @@ *

Context Handling

  * Implementations should respect context options where applicable.
  * Not all methods support all context options - see individual method documentation for details.
- * All methods require a non-null {@link ExportDataContext}.
+ * All methods require a non-null {@link DatasetExportQuery} or {@link FileExportQuery}.
  * Passing null will result in a {@link NullPointerException}.
- * Callers should use {@link ExportDataContext#defaults()} instead of passing null.
+ * Callers should use {@link DatasetExportQuery#defaults()} or {@link FileExportQuery#defaults()}, respectively, instead of passing null.
  *
  * @see Exporter
- * @see ExportDataContext
+ * @see DatasetExportQuery
+ * @see FileExportQuery
  */
 public interface ExportDataProvider {
@@ -39,10 +42,10 @@ public interface ExportDataProvider {
      * metadata for each file in the dataset. It is the same JSON format used in
      * the Dataverse API and available as a metadata export option in the UI.
      *
-     * @param context configuration for data retrieval
+     * @param query specification for data retrieval
      * @return dataset metadata in Dataverse JSON format
      * @throws ExportException if metadata retrieval fails
-     * @throws NullPointerException if context is null
+     * @throws NullPointerException if the query is null
      * @since 2.1.0
      * @apiNote While no formal JSON schema exists for this format, it is well-documented
      *     in the Dataverse guides. Along with OAI_ORE, this is one of only two export
      *     formats that provide complete dataset-level metadata along with basic
      *     file information.
      * @implNote Implementations must respect the {@code datasetMetadataOnly} flag
      *     for datasets with large numbers of files. Other context options
      *     (publicFilesOnly, offset, length) do not apply and should be ignored.
      */
-    JsonObject getDatasetJson(ExportDataContext context);
+    JsonObject getDatasetJson(DatasetExportQuery query);
 
     /**
      * Returns complete dataset metadata using default options.
@@ -60,11 +63,11 @@ public interface ExportDataProvider { * @return dataset metadata in Dataverse JSON format * @throws ExportException if metadata retrieval fails * @since 1.0.0 - * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetJson(ExportDataContext)} instead. + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetJson(DatasetExportQuery)} instead. */ @Deprecated(since = "2.1.0", forRemoval = true) default JsonObject getDatasetJson() { - return getDatasetJson(ExportDataContext.defaults()); + return getDatasetJson(DatasetExportQuery.defaults()); } /** @@ -74,10 +77,10 @@ default JsonObject getDatasetJson() { * aggregations of web resources. This format is used in Dataverse's archival bag export mechanism * and available via UI and API. * - * @param context configuration for data retrieval - * @return dataset metadata in OAI_ORE format + * @param query specification for data retrieval + * @return dataset metadata in OAI-ORE format * @throws ExportException if metadata retrieval fails - * @throws NullPointerException if context is null + * @throws NullPointerException if the query is null * @since 2.1.0 * @apiNote Along with the standard JSON format, this is one of only two export * formats that provide complete dataset-level metadata along with basic @@ -85,7 +88,7 @@ default JsonObject getDatasetJson() { * @implNote Implementations must respect the {@code datasetMetadataOnly} flag. * Other context options do not apply and should be ignored. */ - JsonObject getDatasetORE(ExportDataContext context); + JsonObject getDatasetORE(DatasetExportQuery query); /** * Returns dataset metadata in OAI-ORE format using default options. @@ -93,46 +96,77 @@ default JsonObject getDatasetJson() { * @return dataset metadata in OAI-ORE format * @throws ExportException if metadata retrieval fails * @since 1.0.0 - * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetORE(ExportDataContext)} instead. 
+ * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetORE(DatasetExportQuery)} instead. */ @Deprecated(since = "2.1.0", forRemoval = true) default JsonObject getDatasetORE() { - return getDatasetORE(ExportDataContext.defaults()); + return getDatasetORE(DatasetExportQuery.defaults()); } /** - * Returns detailed metadata for all files in the dataset. + * Returns detailed metadata for files in the dataset. *

- * For tabular files that have been successfully ingested, this includes + * For tabular files that have been successfully ingested, this may include * DDI-centric metadata extracted during the ingest process. This detailed * metadata is not available through other methods in this interface. + *

+     * The query may specify filters to skip certain files or how much metadata detail should be included.
+     * The resulting stream will contain a limited number of elements only, specified by a {@code PageRequest},
+     * avoiding huge memory allocations in the provider.
+     * <p>

* - * @param context configuration for data retrieval + * @param query specification for file data retrieval + * @param request the page request containing pagination information such as page offset and page size * @return JSON array with one entry per dataset file (both tabular and non-tabular) * @throws ExportException if metadata retrieval fails - * @throws NullPointerException if context is null + * @throws NullPointerException if the query or request is null + * @since 2.1.0 + * @apiNote No formal JSON schema is available for this output. The format is not + * extensively documented; implementers may wish to examine the DDIExporter + * and JSONPrinter classes in the Dataverse codebase for usage examples. + */ + Stream getDatasetFileDetails(FileExportQuery query, PageRequest request); + + /** + * Returns detailed metadata for files in the dataset. + *

+ * For tabular files that have been successfully ingested, this may include + * DDI-centric metadata extracted during the ingest process. This detailed + * metadata is not available through other methods in this interface. + *

+     * The query may specify filters to skip certain files or how much metadata detail should be included.
+     * The resulting stream will contain all matching files for consumption.
+     * In cases with large metadata quantities, use {@link #getDatasetFileDetails(FileExportQuery,PageRequest)}
+     * for a stream containing a limited number of elements only, avoiding huge memory allocations in the provider.
+     * <p>

+ * + * @param query specification for file data retrieval + * @return JSON array with one entry per dataset file (both tabular and non-tabular) + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if the query is null * @since 2.1.0 * @apiNote No formal JSON schema is available for this output. The format is not * extensively documented; implementers may wish to examine the DDIExporter * and JSONPrinter classes in the Dataverse codebase for usage examples. - * @implNote Implementations should respect both {@code datasetMetadataOnly} and - * {@code publicFilesOnly} flags. Pagination options do not apply and - * should be ignored. */ - JsonArray getDatasetFileDetails(ExportDataContext context); + Stream getDatasetFileDetails(FileExportQuery query); /** * Returns detailed metadata for all files using default options. + *

+ * Note that this method will serialize all file metadata into one large JSON array. + * This can be memory-intensive for large datasets and should be used judiciously. + * There have been reports of unexportable large datasets in production installations. + * Using {@link #getDatasetFileDetails(FileExportQuery)} instead is advised. + *

* - * @return JSON array with one entry per dataset file + * @return JSON array with one JSON object entry per dataset file * @throws ExportException if metadata retrieval fails * @since 1.0.0 - * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetFileDetails(ExportDataContext)} instead. + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetFileDetails(FileExportQuery)} + * or {@link #getDatasetFileDetails(FileExportQuery, PageRequest)}instead. */ @Deprecated(since = "2.1.0", forRemoval = true) - default JsonArray getDatasetFileDetails() { - return getDatasetFileDetails(ExportDataContext.defaults()); - } /** * Returns detailed metadata for tabular files only, with support for filtering and pagination. @@ -155,6 +189,7 @@ default JsonArray getDatasetFileDetails() { * apply and should be ignored. */ JsonArray getTabularDataDetails(ExportDataContext context); + JsonArray getDatasetFileDetails(); /** * Returns dataset metadata conforming to the schema.org standard. @@ -162,17 +197,17 @@ default JsonArray getDatasetFileDetails() { * This metadata subset is used in dataset page headers to improve discoverability by search engines. * It provides structured data markup (JSON-LD) following the schema.org vocabulary. * - * @param context configuration for data retrieval + * @param query specification for data retrieval * @return dataset metadata in schema.org format * @throws ExportException if metadata retrieval fails - * @throws NullPointerException if context is null + * @throws NullPointerException if the query is null * @since 2.1.0 * @apiNote This metadata export is not complete. It should only be used as a starting * point for an Exporter if it simplifies implementation compared to using * the complete JSON or OAI_ORE exports. * @implNote All context options are ignored by this method. 
*/ - JsonObject getDatasetSchemaDotOrg(ExportDataContext context); + JsonObject getDatasetSchemaDotOrg(DatasetExportQuery query); /** * Returns dataset metadata in schema.org format using default options. @@ -180,11 +215,11 @@ default JsonArray getDatasetFileDetails() { * @return dataset metadata in schema.org format * @throws ExportException if metadata retrieval fails * @since 1.0.0 - * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetSchemaDotOrg(ExportDataContext)} instead. + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetSchemaDotOrg(DatasetExportQuery)} instead. */ @Deprecated(since = "2.1.0", forRemoval = true) default JsonObject getDatasetSchemaDotOrg() { - return getDatasetSchemaDotOrg(ExportDataContext.defaults()); + return getDatasetSchemaDotOrg(DatasetExportQuery.defaults()); } /** @@ -192,18 +227,21 @@ default JsonObject getDatasetSchemaDotOrg() { *

* This is the same metadata format sent to DataCite when DataCite DOIs are used. * It provides citation metadata following the DataCite Metadata Schema. + *

+ * Note: the returned XML document can easily be queried using XPath and other techniques. + *

* - * @param context configuration for data retrieval + * @param query specification for data retrieval * @return dataset metadata as DataCite XML string * @throws ExportException if metadata retrieval fails - * @throws NullPointerException if context is null + * @throws NullPointerException if the query is null * @since 2.1.0 * @apiNote This metadata export is not complete. It should only be used as a starting * point for an Exporter if it simplifies implementation compared to using * the complete JSON or OAI_ORE exports. * @implNote All context options are ignored by this method. */ - String getDataCiteXml(ExportDataContext context); + Document getDataCiteXml(DatasetExportQuery query); /** * Returns dataset metadata in DataCite XML format using default options. @@ -211,12 +249,10 @@ default JsonObject getDatasetSchemaDotOrg() { * @return dataset metadata as DataCite XML string * @throws ExportException if metadata retrieval fails * @since 1.0.0 - * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDataCiteXml(ExportDataContext)} instead. + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDataCiteXml(DatasetExportQuery)} instead. */ @Deprecated(since = "2.1.0", forRemoval = true) - default String getDataCiteXml() { - return getDataCiteXml(ExportDataContext.defaults()); - } + String getDataCiteXml(); /** * Returns metadata in the format specified by an Exporter's prerequisite. @@ -226,10 +262,10 @@ default String getDataCiteXml() { * {@link Exporter#getPrerequisiteFormatName()}, and this method provides access * to that prerequisite metadata. 
* - * @param context configuration passed to the prerequisite exporter + * @param query specification passed to the prerequisite exporter * @return metadata in the prerequisite format, or empty if no prerequisite is configured * @throws ExportException if metadata retrieval fails - * @throws NullPointerException if context is null + * @throws NullPointerException if the query is null * @since 2.1.0 * @apiNote This is useful for creating alternate representations of the same metadata * (e.g., XML, HTML, PDF versions of a standard like DDI), especially when @@ -239,7 +275,7 @@ default String getDataCiteXml() { * supports prerequisite format chaining. The prerequisite exporter receives * the same context as specified in this call. */ - default Optional getPrerequisiteInputStream(ExportDataContext context) { + default Optional getPrerequisiteInputStream(DatasetExportQuery query) { return Optional.empty(); } @@ -249,10 +285,10 @@ default Optional getPrerequisiteInputStream(ExportDataContext conte * @return metadata in the prerequisite format, or empty if no prerequisite is configured * @throws ExportException if metadata retrieval fails * @since 1.0.0 - * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getPrerequisiteInputStream(ExportDataContext)} instead. 
*/ @Deprecated(since = "2.1.0", forRemoval = true) default Optional getPrerequisiteInputStream() { - return getPrerequisiteInputStream(ExportDataContext.defaults()); + return getPrerequisiteInputStream(DatasetExportQuery.defaults()); } } From d269680a29e62b38b426395b35752e7c89053bef Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 2 Mar 2026 16:15:58 +0100 Subject: [PATCH 12/55] refactor(export): remove `getTabularDataDetails` method in `ExportDataProvider` as it duplicates existing functionality --- .../gdcc/spi/export/ExportDataProvider.java | 22 ------------------- 1 file changed, 22 deletions(-) diff --git a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index 495ea1c..cb3b9fe 100644 --- a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -167,28 +167,6 @@ default JsonObject getDatasetORE() { * or {@link #getDatasetFileDetails(FileExportQuery, PageRequest)}instead. */ @Deprecated(since = "2.1.0", forRemoval = true) - - /** - * Returns detailed metadata for tabular files only, with support for filtering and pagination. - *

- * This method is specifically designed for datasets with large numbers of tabular - * files and data variables. It provides access to the complete hierarchy of - * datafile → filemetadata → datatable → datavariable metadata. - * - * @param context configuration for data retrieval - * @return JSON array containing metadata for tabular files only - * @throws ExportException if metadata retrieval fails - * @throws NullPointerException if context is null - * @since 2.1.0 - * @apiNote Pagination is intended for retrieving specific subsets, not for iterating - * through large result sets. For complete exports, call once without pagination - * or iterate by checking for empty results. - * @implNote Implementations should respect {@code publicFilesOnly} to filter restricted - * or embargoed files. Pagination via {@code offset} and {@code length} should - * be supported where feasible. The {@code datasetMetadataOnly} flag does not - * apply and should be ignored. - */ - JsonArray getTabularDataDetails(ExportDataContext context); JsonArray getDatasetFileDetails(); /** From 6fb42285380e03f75ad08ffc5770e0e4e865fe6c Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Mar 2026 14:16:59 +0100 Subject: [PATCH 13/55] refactor(export): restructure package and move `export` module to dedicated submodule Migrate `export`-related classes to a dedicated `export` Maven submodule to improve modularity and maintainability. Add POM configuration, adjust imports, and update related tests accordingly. 
--- export/pom.xml | 36 +++++++++++++++++++ .../gdcc/spi/export/DatasetExportQuery.java | 0 .../spi/export/DatasetMetadataPredicates.java | 0 .../gdcc/spi/export/ExportDataProvider.java | 0 .../io/gdcc/spi/export/ExportException.java | 0 .../java/io/gdcc/spi/export/Exporter.java | 0 .../io/gdcc/spi/export/FileExportQuery.java | 0 .../spi/export/FileMetadataPredicates.java | 0 .../java/io/gdcc/spi/export/PageRequest.java | 0 .../java/io/gdcc/spi/export/XMLExporter.java | 0 .../export/FileMetadataPredicatesTest.java | 0 .../io/gdcc/spi/export/PageRequestTest.java | 0 12 files changed, 36 insertions(+) create mode 100644 export/pom.xml rename {src => export/src}/main/java/io/gdcc/spi/export/DatasetExportQuery.java (100%) rename {src => export/src}/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java (100%) rename {src => export/src}/main/java/io/gdcc/spi/export/ExportDataProvider.java (100%) rename {src => export/src}/main/java/io/gdcc/spi/export/ExportException.java (100%) rename {src => export/src}/main/java/io/gdcc/spi/export/Exporter.java (100%) rename {src => export/src}/main/java/io/gdcc/spi/export/FileExportQuery.java (100%) rename {src => export/src}/main/java/io/gdcc/spi/export/FileMetadataPredicates.java (100%) rename {src => export/src}/main/java/io/gdcc/spi/export/PageRequest.java (100%) rename {src => export/src}/main/java/io/gdcc/spi/export/XMLExporter.java (100%) rename {src => export/src}/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java (100%) rename {src => export/src}/test/java/io/gdcc/spi/export/PageRequestTest.java (100%) diff --git a/export/pom.xml b/export/pom.xml new file mode 100644 index 0000000..32a0620 --- /dev/null +++ b/export/pom.xml @@ -0,0 +1,36 @@ + + + 4.0.0 + + io.gdcc.spi + parent + 2.1.0-SNAPSHOT + + + export + ${spi.export.version} + + + + + io.gdcc.spi + meta + + + + + jakarta.json + jakarta.json-api + provided + + + + jakarta.ws.rs + jakarta.ws.rs-api + provided + + + + \ No newline at end of file diff --git 
a/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java b/export/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/DatasetExportQuery.java rename to export/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java diff --git a/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java b/export/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java rename to export/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java diff --git a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/ExportDataProvider.java rename to export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java diff --git a/src/main/java/io/gdcc/spi/export/ExportException.java b/export/src/main/java/io/gdcc/spi/export/ExportException.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/ExportException.java rename to export/src/main/java/io/gdcc/spi/export/ExportException.java diff --git a/src/main/java/io/gdcc/spi/export/Exporter.java b/export/src/main/java/io/gdcc/spi/export/Exporter.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/Exporter.java rename to export/src/main/java/io/gdcc/spi/export/Exporter.java diff --git a/src/main/java/io/gdcc/spi/export/FileExportQuery.java b/export/src/main/java/io/gdcc/spi/export/FileExportQuery.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/FileExportQuery.java rename to export/src/main/java/io/gdcc/spi/export/FileExportQuery.java diff --git a/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java b/export/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java 
rename to export/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java diff --git a/src/main/java/io/gdcc/spi/export/PageRequest.java b/export/src/main/java/io/gdcc/spi/export/PageRequest.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/PageRequest.java rename to export/src/main/java/io/gdcc/spi/export/PageRequest.java diff --git a/src/main/java/io/gdcc/spi/export/XMLExporter.java b/export/src/main/java/io/gdcc/spi/export/XMLExporter.java similarity index 100% rename from src/main/java/io/gdcc/spi/export/XMLExporter.java rename to export/src/main/java/io/gdcc/spi/export/XMLExporter.java diff --git a/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java b/export/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java similarity index 100% rename from src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java rename to export/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java diff --git a/src/test/java/io/gdcc/spi/export/PageRequestTest.java b/export/src/test/java/io/gdcc/spi/export/PageRequestTest.java similarity index 100% rename from src/test/java/io/gdcc/spi/export/PageRequestTest.java rename to export/src/test/java/io/gdcc/spi/export/PageRequestTest.java From b42a780542c0a06271d322d6f2c71815bb2c8f91 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Mar 2026 14:19:40 +0100 Subject: [PATCH 14/55] refactor(meta): move `CoreProvider` and `Plugin` interfaces to "meta" Maven submodule, migrate to annotation-based plugin contracts Replace `CoreProvider` and `Plugin` interfaces with annotation-based plugin contract definitions (`@PluginContract` and `@DataversePlugin`). Introduce `PluginContractProcessor` for build-time metadata generation, ensuring contract validation and compatibility checks. Update supporting infrastructure to reflect the annotation-based approach. 
--- meta/pom.xml | 55 + .../spi/meta/annotations/DataversePlugin.java | 15 + .../spi/meta/annotations/PluginContract.java | 43 + .../meta/annotations/RequiredProvider.java | 17 + .../io/gdcc/spi/meta/plugin/CoreProvider.java | 15 + .../java/io/gdcc/spi/meta/plugin/Plugin.java | 26 + .../processor/PluginContractProcessor.java | 921 ++++++++++++++++ .../javax.annotation.processing.Processor | 1 + .../PluginContractProcessorTest.java | 995 ++++++++++++++++++ .../meta/processor/ProcessorTestCompiler.java | 93 ++ .../io/gdcc/spi/core/plugin/CoreProvider.java | 20 - .../java/io/gdcc/spi/core/plugin/Plugin.java | 66 -- 12 files changed, 2181 insertions(+), 86 deletions(-) create mode 100644 meta/pom.xml create mode 100644 meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java create mode 100644 meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java create mode 100644 meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java create mode 100644 meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java create mode 100644 meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java create mode 100644 meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java create mode 100644 meta/src/main/resources/META-INF/services/javax.annotation.processing.Processor create mode 100644 meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java create mode 100644 meta/src/test/java/io/gdcc/spi/meta/processor/ProcessorTestCompiler.java delete mode 100644 src/main/java/io/gdcc/spi/core/plugin/CoreProvider.java delete mode 100644 src/main/java/io/gdcc/spi/core/plugin/Plugin.java diff --git a/meta/pom.xml b/meta/pom.xml new file mode 100644 index 0000000..470f6fa --- /dev/null +++ b/meta/pom.xml @@ -0,0 +1,55 @@ + + + 4.0.0 + + io.gdcc.spi + parent + 2.1.0-SNAPSHOT + + + meta + ${spi.meta.version} + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${jdk.version} + ${compilerArgument} + + + + + default-compile 
+ compile + + compile + + + + none + + + + default-testCompile + test-compile + + testCompile + + + + + + + + + + + \ No newline at end of file diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java new file mode 100644 index 0000000..0344365 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java @@ -0,0 +1,15 @@ +package io.gdcc.spi.meta.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Marks a concrete plugin implementation class for metadata generation. + * Plugin authors use this annotation to mark their plugin for scanning and loading. + */ +@Retention(RetentionPolicy.SOURCE) +@Target(ElementType.TYPE) +public @interface DataversePlugin { +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java new file mode 100644 index 0000000..042e3c2 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java @@ -0,0 +1,43 @@ +package io.gdcc.spi.meta.annotations; + +import io.gdcc.spi.meta.plugin.Plugin; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Declares that an SPI interface is a versioned plugin contract. + * + *

The contract API level is taken from the interface's {@code API_LEVEL} + * constant by the annotation processor.

+ */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE) +public @interface PluginContract { + + /** + * Whether this contract is the primary plugin kind or an optional capability. + */ + Kind kind(); + + /** + * Other plugin contracts that must also be implemented if this contract is implemented. + * Example: a {@link Kind#CAPABILITY} contract should ask for a {@link Kind#BASE} contract to be implemented. + */ + Class[] requires() default {}; + + /** + * Core providers required by this contract. + */ + RequiredProvider[] providers() default {}; + + /** + * Distinguishes a base plugin contract from optional capability contracts. + */ + enum Kind { + BASE, + CAPABILITY + } +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java new file mode 100644 index 0000000..7760776 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java @@ -0,0 +1,17 @@ +package io.gdcc.spi.meta.annotations; + +import io.gdcc.spi.meta.plugin.CoreProvider; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +/** + * Declares that a {@link PluginContract} requires a specific core provider contract. + * + *

The provider API level is taken from the provider interface's + * {@code API_LEVEL} constant at compile time by the annotation processor.

+ */ +@Retention(RetentionPolicy.RUNTIME) +public @interface RequiredProvider { + Class value(); +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java b/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java new file mode 100644 index 0000000..5b70630 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java @@ -0,0 +1,15 @@ +package io.gdcc.spi.meta.plugin; + +/** + * Represents a provider interface for core functionality within the plugin system. + * CoreProvider serves as a marker or extension point within the framework to be implemented + * by classes that provide essential services or functionality to the core system. + * + * Implementations of this interface are expected to integrate with the broader plugin system, + * potentially enabling the core system to interface with specific features or subsystems. + * + * @see Plugin + */ +public interface CoreProvider { + +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java b/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java new file mode 100644 index 0000000..8edb989 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java @@ -0,0 +1,26 @@ +package io.gdcc.spi.meta.plugin; + +/** + * Represents the contract for plugins in the system. Implementations of this interface serve + * as modular components that can be dynamically loaded and integrated into the broader application. + * + * Each plugin must provide a unique, machine-readable identifier to ensure proper identification + * and usage within the system. + * + * Implementers are required to define the {@link #identity()} method to specify their unique + * identifier. + * + * @see CoreProvider + */ +public interface Plugin { + + /** + * Returns the unique, machine-readable identifier for this plugin. + * This will be the primary key within the core to identify a specific plugin implementation. 
+ * + * @return the plugin's identity string, which must be non-null, non-blank, and URL compatible. + * @implSpec This method must be overridden by any plugin implementation and return a non-null, non-blank, + * URL-compatible string. No plugin interface may provide a default implementation. + */ + String identity(); +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java new file mode 100644 index 0000000..1216f3e --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -0,0 +1,921 @@ +package io.gdcc.spi.meta.processor; + +import io.gdcc.spi.meta.annotations.PluginContract; + +import javax.annotation.processing.AbstractProcessor; +import javax.annotation.processing.ProcessingEnvironment; +import javax.annotation.processing.RoundEnvironment; +import javax.lang.model.SourceVersion; +import javax.lang.model.element.AnnotationMirror; +import javax.lang.model.element.AnnotationValue; +import javax.lang.model.element.Element; +import javax.lang.model.element.ElementKind; +import javax.lang.model.element.ExecutableElement; +import javax.lang.model.element.Modifier; +import javax.lang.model.element.TypeElement; +import javax.lang.model.element.VariableElement; +import javax.lang.model.type.DeclaredType; +import javax.lang.model.type.TypeKind; +import javax.lang.model.type.TypeMirror; +import javax.lang.model.util.Elements; +import javax.lang.model.util.Types; +import javax.tools.Diagnostic; +import javax.tools.FileObject; +import javax.tools.StandardLocation; +import java.io.IOException; +import java.io.Writer; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.Deque; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.TreeSet; + +/** 
+ * Annotation processor generating build-time metadata for plugin implementations. + * + *

This processor scans classes marked with {@code @DataversePlugin}, discovers all implemented + * plugin contracts annotated with {@code @PluginContract}, validates the contract graph, and emits:

+ * + *
    + *
  1. a per-plugin descriptor under {@value #DESCRIPTOR_DIRECTORY}, and
  2. + *
  3. a {@code META-INF/services/...} entry for the base plugin contract when safe to do so.
  4. + *
+ * + *

The descriptor captures the build-time view of:

+ *
    + *
  • the plugin implementation class,
  • + *
  • the plugin's base contract,
  • + *
  • all implemented contract API levels,
  • + *
  • all required provider API levels.
  • + *
+ * + *

Service registration generation is intentionally cautious. If any implementation of a given base + * contract uses {@code @AutoService}, this processor suppresses generated service output for that + * entire contract to avoid two processors writing the same {@code META-INF/services/...} file.

+ * + *

Errors are reported against the offending source element and then converted into a local + * {@link ProcessorException}. This aborts processing of the current implementation only, allowing + * the processor to continue and surface additional problems in the same compilation run.

+ */ +public final class PluginContractProcessor extends AbstractProcessor { + + /** + * Fully qualified name of the implementation marker annotation. + * + *

A string constant is used instead of a direct class literal so this processor can stay + * tolerant during bootstrapping and module boundary changes.

+ * + * @see io.gdcc.spi.meta.annotations.DataversePlugin + */ + private static final String PLUGIN_IMPLEMENTATION_ANNOTATION = "io.gdcc.spi.meta.annotations.DataversePlugin"; + + /** + * Fully qualified name of the contract annotation found on plugin contract interfaces. + * + * @see io.gdcc.spi.meta.annotations.PluginContract + */ + private static final String PLUGIN_CONTRACT_ANNOTATION = "io.gdcc.spi.meta.annotations.PluginContract"; + + /** + * Fully qualified name of the nested provider requirement annotation used inside + * {@code @PluginContract.providers()}. + * + * @see io.gdcc.spi.meta.annotations.RequiredProvider + */ + private static final String REQUIRED_PROVIDER_ANNOTATION = "io.gdcc.spi.meta.annotations.RequiredProvider"; + + /** + * Fully qualified name of {@code @AutoService}. + * + *

This processor does not depend on AutoService directly. It merely detects the annotation by + * name so it can avoid generating conflicting ServiceLoader resources.

+ */ + private static final String AUTO_SERVICE_ANNOTATION = "com.google.auto.service.AutoService"; + + /** + * Fully qualified name of the common plugin super-interface. + * @see io.gdcc.spi.meta.plugin.Plugin + */ + private static final String PLUGIN_INTERFACE = "io.gdcc.spi.meta.plugin.Plugin"; + + /** + * Fully qualified name of the common provider super-interface. + * @see io.gdcc.spi.meta.plugin.CoreProvider + */ + private static final String CORE_PROVIDER_INTERFACE = "io.gdcc.spi.meta.plugin.CoreProvider"; + + /** + * Name of the compile-time constant field carrying the contract version. + */ + private static final String API_LEVEL_FIELD = "API_LEVEL"; + + /** + * Output directory for generated plugin descriptors. + */ + private static final String DESCRIPTOR_DIRECTORY = "META-INF/dataverse/plugins/"; + + /** + * Output directory for generated ServiceLoader files. + */ + private static final String SERVICES_DIRECTORY = "META-INF/services/"; + + /** + * Cached utility for type operations such as assignability checks. + */ + private Types types; + + /** + * Cached utility for element lookup and annotation default resolution. + */ + private Elements elements; + + /** + * Descriptor models accumulated during processing, keyed by implementation class name. + * + *

Descriptors are written only after processing is over, which keeps resource generation + * deterministic and avoids partial aggregate state.

+ */ + private final Map descriptors = new LinkedHashMap<>(); + + /** + * Service registrations grouped by base contract name. + * + *

Each map entry corresponds to one future {@code META-INF/services/} file. A sorted + * set is used to make generated output stable across compiler runs.

+ */ + private final Map> serviceImplementationsByContract = new LinkedHashMap<>(); + + /** + * Base contract names for which service file generation must be skipped. + * + *

If any implementation of a base contract uses {@code @AutoService}, that service type is + * considered externally managed and this processor suppresses its own output for the same path. + * This way, we do not have a race condition / conflict over one service file.

+ */ + private final Set serviceTypesManagedExternally = new LinkedHashSet<>(); + + /** + * Initializes compiler utility helpers from the processing environment. + * + * @param processingEnv the active annotation processing environment + */ + @Override + public synchronized void init(ProcessingEnvironment processingEnv) { + super.init(processingEnv); + this.types = processingEnv.getTypeUtils(); + this.elements = processingEnv.getElementUtils(); + } + + /** + * Returns the annotation types directly claimed by this processor. + * + *

The processor only claims {@code @DataversePlugin}. Other annotations are read while + * traversing the type model of such implementations.

+ * + * @return the supported top-level annotation types + */ + @Override + public Set getSupportedAnnotationTypes() { + return Set.of(PLUGIN_IMPLEMENTATION_ANNOTATION); + } + + /** + * Advertises support for the latest source version understood by the current compiler. + * + *

This is preferred over a hard-coded release because the processor mainly operates on the + * annotation/type model and should remain usable across newer Java releases automatically.

+ * + * @return the latest source version supported by the running compiler + */ + @Override + public SourceVersion getSupportedSourceVersion() { + return SourceVersion.latestSupported(); + } + + /** + * Main processor entry point for each annotation processing round. + * + *

During normal rounds, all {@code @DataversePlugin} classes are validated and converted + * into in-memory descriptor/service models. During the final round, those accumulated models are + * written to the compiler output.

+ * + * @param annotations the annotations requested for this round + * @param roundEnv the current round environment + * @return {@code true}, because this processor claims the handled annotation + */ + @Override + public boolean process(Set annotations, RoundEnvironment roundEnv) { + TypeElement markerAnnotation = elements.getTypeElement(PLUGIN_IMPLEMENTATION_ANNOTATION); + if (markerAnnotation == null) { + // If the marker annotation itself cannot be resolved, something is wrong with the + // processor classpath. Returning false leaves room for other processors to continue. + return false; + } + + for (Element element : roundEnv.getElementsAnnotatedWith(markerAnnotation)) { + if (!(element instanceof TypeElement implementation)) { + error(element, "@DataversePlugin may only be used on classes"); + continue; + } + + try { + processImplementation(implementation); + } catch (ProcessorException ignored) { + // A concrete error has already been reported with source location. + // Continue with the next implementation so the user gets more than one error per run. + } + } + + if (roundEnv.processingOver()) { + writeAllGeneratedResources(); + } + + return true; + } + + /** + * Processes one plugin implementation class. + * + *

The workflow is:

+ *
    + *
  1. validate the class structurally,
  2. + *
  3. discover all implemented contracts in the full type hierarchy,
  4. + *
  5. identify exactly one base contract,
  6. + *
  7. collect contract and provider API levels,
  8. + *
  9. record descriptor output,
  10. + *
  11. record ServiceLoader output unless {@code @AutoService} takes over.
  12. + *
+ * + * @param implementation the plugin implementation class + */ + private void processImplementation(TypeElement implementation) { + validateImplementationClass(implementation); + + Set contracts = collectImplementedContracts(implementation); + if (contracts.isEmpty()) { + error(implementation, "No implemented plugin contracts found"); + throw new ProcessorException(); + } + + TypeElement baseContract = null; + Map contractLevels = new LinkedHashMap<>(); + Map providerLevels = new LinkedHashMap<>(); + + for (TypeElement contract : sortByQualifiedName(contracts)) { + PluginContractModel model = readPluginContractModel(contract); + + if (model.kind() == PluginContract.Kind.BASE) { + if (baseContract != null) { + error( + implementation, + "Implementation must not implement multiple base plugin contracts: " + + baseContract.getQualifiedName() + " and " + contract.getQualifiedName() + ); + throw new ProcessorException(); + } + baseContract = contract; + } + + validateRequiredContracts(implementation, contract, contracts, model); + + // The API level is intentionally read from the compile-time constant present on the + // contract interface visible during this compilation. This preserves the build-time + // contract snapshot we later need at runtime. + int contractApiLevel = readIntConstant(contract, API_LEVEL_FIELD); + contractLevels.put(contract.getQualifiedName().toString(), contractApiLevel); + + // Provider requirements accumulate across all implemented contracts/capabilities. + // Conflicting requirements are rejected below. 
+ Map requiredProviders = readProviderLevels(model.providers(), implementation); + mergeProviderLevels(providerLevels, requiredProviders, implementation); + } + + if (baseContract == null) { + error(implementation, "Implementation must implement exactly one base plugin contract"); + throw new ProcessorException(); + } + + String implementationClassName = implementation.getQualifiedName().toString(); + String baseContractName = baseContract.getQualifiedName().toString(); + + descriptors.put( + implementationClassName, + new GeneratedDescriptorModel( + implementationClassName, + baseContractName, + Map.copyOf(contractLevels), + Map.copyOf(providerLevels) + ) + ); + + if (hasAutoServiceAnnotation(implementation)) { + // Skip generated META-INF/services output for the entire base contract to avoid + // resource collisions with AutoService, which writes the same aggregate file path. + serviceTypesManagedExternally.add(baseContractName); + warning( + implementation, + "@AutoService detected; generated META-INF/services entry for " + + baseContractName + + " will be skipped to avoid conflicts" + ); + } else { + serviceImplementationsByContract + .computeIfAbsent(baseContractName, ignored -> new TreeSet<>()) + .add(implementationClassName); + } + } + + /** + * Validates the basic structural requirements for a plugin implementation. 
+ * + * @param implementation the implementation class to validate + */ + private void validateImplementationClass(TypeElement implementation) { + if (implementation.getKind() != ElementKind.CLASS) { + error(implementation, "@DataversePlugin may only be used on classes"); + throw new ProcessorException(); + } + + if (!implementation.getModifiers().contains(Modifier.PUBLIC)) { + error(implementation, "Dataverse plugin implementations must be public"); + throw new ProcessorException(); + } + + if (implementation.getModifiers().contains(Modifier.ABSTRACT)) { + error(implementation, "Dataverse plugin implementations must not be abstract"); + throw new ProcessorException(); + } + } + + /** + * Collects all plugin contracts implemented by the given class, including inherited ones. + * + *
+     * <p>The traversal walks the full type hierarchy breadth-first across both superclasses and
+     * interfaces so that indirectly inherited contracts and capability interfaces are discovered too.
+ * + * @param implementation the implementation class to inspect + * @return all implemented types recognized as plugin contracts + */ + private Set collectImplementedContracts(TypeElement implementation) { + Set result = new LinkedHashSet<>(); + Set visited = new LinkedHashSet<>(); + Deque queue = new ArrayDeque<>(); + queue.addLast(implementation.asType()); + + while (!queue.isEmpty()) { + TypeMirror current = queue.removeFirst(); + if (current.getKind() == TypeKind.NONE) { + continue; + } + if (!(current instanceof DeclaredType declaredType)) { + continue; + } + + Element currentElement = declaredType.asElement(); + if (!(currentElement instanceof TypeElement currentType)) { + continue; + } + + String qualifiedName = currentType.getQualifiedName().toString(); + if (!visited.add(qualifiedName)) { + continue; + } + + if (isPluginContract(currentType)) { + result.add(currentType); + } + + for (TypeMirror iface : currentType.getInterfaces()) { + queue.addLast(iface); + } + + TypeMirror superclass = currentType.getSuperclass(); + if (superclass != null && superclass.getKind() != TypeKind.NONE) { + queue.addLast(superclass); + } + } + + return result; + } + + /** + * Determines whether the given type is a plugin contract. + * + *
+     * <p>A type qualifies as a plugin contract only when it is annotated with
+     * {@code @PluginContract} and is assignable to the common plugin super-interface.
+ * + * @param typeElement the type to test + * @return {@code true} if the type is a plugin contract + */ + private boolean isPluginContract(TypeElement typeElement) { + if (findAnnotationMirror(typeElement, PLUGIN_CONTRACT_ANNOTATION) == null) { + return false; + } + + TypeElement pluginType = elements.getTypeElement(PLUGIN_INTERFACE); + if (pluginType == null) { + return false; + } + + return types.isAssignable( + types.erasure(typeElement.asType()), + types.erasure(pluginType.asType()) + ); + } + + /** + * Reads and validates the metadata of one plugin contract interface. + * + * @param contract the contract interface + * @return the extracted in-memory contract model + */ + private PluginContractModel readPluginContractModel(TypeElement contract) { + AnnotationMirror annotation = findAnnotationMirror(contract, PLUGIN_CONTRACT_ANNOTATION); + if (annotation == null) { + error(contract, "Missing @PluginContract"); + throw new ProcessorException(); + } + + validateApiLevelConstant(contract); + + PluginContract.Kind kind = readContractKind(annotation, contract); + List requiredContracts = readClassArrayAnnotationValue(annotation, "requires"); + List providers = readRequiredProviders(annotation); + + return new PluginContractModel(kind, List.copyOf(requiredContracts), List.copyOf(providers)); + } + + /** + * Reads the {@code kind} member of a {@code @PluginContract} annotation. 
+ * + * @param annotation the contract annotation mirror + * @param contract the annotated contract, used for diagnostics + * @return the parsed contract kind + */ + private PluginContract.Kind readContractKind(AnnotationMirror annotation, TypeElement contract) { + AnnotationValue value = getAnnotationValue(annotation, "kind"); + if (value == null) { + error(contract, "@PluginContract.kind is required"); + throw new ProcessorException(); + } + + Object raw = value.getValue(); + if (!(raw instanceof VariableElement enumConstant)) { + error(contract, "@PluginContract.kind must be an enum constant"); + throw new ProcessorException(); + } + + try { + return PluginContract.Kind.valueOf(enumConstant.getSimpleName().toString()); + } catch (IllegalArgumentException ex) { + error(contract, "Unsupported @PluginContract.kind: " + enumConstant.getSimpleName()); + throw new ProcessorException(); + } + } + + /** + * Verifies that the given type declares a valid compile-time constant {@code API_LEVEL} field. + * + * @param contract the contract or provider type to validate + */ + private void validateApiLevelConstant(TypeElement contract) { + readIntConstant(contract, API_LEVEL_FIELD); + } + + /** + * Reads a compile-time {@code int} constant from a type. + * + * @param type the owning type + * @param fieldName the field to locate + * @return the constant value + */ + private int readIntConstant(TypeElement type, String fieldName) { + for (Element enclosed : type.getEnclosedElements()) { + if (enclosed instanceof VariableElement variable + && variable.getSimpleName().contentEquals(fieldName)) { + Object value = variable.getConstantValue(); + if (value instanceof Integer intValue) { + return intValue; + } + + error(type, type.getQualifiedName() + "." 
+ fieldName + " must be a compile-time int constant"); + throw new ProcessorException(); + } + } + + error(type, type.getQualifiedName() + " must declare int " + fieldName); + throw new ProcessorException(); + } + + /** + * Validates that all contracts required by the current contract are also implemented. + * + * @param implementation the concrete plugin implementation + * @param contract the contract currently being validated + * @param allImplementedContracts all discovered contracts of the implementation + * @param model the parsed model of the current contract + */ + private void validateRequiredContracts( + TypeElement implementation, + TypeElement contract, + Set allImplementedContracts, + PluginContractModel model + ) { + Set implementedNames = new LinkedHashSet<>(); + for (TypeElement implemented : allImplementedContracts) { + implementedNames.add(implemented.getQualifiedName().toString()); + } + + for (TypeElement requiredContract : model.requiredContracts()) { + String requiredName = requiredContract.getQualifiedName().toString(); + if (!implementedNames.contains(requiredName)) { + error( + implementation, + "Implementation of contract " + contract.getQualifiedName() + + " also requires contract " + requiredName + ); + throw new ProcessorException(); + } + } + } + + /** + * Resolves the API levels of all providers required by the current contract. 
+ * + * @param providerTypes the provider interfaces referenced by the contract annotation + * @param implementation the concrete implementation being processed, used for diagnostics + * @return a map from provider class name to required API level + */ + private Map readProviderLevels(List providerTypes, TypeElement implementation) { + Map result = new LinkedHashMap<>(); + TypeElement coreProviderType = elements.getTypeElement(CORE_PROVIDER_INTERFACE); + if (coreProviderType == null) { + error(implementation, "Cannot resolve " + CORE_PROVIDER_INTERFACE); + throw new ProcessorException(); + } + + for (TypeElement providerType : providerTypes) { + if (!types.isAssignable( + types.erasure(providerType.asType()), + types.erasure(coreProviderType.asType()) + )) { + error( + implementation, + "Required provider " + providerType.getQualifiedName() + + " does not implement " + CORE_PROVIDER_INTERFACE + ); + throw new ProcessorException(); + } + + int apiLevel = readIntConstant(providerType, API_LEVEL_FIELD); + result.put(providerType.getQualifiedName().toString(), apiLevel); + } + + return result; + } + + /** + * Merges provider API level requirements from one contract into the accumulated set. + * + *
+     * <p>If the same provider is required with different API levels by different contracts,
+     * processing fails because the resulting runtime expectation would be ambiguous.
+ * + * @param merged the accumulated provider requirements + * @param additional the provider requirements from the current contract + * @param implementation the concrete implementation, used for diagnostics + */ + private void mergeProviderLevels( + Map merged, + Map additional, + TypeElement implementation + ) { + additional.forEach((providerName, apiLevel) -> { + Integer existing = merged.putIfAbsent(providerName, apiLevel); + if (existing != null && existing.intValue() != apiLevel) { + error( + implementation, + "Conflicting API levels for provider " + providerName + + ": " + existing + " vs " + apiLevel + ); + throw new ProcessorException(); + } + }); + } + + /** + * Checks whether the implementation class uses {@code @AutoService}. + * + * @param implementation the implementation class + * @return {@code true} if {@code @AutoService} is present + */ + private boolean hasAutoServiceAnnotation(TypeElement implementation) { + return findAnnotationMirror(implementation, AUTO_SERVICE_ANNOTATION) != null; + } + + /** + * Writes all accumulated generated resources after processing is complete. + * + *
+     * <p>Descriptors are always written. ServiceLoader files are written only for service types
+     * that are not externally managed via {@code @AutoService}.
+ */ + private void writeAllGeneratedResources() { + for (GeneratedDescriptorModel descriptor : descriptors.values()) { + writeDescriptor(descriptor); + } + + for (Map.Entry> entry : serviceImplementationsByContract.entrySet()) { + String serviceType = entry.getKey(); + if (serviceTypesManagedExternally.contains(serviceType)) { + continue; + } + writeServiceFile(serviceType, entry.getValue()); + } + } + + /** + * Writes one generated plugin descriptor file. + * + * @param descriptor the descriptor model to serialize + */ + private void writeDescriptor(GeneratedDescriptorModel descriptor) { + String resourceName = DESCRIPTOR_DIRECTORY + descriptor.pluginClass().replace('.', '_') + ".properties"; + + try { + FileObject resource = processingEnv.getFiler() + .createResource(StandardLocation.CLASS_OUTPUT, "", resourceName); + + Properties properties = new Properties(); + properties.setProperty("plugin.class", descriptor.pluginClass()); + properties.setProperty("plugin.kind", descriptor.pluginKind()); + + descriptor.contracts().forEach((contract, level) -> + properties.setProperty("plugin." + contract + ".level", Integer.toString(level))); + + descriptor.requiredProviders().forEach((provider, level) -> + properties.setProperty("plugin.requires." + provider + ".level", Integer.toString(level))); + + try (Writer writer = resource.openWriter()) { + properties.store(writer, "Generated plugin contract metadata"); + } + } catch (IOException e) { + processingEnv.getMessager().printMessage( + Diagnostic.Kind.ERROR, + "Failed to write descriptor for " + descriptor.pluginClass() + ": " + e.getMessage() + ); + } + } + + /** + * Writes one ServiceLoader registration file for a base contract. 
+ * + * @param serviceTypeName the fully qualified name of the service interface + * @param implementations the implementation class names to register + */ + private void writeServiceFile(String serviceTypeName, Set implementations) { + String resourceName = SERVICES_DIRECTORY + serviceTypeName; + + try { + FileObject resource = processingEnv.getFiler() + .createResource(StandardLocation.CLASS_OUTPUT, "", resourceName); + + try (Writer writer = resource.openWriter()) { + for (String implementation : implementations) { + writer.write(implementation); + writer.write(System.lineSeparator()); + } + } + } catch (IOException e) { + processingEnv.getMessager().printMessage( + Diagnostic.Kind.ERROR, + "Failed to write service file for " + serviceTypeName + ": " + e.getMessage() + ); + } + } + + /** + * Finds an annotation mirror on the given element by fully qualified annotation type name. + * + * @param element the annotated element + * @param annotationTypeName the fully qualified annotation type name + * @return the matching annotation mirror, or {@code null} if absent + */ + private AnnotationMirror findAnnotationMirror(Element element, String annotationTypeName) { + for (AnnotationMirror mirror : element.getAnnotationMirrors()) { + Element annotationElement = mirror.getAnnotationType().asElement(); + if (annotationElement instanceof TypeElement annotationType + && annotationType.getQualifiedName().contentEquals(annotationTypeName)) { + return mirror; + } + } + return null; + } + + /** + * Resolves one annotation member value, including defaults. 
+ * + * @param annotation the annotation mirror + * @param memberName the member to resolve + * @return the resolved annotation value, or {@code null} if not found + */ + private AnnotationValue getAnnotationValue(AnnotationMirror annotation, String memberName) { + Map values = + elements.getElementValuesWithDefaults(annotation); + + for (Map.Entry entry : values.entrySet()) { + if (entry.getKey().getSimpleName().contentEquals(memberName)) { + return entry.getValue(); + } + } + + return null; + } + + /** + * Reads an annotation member containing an array of class literals. + * + *
+     * <p>During annotation processing, class-valued members are represented as {@link TypeMirror}s
+     * within {@link AnnotationValue}s. This helper converts them into {@link TypeElement}s.
+ * + * @param annotation the annotation mirror + * @param memberName the member containing class literals + * @return the referenced types, preserving declaration order + */ + private List readClassArrayAnnotationValue(AnnotationMirror annotation, String memberName) { + AnnotationValue value = getAnnotationValue(annotation, memberName); + if (value == null) { + return List.of(); + } + + Object raw = value.getValue(); + if (!(raw instanceof List values)) { + return List.of(); + } + + List result = new ArrayList<>(); + for (Object entry : values) { + if (!(entry instanceof AnnotationValue annotationValue)) { + continue; + } + + Object classValue = annotationValue.getValue(); + if (!(classValue instanceof TypeMirror typeMirror)) { + continue; + } + + TypeElement typeElement = asTypeElement(typeMirror); + if (typeElement != null) { + result.add(typeElement); + } + } + + return List.copyOf(result); + } + + /** + * Reads the nested {@code providers()} member of a {@code @PluginContract} annotation. + * + *
+     * <p>The provider information is stored as nested {@code @RequiredProvider} annotations. This
+     * helper unwraps those nested annotations and returns the referenced provider types.
+ * + * @param pluginContractAnnotation the plugin contract annotation mirror + * @return provider types referenced by the contract + */ + private List readRequiredProviders(AnnotationMirror pluginContractAnnotation) { + AnnotationValue providersValue = getAnnotationValue(pluginContractAnnotation, "providers"); + if (providersValue == null) { + return List.of(); + } + + Object raw = providersValue.getValue(); + if (!(raw instanceof List values)) { + return List.of(); + } + + List result = new ArrayList<>(); + for (Object entry : values) { + if (!(entry instanceof AnnotationValue annotationValue)) { + continue; + } + + Object nested = annotationValue.getValue(); + if (!(nested instanceof AnnotationMirror providerAnnotation)) { + continue; + } + + TypeElement providerAnnotationType = asTypeElement(providerAnnotation.getAnnotationType()); + if (providerAnnotationType == null + || !providerAnnotationType.getQualifiedName().contentEquals(REQUIRED_PROVIDER_ANNOTATION)) { + continue; + } + + AnnotationValue providerClassValue = getAnnotationValue(providerAnnotation, "value"); + if (providerClassValue == null) { + continue; + } + + Object providerRaw = providerClassValue.getValue(); + if (!(providerRaw instanceof TypeMirror providerTypeMirror)) { + continue; + } + + TypeElement providerType = asTypeElement(providerTypeMirror); + if (providerType != null) { + result.add(providerType); + } + } + + return List.copyOf(result); + } + + /** + * Converts a declared type mirror into its corresponding type element. + * + * @param typeMirror the type mirror to convert + * @return the type element, or {@code null} if the mirror is not a declared type + */ + private TypeElement asTypeElement(TypeMirror typeMirror) { + if (!(typeMirror instanceof DeclaredType declaredType)) { + return null; + } + + Element element = declaredType.asElement(); + return element instanceof TypeElement typeElement ? 
typeElement : null; + } + + /** + * Returns the given types sorted by fully qualified name for deterministic processing order. + * + * @param typesToSort the types to sort + * @return a sorted list view + */ + private List sortByQualifiedName(Set typesToSort) { + return typesToSort.stream() + .sorted(Comparator.comparing(type -> type.getQualifiedName().toString())) + .toList(); + } + + /** + * Emits a compiler error message associated with a source element. + * + * @param element the source element to associate with the diagnostic + * @param message the diagnostic text + */ + private void error(Element element, String message) { + processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, message, element); + } + + /** + * Emits a compiler warning message associated with a source element. + * + * @param element the source element to associate with the diagnostic + * @param message the diagnostic text + */ + private void warning(Element element, String message) { + processingEnv.getMessager().printMessage(Diagnostic.Kind.WARNING, message, element); + } + + /** + * Internal in-memory representation of one contract interface. + * + * @param kind whether the contract is a base contract or a capability + * @param requiredContracts contracts that must also be implemented + * @param providers providers required by this contract + */ + private record PluginContractModel( + PluginContract.Kind kind, + List requiredContracts, + List providers + ) { + } + + /** + * Internal in-memory representation of one generated plugin descriptor. 
+ * + * @param pluginClass implementation class name + * @param pluginKind fully qualified base contract name + * @param contracts map of implemented contract names to API levels + * @param requiredProviders map of required provider names to API levels + */ + private record GeneratedDescriptorModel( + String pluginClass, + String pluginKind, + Map contracts, + Map requiredProviders + ) { + } + + /** + * Local control-flow exception used to abort processing of a single implementation after an error. + * + *
+     * <p>This avoids deeply nested conditional code while still allowing the processor to continue
+     * with other plugin implementations in the same round.
+ */ + private static final class ProcessorException extends RuntimeException { + } +} \ No newline at end of file diff --git a/meta/src/main/resources/META-INF/services/javax.annotation.processing.Processor b/meta/src/main/resources/META-INF/services/javax.annotation.processing.Processor new file mode 100644 index 0000000..5bbc814 --- /dev/null +++ b/meta/src/main/resources/META-INF/services/javax.annotation.processing.Processor @@ -0,0 +1 @@ +io.gdcc.spi.meta.processor.PluginContractProcessor \ No newline at end of file diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java new file mode 100644 index 0000000..ae0d340 --- /dev/null +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -0,0 +1,995 @@ +package io.gdcc.spi.meta.processor; + +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.annotations.DataversePlugin; +import io.gdcc.spi.meta.annotations.RequiredProvider; +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +import javax.tools.Diagnostic; +import java.io.IOException; +import java.nio.file.Files; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.*; + +class PluginContractProcessorTest { + + private final ProcessorTestCompiler compiler = new ProcessorTestCompiler(); + + @Nested + class Basics { + @Test + void generatesDescriptorAndServiceFileForValidPlugin() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestProvider.java", + """ + package test; + + import %s; + + public interface TestProvider extends CoreProvider { + int API_LEVEL = 7; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + 
import %s; + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.BASE, + providers = { @RequiredProvider(TestProvider.class) } + ) + public interface TestPlugin extends Plugin { + int API_LEVEL = 3; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/GoodPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class GoodPlugin implements TestPlugin { + @Override + public String identity() { + return "good"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = "META-INF/dataverse/plugins/test_GoodPlugin.properties"; + String servicePath = "META-INF/services/test.TestPlugin"; + + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); + + String descriptor = Files.readString(result.generatedFile(descriptorPath)); + assertTrue(descriptor.contains("plugin.class=test.GoodPlugin")); + assertTrue(descriptor.contains("plugin.kind=test.TestPlugin")); + assertTrue(descriptor.contains("plugin.test.TestPlugin.level=3")); + assertTrue(descriptor.contains("plugin.requires.test.TestProvider.level=7")); + + String serviceFile = Files.readString(result.generatedFile(servicePath)); + assertEquals("test.GoodPlugin", serviceFile.trim()); + } + + @Test + void failsWhenMultipleBaseContractsAreImplemented() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePluginA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface BasePluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + 
Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BasePluginB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface BasePluginB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BadPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class BadPlugin implements BasePluginA, BasePluginB { + @Override + public String identity() { + return "bad"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "multiple base plugin contracts"); + } + + @Test + void failsWhenRequiredContractIsMissing() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.CAPABILITY, + requires = { BasePlugin.class } + ) + public interface CapabilityPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/MissingBasePluginImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MissingBasePluginImpl implements CapabilityPlugin { + @Override + public String identity() { + return "missing-base"; + } + } + 
""".formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "also requires contract test.BasePlugin"); + } + + @Test + void suppressesGeneratedServiceFileWhenAutoServiceIsPresent() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "com/google/auto/service/AutoService.java", + """ + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import java.lang.annotation.Target; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface AutoService { + Class[] value(); + } + """ + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AutoServicePlugin.java", + """ + package test; + + import com.google.auto.service.AutoService; + import %s; + + @DataversePlugin + @AutoService(TestPlugin.class) + public class AutoServicePlugin implements TestPlugin { + @Override + public String identity() { + return "auto"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = "META-INF/dataverse/plugins/test_AutoServicePlugin.properties"; + String servicePath = "META-INF/services/test.TestPlugin"; + + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); + assertFalse(Files.exists(result.generatedFile(servicePath)), "Service file should be suppressed"); + + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@AutoService 
detected"); + } + + @Test + void failsWhenImplementationIsNotPublic() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/HiddenPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + class HiddenPlugin implements TestPlugin { + @Override + public String identity() { + return "hidden"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be public"); + } + + @Test + void failsWhenImplementationIsAbstract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AbstractPluginImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public abstract class AbstractPluginImpl implements TestPlugin { + @Override + public String identity() { + return "abstract"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must not be abstract"); + } + + @Test + void failsWhenNoBaseContractIsImplemented() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + 
"test/CapabilityPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.CAPABILITY) + public interface CapabilityPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityOnlyImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class CapabilityOnlyImpl implements CapabilityPlugin { + @Override + public String identity() { + return "capability-only"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one base plugin contract"); + } + } + + @Nested + class EdgeCases { + @Test + void failsWhenContractApiLevelIsMissing() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingApiLevelPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface MissingApiLevelPlugin extends Plugin { + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/MissingApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MissingApiLevelImpl implements MissingApiLevelPlugin { + @Override + public String identity() { + return "missing-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must declare int API_LEVEL"); + } + + @Test + void failsWhenContractApiLevelIsNotCompileTimeConstant() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NonConstantApiLevelPlugin.java", + """ + 
package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface NonConstantApiLevelPlugin extends Plugin { + Integer API_LEVEL = Integer.valueOf(2); + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/NonConstantApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NonConstantApiLevelImpl implements NonConstantApiLevelPlugin { + @Override + public String identity() { + return "non-constant-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be a compile-time int constant"); + } + + @Test + void failsWhenRequiredProviderIsNotACoreProvider() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NotAProvider.java", + """ + package test; + + public interface NotAProvider { + int API_LEVEL = 1; + } + """ + ), + source( + "test/InvalidProviderPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.BASE, + providers = { @RequiredProvider(NotAProvider.class) } + ) + public interface InvalidProviderPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/InvalidProviderImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class InvalidProviderImpl implements InvalidProviderPlugin { + @Override + public String identity() { + return "invalid-provider"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, 
"cannot be converted"); + } + + @Test + void failsWhenProviderApiLevelIsMissing() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingProviderApiLevel.java", + """ + package test; + + import %s; + + public interface MissingProviderApiLevel extends CoreProvider { + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.BASE, + providers = { @RequiredProvider(MissingProviderApiLevel.class) } + ) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/MissingProviderApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MissingProviderApiLevelImpl implements TestPlugin { + @Override + public String identity() { + return "missing-provider-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must declare int API_LEVEL"); + } + + @Test + void failsWhenProviderApiLevelIsNotCompileTimeConstant() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NonConstantProviderApiLevel.java", + """ + package test; + + import %s; + + public interface NonConstantProviderApiLevel extends CoreProvider { + Integer API_LEVEL = Integer.valueOf(9); + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.BASE, + providers = { @RequiredProvider(NonConstantProviderApiLevel.class) } + ) + 
public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/NonConstantProviderApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NonConstantProviderApiLevelImpl implements TestPlugin { + @Override + public String identity() { + return "non-constant-provider-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be a compile-time int constant"); + } + + @Test + void discoversInheritedContractsTransitively() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestProvider.java", + """ + package test; + + import %s; + + public interface TestProvider extends CoreProvider { + int API_LEVEL = 11; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.BASE, + providers = { @RequiredProvider(TestProvider.class) } + ) + public interface BasePlugin extends Plugin { + int API_LEVEL = 3; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/IntermediateCapability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.CAPABILITY, + requires = { BasePlugin.class } + ) + public interface IntermediateCapability extends BasePlugin { + int API_LEVEL = 4; + } + """.formatted( + PluginContract.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/LeafCapability.java", + """ + package test; + 
+ import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.CAPABILITY, + requires = { BasePlugin.class, IntermediateCapability.class } + ) + public interface LeafCapability extends IntermediateCapability { + int API_LEVEL = 5; + } + """.formatted( + PluginContract.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/TransitiveImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class TransitiveImpl implements LeafCapability { + @Override + public String identity() { + return "transitive"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = "META-INF/dataverse/plugins/test_TransitiveImpl.properties"; + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + + String descriptor = Files.readString(result.generatedFile(descriptorPath)); + assertTrue(descriptor.contains("plugin.class=test.TransitiveImpl")); + assertTrue(descriptor.contains("plugin.kind=test.BasePlugin")); + assertTrue(descriptor.contains("plugin.test.BasePlugin.level=3")); + assertTrue(descriptor.contains("plugin.test.IntermediateCapability.level=4")); + assertTrue(descriptor.contains("plugin.test.LeafCapability.level=5")); + assertTrue(descriptor.contains("plugin.requires.test.TestProvider.level=11")); + } + + @Test + void suppressesGeneratedServiceFileForWholeContractWhenAutoServiceIsMixedWithNormalImplementations() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "com/google/auto/service/AutoService.java", + """ + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import java.lang.annotation.Target; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface 
AutoService { + Class[] value(); + } + """ + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AutoServiceImpl.java", + """ + package test; + + import com.google.auto.service.AutoService; + import %s; + + @DataversePlugin + @AutoService(TestPlugin.class) + public class AutoServiceImpl implements TestPlugin { + @Override + public String identity() { + return "auto"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ), + source( + "test/NormalImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NormalImpl implements TestPlugin { + @Override + public String identity() { + return "normal"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String autoDescriptorPath = "META-INF/dataverse/plugins/test_AutoServiceImpl.properties"; + String normalDescriptorPath = "META-INF/dataverse/plugins/test_NormalImpl.properties"; + String servicePath = "META-INF/services/test.TestPlugin"; + + assertTrue(Files.exists(result.generatedFile(autoDescriptorPath)), "AutoService descriptor should be generated"); + assertTrue(Files.exists(result.generatedFile(normalDescriptorPath)), "Normal descriptor should be generated"); + assertFalse(Files.exists(result.generatedFile(servicePath)), "Service file should be suppressed for the whole contract"); + + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@AutoService detected"); + } + + @Test + void mergesDuplicateProviderRequirementsWhenLevelsMatch() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/SharedProvider.java", + """ + package test; + + import 
%s; + + public interface SharedProvider extends CoreProvider { + int API_LEVEL = 8; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityOne.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.CAPABILITY, + requires = { BasePlugin.class }, + providers = { @RequiredProvider(SharedProvider.class) } + ) + public interface CapabilityOne extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/CapabilityTwo.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + kind = PluginContract.Kind.CAPABILITY, + requires = { BasePlugin.class }, + providers = { @RequiredProvider(SharedProvider.class) } + ) + public interface CapabilityTwo extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/MergedProviderImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MergedProviderImpl implements BasePlugin, CapabilityOne, CapabilityTwo { + @Override + public String identity() { + return "merged-provider"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = "META-INF/dataverse/plugins/test_MergedProviderImpl.properties"; + String descriptor = Files.readString(result.generatedFile(descriptorPath)); + 
+ assertTrue(descriptor.contains("plugin.requires.test.SharedProvider.level=8")); + } + } + + private static ProcessorTestCompiler.SourceFile source(String relativePath, String content) { + return new ProcessorTestCompiler.SourceFile(relativePath, content); + } + + private static void assertDiagnosticContains( + ProcessorTestCompiler.CompilationResult result, + Diagnostic.Kind kind, + String fragment + ) { + boolean found = result.diagnostics().stream() + .filter(diagnostic -> diagnostic.getKind() == kind) + .map(diagnostic -> diagnostic.getMessage(null)) + .anyMatch(message -> message.contains(fragment)); + + assertTrue( + found, + () -> "Expected diagnostic containing '%s' but got:%n%s".formatted(fragment, result.diagnosticsAsText()) + ); + } +} \ No newline at end of file diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/ProcessorTestCompiler.java b/meta/src/test/java/io/gdcc/spi/meta/processor/ProcessorTestCompiler.java new file mode 100644 index 0000000..f41aa80 --- /dev/null +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/ProcessorTestCompiler.java @@ -0,0 +1,93 @@ +package io.gdcc.spi.meta.processor; + +import javax.tools.Diagnostic; +import javax.tools.DiagnosticCollector; +import javax.tools.JavaCompiler; +import javax.tools.JavaFileObject; +import javax.tools.StandardJavaFileManager; +import javax.tools.ToolProvider; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +/** + * A utility class for compiling Java source files during tests, leveraging an in-memory + * approach to simulate compilation and validation of Java code. This is typically used + * in scenarios where processor-based code validation is needed. 
+ * + * This class uses the Java Compiler API to compile source files provided as input and + * returns a result encapsulating success state, diagnostics information, and the path + * to generated class files. + */ +final class ProcessorTestCompiler { + + CompilationResult compile(List sources) throws IOException { + JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); + if (compiler == null) { + throw new IllegalStateException("No system Java compiler available. Are tests running on a JRE instead of a JDK?"); + } + + Path tempDir = Files.createTempDirectory("plugin-contract-processor-test"); + Path sourceDir = tempDir.resolve("src"); + Path classOutputDir = tempDir.resolve("classes"); + Files.createDirectories(sourceDir); + Files.createDirectories(classOutputDir); + + List sourcePaths = new ArrayList<>(); + for (SourceFile source : sources) { + Path file = sourceDir.resolve(source.relativePath()); + Files.createDirectories(file.getParent()); + Files.writeString(file, source.content(), StandardCharsets.UTF_8); + sourcePaths.add(file); + } + + DiagnosticCollector diagnostics = new DiagnosticCollector<>(); + + try (StandardJavaFileManager fileManager = compiler.getStandardFileManager(diagnostics, null, StandardCharsets.UTF_8)) { + Iterable compilationUnits = + fileManager.getJavaFileObjectsFromPaths(sourcePaths); + + List options = List.of( + "--release", "17", + "-classpath", System.getProperty("java.class.path"), + "-d", classOutputDir.toString() + ); + + JavaCompiler.CompilationTask task = compiler.getTask( + null, + fileManager, + diagnostics, + options, + null, + compilationUnits + ); + + task.setProcessors(List.of(new PluginContractProcessor())); + + boolean success = task.call(); + return new CompilationResult(success, List.copyOf(diagnostics.getDiagnostics()), classOutputDir); + } + } + + record SourceFile(String relativePath, String content) { + } + + record CompilationResult( + boolean success, + List> diagnostics, + Path classOutputDir + ) { + 
String diagnosticsAsText() { + return diagnostics.stream() + .map(diagnostic -> diagnostic.getKind() + ": " + diagnostic.getMessage(null)) + .reduce("", (left, right) -> left + right + System.lineSeparator()); + } + + Path generatedFile(String relativePath) { + return classOutputDir.resolve(relativePath); + } + } +} diff --git a/src/main/java/io/gdcc/spi/core/plugin/CoreProvider.java b/src/main/java/io/gdcc/spi/core/plugin/CoreProvider.java deleted file mode 100644 index 9c2697f..0000000 --- a/src/main/java/io/gdcc/spi/core/plugin/CoreProvider.java +++ /dev/null @@ -1,20 +0,0 @@ -package io.gdcc.spi.core.plugin; - -public interface CoreProvider { - - /** - * Plugins require accessibility to core functionality, which {@link CoreProvider}s offer to them. - * Any provider implementation in the core requires building against a specific version of the API contract. - * To avoid plugins asking the wrong questions or the wrong way, the plugin loader will need to check - * that the core provider implementations use the same API contract level as the plugins. - * - * @apiNote This method must be overridden by any provider interface extending this interface and return an API level - * inlined at compile-time of the interface. - * @return the API level the core uses - * @throws UnsupportedOperationException when the provider interface does not override the method - */ - static int apiLevel() { - throw new UnsupportedOperationException("Provider must override apiLevel()"); - } - -} diff --git a/src/main/java/io/gdcc/spi/core/plugin/Plugin.java b/src/main/java/io/gdcc/spi/core/plugin/Plugin.java deleted file mode 100644 index df98f0e..0000000 --- a/src/main/java/io/gdcc/spi/core/plugin/Plugin.java +++ /dev/null @@ -1,66 +0,0 @@ -package io.gdcc.spi.core.plugin; - -import java.util.Set; - -public interface Plugin { - - /** - * Returns the unique, machine-readable identifier for this plugin. 
- * This will be the primary key within the core to identify a specific plugin implementation. - * - * @return the plugin's identity string, which must be non-null, non-blank, and URL compatible. - * @implSpec This method must be overridden by any plugin implementation and return a non-null, non-blank, - * URL-compatible string. No plugin interface may provide a default implementation. - */ - String identity(); - - /** - * Returns the plugin API level that this plugin has been built against to the core system. - * This represents the version of the plugin contract that the plugin implementation - * adheres to, used by the core loader to ensure compatibility between the plugin - * and the core system. - * - * @return the API level provided by this plugin - * @implSpec This method must be overridden by any plugin implementation and return an API level - * inlined at compile-time of the interface. A plugin interface may provide a default implementation, - * but must be aware this has to be dropped once the initial API level needs to be increased due - * to a breaking change. - * @implNote Inlining the API level at build time requires this method body to return the primitive constant - * from the plugin interface. If the plugin interface uses a {@code int API_LEVEL} - * (always {@code static final} constants in interfaces), an example code would look like this: - * {@code return PutPluginInterfaceNameHere.API_LEVEL; } - */ - int providedPluginApiLevel(); - - /** - * A plugin interacts with the core using any number of providers. - * When a plugin is built, it is linked against a specific version of the {@link CoreProvider} contracts. - * At loading time, the core must ensure that the plugin will use the same API contract level as the provider - * implementations in the core expect. - * A loader uses this method to determine of a specific provider API level used at build-time of the plugin. 
- * - * @return the required API level for the given provider class the plugin expects the core to support - * @throws IllegalArgumentException when a given provider class is not needed or unknown to the plugin - * @implSpec This method must be overridden by any plugin implementation and return an API level - * inlined at compile-time of the interface. A plugin interface may provide a default implementation, - * but must be aware this has to be dropped once the initial API level needs to be increased due - * to a breaking change. - */ - int requiredProviderApiLevel(Class providerClass); - - /** - * Returns the set of {@link CoreProvider} interfaces that this plugin expects to be available - * at runtime for interacting with the core system. - * The core uses this information to validate compatibility and ensure all required provider contracts - * are present and match the API level used during plugin compilation. - * See also {@link #requiredProviderApiLevel(Class)} for details on API level compatibility. - * - * @return a non-null set of {@link CoreProvider} interface classes expected by the plugin - * @implSpec This method must be overridden by any plugin implementation and return a set of - * {@link CoreProvider} interface classes that the plugin expects to be available at runtime. - * The set may be empty if no providers are in use. The plugin interface may provide a - * default implementation, but must drop it once a breaking change for this method appears. - */ - Set> expectedProviders(); - -} From 5454e92f93f22c68ac9f639b02336294f05f2e40 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Mar 2026 15:55:36 +0100 Subject: [PATCH 15/55] feat(meta): enhance `PluginContractProcessor` for stricter validation and coverage Extend `PluginContractProcessor` to enforce stricter project-wide validation rules for plugin interfaces and implementations. Update processor logic to ensure idempotency during compilation rounds. 
Add unit tests to validate edge cases for plugin and provider contracts, such as missing annotations, duplicate registrations, and hierarchical type inspections. --- .../processor/PluginContractProcessor.java | 206 +++++- .../PluginContractProcessorTest.java | 584 +++++++++++++++++- 2 files changed, 779 insertions(+), 11 deletions(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 1216f3e..19cdbae 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -160,6 +160,24 @@ public final class PluginContractProcessor extends AbstractProcessor { */ private final Set serviceTypesManagedExternally = new LinkedHashSet<>(); + /** + * Types already inspected during the current compilation. + * + *

The processor performs additional model-wide validation beyond explicit {@code @DataversePlugin} + * usages. Since the same type may reappear through multiple roots or hierarchy traversals, this set + * keeps those checks idempotent and avoids duplicate diagnostics.

+ */ + private final Set inspectedTypes = new LinkedHashSet<>(); + + /** + * Plugin implementations already converted into generated output models. + * + *

This is needed because implementations may be processed either explicitly through + * {@code @DataversePlugin} or implicitly when they are discovered as plain {@code Plugin} + * implementations during hierarchy inspection.

+ */ + private final Set processedImplementations = new LinkedHashSet<>(); + /** * Initializes compiler utility helpers from the processing environment. * @@ -175,14 +193,15 @@ public synchronized void init(ProcessingEnvironment processingEnv) { /** * Returns the annotation types directly claimed by this processor. * - *

The processor only claims {@code @DataversePlugin}. Other annotations are read while - * traversing the type model of such implementations.

+ *

The processor claims all annotations because it does not only react to explicitly annotated + * {@code @DataversePlugin} classes. It also performs project-wide validation for plugin contracts, + * provider contracts, and unannotated plugin implementations discovered in the type model.

* * @return the supported top-level annotation types */ @Override public Set getSupportedAnnotationTypes() { - return Set.of(PLUGIN_IMPLEMENTATION_ANNOTATION); + return Set.of("*"); } /** @@ -201,13 +220,18 @@ public SourceVersion getSupportedSourceVersion() { /** * Main processor entry point for each annotation processing round. * - *

During normal rounds, all {@code @DataversePlugin} classes are validated and converted - * into in-memory descriptor/service models. During the final round, those accumulated models are - * written to the compiler output.

+ *

During normal rounds, this processor performs two tasks:

+ *
    + *
  1. it inspects all root types and their hierarchies for project-wide contract validation,
  2. + *
  3. it processes explicitly annotated {@code @DataversePlugin} classes.
  4. + *
+ * + *

During the final round, all accumulated descriptor and service models are written to the + * compiler output.

* * @param annotations the annotations requested for this round * @param roundEnv the current round environment - * @return {@code true}, because this processor claims the handled annotation + * @return {@code false} so other processors may continue to participate normally */ @Override public boolean process(Set annotations, RoundEnvironment roundEnv) { @@ -218,6 +242,20 @@ public boolean process(Set annotations, RoundEnvironment return false; } + // Inspect all roots, not just annotated types. This enables strict enforcement for + // plugin/provider contracts and lets us discover plain Plugin implementations that + // should have used @DataversePlugin. + for (Element root : roundEnv.getRootElements()) { + if (root instanceof TypeElement typeElement) { + try { + inspectTypeHierarchy(typeElement); + } catch (ProcessorException ignored) { + // The concrete error has already been reported with source location. + // Continue with remaining roots to surface as many problems as possible. + } + } + } + for (Element element : roundEnv.getElementsAnnotatedWith(markerAnnotation)) { if (!(element instanceof TypeElement implementation)) { error(element, "@DataversePlugin may only be used on classes"); @@ -236,7 +274,7 @@ public boolean process(Set annotations, RoundEnvironment writeAllGeneratedResources(); } - return true; + return false; } /** @@ -255,6 +293,13 @@ public boolean process(Set annotations, RoundEnvironment * @param implementation the plugin implementation class */ private void processImplementation(TypeElement implementation) { + String implementationClassName = implementation.getQualifiedName().toString(); + if (!processedImplementations.add(implementationClassName)) { + // The implementation was already processed earlier in this compilation, for example + // when discovered implicitly during type hierarchy inspection. 
+ return; + } + validateImplementationClass(implementation); Set contracts = collectImplementedContracts(implementation); @@ -301,7 +346,6 @@ private void processImplementation(TypeElement implementation) { throw new ProcessorException(); } - String implementationClassName = implementation.getQualifiedName().toString(); String baseContractName = baseContract.getQualifiedName().toString(); descriptors.put( @@ -404,6 +448,150 @@ private Set collectImplementedContracts(TypeElement implementation) return result; } + /** + * Traverses a type hierarchy and applies project-wide validation rules. + * + *

This method exists because the processor validates more than explicitly annotated + * implementations. It also enforces that:

+ *
    + *
  • plugin interfaces carry {@code @PluginContract},
  • + *
  • provider interfaces declare {@code API_LEVEL},
  • + *
  • concrete plugin implementations use {@code @DataversePlugin}, or at least trigger a warning.
  • + *
+ * + * @param typeElement the root type to inspect + */ + private void inspectTypeHierarchy(TypeElement typeElement) { + Deque queue = new ArrayDeque<>(); + queue.addLast(typeElement); + + while (!queue.isEmpty()) { + TypeElement current = queue.removeFirst(); + String qualifiedName = current.getQualifiedName().toString(); + if (!inspectedTypes.add(qualifiedName)) { + continue; + } + + inspectType(current); + + for (TypeMirror iface : current.getInterfaces()) { + TypeElement interfaceType = asTypeElement(iface); + if (interfaceType != null) { + queue.addLast(interfaceType); + } + } + + TypeMirror superclass = current.getSuperclass(); + TypeElement superType = asTypeElement(superclass); + if (superType != null && superclass.getKind() != TypeKind.NONE) { + queue.addLast(superType); + } + } + } + + /** + * Applies validation rules to a single type discovered during hierarchy inspection. + * + * @param typeElement the type to inspect + */ + private void inspectType(TypeElement typeElement) { + if (isPluginInterfaceCandidate(typeElement)) { + if (findAnnotationMirror(typeElement, PLUGIN_CONTRACT_ANNOTATION) == null) { + error(typeElement, "Plugin interfaces must declare @PluginContract"); + throw new ProcessorException(); + } + + validateApiLevelConstant(typeElement); + } + + if (isProviderInterfaceCandidate(typeElement)) { + validateApiLevelConstant(typeElement); + } + + if (isPluginImplementationCandidate(typeElement) + && findAnnotationMirror(typeElement, PLUGIN_IMPLEMENTATION_ANNOTATION) == null) { + warning( + typeElement, + "Plugin implementation should declare @DataversePlugin; processing it implicitly" + ); + + // Even without the annotation, we still process the implementation. This keeps the + // migration path smooth and ensures metadata generation does not depend solely on + // authors remembering one annotation. + processImplementation(typeElement); + } + } + + /** + * Determines whether a type qualifies as an implementation candidate for a plugin. 
+ * + * @param typeElement the type to inspect + * @return {@code true} if the type is a concrete class implementing {@code Plugin} + */ + private boolean isPluginImplementationCandidate(TypeElement typeElement) { + if (typeElement.getKind() != ElementKind.CLASS) { + return false; + } + if (typeElement.getModifiers().contains(Modifier.ABSTRACT)) { + return false; + } + return implementsType(typeElement, PLUGIN_INTERFACE) && !isExactType(typeElement, PLUGIN_INTERFACE); + } + + /** + * Determines whether a type is a plugin interface candidate that must declare {@code @PluginContract}. + * + * @param typeElement the type to inspect + * @return {@code true} if the type is an interface extending {@code Plugin} + */ + private boolean isPluginInterfaceCandidate(TypeElement typeElement) { + return typeElement.getKind() == ElementKind.INTERFACE + && implementsType(typeElement, PLUGIN_INTERFACE) + && !isExactType(typeElement, PLUGIN_INTERFACE); + } + + /** + * Determines whether a type is a provider interface candidate that must declare {@code API_LEVEL}. + * + * @param typeElement the type to inspect + * @return {@code true} if the type is an interface extending {@code CoreProvider} + */ + private boolean isProviderInterfaceCandidate(TypeElement typeElement) { + return typeElement.getKind() == ElementKind.INTERFACE + && implementsType(typeElement, CORE_PROVIDER_INTERFACE) + && !isExactType(typeElement, CORE_PROVIDER_INTERFACE); + } + + /** + * Tests whether the given type is assignable to another type identified by fully qualified name. 
+ * + * @param typeElement the source type + * @param targetTypeName the fully qualified target type name + * @return {@code true} if the source type is assignable to the target type + */ + private boolean implementsType(TypeElement typeElement, String targetTypeName) { + TypeElement targetType = elements.getTypeElement(targetTypeName); + if (targetType == null) { + return false; + } + + return types.isAssignable( + types.erasure(typeElement.asType()), + types.erasure(targetType.asType()) + ); + } + + /** + * Checks whether the given type is exactly the named type itself, not merely a subtype. + * + * @param typeElement the type to inspect + * @param targetTypeName the fully qualified target type name + * @return {@code true} if both names are identical + */ + private boolean isExactType(TypeElement typeElement, String targetTypeName) { + return typeElement.getQualifiedName().contentEquals(targetTypeName); + } + /** * Determines whether the given type is a plugin contract. * diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index ae0d340..6310ce8 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -1,7 +1,7 @@ package io.gdcc.spi.meta.processor; -import io.gdcc.spi.meta.annotations.PluginContract; import io.gdcc.spi.meta.annotations.DataversePlugin; +import io.gdcc.spi.meta.annotations.PluginContract; import io.gdcc.spi.meta.annotations.RequiredProvider; import io.gdcc.spi.meta.plugin.CoreProvider; import io.gdcc.spi.meta.plugin.Plugin; @@ -13,7 +13,9 @@ import java.nio.file.Files; import java.util.List; -import static org.junit.jupiter.api.Assertions.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static 
org.junit.jupiter.api.Assertions.assertTrue; class PluginContractProcessorTest { @@ -96,6 +98,135 @@ public String identity() { assertEquals("test.GoodPlugin", serviceFile.trim()); } + @Test + void warnsWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ImplicitPlugin.java", + """ + package test; + + public class ImplicitPlugin implements TestPlugin { + @Override + public String identity() { + return "implicit"; + } + } + """ + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@DataversePlugin"); + } + + @Test + void createsDescriptorEvenWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 4; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ImplicitPlugin.java", + """ + package test; + + public class ImplicitPlugin implements TestPlugin { + @Override + public String identity() { + return "implicit"; + } + } + """ + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = "META-INF/dataverse/plugins/test_ImplicitPlugin.properties"; + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); + + 
String descriptor = Files.readString(result.generatedFile(descriptorPath)); + assertTrue(descriptor.contains("plugin.class=test.ImplicitPlugin")); + assertTrue(descriptor.contains("plugin.kind=test.TestPlugin")); + assertTrue(descriptor.contains("plugin.test.TestPlugin.level=4")); + } + + @Test + void createsServiceFileEvenWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 5; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ImplicitPlugin.java", + """ + package test; + + public class ImplicitPlugin implements TestPlugin { + @Override + public String identity() { + return "implicit"; + } + } + """ + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should still be generated"); + + String serviceFile = Files.readString(result.generatedFile(servicePath)); + assertEquals("test.ImplicitPlugin", serviceFile.trim()); + } + @Test void failsWhenMultipleBaseContractsAreImplemented() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( @@ -219,6 +350,43 @@ public String identity() { assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "also requires contract test.BasePlugin"); } + @Test + void failsWhenPluginInterfaceLacksPluginContractAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/UndeclaredPluginContract.java", + """ + package test; + + import %s; + + public interface UndeclaredPluginContract extends Plugin { + int 
API_LEVEL = 1; + } + """.formatted(Plugin.class.getCanonicalName()) + ), + source( + "test/UndeclaredPluginImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class UndeclaredPluginImpl implements UndeclaredPluginContract { + @Override + public String identity() { + return "undeclared-contract"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Plugin interfaces must declare @PluginContract"); + } + @Test void suppressesGeneratedServiceFileWhenAutoServiceIsPresent() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( @@ -412,6 +580,387 @@ public String identity() { assertFalse(result.success(), "Compilation should fail"); assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one base plugin contract"); } + + @Test + void failsWhenDataversePluginAnnotatedClassIsNotAPluginImplementation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NotAPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NotAPlugin { + public String identity() { + return "not-a-plugin"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "No implemented plugin contracts found"); + } + + @Test + void doesNotProcessAnnotatedImplementationTwice() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 6; + } + """.formatted( + Plugin.class.getCanonicalName(), + 
PluginContract.class.getCanonicalName() + ) + ), + source( + "test/OncePlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class OncePlugin implements TestPlugin { + @Override + public String identity() { + return "once"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); + + List lines = Files.readAllLines(result.generatedFile(servicePath)) + .stream() + .filter(line -> !line.isBlank()) + .toList(); + + assertEquals(1, lines.size(), "Implementation should only be registered once"); + assertEquals("test.OncePlugin", lines.get(0)); + } + + @Test + void aggregatesMultipleImplementationsIntoOneServiceFile() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/APlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class APlugin implements TestPlugin { + @Override + public String identity() { + return "a"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ), + source( + "test/BPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class BPlugin implements TestPlugin { + @Override + public String identity() { + return "b"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + 
assertTrue(Files.exists(result.generatedFile(servicePath)), "Aggregated service file should be generated"); + + List lines = Files.readAllLines(result.generatedFile(servicePath)) + .stream() + .filter(line -> !line.isBlank()) + .toList(); + + assertEquals(List.of("test.APlugin", "test.BPlugin"), lines); + } + + @Test + void compilesWhenAnnotatedPluginInterfaceHasNoImplementation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/LonelyPluginContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface LonelyPluginContract extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void compilesWhenUnusedProviderInterfaceHasApiLevel() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/LonelyProvider.java", + """ + package test; + + import %s; + + public interface LonelyProvider extends CoreProvider { + int API_LEVEL = 42; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void doesNotWarnForAbstractUnannotatedPluginBaseClass() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AbstractBasePlugin.java", + """ + package test; + + public abstract class AbstractBasePlugin implements TestPlugin { + @Override + public String identity() { 
+ return "base"; + } + } + """ + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + assertDiagnosticDoesNotContain(result, Diagnostic.Kind.WARNING, "@DataversePlugin"); + } + + @Test + void failsWhenIndirectPluginInterfaceLacksPluginContractAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/DerivedPlugin.java", + """ + package test; + + public interface DerivedPlugin extends BasePlugin { + int API_LEVEL = 2; + } + """ + ), + source( + "test/DerivedPluginImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class DerivedPluginImpl implements DerivedPlugin { + @Override + public String identity() { + return "derived"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Plugin interfaces must declare @PluginContract"); + } + + @Test + void suppressesServiceGenerationOnlyForPluginKindManagedByAutoService() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "com/google/auto/service/AutoService.java", + """ + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import java.lang.annotation.Target; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface AutoService { + Class[] value(); + } + """ + ), + source( + "test/PluginTypeA.java", + """ + package test; + + import %s; + import %s; + + 
@PluginContract(kind = PluginContract.Kind.BASE) + public interface PluginTypeA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/PluginTypeB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface PluginTypeB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AutoManagedA.java", + """ + package test; + + import com.google.auto.service.AutoService; + import %s; + + @DataversePlugin + @AutoService(PluginTypeA.class) + public class AutoManagedA implements PluginTypeA { + @Override + public String identity() { + return "a"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ), + source( + "test/NormalB.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NormalB implements PluginTypeB { + @Override + public String identity() { + return "b"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePathA = "META-INF/services/test.PluginTypeA"; + String servicePathB = "META-INF/services/test.PluginTypeB"; + + assertFalse(Files.exists(result.generatedFile(servicePathA)), "PluginTypeA service file should be suppressed"); + assertTrue(Files.exists(result.generatedFile(servicePathB)), "PluginTypeB service file should still be generated"); + + String serviceFileB = Files.readString(result.generatedFile(servicePathB)); + assertEquals("test.NormalB", serviceFileB.trim()); + } } @Nested @@ -970,6 +1519,11 @@ public String identity() { String descriptor = Files.readString(result.generatedFile(descriptorPath)); assertTrue(descriptor.contains("plugin.requires.test.SharedProvider.level=8")); + assertEquals( + 1, + countOccurrences(descriptor, 
"plugin.requires.test.SharedProvider.level=8"), + "Shared provider should only appear once in the descriptor" + ); } } @@ -992,4 +1546,30 @@ private static void assertDiagnosticContains( () -> "Expected diagnostic containing '%s' but got:%n%s".formatted(fragment, result.diagnosticsAsText()) ); } + + private static void assertDiagnosticDoesNotContain( + ProcessorTestCompiler.CompilationResult result, + Diagnostic.Kind kind, + String fragment + ) { + boolean found = result.diagnostics().stream() + .filter(diagnostic -> diagnostic.getKind() == kind) + .map(diagnostic -> diagnostic.getMessage(null)) + .anyMatch(message -> message.contains(fragment)); + + assertFalse( + found, + () -> "Did not expect diagnostic containing '%s' but got:%n%s".formatted(fragment, result.diagnosticsAsText()) + ); + } + + private static int countOccurrences(String text, String fragment) { + int count = 0; + int index = 0; + while ((index = text.indexOf(fragment, index)) >= 0) { + count++; + index += fragment.length(); + } + return count; + } } \ No newline at end of file From da8a7de1d080edcc6b25a8332d7d1a82262b6e67 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Mar 2026 16:35:00 +0100 Subject: [PATCH 16/55] style(meta): improve error messages in `PluginContractProcessor` for better clarity --- .../gdcc/spi/meta/processor/PluginContractProcessor.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 19cdbae..a3be2e9 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -304,7 +304,7 @@ private void processImplementation(TypeElement implementation) { Set contracts = collectImplementedContracts(implementation); if (contracts.isEmpty()) { - error(implementation, "No implemented plugin 
contracts found"); + error(implementation, "No implemented plugin contracts found. Did you annotate with @PluginContract?"); throw new ProcessorException(); } @@ -342,7 +342,7 @@ private void processImplementation(TypeElement implementation) { } if (baseContract == null) { - error(implementation, "Implementation must implement exactly one base plugin contract"); + error(implementation, "Implementation must implement exactly one Kind.BASE @PluginContract"); throw new ProcessorException(); } @@ -387,12 +387,12 @@ private void validateImplementationClass(TypeElement implementation) { } if (!implementation.getModifiers().contains(Modifier.PUBLIC)) { - error(implementation, "Dataverse plugin implementations must be public"); + error(implementation, "@DataversePlugin implementations must be public"); throw new ProcessorException(); } if (implementation.getModifiers().contains(Modifier.ABSTRACT)) { - error(implementation, "Dataverse plugin implementations must not be abstract"); + error(implementation, "@DataversePlugin implementations must not be abstract"); throw new ProcessorException(); } } From 1c4fb1a8776c472ff0ff6198bc7afc1eaa363fa8 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Mar 2026 16:39:07 +0100 Subject: [PATCH 17/55] feat(meta): enforce `@PluginContract` usage restriction to interfaces only Add validation logic in `PluginContractProcessor` to ensure `@PluginContract` is declared exclusively on interfaces. Update processor tests to cover invalid scenarios and verify compilation failures with meaningful diagnostics. 
--- .../processor/PluginContractProcessor.java | 23 +++++++++ .../PluginContractProcessorTest.java | 47 +++++++++++++++++++ 2 files changed, 70 insertions(+) diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index a3be2e9..fa79822 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -495,6 +495,8 @@ private void inspectTypeHierarchy(TypeElement typeElement) { * @param typeElement the type to inspect */ private void inspectType(TypeElement typeElement) { + validatePluginContractUsage(typeElement); + if (isPluginInterfaceCandidate(typeElement)) { if (findAnnotationMirror(typeElement, PLUGIN_CONTRACT_ANNOTATION) == null) { error(typeElement, "Plugin interfaces must declare @PluginContract"); @@ -592,6 +594,27 @@ private boolean isExactType(TypeElement typeElement, String targetTypeName) { return typeElement.getQualifiedName().contentEquals(targetTypeName); } + /** + * Verifies that {@code @PluginContract} is only used on interfaces. + * + *

<p>Although the annotation is intended for SPI interfaces, Java's annotation target model
+     * cannot express "interfaces only". This processor therefore enforces the rule explicitly and
+     * fails compilation when the annotation is placed on classes, enums, records, or other
+     * non-interface types.
+ * + * @param typeElement the type currently being inspected + */ + private void validatePluginContractUsage(TypeElement typeElement) { + if (findAnnotationMirror(typeElement, PLUGIN_CONTRACT_ANNOTATION) == null) { + return; + } + + if (typeElement.getKind() != ElementKind.INTERFACE) { + error(typeElement, "@PluginContract may only be declared on interfaces"); + throw new ProcessorException(); + } + } + /** * Determines whether the given type is a plugin contract. * diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index 6310ce8..d4f2d10 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -98,6 +98,53 @@ public String identity() { assertEquals("test.GoodPlugin", serviceFile.trim()); } + @Test + void failsWhenPluginContractIsPlacedOnImplementationClass() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(kind = PluginContract.Kind.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/InvalidImplementation.java", + """ + package test; + + import %s; + import %s; + + @DataversePlugin + @PluginContract(kind = PluginContract.Kind.BASE) + public class InvalidImplementation implements TestPlugin { + @Override + public String identity() { + return "invalid"; + } + } + """.formatted( + DataversePlugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "@PluginContract may only be 
declared on interfaces"); + } + @Test void warnsWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( From 67cb4fc21724a8f3db2fba9f751b81a5f4e8d41c Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 10 Mar 2026 18:46:20 +0100 Subject: [PATCH 18/55] feat(meta): enforce validation against direct implementation of base interfaces Add new validation logic in `PluginContractProcessor` to prohibit direct implementations of `Plugin` and `CoreProvider` base interfaces. Update diagnostics for clearer error reporting and enhance processor tests for comprehensive coverage. --- .../processor/PluginContractProcessor.java | 59 ++++++++++++++++++- .../PluginContractProcessorTest.java | 36 ++++++++++- 2 files changed, 93 insertions(+), 2 deletions(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index fa79822..9e5bdff 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -304,7 +304,10 @@ private void processImplementation(TypeElement implementation) { Set contracts = collectImplementedContracts(implementation); if (contracts.isEmpty()) { - error(implementation, "No implemented plugin contracts found. 
Did you annotate with @PluginContract?"); + error( + implementation, + "No implemented plugin contracts found; implementations must implement a specific @PluginContract interface" + ); throw new ProcessorException(); } @@ -496,6 +499,7 @@ private void inspectTypeHierarchy(TypeElement typeElement) { */ private void inspectType(TypeElement typeElement) { validatePluginContractUsage(typeElement); + validateDirectBaseTypeImplementations(typeElement); if (isPluginInterfaceCandidate(typeElement)) { if (findAnnotationMirror(typeElement, PLUGIN_CONTRACT_ANNOTATION) == null) { @@ -524,6 +528,59 @@ && findAnnotationMirror(typeElement, PLUGIN_IMPLEMENTATION_ANNOTATION) == null) } } + /** + * Rejects direct implementations of the foundational base types {@code Plugin} and + * {@code CoreProvider}. + * + *

<p>These two types are infrastructure-level marker/base interfaces only. Loadable plugins
+     * and concrete providers must instead implement a specific contract interface extending one
+     * of these base types. Otherwise, no meaningful compatibility contract can be derived.
+ * + * @param typeElement the type currently being inspected + */ + private void validateDirectBaseTypeImplementations(TypeElement typeElement) { + if (typeElement.getKind() != ElementKind.CLASS) { + return; + } + + if (directlyImplementsType(typeElement, PLUGIN_INTERFACE)) { + error( + typeElement, + "Plugin implementations must implement a specific plugin contract interface, not Plugin directly" + ); + throw new ProcessorException(); + } + + if (directlyImplementsType(typeElement, CORE_PROVIDER_INTERFACE)) { + error( + typeElement, + "Core provider implementations must implement a specific provider interface, not CoreProvider directly" + ); + throw new ProcessorException(); + } + } + + /** + * Checks whether a type directly declares the given interface in its {@code implements} clause. + * + *

<p>This is stricter than assignability: it only matches explicit direct implementation and is
+     * used to reject classes that target the framework base interfaces {@code Plugin} or
+     * {@code CoreProvider} directly.
+ * + * @param typeElement the type to inspect + * @param targetTypeName the fully qualified interface name to look for + * @return {@code true} if the type directly implements the target interface + */ + private boolean directlyImplementsType(TypeElement typeElement, String targetTypeName) { + for (TypeMirror interfaceType : typeElement.getInterfaces()) { + TypeElement interfaceElement = asTypeElement(interfaceType); + if (interfaceElement != null && interfaceElement.getQualifiedName().contentEquals(targetTypeName)) { + return true; + } + } + return false; + } + /** * Determines whether a type qualifies as an implementation candidate for a plugin. * diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index d4f2d10..b4c2139 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -625,7 +625,7 @@ public String identity() { )); assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one base plugin contract"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one Kind.BASE @PluginContract"); } @Test @@ -652,6 +652,39 @@ public String identity() { assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "No implemented plugin contracts found"); } + @Test + void failsWhenImplementationDirectlyImplementsPlugin() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/RawPluginImpl.java", + """ + package test; + + import %s; + import %s; + + @DataversePlugin + public class RawPluginImpl implements Plugin { + @Override + public String identity() { + return "raw-plugin"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + DataversePlugin.class.getCanonicalName() + ) + ) + )); + + 
assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains( + result, + Diagnostic.Kind.ERROR, + "must implement a specific plugin contract interface, not Plugin directly" + ); + } + @Test void doesNotProcessAnnotatedImplementationTwice() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( @@ -817,6 +850,7 @@ public interface LonelyProvider extends CoreProvider { } @Test + // because we want to allow base classes, as long as concrete classes are annotated @DataversePlugin void doesNotWarnForAbstractUnannotatedPluginBaseClass() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( From 531366df16c2b75accffd715f82b508047e1a2e5 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 11 Mar 2026 16:28:14 +0100 Subject: [PATCH 19/55] feat(meta): add `PluginDescriptor` and `PluginDescriptorFormat` for plugin metadata management Introduce classes for in-memory representation (`PluginDescriptor`) and serialization/deserialization of plugin metadata (`PluginDescriptorFormat`). Add comprehensive unit tests to verify metadata handling and ensure round-trip consistency. 
--- .../spi/meta/descriptor/PluginDescriptor.java | 172 +++++++++ .../descriptor/PluginDescriptorFormat.java | 190 +++++++++ .../processor/PluginContractProcessor.java | 58 +-- .../PluginDescriptorFormatTest.java | 363 ++++++++++++++++++ .../PluginContractProcessorTest.java | 58 ++- 5 files changed, 773 insertions(+), 68 deletions(-) create mode 100644 meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java create mode 100644 meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormat.java create mode 100644 meta/src/test/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormatTest.java diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java new file mode 100644 index 0000000..82038b4 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java @@ -0,0 +1,172 @@ +package io.gdcc.spi.meta.descriptor; + +import java.util.Map; +import java.util.Objects; +import java.util.OptionalInt; + +/** + * In-memory representation of one generated plugin descriptor. + * + * @param pluginClass implementation class name + * @param pluginKind fully qualified base contract name + * @param contracts map of implemented contract names to API levels + * @param requiredProviders map of required provider names to API levels + */ +public record PluginDescriptor( + String pluginClass, + String pluginKind, + Map contracts, + Map requiredProviders +) { + + /** + * Creates a new descriptor and defensively copies the contract/provider maps. + * + *

<p>This ensures the descriptor remains immutable even if callers pass in
+     * mutable maps. All arguments must be non-null.
+ */ + public PluginDescriptor { + Objects.requireNonNull(pluginClass); + Objects.requireNonNull(pluginKind); + contracts = Map.copyOf(Objects.requireNonNull(contracts)); + requiredProviders = Map.copyOf(Objects.requireNonNull(requiredProviders)); + } + + /** + * Checks whether this plugin declares the given implemented contract. + * + * @param contractFqcn the fully qualified contract class name + * @return {@code true} if the contract is present in this descriptor + */ + public boolean implementsContract(String contractFqcn) { + Objects.requireNonNull(contractFqcn); + return contracts.containsKey(contractFqcn); + } + + /** + * Checks whether this plugin declares the given implemented contract. + * + * @param contractClass the contract class + * @return {@code true} if the contract is present in this descriptor + */ + public boolean implementsContract(Class contractClass) { + Objects.requireNonNull(contractClass); + return implementsContract(contractClass.getCanonicalName()); + } + + /** + * Returns the declared API level for the given implemented contract, if present. + * + * @param contractFqcn the fully qualified contract class name + * @return the declared API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public OptionalInt contractLevel(String contractFqcn) { + Objects.requireNonNull(contractFqcn); + Integer value = contracts.get(contractFqcn); + return value == null ? OptionalInt.empty() : OptionalInt.of(value); + } + + /** + * Returns the declared API level for the given implemented contract, if present. + * + * @param contractClass the contract class + * @return the declared API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public OptionalInt contractLevel(Class contractClass) { + Objects.requireNonNull(contractClass); + return contractLevel(contractClass.getCanonicalName()); + } + + /** + * Returns the declared API level for the given implemented contract. 
+ * + * @param contractFqcn the fully qualified contract class name + * @return the declared API level + * @throws IllegalArgumentException if the contract is not present in this descriptor + */ + public int contractLevelOrThrow(String contractFqcn) { + return contractLevel(contractFqcn) + .orElseThrow(() -> new IllegalArgumentException("Unknown contract " + contractFqcn)); + } + + /** + * Returns the declared API level for the given implemented contract. + * + * @param contractClass the contract class + * @return the declared API level + * @throws IllegalArgumentException if the contract is not present in this descriptor + */ + public int contractLevelOrThrow(Class contractClass) { + Objects.requireNonNull(contractClass); + return contractLevelOrThrow(contractClass.getCanonicalName()); + } + + /** + * Checks whether this plugin declares the given required provider. + * + * @param providerFqcn the fully qualified provider class name + * @return {@code true} if the provider is present in this descriptor + */ + public boolean requiresProvider(String providerFqcn) { + Objects.requireNonNull(providerFqcn); + return requiredProviders.containsKey(providerFqcn); + } + + /** + * Checks whether this plugin declares the given required provider. + * + * @param providerClass the provider class + * @return {@code true} if the provider is present in this descriptor + */ + public boolean requiresProvider(Class providerClass) { + Objects.requireNonNull(providerClass); + return requiresProvider(providerClass.getCanonicalName()); + } + + /** + * Returns the declared required API level for the given provider, if present. + * + * @param providerFqcn the fully qualified provider class name + * @return the required provider API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public OptionalInt requiredProviderLevel(String providerFqcn) { + Objects.requireNonNull(providerFqcn); + Integer value = requiredProviders.get(providerFqcn); + return value == null ? 
OptionalInt.empty() : OptionalInt.of(value); + } + + /** + * Returns the declared required API level for the given provider, if present. + * + * @param providerClass the provider class + * @return the required provider API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public OptionalInt requiredProviderLevel(Class providerClass) { + Objects.requireNonNull(providerClass); + return requiredProviderLevel(providerClass.getCanonicalName()); + } + + /** + * Returns the declared required API level for the given provider. + * + * @param providerFqcn the fully qualified provider class name + * @return the required provider API level + * @throws IllegalArgumentException if the provider is not present in this descriptor + */ + public int requiredProviderLevelOrThrow(String providerFqcn) { + return requiredProviderLevel(providerFqcn) + .orElseThrow(() -> new IllegalArgumentException("Unknown required provider " + providerFqcn)); + } + + /** + * Returns the declared required API level for the given provider. 
+ * + * @param providerClass the provider class + * @return the required provider API level + * @throws IllegalArgumentException if the provider is not present in this descriptor + */ + public int requiredProviderLevelOrThrow(Class providerClass) { + Objects.requireNonNull(providerClass); + return requiredProviderLevelOrThrow(providerClass.getCanonicalName()); + } +} \ No newline at end of file diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormat.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormat.java new file mode 100644 index 0000000..0262882 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormat.java @@ -0,0 +1,190 @@ +package io.gdcc.spi.meta.descriptor; + +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.io.Writer; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Properties; + +/** + * Defines constants for the format and structure of plugin descriptor files. + * These descriptors provide metadata about plugins, including their + * implementation class, type, contracts, and required providers. + * + *
    + * - DESCRIPTOR_DIRECTORY: Specifies the directory where plugin descriptor files are located. + * - PLUGIN_CLASS_FIELD: Defines the key used to identify the plugin's implementation class. + * - PLUGIN_KIND_FIELD: Defines the key used to specify the base contract type of the plugin. + * - CONTRACT_PREFIX and CONTRACT_SUFFIX: Define the keys used to denote the contracts + * implemented by the plugin and their associated API levels. + * - REQUIRED_PROVIDER_PREFIX and REQUIRED_PROVIDER_SUFFIX: Define the keys used to represent + * required providers and their associated API levels. + *
+ */ +public final class PluginDescriptorFormat { + public static final String DESCRIPTOR_DIRECTORY = "META-INF/dataverse/plugins/"; + public static final String DESCRIPTOR_EXTENSION = ".properties"; + + public static final String PLUGIN_CLASS_FIELD = "plugin.class"; + public static final String PLUGIN_KIND_FIELD = "plugin.kind"; + public static final String CONTRACT_PREFIX = "plugin.implements."; + public static final String CONTRACT_SUFFIX = ".level"; + public static final String REQUIRED_PROVIDER_PREFIX = "plugin.requires."; + public static final String REQUIRED_PROVIDER_SUFFIX = ".level"; + + private PluginDescriptorFormat() { + /* Intentionally left blank for helper class */ + } + + public static String toFilename(Class klazz) { + return toFilename(klazz.getCanonicalName()); + } + + public static String toFilename(String fqcn) { + return fqcn + DESCRIPTOR_EXTENSION; + } + + public static String toPath(Class klazz) { + return toPath(klazz.getCanonicalName()); + } + + public static String toPath(String fqcn) { + return DESCRIPTOR_DIRECTORY + toFilename(fqcn); + } + + public static String toContractLevel(Class contractClass) { + return toContractLevel(contractClass.getCanonicalName()); + } + + public static String toContractLevel(String contractFQCN) { + return CONTRACT_PREFIX + contractFQCN + CONTRACT_SUFFIX; + } + + public static String toRequiredProviderLevel(Class providerClass) { + return toRequiredProviderLevel(providerClass.getCanonicalName()); + } + + public static String toRequiredProviderLevel(String providerFQCN) { + return REQUIRED_PROVIDER_PREFIX + providerFQCN + REQUIRED_PROVIDER_SUFFIX; + } + + /** + * Serializes the provided {@link PluginDescriptor} into the given {@link Writer} + * in the form of a properties file, encoding plugin metadata such as plugin class, + * plugin kind, implemented contracts, and required providers. 
+ * + * @param descriptor the {@link PluginDescriptor} containing the plugin metadata to be serialized + * @param writer the {@link Writer} where the descriptor properties will be written + * @throws IOException if an I/O error occurs while writing to the {@link Writer} + */ + public static void write(PluginDescriptor descriptor, Writer writer) throws IOException { + Properties properties = new Properties(); + properties.setProperty(PLUGIN_CLASS_FIELD, descriptor.pluginClass()); + properties.setProperty(PLUGIN_KIND_FIELD, descriptor.pluginKind()); + + descriptor.contracts().forEach((contract, level) -> + properties.setProperty(toContractLevel(contract), Integer.toString(level))); + + descriptor.requiredProviders().forEach((provider, level) -> + properties.setProperty(toRequiredProviderLevel(provider), Integer.toString(level))); + + properties.store(writer, "Generated plugin contract metadata"); + } + + /** + * Reads a plugin descriptor from the serialized properties format. + * + *

The returned descriptor contains the mandatory plugin class and base contract fields, + * plus all parsed contract/provider API levels found in the input.

+ * + * @param reader the character stream containing descriptor properties + * @return the parsed descriptor + * @throws IOException if the properties cannot be read + * @throws IllegalArgumentException if mandatory fields are missing or if any level value + * cannot be parsed as an integer + */ + public static PluginDescriptor read(Reader reader) throws IOException { + Properties properties = new Properties(); + properties.load(reader); + + String pluginClass = properties.getProperty(PLUGIN_CLASS_FIELD); + if (pluginClass == null || pluginClass.isBlank()) { + throw new IllegalArgumentException("Missing required property " + PLUGIN_CLASS_FIELD); + } + + String pluginKind = properties.getProperty(PLUGIN_KIND_FIELD); + if (pluginKind == null || pluginKind.isBlank()) { + throw new IllegalArgumentException("Missing required property " + PLUGIN_KIND_FIELD); + } + + Map contracts = new LinkedHashMap<>(); + Map requiredProviders = new LinkedHashMap<>(); + + for (String key : properties.stringPropertyNames()) { + if (PLUGIN_CLASS_FIELD.equals(key) || PLUGIN_KIND_FIELD.equals(key)) { + continue; + } + + if (key.startsWith(CONTRACT_PREFIX) && key.endsWith(CONTRACT_SUFFIX)) { + String contractName = key.substring( + CONTRACT_PREFIX.length(), + key.length() - CONTRACT_SUFFIX.length() + ); + contracts.put(contractName, parseLevel(properties.getProperty(key), key)); + } + + if (key.startsWith(REQUIRED_PROVIDER_PREFIX) && key.endsWith(REQUIRED_PROVIDER_SUFFIX)) { + String providerName = key.substring( + REQUIRED_PROVIDER_PREFIX.length(), + key.length() - REQUIRED_PROVIDER_SUFFIX.length() + ); + requiredProviders.put(providerName, parseLevel(properties.getProperty(key), key)); + } + } + + return new PluginDescriptor( + pluginClass, + pluginKind, + Map.copyOf(contracts), + Map.copyOf(requiredProviders) + ); + } + + /** + * Reads a plugin descriptor from the given string content. + * + * This method parses the input string into a {@link PluginDescriptor} object. 
It internally utilizes + * a {@link StringReader} to read the string and expects the content to be in a properties-based serialized format. + * + * @param content the string content containing serialized descriptor properties + * @return the parsed {@link PluginDescriptor} + * @throws RuntimeException if an I/O error occurs + * @throws IllegalArgumentException if mandatory fields are missing + */ + public static PluginDescriptor read(String content) { + PluginDescriptor descriptor = null; + + try (StringReader reader = new StringReader(content)) { + descriptor = read(reader); + } catch (IOException e) { + // As we read from an in-memory string, this seems highly unlikely to happen. + throw new RuntimeException(e); + } + + return descriptor; + } + + private static int parseLevel(String value, String key) { + if (value == null || value.isBlank()) { + throw new IllegalArgumentException("Missing level value for property " + key); + } + + try { + return Integer.parseInt(value); + } catch (NumberFormatException e) { + throw new IllegalArgumentException("Invalid integer value for property " + key + ": " + value, e); + } + } +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 9e5bdff..e4d1e92 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -1,6 +1,8 @@ package io.gdcc.spi.meta.processor; import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.descriptor.PluginDescriptor; +import io.gdcc.spi.meta.descriptor.PluginDescriptorFormat; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.ProcessingEnvironment; @@ -32,7 +34,6 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import java.util.Properties; import java.util.Set; import java.util.TreeSet; 
@@ -141,7 +142,7 @@ public final class PluginContractProcessor extends AbstractProcessor { *

Descriptors are written only after processing is over, which keeps resource generation * deterministic and avoids partial aggregate state.

*/ - private final Map descriptors = new LinkedHashMap<>(); + private final Map descriptors = new LinkedHashMap<>(); /** * Service registrations grouped by base contract name. @@ -336,7 +337,14 @@ private void processImplementation(TypeElement implementation) { // contract interface visible during this compilation. This preserves the build-time // contract snapshot we later need at runtime. int contractApiLevel = readIntConstant(contract, API_LEVEL_FIELD); - contractLevels.put(contract.getQualifiedName().toString(), contractApiLevel); + String contractFQCN = contract.getQualifiedName().toString(); + // The following is just a precaution. As we look into these during compile time, it's hard to imagine + // a scenario where the levels ever actually differ. + if (contractLevels.containsKey(contractFQCN) && contractLevels.get(contractFQCN) != contractApiLevel) { + error(implementation, "Conflicting API levels on contract implementation: " + contractFQCN); + } else { + contractLevels.put(contract.getQualifiedName().toString(), contractApiLevel); + } // Provider requirements accumulate across all implemented contracts/capabilities. // Conflicting requirements are rejected below. @@ -353,11 +361,11 @@ private void processImplementation(TypeElement implementation) { descriptors.put( implementationClassName, - new GeneratedDescriptorModel( + new PluginDescriptor( implementationClassName, baseContractName, - Map.copyOf(contractLevels), - Map.copyOf(providerLevels) + contractLevels, + providerLevels ) ); @@ -893,7 +901,7 @@ private boolean hasAutoServiceAnnotation(TypeElement implementation) { * are not externally managed via {@code @AutoService}.

*/ private void writeAllGeneratedResources() { - for (GeneratedDescriptorModel descriptor : descriptors.values()) { + for (PluginDescriptor descriptor : descriptors.values()) { writeDescriptor(descriptor); } @@ -911,25 +919,16 @@ private void writeAllGeneratedResources() { * * @param descriptor the descriptor model to serialize */ - private void writeDescriptor(GeneratedDescriptorModel descriptor) { - String resourceName = DESCRIPTOR_DIRECTORY + descriptor.pluginClass().replace('.', '_') + ".properties"; + private void writeDescriptor(PluginDescriptor descriptor) { + String resourceName = PluginDescriptorFormat.toPath(descriptor.pluginClass()); try { - FileObject resource = processingEnv.getFiler() + FileObject resource = processingEnv + .getFiler() .createResource(StandardLocation.CLASS_OUTPUT, "", resourceName); - Properties properties = new Properties(); - properties.setProperty("plugin.class", descriptor.pluginClass()); - properties.setProperty("plugin.kind", descriptor.pluginKind()); - - descriptor.contracts().forEach((contract, level) -> - properties.setProperty("plugin." + contract + ".level", Integer.toString(level))); - - descriptor.requiredProviders().forEach((provider, level) -> - properties.setProperty("plugin.requires." + provider + ".level", Integer.toString(level))); - try (Writer writer = resource.openWriter()) { - properties.store(writer, "Generated plugin contract metadata"); + PluginDescriptorFormat.write(descriptor, writer); } } catch (IOException e) { processingEnv.getMessager().printMessage( @@ -941,6 +940,7 @@ private void writeDescriptor(GeneratedDescriptorModel descriptor) { /** * Writes one ServiceLoader registration file for a base contract. 
+ * This is simply a re-implementation of what we did before with @AutoService and their processor * * @param serviceTypeName the fully qualified name of the service interface * @param implementations the implementation class names to register @@ -1162,22 +1162,6 @@ private record PluginContractModel( ) { } - /** - * Internal in-memory representation of one generated plugin descriptor. - * - * @param pluginClass implementation class name - * @param pluginKind fully qualified base contract name - * @param contracts map of implemented contract names to API levels - * @param requiredProviders map of required provider names to API levels - */ - private record GeneratedDescriptorModel( - String pluginClass, - String pluginKind, - Map contracts, - Map requiredProviders - ) { - } - /** * Local control-flow exception used to abort processing of a single implementation after an error. * diff --git a/meta/src/test/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormatTest.java b/meta/src/test/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormatTest.java new file mode 100644 index 0000000..05c31c2 --- /dev/null +++ b/meta/src/test/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormatTest.java @@ -0,0 +1,363 @@ +package io.gdcc.spi.meta.descriptor; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.io.StringReader; +import java.io.StringWriter; +import java.util.Map; +import java.util.Properties; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class PluginDescriptorFormatTest { + + @Nested + class Fields { + @Test + void toFilename_UsesDescriptorExtension_ForString() { + String result = PluginDescriptorFormat.toFilename("io.gdcc.example.MyPlugin"); + + assertEquals("io.gdcc.example.MyPlugin.properties", result); + } + + @Test + void 
toFilename_UsesDescriptorExtension_ForClass() { + String result = PluginDescriptorFormat.toFilename(SamplePlugin.class); + + assertEquals("io.gdcc.spi.meta.descriptor.PluginDescriptorFormatTest.SamplePlugin.properties", result); + } + + @Test + void toPath_PrependsDescriptorDirectory_ForString() { + String result = PluginDescriptorFormat.toPath("io.gdcc.example.MyPlugin"); + + assertEquals( + "META-INF/dataverse/plugins/io.gdcc.example.MyPlugin.properties", + result + ); + } + + @Test + void toPath_PrependsDescriptorDirectory_ForClass() { + String result = PluginDescriptorFormat.toPath(SamplePlugin.class); + + assertEquals( + "META-INF/dataverse/plugins/io.gdcc.spi.meta.descriptor.PluginDescriptorFormatTest.SamplePlugin.properties", + result + ); + } + + @Test + void toContractLevel_CreatesExpectedPropertyKey_ForString() { + String result = PluginDescriptorFormat.toContractLevel("io.gdcc.example.ExportPlugin"); + + assertEquals( + "plugin.implements.io.gdcc.example.ExportPlugin.level", + result + ); + } + + @Test + void toContractLevel_CreatesExpectedPropertyKey_ForClass() { + String result = PluginDescriptorFormat.toContractLevel(SampleContract.class); + + assertEquals( + "plugin.implements.io.gdcc.spi.meta.descriptor.PluginDescriptorFormatTest.SampleContract.level", + result + ); + } + + @Test + void toRequiredProviderLevel_CreatesExpectedPropertyKey_ForString() { + String result = PluginDescriptorFormat.toRequiredProviderLevel("io.gdcc.example.ExportProvider"); + + assertEquals( + "plugin.requires.io.gdcc.example.ExportProvider.level", + result + ); + } + + @Test + void toRequiredProviderLevel_CreatesExpectedPropertyKey_ForClass() { + String result = PluginDescriptorFormat.toRequiredProviderLevel(SampleProvider.class); + + assertEquals( + "plugin.requires.io.gdcc.spi.meta.descriptor.PluginDescriptorFormatTest.SampleProvider.level", + result + ); + } + } + + @Nested + class Write { + @Test + void write_WritesCoreFieldsContractsAndProviders() throws IOException { 
+ PluginDescriptor descriptor = new PluginDescriptor( + "io.gdcc.example.MyPlugin", + "io.gdcc.example.ExportPlugin", + Map.of( + "io.gdcc.example.ExportPlugin", 2, + "io.gdcc.example.XmlCapability", 1 + ), + Map.of( + "io.gdcc.example.ExportProvider", 5 + ) + ); + + StringWriter writer = new StringWriter(); + + PluginDescriptorFormat.write(descriptor, writer); + + Properties properties = loadProperties(writer.toString()); + + assertEquals("io.gdcc.example.MyPlugin", properties.getProperty(PluginDescriptorFormat.PLUGIN_CLASS_FIELD)); + assertEquals("io.gdcc.example.ExportPlugin", properties.getProperty(PluginDescriptorFormat.PLUGIN_KIND_FIELD)); + assertEquals("2", properties.getProperty("plugin.implements.io.gdcc.example.ExportPlugin.level")); + assertEquals("1", properties.getProperty("plugin.implements.io.gdcc.example.XmlCapability.level")); + assertEquals("5", properties.getProperty("plugin.requires.io.gdcc.example.ExportProvider.level")); + } + + @Test + void write_WritesCoreFields_WhenContractsAndProvidersAreEmpty() throws IOException { + PluginDescriptor descriptor = new PluginDescriptor( + "io.gdcc.example.MinimalPlugin", + "io.gdcc.example.ExportPlugin", + Map.of(), + Map.of() + ); + + StringWriter writer = new StringWriter(); + + PluginDescriptorFormat.write(descriptor, writer); + + Properties properties = loadProperties(writer.toString()); + + assertEquals("io.gdcc.example.MinimalPlugin", properties.getProperty(PluginDescriptorFormat.PLUGIN_CLASS_FIELD)); + assertEquals("io.gdcc.example.ExportPlugin", properties.getProperty(PluginDescriptorFormat.PLUGIN_KIND_FIELD)); + assertEquals(2, properties.size(), "Only the two mandatory core fields should be present"); + } + + @Test + void write_UsesHelperGeneratedPropertyKeys() throws IOException { + PluginDescriptor descriptor = new PluginDescriptor( + "io.gdcc.example.MyPlugin", + "io.gdcc.example.ExportPlugin", + Map.of("io.gdcc.example.ExportPlugin", 7), + Map.of("io.gdcc.example.ExportProvider", 11) + ); + + 
StringWriter writer = new StringWriter(); + + PluginDescriptorFormat.write(descriptor, writer); + + String serialized = writer.toString(); + + assertTrue(serialized.contains(PluginDescriptorFormat.toContractLevel("io.gdcc.example.ExportPlugin") + "=7")); + assertTrue(serialized.contains(PluginDescriptorFormat.toRequiredProviderLevel("io.gdcc.example.ExportProvider") + "=11")); + } + + @Test + void stringAndClassOverloadsProduceEquivalentResults() { + assertEquals( + PluginDescriptorFormat.toFilename(SamplePlugin.class), + PluginDescriptorFormat.toFilename(SamplePlugin.class.getCanonicalName()) + ); + + assertEquals( + PluginDescriptorFormat.toPath(SamplePlugin.class), + PluginDescriptorFormat.toPath(SamplePlugin.class.getCanonicalName()) + ); + + assertEquals( + PluginDescriptorFormat.toContractLevel(SampleContract.class), + PluginDescriptorFormat.toContractLevel(SampleContract.class.getCanonicalName()) + ); + + assertEquals( + PluginDescriptorFormat.toRequiredProviderLevel(SampleProvider.class), + PluginDescriptorFormat.toRequiredProviderLevel(SampleProvider.class.getCanonicalName()) + ); + } + + private static Properties loadProperties(String text) throws IOException { + Properties properties = new Properties(); + properties.load(new java.io.StringReader(text)); + return properties; + } + } + + @Nested + class Read { + @Test + void read_ReadsMandatoryFieldsAndEmptyMaps() throws IOException { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + """; + + PluginDescriptor descriptor = PluginDescriptorFormat.read(new StringReader(serialized)); + + assertEquals("io.gdcc.example.MyPlugin", descriptor.pluginClass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.pluginKind()); + assertEquals(Map.of(), descriptor.contracts()); + assertEquals(Map.of(), descriptor.requiredProviders()); + } + + @Test + void read_ReadsContractsAndRequiredProviders() throws IOException { + String serialized = """ + 
plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=2 + plugin.implements.io.gdcc.example.XmlCapability.level=1 + plugin.requires.io.gdcc.example.ExportProvider.level=5 + plugin.requires.io.gdcc.example.BatchProvider.level=9 + """; + + PluginDescriptor descriptor = PluginDescriptorFormat.read(new StringReader(serialized)); + + assertEquals("io.gdcc.example.MyPlugin", descriptor.pluginClass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.pluginKind()); + assertEquals( + Map.of( + "io.gdcc.example.ExportPlugin", 2, + "io.gdcc.example.XmlCapability", 1 + ), + descriptor.contracts() + ); + assertEquals( + Map.of( + "io.gdcc.example.ExportProvider", 5, + "io.gdcc.example.BatchProvider", 9 + ), + descriptor.requiredProviders() + ); + } + + @Test + void read_IgnoresUnknownProperties() throws IOException { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=2 + plugin.requires.io.gdcc.example.ExportProvider.level=5 + plugin.something.unrelated=value + unrelated.field=42 + """; + + PluginDescriptor descriptor = PluginDescriptorFormat.read(new StringReader(serialized)); + + assertEquals("io.gdcc.example.MyPlugin", descriptor.pluginClass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.pluginKind()); + assertEquals(Map.of("io.gdcc.example.ExportPlugin", 2), descriptor.contracts()); + assertEquals(Map.of("io.gdcc.example.ExportProvider", 5), descriptor.requiredProviders()); + } + + @Test + void read_FailsWhenPluginClassIsMissing() { + String serialized = """ + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=2 + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> PluginDescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals("Missing required property 
plugin.class", ex.getMessage()); + } + + @Test + void read_FailsWhenPluginKindIsMissing() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=2 + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> PluginDescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals("Missing required property plugin.kind", ex.getMessage()); + } + + @Test + void read_FailsWhenContractLevelIsNotAnInteger() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=not-a-number + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> PluginDescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals( + "Invalid integer value for property plugin.implements.io.gdcc.example.ExportPlugin.level: not-a-number", + ex.getMessage() + ); + } + + @Test + void read_FailsWhenRequiredProviderLevelIsNotAnInteger() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.requires.io.gdcc.example.ExportProvider.level=nope + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> PluginDescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals( + "Invalid integer value for property plugin.requires.io.gdcc.example.ExportProvider.level: nope", + ex.getMessage() + ); + } + + @Test + void read_RoundTripsWithWrite() throws IOException { + PluginDescriptor original = new PluginDescriptor( + "io.gdcc.example.MyPlugin", + "io.gdcc.example.ExportPlugin", + Map.of( + "io.gdcc.example.ExportPlugin", 2, + "io.gdcc.example.XmlCapability", 1 + ), + Map.of( + "io.gdcc.example.ExportProvider", 5 + ) + ); + + StringWriter writer = new StringWriter(); + PluginDescriptorFormat.write(original, writer); + + PluginDescriptor 
reread = PluginDescriptorFormat.read(new StringReader(writer.toString())); + + assertEquals(original.pluginClass(), reread.pluginClass()); + assertEquals(original.pluginKind(), reread.pluginKind()); + assertEquals(original.contracts(), reread.contracts()); + assertEquals(original.requiredProviders(), reread.requiredProviders()); + } + } + + + private static final class SamplePlugin { + } + + private interface SampleContract { + } + + private interface SampleProvider { + } +} \ No newline at end of file diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index b4c2139..121a665 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -3,6 +3,8 @@ import io.gdcc.spi.meta.annotations.DataversePlugin; import io.gdcc.spi.meta.annotations.PluginContract; import io.gdcc.spi.meta.annotations.RequiredProvider; +import io.gdcc.spi.meta.descriptor.PluginDescriptor; +import io.gdcc.spi.meta.descriptor.PluginDescriptorFormat; import io.gdcc.spi.meta.plugin.CoreProvider; import io.gdcc.spi.meta.plugin.Plugin; import org.junit.jupiter.api.Nested; @@ -82,17 +84,17 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = "META-INF/dataverse/plugins/test_GoodPlugin.properties"; + String descriptorPath = PluginDescriptorFormat.toPath("test.GoodPlugin"); String servicePath = "META-INF/services/test.TestPlugin"; assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); - String descriptor = Files.readString(result.generatedFile(descriptorPath)); - assertTrue(descriptor.contains("plugin.class=test.GoodPlugin")); - 
assertTrue(descriptor.contains("plugin.kind=test.TestPlugin")); - assertTrue(descriptor.contains("plugin.test.TestPlugin.level=3")); - assertTrue(descriptor.contains("plugin.requires.test.TestProvider.level=7")); + PluginDescriptor descriptor = PluginDescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.GoodPlugin", descriptor.pluginClass()); + assertEquals("test.TestPlugin", descriptor.pluginKind()); + assertEquals(3, descriptor.contractLevel("test.TestPlugin").getAsInt()); + assertEquals(7, descriptor.requiredProviderLevel("test.TestProvider").getAsInt()); String serviceFile = Files.readString(result.generatedFile(servicePath)); assertEquals("test.GoodPlugin", serviceFile.trim()); @@ -221,13 +223,13 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = "META-INF/dataverse/plugins/test_ImplicitPlugin.properties"; + String descriptorPath = PluginDescriptorFormat.toPath("test.ImplicitPlugin"); assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); - String descriptor = Files.readString(result.generatedFile(descriptorPath)); - assertTrue(descriptor.contains("plugin.class=test.ImplicitPlugin")); - assertTrue(descriptor.contains("plugin.kind=test.TestPlugin")); - assertTrue(descriptor.contains("plugin.test.TestPlugin.level=4")); + PluginDescriptor descriptor = PluginDescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.ImplicitPlugin", descriptor.pluginClass()); + assertEquals("test.TestPlugin", descriptor.pluginKind()); + assertEquals(4, descriptor.contractLevel("test.TestPlugin").getAsInt()); } @Test @@ -493,7 +495,7 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = "META-INF/dataverse/plugins/test_AutoServicePlugin.properties"; + String descriptorPath = 
PluginDescriptorFormat.toPath("test.AutoServicePlugin"); String servicePath = "META-INF/services/test.TestPlugin"; assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); @@ -1400,16 +1402,16 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = "META-INF/dataverse/plugins/test_TransitiveImpl.properties"; + String descriptorPath = PluginDescriptorFormat.toPath("test.TransitiveImpl"); assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); - String descriptor = Files.readString(result.generatedFile(descriptorPath)); - assertTrue(descriptor.contains("plugin.class=test.TransitiveImpl")); - assertTrue(descriptor.contains("plugin.kind=test.BasePlugin")); - assertTrue(descriptor.contains("plugin.test.BasePlugin.level=3")); - assertTrue(descriptor.contains("plugin.test.IntermediateCapability.level=4")); - assertTrue(descriptor.contains("plugin.test.LeafCapability.level=5")); - assertTrue(descriptor.contains("plugin.requires.test.TestProvider.level=11")); + PluginDescriptor descriptor = PluginDescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.TransitiveImpl", descriptor.pluginClass()); + assertEquals("test.BasePlugin", descriptor.pluginKind()); + assertEquals(3, descriptor.contractLevel("test.BasePlugin").getAsInt()); + assertEquals(4, descriptor.contractLevel("test.IntermediateCapability").getAsInt()); + assertEquals(5, descriptor.contractLevel("test.LeafCapability").getAsInt()); + assertEquals(11, descriptor.requiredProviderLevel("test.TestProvider").getAsInt()); } @Test @@ -1487,8 +1489,8 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String autoDescriptorPath = "META-INF/dataverse/plugins/test_AutoServiceImpl.properties"; - String normalDescriptorPath = "META-INF/dataverse/plugins/test_NormalImpl.properties"; + String 
autoDescriptorPath = PluginDescriptorFormat.toPath("test.AutoServiceImpl"); + String normalDescriptorPath = PluginDescriptorFormat.toPath("test.NormalImpl"); String servicePath = "META-INF/services/test.TestPlugin"; assertTrue(Files.exists(result.generatedFile(autoDescriptorPath)), "AutoService descriptor should be generated"); @@ -1596,15 +1598,9 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = "META-INF/dataverse/plugins/test_MergedProviderImpl.properties"; - String descriptor = Files.readString(result.generatedFile(descriptorPath)); - - assertTrue(descriptor.contains("plugin.requires.test.SharedProvider.level=8")); - assertEquals( - 1, - countOccurrences(descriptor, "plugin.requires.test.SharedProvider.level=8"), - "Shared provider should only appear once in the descriptor" - ); + String descriptorPath = PluginDescriptorFormat.toPath("test.MergedProviderImpl"); + PluginDescriptor descriptor = PluginDescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals(8, descriptor.requiredProviderLevel("test.SharedProvider").getAsInt()); } } From 9e3aa6158c4156a47922b1e00e73515ff16ae792 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 13 Mar 2026 17:13:43 +0100 Subject: [PATCH 20/55] refactor(meta): rename `kind` to `role` in `@PluginContract` for improved clarity Update all references in annotations, related models, and processors to reflect the new terminology. Adjust validation logic, error messages, and unit tests accordingly. 
--- .../spi/meta/annotations/PluginContract.java | 6 +- .../processor/PluginContractProcessor.java | 28 ++++---- .../PluginContractProcessorTest.java | 68 +++++++++---------- 3 files changed, 51 insertions(+), 51 deletions(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java index 042e3c2..4ae6c77 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java @@ -20,11 +20,11 @@ /** * Whether this contract is the primary plugin kind or an optional capability. */ - Kind kind(); + Role role(); /** * Other plugin contracts that must also be implemented if this contract is implemented. - * Example: a {@link Kind#CAPABILITY} contract should ask for a {@link Kind#BASE} contract to be implemented. + * Example: a {@link Role#CAPABILITY} contract should ask for a {@link Role#BASE} contract to be implemented. */ Class[] requires() default {}; @@ -36,7 +36,7 @@ /** * Distinguishes a base plugin contract from optional capability contracts. 
*/ - enum Kind { + enum Role { BASE, CAPABILITY } diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index e4d1e92..78453e8 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -319,7 +319,7 @@ private void processImplementation(TypeElement implementation) { for (TypeElement contract : sortByQualifiedName(contracts)) { PluginContractModel model = readPluginContractModel(contract); - if (model.kind() == PluginContract.Kind.BASE) { + if (model.role() == PluginContract.Role.BASE) { if (baseContract != null) { error( implementation, @@ -353,7 +353,7 @@ private void processImplementation(TypeElement implementation) { } if (baseContract == null) { - error(implementation, "Implementation must implement exactly one Kind.BASE @PluginContract"); + error(implementation, "Implementation must implement exactly one Role.BASE @PluginContract"); throw new ProcessorException(); } @@ -720,37 +720,37 @@ private PluginContractModel readPluginContractModel(TypeElement contract) { validateApiLevelConstant(contract); - PluginContract.Kind kind = readContractKind(annotation, contract); + PluginContract.Role role = readContractRole(annotation, contract); List requiredContracts = readClassArrayAnnotationValue(annotation, "requires"); List providers = readRequiredProviders(annotation); - return new PluginContractModel(kind, List.copyOf(requiredContracts), List.copyOf(providers)); + return new PluginContractModel(role, List.copyOf(requiredContracts), List.copyOf(providers)); } /** - * Reads the {@code kind} member of a {@code @PluginContract} annotation. + * Reads the {@code role} member of a {@code @PluginContract} annotation. 
* * @param annotation the contract annotation mirror * @param contract the annotated contract, used for diagnostics - * @return the parsed contract kind + * @return the parsed contract role */ - private PluginContract.Kind readContractKind(AnnotationMirror annotation, TypeElement contract) { - AnnotationValue value = getAnnotationValue(annotation, "kind"); + private PluginContract.Role readContractRole(AnnotationMirror annotation, TypeElement contract) { + AnnotationValue value = getAnnotationValue(annotation, "role"); if (value == null) { - error(contract, "@PluginContract.kind is required"); + error(contract, "@PluginContract.role is required"); throw new ProcessorException(); } Object raw = value.getValue(); if (!(raw instanceof VariableElement enumConstant)) { - error(contract, "@PluginContract.kind must be an enum constant"); + error(contract, "@PluginContract.role must be an enum constant"); throw new ProcessorException(); } try { - return PluginContract.Kind.valueOf(enumConstant.getSimpleName().toString()); + return PluginContract.Role.valueOf(enumConstant.getSimpleName().toString()); } catch (IllegalArgumentException ex) { - error(contract, "Unsupported @PluginContract.kind: " + enumConstant.getSimpleName()); + error(contract, "Unsupported @PluginContract.role: " + enumConstant.getSimpleName()); throw new ProcessorException(); } } @@ -1151,12 +1151,12 @@ private void warning(Element element, String message) { /** * Internal in-memory representation of one contract interface. 
* - * @param kind whether the contract is a base contract or a capability + * @param role whether the contract is a base contract or a capability * @param requiredContracts contracts that must also be implemented * @param providers providers required by this contract */ private record PluginContractModel( - PluginContract.Kind kind, + PluginContract.Role role, List requiredContracts, List providers ) { diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index 121a665..a610781 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -51,7 +51,7 @@ public interface TestProvider extends CoreProvider { import %s; @PluginContract( - kind = PluginContract.Kind.BASE, + role = PluginContract.Role.BASE, providers = { @RequiredProvider(TestProvider.class) } ) public interface TestPlugin extends Plugin { @@ -111,7 +111,7 @@ void failsWhenPluginContractIsPlacedOnImplementationClass() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 1; } @@ -129,7 +129,7 @@ public interface TestPlugin extends Plugin { import %s; @DataversePlugin - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public class InvalidImplementation implements TestPlugin { @Override public String identity() { @@ -158,7 +158,7 @@ void warnsWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOExce import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 2; } @@ -197,7 +197,7 @@ void 
createsDescriptorEvenWhenPluginImplementationOmitsDataversePluginAnnotation import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 4; } @@ -243,7 +243,7 @@ void createsServiceFileEvenWhenPluginImplementationOmitsDataversePluginAnnotatio import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 5; } @@ -287,7 +287,7 @@ void failsWhenMultipleBaseContractsAreImplemented() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface BasePluginA extends Plugin { int API_LEVEL = 1; } @@ -304,7 +304,7 @@ public interface BasePluginA extends Plugin { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface BasePluginB extends Plugin { int API_LEVEL = 1; } @@ -346,7 +346,7 @@ void failsWhenRequiredContractIsMissing() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface BasePlugin extends Plugin { int API_LEVEL = 1; } @@ -365,7 +365,7 @@ public interface BasePlugin extends Plugin { import %s; @PluginContract( - kind = PluginContract.Kind.CAPABILITY, + role = PluginContract.Role.CAPABILITY, requires = { BasePlugin.class } ) public interface CapabilityPlugin extends Plugin { @@ -464,7 +464,7 @@ void suppressesGeneratedServiceFileWhenAutoServiceIsPresent() throws IOException import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 2; } @@ -515,7 +515,7 @@ void failsWhenImplementationIsNotPublic() throws 
IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 1; } @@ -557,7 +557,7 @@ void failsWhenImplementationIsAbstract() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 1; } @@ -599,7 +599,7 @@ void failsWhenNoBaseContractIsImplemented() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.CAPABILITY) + @PluginContract(role = PluginContract.Role.CAPABILITY) public interface CapabilityPlugin extends Plugin { int API_LEVEL = 1; } @@ -627,7 +627,7 @@ public String identity() { )); assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one Kind.BASE @PluginContract"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one Role.BASE @PluginContract"); } @Test @@ -698,7 +698,7 @@ void doesNotProcessAnnotatedImplementationTwice() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 6; } @@ -750,7 +750,7 @@ void aggregatesMultipleImplementationsIntoOneServiceFile() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 1; } @@ -817,7 +817,7 @@ void compilesWhenAnnotatedPluginInterfaceHasNoImplementation() throws IOExceptio import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface LonelyPluginContract extends Plugin { int API_LEVEL = 1; } @@ -863,7 +863,7 @@ void 
doesNotWarnForAbstractUnannotatedPluginBaseClass() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 1; } @@ -902,7 +902,7 @@ void failsWhenIndirectPluginInterfaceLacksPluginContractAnnotation() throws IOEx import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface BasePlugin extends Plugin { int API_LEVEL = 1; } @@ -971,7 +971,7 @@ void suppressesServiceGenerationOnlyForPluginKindManagedByAutoService() throws I import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface PluginTypeA extends Plugin { int API_LEVEL = 1; } @@ -988,7 +988,7 @@ public interface PluginTypeA extends Plugin { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface PluginTypeB extends Plugin { int API_LEVEL = 1; } @@ -1059,7 +1059,7 @@ void failsWhenContractApiLevelIsMissing() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface MissingApiLevelPlugin extends Plugin { } """.formatted( @@ -1100,7 +1100,7 @@ void failsWhenContractApiLevelIsNotCompileTimeConstant() throws IOException { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface NonConstantApiLevelPlugin extends Plugin { Integer API_LEVEL = Integer.valueOf(2); } @@ -1154,7 +1154,7 @@ public interface NotAProvider { import %s; @PluginContract( - kind = PluginContract.Kind.BASE, + role = PluginContract.Role.BASE, providers = { @RequiredProvider(NotAProvider.class) } ) public interface InvalidProviderPlugin extends Plugin { @@ -1212,7 +1212,7 @@ 
public interface MissingProviderApiLevel extends CoreProvider { import %s; @PluginContract( - kind = PluginContract.Kind.BASE, + role = PluginContract.Role.BASE, providers = { @RequiredProvider(MissingProviderApiLevel.class) } ) public interface TestPlugin extends Plugin { @@ -1271,7 +1271,7 @@ public interface NonConstantProviderApiLevel extends CoreProvider { import %s; @PluginContract( - kind = PluginContract.Kind.BASE, + role = PluginContract.Role.BASE, providers = { @RequiredProvider(NonConstantProviderApiLevel.class) } ) public interface TestPlugin extends Plugin { @@ -1330,7 +1330,7 @@ public interface TestProvider extends CoreProvider { import %s; @PluginContract( - kind = PluginContract.Kind.BASE, + role = PluginContract.Role.BASE, providers = { @RequiredProvider(TestProvider.class) } ) public interface BasePlugin extends Plugin { @@ -1351,7 +1351,7 @@ public interface BasePlugin extends Plugin { import %s; @PluginContract( - kind = PluginContract.Kind.CAPABILITY, + role = PluginContract.Role.CAPABILITY, requires = { BasePlugin.class } ) public interface IntermediateCapability extends BasePlugin { @@ -1371,7 +1371,7 @@ public interface IntermediateCapability extends BasePlugin { import %s; @PluginContract( - kind = PluginContract.Kind.CAPABILITY, + role = PluginContract.Role.CAPABILITY, requires = { BasePlugin.class, IntermediateCapability.class } ) public interface LeafCapability extends IntermediateCapability { @@ -1442,7 +1442,7 @@ void suppressesGeneratedServiceFileForWholeContractWhenAutoServiceIsMixedWithNor import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { int API_LEVEL = 2; } @@ -1523,7 +1523,7 @@ public interface SharedProvider extends CoreProvider { import %s; import %s; - @PluginContract(kind = PluginContract.Kind.BASE) + @PluginContract(role = PluginContract.Role.BASE) public interface BasePlugin extends Plugin { int API_LEVEL 
= 1; } @@ -1542,7 +1542,7 @@ public interface BasePlugin extends Plugin { import %s; @PluginContract( - kind = PluginContract.Kind.CAPABILITY, + role = PluginContract.Role.CAPABILITY, requires = { BasePlugin.class }, providers = { @RequiredProvider(SharedProvider.class) } ) @@ -1565,7 +1565,7 @@ public interface CapabilityOne extends Plugin { import %s; @PluginContract( - kind = PluginContract.Kind.CAPABILITY, + role = PluginContract.Role.CAPABILITY, requires = { BasePlugin.class }, providers = { @RequiredProvider(SharedProvider.class) } ) From b5e9f2d42b932af5f5eb0f9387e537c4384c28cc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Mar 2026 18:12:47 +0100 Subject: [PATCH 21/55] refactor(meta): rename `PluginDescriptor` to `Descriptor` for consistency Simplify class names to improve clarity and ensure alignment with metadata terminology. Update related references, serialization logic, and unit tests accordingly. --- .../gdcc/spi/meta/descriptor/Descriptor.java | 142 +++++++++++++++ ...iptorFormat.java => DescriptorFormat.java} | 48 ++--- .../spi/meta/descriptor/PluginDescriptor.java | 172 ------------------ .../processor/PluginContractProcessor.java | 22 +-- ...matTest.java => DescriptorFormatTest.java} | 102 +++++------ .../PluginContractProcessorTest.java | 54 +++--- 6 files changed, 255 insertions(+), 285 deletions(-) create mode 100644 meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java rename meta/src/main/java/io/gdcc/spi/meta/descriptor/{PluginDescriptorFormat.java => DescriptorFormat.java} (80%) delete mode 100644 meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java rename meta/src/test/java/io/gdcc/spi/meta/descriptor/{PluginDescriptorFormatTest.java => DescriptorFormatTest.java} (73%) diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java new file mode 100644 index 0000000..a4431f2 --- /dev/null +++ 
b/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java @@ -0,0 +1,142 @@ +package io.gdcc.spi.meta.descriptor; + +import java.util.Map; +import java.util.Objects; +import java.util.OptionalInt; + +/** + * In-memory representation of raw metadata de/serialized from a Dataverse Plugin Metadata file. + * + * @param klass implementation class name + * @param kind fully qualified base contract name + * @param contracts map of implemented contract names to API levels. May not contain null keys or values. + * @param requiredProviders map of required provider names to API levels. May not contain null keys or values. + */ +public record Descriptor( + String klass, + String kind, + Map contracts, + Map requiredProviders +) { + + /** + * Creates a new plugin and defensively copies the contract/provider maps. + * + *

This ensures the plugin remains immutable even if callers pass in + * mutable maps. All arguments must be non-null.

+ * + *

Please note: the provided maps may not contain any null keys or values.

+ */ + public Descriptor { + Objects.requireNonNull(klass); + Objects.requireNonNull(kind); + contracts = Map.copyOf(Objects.requireNonNull(contracts)); + requiredProviders = Map.copyOf(Objects.requireNonNull(requiredProviders)); + } + + /** + * Determines whether this plugin is of a specified base contract kind. + * Checks both {@code kind} and implemented {@code contracts}. + * + * @param kindFqcn the fully qualified class name of the kind to check + * @return {@code true} if the plugin's kind matches the given class name and its contract is implemented, + * otherwise {@code false} + */ + public boolean isOfKind(String kindFqcn) { + Objects.requireNonNull(kindFqcn); + return kind.equals(kindFqcn) && implementsContract(kindFqcn); + } + + public boolean isOfKind(Class kind) { + Objects.requireNonNull(kind); + return isOfKind(DescriptorFormat.transformClassName(kind)); + } + + /** + * Checks whether this plugin declares the given implemented contract. + * + * @param contractFqcn the fully qualified contract class name + * @return {@code true} if the contract is present in this plugin + */ + public boolean implementsContract(String contractFqcn) { + Objects.requireNonNull(contractFqcn); + return contracts.containsKey(contractFqcn); + } + + /** + * Checks whether this plugin declares the given implemented contract. + * + * @param contractClass the contract class + * @return {@code true} if the contract is present in this plugin + */ + public boolean implementsContract(Class contractClass) { + Objects.requireNonNull(contractClass); + return implementsContract(DescriptorFormat.transformClassName(contractClass)); + } + + /** + * Returns the declared API level for the given implemented contract, if present. 
+ * + * @param contractFqcn the fully qualified contract class name + * @return the declared API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public int contractLevel(String contractFqcn) { + Objects.requireNonNull(contractFqcn); + return contracts.get(contractFqcn); + } + + /** + * Returns the declared API level for the given implemented contract, if present. + * + * @param contractClass the contract class + * @return the declared API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public int contractLevel(Class contractClass) { + Objects.requireNonNull(contractClass); + return contractLevel(DescriptorFormat.transformClassName(contractClass)); + } + + /** + * Checks whether this plugin declares the given required provider. + * + * @param providerFqcn the fully qualified provider class name + * @return {@code true} if the provider is present in this plugin + */ + public boolean requiresProvider(String providerFqcn) { + Objects.requireNonNull(providerFqcn); + return requiredProviders.containsKey(providerFqcn); + } + + /** + * Checks whether this plugin declares the given required provider. + * + * @param providerClass the provider class + * @return {@code true} if the provider is present in this plugin + */ + public boolean requiresProvider(Class providerClass) { + Objects.requireNonNull(providerClass); + return requiresProvider(DescriptorFormat.transformClassName(providerClass)); + } + + /** + * Returns the declared required API level for the given provider, if present. + * + * @param providerFqcn the fully qualified provider class name + * @return the required provider API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public int requiredProviderLevel(String providerFqcn) { + Objects.requireNonNull(providerFqcn); + return requiredProviders.get(providerFqcn); + } + + /** + * Returns the declared required API level for the given provider, if present. 
+ * + * @param providerClass the provider class + * @return the required provider API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public int requiredProviderLevel(Class providerClass) { + Objects.requireNonNull(providerClass); + return requiredProviderLevel(DescriptorFormat.transformClassName(providerClass)); + } +} \ No newline at end of file diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormat.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java similarity index 80% rename from meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormat.java rename to meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java index 0262882..2220f54 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormat.java +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java @@ -9,12 +9,12 @@ import java.util.Properties; /** - * Defines constants for the format and structure of plugin descriptor files. + * Defines constants for the format and structure of plugin plugin files. * These descriptors provide metadata about plugins, including their * implementation class, type, contracts, and required providers. * *
    - * - DESCRIPTOR_DIRECTORY: Specifies the directory where plugin descriptor files are located. + * - DESCRIPTOR_DIRECTORY: Specifies the directory where plugin plugin files are located. * - PLUGIN_CLASS_FIELD: Defines the key used to identify the plugin's implementation class. * - PLUGIN_KIND_FIELD: Defines the key used to specify the base contract type of the plugin. * - CONTRACT_PREFIX and CONTRACT_SUFFIX: Define the keys used to denote the contracts @@ -23,7 +23,7 @@ * required providers and their associated API levels. *
*/ -public final class PluginDescriptorFormat { +public final class DescriptorFormat { public static final String DESCRIPTOR_DIRECTORY = "META-INF/dataverse/plugins/"; public static final String DESCRIPTOR_EXTENSION = ".properties"; @@ -34,7 +34,7 @@ public final class PluginDescriptorFormat { public static final String REQUIRED_PROVIDER_PREFIX = "plugin.requires."; public static final String REQUIRED_PROVIDER_SUFFIX = ".level"; - private PluginDescriptorFormat() { + private DescriptorFormat() { /* Intentionally left blank for helper class */ } @@ -71,18 +71,18 @@ public static String toRequiredProviderLevel(String providerFQCN) { } /** - * Serializes the provided {@link PluginDescriptor} into the given {@link Writer} + * Serializes the provided {@link Descriptor} into the given {@link Writer} * in the form of a properties file, encoding plugin metadata such as plugin class, * plugin kind, implemented contracts, and required providers. * - * @param descriptor the {@link PluginDescriptor} containing the plugin metadata to be serialized - * @param writer the {@link Writer} where the descriptor properties will be written + * @param descriptor the {@link Descriptor} containing the plugin metadata to be serialized + * @param writer the {@link Writer} where the plugin properties will be written * @throws IOException if an I/O error occurs while writing to the {@link Writer} */ - public static void write(PluginDescriptor descriptor, Writer writer) throws IOException { + public static void write(Descriptor descriptor, Writer writer) throws IOException { Properties properties = new Properties(); - properties.setProperty(PLUGIN_CLASS_FIELD, descriptor.pluginClass()); - properties.setProperty(PLUGIN_KIND_FIELD, descriptor.pluginKind()); + properties.setProperty(PLUGIN_CLASS_FIELD, descriptor.klass()); + properties.setProperty(PLUGIN_KIND_FIELD, descriptor.kind()); descriptor.contracts().forEach((contract, level) -> properties.setProperty(toContractLevel(contract), 
Integer.toString(level))); @@ -94,18 +94,18 @@ public static void write(PluginDescriptor descriptor, Writer writer) throws IOEx } /** - * Reads a plugin descriptor from the serialized properties format. + * Reads a plugin plugin from the serialized properties format. * - *

The returned descriptor contains the mandatory plugin class and base contract fields, + *

The returned plugin contains the mandatory plugin class and base contract fields, * plus all parsed contract/provider API levels found in the input.

* - * @param reader the character stream containing descriptor properties - * @return the parsed descriptor + * @param reader the character stream containing plugin properties + * @return the parsed plugin. * @throws IOException if the properties cannot be read * @throws IllegalArgumentException if mandatory fields are missing or if any level value * cannot be parsed as an integer */ - public static PluginDescriptor read(Reader reader) throws IOException { + public static Descriptor read(Reader reader) throws IOException { Properties properties = new Properties(); properties.load(reader); @@ -144,27 +144,27 @@ public static PluginDescriptor read(Reader reader) throws IOException { } } - return new PluginDescriptor( + return new Descriptor( pluginClass, pluginKind, - Map.copyOf(contracts), - Map.copyOf(requiredProviders) + contracts, + requiredProviders ); } /** - * Reads a plugin descriptor from the given string content. + * Reads a plugin plugin from the given string content. * - * This method parses the input string into a {@link PluginDescriptor} object. It internally utilizes + * This method parses the input string into a {@link Descriptor} object. It internally utilizes * a {@link StringReader} to read the string and expects the content to be in a properties-based serialized format. 
* - * @param content the string content containing serialized descriptor properties - * @return the parsed {@link PluginDescriptor} + * @param content the string content containing serialized plugin properties + * @return the parsed {@link Descriptor} * @throws RuntimeException if an I/O error occurs * @throws IllegalArgumentException if mandatory fields are missing */ - public static PluginDescriptor read(String content) { - PluginDescriptor descriptor = null; + public static Descriptor read(String content) { + Descriptor descriptor = null; try (StringReader reader = new StringReader(content)) { descriptor = read(reader); diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java deleted file mode 100644 index 82038b4..0000000 --- a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java +++ /dev/null @@ -1,172 +0,0 @@ -package io.gdcc.spi.meta.descriptor; - -import java.util.Map; -import java.util.Objects; -import java.util.OptionalInt; - -/** - * In-memory representation of one generated plugin descriptor. - * - * @param pluginClass implementation class name - * @param pluginKind fully qualified base contract name - * @param contracts map of implemented contract names to API levels - * @param requiredProviders map of required provider names to API levels - */ -public record PluginDescriptor( - String pluginClass, - String pluginKind, - Map contracts, - Map requiredProviders -) { - - /** - * Creates a new descriptor and defensively copies the contract/provider maps. - * - *

This ensures the descriptor remains immutable even if callers pass in - * mutable maps. All arguments must be non-null.

- */ - public PluginDescriptor { - Objects.requireNonNull(pluginClass); - Objects.requireNonNull(pluginKind); - contracts = Map.copyOf(Objects.requireNonNull(contracts)); - requiredProviders = Map.copyOf(Objects.requireNonNull(requiredProviders)); - } - - /** - * Checks whether this plugin declares the given implemented contract. - * - * @param contractFqcn the fully qualified contract class name - * @return {@code true} if the contract is present in this descriptor - */ - public boolean implementsContract(String contractFqcn) { - Objects.requireNonNull(contractFqcn); - return contracts.containsKey(contractFqcn); - } - - /** - * Checks whether this plugin declares the given implemented contract. - * - * @param contractClass the contract class - * @return {@code true} if the contract is present in this descriptor - */ - public boolean implementsContract(Class contractClass) { - Objects.requireNonNull(contractClass); - return implementsContract(contractClass.getCanonicalName()); - } - - /** - * Returns the declared API level for the given implemented contract, if present. - * - * @param contractFqcn the fully qualified contract class name - * @return the declared API level wrapped in an {@link OptionalInt}, or an empty value if absent - */ - public OptionalInt contractLevel(String contractFqcn) { - Objects.requireNonNull(contractFqcn); - Integer value = contracts.get(contractFqcn); - return value == null ? OptionalInt.empty() : OptionalInt.of(value); - } - - /** - * Returns the declared API level for the given implemented contract, if present. - * - * @param contractClass the contract class - * @return the declared API level wrapped in an {@link OptionalInt}, or an empty value if absent - */ - public OptionalInt contractLevel(Class contractClass) { - Objects.requireNonNull(contractClass); - return contractLevel(contractClass.getCanonicalName()); - } - - /** - * Returns the declared API level for the given implemented contract. 
- * - * @param contractFqcn the fully qualified contract class name - * @return the declared API level - * @throws IllegalArgumentException if the contract is not present in this descriptor - */ - public int contractLevelOrThrow(String contractFqcn) { - return contractLevel(contractFqcn) - .orElseThrow(() -> new IllegalArgumentException("Unknown contract " + contractFqcn)); - } - - /** - * Returns the declared API level for the given implemented contract. - * - * @param contractClass the contract class - * @return the declared API level - * @throws IllegalArgumentException if the contract is not present in this descriptor - */ - public int contractLevelOrThrow(Class contractClass) { - Objects.requireNonNull(contractClass); - return contractLevelOrThrow(contractClass.getCanonicalName()); - } - - /** - * Checks whether this plugin declares the given required provider. - * - * @param providerFqcn the fully qualified provider class name - * @return {@code true} if the provider is present in this descriptor - */ - public boolean requiresProvider(String providerFqcn) { - Objects.requireNonNull(providerFqcn); - return requiredProviders.containsKey(providerFqcn); - } - - /** - * Checks whether this plugin declares the given required provider. - * - * @param providerClass the provider class - * @return {@code true} if the provider is present in this descriptor - */ - public boolean requiresProvider(Class providerClass) { - Objects.requireNonNull(providerClass); - return requiresProvider(providerClass.getCanonicalName()); - } - - /** - * Returns the declared required API level for the given provider, if present. - * - * @param providerFqcn the fully qualified provider class name - * @return the required provider API level wrapped in an {@link OptionalInt}, or an empty value if absent - */ - public OptionalInt requiredProviderLevel(String providerFqcn) { - Objects.requireNonNull(providerFqcn); - Integer value = requiredProviders.get(providerFqcn); - return value == null ? 
OptionalInt.empty() : OptionalInt.of(value); - } - - /** - * Returns the declared required API level for the given provider, if present. - * - * @param providerClass the provider class - * @return the required provider API level wrapped in an {@link OptionalInt}, or an empty value if absent - */ - public OptionalInt requiredProviderLevel(Class providerClass) { - Objects.requireNonNull(providerClass); - return requiredProviderLevel(providerClass.getCanonicalName()); - } - - /** - * Returns the declared required API level for the given provider. - * - * @param providerFqcn the fully qualified provider class name - * @return the required provider API level - * @throws IllegalArgumentException if the provider is not present in this descriptor - */ - public int requiredProviderLevelOrThrow(String providerFqcn) { - return requiredProviderLevel(providerFqcn) - .orElseThrow(() -> new IllegalArgumentException("Unknown required provider " + providerFqcn)); - } - - /** - * Returns the declared required API level for the given provider. 
- * - * @param providerClass the provider class - * @return the required provider API level - * @throws IllegalArgumentException if the provider is not present in this descriptor - */ - public int requiredProviderLevelOrThrow(Class providerClass) { - Objects.requireNonNull(providerClass); - return requiredProviderLevelOrThrow(providerClass.getCanonicalName()); - } -} \ No newline at end of file diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 78453e8..7ac697d 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -1,8 +1,8 @@ package io.gdcc.spi.meta.processor; import io.gdcc.spi.meta.annotations.PluginContract; -import io.gdcc.spi.meta.descriptor.PluginDescriptor; -import io.gdcc.spi.meta.descriptor.PluginDescriptorFormat; +import io.gdcc.spi.meta.descriptor.Descriptor; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.ProcessingEnvironment; @@ -44,7 +44,7 @@ * plugin contracts annotated with {@code @PluginContract}, validates the contract graph, and emits:

* *
 * <ol>
- *   <li>a per-plugin descriptor under {@value #DESCRIPTOR_DIRECTORY}, and</li>
+ *   <li>a per-plugin descriptor under {@value DescriptorFormat#DESCRIPTOR_DIRECTORY}, and</li>
 *   <li>a {@code META-INF/services/...} entry for the base plugin contract when safe to do so.</li>
 * </ol>
* @@ -142,7 +142,7 @@ public final class PluginContractProcessor extends AbstractProcessor { *

Descriptors are written only after processing is over, which keeps resource generation * deterministic and avoids partial aggregate state.

*/ - private final Map descriptors = new LinkedHashMap<>(); + private final Map descriptors = new LinkedHashMap<>(); /** * Service registrations grouped by base contract name. @@ -361,7 +361,7 @@ private void processImplementation(TypeElement implementation) { descriptors.put( implementationClassName, - new PluginDescriptor( + new Descriptor( implementationClassName, baseContractName, contractLevels, @@ -901,7 +901,7 @@ private boolean hasAutoServiceAnnotation(TypeElement implementation) { * are not externally managed via {@code @AutoService}.

*/ private void writeAllGeneratedResources() { - for (PluginDescriptor descriptor : descriptors.values()) { + for (Descriptor descriptor : descriptors.values()) { writeDescriptor(descriptor); } @@ -917,10 +917,10 @@ private void writeAllGeneratedResources() { /** * Writes one generated plugin descriptor file. * - * @param descriptor the descriptor model to serialize + * @param descriptor the plugin descriptor model to serialize */ - private void writeDescriptor(PluginDescriptor descriptor) { - String resourceName = PluginDescriptorFormat.toPath(descriptor.pluginClass()); + private void writeDescriptor(Descriptor descriptor) { + String resourceName = DescriptorFormat.toPath(descriptor.klass()); try { FileObject resource = processingEnv @@ -928,12 +928,12 @@ private void writeDescriptor(PluginDescriptor descriptor) { .createResource(StandardLocation.CLASS_OUTPUT, "", resourceName); try (Writer writer = resource.openWriter()) { - PluginDescriptorFormat.write(descriptor, writer); + DescriptorFormat.write(descriptor, writer); } } catch (IOException e) { processingEnv.getMessager().printMessage( Diagnostic.Kind.ERROR, - "Failed to write descriptor for " + descriptor.pluginClass() + ": " + e.getMessage() + "Failed to write descriptor for " + descriptor.klass() + ": " + e.getMessage() ); } } diff --git a/meta/src/test/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormatTest.java b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java similarity index 73% rename from meta/src/test/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormatTest.java rename to meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java index 05c31c2..5dfaa26 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/descriptor/PluginDescriptorFormatTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java @@ -13,27 +13,27 @@ import static org.junit.jupiter.api.Assertions.assertThrows; import static 
org.junit.jupiter.api.Assertions.assertTrue; -class PluginDescriptorFormatTest { +class DescriptorFormatTest { @Nested class Fields { @Test void toFilename_UsesDescriptorExtension_ForString() { - String result = PluginDescriptorFormat.toFilename("io.gdcc.example.MyPlugin"); + String result = DescriptorFormat.toFilename("io.gdcc.example.MyPlugin"); assertEquals("io.gdcc.example.MyPlugin.properties", result); } @Test void toFilename_UsesDescriptorExtension_ForClass() { - String result = PluginDescriptorFormat.toFilename(SamplePlugin.class); + String result = DescriptorFormat.toFilename(SamplePlugin.class); - assertEquals("io.gdcc.spi.meta.descriptor.PluginDescriptorFormatTest.SamplePlugin.properties", result); + assertEquals("io.gdcc.spi.meta.descriptor.DescriptorFormatTest.SamplePlugin.properties", result); } @Test void toPath_PrependsDescriptorDirectory_ForString() { - String result = PluginDescriptorFormat.toPath("io.gdcc.example.MyPlugin"); + String result = DescriptorFormat.toPath("io.gdcc.example.MyPlugin"); assertEquals( "META-INF/dataverse/plugins/io.gdcc.example.MyPlugin.properties", @@ -43,17 +43,17 @@ void toPath_PrependsDescriptorDirectory_ForString() { @Test void toPath_PrependsDescriptorDirectory_ForClass() { - String result = PluginDescriptorFormat.toPath(SamplePlugin.class); + String result = DescriptorFormat.toPath(SamplePlugin.class); assertEquals( - "META-INF/dataverse/plugins/io.gdcc.spi.meta.descriptor.PluginDescriptorFormatTest.SamplePlugin.properties", + "META-INF/dataverse/plugins/io.gdcc.spi.meta.descriptor.DescriptorFormatTest.SamplePlugin.properties", result ); } @Test void toContractLevel_CreatesExpectedPropertyKey_ForString() { - String result = PluginDescriptorFormat.toContractLevel("io.gdcc.example.ExportPlugin"); + String result = DescriptorFormat.toContractLevel("io.gdcc.example.ExportPlugin"); assertEquals( "plugin.implements.io.gdcc.example.ExportPlugin.level", @@ -63,17 +63,17 @@ void 
toContractLevel_CreatesExpectedPropertyKey_ForString() { @Test void toContractLevel_CreatesExpectedPropertyKey_ForClass() { - String result = PluginDescriptorFormat.toContractLevel(SampleContract.class); + String result = DescriptorFormat.toContractLevel(SampleContract.class); assertEquals( - "plugin.implements.io.gdcc.spi.meta.descriptor.PluginDescriptorFormatTest.SampleContract.level", + "plugin.implements.io.gdcc.spi.meta.descriptor.DescriptorFormatTest$SampleContract.level", result ); } @Test void toRequiredProviderLevel_CreatesExpectedPropertyKey_ForString() { - String result = PluginDescriptorFormat.toRequiredProviderLevel("io.gdcc.example.ExportProvider"); + String result = DescriptorFormat.toRequiredProviderLevel("io.gdcc.example.ExportProvider"); assertEquals( "plugin.requires.io.gdcc.example.ExportProvider.level", @@ -83,10 +83,10 @@ void toRequiredProviderLevel_CreatesExpectedPropertyKey_ForString() { @Test void toRequiredProviderLevel_CreatesExpectedPropertyKey_ForClass() { - String result = PluginDescriptorFormat.toRequiredProviderLevel(SampleProvider.class); + String result = DescriptorFormat.toRequiredProviderLevel(SampleProvider.class); assertEquals( - "plugin.requires.io.gdcc.spi.meta.descriptor.PluginDescriptorFormatTest.SampleProvider.level", + "plugin.requires.io.gdcc.spi.meta.descriptor.DescriptorFormatTest$SampleProvider.level", result ); } @@ -96,7 +96,7 @@ void toRequiredProviderLevel_CreatesExpectedPropertyKey_ForClass() { class Write { @Test void write_WritesCoreFieldsContractsAndProviders() throws IOException { - PluginDescriptor descriptor = new PluginDescriptor( + Descriptor descriptor = new Descriptor( "io.gdcc.example.MyPlugin", "io.gdcc.example.ExportPlugin", Map.of( @@ -110,12 +110,12 @@ void write_WritesCoreFieldsContractsAndProviders() throws IOException { StringWriter writer = new StringWriter(); - PluginDescriptorFormat.write(descriptor, writer); + DescriptorFormat.write(descriptor, writer); Properties properties = 
loadProperties(writer.toString()); - assertEquals("io.gdcc.example.MyPlugin", properties.getProperty(PluginDescriptorFormat.PLUGIN_CLASS_FIELD)); - assertEquals("io.gdcc.example.ExportPlugin", properties.getProperty(PluginDescriptorFormat.PLUGIN_KIND_FIELD)); + assertEquals("io.gdcc.example.MyPlugin", properties.getProperty(DescriptorFormat.PLUGIN_CLASS_FIELD)); + assertEquals("io.gdcc.example.ExportPlugin", properties.getProperty(DescriptorFormat.PLUGIN_KIND_FIELD)); assertEquals("2", properties.getProperty("plugin.implements.io.gdcc.example.ExportPlugin.level")); assertEquals("1", properties.getProperty("plugin.implements.io.gdcc.example.XmlCapability.level")); assertEquals("5", properties.getProperty("plugin.requires.io.gdcc.example.ExportProvider.level")); @@ -123,7 +123,7 @@ void write_WritesCoreFieldsContractsAndProviders() throws IOException { @Test void write_WritesCoreFields_WhenContractsAndProvidersAreEmpty() throws IOException { - PluginDescriptor descriptor = new PluginDescriptor( + Descriptor descriptor = new Descriptor( "io.gdcc.example.MinimalPlugin", "io.gdcc.example.ExportPlugin", Map.of(), @@ -132,18 +132,18 @@ void write_WritesCoreFields_WhenContractsAndProvidersAreEmpty() throws IOExcepti StringWriter writer = new StringWriter(); - PluginDescriptorFormat.write(descriptor, writer); + DescriptorFormat.write(descriptor, writer); Properties properties = loadProperties(writer.toString()); - assertEquals("io.gdcc.example.MinimalPlugin", properties.getProperty(PluginDescriptorFormat.PLUGIN_CLASS_FIELD)); - assertEquals("io.gdcc.example.ExportPlugin", properties.getProperty(PluginDescriptorFormat.PLUGIN_KIND_FIELD)); + assertEquals("io.gdcc.example.MinimalPlugin", properties.getProperty(DescriptorFormat.PLUGIN_CLASS_FIELD)); + assertEquals("io.gdcc.example.ExportPlugin", properties.getProperty(DescriptorFormat.PLUGIN_KIND_FIELD)); assertEquals(2, properties.size(), "Only the two mandatory core fields should be present"); } @Test void 
write_UsesHelperGeneratedPropertyKeys() throws IOException { - PluginDescriptor descriptor = new PluginDescriptor( + Descriptor descriptor = new Descriptor( "io.gdcc.example.MyPlugin", "io.gdcc.example.ExportPlugin", Map.of("io.gdcc.example.ExportPlugin", 7), @@ -152,34 +152,34 @@ void write_UsesHelperGeneratedPropertyKeys() throws IOException { StringWriter writer = new StringWriter(); - PluginDescriptorFormat.write(descriptor, writer); + DescriptorFormat.write(descriptor, writer); String serialized = writer.toString(); - assertTrue(serialized.contains(PluginDescriptorFormat.toContractLevel("io.gdcc.example.ExportPlugin") + "=7")); - assertTrue(serialized.contains(PluginDescriptorFormat.toRequiredProviderLevel("io.gdcc.example.ExportProvider") + "=11")); + assertTrue(serialized.contains(DescriptorFormat.toContractLevel("io.gdcc.example.ExportPlugin") + "=7")); + assertTrue(serialized.contains(DescriptorFormat.toRequiredProviderLevel("io.gdcc.example.ExportProvider") + "=11")); } @Test void stringAndClassOverloadsProduceEquivalentResults() { assertEquals( - PluginDescriptorFormat.toFilename(SamplePlugin.class), - PluginDescriptorFormat.toFilename(SamplePlugin.class.getCanonicalName()) + DescriptorFormat.toFilename(SamplePlugin.class), + DescriptorFormat.toFilename(SamplePlugin.class.getName()) ); assertEquals( - PluginDescriptorFormat.toPath(SamplePlugin.class), - PluginDescriptorFormat.toPath(SamplePlugin.class.getCanonicalName()) + DescriptorFormat.toPath(SamplePlugin.class), + DescriptorFormat.toPath(SamplePlugin.class.getName()) ); assertEquals( - PluginDescriptorFormat.toContractLevel(SampleContract.class), - PluginDescriptorFormat.toContractLevel(SampleContract.class.getCanonicalName()) + DescriptorFormat.toContractLevel(SampleContract.class), + DescriptorFormat.toContractLevel(SampleContract.class.getName()) ); assertEquals( - PluginDescriptorFormat.toRequiredProviderLevel(SampleProvider.class), - 
PluginDescriptorFormat.toRequiredProviderLevel(SampleProvider.class.getCanonicalName()) + DescriptorFormat.toRequiredProviderLevel(SampleProvider.class), + DescriptorFormat.toRequiredProviderLevel(SampleProvider.class.getName()) ); } @@ -199,10 +199,10 @@ void read_ReadsMandatoryFieldsAndEmptyMaps() throws IOException { plugin.kind=io.gdcc.example.ExportPlugin """; - PluginDescriptor descriptor = PluginDescriptorFormat.read(new StringReader(serialized)); + Descriptor descriptor = DescriptorFormat.read(new StringReader(serialized)); - assertEquals("io.gdcc.example.MyPlugin", descriptor.pluginClass()); - assertEquals("io.gdcc.example.ExportPlugin", descriptor.pluginKind()); + assertEquals("io.gdcc.example.MyPlugin", descriptor.klass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.kind()); assertEquals(Map.of(), descriptor.contracts()); assertEquals(Map.of(), descriptor.requiredProviders()); } @@ -218,10 +218,10 @@ void read_ReadsContractsAndRequiredProviders() throws IOException { plugin.requires.io.gdcc.example.BatchProvider.level=9 """; - PluginDescriptor descriptor = PluginDescriptorFormat.read(new StringReader(serialized)); + Descriptor descriptor = DescriptorFormat.read(new StringReader(serialized)); - assertEquals("io.gdcc.example.MyPlugin", descriptor.pluginClass()); - assertEquals("io.gdcc.example.ExportPlugin", descriptor.pluginKind()); + assertEquals("io.gdcc.example.MyPlugin", descriptor.klass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.kind()); assertEquals( Map.of( "io.gdcc.example.ExportPlugin", 2, @@ -249,10 +249,10 @@ void read_IgnoresUnknownProperties() throws IOException { unrelated.field=42 """; - PluginDescriptor descriptor = PluginDescriptorFormat.read(new StringReader(serialized)); + Descriptor descriptor = DescriptorFormat.read(new StringReader(serialized)); - assertEquals("io.gdcc.example.MyPlugin", descriptor.pluginClass()); - assertEquals("io.gdcc.example.ExportPlugin", descriptor.pluginKind()); + 
assertEquals("io.gdcc.example.MyPlugin", descriptor.klass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.kind()); assertEquals(Map.of("io.gdcc.example.ExportPlugin", 2), descriptor.contracts()); assertEquals(Map.of("io.gdcc.example.ExportProvider", 5), descriptor.requiredProviders()); } @@ -266,7 +266,7 @@ void read_FailsWhenPluginClassIsMissing() { IllegalArgumentException ex = assertThrows( IllegalArgumentException.class, - () -> PluginDescriptorFormat.read(new StringReader(serialized)) + () -> DescriptorFormat.read(new StringReader(serialized)) ); assertEquals("Missing required property plugin.class", ex.getMessage()); @@ -281,7 +281,7 @@ void read_FailsWhenPluginKindIsMissing() { IllegalArgumentException ex = assertThrows( IllegalArgumentException.class, - () -> PluginDescriptorFormat.read(new StringReader(serialized)) + () -> DescriptorFormat.read(new StringReader(serialized)) ); assertEquals("Missing required property plugin.kind", ex.getMessage()); @@ -297,7 +297,7 @@ void read_FailsWhenContractLevelIsNotAnInteger() { IllegalArgumentException ex = assertThrows( IllegalArgumentException.class, - () -> PluginDescriptorFormat.read(new StringReader(serialized)) + () -> DescriptorFormat.read(new StringReader(serialized)) ); assertEquals( @@ -316,7 +316,7 @@ void read_FailsWhenRequiredProviderLevelIsNotAnInteger() { IllegalArgumentException ex = assertThrows( IllegalArgumentException.class, - () -> PluginDescriptorFormat.read(new StringReader(serialized)) + () -> DescriptorFormat.read(new StringReader(serialized)) ); assertEquals( @@ -327,7 +327,7 @@ void read_FailsWhenRequiredProviderLevelIsNotAnInteger() { @Test void read_RoundTripsWithWrite() throws IOException { - PluginDescriptor original = new PluginDescriptor( + Descriptor original = new Descriptor( "io.gdcc.example.MyPlugin", "io.gdcc.example.ExportPlugin", Map.of( @@ -340,12 +340,12 @@ void read_RoundTripsWithWrite() throws IOException { ); StringWriter writer = new StringWriter(); - 
PluginDescriptorFormat.write(original, writer); + DescriptorFormat.write(original, writer); - PluginDescriptor reread = PluginDescriptorFormat.read(new StringReader(writer.toString())); + Descriptor reread = DescriptorFormat.read(new StringReader(writer.toString())); - assertEquals(original.pluginClass(), reread.pluginClass()); - assertEquals(original.pluginKind(), reread.pluginKind()); + assertEquals(original.klass(), reread.klass()); + assertEquals(original.kind(), reread.kind()); assertEquals(original.contracts(), reread.contracts()); assertEquals(original.requiredProviders(), reread.requiredProviders()); } diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index a610781..b0722bf 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -3,8 +3,8 @@ import io.gdcc.spi.meta.annotations.DataversePlugin; import io.gdcc.spi.meta.annotations.PluginContract; import io.gdcc.spi.meta.annotations.RequiredProvider; -import io.gdcc.spi.meta.descriptor.PluginDescriptor; -import io.gdcc.spi.meta.descriptor.PluginDescriptorFormat; +import io.gdcc.spi.meta.descriptor.Descriptor; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; import io.gdcc.spi.meta.plugin.CoreProvider; import io.gdcc.spi.meta.plugin.Plugin; import org.junit.jupiter.api.Nested; @@ -84,17 +84,17 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = PluginDescriptorFormat.toPath("test.GoodPlugin"); + String descriptorPath = DescriptorFormat.toPath("test.GoodPlugin"); String servicePath = "META-INF/services/test.TestPlugin"; assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be 
generated"); - PluginDescriptor descriptor = PluginDescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); - assertEquals("test.GoodPlugin", descriptor.pluginClass()); - assertEquals("test.TestPlugin", descriptor.pluginKind()); - assertEquals(3, descriptor.contractLevel("test.TestPlugin").getAsInt()); - assertEquals(7, descriptor.requiredProviderLevel("test.TestProvider").getAsInt()); + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.GoodPlugin", descriptor.klass()); + assertEquals("test.TestPlugin", descriptor.kind()); + assertEquals(3, descriptor.contractLevel("test.TestPlugin")); + assertEquals(7, descriptor.requiredProviderLevel("test.TestProvider")); String serviceFile = Files.readString(result.generatedFile(servicePath)); assertEquals("test.GoodPlugin", serviceFile.trim()); @@ -223,13 +223,13 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = PluginDescriptorFormat.toPath("test.ImplicitPlugin"); + String descriptorPath = DescriptorFormat.toPath("test.ImplicitPlugin"); assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); - PluginDescriptor descriptor = PluginDescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); - assertEquals("test.ImplicitPlugin", descriptor.pluginClass()); - assertEquals("test.TestPlugin", descriptor.pluginKind()); - assertEquals(4, descriptor.contractLevel("test.TestPlugin").getAsInt()); + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.ImplicitPlugin", descriptor.klass()); + assertEquals("test.TestPlugin", descriptor.kind()); + assertEquals(4, descriptor.contractLevel("test.TestPlugin")); } @Test @@ -495,7 +495,7 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = 
PluginDescriptorFormat.toPath("test.AutoServicePlugin"); + String descriptorPath = DescriptorFormat.toPath("test.AutoServicePlugin"); String servicePath = "META-INF/services/test.TestPlugin"; assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); @@ -1402,16 +1402,16 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = PluginDescriptorFormat.toPath("test.TransitiveImpl"); + String descriptorPath = DescriptorFormat.toPath("test.TransitiveImpl"); assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); - PluginDescriptor descriptor = PluginDescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); - assertEquals("test.TransitiveImpl", descriptor.pluginClass()); - assertEquals("test.BasePlugin", descriptor.pluginKind()); - assertEquals(3, descriptor.contractLevel("test.BasePlugin").getAsInt()); - assertEquals(4, descriptor.contractLevel("test.IntermediateCapability").getAsInt()); - assertEquals(5, descriptor.contractLevel("test.LeafCapability").getAsInt()); - assertEquals(11, descriptor.requiredProviderLevel("test.TestProvider").getAsInt()); + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.TransitiveImpl", descriptor.klass()); + assertEquals("test.BasePlugin", descriptor.kind()); + assertEquals(3, descriptor.contractLevel("test.BasePlugin")); + assertEquals(4, descriptor.contractLevel("test.IntermediateCapability")); + assertEquals(5, descriptor.contractLevel("test.LeafCapability")); + assertEquals(11, descriptor.requiredProviderLevel("test.TestProvider")); } @Test @@ -1489,8 +1489,8 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String autoDescriptorPath = PluginDescriptorFormat.toPath("test.AutoServiceImpl"); - String normalDescriptorPath = 
PluginDescriptorFormat.toPath("test.NormalImpl"); + String autoDescriptorPath = DescriptorFormat.toPath("test.AutoServiceImpl"); + String normalDescriptorPath = DescriptorFormat.toPath("test.NormalImpl"); String servicePath = "META-INF/services/test.TestPlugin"; assertTrue(Files.exists(result.generatedFile(autoDescriptorPath)), "AutoService descriptor should be generated"); @@ -1598,9 +1598,9 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = PluginDescriptorFormat.toPath("test.MergedProviderImpl"); - PluginDescriptor descriptor = PluginDescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); - assertEquals(8, descriptor.requiredProviderLevel("test.SharedProvider").getAsInt()); + String descriptorPath = DescriptorFormat.toPath("test.MergedProviderImpl"); + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals(8, descriptor.requiredProviderLevel("test.SharedProvider")); } } From e79804f36d63012feac8e853ef77ce3ef0c8f598 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Mar 2026 18:17:49 +0100 Subject: [PATCH 22/55] refactor(meta): remove unused `DESCRIPTOR_DIRECTORY` constant and enhance error message in `PluginContractProcessor` Clean up unused field to improve code maintainability. Clarify error messaging by including the fully qualified name of the `Plugin` interface for better developer guidance. 
--- .../spi/meta/processor/PluginContractProcessor.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 7ac697d..8a555ed 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -3,6 +3,7 @@ import io.gdcc.spi.meta.annotations.PluginContract; import io.gdcc.spi.meta.descriptor.Descriptor; import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.plugin.Plugin; import javax.annotation.processing.AbstractProcessor; import javax.annotation.processing.ProcessingEnvironment; @@ -116,11 +117,6 @@ public final class PluginContractProcessor extends AbstractProcessor { */ private static final String API_LEVEL_FIELD = "API_LEVEL"; - /** - * Output directory for generated plugin descriptors. - */ - private static final String DESCRIPTOR_DIRECTORY = "META-INF/dataverse/plugins/"; - /** * Output directory for generated ServiceLoader files. */ @@ -307,7 +303,9 @@ private void processImplementation(TypeElement implementation) { if (contracts.isEmpty()) { error( implementation, - "No implemented plugin contracts found; implementations must implement a specific @PluginContract interface" + "No implemented plugin contracts found; " + + "implementations must implement a specific @PluginContract interface " + + "which itself extends " + Plugin.class.getName() ); throw new ProcessorException(); } From d19f1fc0ae36a4c623d907a2a4a9c53543badfd0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Mar 2026 18:18:19 +0100 Subject: [PATCH 23/55] test(meta): add unit test for invalid `@PluginContract` usage on non-plugin interfaces Validate that `@PluginContract` cannot be applied to interfaces that do not extend `Plugin`. 
Ensure meaningful diagnostics on compilation failure. --- .../PluginContractProcessorTest.java | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index b0722bf..27964db 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -1048,6 +1048,47 @@ public String identity() { @Nested class EdgeCases { + @Test + void failsWhenContractIsOnNonPluginInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingExtendsPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface MissingExtendsPlugin { + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/MissingExtendsPluginImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MissingExtendsPluginImpl implements MissingExtendsPlugin { + @Override + public String identity() { + return "missing-extends-plugin"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must implement a specific @PluginContract interface"); + } + @Test void failsWhenContractApiLevelIsMissing() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( From 41873d6fab47b2076d4cac9a778cc32cdec92fb1 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Mar 2026 18:18:58 +0100 Subject: [PATCH 24/55] refactor(meta): extract `transformClassName` to centralize class name handling Replace redundant calls to `Class::getCanonicalName` 
across the `DescriptorFormat` class with `transformClassName` method to align serialization logic and handle inner class naming consistently. --- .../spi/meta/descriptor/DescriptorFormat.java | 27 ++++++++++++++----- 1 file changed, 20 insertions(+), 7 deletions(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java index 2220f54..4eed2bd 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java @@ -38,16 +38,29 @@ private DescriptorFormat() { /* Intentionally left blank for helper class */ } - public static String toFilename(Class klazz) { - return toFilename(klazz.getCanonicalName()); + /** + * Transforms the provided class into its canonical name representation. + * Reusable in different places to keep serialization from class to FQCN aligned. + * + * @param klass the {@link Class} object whose canonical name is to be returned + * @return the name of the specified class, or null if the class does not have a name. + * Note: not using the canonical name to avoid issues with inner classes and de/serialization. + */ + public static String transformClassName(Class klass) { + return klass.getName(); + } + + public static String toFilename(Class klass) { + return toFilename(transformClassName(klass)); } public static String toFilename(String fqcn) { - return fqcn + DESCRIPTOR_EXTENSION; + // The FQCN may contain "$" from inner classes. This would be bad for filenames. 
+ return fqcn.replace('$', '.') + DESCRIPTOR_EXTENSION; } - public static String toPath(Class klazz) { - return toPath(klazz.getCanonicalName()); + public static String toPath(Class klass) { + return toPath(transformClassName(klass)); } public static String toPath(String fqcn) { @@ -55,7 +68,7 @@ public static String toPath(String fqcn) { } public static String toContractLevel(Class contractClass) { - return toContractLevel(contractClass.getCanonicalName()); + return toContractLevel(transformClassName(contractClass)); } public static String toContractLevel(String contractFQCN) { @@ -63,7 +76,7 @@ public static String toContractLevel(String contractFQCN) { } public static String toRequiredProviderLevel(Class providerClass) { - return toRequiredProviderLevel(providerClass.getCanonicalName()); + return toRequiredProviderLevel(transformClassName(providerClass)); } public static String toRequiredProviderLevel(String providerFQCN) { From 9e65bd13b59f8db4f2e7f65bf0667b45ab327357 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Mar 2026 18:22:58 +0100 Subject: [PATCH 25/55] feat(meta): introduce `PluginDescriptor` and `SourcedDescriptor` for plugin metadata representation Add `PluginDescriptor` for runtime-facing metadata of plugins and `SourcedDescriptor` for combining plugin information with source paths. Ensure immutability, validation, and equality consistency in both classes. 
--- .../spi/meta/descriptor/PluginDescriptor.java | 134 ++++++++++++++++++ .../meta/descriptor/SourcedDescriptor.java | 23 +++ 2 files changed, 157 insertions(+) create mode 100644 meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java create mode 100644 meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java new file mode 100644 index 0000000..ecea6bd --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java @@ -0,0 +1,134 @@ +package io.gdcc.spi.meta.descriptor; + +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; + +import java.nio.file.Path; +import java.util.Map; +import java.util.Objects; +import java.util.OptionalInt; + +/** + * Runtime-facing descriptor of a resolved and loaded plugin implementation. + * + *

This descriptor represents a plugin after metadata has been interpreted in the context of the + * running application and the relevant Java types have been resolved. Unlike the build-time or + * serialized descriptor form, this model uses actual {@link Class} references for the plugin + * implementation, its base contract, all implemented plugin contracts, and all required core + * providers.

+ * + *

The generic type parameter {@code T} represents the base plugin contract under which the plugin + * was resolved and loaded. The {@code pluginClass} is therefore guaranteed to implement that base + * contract, while {@code kindClass} denotes the concrete base plugin contract itself.

+ * + *

The {@code contracts} map contains all plugin contracts implemented by the plugin together with + * their declared API levels. This includes the base contract as well as any optional capability + * contracts. The {@code requiredProviders} map contains all core providers required by the plugin's + * implemented contracts, again paired with their declared API levels.

+ * + * @param the base plugin contract type under which this plugin was resolved + * @param sourceLocation the source location from which the plugin was loaded, such as a JAR file or + * exploded classpath directory + * @param identity the logical plugin identity reported by the plugin instance; intended for + * distinguishing plugins at runtime + * @param pluginClass the concrete implementation class of the plugin + * @param kindClass the resolved base plugin contract implemented by the plugin + * @param contracts all resolved plugin contracts implemented by the plugin, mapped to their + * declared API levels + * @param requiredProviders all resolved core providers required by the plugin, mapped to their + * required API levels + */ +public record PluginDescriptor( + Path sourceLocation, + String identity, + Class pluginClass, + Class kindClass, + Map, Integer> contracts, + Map, Integer> requiredProviders +) { + + public PluginDescriptor { + Objects.requireNonNull(sourceLocation); + Objects.requireNonNull(identity); + Objects.requireNonNull(pluginClass); + Objects.requireNonNull(kindClass); + Objects.requireNonNull(contracts); + Objects.requireNonNull(requiredProviders); + + // Immutability is key + contracts = Map.copyOf(contracts); + requiredProviders = Map.copyOf(requiredProviders); + + // Sane structure checks + if (identity.isBlank()) + throw new IllegalArgumentException("Plugin identity cannot be blank"); + if (contracts.isEmpty()) + throw new IllegalArgumentException("Plugin must implement at least one contract (the kindClass one)"); + } + + public boolean implementsContract(Class contractClass) { + return this.contracts.containsKey(contractClass); + } + + public OptionalInt contractLevel(Class contractClass) { + return implementsContract(contractClass) + ? 
OptionalInt.of(this.contracts.get(contractClass)) + : OptionalInt.empty(); + } + + public boolean requiresProvider(Class providerClass) { + return this.requiredProviders.containsKey(providerClass); + } + + public OptionalInt requiredProviderLevel(Class providerClass) { + return requiresProvider(providerClass) + ? OptionalInt.of(this.requiredProviders.get(providerClass)) + : OptionalInt.empty(); + } + + /** + * Returns the normalized identity string of this plugin. + * The normalization process converts the identity to lowercase + * and removes special characters such as "/\\-_:.#~*", ensuring + * a consistent format for comparison purposes. + * + * @return the normalized identity string, or null if the original identity is null + */ + public String normalizedIdentity() { + return normalizeIdentity(this.identity); + } + + /** + * Normalizes the given identity string for comparison purposes by converting it to lowercase + * and removing all occurrences of the characters "/\-_:.#~*", which are commonly used to separate words. + * This avoids having multiple plugins targeting the same thing, like an export format with a slightly different + * case or special characters. 
+ * + * @param identity the identity string to normalize + * @return the normalized identity string, or null if the input is null + */ + private String normalizeIdentity(String identity) { + if (identity == null) return null; + return identity.toLowerCase().replaceAll("[/\\\\_\\-:.#~*]+", ""); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + + PluginDescriptor that = (PluginDescriptor) obj; + + return Objects.equals(sourceLocation, that.sourceLocation) && + Objects.equals(pluginClass, that.pluginClass) && + ( + Objects.equals(identity, that.identity) || + Objects.equals(normalizeIdentity(identity), normalizeIdentity(that.identity)) + ); + } + + @Override + public int hashCode() { + return Objects.hash(sourceLocation, pluginClass, normalizeIdentity(identity)); + } +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java new file mode 100644 index 0000000..a94bddb --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java @@ -0,0 +1,23 @@ +package io.gdcc.spi.meta.descriptor; + +import java.nio.file.Path; +import java.util.Objects; + +/** + * A record representing a descriptor that is sourced from a specific location. + * Combines information about a descriptor and its source location. 
+ * + * @param sourceLocation the path to the source location of the descriptor, must not be null + * @param plugin the {@link Descriptor} representing the plugin information, must not be null + */ +public record SourcedDescriptor(Path sourceLocation, Descriptor plugin) { + + public SourcedDescriptor { + Objects.requireNonNull(sourceLocation); + Objects.requireNonNull(plugin); + } + + public boolean isOfKind(Class contractClass) { + return plugin.isOfKind(contractClass); + } +} From fdcc7f1a5b82ac67216a6b28fb29a8c76b385118 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Mar 2026 18:23:57 +0100 Subject: [PATCH 26/55] feat(meta): add `DescriptorScanner` for scanning plugin descriptors in directories and JARs Implement `DescriptorScanner` to locate and parse plugin descriptors from specified directories or JAR files. Include unit tests to validate functionality for various scenarios, including edge cases and error handling. --- .../meta/descriptor/DescriptorScanner.java | 134 ++++++++++++ .../descriptor/DescriptorScannerTest.java | 192 ++++++++++++++++++ 2 files changed, 326 insertions(+) create mode 100644 meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java create mode 100644 meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java new file mode 100644 index 0000000..d6562d1 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java @@ -0,0 +1,134 @@ +package io.gdcc.spi.meta.descriptor; + +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarFile; + +import static 
io.gdcc.spi.meta.descriptor.DescriptorFormat.DESCRIPTOR_DIRECTORY; +import static io.gdcc.spi.meta.descriptor.DescriptorFormat.DESCRIPTOR_EXTENSION; +import static io.gdcc.spi.meta.descriptor.DescriptorFormat.read; + +public final class DescriptorScanner { + + private DescriptorScanner() { + /* Intentionally private constructor for helper class without instances */ + } + + /** + * Scans the specified path to identify plugin descriptors. The path can either be a directory + * or a JAR file. The method determines the type of the path and invokes the appropriate + * scanning logic to extract plugin descriptors. + * + * @param path the path to be scanned; must not be null. If the path represents a directory, + * plugin files contained within it will be scanned. If the path represents + * a JAR file, its internal entries will be scanned for descriptors. + * @return a list of {@code SourcedDescriptor} objects representing plugin descriptors + * found at the given path. The list will be empty if no descriptors are found. + * @throws IllegalArgumentException if the provided {@code path} is {@code null}. + * @throws IOException if an I/O error occurs while accessing the specified path or its contents. + */ + public static List<SourcedDescriptor> scanPath(Path path) throws IOException { + List<SourcedDescriptor> scanResult = new ArrayList<>(); + + if (path == null) { + throw new IllegalArgumentException("Set of paths may not contain null values"); + } + if (Files.isDirectory(path)) { + scanDirectory(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin))); + } + scanJar(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin))); + + return List.copyOf(scanResult); + } + + /** + * Scans the specified JAR file for plugin descriptor entries and extracts them into a list of + * {@link Descriptor} objects. The method looks for plugin descriptor files based on predefined + * directory and file extension constants. 
+ * + * @param jarPath the path to the JAR file to be scanned; must be a valid, readable, and regular file + * with a ".jar" extension. Usage of symbolic links is allowed. + * @return a list of {@code Descriptor} objects extracted from the JAR file. If no plugin + * descriptors are found, the returned list will be empty. + * @throws IllegalArgumentException if the provided {@code jarPath} is {@code null}, does not exist, + * is unreadable, is not a regular file, or does not have a ".jar" + * extension. + * @throws IOException if an I/O error occurs while reading the JAR file or its entries. + */ + static List<Descriptor> scanJar(Path jarPath) throws IOException { + if (jarPath == null || !Files.exists(jarPath) || !Files.isReadable(jarPath) || + !Files.isRegularFile(jarPath) || !jarPath.getFileName().toString().toLowerCase().endsWith(".jar")) { + throw new IllegalArgumentException("jarPath '" + jarPath + "' is not a readable JAR file"); + } + + // Iterate over the entries in the JAR file, read the ones we know to be plugin descriptors + List<Descriptor> descriptors = new ArrayList<>(); + try (var jarFile = new JarFile(jarPath.toFile())) { + for (Iterator<JarEntry> it = jarFile.entries().asIterator(); it.hasNext(); ) { + JarEntry entry = it.next(); + String name = entry.getName(); + + if (name.startsWith(DESCRIPTOR_DIRECTORY) && name.endsWith(DESCRIPTOR_EXTENSION)) { + try(InputStreamReader reader = new InputStreamReader(jarFile.getInputStream(entry), StandardCharsets.UTF_8)) { + Descriptor descriptor = read(reader); + descriptors.add(descriptor); + } + } + } + } + + return List.copyOf(descriptors); + } + + /** + * Scans the specified directory for plugin descriptor files and extracts them into a list of + * {@link Descriptor} objects. The method searches for plugin files in a predefined + * subdirectory and processes files with a specific file extension. + * + * @param root the root directory to be scanned; must be a valid, readable, and existing directory. 
+ * @return a list of {@code Descriptor} objects extracted from the directory. If no plugin + * descriptors are found, the returned list will be empty. + * @throws IllegalArgumentException if the provided {@code root} is {@code null}, does not exist, + * is unreadable, or is not a directory. + * @throws IOException if an I/O error occurs while reading the directory or its contents. + */ + static List<Descriptor> scanDirectory(Path root) throws IOException { + if (root == null || !Files.exists(root) || !Files.isReadable(root) || !Files.isDirectory(root)) { + throw new IllegalArgumentException("directory '" + root + "' is not a readable directory"); + } + + // Look up the plugin metadata directory - if it does not exist, there are no plugins here. + Path descriptorDir = root.resolve(DescriptorFormat.DESCRIPTOR_DIRECTORY); + if (!Files.isDirectory(descriptorDir)) { + return List.of(); + } + + // Scan the directory for plugin metadata, read it, and add it to a list + List<Descriptor> descriptors = new ArrayList<>(); + try (var paths = Files.list(descriptorDir)) { + for (Path path : paths.toList()) { + String name = path.getFileName().toString(); + + if (name.endsWith(DESCRIPTOR_EXTENSION)) { + try (FileReader reader = new FileReader(path.toFile(), StandardCharsets.UTF_8)) { + Descriptor descriptor = read(reader); + descriptors.add(descriptor); + } + } + } + } + + return List.copyOf(descriptors); + } + + // TODO: a method to check that for a given plugin class and plugin kind there is a service loader entry present + +} diff --git a/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java new file mode 100644 index 0000000..eee0998 --- /dev/null +++ b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java @@ -0,0 +1,192 @@ +package io.gdcc.spi.meta.descriptor; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + 
+import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.jar.JarEntry; +import java.util.jar.JarOutputStream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class DescriptorScannerTest { + + @TempDir + Path tempDir; + + @Nested + class Directory { + + @Test + void scanDirectory_ReturnsEmpty_WhenDescriptorDirectoryDoesNotExist() throws IOException { + List descriptors = DescriptorScanner.scanDirectory(tempDir); + + assertTrue(descriptors.isEmpty()); + } + + @Test + void scanDirectory_ReadsSingleDescriptor() throws IOException { + Path descriptorDir = tempDir.resolve(DescriptorFormat.DESCRIPTOR_DIRECTORY); + Files.createDirectories(descriptorDir); + + Path descriptorFile = descriptorDir.resolve("test.Plugin.properties"); + Files.writeString( + descriptorFile, + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=1 + plugin.requires.test.Provider.level=2 + """, + StandardCharsets.UTF_8 + ); + + List descriptors = DescriptorScanner.scanDirectory(tempDir); + + assertEquals(1, descriptors.size()); + Descriptor descriptor = descriptors.get(0); + assertEquals("test.Plugin", descriptor.klass()); + assertEquals("test.BasePlugin", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.BasePlugin")); + assertEquals(2, descriptor.requiredProviderLevel("test.Provider")); + } + + @Test + void scanDirectory_IgnoresNonPropertyFiles() throws IOException { + Path descriptorDir = tempDir.resolve(DescriptorFormat.DESCRIPTOR_DIRECTORY); + Files.createDirectories(descriptorDir); + + Files.writeString( + descriptorDir.resolve("test.Plugin.properties"), + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + """, + StandardCharsets.UTF_8 + ); 
+ Files.writeString(descriptorDir.resolve("README.txt"), "ignore me", StandardCharsets.UTF_8); + + List descriptors = DescriptorScanner.scanDirectory(tempDir); + + assertEquals(1, descriptors.size()); + assertEquals("test.Plugin", descriptors.get(0).klass()); + } + + @Test + void scanDirectory_RejectsNonDirectory() throws IOException { + Path file = Files.createTempFile(tempDir, "not-a-directory", ".txt"); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorScanner.scanDirectory(file) + ); + + assertTrue(ex.getMessage().contains("not a readable directory")); + } + } + + @Nested + class Jar { + + @Test + void scanJar_ReadsSingleDescriptor() throws IOException { + Path jar = createJar(Map.of( + "META-INF/dataverse/plugins/test.Plugin.properties", + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=3 + plugin.requires.test.Provider.level=4 + """, + "META-INF/services/test.BasePlugin", + "test.Plugin" + )); + + List descriptors = DescriptorScanner.scanJar(jar); + + assertEquals(1, descriptors.size()); + Descriptor descriptor = descriptors.get(0); + assertEquals("test.Plugin", descriptor.klass()); + assertEquals("test.BasePlugin", descriptor.kind()); + assertEquals(3, descriptor.contractLevel("test.BasePlugin")); + assertEquals(4, descriptor.requiredProviderLevel("test.Provider")); + } + + @Test + void scanJar_ReadsMultipleDescriptors() throws IOException { + Path jar = createJar(Map.of( + "META-INF/dataverse/plugins/test.A.properties", + """ + plugin.class=test.A + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=1 + """, + "META-INF/dataverse/plugins/test.B.properties", + """ + plugin.class=test.B + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=2 + """ + )); + + List descriptors = DescriptorScanner.scanJar(jar); + + assertEquals(2, descriptors.size()); + } + + @Test + void scanJar_IgnoresNonDescriptorEntries() throws 
IOException { + Path jar = createJar(Map.of( + "META-INF/dataverse/plugins/test.Plugin.properties", + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + """, + "META-INF/dataverse/plugins/README.txt", + "ignore me", + "some/other/resource.txt", + "ignore me too" + )); + + List descriptors = DescriptorScanner.scanJar(jar); + + assertEquals(1, descriptors.size()); + assertEquals("test.Plugin", descriptors.get(0).klass()); + } + + @Test + void scanJar_RejectsNonJarFile() throws IOException { + Path file = Files.createTempFile(tempDir, "not-a-jar", ".txt"); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorScanner.scanJar(file) + ); + + assertTrue(ex.getMessage().contains("not a readable JAR file")); + } + } + + private Path createJar(Map entries) throws IOException { + Path jar = Files.createTempFile(tempDir, "plugin-plugin-test-", ".jar"); + + try (JarOutputStream out = new JarOutputStream(Files.newOutputStream(jar))) { + for (Map.Entry entry : entries.entrySet()) { + JarEntry jarEntry = new JarEntry(entry.getKey()); + out.putNextEntry(jarEntry); + out.write(entry.getValue().getBytes(StandardCharsets.UTF_8)); + out.closeEntry(); + } + } + + return jar; + } +} From 4b3f1170c5e481cc5f33b7e930cb5921c4715446 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 27 Mar 2026 18:33:09 +0100 Subject: [PATCH 27/55] feat(core): move PluginLoader infrastructure to new Core Maven Module --- .../gdcc/spi/core/loader/LoaderException.java | 0 .../gdcc/spi/core/loader/LoaderProblem.java | 0 .../io/gdcc/spi/core/loader/PluginLoader.java | 0 .../spi/core/loader/PluginLoaderTest.java | 0 .../spi/core/test/basic/TestContract.java | 12 +++ .../gdcc/spi/core/test/basic/TestPlugin.java | 15 +++ .../io/gdcc/spi/core/loader/PluginSource.java | 101 ------------------ .../io/gdcc/spi/core/test/TestPlugin.java | 28 ----- .../io/gdcc/spi/core/test/TestProvider.java | 13 --- .../services/io.gdcc.spi.core.plugin.Plugin | 1 - 10 files 
changed, 27 insertions(+), 143 deletions(-) rename {src => core/src}/main/java/io/gdcc/spi/core/loader/LoaderException.java (100%) rename {src => core/src}/main/java/io/gdcc/spi/core/loader/LoaderProblem.java (100%) rename {src => core/src}/main/java/io/gdcc/spi/core/loader/PluginLoader.java (100%) rename {src => core/src}/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java (100%) create mode 100644 core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java create mode 100644 core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java delete mode 100644 src/main/java/io/gdcc/spi/core/loader/PluginSource.java delete mode 100644 src/test/java/io/gdcc/spi/core/test/TestPlugin.java delete mode 100644 src/test/java/io/gdcc/spi/core/test/TestProvider.java delete mode 100644 src/test/resources/META-INF/services/io.gdcc.spi.core.plugin.Plugin diff --git a/src/main/java/io/gdcc/spi/core/loader/LoaderException.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderException.java similarity index 100% rename from src/main/java/io/gdcc/spi/core/loader/LoaderException.java rename to core/src/main/java/io/gdcc/spi/core/loader/LoaderException.java diff --git a/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java similarity index 100% rename from src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java rename to core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java diff --git a/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java similarity index 100% rename from src/main/java/io/gdcc/spi/core/loader/PluginLoader.java rename to core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java diff --git a/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java similarity index 100% rename from src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java rename to 
core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java new file mode 100644 index 0000000..50248a5 --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java @@ -0,0 +1,12 @@ +package io.gdcc.spi.core.test.basic; + +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.plugin.Plugin; + +@PluginContract(role = PluginContract.Role.BASE) +public interface TestContract extends Plugin { + + int API_LEVEL = 1; + + void test(); +} diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java new file mode 100644 index 0000000..7148d0c --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java @@ -0,0 +1,15 @@ +package io.gdcc.spi.core.test.basic; + +import io.gdcc.spi.meta.plugin.Plugin; + +public class TestPlugin implements TestContract { + @Override + public String identity() { + return "test"; + } + + @Override + public void test() { + /* Intentionally left blank */ + } +} diff --git a/src/main/java/io/gdcc/spi/core/loader/PluginSource.java b/src/main/java/io/gdcc/spi/core/loader/PluginSource.java deleted file mode 100644 index 86a5039..0000000 --- a/src/main/java/io/gdcc/spi/core/loader/PluginSource.java +++ /dev/null @@ -1,101 +0,0 @@ -package io.gdcc.spi.core.loader; - -import java.nio.file.Path; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.Objects; -import java.util.Set; - -/** - * This record captures metadata about where a plugin was discovered and loaded from, associating - * 1) the plugin's logical identity (as returned by {@link io.gdcc.spi.core.plugin.Plugin#identity()}), - * 2) the plugin's class name, and - * 3) the JAR file path containing the plugin implementation. 
- */ -public record PluginSource(Path location, String className, String identity) { - - @Override - public String toString() { - return String.format("%s: className=%s, identity=%s", location, className, identity); - } - - /** - * Checks if this PluginSource is a duplicate of another based on ANY of: - * - Same class name - * - Same normalized identity (case-insensitive, separators removed) - * - * "Same location" doesn't count as a duplicate to enable loading multiple plugins from the same location. - * - * While {@link #equals(Object)} checks for strict logical equality (important for {@code Set} or {@code Map}), - * this method is targeted at detecting logical duplicates (that may have different locations) but share - * other identifying characteristics. - */ - public boolean isDuplicateOf(PluginSource other) { - if (other == null) return false; - return Objects.equals(className, other.className) || - Objects.equals(normalizeIdentity(identity), normalizeIdentity(other.identity)); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null || getClass() != obj.getClass()) return false; - - PluginSource that = (PluginSource) obj; - - return Objects.equals(location, that.location) && - Objects.equals(className, that.className) && - Objects.equals(normalizeIdentity(identity), normalizeIdentity(that.identity)); - } - - /** - * Normalizes the given identity string for comparison purposes by converting it to lowercase - * and removing all occurrences of the characters "/\-_:.#~*" which are commonly used to separate words. 
- * - * @param identity the identity string to normalize - * @return the normalized identity string, or null if the input is null - */ - private String normalizeIdentity(String identity) { - if (identity == null) return null; - return identity.toLowerCase().replaceAll("[/\\\\_\\-:.#~*]+", ""); - } - - @Override - public int hashCode() { - return Objects.hash(location, className, normalizeIdentity(identity)); - } - - /** - * Groups duplicate {@link PluginSource} instances together based on shared identifying characteristics. - * Two sources are considered duplicates if they have the same class name or the same normalized identity - * (case-insensitive, with common separator characters removed), as determined by {@link PluginSource#isDuplicateOf(PluginSource)}. - * Sources at the same location are NOT considered duplicates, allowing multiple plugins to be loaded from the same file. - * - * @param sources the set of plugin sources to group; may be empty or contain duplicates - * @return a set of disjoint groups, where each group is a set of mutually duplicate sources - */ - public static Set> groupDuplicates(Set sources) { - Set> groups = new LinkedHashSet<>(); - // Create a copy of the sources as a working set - Set remaining = new HashSet<>(sources); - - // Iterate over all remaining sources - while (!remaining.isEmpty()) { - PluginSource source = remaining.iterator().next(); - Set group = new LinkedHashSet<>(); - - // Check against every remaining source if this is a new duplicate - for (PluginSource candidate : remaining) { - if (source.isDuplicateOf(candidate)) { - group.add(candidate); - } - } - - // Add the group to the result set and remove any found duplicates from the working set (so they are not checked again) - groups.add(group); - remaining.removeAll(group); - } - - return groups; - } -} diff --git a/src/test/java/io/gdcc/spi/core/test/TestPlugin.java b/src/test/java/io/gdcc/spi/core/test/TestPlugin.java deleted file mode 100644 index 132c6a4..0000000 --- 
a/src/test/java/io/gdcc/spi/core/test/TestPlugin.java +++ /dev/null @@ -1,28 +0,0 @@ -package io.gdcc.spi.core.test; - -import io.gdcc.spi.core.plugin.CoreProvider; -import io.gdcc.spi.core.plugin.Plugin; - -import java.util.Set; - -public class TestPlugin implements Plugin { - @Override - public String identity() { - return "test"; - } - - @Override - public int providedPluginApiLevel() { - return 1; - } - - @Override - public int requiredProviderApiLevel(Class providerClass) { - return -1; - } - - @Override - public Set> expectedProviders() { - return Set.of(TestProvider.class); - } -} diff --git a/src/test/java/io/gdcc/spi/core/test/TestProvider.java b/src/test/java/io/gdcc/spi/core/test/TestProvider.java deleted file mode 100644 index bf06221..0000000 --- a/src/test/java/io/gdcc/spi/core/test/TestProvider.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.gdcc.spi.core.test; - -import io.gdcc.spi.core.plugin.CoreProvider; - -public interface TestProvider extends CoreProvider { - - int API_LEVEL = 100; - - static int apiLevel() { - return API_LEVEL; - } - -} diff --git a/src/test/resources/META-INF/services/io.gdcc.spi.core.plugin.Plugin b/src/test/resources/META-INF/services/io.gdcc.spi.core.plugin.Plugin deleted file mode 100644 index 680a3ba..0000000 --- a/src/test/resources/META-INF/services/io.gdcc.spi.core.plugin.Plugin +++ /dev/null @@ -1 +0,0 @@ -io.gdcc.spi.core.test.TestPlugin \ No newline at end of file From 2bc5750ed22b7ed5934ff715da0360fa30cbd8ee Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 28 Mar 2026 01:34:19 +0100 Subject: [PATCH 28/55] chore(meta,export): comment out version definitions and add notes on module version synchronization --- export/pom.xml | 3 ++- meta/pom.xml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/export/pom.xml b/export/pom.xml index 32a0620..8c035ad 100644 --- a/export/pom.xml +++ b/export/pom.xml @@ -10,7 +10,8 @@ export - ${spi.export.version} + + diff --git a/meta/pom.xml b/meta/pom.xml 
index 470f6fa..c489747 100644 --- a/meta/pom.xml +++ b/meta/pom.xml @@ -10,7 +10,8 @@ meta - ${spi.meta.version} + + From 9ab95d2703a17fe3add18b7aaa88a727ddb1e726 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 28 Mar 2026 01:48:58 +0100 Subject: [PATCH 29/55] feat(core): introduce `LoaderHelper`, `LoaderConfiguration`, and `DescriptorBuilder` utilities Add foundational components for plugin loading and validation: - `LoaderHelper` provides internal utility methods for plugin handling like validation, class loading, and conversion. - `LoaderConfiguration` defines loader behavior options. - `DescriptorBuilder` offers a fluent API for creating plugin descriptors (during testing). Includes PluginHandle and PluginValidationResult as data transfer objects. Includes (initial) unit tests for functionality verification. --- .../spi/core/loader/LoaderConfiguration.java | 41 ++ .../io/gdcc/spi/core/loader/LoaderHelper.java | 447 ++++++++++++++ .../gdcc/spi/core/loader/LoaderProblem.java | 63 +- .../io/gdcc/spi/core/loader/PluginHandle.java | 30 + .../core/loader/PluginValidationResult.java | 98 ++++ .../spi/core/loader/DescriptorBuilder.java | 191 ++++++ .../spi/core/loader/LoaderHelperTest.java | 547 ++++++++++++++++++ 7 files changed, 1411 insertions(+), 6 deletions(-) create mode 100644 core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java create mode 100644 core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java create mode 100644 core/src/main/java/io/gdcc/spi/core/loader/PluginHandle.java create mode 100644 core/src/main/java/io/gdcc/spi/core/loader/PluginValidationResult.java create mode 100644 core/src/test/java/io/gdcc/spi/core/loader/DescriptorBuilder.java create mode 100644 core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java new file mode 100644 index 0000000..5b415f9 --- 
/dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java @@ -0,0 +1,41 @@ +package io.gdcc.spi.core.loader; + +/** + * @param ENFORCE_SINGLE_SOURCE_MATCHING_PLUGINS_ONLY + * When activated, any source may only provide plugins for a single given base plugin contract interface. + * In case any non-compliant plugin is found within, the loader will refrain from loading from the source entirely. + * When deactivated, any non-matching plugins will simply be ignored. + * Default: false + * @param EMIT_WARNINGS_ON_MULTI_PLUGIN_SOURCE + * If {@link #ENFORCE_SINGLE_SOURCE_MATCHING_PLUGINS_ONLY} is deactivated, either log a warning (true) or not (false). + * Default: false + * @param ABORT_ON_COMPATIBILITY_PROBLEMS + * Abort loading when detecting any problems before actually loading classes (API level verification, etc.). + * Default: true + * @param ABORT_ON_DUPLICATED_IDENTITIES + * Abort loading when detecting any plugins with duplicated identities, making them indistinguishable for users. + * Default: true + * @param ENFORCE_UNAMBIGUOUS_PLUGIN_IDENTITIES + * When activated, any plugin must have a unique identity within its source, ensuring unambiguous identification. + * Any plugin's identity that differs by case or special chars only will be seen as a duplicate. 
+ * Default: true + */ +public record LoaderConfiguration( + boolean ENFORCE_SINGLE_SOURCE_MATCHING_PLUGINS_ONLY, + boolean EMIT_WARNINGS_ON_MULTI_PLUGIN_SOURCE, + boolean ABORT_ON_COMPATIBILITY_PROBLEMS, + boolean ABORT_ON_DUPLICATED_IDENTITIES, + boolean ENFORCE_UNAMBIGUOUS_PLUGIN_IDENTITIES +) { + + public static LoaderConfiguration defaultConfiguration() { + return new LoaderConfiguration( + false, + false, + true, + true, + true + ); + } + +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java new file mode 100644 index 0000000..d122de0 --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java @@ -0,0 +1,447 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.descriptor.PluginDescriptor; +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; + +import java.lang.reflect.Field; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Provides utility methods for handling plugin loading and validation operations, + * such as converting file paths to URLs, checking class presence, determining core API levels, + * and validating plugin descriptors. It's intended for internal use only. + */ +final class LoaderHelper { + + private LoaderHelper() { + // Intentionally left blank for helper singleton + } + + /** + * Converts a {@link Path} object to a {@link URL} with optional prefix and suffix. 
+ * + * @param path the {@link Path} to convert to a {@link URL}; must not be null + * @param urlPrefix an optional String to prepend to the URL (e.g., a scheme); can be null or blank + * @param urlSuffix an optional String to append to the URL; can be null or blank + * @return the constructed {@link URL} based on the provided path, prefix, and suffix + * @throws MalformedURLException if the constructed URL is invalid + */ + static URL pathToUrl(Path path, String urlPrefix, String urlSuffix) throws MalformedURLException { + return new URL( + (urlPrefix == null || urlPrefix.isBlank() ? "" : urlPrefix + ":" ) + + path.toUri().toURL() + + ( urlSuffix == null || urlSuffix.isBlank() ? "" : urlSuffix ) + ); + } + + /** + * Checks if a class with the specified fully qualified class name (FQCN) is present and + * accessible using the given {@link ClassLoader}. + * + * @param fqcn the fully qualified name of the class to be checked + * @param classLoader the {@link ClassLoader} to use for detecting the class + * @return true if the class is present and accessible; false otherwise + */ + static boolean isClassPresent(String fqcn, ClassLoader classLoader) { + try { + Class.forName(fqcn, false, classLoader); + return true; + } catch (ClassNotFoundException | LinkageError e) { + return false; + } + } + + + /** + * Determines the core API level of the given plugin class. This method checks for a static field + * named "API_LEVEL" in the specified class and returns its integer value if present and accessible. + * + * @param pluginClass the plugin class to check for the API level. The class must be an interface. + * @return an {@code OptionalInt} containing the API level if the field exists and is accessible; + * {@code OptionalInt.empty()} otherwise. + * @throws IllegalArgumentException if the provided class is not an interface. 
+ */ + static int determineCoreApiLevel(Class pluginClass) { + // Looking up the plugin contract API level is only ever valid on SPI interfaces but never on implementations. + if (!pluginClass.isInterface()) { + throw new IllegalArgumentException("Plugin class must be an interface"); + } + try { + // Retrieve the field from exactly this class (we don't want to search any superclasses here!) + // TODO: Get the field name from some common place to both loader and annotation processor! + Field apiLevel = pluginClass.getDeclaredField("API_LEVEL"); + return apiLevel.getInt(pluginClass); + } catch (NoSuchFieldException | IllegalAccessException e) { + throw new IllegalStateException("Plugin contract class must have an (accessible) API_LEVEL field"); + } + } + + /** + * Determines the core API level of a given plugin class by its fully qualified class name. + * + * @param className the fully qualified name of the class to evaluate + * @return the core API level of the specified plugin class + * @throws IllegalArgumentException if the specified class cannot be found + */ + static int determineCoreApiLevel(String className, ClassLoader classLoader) { + Class pluginClass = resolveClass(className, classLoader); + return determineCoreApiLevel(pluginClass); + } + + /** + * Resolves and returns the {@link Class} object for the specified fully qualified class name + * using the provided {@link ClassLoader}. 
+ * + * @param className the fully qualified name of the class to resolve; must not be null or empty + * @param classLoader the {@link ClassLoader} to use for loading the class; must not be null + * @return the {@link Class} object representing the loaded class + * @throws IllegalArgumentException if the class cannot be found or loaded + */ + static Class resolveClass(String className, ClassLoader classLoader) { + try { + return Class.forName(className, false, classLoader); + } catch (ClassNotFoundException e) { + throw new IllegalArgumentException("Plugin class not found: " + className, e); + } + } + + /** + * Validates a list of plugin descriptors to ensure there are no class name collisions either between + * plugins or with the core Java classpath. The method identifies plugins with conflicting class names + * and records the issues for further analysis. + * + * @param descriptors a list of {@code SourcedDescriptor} objects representing the plugins to validate. + * Each descriptor contains information about the plugin's source location and associated class. + * @param classLoader the {@code ClassLoader} used to check for class name conflicts with the core system. + * @return a {@code PluginValidationResult} object that contains two categories: + * - {@code accepted}: A list of plugins that passed validation without conflicts. + * - {@code rejected}: A map of rejected plugins to a list of {@code LoaderProblem} objects detailing + * the specific reasons for rejection. 
+ */ + static PluginValidationResult verifyNoClassCollisions(List descriptors, ClassLoader classLoader) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> rejected = new HashMap<>(); + + // Just an ephemeral scratch space to save what we've already seen as plugins from where + Map classToSourcePath = new HashMap<>(); + + // Iterate through all the discovered plugins + descriptors.forEach(descriptor -> { + + String className = descriptor.plugin().klass(); + Path source = descriptor.sourceLocation(); + List problems = new ArrayList<>(); + + // Check if the classname was already provided from a different location + if (classToSourcePath.containsKey(className)) { + problems.add(new LoaderProblem.PluginClassNameCollision(className, classToSourcePath.get(className), source)); + } else { + classToSourcePath.put(className, source); + } + + // Check if the classname is already present on the current class path (the core) + if (isClassPresent(className, classLoader)) { + problems.add(new LoaderProblem.PluginClassNameCollisionWithCore(className, source)); + } + + // Let the record show... + if (problems.isEmpty()) { + accepted.add(descriptor); + } else { + rejected.put(descriptor, problems); + } + }); + + return new PluginValidationResult<>(Set.copyOf(accepted), PluginValidationResult.copyProblemMap(rejected), Map.of()); + } + + + /** + * Identifies and classifies plugin descriptors that are not implementations of the specified plugin + * contract class. The method evaluates each descriptor to determine whether its associated plugin + * adheres to the given plugin contract. Plugins are then categorized as accepted, rejected, or + * warning-based, depending on their compatibility and the loader configuration. + * + * @param descriptors a list of {@code SourcedDescriptor} objects representing the plugins to be evaluated. + * Each descriptor contains information about the plugin's source location and associated class. 
+ * @param pluginClass the {@code Class} object representing the plugin contract that the plugins + * should implement. + * @param configuration a {@code LoaderConfiguration} object that dictates specific validation behaviors + * and enforcement rules when classifying the plugins. + * @return a {@code PluginValidationResult} object containing: + * - {@code accepted}: A set of plugins that fully adhere to the specified plugin contract. + * - {@code warning}: A map of plugins with potential issues or warnings that do not warrant rejection. + * - {@code rejected}: A map of plugins that were rejected due to failing to meet the plugin contract + * or violating enforced loader rules. + */ + static PluginValidationResult identifyNonImplementations( + List descriptors, Class pluginClass, LoaderConfiguration configuration) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> warning = new HashMap<>(); + Map> rejected = new HashMap<>(); + + // Check if the plugin to be loaded is an implementation of the plugin contract the loader was signed up for + descriptors.forEach(descriptor -> { + List problems = new ArrayList<>(); + + if (!descriptor.isOfKind(pluginClass)) { + problems.add(new LoaderProblem.PluginClassMismatch( + descriptor.plugin().klass(), + descriptor.sourceLocation(), + pluginClass.getCanonicalName() + )); + + if (configuration.ENFORCE_SINGLE_SOURCE_MATCHING_PLUGINS_ONLY()) + rejected.put(descriptor, problems); + else { + warning.put(descriptor, problems); + } + } else { + accepted.add(descriptor); + } + }); + + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(rejected), + PluginValidationResult.copyProblemMap(warning) + ); + } + + + /** + * Validates a list of plugin descriptors to ensure their API levels are compatible with the specified + * plugin contract class. 
The method verifies that each plugin both declares and adheres to the required + * API level for its declared contracts. Any discrepancies, such as missing contracts, mismatched API levels, + * or unsupported contracts, are recorded as validation problems. + * + * @param descriptors a list of {@code SourcedDescriptor} objects representing the plugins to validate. + * Each descriptor contains information about the plugin's source location and associated plugin details, + * including the contracts and API levels it supports. + * @param pluginClass the {@code Class} object representing the plugin contract that the plugins must + * comply with. The desired API level for this contract will be determined and used as part of + * the validation process. + * @return a {@code PluginValidationResult} object containing: + * - {@code accepted}: A set of plugins that fully adhere to the API level requirements for the + * specified plugin contract. + * - {@code rejected}: A map of plugins that failed validation, paired with a list of + * {@code LoaderProblem} objects describing the specific issues encountered. 
+ */ + static PluginValidationResult verifyPluginApiLevels(List descriptors, Class pluginClass, ClassLoader classLoader) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> rejected = new HashMap<>(); + + // Determine the plugin contract's name once as a string for comparisons + String desiredPluginClass = DescriptorFormat.transformClassName(pluginClass); + int desiredPluginApiLevel = determineCoreApiLevel(pluginClass); + + // Iterate over all the plugins + for (SourcedDescriptor descriptor : descriptors) { + // Need to track if the base contract appears in the contracts including a level + boolean hasBaseClassContract = false; + // Save all the problems identified during validation + List problems = new ArrayList<>(); + + String pluginImplementationClass = descriptor.plugin().klass(); + + // Iterate all the implemented capabilities and check their levels + for (Map.Entry contract : descriptor.plugin().contracts().entrySet()) { + String pluginContractClass = contract.getKey(); + int pluginContractApiLevel = contract.getValue(); + + // Let the record show that this plugin does claim to implement the base contract for this loader + if (desiredPluginClass.equals(pluginContractClass)) { + hasBaseClassContract = true; + } + + try { + // Extract the API level the core is expecting for this contract + int coreContractApiLevel = determineCoreApiLevel(pluginContractClass, classLoader); + + // Compare base class core levels: they must match exactly, otherwise record a problem + if (coreContractApiLevel != pluginContractApiLevel) { + problems.add(new LoaderProblem.PluginClassApiLevelMismatch( + pluginImplementationClass, + descriptor.sourceLocation(), + coreContractApiLevel, + pluginContractApiLevel) + ); + } + } catch (IllegalArgumentException e) { + problems.add(new LoaderProblem.PluginClassUnsupported( + pluginImplementationClass, + descriptor.sourceLocation(), + pluginContractClass) + ); + } + } + + // If the plugin did not provide a level 
for the base contract this loader expects, this is a serious problem... + if (!hasBaseClassContract) { + problems.add(new LoaderProblem.PluginClassApiLevelMissing( + pluginImplementationClass, + descriptor.sourceLocation(), + desiredPluginClass, + desiredPluginApiLevel) + ); + } + + if (problems.isEmpty()) { + accepted.add(descriptor); + } else { + rejected.put(descriptor, List.copyOf(problems)); + } + } + + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(rejected), + Map.of() + ); + } + + /** + * Converts a {@link SourcedDescriptor} and a plugin instance into a {@link PluginDescriptor}. + * + * @param The type of the plugin, constrained to extend {@link Plugin}. + * @param sourceDescriptor The descriptor providing metadata about the source and configuration of the plugin. + * Must not be null. + * @param plugin The actual plugin instance to be described. Must not be null and must have a valid identity. + * @param classLoader The {@link ClassLoader} used to resolve any required classes. Must not be null. + * @return A {@link PluginDescriptor} object that encapsulates the metadata, identity, and other properties + * of the provided plugin. + * @throws NullPointerException If any of the input parameters is null. + * @throws IllegalArgumentException If the plugin's identity is null or blank, or any classes cannot be resolved. 
+ */ + static PluginDescriptor toPluginDescriptor( + SourcedDescriptor sourceDescriptor, + T plugin, + ClassLoader classLoader + ) { + Objects.requireNonNull(sourceDescriptor); + Objects.requireNonNull(plugin); + Objects.requireNonNull(classLoader); + + String identity = plugin.identity(); + if (identity == null || identity.isBlank()) { + throw new IllegalArgumentException("Plugin identity may not be null or blank"); + } + + // The cast is necessary as getClass() returns T, but the actual implementation class is required + // At this point, we know that "plugin" is an instance of an implementation, so this operation is safe. + // T is an interface, as checked by the plugin loader during construction. + @SuppressWarnings("unchecked") + Class pluginClass = (Class) plugin.getClass(); + + // When reaching this point, the metadata verification already made sure that the plugin kind equals T. + // Casting here is safe. + @SuppressWarnings("unchecked") + Class kindClass = (Class) resolveClass(sourceDescriptor.plugin().kind(), classLoader); + + Map, Integer> contracts = new HashMap<>(); + sourceDescriptor.plugin().contracts().forEach((contractName, apiLevel) -> { + // Again, the metadata was already vetted to contain valid contract classes. Casting is safe here. + @SuppressWarnings("unchecked") + Class contractClass = (Class) resolveClass(contractName, classLoader); + contracts.put(contractClass, apiLevel); + }); + + Map, Integer> requiredProviders = new HashMap<>(); + sourceDescriptor.plugin().requiredProviders().forEach((providerName, apiLevel) -> { + // Again, the metadata was already vetted to contain valid provider requirements. Casting is safe here. 
+ @SuppressWarnings("unchecked") + Class providerClass = (Class) resolveClass(providerName, classLoader); + requiredProviders.put(providerClass, apiLevel); + }); + + return new PluginDescriptor<>( + sourceDescriptor.sourceLocation(), + identity, + pluginClass, + kindClass, + contracts, + requiredProviders + ); + } + + /** + * Verifies the uniqueness of plugin identities within the provided set of plugins. + * Identifies duplicates based on the normalized identity of each plugin descriptor + * and returns a validation result categorizing acceptable and problematic plugins. + * + * If the provided configuration enforces unambiguous identities, duplicates will + * be treated as rejected. Otherwise, duplicates will be reported as warnings. + * + * @param The type of the plugin. + * @param plugins The set of plugin descriptors to validate. + * @param configuration The loader configuration that dictates validation behavior. + * @return A {@link PluginValidationResult} containing the accepted plugins, + * rejected duplicates if enforcement is enabled, or warnings for duplicates + * if enforcement is disabled. 
+ */ + static PluginValidationResult> verifyUniqueIdentities(List> plugins, LoaderConfiguration configuration) { + // Group all descriptors by normalized identity + Map>> groups = plugins.stream() + .collect(Collectors.groupingBy( + entry -> entry.descriptor().normalizedIdentity(), + Collectors.toList()) + ); + + Set> accepted = new HashSet<>(); + Map, List> duplicates = new HashMap<>(); + + for (Map.Entry>> group : groups.entrySet()) { + String normalizedIdentity = group.getKey(); + List> members = group.getValue(); + + // Get all acceptable plugins that have no duplicates (single entry sets) + if (members.size() == 1) { + accepted.add(members.get(0)); + // There are no empty sets possible, so else equals size>1 + } else { + for (PluginHandle member : members) { + // Generate the list of duplication problems but skip for the current plugin + List problems = members.stream() + .filter(handle -> !handle.equals(member)) + .map(handle -> new LoaderProblem.DuplicateIdentity(normalizedIdentity, member.descriptor(), handle.descriptor())) + .collect(Collectors.toList()); + + duplicates.put(member, problems); + } + } + } + + if (configuration.ENFORCE_UNAMBIGUOUS_PLUGIN_IDENTITIES()) { + // Return duplicates as rejected + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(duplicates), + Map.of() + ); + } + + // Configuration says we only warn about duplicates + return new PluginValidationResult<>( + Set.copyOf(accepted), + Map.of(), + PluginValidationResult.copyProblemMap(duplicates) + ); + } + +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java index 89199cd..cf7f477 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java @@ -1,12 +1,11 @@ package io.gdcc.spi.core.loader; +import io.gdcc.spi.meta.descriptor.PluginDescriptor; + import 
java.nio.file.Path; import java.util.Set; -public sealed interface LoaderProblem permits - LoaderProblem.SourceFailure, - LoaderProblem.LocationFailure, - LoaderProblem.DuplicateSources { +public sealed interface LoaderProblem permits LoaderProblem.DuplicateIdentity, LoaderProblem.LocationFailure, LoaderProblem.PluginClassApiLevelMismatch, LoaderProblem.PluginClassApiLevelMissing, LoaderProblem.PluginClassMismatch, LoaderProblem.PluginClassNameCollision, LoaderProblem.PluginClassNameCollisionWithCore, LoaderProblem.PluginClassUnsupported, LoaderProblem.SourceFailure { String message(); @@ -24,10 +23,62 @@ public String message() { } } - record DuplicateSources(Set duplicateGroup) implements LoaderProblem { + record DuplicateIdentity(String normalizedIdentity, PluginDescriptor source, PluginDescriptor duplicate) implements LoaderProblem { + @Override + public String message() { + return """ + Plugin %s ( %s @ %s) normalized identity %s collides with Plugin %s ( %s @ %s) + """.formatted( + source.pluginClass().getCanonicalName(), + source.identity(), + source.sourceLocation(), + normalizedIdentity, + duplicate.pluginClass().getCanonicalName(), + duplicate.identity(), + duplicate.sourceLocation() + ); + } + } + + record PluginClassNameCollision(String className, Path source1, Path source2) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " is defined in both " + source1 + " and " + source2; + } + } + + record PluginClassNameCollisionWithCore(String className, Path source) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " is defined in both core and " + source; + } + } + + record PluginClassMismatch(String className, Path source, String pluginKind) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " in " + source + " does not implement " + pluginKind; + } + } + + record PluginClassUnsupported(String className, Path 
source, String pluginContract) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " in " + source + " implements unsupported plugin contract " + pluginContract; + } + } + + record PluginClassApiLevelMissing(String classname, Path source, String contractClass, int coreLevel) implements LoaderProblem { + @Override + public String message() { + return "Class " + classname + " in " + source + " provides no API level for " + contractClass + ", but core expects " + coreLevel; + } + } + + record PluginClassApiLevelMismatch(String classname, Path source, int coreLevel, int pluginLevel) implements LoaderProblem { @Override public String message() { - return "Duplicate plugin sources detected: " + duplicateGroup; + return "Class " + classname + " in " + source + " uses API level " + pluginLevel + " but core expects " + coreLevel; } } } diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginHandle.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginHandle.java new file mode 100644 index 0000000..6ecfd07 --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/PluginHandle.java @@ -0,0 +1,30 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.meta.descriptor.PluginDescriptor; +import io.gdcc.spi.meta.plugin.Plugin; + +import java.util.Objects; + +/** + * Encapsulates a plugin and its corresponding descriptor, providing a unified representation + * of a resolved plugin and its metadata in the runtime context. + * + *

The {@code PluginHandle} is an immutable record that binds a concrete plugin instance + * with its associated {@link PluginDescriptor}. This ensures both the metadata and the + * operational plugin instance are accessible and linked together, facilitating plugin management + * and execution.

+ * + * @param the type of the plugin instance, which must extend {@link Plugin} + * @param descriptor the runtime descriptor containing metadata and implementation details + * about the plugin; must not be null + * @param plugin the actual plugin instance associated with the descriptor; must not be null + */ +public record PluginHandle( + PluginDescriptor descriptor, + T plugin +) { + public PluginHandle { + Objects.requireNonNull(descriptor, "Plugin descriptor cannot be null"); + Objects.requireNonNull(plugin, "Plugin instance cannot be null"); + } +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginValidationResult.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginValidationResult.java new file mode 100644 index 0000000..f55106e --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/PluginValidationResult.java @@ -0,0 +1,98 @@ +package io.gdcc.spi.core.loader; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * Represents the result of validating a set of plugins. + * It's intended for internal use only. + * + * @param The type of the plugin descriptors being validated. + * @param accepted The set of plugins that were successfully validated and accepted. + * @param rejected A mapping of plugins to a list of {@link LoaderProblem} instances providing detailed + * reasons for their rejection. + * @param warning A mapping of plugins to a list of {@link LoaderProblem} instances providing detailed + * reasons why they would usually be rejected but are included by configuration choice. + */ +record PluginValidationResult( + Set accepted, + Map> rejected, + Map> warning +) { + + /** + * Combines multiple {@code PluginValidationResult} instances into a single result by merging their + * accepted keys, rejected problems, and warnings. 
Accepted keys that are also present in either + * rejected or warning collections are excluded from the final result. + * + * @param the type of keys in the validation results + * @param results the array of {@code PluginValidationResult} instances to combine; may include null values + * @return a new {@code PluginValidationResult} instance where all provided results are merged, + * ensuring no overlap between accepted, rejected, and warning entries + */ + @SafeVarargs + static PluginValidationResult merge(PluginValidationResult... results) { + Set mergedAccepted = new HashSet<>(); + Map> mergedRejected = new HashMap<>(); + Map> mergedWarning = new HashMap<>(); + + Arrays.stream(results) + .filter(Objects::nonNull) + .forEach(result -> { + if (result.accepted() != null) { + mergedAccepted.addAll(result.accepted()); + } + + if (result.rejected() != null) { + result.rejected().forEach((key, value) -> + mergedRejected.merge(key, new ArrayList<>(value), (left, right) -> { + left.addAll(right); + return left; + }) + ); + } + + if (result.warning() != null) { + result.warning().forEach((key, value) -> + mergedWarning.merge(key, new ArrayList<>(value), (left, right) -> { + left.addAll(right); + return left; + }) + ); + } + }); + + // Warnings and rejection win over acceptance. + // Note: Keep warnings around even if a rejection also exists for the same descriptor, + // because the diagnostic information is still useful. + mergedAccepted.removeAll(mergedWarning.keySet()); + mergedAccepted.removeAll(mergedRejected.keySet()); + + return new PluginValidationResult<>( + Set.copyOf(mergedAccepted), + copyProblemMap(mergedRejected), + copyProblemMap(mergedWarning) + ); + } + + /** + * Creates a defensive copy of the provided map where each key-value mapping is preserved, and the lists of + * {@link LoaderProblem} are converted into immutable copies. 
+ * + * @param the type of the keys in the map + * @param input the map containing keys and lists of {@link LoaderProblem} that needs to be copied + * @return a new map where the original map's structure is maintained, and all lists are immutable + */ + static Map> copyProblemMap(Map> input) { + Map> copy = new HashMap<>(); + input.forEach((key, value) -> copy.put(key, List.copyOf(value))); + return Map.copyOf(copy); + } + +} diff --git a/core/src/test/java/io/gdcc/spi/core/loader/DescriptorBuilder.java b/core/src/test/java/io/gdcc/spi/core/loader/DescriptorBuilder.java new file mode 100644 index 0000000..4b3d65f --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/loader/DescriptorBuilder.java @@ -0,0 +1,191 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.test.basic.TestContract; +import io.gdcc.spi.core.test.basic.TestPlugin; +import io.gdcc.spi.meta.descriptor.Descriptor; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; + +import java.nio.file.Path; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.IntUnaryOperator; +import java.util.function.UnaryOperator; + +final class DescriptorBuilder { + private Path source; + private String className; + private String kind; + private Map contracts; + private Map requiredProviders; + + private DescriptorBuilder() { + } + + static DescriptorBuilder aDescriptor() { + DescriptorBuilder builder = new DescriptorBuilder(); + builder.source = Path.of("target", "test-classes"); + builder.className = DescriptorFormat.transformClassName(TestPlugin.class); + builder.kind = DescriptorFormat.transformClassName(TestContract.class); + builder.contracts = new LinkedHashMap<>( + Map.of( + DescriptorFormat.transformClassName(TestContract.class), + TestContract.API_LEVEL + )); + builder.requiredProviders = new LinkedHashMap<>(); + return builder; + } + + DescriptorBuilder but() { + DescriptorBuilder copy = new DescriptorBuilder(); 
+ copy.source = this.source; + copy.className = this.className; + copy.kind = this.kind; + copy.contracts = new LinkedHashMap<>(this.contracts); + copy.requiredProviders = new LinkedHashMap<>(this.requiredProviders); + return copy; + } + + DescriptorBuilder withSource(Path source) { + this.source = source; + return this; + } + + DescriptorBuilder withSource(String first, String... more) { + this.source = Path.of(first, more); + return this; + } + + DescriptorBuilder mapSource(UnaryOperator mapper) { + this.source = mapper.apply(this.source); + return this; + } + + DescriptorBuilder withClassName(String className) { + this.className = className; + return this; + } + + DescriptorBuilder withClassName(Class implementationClass) { + this.className = DescriptorFormat.transformClassName(implementationClass); + return this; + } + + DescriptorBuilder mapClassName(UnaryOperator mapper) { + this.className = mapper.apply(this.className); + return this; + } + + DescriptorBuilder withClassPackage(String packageName) { + int lastDot = className.lastIndexOf('.'); + String simpleName = lastDot >= 0 ? className.substring(lastDot + 1) : className; + this.className = packageName == null || packageName.isBlank() + ? simpleName + : packageName + "." 
+ simpleName; + return this; + } + + DescriptorBuilder withKind(String kind) { + this.kind = kind; + return this; + } + + DescriptorBuilder withKind(Class kindClass) { + this.kind = DescriptorFormat.transformClassName(kindClass); + return this; + } + + DescriptorBuilder mapKind(UnaryOperator mapper) { + this.kind = mapper.apply(this.kind); + return this; + } + + DescriptorBuilder withContracts(Map contracts) { + this.contracts = new LinkedHashMap<>(contracts); + return this; + } + + DescriptorBuilder withoutContracts() { + this.contracts.clear(); + return this; + } + + DescriptorBuilder withContract(String contract, int level) { + this.contracts.put(contract, level); + return this; + } + + DescriptorBuilder withContract(Class contractClass, int level) { + return withContract(DescriptorFormat.transformClassName(contractClass), level); + } + + DescriptorBuilder withoutContract(String contract) { + this.contracts.remove(contract); + return this; + } + + DescriptorBuilder withoutContract(Class contractClass) { + return withoutContract(DescriptorFormat.transformClassName(contractClass)); + } + + DescriptorBuilder mapContract(String contract, IntUnaryOperator mapper) { + Integer current = this.contracts.get(contract); + if (current == null) { + throw new IllegalArgumentException("Contract not present: " + contract); + } + this.contracts.put(contract, mapper.applyAsInt(current)); + return this; + } + + DescriptorBuilder mapContract(Class contractClass, IntUnaryOperator mapper) { + return mapContract(DescriptorFormat.transformClassName(contractClass), mapper); + } + + DescriptorBuilder withRequiredProviders(Map requiredProviders) { + this.requiredProviders = new LinkedHashMap<>(requiredProviders); + return this; + } + + DescriptorBuilder withRequiredProvider(String provider, int level) { + this.requiredProviders.put(provider, level); + return this; + } + + DescriptorBuilder withRequiredProvider(Class providerClass, int level) { + return 
withRequiredProvider(DescriptorFormat.transformClassName(providerClass), level); + } + + DescriptorBuilder withoutRequiredProvider(String provider) { + this.requiredProviders.remove(provider); + return this; + } + + DescriptorBuilder withoutRequiredProvider(Class providerClass) { + return withoutRequiredProvider(DescriptorFormat.transformClassName(providerClass)); + } + + DescriptorBuilder mapRequiredProvider(String provider, IntUnaryOperator mapper) { + Integer current = this.requiredProviders.get(provider); + if (current == null) { + throw new IllegalArgumentException("Required provider not present: " + provider); + } + this.requiredProviders.put(provider, mapper.applyAsInt(current)); + return this; + } + + DescriptorBuilder mapRequiredProvider(Class providerClass, IntUnaryOperator mapper) { + return mapRequiredProvider(DescriptorFormat.transformClassName(providerClass), mapper); + } + + SourcedDescriptor build() { + return new SourcedDescriptor( + source, + new Descriptor( + className, + kind, + Map.copyOf(contracts), + Map.copyOf(requiredProviders) + ) + ); + } +} \ No newline at end of file diff --git a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java new file mode 100644 index 0000000..d92ec47 --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java @@ -0,0 +1,547 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.test.basic.TestContract; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +import java.nio.file.Path; +import java.util.List; +import java.util.Map; + +import static io.gdcc.spi.meta.descriptor.DescriptorFormat.*; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class LoaderHelperTest { + + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + + @Nested + class DetermineApiLevel { + + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + + @Test + void determineCoreApiLevel_validClass() { + assertEquals(TestContract.API_LEVEL, LoaderHelper.determineCoreApiLevel(TestContract.class)); + } + + @Test + void determineCoreApiLevel_validFQCN() { + assertEquals( + TestContract.API_LEVEL, + LoaderHelper.determineCoreApiLevel(transformClassName(TestContract.class), classLoader) + ); + } + + @Test + void determineCoreApiLevel_invalidFQCN() { + assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.determineCoreApiLevel("foo.Bar", classLoader) + ); + } + } + + @Nested + class NoClassCollisions { + + @Test + void noCollisions_happyPath() { + // given + List descriptors = List.of( + DescriptorBuilder.aDescriptor().withClassPackage("com.example").build(), + DescriptorBuilder.aDescriptor().withClassPackage("com.foobar").build() + ); + + // when + var results = LoaderHelper.verifyNoClassCollisions(descriptors, classLoader); + + System.out.println(results); + + // then + assertEquals(2, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void noCollisions_pluginClassesCollide() { + // given + // Same FQCN (com.foobar.TestPlugin), but from different sources. 
+ List descriptors = List.of( + DescriptorBuilder.aDescriptor() + .withSource("foobar.jar") + .withClassPackage("com.foobar") + .build(), + DescriptorBuilder.aDescriptor() + .withSource("example.jar") + .withClassPackage("com.foobar") + .build() + ); + + // when + var results = LoaderHelper.verifyNoClassCollisions(descriptors, classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + for (Map.Entry> result : results.rejected().entrySet()) { + List problems = result.getValue(); + assertTrue(problems.stream().allMatch( + problem -> problem.getClass().equals(LoaderProblem.PluginClassNameCollision.class) + )); + } + } + + @Test + void noCollisions_pluginClassesCollideWithCore() { + // given + // Same FQCN as TestPlugin, but from different source + List descriptors = List.of( + DescriptorBuilder.aDescriptor().withSource("foobar.jar").build() + ); + + // when + var results = LoaderHelper.verifyNoClassCollisions(descriptors, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + for (Map.Entry> result : results.rejected().entrySet()) { + List problems = result.getValue(); + assertTrue(problems.stream().allMatch( + problem -> problem.getClass().equals(LoaderProblem.PluginClassNameCollisionWithCore.class) + )); + } + } + } + + @Nested + class IdentifyNonImplementations { + + @Test + void identifyNonImplementations_matchingDescriptorIsAccepted() { + // given + List descriptors = List.of( + DescriptorBuilder.aDescriptor().build() + ); + + // when + var results = LoaderHelper.identifyNonImplementations( + descriptors, + TestContract.class, + enforcingConfiguration() + ); + + // then + assertEquals(1, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void 
identifyNonImplementations_nonMatchingDescriptorIsRejectedWhenEnforced() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withKind("com.example.OtherContract") + .build(); + + // when + var results = LoaderHelper.identifyNonImplementations( + List.of(descriptor), + TestContract.class, + enforcingConfiguration() + ); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + + List problems = results.rejected().get(descriptor); + assertEquals(1, problems.size()); + assertInstanceOf(LoaderProblem.PluginClassMismatch.class, problems.get(0)); + } + + @Test + void identifyNonImplementations_nonMatchingDescriptorIsWarningWhenNotEnforced() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withKind("com.example.OtherContract") + .build(); + + // when + var results = LoaderHelper.identifyNonImplementations( + List.of(descriptor), + TestContract.class, + permissiveConfiguration() + ); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(1, results.warning().size()); + assertEquals(0, results.rejected().size()); + + List problems = results.warning().get(descriptor); + assertEquals(1, problems.size()); + assertInstanceOf(LoaderProblem.PluginClassMismatch.class, problems.get(0)); + } + + @Test + void identifyNonImplementations_mixedDescriptorsSeparatesAcceptedAndRejected() { + // given + SourcedDescriptor matching = DescriptorBuilder.aDescriptor() + .withSource("matching.jar") + .build(); + SourcedDescriptor nonMatching = DescriptorBuilder.aDescriptor() + .withSource("non-matching.jar") + .withKind("com.example.OtherContract") + .build(); + + // when + var results = LoaderHelper.identifyNonImplementations( + List.of(matching, nonMatching), + TestContract.class, + enforcingConfiguration() + ); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(matching)); + 
assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(nonMatching)); + } + + @Test + void identifyNonImplementations_mixedDescriptorsSeparatesAcceptedAndWarnings() { + // given + SourcedDescriptor matching = DescriptorBuilder.aDescriptor() + .withSource("matching.jar") + .build(); + SourcedDescriptor nonMatching = DescriptorBuilder.aDescriptor() + .withSource("non-matching.jar") + .withKind("com.example.OtherContract") + .build(); + + // when + var results = LoaderHelper.identifyNonImplementations( + List.of(matching, nonMatching), + TestContract.class, + permissiveConfiguration() + ); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(matching)); + assertEquals(1, results.warning().size()); + assertTrue(results.warning().containsKey(nonMatching)); + assertEquals(0, results.rejected().size()); + } + } + + @Nested + class VerifyApiLevels { + + @Test + void verifyApiLevels_happyPath() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor().build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyApiLevels_baseContractLevelMismatch() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .mapContract(TestContract.class, level -> level + 1) + .build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + 
assertInstanceOf(LoaderProblem.PluginClassApiLevelMismatch.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyApiLevels_missingBaseContract() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withoutContract(TestContract.class) + .build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + assertInstanceOf(LoaderProblem.PluginClassApiLevelMissing.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyApiLevels_unsupportedContract() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withContract("com.example.UnsupportedContract", 7) + .build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + assertInstanceOf(LoaderProblem.PluginClassUnsupported.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyApiLevels_reportsMultipleProblemsForSingleDescriptor() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withoutContract(TestContract.class) + .withContract("com.example.UnsupportedContract", 7) + .build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + + List problems = 
results.rejected().get(descriptor); + assertEquals(2, problems.size()); + assertTrue(problems.stream().anyMatch( + problem -> problem.getClass().equals(LoaderProblem.PluginClassUnsupported.class) + )); + assertTrue(problems.stream().anyMatch( + problem -> problem.getClass().equals(LoaderProblem.PluginClassApiLevelMissing.class) + )); + } + } + + @Nested + class ToPluginDescriptor { + + interface TestProvider extends CoreProvider { + int API_LEVEL = 7; + } + + @PluginContract(role = PluginContract.Role.BASE) + interface TestBasePlugin extends Plugin { + int API_LEVEL = 3; + } + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { TestBasePlugin.class } + ) + interface TestCapabilityPlugin extends Plugin { + int API_LEVEL = 5; + } + + static class GoodPlugin implements TestBasePlugin, TestCapabilityPlugin { + @Override + public String identity() { + return "good-plugin"; + } + } + + static class NullIdentityPlugin implements TestBasePlugin { + @Override + public String identity() { + return null; + } + } + + static class BlankIdentityPlugin implements TestBasePlugin { + @Override + public String identity() { + return " "; + } + } + + @Test + void toPluginDescriptor_happyPath() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withSource("plugins", "good-plugin.jar") + .withClassName(GoodPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + transformClassName(TestBasePlugin.class), TestBasePlugin.API_LEVEL, + transformClassName(TestCapabilityPlugin.class), TestCapabilityPlugin.API_LEVEL + )) + .withRequiredProviders(Map.of( + transformClassName(TestProvider.class), TestProvider.API_LEVEL + )) + .build(); + GoodPlugin plugin = new GoodPlugin(); + + // when + var result = LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader); + + // then + assertEquals(Path.of("plugins", "good-plugin.jar"), result.sourceLocation()); + assertEquals("good-plugin", result.identity()); + 
assertEquals(GoodPlugin.class, result.pluginClass()); + assertEquals(TestBasePlugin.class, result.kindClass()); + assertEquals(2, result.contracts().size()); + assertEquals(TestBasePlugin.API_LEVEL, result.contracts().get(TestBasePlugin.class)); + assertEquals(TestCapabilityPlugin.API_LEVEL, result.contracts().get(TestCapabilityPlugin.class)); + assertEquals(1, result.requiredProviders().size()); + assertEquals(TestProvider.API_LEVEL, result.requiredProviders().get(TestProvider.class)); + } + + @Test + void toPluginDescriptor_rejectsNullIdentity() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withSource("plugins", "null-identity.jar") + .withClassName(NullIdentityPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + transformClassName(TestBasePlugin.class), TestBasePlugin.API_LEVEL + )) + .build(); + NullIdentityPlugin plugin = new NullIdentityPlugin(); + + // when + then + var exception = assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + assertTrue(exception.getMessage().contains("Plugin identity may not be")); + } + + @Test + void toPluginDescriptor_rejectsBlankIdentity() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withSource("plugins", "blank-identity.jar") + .withClassName(BlankIdentityPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + transformClassName(TestBasePlugin.class), TestBasePlugin.API_LEVEL + )) + .build(); + BlankIdentityPlugin plugin = new BlankIdentityPlugin(); + + // when + then + var exception = assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + assertTrue(exception.getMessage().contains("Plugin identity may not be")); + } + + @Test + void toPluginDescriptor_failsWhenContractClassCannotBeResolved() { + // given + SourcedDescriptor sourceDescriptor = 
DescriptorBuilder.aDescriptor() + .withClassName(GoodPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + "com.example.DoesNotExist", 99 + )) + .build(); + GoodPlugin plugin = new GoodPlugin(); + + // when + then + assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + } + + @Test + void toPluginDescriptor_failsWhenContractClassEmpty() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withClassName(GoodPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of()) + .withRequiredProviders(Map.of()) + .build(); + GoodPlugin plugin = new GoodPlugin(); + + // when + then + assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + } + + @Test + void toPluginDescriptor_failsWhenProviderClassCannotBeResolved() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withClassName(GoodPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + transformClassName(TestBasePlugin.class), TestBasePlugin.API_LEVEL + )) + .withRequiredProviders(Map.of( + "com.example.MissingProvider", 42 + )) + .build(); + GoodPlugin plugin = new GoodPlugin(); + + // when + then + assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + } + } + + static LoaderConfiguration enforcingConfiguration() { + return new LoaderConfiguration( + true, + false, + true, + true, + true + ); + } + + static LoaderConfiguration permissiveConfiguration() { + return new LoaderConfiguration( + false, + false, + true, + true, + false + ); + } +} From 0358a129d25286dda9a6b03c9f6956de69d0b5b2 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 28 Mar 2026 01:49:24 +0100 Subject: [PATCH 30/55] chore(core): add Maven POM file for core module with initial dependencies --- 
core/pom.xml | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 core/pom.xml diff --git a/core/pom.xml b/core/pom.xml new file mode 100644 index 0000000..1b0edc2 --- /dev/null +++ b/core/pom.xml @@ -0,0 +1,33 @@ + + + 4.0.0 + + io.gdcc.spi + parent + 2.1.0-SNAPSHOT + + + core + + + + + + io.gdcc.spi + meta + + + org.slf4j + slf4j-api + + + + org.slf4j + slf4j-simple + test + + + + \ No newline at end of file From cd6b88a2fa2a813356609b65beb5c3517584fe8b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 28 Mar 2026 01:53:01 +0100 Subject: [PATCH 31/55] feat(core): enhance `PluginLoader` with validation and configuration options, add `SourceScanner` Extend `PluginLoader` with new constructors, `preloadPlugins` logic, and enhanced validation for class collisions, implementation checks, and API compatibility. Introduce the `SourceScanner` functional interface for custom filesystem scanning of plugin descriptors (especially during testing). Add nested unit tests for new functionality. 
--- .../io/gdcc/spi/core/loader/PluginLoader.java | 262 ++++++++++++++---- .../gdcc/spi/core/loader/SourceScanner.java | 25 ++ .../spi/core/loader/PluginLoaderTest.java | 197 ++++++++++--- 3 files changed, 399 insertions(+), 85 deletions(-) create mode 100644 core/src/main/java/io/gdcc/spi/core/loader/SourceScanner.java diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java index 43f5fd6..f895fe0 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java @@ -1,25 +1,31 @@ package io.gdcc.spi.core.loader; -import io.gdcc.spi.core.plugin.Plugin; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.descriptor.DescriptorScanner; +import io.gdcc.spi.meta.descriptor.PluginDescriptor; +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; +import io.gdcc.spi.meta.plugin.Plugin; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; -import java.nio.file.AccessDeniedException; import java.nio.file.DirectoryStream; import java.nio.file.Files; -import java.nio.file.LinkOption; -import java.nio.file.NotDirectoryException; import java.nio.file.Path; import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; +import java.util.ServiceConfigurationError; import java.util.ServiceLoader; import java.util.Set; import java.util.regex.PatternSyntaxException; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Loads plugins of a specified type from JAR files in a given directory using the Java ServiceLoader mechanism. 
@@ -42,13 +48,18 @@ */ public class PluginLoader { + private static final Logger logger = LoggerFactory.getLogger(PluginLoader.class); + private final Class pluginClass; private final ClassLoader parentClassLoader; + private final LoaderConfiguration configuration; /** * Constructs a new PluginLoader that will load plugins of the specified type {@code T}. * The parent ClassLoader is set to the current thread's context ClassLoader, which allows * plugins to access classes and resources on the core's classpath. + * It uses the system default configuration for plugin loading behaviors, + * see {@link LoaderConfiguration#defaultConfiguration()}. * * @param pluginClass the Class object representing the plugin type {@code T} to load */ @@ -58,13 +69,61 @@ public PluginLoader(Class pluginClass) { /** * Constructs a new PluginLoader that will load plugins of the specified type {@code T}. + * It uses the system default configuration for plugin loading behaviors, + * see {@link LoaderConfiguration#defaultConfiguration()}. * * @param pluginClass the Class object representing the plugin type {@code T} to load * @param parentClassLoader the ClassLoader to be used as the parent for class loading of plugins */ public PluginLoader(Class pluginClass, ClassLoader parentClassLoader) { + this(pluginClass, LoaderConfiguration.defaultConfiguration(), parentClassLoader); + } + + /** + * Constructs a new PluginLoader that will load plugins of the specified type {@code T}. + * The parent ClassLoader is set to the current thread's context ClassLoader, which allows + * plugins to access classes and resources on the core's classpath. 
+ * + * @param pluginClass the Class object representing the type of plugin {@code T} to load + * @param configuration the LoaderConfiguration specifying custom plugin loading behaviors + */ + public PluginLoader(Class pluginClass, LoaderConfiguration configuration) { + this(pluginClass, configuration, Thread.currentThread().getContextClassLoader()); + } + + /** + * Constructs a new instance of the PluginLoader, which is responsible for loading plugins of the specified type {@code T}. + * + * @param pluginClass the Class object representing the type of plugin {@code T} to load + * @param configuration the LoaderConfiguration specifying custom plugin loading behaviors + * @param parentClassLoader the ClassLoader to be used as the parent for loading plugin classes and resources + */ + public PluginLoader(Class pluginClass, LoaderConfiguration configuration, ClassLoader parentClassLoader) { + + // Basic Verification + Objects.requireNonNull(pluginClass); + Objects.requireNonNull(configuration); + Objects.requireNonNull(parentClassLoader); this.pluginClass = pluginClass; + this.configuration = configuration; this.parentClassLoader = parentClassLoader; + + // Check that the plugin class is a base plugin contract + validatePluginBaseClass(pluginClass); + } + + /** + * Validates that the provided class is a valid Dataverse Plugin Interface. + * The class must be an interface and annotated with {@link PluginContract}. 
+ * + * @param pluginClass the class to validate + * @throws IllegalArgumentException if the class is not an interface or is not annotated with {@code @PluginContract} + */ + static void validatePluginBaseClass(Class pluginClass) { + if (!pluginClass.isInterface() || + !pluginClass.isAnnotationPresent(PluginContract.class) || + pluginClass.getDeclaredAnnotationsByType(PluginContract.class)[0].role() != PluginContract.Role.BASE) + throw new IllegalArgumentException("Class argument must be a Dataverse Plugin Interface annotated with @PluginContract and have a role of BASE"); } /** @@ -77,30 +136,21 @@ public PluginLoader(Class pluginClass, ClassLoader parentClassLoader) { *

* * @param pluginJarsLocation the directory containing JAR files to scan for plugins - * @return a map, linking {@link PluginSource} metadata to the corresponding plugin instance + * @return a list of {@link PluginHandle}, linking each plugin's metadata to the corresponding plugin instance * @throws LoaderException if one or more errors occur during loading, if no plugins * could be successfully loaded, or if there are any duplicates. * Note: The exception may contain multiple causes, each associated with a specific file or failure point */ - public Map load(Path pluginJarsLocation) { - // Find and load plugins from JAR sources. - // "jar:!/" is the syntax required to scan a complete JAR file for classes - Map plugins = load(findSources(pluginJarsLocation)); + public List> load(Path pluginJarsLocation) { - // Make sure there are no duplicate plugins - List problems = new ArrayList<>(); - Set> duplicateGroups = PluginSource.groupDuplicates(plugins.keySet()); - if (!duplicateGroups.isEmpty()) { - duplicateGroups.forEach(group -> problems.add(new LoaderProblem.DuplicateSources(group))); - } + // Find all potential sources within the given location + Map sources = findSources(pluginJarsLocation); - // TODO: add verification logic for API levels and provider services + // Preload the plugins (already validating via metadata before handing off to any classloader) + List descriptors = preloadPlugins(sources.keySet()); - if (problems.isEmpty()) { - return plugins; - } else { - throw new LoaderException(problems); - } + // Load the pre-validated plugins + return load(descriptors, sources); } /** @@ -126,7 +176,8 @@ Map findSources(Path pluginsLocation) { for (Path path : stream) { try { // The URL[] is necessary as classloaders can deal with multiple locations at once. 
- classRoots.put(path, new URL[]{pathToUrl(path, "jar", "!/")}); + // Note: "jar:!/" is the special syntax required to scan a complete JAR file for classes + classRoots.put(path, new URL[]{LoaderHelper.pathToUrl(path, "jar", "!/")}); // This is not likely to happen, as we construct the URL from a valid path only } catch (MalformedURLException e) { problems.add(new LoaderProblem.LocationFailure(path, e)); @@ -135,7 +186,7 @@ Map findSources(Path pluginsLocation) { // In addition: put the directory itself to enable loading exploded archives (mostly useful for testing). // The location must be a browsable directory as otherwise exceptions would have been raised. - classRoots.put(pluginsLocation, new URL[]{pathToUrl(pluginsLocation, "", "")}); + classRoots.put(pluginsLocation, new URL[]{LoaderHelper.pathToUrl(pluginsLocation, "", "")}); // NotDirectoryException | AccessDeniedException is a subset of IOException and covers these cases. } catch (PatternSyntaxException | IOException e) { @@ -148,6 +199,95 @@ Map findSources(Path pluginsLocation) { throw new LoaderException(problems); } + /** + * Preloads plugins by scanning the provided source paths and retrieving their descriptors. + * Uses {@link DescriptorScanner#scanPath(Path)} to scan each source path. + * @see PluginLoader#preloadPlugins(Set, SourceScanner) for more details. + * + * @param sources A set of file paths representing the sources to scan for plugins. + * @return A list of SourcedDescriptor objects representing the preloaded plugins. + * @throws LoaderException If an error occurs while scanning the paths or loading plugin descriptors. + */ + List preloadPlugins(Set sources) throws LoaderException { + return preloadPlugins(sources, DescriptorScanner::scanPath); + } + + /** + * Preloads plugin descriptors from the given list of source paths. 
This method scans the provided + * sources for plugin descriptors, and validates them against various criteria (such as class name collisions, + * proper implementations of the desired plugin class, and API compatibility). It returns a set of plugin + * descriptors either valid or associated with a warning-level incompatibility. + * + *

Problems encountered during the loading process either trigger an exception to abort loading or + * are logged only based on configuration settings.

+ * + * @param sources the list of paths to scan for plugin descriptors + * @return a list of valid plugin descriptors that were successfully scanned and passed all validation checks + * @throws LoaderException if validation problems are encountered and the configuration mandates an abort + */ + List preloadPlugins(Set sources, SourceScanner scanner) throws LoaderException { + // Try to continue as long as possible before erroring out, catching as many problems as possible at once. + List sourceProblems = new ArrayList<>(); + // Scratch space to collect descriptors from the given sources. + List descriptors = new ArrayList<>(); + + // 1. Grab all the plugin descriptors from the given sources + for (Path source : sources) { + try { + descriptors.addAll(scanner.scanPath(source)); + } catch (IOException e) { + sourceProblems.add(new LoaderProblem.LocationFailure(source, e)); + logger.debug("Failed to scan source: {}", source, e); + } + } + logger.debug("Scanning for plugin descriptors found {} plugins: {}", descriptors.size(), descriptors); + + // 2. Verify that no class name collisions exist any plugins to be loaded. + var collisionResult = LoaderHelper.verifyNoClassCollisions(descriptors, this.parentClassLoader); + logger.debug("Scanning for class name collisions results: {}", collisionResult); + + // 3. Filter the descriptors to only include those that implement the desired plugin (base) class. + var implementationResult = LoaderHelper.identifyNonImplementations(descriptors, this.pluginClass, this.configuration); + logger.debug("Scanning for non-implementations results: {}", implementationResult); + + // 4. Verify that every plugin class has a service loader entry. Remove any affected from the list. + // TODO: implement here - or make it a part of the resolving process when the descriptors are loaded + + // 5. Verify that the API level of the plugin matches the core-expected level(s). 
+ var apiLevelResult = LoaderHelper.verifyPluginApiLevels(descriptors, this.pluginClass, this.parentClassLoader); + logger.debug("Scanning for API level matches results: {}", apiLevelResult); + + // 6. Verify all the provider requirements by the plugin are met + // TODO: implement + + // Merge all the different results to receive the final picture which plugins are faulty + var finalResults = PluginValidationResult.merge(collisionResult, implementationResult, apiLevelResult); + // Merge all the problems into one large list, to be wrapped in an exception + finalResults.rejected().forEach((descriptor, problems) -> sourceProblems.addAll(problems)); + + // By default, we should abort now. In case we are asked to keep going by configuration, + // let the logs show any found problems as warnings. + if (configuration.ABORT_ON_COMPATIBILITY_PROBLEMS() && (!sourceProblems.isEmpty() || !finalResults.rejected().isEmpty())) { + throw new LoaderException(sourceProblems); + } + + logger.warn("Pre-loading validation failed for {} plugins with {} problems, continuing with {} valid and {} warning plugins as requested.", + finalResults.rejected().size() + finalResults.warning().size(), + sourceProblems.size(), + finalResults.accepted().size(), + finalResults.warning().size()); + sourceProblems.forEach(problem -> logger.warn(problem.message())); + finalResults.warning().forEach((descriptor, problems) -> + logger.warn("Plugin {} has {} potential compatibility problems: {}", + descriptor, + problems.size(), + problems.stream().map(LoaderProblem::message).collect(Collectors.joining(", ")) + )); + + return Stream.concat(finalResults.accepted().stream(), finalResults.warning().keySet().stream()).toList(); + } + + /** * Loads plugins of type {@code T} from the specified mapping of locations to JAR URLs. 
* Each location is processed by creating a dedicated {@link URLClassLoader}, and plugins are @@ -156,23 +296,24 @@ Map findSources(Path pluginsLocation) { * For each discovered plugin, its {@link Plugin#identity()} must be non-null and non-blank; * otherwise, it is skipped and an error is recorded. * - * The returned map's keys describe the source of each loaded plugin via {@link PluginSource}, + * The returned map's keys describe the source of each loaded plugin via {@link PluginDescriptor}, * associating the plugin's logical identity, class name, and JAR file location. It is the * caller's responsibility to verify no duplicates (by class name or identity) exist before * handing the plugins to the core. * * @param sources a mapping from (JAR) file paths to their corresponding URLs used for class loading - * @return a map from {@link PluginSource} metadata to the corresponding plugin instance + * @return a map from {@link PluginDescriptor} metadata to the corresponding plugin instance * @throws LoaderException if one or more errors occur during loading and no plugins * could be successfully loaded; the exception may contain multiple causes, * each associated with a specific JAR file or failure point */ - Map load(Map sources) { - List problems = new ArrayList<>(); - Map loadedPlugins = new HashMap<>(); + List> load(List descriptors, Map sources) { + List sourceProblems = new ArrayList<>(); + List> loadedPlugins = new ArrayList<>(); // Create URLClassLoader for each file and load the plugin - sources.forEach((location, sourceUrl) -> { + descriptors.forEach(descriptor -> { + URL[] sourceUrl = sources.get(descriptor.sourceLocation()); try (URLClassLoader classLoader = URLClassLoader.newInstance(sourceUrl, this.parentClassLoader)) { // Load all plugins that can be found within the source for type T ServiceLoader loader = ServiceLoader.load(this.pluginClass, classLoader); @@ -181,31 +322,58 @@ Map load(Map sources) { loader.forEach(plugin -> { String identity = 
plugin.identity(); if (identity == null || identity.isBlank()) { - problems.add(new LoaderProblem.LocationFailure(location, new IllegalArgumentException(plugin.getClass().getCanonicalName() + "'s identity cannot be null or blank"))); + sourceProblems.add(new LoaderProblem.LocationFailure( + descriptor.sourceLocation(), + new IllegalArgumentException(plugin.getClass().getCanonicalName() + "'s identity cannot be null or blank"))); return; } - // Save the plugin source metadata and put the loaded plugin into the map - PluginSource source = new PluginSource(location, plugin.getClass().getCanonicalName(), identity); - loadedPlugins.put(source, plugin); + // Save the plugin and its metadata to the set of already loaded plugins + loadedPlugins.add( + new PluginHandle<>( + LoaderHelper.toPluginDescriptor( + descriptor, + plugin, + this.parentClassLoader), + plugin) + ); }); - } catch (IOException | NoSuchMethodError e) { - problems.add(new LoaderProblem.LocationFailure(location, e)); + } catch (IOException | NoSuchMethodError | ServiceConfigurationError | UnsupportedClassVersionError e) { + sourceProblems.add(new LoaderProblem.LocationFailure(descriptor.sourceLocation(), e)); } }); + logger.debug("Loader was able to load {} plugins from {} sources.", loadedPlugins.size(), sources.size()); - if (problems.isEmpty()) { - return loadedPlugins; + // Make sure there are no duplicate plugin identities + PluginValidationResult> duplicationChecks = LoaderHelper.verifyUniqueIdentities(loadedPlugins, configuration); + + // Merge all the different results to receive the final picture which plugins are faulty + // (For now, we only have a single check at this stage) + var finalResults = PluginValidationResult.merge(duplicationChecks); + + // Merge all the problems into one large list, to be wrapped in an exception + finalResults.rejected().forEach((descriptor, problems) -> sourceProblems.addAll(problems)); + + // By default, we should abort now. 
In case we are asked to keep going by configuration, + // let the logs show any found problems as warnings. + if (configuration.ABORT_ON_COMPATIBILITY_PROBLEMS() && (!sourceProblems.isEmpty() || !finalResults.rejected().isEmpty())) { + throw new LoaderException(sourceProblems); } - throw new LoaderException(problems); - } - - static URL pathToUrl(Path path, String urlPrefix, String urlSuffix) throws MalformedURLException { - return new URL( - (urlPrefix == null || urlPrefix.isBlank() ? "" : urlPrefix + ":" ) + - path.toUri().toURL() + - ( urlSuffix == null || urlSuffix.isBlank() ? "" : urlSuffix ) - ); + + logger.warn("Validation after loading failed for {} plugins with {} problems, continuing with {} valid and {} warning plugins as requested.", + finalResults.rejected().size() + finalResults.warning().size(), + sourceProblems.size(), + finalResults.accepted().size(), + finalResults.warning().size()); + sourceProblems.forEach(problem -> logger.warn(problem.message())); + finalResults.warning().forEach((descriptor, problems) -> + logger.warn("Plugin {} has {} potential problems: {}", + descriptor, + problems.size(), + problems.stream().map(LoaderProblem::message).collect(Collectors.joining(", ")) + )); + + return Stream.concat(finalResults.accepted().stream(), finalResults.warning().keySet().stream()).toList(); } } diff --git a/core/src/main/java/io/gdcc/spi/core/loader/SourceScanner.java b/core/src/main/java/io/gdcc/spi/core/loader/SourceScanner.java new file mode 100644 index 0000000..6a5d32d --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/SourceScanner.java @@ -0,0 +1,25 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; + +/** + * Functional interface for scanning a specified path on the filesystem + * to discover and retrieve plugin descriptors. 
+ * + * Implementations of this interface are responsible for performing the + * scanning operation on the provided {@link Path} and returning a list + * of descriptors that represent the discovered plugins. + * + * For now, this is mostly used to allow injecting custom scanners for + * testing purposes. As such, it is kept package-private. + * + * @see io.gdcc.spi.meta.descriptor.DescriptorScanner + */ +@FunctionalInterface +interface SourceScanner { + List scanPath(Path source) throws IOException; +} diff --git a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java index c3c7c6f..aea7b20 100644 --- a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java +++ b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java @@ -1,84 +1,205 @@ package io.gdcc.spi.core.loader; -import io.gdcc.spi.core.plugin.Plugin; -import io.gdcc.spi.core.test.TestPlugin; +import io.gdcc.spi.core.test.basic.TestContract; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Path; import java.util.List; import java.util.Map; +import java.util.Set; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; class PluginLoaderTest { - @Test - void findSources_HappyPath() { - // Given - Class sut = TestPlugin.class; - PluginLoader loader = new PluginLoader<>(sut); - Path directory = Path.of("target/test-classes/" + sut.getPackageName().replaceAll("\\.", 
"/")); + @Nested + class ValidateBaseClass { + @Test + void validatePluginBaseClass_validBaseClass() { + assertDoesNotThrow(() -> PluginLoader.validatePluginBaseClass(TestContract.class)); + } - // When - Map sources = assertDoesNotThrow(() -> loader.findSources(directory)); + @Test + void validatePluginBaseClass_invalidBaseClass_missingAnnotation() { + // Given + interface MissingAnnotationPlugin { + } + + // When & Then + assertThrows(IllegalArgumentException.class, () -> PluginLoader.validatePluginBaseClass(MissingAnnotationPlugin.class)); + } - // Then - assertFalse(sources.isEmpty()); - assertTrue(sources.containsKey(directory)); + @Test + void validatePluginBaseClass_invalidBaseClass_wrongType() { + // Given + class NotAnInterfacePlugin { + } + + // When & Then + assertThrows(IllegalArgumentException.class, () -> PluginLoader.validatePluginBaseClass(NotAnInterfacePlugin.class)); + } + + @Test + void validatePluginBaseClass_invalidBaseClass_wrongRole() { + // Given + @PluginContract(role = PluginContract.Role.CAPABILITY) + interface IncorrectRolePlugin { + } + + // When & Then + assertThrows(IllegalArgumentException.class, () -> PluginLoader.validatePluginBaseClass(IncorrectRolePlugin.class)); + } } - @Test - void findSources_NoSuchFile() { - // Given - Class sut = TestPlugin.class; - PluginLoader loader = new PluginLoader<>(sut); - Path directory = Path.of("nosuchdir"); + @Nested + class FindSources { + @Test + void findSources_HappyPath() { + // Given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut); + Path directory = Path.of("target/test-classes/" + sut.getPackageName().replaceAll("\\.", "/")); + + // When + Map sources = assertDoesNotThrow(() -> loader.findSources(directory)); + + // Then + assertFalse(sources.isEmpty()); + assertTrue(sources.containsKey(directory)); + } - // When - LoaderException ex = assertThrows(LoaderException.class, () -> loader.findSources(directory)); + @Test + void findSources_NoSuchFile() { + // 
Given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut); + Path directory = Path.of("nosuchdir"); + + // When + LoaderException ex = assertThrows(LoaderException.class, () -> loader.findSources(directory)); + + // Then + assertEquals("NoSuchFileException: nosuchdir", ex.getProblems().get(0).message()); + } - // Then - assertEquals("NoSuchFileException: nosuchdir", ex.getProblems().get(0).message()); + @Test + void findSources_NoDirectory() { + // Given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut); + Path notDirectory = Path.of("target/test-classes/" + DescriptorFormat.transformClassName(sut).replaceAll("\\.", "/") + ".class"); + + // When + LoaderException ex = assertThrows(LoaderException.class, () -> loader.findSources(notDirectory)); + + // Then + assertEquals("NotDirectoryException: " + notDirectory, ex.getProblems().get(0).message()); + } } - @Test - void findSources_NoDirectory() { - // Given - Class sut = TestPlugin.class; - PluginLoader loader = new PluginLoader<>(sut); - Path notDirectory = Path.of("target/test-classes/" + sut.getCanonicalName().replaceAll("\\.", "/") + ".class"); + @Nested + class Preload { - // When - LoaderException ex = assertThrows(LoaderException.class, () -> loader.findSources(notDirectory)); + LoaderConfiguration enforcingConfig = new LoaderConfiguration( + true, + false, + true, + true, + true + ); - // Then - assertEquals("NotDirectoryException: " + notDirectory, ex.getProblems().get(0).message()); + LoaderConfiguration permissiveConfig = new LoaderConfiguration( + false, + false, + false, + false, + false + ); + + @Test + void preLoad_throwsOnNormalProblemsWhenEnforcing() { + // given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut, enforcingConfig); + Path rootClassPath = Path.of("target/test-classes/"); + // Should generate class name conflict with core + SourceScanner scanner = source -> 
List.of(DescriptorBuilder.aDescriptor().build()); + + // when + var exception = assertThrows(LoaderException.class, () -> loader.preloadPlugins(Set.of(rootClassPath), scanner)); + + // then + assertInstanceOf(LoaderProblem.PluginClassNameCollisionWithCore.class, exception.getProblems().get(0)); + } + + @Test + void preLoad_throwsOnIOExceptionsWhenEnforcing() { + // given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut, enforcingConfig); + Path rootClassPath = Path.of("target/test-classes/"); + // Should generate class name conflict with core + SourceScanner scanner = source -> { + throw new IOException("Test exception"); + }; + + // when + var exception = assertThrows(LoaderException.class, () -> loader.preloadPlugins(Set.of(rootClassPath), scanner)); + + // then + assertInstanceOf(LoaderProblem.LocationFailure.class, exception.getProblems().get(0)); + } + + @Test + void preLoad_continuesOnProblemsWhenPermissive() { + // given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut, permissiveConfig); + Path rootClassPath = Path.of("target/test-classes/"); + // Should generate class name conflict with core + SourceScanner scanner = source -> List.of(DescriptorBuilder.aDescriptor().build()); + + // when + var result = loader.preloadPlugins(Set.of(rootClassPath), scanner); + + // then + assertEquals(0, result.size()); + } } @Test void load() throws MalformedURLException { // Given - Class sut = Plugin.class; - PluginLoader loader = new PluginLoader<>(sut); + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut); Path rootClassPath = Path.of("target/test-classes/"); + + /* Map sources = Map.of(rootClassPath, new URL[]{PluginLoader.pathToUrl(rootClassPath, null, null)}); // When - Map plugins = loader.load(sources); + Map plugins = loader.load(sources); // Then assertFalse(plugins.isEmpty()); - List pluginSources = plugins.keySet().stream().toList(); + List pluginSources = 
plugins.keySet().stream().toList(); assertEquals(1, pluginSources.size()); - PluginSource source = pluginSources.get(0); + PluginOrigin source = pluginSources.get(0); assertEquals(rootClassPath, source.location()); assertEquals(TestPlugin.class.getName(), source.className()); assertEquals(new TestPlugin().identity(), source.identity()); + + */ } } \ No newline at end of file From c905de0c5c42462909e1339957da7cbdc42bd7be Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 28 Mar 2026 01:55:17 +0100 Subject: [PATCH 32/55] chore(api): introduce Dataverse SPI package module Shipping an all-in-one package for users makes using the SPI API much easier. --- api/pom.xml | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 api/pom.xml diff --git a/api/pom.xml b/api/pom.xml new file mode 100644 index 0000000..d94ea58 --- /dev/null +++ b/api/pom.xml @@ -0,0 +1,42 @@ + + + 4.0.0 + + io.gdcc.spi + parent + 2.1.0-SNAPSHOT + + + io.gdcc + dataverse-spi + + + + + jar + + + + + io.gdcc.spi + meta + + + io.gdcc.spi + core + + + io.gdcc.spi + export + + + + \ No newline at end of file From f345c944611e962c1c6ccd625407db49d6b99839 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Sat, 28 Mar 2026 01:56:54 +0100 Subject: [PATCH 33/55] chore: update parent POM at Git root to manage modules and dependencies Switch parent packaging to POM, define modular structure, and introduce dependency management for streamlined versioning and future extensibility. --- pom.xml | 46 ++++++++++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 16 deletions(-) diff --git a/pom.xml b/pom.xml index 92f2159..d011432 100644 --- a/pom.xml +++ b/pom.xml @@ -10,13 +10,20 @@ 0.13.1 - io.gdcc - dataverse-spi + io.gdcc.spi + parent + 2.1.0-SNAPSHOT - jar + pom Dataverse SPI Plugin API https://dataverse.org + + api + core + export + meta + A package to create out-of-tree Java code for Dataverse Software. 
Plugin projects can use this package as an API dependency just like Jakarta EE APIs if they want to create external plugins. These will be loaded @@ -35,20 +42,27 @@ 17 + + + + io.gdcc.spi + meta + ${project.version} + + + io.gdcc.spi + core + ${project.version} + + + io.gdcc.spi + export + ${project.version} + + + + - - jakarta.json - jakarta.json-api - provided - - - - jakarta.ws.rs - jakarta.ws.rs-api - provided - - - org.junit.jupiter From a3eda7e26ce266b43d2684253f64d6cb0557ceb9 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Mon, 30 Mar 2026 09:06:44 +0200 Subject: [PATCH 34/55] refactor(meta): centralize processor constants into `ProcessorConstants` Extract repeated strings and constants from `PluginContractProcessor` to a new `ProcessorConstants` class for improved maintainability and consistency. Update the (core) `LoaderHelper` to reuse the centralized `API_LEVEL_FIELD_NAME`, coordinating the field name between processor (plugin compile time) and loader (plugin runtime). --- .../io/gdcc/spi/core/loader/LoaderHelper.java | 6 +- .../processor/PluginContractProcessor.java | 94 +++++-------------- .../meta/processor/ProcessorConstants.java | 64 +++++++++++++ 3 files changed, 89 insertions(+), 75 deletions(-) create mode 100644 meta/src/main/java/io/gdcc/spi/meta/processor/ProcessorConstants.java diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java index d122de0..b844a43 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java @@ -5,6 +5,7 @@ import io.gdcc.spi.meta.descriptor.SourcedDescriptor; import io.gdcc.spi.meta.plugin.CoreProvider; import io.gdcc.spi.meta.plugin.Plugin; +import io.gdcc.spi.meta.processor.ProcessorConstants; import java.lang.reflect.Field; import java.net.MalformedURLException; @@ -81,11 +82,10 @@ static int determineCoreApiLevel(Class pluginClass) { } try { // 
Retrieve the field from exactly this class (we don't want to search any superclasses here!) - // TODO: Get the field name from some common place to both loader and annotation processor! - Field apiLevel = pluginClass.getDeclaredField("API_LEVEL"); + Field apiLevel = pluginClass.getDeclaredField(ProcessorConstants.API_LEVEL_FIELD_NAME); return apiLevel.getInt(pluginClass); } catch (NoSuchFieldException | IllegalAccessException e) { - throw new IllegalStateException("Plugin contract class must have an (accessible) API_LEVEL field"); + throw new IllegalStateException("Plugin contract class must have an (accessible) " + ProcessorConstants.API_LEVEL_FIELD_NAME + " field"); } } diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 8a555ed..7b29a8b 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -67,56 +67,6 @@ */ public final class PluginContractProcessor extends AbstractProcessor { - /** - * Fully qualified name of the implementation marker annotation. - * - *

A string constant is used instead of a direct class literal so this processor can stay - * tolerant during bootstrapping and module boundary changes.

- * - * @see io.gdcc.spi.meta.annotations.DataversePlugin - */ - private static final String PLUGIN_IMPLEMENTATION_ANNOTATION = "io.gdcc.spi.meta.annotations.DataversePlugin"; - - /** - * Fully qualified name of the contract annotation found on plugin contract interfaces. - * - * @see io.gdcc.spi.meta.annotations.PluginContract - */ - private static final String PLUGIN_CONTRACT_ANNOTATION = "io.gdcc.spi.meta.annotations.PluginContract"; - - /** - * Fully qualified name of the nested provider requirement annotation used inside - * {@code @PluginContract.providers()}. - * - * @see io.gdcc.spi.meta.annotations.RequiredProvider - */ - private static final String REQUIRED_PROVIDER_ANNOTATION = "io.gdcc.spi.meta.annotations.RequiredProvider"; - - /** - * Fully qualified name of {@code @AutoService}. - * - *

This processor does not depend on AutoService directly. It merely detects the annotation by - * name so it can avoid generating conflicting ServiceLoader resources.

- */ - private static final String AUTO_SERVICE_ANNOTATION = "com.google.auto.service.AutoService"; - - /** - * Fully qualified name of the common plugin super-interface. - * @see io.gdcc.spi.meta.plugin.Plugin - */ - private static final String PLUGIN_INTERFACE = "io.gdcc.spi.meta.plugin.Plugin"; - - /** - * Fully qualified name of the common provider super-interface. - * @see io.gdcc.spi.meta.plugin.CoreProvider - */ - private static final String CORE_PROVIDER_INTERFACE = "io.gdcc.spi.meta.plugin.CoreProvider"; - - /** - * Name of the compile-time constant field carrying the contract version. - */ - private static final String API_LEVEL_FIELD = "API_LEVEL"; - /** * Output directory for generated ServiceLoader files. */ @@ -232,7 +182,7 @@ public SourceVersion getSupportedSourceVersion() { */ @Override public boolean process(Set annotations, RoundEnvironment roundEnv) { - TypeElement markerAnnotation = elements.getTypeElement(PLUGIN_IMPLEMENTATION_ANNOTATION); + TypeElement markerAnnotation = elements.getTypeElement(ProcessorConstants.PLUGIN_IMPLEMENTATION_ANNOTATION); if (markerAnnotation == null) { // If the marker annotation itself cannot be resolved, something is wrong with the // processor classpath. Returning false leaves room for other processors to continue. @@ -334,7 +284,7 @@ private void processImplementation(TypeElement implementation) { // The API level is intentionally read from the compile-time constant present on the // contract interface visible during this compilation. This preserves the build-time // contract snapshot we later need at runtime. - int contractApiLevel = readIntConstant(contract, API_LEVEL_FIELD); + int contractApiLevel = readIntConstant(contract, ProcessorConstants.API_LEVEL_FIELD_NAME); String contractFQCN = contract.getQualifiedName().toString(); // The following is just a precaution. As we look into these during compile time, it's hard to imagine // a scenario where the levels ever actually differ. 
@@ -508,7 +458,7 @@ private void inspectType(TypeElement typeElement) { validateDirectBaseTypeImplementations(typeElement); if (isPluginInterfaceCandidate(typeElement)) { - if (findAnnotationMirror(typeElement, PLUGIN_CONTRACT_ANNOTATION) == null) { + if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { error(typeElement, "Plugin interfaces must declare @PluginContract"); throw new ProcessorException(); } @@ -521,7 +471,7 @@ private void inspectType(TypeElement typeElement) { } if (isPluginImplementationCandidate(typeElement) - && findAnnotationMirror(typeElement, PLUGIN_IMPLEMENTATION_ANNOTATION) == null) { + && findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_IMPLEMENTATION_ANNOTATION) == null) { warning( typeElement, "Plugin implementation should declare @DataversePlugin; processing it implicitly" @@ -549,7 +499,7 @@ private void validateDirectBaseTypeImplementations(TypeElement typeElement) { return; } - if (directlyImplementsType(typeElement, PLUGIN_INTERFACE)) { + if (directlyImplementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)) { error( typeElement, "Plugin implementations must implement a specific plugin contract interface, not Plugin directly" @@ -557,7 +507,7 @@ private void validateDirectBaseTypeImplementations(TypeElement typeElement) { throw new ProcessorException(); } - if (directlyImplementsType(typeElement, CORE_PROVIDER_INTERFACE)) { + if (directlyImplementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE)) { error( typeElement, "Core provider implementations must implement a specific provider interface, not CoreProvider directly" @@ -600,7 +550,7 @@ private boolean isPluginImplementationCandidate(TypeElement typeElement) { if (typeElement.getModifiers().contains(Modifier.ABSTRACT)) { return false; } - return implementsType(typeElement, PLUGIN_INTERFACE) && !isExactType(typeElement, PLUGIN_INTERFACE); + return implementsType(typeElement, 
ProcessorConstants.PLUGIN_INTERFACE) && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE); } /** @@ -611,8 +561,8 @@ private boolean isPluginImplementationCandidate(TypeElement typeElement) { */ private boolean isPluginInterfaceCandidate(TypeElement typeElement) { return typeElement.getKind() == ElementKind.INTERFACE - && implementsType(typeElement, PLUGIN_INTERFACE) - && !isExactType(typeElement, PLUGIN_INTERFACE); + && implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE) + && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE); } /** @@ -623,8 +573,8 @@ && implementsType(typeElement, PLUGIN_INTERFACE) */ private boolean isProviderInterfaceCandidate(TypeElement typeElement) { return typeElement.getKind() == ElementKind.INTERFACE - && implementsType(typeElement, CORE_PROVIDER_INTERFACE) - && !isExactType(typeElement, CORE_PROVIDER_INTERFACE); + && implementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE) + && !isExactType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE); } /** @@ -668,7 +618,7 @@ private boolean isExactType(TypeElement typeElement, String targetTypeName) { * @param typeElement the type currently being inspected */ private void validatePluginContractUsage(TypeElement typeElement) { - if (findAnnotationMirror(typeElement, PLUGIN_CONTRACT_ANNOTATION) == null) { + if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { return; } @@ -688,11 +638,11 @@ private void validatePluginContractUsage(TypeElement typeElement) { * @return {@code true} if the type is a plugin contract */ private boolean isPluginContract(TypeElement typeElement) { - if (findAnnotationMirror(typeElement, PLUGIN_CONTRACT_ANNOTATION) == null) { + if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { return false; } - TypeElement pluginType = elements.getTypeElement(PLUGIN_INTERFACE); + TypeElement pluginType = 
elements.getTypeElement(ProcessorConstants.PLUGIN_INTERFACE); if (pluginType == null) { return false; } @@ -710,7 +660,7 @@ private boolean isPluginContract(TypeElement typeElement) { * @return the extracted in-memory contract model */ private PluginContractModel readPluginContractModel(TypeElement contract) { - AnnotationMirror annotation = findAnnotationMirror(contract, PLUGIN_CONTRACT_ANNOTATION); + AnnotationMirror annotation = findAnnotationMirror(contract, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION); if (annotation == null) { error(contract, "Missing @PluginContract"); throw new ProcessorException(); @@ -759,7 +709,7 @@ private PluginContract.Role readContractRole(AnnotationMirror annotation, TypeEl * @param contract the contract or provider type to validate */ private void validateApiLevelConstant(TypeElement contract) { - readIntConstant(contract, API_LEVEL_FIELD); + readIntConstant(contract, ProcessorConstants.API_LEVEL_FIELD_NAME); } /** @@ -828,9 +778,9 @@ private void validateRequiredContracts( */ private Map readProviderLevels(List providerTypes, TypeElement implementation) { Map result = new LinkedHashMap<>(); - TypeElement coreProviderType = elements.getTypeElement(CORE_PROVIDER_INTERFACE); + TypeElement coreProviderType = elements.getTypeElement(ProcessorConstants.CORE_PROVIDER_INTERFACE); if (coreProviderType == null) { - error(implementation, "Cannot resolve " + CORE_PROVIDER_INTERFACE); + error(implementation, "Cannot resolve " + ProcessorConstants.CORE_PROVIDER_INTERFACE); throw new ProcessorException(); } @@ -842,12 +792,12 @@ private Map readProviderLevels(List providerTypes, error( implementation, "Required provider " + providerType.getQualifiedName() - + " does not implement " + CORE_PROVIDER_INTERFACE + + " does not implement " + ProcessorConstants.CORE_PROVIDER_INTERFACE ); throw new ProcessorException(); } - int apiLevel = readIntConstant(providerType, API_LEVEL_FIELD); + int apiLevel = readIntConstant(providerType, 
ProcessorConstants.API_LEVEL_FIELD_NAME); result.put(providerType.getQualifiedName().toString(), apiLevel); } @@ -889,7 +839,7 @@ private void mergeProviderLevels( * @return {@code true} if {@code @AutoService} is present */ private boolean hasAutoServiceAnnotation(TypeElement implementation) { - return findAnnotationMirror(implementation, AUTO_SERVICE_ANNOTATION) != null; + return findAnnotationMirror(implementation, ProcessorConstants.AUTO_SERVICE_ANNOTATION) != null; } /** @@ -1076,7 +1026,7 @@ private List readRequiredProviders(AnnotationMirror pluginContractA TypeElement providerAnnotationType = asTypeElement(providerAnnotation.getAnnotationType()); if (providerAnnotationType == null - || !providerAnnotationType.getQualifiedName().contentEquals(REQUIRED_PROVIDER_ANNOTATION)) { + || !providerAnnotationType.getQualifiedName().contentEquals(ProcessorConstants.REQUIRED_PROVIDER_ANNOTATION)) { continue; } diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/ProcessorConstants.java b/meta/src/main/java/io/gdcc/spi/meta/processor/ProcessorConstants.java new file mode 100644 index 0000000..e1c2d1a --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/ProcessorConstants.java @@ -0,0 +1,64 @@ +package io.gdcc.spi.meta.processor; + +import io.gdcc.spi.meta.annotations.DataversePlugin; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.annotations.RequiredProvider; +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; + +public class ProcessorConstants { + + private ProcessorConstants() { + /* Intentionally left blank for singleton */ + } + + /** + * Name of the compile-time constant field carrying the contract version. + */ + public static final String API_LEVEL_FIELD_NAME = "API_LEVEL"; + + /** + * Fully qualified name of the implementation marker annotation. + * + *

A string constant is used instead of a direct class literal so this processor can stay + * tolerant during bootstrapping and module boundary changes.

+ * + * @see io.gdcc.spi.meta.annotations.DataversePlugin + */ + public static final String PLUGIN_IMPLEMENTATION_ANNOTATION = DataversePlugin.class.getName(); + + /** + * Fully qualified name of the contract annotation found on plugin contract interfaces. + * + * @see io.gdcc.spi.meta.annotations.PluginContract + */ + public static final String PLUGIN_CONTRACT_ANNOTATION = PluginContract.class.getName(); + + /** + * Fully qualified name of the nested provider requirement annotation used inside + * {@code @PluginContract.providers()}. + * + * @see io.gdcc.spi.meta.annotations.RequiredProvider + */ + public static final String REQUIRED_PROVIDER_ANNOTATION = RequiredProvider.class.getName(); + + /** + * Fully qualified name of {@code @AutoService}. + * + *

The processor does not depend on AutoService directly. It merely detects the annotation by + * name so it can avoid generating conflicting ServiceLoader resources.

+ */ + public static final String AUTO_SERVICE_ANNOTATION = "com.google.auto.service.AutoService"; + + /** + * Fully qualified name of the common plugin super-interface. + * @see io.gdcc.spi.meta.plugin.Plugin + */ + public static final String PLUGIN_INTERFACE = Plugin.class.getName(); + + /** + * Fully qualified name of the common provider super-interface. + * @see io.gdcc.spi.meta.plugin.CoreProvider + */ + public static final String CORE_PROVIDER_INTERFACE = CoreProvider.class.getName(); +} From 9747e21acb8fe516d17e5e69c87707a434c38dff Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 31 Mar 2026 02:56:11 +0200 Subject: [PATCH 35/55] docs(meta): document SPI annotations for plugin contracts and implementations Add comprehensive Javadoc for SPI annotations, including `PluginContract`, `DataversePlugin`, and the new `package-info.java`. Clarify roles, rules, and examples for authoring and implementing Dataverse plugins. --- .../spi/meta/annotations/DataversePlugin.java | 26 +++++- .../spi/meta/annotations/PluginContract.java | 82 ++++++++++++++++--- .../spi/meta/annotations/package-info.java | 56 +++++++++++++ 3 files changed, 152 insertions(+), 12 deletions(-) create mode 100644 meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java index 0344365..59989a4 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java @@ -7,7 +7,31 @@ /** * Marks a concrete plugin implementation class for metadata generation. - * Plugin authors use this annotation to mark their plugin for scanning and loading. + * + *

Plugin authors should place this annotation on every concrete plugin implementation + * class that is meant to be discovered and loaded by Dataverse.

+ * + *

Annotated classes are validated by the {@link io.gdcc.spi.meta.processor.PluginContractProcessor annotation processor} + * and contribute generated compatibility metadata used during plugin loading.

+ * + *

Implementation rules:

+ *
    + *
  • the annotated type must be a {@code public}, non-abstract class,
  • + *
  • it must implement exactly one {@link PluginContract.Role#BASE base contract},
  • + *
  • it may additionally implement any number of {@link PluginContract.Role#CAPABILITY capability contracts}.
  • + *
+ * + *

A capability contract is never loadable on its own. A plugin implementing a capability + * must also implement the capability's required base contract. The base contract is the single hook + * the Dataverse core uses to discover and load your plugin.

+ * + * @implNote Example where {@code Exporter} is a base contract and {@code FooExporter} a capability: + *
{@code
+ * @DataversePlugin
+ * public class MyBarExporter implements Exporter, FooExporter {
+ *     // Your implementation goes here...
+ * }
+ * }
*/ @Retention(RetentionPolicy.SOURCE) @Target(ElementType.TYPE) diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java index 4ae6c77..c52124e 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java @@ -8,36 +8,96 @@ import java.lang.annotation.Target; /** - * Declares that an SPI interface is a versioned plugin contract. + * Declares a versioned plugin contract interface. * - *

The contract API level is taken from the interface's {@code API_LEVEL} - * constant by the annotation processor.

+ *

A plugin contract defines either a directly loadable plugin kind + * ({@link Role#BASE}) or an additional, non-loadable capability + * ({@link Role#CAPABILITY}).

+ * + *

The annotated type must be an {@code interface} extending {@link Plugin} + * and must declare a compile-time constant primitive {@code int API_LEVEL} field. + *

+ * + *

General contract rules:

+ *
    + *
  1. Plugin contracts may only be declared on interfaces.
  2. + *
  3. Plugin contracts must extend {@link Plugin}.
  4. + *
  5. Plugin contracts may not extend other plugin contracts. (One exception, see below.)
  6. + *
  7. A plugin implementation must implement exactly one {@link Role#BASE base contract}.
  8. + *
+ * + *

Base contracts are used as the unique service-loading identity of a plugin. + * Capability contracts are never loaded directly; they add optional functionality + * and are discovered through generated plugin metadata.

+ * + *

Capability rules:

+ *
    + *
  1. A capability contract must declare {@link #requires()}.
  2. + *
  3. A capability must require exactly one base contract.
  4. + *
  5. A capability may extend the required base contract to provide default implementations.
  6. + *
  7. For now, requiring or extending another capability is not supported.
  8. + *
  9. A plugin implementing a capability must also implement its required base contract.
  10. + *
+ * + * Note: this annotation cannot be used repeatedly on the same type. + * + * @implNote Example base contract: + *
{@code
+ * @PluginContract(role = PluginContract.Role.BASE)
+ * public interface FooBar extends Plugin {
+ *     int API_LEVEL = 1;
+ * }
+ * }
+ * Example capability contract: + *
{@code
+ * @PluginContract(
+ *     role = PluginContract.Role.CAPABILITY,
+ *     requires = { FooBar.class }
+ * )
+ * public interface BarBeque extends Plugin {
+ *     int API_LEVEL = 1;
+ *
+ *     default String getMediaType() {
+ *         return "application/bbq";
+ *     }
+ * }
+ * }
*/ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) public @interface PluginContract { - + /** - * Whether this contract is the primary plugin kind or an optional capability. + * Declares whether this contract is a directly loadable base contract or an additional capability contract. */ Role role(); - + /** - * Other plugin contracts that must also be implemented if this contract is implemented. - * Example: a {@link Role#CAPABILITY} contract should ask for a {@link Role#BASE} contract to be implemented. + * Other plugin contracts that must also be implemented when this contract is implemented. + * + *

For {@link Role#CAPABILITY capabilities}, this must currently contain exactly one + * required {@link Role#BASE base contract}. Capabilities are not directly loadable and + * therefore must always be paired with their base contract.

*/ Class[] requires() default {}; - + /** - * Core providers required by this contract. + * Core provider contracts required by this plugin contract. */ RequiredProvider[] providers() default {}; /** - * Distinguishes a base plugin contract from optional capability contracts. + * Distinguishes directly loadable base contracts from additional capability contracts. */ enum Role { + /** + * A directly loadable plugin contract. + */ BASE, + + /** + * An additional plugin capability that refines behavior but is not directly loadable. + */ CAPABILITY } } diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java new file mode 100644 index 0000000..bac2b22 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java @@ -0,0 +1,56 @@ +/** + * Annotations used to declare Dataverse plugin contracts, plugin implementations, + * and required core providers. + * + *

This package defines the author-facing SPI model:

+ *
    + *
  • a {@linkplain io.gdcc.spi.meta.annotations.PluginContract.Role#BASE base contract} + * is the unique, directly loadable identity of a plugin,
  • + *
  • a {@linkplain io.gdcc.spi.meta.annotations.PluginContract.Role#CAPABILITY capability contract} + * adds optional functionality but is never loaded directly,
  • + *
  • a {@linkplain io.gdcc.spi.meta.annotations.DataversePlugin plugin implementation} + * must implement exactly one base contract and may additionally implement compatible capabilities,
  • + *
  • a {@linkplain io.gdcc.spi.meta.annotations.RequiredProvider required provider} + * declares Dataverse infrastructure contracts needed by a plugin contract.
  • + *
+ * + *

Only base contracts are used as plugin loading identities.

+ * + *

Contract interfaces must extend {@link io.gdcc.spi.meta.plugin.Plugin}, declare + * {@link io.gdcc.spi.meta.annotations.PluginContract}, and provide a compile-time + * {@code int API_LEVEL} constant. Plugin contracts must not extend other plugin contracts + * (with the single exception of a capability extending a required base contract).

+ * + *

Capabilities are attached to a plugin through normal Java interface implementation. + * This allows SPI authors to provide additional methods and default implementations + * without introducing ambiguity into plugin loading. If multiple implemented interfaces + * contribute conflicting default methods, the plugin implementation class must resolve + * that conflict explicitly.

+ * + *

Example with extending base contract:

+ *
{@code
+ * @PluginContract(role = PluginContract.Role.BASE)
+ * public interface FooBar extends Plugin {
+ *     int API_LEVEL = 1;
+ *     String getMediaType();
+ * }
+ *
+ * @PluginContract(
+ *     role = PluginContract.Role.CAPABILITY,
+ *     requires = { FooBar.class }
+ * )
+ * public interface BarBeque extends FooBar {
+ *     int API_LEVEL = 1;
+ *
+ *     default String getMediaType() {
+ *         return "application/bbq";
+ *     }
+ * }
+ *
+ * @DataversePlugin
+ * public class Grill implements FooBar, BarBeque {
+ *     // no override needed unless another default conflicts
+ * }
+ * }
+ */ +package io.gdcc.spi.meta.annotations; From 7ef5f6858bec1c9183f99229c95885cc3eb5cbf8 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 31 Mar 2026 02:59:39 +0200 Subject: [PATCH 36/55] feat(meta): enforce contract graph validation in `PluginContractProcessor` Add comprehensive validation rules for plugin contract hierarchies, ensuring compliance with `Base` and `Capability` roles. Includes safeguards for `@PluginContract` usage, required contracts implementation, and package locality constraints. Allow base contract extension by a capability contract when it is required. Extend Javadoc and restructure the processor for improved readability and future maintainability. --- .../processor/PluginContractProcessor.java | 720 ++++++++++++++---- 1 file changed, 586 insertions(+), 134 deletions(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 7b29a8b..063a868 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -57,6 +57,22 @@ *
  • all required provider API levels.
  • * * + *

    Contract graph rules

    + * + *

    The processor enforces a strict contract hierarchy:

    + *
      + *
    • A {@link PluginContract.Role#BASE base contract} is the unique, directly loadable identity + * of a plugin. Base contracts may not extend other contracts and may not declare + * {@code requires}.
    • + *
    • A {@link PluginContract.Role#CAPABILITY capability contract} adds optional functionality. + * It must declare exactly one base contract in {@code requires}. A capability may optionally + * extend its required base contract in the Java type hierarchy to provide default + * implementations for methods declared by the base. A capability may not extend another + * capability.
    • + *
    + * + *

    Service registration

    + * *

    Service registration generation is intentionally cautious. If any implementation of a given base * contract uses {@code @AutoService}, this processor suppresses generated service output for that * entire contract to avoid two processors writing the same {@code META-INF/services/...} file.

    @@ -224,6 +240,8 @@ public boolean process(Set annotations, RoundEnvironment return false; } + // ── Implementation processing ─────────────────────────────────────────────── + /** * Processes one plugin implementation class. * @@ -271,7 +289,7 @@ private void processImplementation(TypeElement implementation) { if (baseContract != null) { error( implementation, - "Implementation must not implement multiple base plugin contracts: " + "Implementation must implement exactly one Role.BASE @PluginContract, but implements: " + baseContract.getQualifiedName() + " and " + contract.getQualifiedName() ); throw new ProcessorException(); @@ -337,6 +355,9 @@ private void processImplementation(TypeElement implementation) { /** * Validates the basic structural requirements for a plugin implementation. * + *

    A valid plugin implementation must be a public, non-abstract class. These constraints + * ensure that the ServiceLoader can instantiate the class at runtime.

    + * * @param implementation the implementation class to validate */ private void validateImplementationClass(TypeElement implementation) { @@ -407,6 +428,44 @@ private Set collectImplementedContracts(TypeElement implementation) return result; } + /** + * Validates that all contracts required by the current contract are also implemented + * by the plugin implementation class. + * + *

    This ensures that a plugin implementing a capability also implements the capability's + * required base contract, which is the only loadable identity for the plugin.

    + * + * @param implementation the concrete plugin implementation + * @param contract the contract currently being validated + * @param allImplementedContracts all discovered contracts of the implementation + * @param model the parsed model of the current contract + */ + private void validateRequiredContracts( + TypeElement implementation, + TypeElement contract, + Set allImplementedContracts, + PluginContractModel model + ) { + Set implementedNames = new LinkedHashSet<>(); + for (TypeElement implemented : allImplementedContracts) { + implementedNames.add(implemented.getQualifiedName().toString()); + } + + for (TypeElement requiredContract : model.requiredContracts()) { + String requiredName = requiredContract.getQualifiedName().toString(); + if (!implementedNames.contains(requiredName)) { + error( + implementation, + "Implementation of contract " + contract.getQualifiedName() + + " also requires contract " + requiredName + ); + throw new ProcessorException(); + } + } + } + + // ── Type hierarchy inspection ─────────────────────────────────────────────── + /** * Traverses a type hierarchy and applies project-wide validation rules. * @@ -451,6 +510,10 @@ private void inspectTypeHierarchy(TypeElement typeElement) { /** * Applies validation rules to a single type discovered during hierarchy inspection. * + *

    This method dispatches to specialized validators based on the nature of the type: + * contract interfaces, provider interfaces, and plugin implementation candidates each + * have their own set of rules.

    + * * @param typeElement the type to inspect */ private void inspectType(TypeElement typeElement) { @@ -459,11 +522,12 @@ private void inspectType(TypeElement typeElement) { if (isPluginInterfaceCandidate(typeElement)) { if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { - error(typeElement, "Plugin interfaces must declare @PluginContract"); + error(typeElement, "Interfaces extending Plugin must declare @PluginContract"); throw new ProcessorException(); } validateApiLevelConstant(typeElement); + validateContractGraph(typeElement); } if (isProviderInterfaceCandidate(typeElement)) { @@ -484,178 +548,431 @@ && findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_IMPLEMENTATION_AN } } + // ── Contract graph validation ─────────────────────────────────────────────── + /** - * Rejects direct implementations of the foundational base types {@code Plugin} and - * {@code CoreProvider}. + * Validates the contract graph rules for a {@code @PluginContract}-annotated interface. * - *

    These two types are infrastructure-level marker/base interfaces only. Loadable plugins - * and concrete providers must instead implement a specific contract interface extending one - * of these base types. Otherwise, no meaningful compatibility contract can be derived.

    + *

    This is the central method enforcing the structural rules of the contract hierarchy. + * It reads the contract's role and delegates to role-specific validation:

    * - * @param typeElement the type currently being inspected + *
      + *
    • BASE contracts may not declare {@code requires} and may not extend + * other contracts.
    • + *
    • CAPABILITY contracts must declare exactly one base contract in + * {@code requires}. They may optionally extend their required base contract in the + * Java type hierarchy (to provide default implementations), but may not extend another + * capability.
    • + *
    + * + *

    Additionally, for capabilities, this method enforces package locality: the capability + * must reside in the same package or a subpackage of its required base contract.

    + * + * @param contract the contract interface to validate */ - private void validateDirectBaseTypeImplementations(TypeElement typeElement) { - if (typeElement.getKind() != ElementKind.CLASS) { + private void validateContractGraph(TypeElement contract) { + AnnotationMirror annotation = findAnnotationMirror(contract, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION); + if (annotation == null) { return; } - if (directlyImplementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)) { + PluginContract.Role role = readContractRole(annotation, contract); + List requiredContracts = readClassArrayAnnotationValue(annotation, "requires"); + + // Collect all parent types that are themselves plugin contracts. This is used to enforce + // the extension rules: which contracts may extend which other contracts. + List extendedContracts = findExtendedPluginContracts(contract); + + if (role == PluginContract.Role.BASE) { + validateBaseContractGraph(contract, requiredContracts, extendedContracts); + } else { + validateCapabilityContractGraph(contract, requiredContracts, extendedContracts); + } + } + + /** + * Validates the graph rules specific to a {@link PluginContract.Role#BASE base contract}. + * + *

    Base contracts are the loading identities of plugins. They form the roots of the + * contract graph and therefore:

    + *
      + *
    • must not declare {@code requires} — they do not depend on other contracts,
    • + *
    • must not extend other contracts — there can only be one loading identity per plugin.
    • + *
    + * + * @param contract the base contract being validated + * @param requiredContracts the contracts listed in the {@code requires} attribute + * @param extendedContracts parent contracts found in the Java type hierarchy + */ + private void validateBaseContractGraph( + TypeElement contract, + List requiredContracts, + List extendedContracts + ) { + // Base contracts are self-contained loading identities and may not require other contracts. + if (!requiredContracts.isEmpty()) { error( - typeElement, - "Plugin implementations must implement a specific plugin contract interface, not Plugin directly" + contract, + "Base contract " + contract.getQualifiedName() + + " may not require other contracts; only capabilities may declare requires" ); throw new ProcessorException(); } - if (directlyImplementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE)) { + // Base contracts must not extend other contracts. Allowing this would create ambiguous + // loading identities — the plugin loader would not know which base to register under. + if (!extendedContracts.isEmpty()) { + TypeElement parent = extendedContracts.get(0); error( - typeElement, - "Core provider implementations must implement a specific provider interface, not CoreProvider directly" + contract, + "Contract " + parent.getQualifiedName() + + " may not be extended by base contract " + contract.getQualifiedName() + + "; base contracts must not extend other contracts" ); throw new ProcessorException(); } } /** - * Checks whether a type directly declares the given interface in its {@code implements} clause. + * Validates the graph rules specific to a {@link PluginContract.Role#CAPABILITY capability contract}. * - *

    This is stricter than assignability: it only matches explicit direct implementation and is - * used to reject classes that target the framework base interfaces {@code Plugin} or - * {@code CoreProvider} directly.

    + *

    Capabilities are non-loadable extensions. The rules ensure a clean, unambiguous graph:

    + *
      + *
    • A capability must require exactly one base contract.
    • + *
    • A capability may extend its required base contract to provide default implementations + * for methods declared by the base.
    • + *
    • A capability may not extend another capability contract.
    • + *
    • A capability must reside in the same package or a subpackage of its required base.
    • + *
    * - * @param typeElement the type to inspect - * @param targetTypeName the fully qualified interface name to look for - * @return {@code true} if the type directly implements the target interface + * @param contract the capability contract being validated + * @param requiredContracts the contracts listed in the {@code requires} attribute + * @param extendedContracts parent contracts found in the Java type hierarchy */ - private boolean directlyImplementsType(TypeElement typeElement, String targetTypeName) { - for (TypeMirror interfaceType : typeElement.getInterfaces()) { - TypeElement interfaceElement = asTypeElement(interfaceType); - if (interfaceElement != null && interfaceElement.getQualifiedName().contentEquals(targetTypeName)) { - return true; - } - } - return false; + private void validateCapabilityContractGraph( + TypeElement contract, + List requiredContracts, + List extendedContracts + ) { + // A capability must require exactly one base contract. This links the capability to its + // loading identity and ensures the plugin loader can always resolve the plugin's kind. + TypeElement requiredBase = validateCapabilityRequires(contract, requiredContracts); + + // Validate the Java extends hierarchy: a capability may extend its required base, but + // must not extend any other contract (especially not another capability). + validateCapabilityExtensions(contract, extendedContracts, requiredBase); + + // Capabilities must be co-located with their base contract so that SPI authors maintain + // a cohesive package structure. + validatePackageLocality(contract, requiredBase); } /** - * Determines whether a type qualifies as an implementation candidate for a plugin. + * Validates the {@code requires} attribute of a capability contract. * - * @param typeElement the type to inspect - * @return {@code true} if the type is a concrete class implementing {@code Plugin} + *

    A capability must require exactly one entry, and that entry must be a base contract + * interface — not a capability, not a class, and not the capability itself.

    + * + * @param contract the capability contract being validated + * @param requiredContracts the contracts listed in the {@code requires} attribute + * @return the single required base contract */ - private boolean isPluginImplementationCandidate(TypeElement typeElement) { - if (typeElement.getKind() != ElementKind.CLASS) { - return false; + private TypeElement validateCapabilityRequires( + TypeElement contract, + List requiredContracts + ) { + String errorMessage = "Capability contract %s must require single base @PluginContract interface".formatted(contract.getQualifiedName()); + + // Exactly one entry is required. Zero entries, multiple entries, or entries that are + // not base contracts all fail with the same message. + if (requiredContracts.size() != 1) { + error(contract, errorMessage); + throw new ProcessorException(); } - if (typeElement.getModifiers().contains(Modifier.ABSTRACT)) { - return false; + + TypeElement required = requiredContracts.get(0); + + // The required type must be an interface (not a class) and must carry @PluginContract. + if (required.getKind() != ElementKind.INTERFACE) { + error(contract, errorMessage); + throw new ProcessorException(); } - return implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE) && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE); + + // Self-references are meaningless and would create a cycle. + if (required.getQualifiedName().contentEquals(contract.getQualifiedName())) { + error(contract, errorMessage); + throw new ProcessorException(); + } + + // The required contract must be annotated with @PluginContract. + AnnotationMirror requiredAnnotation = findAnnotationMirror( + required, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION + ); + if (requiredAnnotation == null) { + error(contract, errorMessage); + throw new ProcessorException(); + } + + // The required contract must have the BASE role. Capabilities requiring other capabilities + // are not supported. 
+ PluginContract.Role requiredRole = readContractRole(requiredAnnotation, required); + if (requiredRole != PluginContract.Role.BASE) { + error(contract, errorMessage); + throw new ProcessorException(); + } + + return required; } /** - * Determines whether a type is a plugin interface candidate that must declare {@code @PluginContract}. + * Validates the Java {@code extends} hierarchy of a capability contract. * - * @param typeElement the type to inspect - * @return {@code true} if the type is an interface extending {@code Plugin} - */ - private boolean isPluginInterfaceCandidate(TypeElement typeElement) { - return typeElement.getKind() == ElementKind.INTERFACE - && implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE) - && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE); - } - - /** - * Determines whether a type is a provider interface candidate that must declare {@code API_LEVEL}. + *

    A capability may extend its required base contract — this is the mechanism that allows + * the capability to provide default implementations for methods declared by the base, and + * Java's type system will correctly resolve them without requiring bridge methods in the + * plugin implementation class.

    * - * @param typeElement the type to inspect - * @return {@code true} if the type is an interface extending {@code CoreProvider} + *

    However, a capability may not extend another capability contract. Capability-to-capability + * inheritance is not supported in the current model.

    + * + * @param contract the capability contract being validated + * @param extendedContracts all parent types that are plugin contracts + * @param requiredBase the single required base contract from the {@code requires} attribute */ - private boolean isProviderInterfaceCandidate(TypeElement typeElement) { - return typeElement.getKind() == ElementKind.INTERFACE - && implementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE) - && !isExactType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE); + private void validateCapabilityExtensions( + TypeElement contract, + List extendedContracts, + TypeElement requiredBase + ) { + for (TypeElement parent : extendedContracts) { + AnnotationMirror parentAnnotation = findAnnotationMirror( + parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION + ); + if (parentAnnotation == null) { + // Should not happen since findExtendedPluginContracts only returns annotated types, + // but guard defensively. + continue; + } + + PluginContract.Role parentRole = readContractRole(parentAnnotation, parent); + + if (parentRole == PluginContract.Role.CAPABILITY) { + // Capability-to-capability extension is not allowed. Each capability is a + // standalone extension point attached to a base contract. + error( + contract, + "Contract " + parent.getQualifiedName() + + " may not be extended by capability contract " + contract.getQualifiedName() + + "; capabilities may not extend other capabilities" + ); + throw new ProcessorException(); + } + + if (parentRole == PluginContract.Role.BASE) { + // A capability may extend a base contract, but only if that base is the same one + // declared in requires. Extending an unrelated base would silently introduce a + // second loading identity into the hierarchy. 
+ if (!parent.getQualifiedName().contentEquals(requiredBase.getQualifiedName())) { + error( + contract, + "Capability contract " + contract.getQualifiedName() + + " extends base contract " + parent.getQualifiedName() + + " but requires " + requiredBase.getQualifiedName() + + "; the extended base must match the required base" + ); + throw new ProcessorException(); + } + // Extension matches requires — this is the allowed case. + } + } + + // If the capability extends a base contract but does not declare it in requires, the + // contract graph would be inconsistent. Check the reverse: if the capability extends + // a base, that base must appear in requires (already validated above). But if the + // capability extends a base that is NOT in extendedContracts check, we need to also + // check: does the contract extend the required base without declaring requires? + // Actually, this direction is already covered: we validate requires first, and then + // check that any extended base matches requires. The remaining case is: the capability + // extends a base but forgot requires entirely — that's caught by validateCapabilityRequires. + + // Additional check: if the capability extends a base contract in its Java type hierarchy, + // that base MUST be declared in requires. This handles the case where the capability + // extends a base but declares a different (or no) base in requires. 
+ for (TypeElement parent : extendedContracts) { + AnnotationMirror parentAnnotation = findAnnotationMirror( + parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION + ); + if (parentAnnotation == null) { + continue; + } + PluginContract.Role parentRole = readContractRole(parentAnnotation, parent); + if (parentRole == PluginContract.Role.BASE + && !parent.getQualifiedName().contentEquals(requiredBase.getQualifiedName())) { + error( + contract, + "Capability contract " + contract.getQualifiedName() + + " must require extended base contract interface " + parent.getQualifiedName() + ); + throw new ProcessorException(); + } + } } /** - * Tests whether the given type is assignable to another type identified by fully qualified name. + * Finds all direct parent interfaces of the given contract that are themselves plugin contracts. * - * @param typeElement the source type - * @param targetTypeName the fully qualified target type name - * @return {@code true} if the source type is assignable to the target type + *

    This only looks at the directly declared {@code extends} clause of the interface, not at + * transitive ancestors. The common super-interface {@code Plugin} is excluded because it is + * a framework marker, not a contract.

    + * + * @param contract the contract interface to inspect + * @return parent types that carry {@code @PluginContract}, in declaration order */ - private boolean implementsType(TypeElement typeElement, String targetTypeName) { - TypeElement targetType = elements.getTypeElement(targetTypeName); - if (targetType == null) { - return false; + private List findExtendedPluginContracts(TypeElement contract) { + List result = new ArrayList<>(); + for (TypeMirror iface : contract.getInterfaces()) { + TypeElement parent = asTypeElement(iface); + if (parent == null) { + continue; + } + + // Skip the Plugin marker interface — it is a framework type, not a contract. + if (parent.getQualifiedName().contentEquals(ProcessorConstants.PLUGIN_INTERFACE)) { + continue; + } + + // Only consider interfaces that are annotated with @PluginContract. + if (findAnnotationMirror(parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) != null) { + result.add(parent); + } } - - return types.isAssignable( - types.erasure(typeElement.asType()), - types.erasure(targetType.asType()) - ); + return result; } /** - * Checks whether the given type is exactly the named type itself, not merely a subtype. + * Validates that the capability resides in the same package or a subpackage of its required + * base contract. * - * @param typeElement the type to inspect - * @param targetTypeName the fully qualified target type name - * @return {@code true} if both names are identical + *

    This rule enforces package locality for SPI cohesion: capabilities should be defined + * close to their base contract so that related contracts form a coherent API surface.

    + * + * @param capability the capability contract being validated + * @param requiredBase the required base contract */ - private boolean isExactType(TypeElement typeElement, String targetTypeName) { - return typeElement.getQualifiedName().contentEquals(targetTypeName); + private void validatePackageLocality(TypeElement capability, TypeElement requiredBase) { + String capabilityPackage = getPackageName(capability); + String basePackage = getPackageName(requiredBase); + + // The capability must be in the same package or a subpackage of the base. + // "test.export.xml".startsWith("test.export.") covers subpackages. + // Direct equality covers the same-package case. + boolean sameOrSubpackage = capabilityPackage.equals(basePackage) + || capabilityPackage.startsWith(basePackage + "."); + + if (!sameOrSubpackage) { + error( + capability, + "Capability contract " + capability.getQualifiedName() + + " and its required base contract " + requiredBase.getQualifiedName() + + " must share same package path; " + capabilityPackage + + " is not within " + basePackage + ); + throw new ProcessorException(); + } } + // ── Direct base type validation ───────────────────────────────────────────── + /** - * Verifies that {@code @PluginContract} is only used on interfaces. + * Rejects direct implementations of the foundational base types {@code Plugin} and + * {@code CoreProvider}. * - *

    Although the annotation is intended for SPI interfaces, Java's annotation target model - * cannot express "interfaces only". This processor therefore enforces the rule explicitly and - * fails compilation when the annotation is placed on classes, enums, records, or other - * non-interface types.

    + *

    These two types are infrastructure-level marker/base interfaces only. Loadable plugins + * and concrete providers must instead implement a specific contract interface extending one + * of these base types. Otherwise, no meaningful compatibility contract can be derived.

    * * @param typeElement the type currently being inspected */ - private void validatePluginContractUsage(TypeElement typeElement) { - if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { + private void validateDirectBaseTypeImplementations(TypeElement typeElement) { + if (typeElement.getKind() != ElementKind.CLASS) { return; } - if (typeElement.getKind() != ElementKind.INTERFACE) { - error(typeElement, "@PluginContract may only be declared on interfaces"); + if (directlyImplementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)) { + error( + typeElement, + "Plugin implementations must implement a specific plugin contract interface, not Plugin directly" + ); + throw new ProcessorException(); + } + + if (directlyImplementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE)) { + error( + typeElement, + "Core provider implementations must implement a specific provider interface, not CoreProvider directly" + ); throw new ProcessorException(); } } /** - * Determines whether the given type is a plugin contract. + * Checks whether a type directly declares the given interface in its {@code implements} clause. * - *

    A type qualifies as a plugin contract only when it is annotated with - * {@code @PluginContract} and is assignable to the common plugin super-interface.

    + *

    This is stricter than assignability: it only matches explicit direct implementation and is + * used to reject classes that target the framework base interfaces {@code Plugin} or + * {@code CoreProvider} directly.

    * - * @param typeElement the type to test - * @return {@code true} if the type is a plugin contract + * @param typeElement the type to inspect + * @param targetTypeName the fully qualified interface name to look for + * @return {@code true} if the type directly implements the target interface */ - private boolean isPluginContract(TypeElement typeElement) { + private boolean directlyImplementsType(TypeElement typeElement, String targetTypeName) { + for (TypeMirror interfaceType : typeElement.getInterfaces()) { + TypeElement interfaceElement = asTypeElement(interfaceType); + if (interfaceElement != null && interfaceElement.getQualifiedName().contentEquals(targetTypeName)) { + return true; + } + } + return false; + } + + // ── @PluginContract usage validation ──────────────────────────────────────── + + /** + * Verifies that {@code @PluginContract} is only used on interfaces that extend {@code Plugin}. + * + *

    Although the annotation targets {@code ElementType.TYPE}, Java's annotation target model + * cannot express "interfaces extending Plugin only". This processor therefore enforces the + * rule explicitly and fails compilation when the annotation is placed on classes, enums, + * records, or interfaces that do not extend {@code Plugin}.

    + * + * @param typeElement the type currently being inspected + */ + private void validatePluginContractUsage(TypeElement typeElement) { if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { - return false; + return; } - TypeElement pluginType = elements.getTypeElement(ProcessorConstants.PLUGIN_INTERFACE); - if (pluginType == null) { - return false; + // @PluginContract must be on an interface that extends Plugin. + if (typeElement.getKind() != ElementKind.INTERFACE + || !implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)) { + error( + typeElement, + "@PluginContract may only be declared on interfaces extending Plugin" + ); + throw new ProcessorException(); } - - return types.isAssignable( - types.erasure(typeElement.asType()), - types.erasure(pluginType.asType()) - ); } + // ── Contract model reading ────────────────────────────────────────────────── + /** * Reads and validates the metadata of one plugin contract interface. * + *

    This extracts the role, required contracts, and provider dependencies from the + * {@code @PluginContract} annotation. It also validates the presence and correctness + * of the {@code API_LEVEL} compile-time constant.

    + * * @param contract the contract interface * @return the extracted in-memory contract model */ @@ -678,6 +995,10 @@ private PluginContractModel readPluginContractModel(TypeElement contract) { /** * Reads the {@code role} member of a {@code @PluginContract} annotation. * + *

    During annotation processing, enum-valued annotation members appear as + * {@link VariableElement} instances. This method extracts the constant name and + * maps it back to the {@link PluginContract.Role} enum.

    + * * @param annotation the contract annotation mirror * @param contract the annotated contract, used for diagnostics * @return the parsed contract role @@ -703,6 +1024,8 @@ private PluginContract.Role readContractRole(AnnotationMirror annotation, TypeEl } } + // ── API level validation ──────────────────────────────────────────────────── + /** * Verifies that the given type declares a valid compile-time constant {@code API_LEVEL} field. * @@ -715,6 +1038,10 @@ private void validateApiLevelConstant(TypeElement contract) { /** * Reads a compile-time {@code int} constant from a type. * + *

    The field must be a primitive {@code int} with a compile-time constant value. + * Boxed {@code Integer} fields or fields initialized with method calls do not qualify + * because their values are not available to the annotation processor at compile time.

    + * * @param type the owning type * @param fieldName the field to locate * @return the constant value @@ -737,41 +1064,15 @@ private int readIntConstant(TypeElement type, String fieldName) { throw new ProcessorException(); } - /** - * Validates that all contracts required by the current contract are also implemented. - * - * @param implementation the concrete plugin implementation - * @param contract the contract currently being validated - * @param allImplementedContracts all discovered contracts of the implementation - * @param model the parsed model of the current contract - */ - private void validateRequiredContracts( - TypeElement implementation, - TypeElement contract, - Set allImplementedContracts, - PluginContractModel model - ) { - Set implementedNames = new LinkedHashSet<>(); - for (TypeElement implemented : allImplementedContracts) { - implementedNames.add(implemented.getQualifiedName().toString()); - } - - for (TypeElement requiredContract : model.requiredContracts()) { - String requiredName = requiredContract.getQualifiedName().toString(); - if (!implementedNames.contains(requiredName)) { - error( - implementation, - "Implementation of contract " + contract.getQualifiedName() - + " also requires contract " + requiredName - ); - throw new ProcessorException(); - } - } - } + // ── Provider handling ─────────────────────────────────────────────────────── /** * Resolves the API levels of all providers required by the current contract. * + *

    Each provider type referenced in a {@code @RequiredProvider} annotation must be an + * interface extending {@code CoreProvider} and must declare a compile-time {@code API_LEVEL} + * constant.

    + * * @param providerTypes the provider interfaces referenced by the contract annotation * @param implementation the concrete implementation being processed, used for diagnostics * @return a map from provider class name to required API level @@ -832,9 +1133,14 @@ private void mergeProviderLevels( }); } + // ── AutoService detection ─────────────────────────────────────────────────── + /** * Checks whether the implementation class uses {@code @AutoService}. * + *

    The processor does not depend on AutoService directly. It merely detects the annotation + * by name so it can avoid generating conflicting ServiceLoader resources.

    + * * @param implementation the implementation class * @return {@code true} if {@code @AutoService} is present */ @@ -842,6 +1148,8 @@ private boolean hasAutoServiceAnnotation(TypeElement implementation) { return findAnnotationMirror(implementation, ProcessorConstants.AUTO_SERVICE_ANNOTATION) != null; } + // ── Resource generation ───────────────────────────────────────────────────── + /** * Writes all accumulated generated resources after processing is complete. * @@ -888,7 +1196,9 @@ private void writeDescriptor(Descriptor descriptor) { /** * Writes one ServiceLoader registration file for a base contract. - * This is simply a re-implementation of what we did before with @AutoService and their processor + * + *

    This replaces the need for {@code @AutoService} on plugin implementations. The generated + * file follows the standard {@code META-INF/services/} convention.

    * * @param serviceTypeName the fully qualified name of the service interface * @param implementations the implementation class names to register @@ -914,9 +1224,132 @@ private void writeServiceFile(String serviceTypeName, Set implementation } } + // ── Type candidate checks ─────────────────────────────────────────────────── + + /** + * Determines whether a type qualifies as an implementation candidate for a plugin. + * + *

    A candidate is a concrete (non-abstract) class that implements {@code Plugin} through + * some contract interface. Abstract base classes are intentionally excluded — they may exist + * as shared implementation helpers without needing {@code @DataversePlugin}.

    + * + * @param typeElement the type to inspect + * @return {@code true} if the type is a concrete class implementing {@code Plugin} + */ + private boolean isPluginImplementationCandidate(TypeElement typeElement) { + if (typeElement.getKind() != ElementKind.CLASS) { + return false; + } + if (typeElement.getModifiers().contains(Modifier.ABSTRACT)) { + return false; + } + return implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE) + && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE); + } + + /** + * Determines whether a type is a plugin interface candidate that must declare {@code @PluginContract}. + * + *

    Any interface extending {@code Plugin} (other than {@code Plugin} itself) is expected to + * be a contract interface and must carry the {@code @PluginContract} annotation.

    + * + * @param typeElement the type to inspect + * @return {@code true} if the type is an interface extending {@code Plugin} + */ + private boolean isPluginInterfaceCandidate(TypeElement typeElement) { + return typeElement.getKind() == ElementKind.INTERFACE + && implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE) + && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE); + } + + /** + * Determines whether a type is a provider interface candidate that must declare {@code API_LEVEL}. + * + * @param typeElement the type to inspect + * @return {@code true} if the type is an interface extending {@code CoreProvider} + */ + private boolean isProviderInterfaceCandidate(TypeElement typeElement) { + return typeElement.getKind() == ElementKind.INTERFACE + && implementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE) + && !isExactType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE); + } + + /** + * Determines whether the given type is a plugin contract. + * + *

    A type qualifies as a plugin contract only when it is annotated with + * {@code @PluginContract} and is assignable to the common plugin super-interface.

    + * + * @param typeElement the type to test + * @return {@code true} if the type is a plugin contract + */ + private boolean isPluginContract(TypeElement typeElement) { + if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { + return false; + } + + TypeElement pluginType = elements.getTypeElement(ProcessorConstants.PLUGIN_INTERFACE); + if (pluginType == null) { + return false; + } + + return types.isAssignable( + types.erasure(typeElement.asType()), + types.erasure(pluginType.asType()) + ); + } + + // ── Type system helpers ───────────────────────────────────────────────────── + + /** + * Tests whether the given type is assignable to another type identified by fully qualified name. + * + * @param typeElement the source type + * @param targetTypeName the fully qualified target type name + * @return {@code true} if the source type is assignable to the target type + */ + private boolean implementsType(TypeElement typeElement, String targetTypeName) { + TypeElement targetType = elements.getTypeElement(targetTypeName); + if (targetType == null) { + return false; + } + + return types.isAssignable( + types.erasure(typeElement.asType()), + types.erasure(targetType.asType()) + ); + } + + /** + * Checks whether the given type is exactly the named type itself, not merely a subtype. + * + * @param typeElement the type to inspect + * @param targetTypeName the fully qualified target type name + * @return {@code true} if both names are identical + */ + private boolean isExactType(TypeElement typeElement, String targetTypeName) { + return typeElement.getQualifiedName().contentEquals(targetTypeName); + } + + /** + * Extracts the package name from a type element. 
+ * + * @param typeElement the type whose package to determine + * @return the fully qualified package name + */ + private String getPackageName(TypeElement typeElement) { + return elements.getPackageOf(typeElement).getQualifiedName().toString(); + } + + // ── Annotation mirror helpers ─────────────────────────────────────────────── + /** * Finds an annotation mirror on the given element by fully qualified annotation type name. * + *

    Annotation mirrors are the compile-time representation of annotations. Unlike + * {@code getAnnotation()}, mirrors work reliably during annotation processing even when + * the annotation class is being compiled in the same round.

    + * * @param element the annotated element * @param annotationTypeName the fully qualified annotation type name * @return the matching annotation mirror, or {@code null} if absent @@ -935,6 +1368,9 @@ private AnnotationMirror findAnnotationMirror(Element element, String annotation /** * Resolves one annotation member value, including defaults. * + *

    Uses {@code Elements.getElementValuesWithDefaults()} so that annotation members with + * default values are visible even when not explicitly set by the author.

    + * * @param annotation the annotation mirror * @param memberName the member to resolve * @return the resolved annotation value, or {@code null} if not found @@ -1052,6 +1488,10 @@ private List readRequiredProviders(AnnotationMirror pluginContractA /** * Converts a declared type mirror into its corresponding type element. * + *

    During annotation processing, types are represented as {@link TypeMirror} instances. + * This utility extracts the underlying {@link TypeElement} when the mirror represents a + * declared (class/interface) type.

    + * * @param typeMirror the type mirror to convert * @return the type element, or {@code null} if the mirror is not a declared type */ @@ -1064,9 +1504,14 @@ private TypeElement asTypeElement(TypeMirror typeMirror) { return element instanceof TypeElement typeElement ? typeElement : null; } + // ── Ordering helpers ──────────────────────────────────────────────────────── + /** * Returns the given types sorted by fully qualified name for deterministic processing order. * + *

    Sorting ensures that error messages and generated output are stable across compiler + * runs regardless of the order in which the compiler discovers types.

    + * * @param typesToSort the types to sort * @return a sorted list view */ @@ -1076,6 +1521,8 @@ private List sortByQualifiedName(Set typesToSort) { .toList(); } + // ── Diagnostic helpers ────────────────────────────────────────────────────── + /** * Emits a compiler error message associated with a source element. * @@ -1096,8 +1543,13 @@ private void warning(Element element, String message) { processingEnv.getMessager().printMessage(Diagnostic.Kind.WARNING, message, element); } + // ── Internal model ────────────────────────────────────────────────────────── + /** - * Internal in-memory representation of one contract interface. + * Internal in-memory representation of one contract interface's annotation metadata. + * + *

    This record captures the parsed state of a {@code @PluginContract} annotation for + * use during validation and descriptor generation.

    * * @param role whether the contract is a base contract or a capability * @param requiredContracts contracts that must also be implemented From 61fa9aebff482bd6c5a8f9634899359cf88df74e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 31 Mar 2026 02:59:55 +0200 Subject: [PATCH 37/55] test(meta): restructure and expand test cases for `PluginContractProcessor` compliance Refactor and organize tests for improved readability and coverage. Add new validation scenarios, including public and abstract implementation checks, base/capability contract rules, and descriptor generation validation. --- .../PluginContractProcessorTest.java | 3134 ++++++++++++----- 1 file changed, 2223 insertions(+), 911 deletions(-) diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index 27964db..34b5867 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -24,228 +24,126 @@ class PluginContractProcessorTest { private final ProcessorTestCompiler compiler = new ProcessorTestCompiler(); @Nested - class Basics { + class ImplementationContractGraphRules { @Test - void generatesDescriptorAndServiceFileForValidPlugin() throws IOException { + void compilesWhenImplementationOverridesConflictingDefaultMethodsFromCapabilities() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/TestProvider.java", - """ - package test; - - import %s; - - public interface TestProvider extends CoreProvider { - int API_LEVEL = 7; - } - """.formatted(CoreProvider.class.getCanonicalName()) - ), - source( - "test/TestPlugin.java", + "test/export/BaseExporter.java", """ - package test; - - import %s; - import %s; - import %s; - import %s; - - @PluginContract( - role = PluginContract.Role.BASE, - providers = { 
@RequiredProvider(TestProvider.class) } - ) - public interface TestPlugin extends Plugin { - int API_LEVEL = 3; - } - """.formatted( + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + + String getMediaType(); + } + """.formatted( Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName(), - RequiredProvider.class.getCanonicalName(), PluginContract.class.getCanonicalName() ) ), source( - "test/GoodPlugin.java", - """ - package test; - - import %s; - - @DataversePlugin - public class GoodPlugin implements TestPlugin { - @Override - public String identity() { - return "good"; - } - } - """.formatted(DataversePlugin.class.getCanonicalName()) - ) - )); - - assertTrue(result.success(), result.diagnosticsAsText()); - - String descriptorPath = DescriptorFormat.toPath("test.GoodPlugin"); - String servicePath = "META-INF/services/test.TestPlugin"; - - assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); - assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); - - Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); - assertEquals("test.GoodPlugin", descriptor.klass()); - assertEquals("test.TestPlugin", descriptor.kind()); - assertEquals(3, descriptor.contractLevel("test.TestPlugin")); - assertEquals(7, descriptor.requiredProviderLevel("test.TestProvider")); - - String serviceFile = Files.readString(result.generatedFile(servicePath)); - assertEquals("test.GoodPlugin", serviceFile.trim()); - } - - @Test - void failsWhenPluginContractIsPlacedOnImplementationClass() throws IOException { - ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "test/TestPlugin.java", + "test/export/XmlCapability.java", """ - package test; - - import %s; - import %s; - - @PluginContract(role = 
PluginContract.Role.BASE) - public interface TestPlugin extends Plugin { - int API_LEVEL = 1; + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; + + default String getMediaType() { + return "application/xml"; } - """.formatted( + } + """.formatted( Plugin.class.getCanonicalName(), PluginContract.class.getCanonicalName() ) ), source( - "test/InvalidImplementation.java", + "test/export/TextCapability.java", """ - package test; - - import %s; - import %s; - - @DataversePlugin - @PluginContract(role = PluginContract.Role.BASE) - public class InvalidImplementation implements TestPlugin { - @Override - public String identity() { - return "invalid"; - } - } - """.formatted( - DataversePlugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName() + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } ) - ) - )); - - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "@PluginContract may only be declared on interfaces"); - } - - @Test - void warnsWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { - ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "test/TestPlugin.java", - """ - package test; - - import %s; - import %s; - - @PluginContract(role = PluginContract.Role.BASE) - public interface TestPlugin extends Plugin { - int API_LEVEL = 2; + public interface TextCapability extends Plugin { + int API_LEVEL = 3; + + default String getMediaType() { + return "text/plain"; } - """.formatted( + } + """.formatted( Plugin.class.getCanonicalName(), PluginContract.class.getCanonicalName() ) ), source( - "test/ImplicitPlugin.java", - """ - package test; - - public class ImplicitPlugin 
implements TestPlugin { - @Override - public String identity() { - return "implicit"; - } - } - """ - ) - )); - - assertTrue(result.success(), result.diagnosticsAsText()); - assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@DataversePlugin"); - } - - @Test - void createsDescriptorEvenWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { - ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "test/TestPlugin.java", + "test/export/MultiCapabilityExporter.java", """ - package test; - - import %s; - import %s; - - @PluginContract(role = PluginContract.Role.BASE) - public interface TestPlugin extends Plugin { - int API_LEVEL = 4; + package test.export; + + import %s; + + @DataversePlugin + public class MultiCapabilityExporter implements BaseExporter, XmlCapability, TextCapability { + @Override + public String identity() { + return "multi"; } - """.formatted( - Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName() - ) - ), - source( - "test/ImplicitPlugin.java", - """ - package test; - - public class ImplicitPlugin implements TestPlugin { - @Override - public String identity() { - return "implicit"; - } + + @Override + public String getMediaType() { + return XmlCapability.super.getMediaType(); } - """ + } + """.formatted(DataversePlugin.class.getCanonicalName()) ) )); assertTrue(result.success(), result.diagnosticsAsText()); - String descriptorPath = DescriptorFormat.toPath("test.ImplicitPlugin"); - assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); + String descriptorPath = DescriptorFormat.toPath("test.export.MultiCapabilityExporter"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); - assertEquals("test.ImplicitPlugin", descriptor.klass()); - assertEquals("test.TestPlugin", 
descriptor.kind()); - assertEquals(4, descriptor.contractLevel("test.TestPlugin")); + assertEquals("test.export.BaseExporter", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.export.BaseExporter")); + assertEquals(2, descriptor.contractLevel("test.export.XmlCapability")); + assertEquals(3, descriptor.contractLevel("test.export.TextCapability")); } @Test - void createsServiceFileEvenWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { + void failsWhenNoBaseContractIsImplemented() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/TestPlugin.java", + "test/CapabilityPlugin.java", """ package test; import %s; import %s; - @PluginContract(role = PluginContract.Role.BASE) - public interface TestPlugin extends Plugin { - int API_LEVEL = 5; + @PluginContract(role = PluginContract.Role.CAPABILITY) + public interface CapabilityPlugin extends Plugin { + int API_LEVEL = 1; } """.formatted( Plugin.class.getCanonicalName(), @@ -253,27 +151,25 @@ public interface TestPlugin extends Plugin { ) ), source( - "test/ImplicitPlugin.java", + "test/CapabilityOnlyImpl.java", """ package test; - public class ImplicitPlugin implements TestPlugin { + import %s; + + @DataversePlugin + public class CapabilityOnlyImpl implements CapabilityPlugin { @Override public String identity() { - return "implicit"; + return "capability-only"; } } - """ + """.formatted(DataversePlugin.class.getCanonicalName()) ) )); - assertTrue(result.success(), result.diagnosticsAsText()); - - String servicePath = "META-INF/services/test.TestPlugin"; - assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should still be generated"); - - String serviceFile = Files.readString(result.generatedFile(servicePath)); - assertEquals("test.ImplicitPlugin", serviceFile.trim()); + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly 
one Role.BASE @PluginContract"); } @Test @@ -332,7 +228,7 @@ public String identity() { )); assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "multiple base plugin contracts"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one Role.BASE @PluginContract"); } @Test @@ -400,32 +296,37 @@ public String identity() { } @Test - void failsWhenPluginInterfaceLacksPluginContractAnnotation() throws IOException { + void failsWhenImplementationIsNotPublic() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/UndeclaredPluginContract.java", + "test/TestPlugin.java", """ package test; + import %s; import %s; - public interface UndeclaredPluginContract extends Plugin { + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { int API_LEVEL = 1; } - """.formatted(Plugin.class.getCanonicalName()) + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) ), source( - "test/UndeclaredPluginImpl.java", + "test/HiddenPlugin.java", """ package test; import %s; @DataversePlugin - public class UndeclaredPluginImpl implements UndeclaredPluginContract { + class HiddenPlugin implements TestPlugin { @Override public String identity() { - return "undeclared-contract"; + return "hidden"; } } """.formatted(DataversePlugin.class.getCanonicalName()) @@ -433,121 +334,11 @@ public String identity() { )); assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Plugin interfaces must declare @PluginContract"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be public"); } @Test - void suppressesGeneratedServiceFileWhenAutoServiceIsPresent() throws IOException { - ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "com/google/auto/service/AutoService.java", - """ - 
package com.google.auto.service; - - import java.lang.annotation.ElementType; - import java.lang.annotation.Retention; - import java.lang.annotation.RetentionPolicy; - import java.lang.annotation.Target; - - @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.TYPE) - public @interface AutoService { - Class[] value(); - } - """ - ), - source( - "test/TestPlugin.java", - """ - package test; - - import %s; - import %s; - - @PluginContract(role = PluginContract.Role.BASE) - public interface TestPlugin extends Plugin { - int API_LEVEL = 2; - } - """.formatted( - Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName() - ) - ), - source( - "test/AutoServicePlugin.java", - """ - package test; - - import com.google.auto.service.AutoService; - import %s; - - @DataversePlugin - @AutoService(TestPlugin.class) - public class AutoServicePlugin implements TestPlugin { - @Override - public String identity() { - return "auto"; - } - } - """.formatted(DataversePlugin.class.getCanonicalName()) - ) - )); - - assertTrue(result.success(), result.diagnosticsAsText()); - - String descriptorPath = DescriptorFormat.toPath("test.AutoServicePlugin"); - String servicePath = "META-INF/services/test.TestPlugin"; - - assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); - assertFalse(Files.exists(result.generatedFile(servicePath)), "Service file should be suppressed"); - - assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@AutoService detected"); - } - - @Test - void failsWhenImplementationIsNotPublic() throws IOException { - ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "test/TestPlugin.java", - """ - package test; - - import %s; - import %s; - - @PluginContract(role = PluginContract.Role.BASE) - public interface TestPlugin extends Plugin { - int API_LEVEL = 1; - } - """.formatted( - Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName() - ) - ), - 
source( - "test/HiddenPlugin.java", - """ - package test; - - import %s; - - @DataversePlugin - class HiddenPlugin implements TestPlugin { - @Override - public String identity() { - return "hidden"; - } - } - """.formatted(DataversePlugin.class.getCanonicalName()) - ) - )); - - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be public"); - } - - @Test - void failsWhenImplementationIsAbstract() throws IOException { + void failsWhenImplementationIsAbstract() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( "test/TestPlugin.java", @@ -588,48 +379,6 @@ public String identity() { assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must not be abstract"); } - @Test - void failsWhenNoBaseContractIsImplemented() throws IOException { - ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "test/CapabilityPlugin.java", - """ - package test; - - import %s; - import %s; - - @PluginContract(role = PluginContract.Role.CAPABILITY) - public interface CapabilityPlugin extends Plugin { - int API_LEVEL = 1; - } - """.formatted( - Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName() - ) - ), - source( - "test/CapabilityOnlyImpl.java", - """ - package test; - - import %s; - - @DataversePlugin - public class CapabilityOnlyImpl implements CapabilityPlugin { - @Override - public String identity() { - return "capability-only"; - } - } - """.formatted(DataversePlugin.class.getCanonicalName()) - ) - )); - - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one Role.BASE @PluginContract"); - } - @Test void failsWhenDataversePluginAnnotatedClassIsNotAPluginImplementation() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( @@ -688,7 +437,8 @@ public String identity() { } @Test - void 
doesNotProcessAnnotatedImplementationTwice() throws IOException { + // because we want to allow base classes, as long as concrete classes are annotated @DataversePlugin + void doesNotWarnForAbstractUnannotatedPluginBaseClass() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( "test/TestPlugin.java", @@ -700,7 +450,7 @@ void doesNotProcessAnnotatedImplementationTwice() throws IOException { @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { - int API_LEVEL = 6; + int API_LEVEL = 1; } """.formatted( Plugin.class.getCanonicalName(), @@ -708,85 +458,78 @@ public interface TestPlugin extends Plugin { ) ), source( - "test/OncePlugin.java", + "test/AbstractBasePlugin.java", """ package test; - import %s; - - @DataversePlugin - public class OncePlugin implements TestPlugin { + public abstract class AbstractBasePlugin implements TestPlugin { @Override public String identity() { - return "once"; + return "base"; } } - """.formatted(DataversePlugin.class.getCanonicalName()) + """ ) )); assertTrue(result.success(), result.diagnosticsAsText()); - - String servicePath = "META-INF/services/test.TestPlugin"; - assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); - - List lines = Files.readAllLines(result.generatedFile(servicePath)) - .stream() - .filter(line -> !line.isBlank()) - .toList(); - - assertEquals(1, lines.size(), "Implementation should only be registered once"); - assertEquals("test.OncePlugin", lines.get(0)); + assertDiagnosticDoesNotContain(result, Diagnostic.Kind.WARNING, "@DataversePlugin"); } - + } + + @Nested + class DescriptorFileGeneration { @Test - void aggregatesMultipleImplementationsIntoOneServiceFile() throws IOException { + void generatesDescriptorAndServiceFileForValidPlugin() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestProvider.java", + """ + 
package test; + + import %s; + + public interface TestProvider extends CoreProvider { + int API_LEVEL = 7; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), source( "test/TestPlugin.java", """ package test; - + import %s; import %s; - - @PluginContract(role = PluginContract.Role.BASE) + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(TestProvider.class) } + ) public interface TestPlugin extends Plugin { - int API_LEVEL = 1; + int API_LEVEL = 3; } """.formatted( Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName(), PluginContract.class.getCanonicalName() ) ), source( - "test/APlugin.java", - """ - package test; - - import %s; - - @DataversePlugin - public class APlugin implements TestPlugin { - @Override - public String identity() { - return "a"; - } - } - """.formatted(DataversePlugin.class.getCanonicalName()) - ), - source( - "test/BPlugin.java", + "test/GoodPlugin.java", """ package test; - + import %s; - + @DataversePlugin - public class BPlugin implements TestPlugin { + public class GoodPlugin implements TestPlugin { @Override public String identity() { - return "b"; + return "good"; } } """.formatted(DataversePlugin.class.getCanonicalName()) @@ -795,77 +538,301 @@ public String identity() { assertTrue(result.success(), result.diagnosticsAsText()); + String descriptorPath = DescriptorFormat.toPath("test.GoodPlugin"); String servicePath = "META-INF/services/test.TestPlugin"; - assertTrue(Files.exists(result.generatedFile(servicePath)), "Aggregated service file should be generated"); - List lines = Files.readAllLines(result.generatedFile(servicePath)) - .stream() - .filter(line -> !line.isBlank()) - .toList(); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); - 
assertEquals(List.of("test.APlugin", "test.BPlugin"), lines); + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.GoodPlugin", descriptor.klass()); + assertEquals("test.TestPlugin", descriptor.kind()); + assertEquals(3, descriptor.contractLevel("test.TestPlugin")); + assertEquals(7, descriptor.requiredProviderLevel("test.TestProvider")); + + String serviceFile = Files.readString(result.generatedFile(servicePath)); + assertEquals("test.GoodPlugin", serviceFile.trim()); } @Test - void compilesWhenAnnotatedPluginInterfaceHasNoImplementation() throws IOException { + void compilesWhenPluginImplementsOneBaseAndOneCapability() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/LonelyPluginContract.java", + "test/export/BaseExporter.java", """ - package test; - - import %s; - import %s; - - @PluginContract(role = PluginContract.Role.BASE) - public interface LonelyPluginContract extends Plugin { - int API_LEVEL = 1; + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + + String getMediaType(); + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends BaseExporter { + int API_LEVEL = 2; + + default String getMediaType() { + return "application/xml"; } - """.formatted( + } + """.formatted( Plugin.class.getCanonicalName(), PluginContract.class.getCanonicalName() ) + ), + source( + "test/export/XmlExporterImpl.java", + """ + package test.export; + + import %s; + + @DataversePlugin + public class XmlExporterImpl implements XmlCapability 
{ + @Override + public String identity() { + return "xml"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) ) )); assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.XmlExporterImpl"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.export.XmlExporterImpl", descriptor.klass()); + assertEquals("test.export.BaseExporter", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.export.BaseExporter")); + assertEquals(2, descriptor.contractLevel("test.export.XmlCapability")); } @Test - void compilesWhenUnusedProviderInterfaceHasApiLevel() throws IOException { + void compilesWhenPluginImplementsOneBaseAndTwoCapabilities() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/LonelyProvider.java", + "test/export/BaseExporter.java", """ - package test; - - import %s; - - public interface LonelyProvider extends CoreProvider { - int API_LEVEL = 42; + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/PrettyPrintCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = 
PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface PrettyPrintCapability extends Plugin { + int API_LEVEL = 3; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlPrettyExporterImpl.java", + """ + package test.export; + + import %s; + + @DataversePlugin + public class XmlPrettyExporterImpl implements BaseExporter, XmlCapability, PrettyPrintCapability { + @Override + public String identity() { + return "xml-pretty"; } - """.formatted(CoreProvider.class.getCanonicalName()) + } + """.formatted(DataversePlugin.class.getCanonicalName()) ) )); assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.XmlPrettyExporterImpl"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.export.BaseExporter", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.export.BaseExporter")); + assertEquals(2, descriptor.contractLevel("test.export.XmlCapability")); + assertEquals(3, descriptor.contractLevel("test.export.PrettyPrintCapability")); } @Test - // because we want to allow base classes, as long as concrete classes are annotated @DataversePlugin - void doesNotWarnForAbstractUnannotatedPluginBaseClass() throws IOException { + void mergesProviderRequirementsFromBaseAndCapability() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseProvider.java", + """ + package test.export; + + import %s; + + public interface BaseProvider extends CoreProvider { + int API_LEVEL = 10; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/export/XmlProvider.java", + """ + package test.export; + + import %s; + + public interface 
XmlProvider extends CoreProvider { + int API_LEVEL = 20; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(BaseProvider.class) } + ) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class }, + providers = { @RequiredProvider(XmlProvider.class) } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/export/XmlExporterImpl.java", + """ + package test.export; + + import %s; + + @DataversePlugin + public class XmlExporterImpl implements BaseExporter, XmlCapability { + @Override + public String identity() { + return "xml"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.XmlExporterImpl"); + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + + assertEquals(10, descriptor.requiredProviderLevel("test.export.BaseProvider")); + assertEquals(20, descriptor.requiredProviderLevel("test.export.XmlProvider")); + } + + @Test + void warnsWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( 
"test/TestPlugin.java", """ package test; - + import %s; import %s; - + @PluginContract(role = PluginContract.Role.BASE) public interface TestPlugin extends Plugin { - int API_LEVEL = 1; + int API_LEVEL = 2; } """.formatted( Plugin.class.getCanonicalName(), @@ -873,14 +840,14 @@ public interface TestPlugin extends Plugin { ) ), source( - "test/AbstractBasePlugin.java", + "test/ImplicitPlugin.java", """ package test; - - public abstract class AbstractBasePlugin implements TestPlugin { + + public class ImplicitPlugin implements TestPlugin { @Override public String identity() { - return "base"; + return "implicit"; } } """ @@ -888,23 +855,23 @@ public String identity() { )); assertTrue(result.success(), result.diagnosticsAsText()); - assertDiagnosticDoesNotContain(result, Diagnostic.Kind.WARNING, "@DataversePlugin"); + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@DataversePlugin"); } @Test - void failsWhenIndirectPluginInterfaceLacksPluginContractAnnotation() throws IOException { + void createsDescriptorEvenWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/BasePlugin.java", + "test/TestPlugin.java", """ package test; - + import %s; import %s; - + @PluginContract(role = PluginContract.Role.BASE) - public interface BasePlugin extends Plugin { - int API_LEVEL = 1; + public interface TestPlugin extends Plugin { + int API_LEVEL = 4; } """.formatted( Plugin.class.getCanonicalName(), @@ -912,155 +879,65 @@ public interface BasePlugin extends Plugin { ) ), source( - "test/DerivedPlugin.java", - """ - package test; - - public interface DerivedPlugin extends BasePlugin { - int API_LEVEL = 2; - } - """ - ), - source( - "test/DerivedPluginImpl.java", + "test/ImplicitPlugin.java", """ package test; - - import %s; - - @DataversePlugin - public class DerivedPluginImpl implements DerivedPlugin { + + public class ImplicitPlugin implements TestPlugin { 
@Override public String identity() { - return "derived"; + return "implicit"; } } - """.formatted(DataversePlugin.class.getCanonicalName()) + """ ) )); - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Plugin interfaces must declare @PluginContract"); + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.ImplicitPlugin"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.ImplicitPlugin", descriptor.klass()); + assertEquals("test.TestPlugin", descriptor.kind()); + assertEquals(4, descriptor.contractLevel("test.TestPlugin")); } @Test - void suppressesServiceGenerationOnlyForPluginKindManagedByAutoService() throws IOException { + void usesBaseContractAsDescriptorKindWhenCapabilityIsAlsoImplemented() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "com/google/auto/service/AutoService.java", + "test/export/BaseExporter.java", """ - package com.google.auto.service; - - import java.lang.annotation.ElementType; - import java.lang.annotation.Retention; - import java.lang.annotation.RetentionPolicy; - import java.lang.annotation.Target; - - @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.TYPE) - public @interface AutoService { - Class[] value(); - } - """ - ), - source( - "test/PluginTypeA.java", - """ - package test; - - import %s; - import %s; - - @PluginContract(role = PluginContract.Role.BASE) - public interface PluginTypeA extends Plugin { - int API_LEVEL = 1; - } - """.formatted( - Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName() - ) - ), - source( - "test/PluginTypeB.java", - """ - package test; - - import %s; - import %s; - - @PluginContract(role = 
PluginContract.Role.BASE) - public interface PluginTypeB extends Plugin { - int API_LEVEL = 1; - } - """.formatted( + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( Plugin.class.getCanonicalName(), PluginContract.class.getCanonicalName() ) ), source( - "test/AutoManagedA.java", - """ - package test; - - import com.google.auto.service.AutoService; - import %s; - - @DataversePlugin - @AutoService(PluginTypeA.class) - public class AutoManagedA implements PluginTypeA { - @Override - public String identity() { - return "a"; - } - } - """.formatted(DataversePlugin.class.getCanonicalName()) - ), - source( - "test/NormalB.java", - """ - package test; - - import %s; - - @DataversePlugin - public class NormalB implements PluginTypeB { - @Override - public String identity() { - return "b"; - } - } - """.formatted(DataversePlugin.class.getCanonicalName()) - ) - )); - - assertTrue(result.success(), result.diagnosticsAsText()); - - String servicePathA = "META-INF/services/test.PluginTypeA"; - String servicePathB = "META-INF/services/test.PluginTypeB"; - - assertFalse(Files.exists(result.generatedFile(servicePathA)), "PluginTypeA service file should be suppressed"); - assertTrue(Files.exists(result.generatedFile(servicePathB)), "PluginTypeB service file should still be generated"); - - String serviceFileB = Files.readString(result.generatedFile(servicePathB)); - assertEquals("test.NormalB", serviceFileB.trim()); - } - } - - @Nested - class EdgeCases { - @Test - void failsWhenContractIsOnNonPluginInterface() throws IOException { - ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "test/MissingExtendsPlugin.java", + "test/export/XmlCapability.java", """ - package test; + package test.export; import %s; import %s; - @PluginContract(role = PluginContract.Role.BASE) - public interface MissingExtendsPlugin 
{ + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; } """.formatted( Plugin.class.getCanonicalName(), @@ -1068,32 +945,59 @@ public interface MissingExtendsPlugin { ) ), source( - "test/MissingExtendsPluginImpl.java", + "test/export/XmlExporterImpl.java", """ - package test; + package test.export; import %s; @DataversePlugin - public class MissingExtendsPluginImpl implements MissingExtendsPlugin { + public class XmlExporterImpl implements BaseExporter, XmlCapability { @Override public String identity() { - return "missing-extends-plugin"; + return "xml"; } } """.formatted(DataversePlugin.class.getCanonicalName()) ) )); - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must implement a specific @PluginContract interface"); + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.XmlExporterImpl"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.export.BaseExporter", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.export.BaseExporter")); + assertEquals(2, descriptor.contractLevel("test.export.XmlCapability")); } - + } + + @Nested + class AutoServiceFileGeneration { @Test - void failsWhenContractApiLevelIsMissing() throws IOException { + void suppressesGeneratedServiceFileForWholeContractWhenAutoServiceIsMixedWithNormalImplementations() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/MissingApiLevelPlugin.java", + "com/google/auto/service/AutoService.java", + """ + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import 
java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import java.lang.annotation.Target; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface AutoService { + Class[] value(); + } + """ + ), + source( + "test/TestPlugin.java", """ package test; @@ -1101,7 +1005,8 @@ void failsWhenContractApiLevelIsMissing() throws IOException { import %s; @PluginContract(role = PluginContract.Role.BASE) - public interface MissingApiLevelPlugin extends Plugin { + public interface TestPlugin extends Plugin { + int API_LEVEL = 2; } """.formatted( Plugin.class.getCanonicalName(), @@ -1109,383 +1014,1830 @@ public interface MissingApiLevelPlugin extends Plugin { ) ), source( - "test/MissingApiLevelImpl.java", + "test/AutoServiceImpl.java", """ package test; + import com.google.auto.service.AutoService; import %s; @DataversePlugin - public class MissingApiLevelImpl implements MissingApiLevelPlugin { + @AutoService(TestPlugin.class) + public class AutoServiceImpl implements TestPlugin { @Override public String identity() { - return "missing-api-level"; + return "auto"; } } """.formatted(DataversePlugin.class.getCanonicalName()) - ) - )); - - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must declare int API_LEVEL"); - } - - @Test - void failsWhenContractApiLevelIsNotCompileTimeConstant() throws IOException { - ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "test/NonConstantApiLevelPlugin.java", - """ - package test; - - import %s; - import %s; - - @PluginContract(role = PluginContract.Role.BASE) - public interface NonConstantApiLevelPlugin extends Plugin { - Integer API_LEVEL = Integer.valueOf(2); - } - """.formatted( - Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName() - ) ), source( - "test/NonConstantApiLevelImpl.java", + "test/NormalImpl.java", """ package test; import %s; @DataversePlugin - 
public class NonConstantApiLevelImpl implements NonConstantApiLevelPlugin { + public class NormalImpl implements TestPlugin { @Override public String identity() { - return "non-constant-api-level"; + return "normal"; } } """.formatted(DataversePlugin.class.getCanonicalName()) ) )); - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be a compile-time int constant"); + assertTrue(result.success(), result.diagnosticsAsText()); + + String autoDescriptorPath = DescriptorFormat.toPath("test.AutoServiceImpl"); + String normalDescriptorPath = DescriptorFormat.toPath("test.NormalImpl"); + String servicePath = "META-INF/services/test.TestPlugin"; + + assertTrue(Files.exists(result.generatedFile(autoDescriptorPath)), "AutoService descriptor should be generated"); + assertTrue(Files.exists(result.generatedFile(normalDescriptorPath)), "Normal descriptor should be generated"); + assertFalse(Files.exists(result.generatedFile(servicePath)), "Service file should be suppressed for the whole contract"); + + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@AutoService detected"); } @Test - void failsWhenRequiredProviderIsNotACoreProvider() throws IOException { + void createsServiceFileEvenWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/NotAProvider.java", - """ - package test; - - public interface NotAProvider { - int API_LEVEL = 1; - } - """ - ), - source( - "test/InvalidProviderPlugin.java", + "test/TestPlugin.java", """ - package test; - - import %s; - import %s; - import %s; - - @PluginContract( - role = PluginContract.Role.BASE, - providers = { @RequiredProvider(NotAProvider.class) } - ) - public interface InvalidProviderPlugin extends Plugin { - int API_LEVEL = 1; - } - """.formatted( + package test; + + import %s; + import %s; + + @PluginContract(role = 
PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 5; + } + """.formatted( Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName(), - RequiredProvider.class.getCanonicalName() + PluginContract.class.getCanonicalName() ) ), source( - "test/InvalidProviderImpl.java", + "test/ImplicitPlugin.java", """ - package test; - - import %s; - - @DataversePlugin - public class InvalidProviderImpl implements InvalidProviderPlugin { - @Override - public String identity() { - return "invalid-provider"; + package test; + + public class ImplicitPlugin implements TestPlugin { + @Override + public String identity() { + return "implicit"; + } } - } - """.formatted(DataversePlugin.class.getCanonicalName()) + """ ) )); - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "cannot be converted"); + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should still be generated"); + + String serviceFile = Files.readString(result.generatedFile(servicePath)); + assertEquals("test.ImplicitPlugin", serviceFile.trim()); } @Test - void failsWhenProviderApiLevelIsMissing() throws IOException { + void suppressesGeneratedServiceFileWhenAutoServiceIsPresent() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/MissingProviderApiLevel.java", + "com/google/auto/service/AutoService.java", """ - package test; - - import %s; - - public interface MissingProviderApiLevel extends CoreProvider { - } - """.formatted(CoreProvider.class.getCanonicalName()) + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import java.lang.annotation.Target; + + 
@Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface AutoService { + Class[] value(); + } + """ ), source( "test/TestPlugin.java", """ - package test; - - import %s; - import %s; - import %s; - - @PluginContract( - role = PluginContract.Role.BASE, - providers = { @RequiredProvider(MissingProviderApiLevel.class) } + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() ) - public interface TestPlugin extends Plugin { - int API_LEVEL = 1; - } - """.formatted( + ), + source( + "test/AutoServicePlugin.java", + """ + package test; + + import com.google.auto.service.AutoService; + import %s; + + @DataversePlugin + @AutoService(TestPlugin.class) + public class AutoServicePlugin implements TestPlugin { + @Override + public String identity() { + return "auto"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.AutoServicePlugin"); + String servicePath = "META-INF/services/test.TestPlugin"; + + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); + assertFalse(Files.exists(result.generatedFile(servicePath)), "Service file should be suppressed"); + + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@AutoService detected"); + } + + @Test + void suppressesServiceGenerationOnlyForPluginKindManagedByAutoService() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "com/google/auto/service/AutoService.java", + """ + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import 
java.lang.annotation.Target; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface AutoService { + Class[] value(); + } + """ + ), + source( + "test/PluginTypeA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface PluginTypeA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName(), - RequiredProvider.class.getCanonicalName() + PluginContract.class.getCanonicalName() ) ), source( - "test/MissingProviderApiLevelImpl.java", + "test/PluginTypeB.java", """ - package test; - - import %s; - - @DataversePlugin - public class MissingProviderApiLevelImpl implements TestPlugin { - @Override - public String identity() { - return "missing-provider-api-level"; + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface PluginTypeB extends Plugin { + int API_LEVEL = 1; } - } - """.formatted(DataversePlugin.class.getCanonicalName()) + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AutoManagedA.java", + """ + package test; + + import com.google.auto.service.AutoService; + import %s; + + @DataversePlugin + @AutoService(PluginTypeA.class) + public class AutoManagedA implements PluginTypeA { + @Override + public String identity() { + return "a"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ), + source( + "test/NormalB.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NormalB implements PluginTypeB { + @Override + public String identity() { + return "b"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) ) )); - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must declare int API_LEVEL"); + assertTrue(result.success(), 
result.diagnosticsAsText()); + + String servicePathA = "META-INF/services/test.PluginTypeA"; + String servicePathB = "META-INF/services/test.PluginTypeB"; + + assertFalse(Files.exists(result.generatedFile(servicePathA)), "PluginTypeA service file should be suppressed"); + assertTrue(Files.exists(result.generatedFile(servicePathB)), "PluginTypeB service file should still be generated"); + + String serviceFileB = Files.readString(result.generatedFile(servicePathB)); + assertEquals("test.NormalB", serviceFileB.trim()); } @Test - void failsWhenProviderApiLevelIsNotCompileTimeConstant() throws IOException { + void doesNotGenerateServiceFileForCapabilityContracts() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/NonConstantProviderApiLevel.java", + "test/export/BaseExporter.java", """ - package test; - + package test.export; + import %s; - - public interface NonConstantProviderApiLevel extends CoreProvider { - Integer API_LEVEL = Integer.valueOf(9); + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; } - """.formatted(CoreProvider.class.getCanonicalName()) + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) ), source( - "test/TestPlugin.java", + "test/export/XmlCapability.java", """ - package test; - - import %s; + package test.export; + import %s; import %s; - + @PluginContract( - role = PluginContract.Role.BASE, - providers = { @RequiredProvider(NonConstantProviderApiLevel.class) } + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } ) - public interface TestPlugin extends Plugin { - int API_LEVEL = 1; + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; } """.formatted( Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName(), - RequiredProvider.class.getCanonicalName() + PluginContract.class.getCanonicalName() ) ), 
source( - "test/NonConstantProviderApiLevelImpl.java", + "test/export/XmlExporterImpl.java", """ - package test; - + package test.export; + import %s; - + @DataversePlugin - public class NonConstantProviderApiLevelImpl implements TestPlugin { + public class XmlExporterImpl implements BaseExporter, XmlCapability { @Override public String identity() { - return "non-constant-provider-api-level"; + return "xml"; } } """.formatted(DataversePlugin.class.getCanonicalName()) ) )); - assertFalse(result.success(), "Compilation should fail"); - assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be a compile-time int constant"); + assertTrue(result.success(), result.diagnosticsAsText()); + + String baseServicePath = "META-INF/services/test.export.BaseExporter"; + String capabilityServicePath = "META-INF/services/test.export.XmlCapability"; + + assertTrue(Files.exists(result.generatedFile(baseServicePath)), "Base service file should be generated"); + assertFalse(Files.exists(result.generatedFile(capabilityServicePath)), "Capability service file must never be generated"); + + String serviceFile = Files.readString(result.generatedFile(baseServicePath)); + assertEquals("test.export.XmlExporterImpl", serviceFile.trim()); } @Test - void discoversInheritedContractsTransitively() throws IOException { + void doesNotProcessAnnotatedImplementationTwice() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/TestProvider.java", + "test/TestPlugin.java", """ - package test; - - import %s; - - public interface TestProvider extends CoreProvider { - int API_LEVEL = 11; - } - """.formatted(CoreProvider.class.getCanonicalName()) + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 6; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) ), + source( + 
"test/OncePlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class OncePlugin implements TestPlugin { + @Override + public String identity() { + return "once"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); + + List lines = Files.readAllLines(result.generatedFile(servicePath)) + .stream() + .filter(line -> !line.isBlank()) + .toList(); + + assertEquals(1, lines.size(), "Implementation should only be registered once"); + assertEquals("test.OncePlugin", lines.get(0)); + } + + @Test + void aggregatesMultipleImplementationsIntoOneServiceFile() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/APlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class APlugin implements TestPlugin { + @Override + public String identity() { + return "a"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ), + source( + "test/BPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class BPlugin implements TestPlugin { + @Override + public String identity() { + return "b"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + assertTrue(Files.exists(result.generatedFile(servicePath)), "Aggregated service file should be 
generated"); + + List lines = Files.readAllLines(result.generatedFile(servicePath)) + .stream() + .filter(line -> !line.isBlank()) + .toList(); + + assertEquals(List.of("test.APlugin", "test.BPlugin"), lines); + } + } + + @Nested + class Providers { + @Test + void compilesWhenUnusedProviderInterfaceHasApiLevel() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/LonelyProvider.java", + """ + package test; + + import %s; + + public interface LonelyProvider extends CoreProvider { + int API_LEVEL = 42; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void failsWhenRequiredProviderIsNotACoreProvider() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NotAProvider.java", + """ + package test; + + public interface NotAProvider { + int API_LEVEL = 1; + } + """ + ), + source( + "test/InvalidProviderPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(NotAProvider.class) } + ) + public interface InvalidProviderPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/InvalidProviderImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class InvalidProviderImpl implements InvalidProviderPlugin { + @Override + public String identity() { + return "invalid-provider"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "cannot be converted"); + } + + @Test + void failsWhenProviderApiLevelIsMissing() throws IOException { + 
ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingProviderApiLevel.java", + """ + package test; + + import %s; + + public interface MissingProviderApiLevel extends CoreProvider { + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(MissingProviderApiLevel.class) } + ) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/MissingProviderApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MissingProviderApiLevelImpl implements TestPlugin { + @Override + public String identity() { + return "missing-provider-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must declare int API_LEVEL"); + } + + @Test + void failsWhenProviderApiLevelIsNotCompileTimeConstant() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NonConstantProviderApiLevel.java", + """ + package test; + + import %s; + + public interface NonConstantProviderApiLevel extends CoreProvider { + Integer API_LEVEL = Integer.valueOf(9); + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(NonConstantProviderApiLevel.class) } + ) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + 
Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/NonConstantProviderApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NonConstantProviderApiLevelImpl implements TestPlugin { + @Override + public String identity() { + return "non-constant-provider-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be a compile-time int constant"); + } + + @Test + void mergesDuplicateProviderRequirementsWhenLevelsMatch() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/SharedProvider.java", + """ + package test; + + import %s; + + public interface SharedProvider extends CoreProvider { + int API_LEVEL = 8; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityOne.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BasePlugin.class }, + providers = { @RequiredProvider(SharedProvider.class) } + ) + public interface CapabilityOne extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/CapabilityTwo.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + 
requires = { BasePlugin.class }, + providers = { @RequiredProvider(SharedProvider.class) } + ) + public interface CapabilityTwo extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/MergedProviderImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MergedProviderImpl implements BasePlugin, CapabilityOne, CapabilityTwo { + @Override + public String identity() { + return "merged-provider"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.MergedProviderImpl"); + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals(8, descriptor.requiredProviderLevel("test.SharedProvider")); + } + } + + @Nested + class ContractApiLevels { + @Test + void failsWhenContractApiLevelIsMissing() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingApiLevelPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface MissingApiLevelPlugin extends Plugin { + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/MissingApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MissingApiLevelImpl implements MissingApiLevelPlugin { + @Override + public String identity() { + return "missing-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must declare int API_LEVEL"); + } + + @Test + void 
failsWhenContractApiLevelIsNotCompileTimeConstant() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NonConstantApiLevelPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface NonConstantApiLevelPlugin extends Plugin { + Integer API_LEVEL = Integer.valueOf(2); + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/NonConstantApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NonConstantApiLevelImpl implements NonConstantApiLevelPlugin { + @Override + public String identity() { + return "non-constant-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be a compile-time int constant"); + } + } + + @Nested + class InvalidOrMissingAnnotationTargets { + @Test + void failsWhenContractIsPlacedOnClass() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/InvalidContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public class InvalidContract implements Plugin { + @Override + public String identity() { + return "invalid"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "@PluginContract may only be declared on interfaces extending Plugin"); + } + + @Test + void failsWhenContractIsOnNonPluginInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingExtendsPlugin.java", + 
""" + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface MissingExtendsPlugin { + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "@PluginContract may only be declared on interfaces extending Plugin"); + } + + @Test + void failsWhenRequiredContractIsUnannotatedPluginInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/ImplicitPluginType.java", + """ + package test; + + import %s; + + public interface ImplicitPluginType extends Plugin { + int API_LEVEL = 1; + } + """.formatted(Plugin.class.getCanonicalName()) + ), + source( + "test/BadCapability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { ImplicitPluginType.class } + ) + public interface BadCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Interfaces extending Plugin must declare @PluginContract"); + } + + @Test + void failsWhenContractIsInDifferentPackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/foobar/BasePlugin.java", + """ + package test.foobar; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/barbeque/CapabilityPluginA.java", + """ + package test.barbeque; + + import %s; + import %s; + 
import test.foobar.BasePlugin; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface CapabilityPluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ))); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "share same package path"); + } + + @Test + void compilesWhenCapabilityRequiresBaseInSamePackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 1; + + default String getMediaType() { + return "application/xml"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void compilesWhenCapabilityRequiresBaseInSubpackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + 
"test/export/xml/XmlCapability.java", + """ + package test.export.xml; + + import test.export.BaseExporter; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 1; + + default String getMediaType() { + return "application/xml"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void failsWhenCapabilityRequiresBaseInSiblingPackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/importing/XmlCapability.java", + """ + package test.importing; + + import test.export.BaseExporter; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "same package path"); + } + + @Test + void failsWhenCapabilityRequiresBaseInParentPackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/xml/BaseExporter.java", + """ + package test.export.xml; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin 
{ + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import test.export.xml.BaseExporter; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "same package path"); + } + + @Test + void failsWhenBaseContractLacksPluginContractAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/UndeclaredPluginContract.java", + """ + package test; + + import %s; + + public interface UndeclaredPluginContract extends Plugin { + int API_LEVEL = 1; + } + """.formatted(Plugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Interfaces extending Plugin must declare @PluginContract"); + } + } + + @Nested + class BaseContractGraphRules { + @Test + void compilesWhenAnnotatedPluginInterfaceHasNoImplementation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/LonelyPluginContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface LonelyPluginContract extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void failsWhenBaseContractRequiresSomething() throws IOException { + 
ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE + ) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/InvalidPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + requires = BasePlugin.class + ) + public interface InvalidPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "may not require"); + } + + @Test + void failsWhenBaseContractIsExtended() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/DerivedBase.java", + """ + package test; + + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface DerivedBase extends BasePlugin { + int API_LEVEL = 2; + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "may not be extended"); + } + + @Test + void failsWhenBaseContractIsExtendedByCapabilityButMissingRequired() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + 
""" + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/MissingRequiredBaseCapability.java", + """ + package test; + + import %s; + + @PluginContract(role = PluginContract.Role.CAPABILITY) + public interface MissingRequiredBaseCapability extends BasePlugin { + int API_LEVEL = 2; + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenContractRequiresMultipleBaseContracts() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePluginA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BasePluginB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BadDerivedContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BasePluginA.class, BasePluginB.class } + ) + public interface BadDerivedContract extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), 
"Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + } + + @Nested + class CapabilityContractGraphRules { + @Test + void compilesWhenCapabilityContractExtendsRequiredBaseContract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ExtendingCapability.java", + """ + package test; + + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface ExtendingCapability extends BasePlugin { + int API_LEVEL = 2; + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), "Compilation should not fail"); + } + + @Test + void failsWhenCapabilityContractExtendsNonRequiredBaseContract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePluginA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BasePluginB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ExtendingCapability.java", + """ + package test; + + import %s; + + @PluginContract( + 
role = PluginContract.Role.CAPABILITY, + requires = BasePluginA.class + ) + public interface ExtendingCapability extends BasePluginB { + int API_LEVEL = 2; + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "extended base must match the required base"); + } + + @Test + void failsWhenCapabilityContractIsExtendedByCapability() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/Capability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface Capability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/DerivedCapability.java", + """ + package test; + + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface DerivedCapability extends Capability { + int API_LEVEL = 2; + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "may not be extended"); + } + + @Test + void failsWhenCapabilityDoesNotDeclareRequiredBaseContract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingRequiresPlugin.java", + """ + package test; + + import %s; + import %s; + + 
@PluginContract(role = PluginContract.Role.CAPABILITY) + public interface MissingRequiresPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenCapabilityDeclaresEmptyRequiredBaseContract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/EmptyRequiresPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = {} + ) + public interface EmptyRequiresPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenCapabilityRequiresItself() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/SelfReferencingCapability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { SelfReferencingCapability.class } + ) + public interface SelfReferencingCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenCapabilityContractRequiresCapabilityNoBase() throws IOException { + 
ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface CapabilityPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BadRequiringContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { CapabilityPlugin.class } + ) + public interface BadRequiringContract extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenContractRequiresMultipleCapabilityContracts() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( "test/BasePlugin.java", """ - package test; - - import %s; - import %s; - import %s; - - @PluginContract( - role = PluginContract.Role.BASE, - providers = { @RequiredProvider(TestProvider.class) } - ) - public interface BasePlugin extends Plugin { - int API_LEVEL = 3; - } - """.formatted( + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( 
Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName(), - RequiredProvider.class.getCanonicalName() + PluginContract.class.getCanonicalName() ) ), source( - "test/IntermediateCapability.java", + "test/CapabilityPluginA.java", """ - package test; - - import %s; - import %s; - - @PluginContract( - role = PluginContract.Role.CAPABILITY, - requires = { BasePlugin.class } - ) - public interface IntermediateCapability extends BasePlugin { - int API_LEVEL = 4; - } - """.formatted( - PluginContract.class.getCanonicalName(), + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface CapabilityPluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), PluginContract.class.getCanonicalName() ) ), source( - "test/LeafCapability.java", + "test/CapabilityPluginB.java", """ - package test; - - import %s; - import %s; - - @PluginContract( - role = PluginContract.Role.CAPABILITY, - requires = { BasePlugin.class, IntermediateCapability.class } - ) - public interface LeafCapability extends IntermediateCapability { - int API_LEVEL = 5; - } - """.formatted( - PluginContract.class.getCanonicalName(), + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface CapabilityPluginB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), PluginContract.class.getCanonicalName() ) ), source( - "test/TransitiveImpl.java", + "test/BadCapabilityContract.java", """ - package test; - - import %s; - - @DataversePlugin - public class TransitiveImpl implements LeafCapability { - @Override - public String identity() { - return "transitive"; + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { CapabilityPluginA.class, CapabilityPluginB.class } 
+ ) + public interface BadCapabilityContract extends Plugin { + int API_LEVEL = 2; } - } - """.formatted(DataversePlugin.class.getCanonicalName()) + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName()) ) )); - assertTrue(result.success(), result.diagnosticsAsText()); - - String descriptorPath = DescriptorFormat.toPath("test.TransitiveImpl"); - assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); - - Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); - assertEquals("test.TransitiveImpl", descriptor.klass()); - assertEquals("test.BasePlugin", descriptor.kind()); - assertEquals(3, descriptor.contractLevel("test.BasePlugin")); - assertEquals(4, descriptor.contractLevel("test.IntermediateCapability")); - assertEquals(5, descriptor.contractLevel("test.LeafCapability")); - assertEquals(11, descriptor.requiredProviderLevel("test.TestProvider")); + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); } @Test - void suppressesGeneratedServiceFileForWholeContractWhenAutoServiceIsMixedWithNormalImplementations() throws IOException { + void failsWhenCapabilityRequiresSameBaseTwice() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "com/google/auto/service/AutoService.java", - """ - package com.google.auto.service; - - import java.lang.annotation.ElementType; - import java.lang.annotation.Retention; - import java.lang.annotation.RetentionPolicy; - import java.lang.annotation.Target; - - @Retention(RetentionPolicy.RUNTIME) - @Target(ElementType.TYPE) - public @interface AutoService { - Class[] value(); - } - """ - ), - source( - "test/TestPlugin.java", + "test/BasePlugin.java", """ package test; - + import %s; import %s; - + @PluginContract(role = PluginContract.Role.BASE) 
- public interface TestPlugin extends Plugin { - int API_LEVEL = 2; + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; } """.formatted( Plugin.class.getCanonicalName(), @@ -1493,69 +2845,34 @@ public interface TestPlugin extends Plugin { ) ), source( - "test/AutoServiceImpl.java", + "test/BadCapability.java", """ package test; - - import com.google.auto.service.AutoService; + import %s; - - @DataversePlugin - @AutoService(TestPlugin.class) - public class AutoServiceImpl implements TestPlugin { - @Override - public String identity() { - return "auto"; - } - } - """.formatted(DataversePlugin.class.getCanonicalName()) - ), - source( - "test/NormalImpl.java", - """ - package test; - import %s; - - @DataversePlugin - public class NormalImpl implements TestPlugin { - @Override - public String identity() { - return "normal"; - } + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BasePlugin.class, BasePlugin.class } + ) + public interface BadCapability extends Plugin { + int API_LEVEL = 1; } - """.formatted(DataversePlugin.class.getCanonicalName()) + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) ) )); - assertTrue(result.success(), result.diagnosticsAsText()); - - String autoDescriptorPath = DescriptorFormat.toPath("test.AutoServiceImpl"); - String normalDescriptorPath = DescriptorFormat.toPath("test.NormalImpl"); - String servicePath = "META-INF/services/test.TestPlugin"; - - assertTrue(Files.exists(result.generatedFile(autoDescriptorPath)), "AutoService descriptor should be generated"); - assertTrue(Files.exists(result.generatedFile(normalDescriptorPath)), "Normal descriptor should be generated"); - assertFalse(Files.exists(result.generatedFile(servicePath)), "Service file should be suppressed for the whole contract"); - - assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@AutoService detected"); + assertFalse(result.success(), "Compilation should fail"); + 
assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); } @Test - void mergesDuplicateProviderRequirementsWhenLevelsMatch() throws IOException { + void failsWhenContractRequiresConcretePluginClass() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( - source( - "test/SharedProvider.java", - """ - package test; - - import %s; - - public interface SharedProvider extends CoreProvider { - int API_LEVEL = 8; - } - """.formatted(CoreProvider.class.getCanonicalName()) - ), source( "test/BasePlugin.java", """ @@ -1574,74 +2891,69 @@ public interface BasePlugin extends Plugin { ) ), source( - "test/CapabilityOne.java", + "test/SomePluginImpl.java", """ package test; - import %s; - import %s; - import %s; - - @PluginContract( - role = PluginContract.Role.CAPABILITY, - requires = { BasePlugin.class }, - providers = { @RequiredProvider(SharedProvider.class) } - ) - public interface CapabilityOne extends Plugin { - int API_LEVEL = 1; + public class SomePluginImpl implements BasePlugin { + @Override + public String identity() { + return "impl"; + } } - """.formatted( - Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName(), - RequiredProvider.class.getCanonicalName() - ) + """ ), source( - "test/CapabilityTwo.java", + "test/BadCapability.java", """ package test; - import %s; import %s; import %s; @PluginContract( role = PluginContract.Role.CAPABILITY, - requires = { BasePlugin.class }, - providers = { @RequiredProvider(SharedProvider.class) } + requires = { SomePluginImpl.class } ) - public interface CapabilityTwo extends Plugin { - int API_LEVEL = 2; + public interface BadCapability extends Plugin { + int API_LEVEL = 1; } """.formatted( Plugin.class.getCanonicalName(), - PluginContract.class.getCanonicalName(), - RequiredProvider.class.getCanonicalName() + PluginContract.class.getCanonicalName() ) - ), + ) + )); + + assertFalse(result.success(), "Compilation 
should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenContractDeclaresMultiplePluginContractAnnotations() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( source( - "test/MergedProviderImpl.java", + "test/DuplicateAnnotatedPlugin.java", """ - package test; - - import %s; - - @DataversePlugin - public class MergedProviderImpl implements BasePlugin, CapabilityOne, CapabilityTwo { - @Override - public String identity() { - return "merged-provider"; + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + @PluginContract(role = PluginContract.Role.CAPABILITY) + public interface DuplicateAnnotatedPlugin extends Plugin { + int API_LEVEL = 1; } - } - """.formatted(DataversePlugin.class.getCanonicalName()) + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) ) )); - assertTrue(result.success(), result.diagnosticsAsText()); - - String descriptorPath = DescriptorFormat.toPath("test.MergedProviderImpl"); - Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); - assertEquals(8, descriptor.requiredProviderLevel("test.SharedProvider")); + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "PluginContract is not a repeatable annotation type"); } } From 547f61d11393ab36c713839a4b4c36ffc388e655 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 31 Mar 2026 03:45:57 +0200 Subject: [PATCH 38/55] chore(api): use Maven Shade Plugin in API module to build Uber-JAR - Configure Maven Shade Plugin in API module for packaging adjustments. 
- Set SLF4J dependency scope to "provided" in Core module to exclude it from the shading process (no need to redistribute) --- api/pom.xml | 21 +++++++++++++++++++++ core/pom.xml | 1 + 2 files changed, 22 insertions(+) diff --git a/api/pom.xml b/api/pom.xml index d94ea58..d1e554c 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -38,5 +38,26 @@ export
    + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.6.2 + + + package + + shade + + + false + + + + + + \ No newline at end of file diff --git a/core/pom.xml b/core/pom.xml index 1b0edc2..3f1b1df 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -21,6 +21,7 @@ org.slf4j slf4j-api + provided From b24329953f9e2d44a92dd4caff87356dfb002f4f Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 31 Mar 2026 16:27:28 +0200 Subject: [PATCH 39/55] test(meta): add tests for intermediate interface compatibility in `PluginContractProcessor` - Validate compilation when base and capability contracts extend unrelated intermediate interfaces. - Ensure compliance with `@PluginContract` rules under complex interface hierarchies. This seems to be an unlikely scenario, but for the sake of completeness, we still cover it. As the processor is only about the metadata for plugins, unrelated interfaces don't bother us. --- .../PluginContractProcessorTest.java | 103 ++++++++++++++++++ 1 file changed, 103 insertions(+) diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index 34b5867..fe19fb8 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -2424,6 +2424,48 @@ public interface BadDerivedContract extends Plugin { assertFalse(result.success(), "Compilation should fail"); assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); } + + @Test + void compilesWhenBaseContractHasUnrelatedIntermediateInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BaseSupertype.java", + """ + package test; + + import %s; + + public interface BaseSupertype { + String test(); + } + """.formatted(Plugin.class.getCanonicalName()) + 
), + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE + ) + public interface BasePlugin extends BaseSupertype, Plugin { + int API_LEVEL = 1; + default String test() { + return "test"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), "Compilation should not fail"); + } } @Nested @@ -2528,6 +2570,67 @@ public interface ExtendingCapability extends BasePluginB { assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "extended base must match the required base"); } + @Test + void compilesWhenCapabilityContractExtendsUnrelatedIntermediateInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/Intermediate.java", + """ + package test; + + import %s; + import %s; + + public interface Intermediate { + String test(); + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ExtendsIntermediate.java", + """ + package test; + + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface ExtendsIntermediate extends BasePlugin, Intermediate { + int API_LEVEL = 2; + + default String test() { + return "test"; + } + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), "Compilation should not fail"); + } + @Test void failsWhenCapabilityContractIsExtendedByCapability() throws IOException { ProcessorTestCompiler.CompilationResult result = 
compiler.compile(List.of( From 7cc308b2ee065dfc50a25abacef813a9b91c3624 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 31 Mar 2026 16:28:33 +0200 Subject: [PATCH 40/55] feat(export): introduce `@PluginContract` annotations and `API_LEVEL` to Exporter interfaces - Annotate `Exporter`, `XMLExporter`, and `ExportDataProvider` with `@PluginContract` for role and provider definitions. - Define `API_LEVEL` constant in each interface to enforce versioning and compatibility. - Extend `ExportDataProvider` to implement `CoreProvider`. --- .../java/io/gdcc/spi/export/ExportDataProvider.java | 5 ++++- .../src/main/java/io/gdcc/spi/export/Exporter.java | 13 +++++++++++-- .../main/java/io/gdcc/spi/export/XMLExporter.java | 7 +++++++ 3 files changed, 22 insertions(+), 3 deletions(-) diff --git a/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java index cb3b9fe..aef1293 100644 --- a/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ b/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -1,6 +1,7 @@ package io.gdcc.spi.export; +import io.gdcc.spi.meta.plugin.CoreProvider; import jakarta.json.JsonArray; import jakarta.json.JsonObject; import org.w3c.dom.Document; @@ -33,7 +34,9 @@ * @see DatasetExportQuery * @see FileExportQuery */ -public interface ExportDataProvider { +public interface ExportDataProvider extends CoreProvider { + + int API_LEVEL = 2; /** * Returns complete dataset metadata in Dataverse's standard JSON format. 
diff --git a/export/src/main/java/io/gdcc/spi/export/Exporter.java b/export/src/main/java/io/gdcc/spi/export/Exporter.java index 7132e74..699a975 100644 --- a/export/src/main/java/io/gdcc/spi/export/Exporter.java +++ b/export/src/main/java/io/gdcc/spi/export/Exporter.java @@ -1,5 +1,9 @@ package io.gdcc.spi.export; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.annotations.RequiredProvider; +import io.gdcc.spi.meta.plugin.Plugin; + import java.io.OutputStream; import java.util.Locale; import java.util.Optional; @@ -10,8 +14,13 @@ * deploying new classes that implement this Exporter interface. */ -public interface Exporter { - +@PluginContract( + role = PluginContract.Role.BASE, + providers = @RequiredProvider(ExportDataProvider.class) +) +public interface Exporter extends Plugin { + + int API_LEVEL = 2; /** * When this method is called, the Exporter should write the metadata to the given OutputStream. diff --git a/export/src/main/java/io/gdcc/spi/export/XMLExporter.java b/export/src/main/java/io/gdcc/spi/export/XMLExporter.java index 910dcd0..08b7cc9 100644 --- a/export/src/main/java/io/gdcc/spi/export/XMLExporter.java +++ b/export/src/main/java/io/gdcc/spi/export/XMLExporter.java @@ -1,12 +1,19 @@ package io.gdcc.spi.export; +import io.gdcc.spi.meta.annotations.PluginContract; import jakarta.ws.rs.core.MediaType; /** * XML Exporter is an extension of the base Exporter interface that adds the * additional methods needed for generating XML metadata export formats. 
*/ +@PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = Exporter.class +) public interface XMLExporter extends Exporter { + + int API_LEVEL = 2; /** * @implNote for the ddi exporter, this method returns "ddi:codebook:2_5" From fb20e3018d102c68f2c980074e533d4c63d58861 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 31 Mar 2026 17:06:08 +0200 Subject: [PATCH 41/55] feat(export): add default `identity()` method to `Exporter` interface Provide a default implementation for `identity()` returning `getFormatName()` to avoid boilerplate code and enhance backward compatibility. --- export/src/main/java/io/gdcc/spi/export/Exporter.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/export/src/main/java/io/gdcc/spi/export/Exporter.java b/export/src/main/java/io/gdcc/spi/export/Exporter.java index 699a975..fc439a9 100644 --- a/export/src/main/java/io/gdcc/spi/export/Exporter.java +++ b/export/src/main/java/io/gdcc/spi/export/Exporter.java @@ -21,6 +21,10 @@ public interface Exporter extends Plugin { int API_LEVEL = 2; + + default String identity() { + return this.getFormatName(); + } /** * When this method is called, the Exporter should write the metadata to the given OutputStream. From 84e28a14948b6ca2c1fd07cc67260fc2383bd34e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Tue, 31 Mar 2026 17:06:36 +0200 Subject: [PATCH 42/55] test(export): add integration tests for descriptor and service file generation - Validate proper descriptor generation for `Exporter` and `XMLExporter` implementations. - Ensure service file creation for base contracts and absence for capability contracts. - Expand coverage with stub classes for JSON and DDI exporters. 
--- .../io/gdcc/spi/export/ExporterImplTest.java | 105 ++++++++++++++++++ .../spi/export/fixtures/StubDdiExporter.java | 57 ++++++++++ .../spi/export/fixtures/StubJsonExporter.java | 44 ++++++++ 3 files changed, 206 insertions(+) create mode 100644 export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java create mode 100644 export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java create mode 100644 export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java diff --git a/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java b/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java new file mode 100644 index 0000000..bc0c4fd --- /dev/null +++ b/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java @@ -0,0 +1,105 @@ + +package io.gdcc.spi.export; + +import io.gdcc.spi.export.fixtures.StubDdiExporter; +import io.gdcc.spi.export.fixtures.StubJsonExporter; +import io.gdcc.spi.meta.descriptor.Descriptor; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Verifies that the annotation processor generates correct descriptors and service files + * when compiling real Exporter SPI implementations. + * + *

    The test implementation classes in this package are compiled with the processor on the + * classpath. The processor writes descriptors and service files into {@code target/test-classes/}, + * which this test reads at runtime to verify correctness.

    + */ +class ExporterImplTest { + + @Test + void generatesDescriptorAndServiceFileForBaseExporterImplementation() throws IOException { + Class implClass = StubJsonExporter.class; + + Descriptor descriptor = readDescriptor(implClass); + assertNotNull(descriptor, "Descriptor should be generated for " + implClass); + + assertEquals(DescriptorFormat.transformClassName(implClass), descriptor.klass()); + assertEquals(Exporter.class.getCanonicalName(), descriptor.kind()); + assertEquals(Exporter.API_LEVEL, descriptor.contractLevel(Exporter.class.getCanonicalName())); + assertEquals(ExportDataProvider.API_LEVEL, descriptor.requiredProviderLevel(ExportDataProvider.class.getCanonicalName())); + + String serviceFile = readServiceFile(Exporter.class); + assertNotNull(serviceFile, "Service file should be generated for Exporter"); + assertTrue(serviceFile.contains(DescriptorFormat.transformClassName(implClass)), "Service file should contain " + implClass); + } + + @Test + void generatesDescriptorWithBaseAndCapabilityForXmlExporterImplementation() throws IOException { + Class implClass = StubDdiExporter.class; + + Descriptor descriptor = readDescriptor(implClass); + assertNotNull(descriptor, "Descriptor should be generated for " + implClass); + + assertEquals(DescriptorFormat.transformClassName(implClass), descriptor.klass()); + assertEquals(Exporter.class.getCanonicalName(), descriptor.kind()); + assertEquals(Exporter.API_LEVEL, descriptor.contractLevel(Exporter.class.getCanonicalName())); + assertEquals(XMLExporter.API_LEVEL, descriptor.contractLevel(XMLExporter.class.getCanonicalName())); + assertEquals(ExportDataProvider.API_LEVEL, descriptor.requiredProviderLevel(ExportDataProvider.class.getCanonicalName())); + + String serviceFile = readServiceFile(Exporter.class); + assertNotNull(serviceFile, "Service file should be generated for Exporter"); + assertTrue(serviceFile.contains(DescriptorFormat.transformClassName(implClass)), "Service file should contain " + implClass); + 
} + + @Test + void doesNotGenerateServiceFileForXmlExporterCapability() { + String serviceFile = readServiceFile(XMLExporter.class); + assertNull(serviceFile, "Service file must never be generated for capability contract XMLExporter"); + } + + @Test + void xmlExporterDefaultMediaTypeSatisfiesBaseContract() throws IOException { + // StubDdiExporter implements XMLExporter (which extends Exporter) and does NOT + // override getMediaType(). Because XMLExporter extends Exporter in the Java type + // hierarchy, the default on XMLExporter satisfies the abstract declaration on Exporter. + // If this were not the case, compilation would have failed and no descriptor would exist. + Class implClass = StubDdiExporter.class; + + Descriptor descriptor = readDescriptor(implClass); + assertNotNull(descriptor, "Descriptor should exist, proving compilation succeeded without explicit getMediaType() override"); + } + + // ── Helpers ───────────────────────────────────────────────────────────────── + + private Descriptor readDescriptor(Class implClass) throws IOException { + String resourcePath = DescriptorFormat.toPath(implClass); + try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) { + if (is == null) { + return null; + } + return DescriptorFormat.read(new String(is.readAllBytes(), StandardCharsets.UTF_8)); + } + } + + private String readServiceFile(Class serviceType) { + String resourcePath = "META-INF/services/" + serviceType.getName(); + try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) { + if (is == null) { + return null; + } + return new String(is.readAllBytes(), StandardCharsets.UTF_8); + } catch (IOException e) { + return null; + } + } +} \ No newline at end of file diff --git a/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java b/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java new file mode 100644 index 0000000..973a091 --- /dev/null +++ 
b/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java @@ -0,0 +1,57 @@ +package io.gdcc.spi.export.fixtures; + +import io.gdcc.spi.export.ExportDataProvider; +import io.gdcc.spi.export.XMLExporter; +import io.gdcc.spi.meta.annotations.DataversePlugin; + +import java.io.OutputStream; +import java.util.Locale; + +/** + * Minimal XMLExporter implementation for processor integration testing. + * + *

    Does NOT override {@code getMediaType()} — the default from {@link XMLExporter} + * satisfies the abstract declaration on {@link io.gdcc.spi.export.Exporter} because XMLExporter extends Exporter.

    + */ +@DataversePlugin +public class StubDdiExporter implements XMLExporter { + @Override + public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) { + /* Intentionally left blank for test class */ + } + + @Override + public String getFormatName() { + return "stub-ddi"; + } + + @Override + public String getDisplayName(Locale locale) { + return "Stub DDI"; + } + + @Override + public Boolean isHarvestable() { + return true; + } + + @Override + public Boolean isAvailableToUsers() { + return true; + } + + @Override + public String getXMLNameSpace() { + return "ddi:codebook:2_5"; + } + + @Override + public String getXMLSchemaLocation() { + return "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd"; + } + + @Override + public String getXMLSchemaVersion() { + return "2.5"; + } +} diff --git a/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java b/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java new file mode 100644 index 0000000..b273c39 --- /dev/null +++ b/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java @@ -0,0 +1,44 @@ +package io.gdcc.spi.export.fixtures; + +import io.gdcc.spi.export.ExportDataProvider; +import io.gdcc.spi.export.Exporter; +import io.gdcc.spi.meta.annotations.DataversePlugin; + +import java.io.OutputStream; +import java.util.Locale; + +/** + * Minimal base-only Exporter implementation for processor integration testing. 
+ */ +@DataversePlugin +public class StubJsonExporter implements Exporter { + @Override + public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) { + /* Intentionally left blank for test class */ + } + + @Override + public String getFormatName() { + return "stub-json"; + } + + @Override + public String getDisplayName(Locale locale) { + return "Stub JSON"; + } + + @Override + public Boolean isHarvestable() { + return false; + } + + @Override + public Boolean isAvailableToUsers() { + return true; + } + + @Override + public String getMediaType() { + return "application/json"; + } +} From 1e6626a0d2bcef23422a1aa2bdd15f5172053b51 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 1 Apr 2026 17:00:49 +0200 Subject: [PATCH 43/55] feat(docs): add Maven site documentation and resources - Introduce structured documentation for the Dataverse SPI Plugin API, including index, examples, and modules pages. - Add custom styles, site branding, and navigation for improved developer experience. - Configure Maven Site Plugin and reporting tools for streamlined site generation. - Include GitHub Actions workflow for automated site deployment to GitHub Pages. 
--- .github/workflows/site.yml | 44 +++++++ pom.xml | 114 ++++++++++++++++++ src/main/javadoc/custom.css | 19 +++ src/site/markdown/examples.md | 24 ++++ src/site/markdown/index.md | 59 +++++++++ src/site/markdown/modules.md | 23 ++++ src/site/resources/images/dataverse-logo.png | Bin 0 -> 4800 bytes src/site/resources/images/gdcc-logo.png | Bin 0 -> 5257 bytes .../images/project-logo-editable.svg | 92 ++++++++++++++ src/site/resources/images/project-logo.png | Bin 0 -> 19917 bytes src/site/resources/images/project-logo.svg | 88 ++++++++++++++ src/site/site.xml | 50 ++++++++ 12 files changed, 513 insertions(+) create mode 100644 .github/workflows/site.yml create mode 100644 src/main/javadoc/custom.css create mode 100644 src/site/markdown/examples.md create mode 100644 src/site/markdown/index.md create mode 100644 src/site/markdown/modules.md create mode 100644 src/site/resources/images/dataverse-logo.png create mode 100644 src/site/resources/images/gdcc-logo.png create mode 100644 src/site/resources/images/project-logo-editable.svg create mode 100644 src/site/resources/images/project-logo.png create mode 100644 src/site/resources/images/project-logo.svg create mode 100644 src/site/site.xml diff --git a/.github/workflows/site.yml b/.github/workflows/site.yml new file mode 100644 index 0000000..d30dac1 --- /dev/null +++ b/.github/workflows/site.yml @@ -0,0 +1,44 @@ +name: Publish Maven Site + +on: + push: +# branches: +# - main +# workflow_dispatch: + +permissions: + contents: write + +concurrency: + group: publish-maven-site-${{ github.ref }} + cancel-in-progress: true + +jobs: + publish-site: + runs-on: ubuntu-latest + + steps: + - name: Check out source + uses: actions/checkout@v6 + + - name: Set up Java 17 + uses: actions/setup-java@v5 + with: + distribution: temurin + java-version: '17' + cache: maven + + - name: Build and verify project site + run: mvn -B verify site + + - name: Add .nojekyll + run: touch target/site/.nojekyll + + - name: Deploy to gh-pages + 
uses: JamesIves/github-pages-deploy-action@v4 + with: + branch: gh-pages + folder: target/site + target-folder: snapshot + clean: true + single-commit: true diff --git a/pom.xml b/pom.xml index d011432..d36b8b8 100644 --- a/pom.xml +++ b/pom.xml @@ -38,8 +38,22 @@ + + + ${project.license.name} + ${project.license.url} + repo + + + + + https://github.com/gdcc/dataverse-spi + + 17 + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt @@ -71,4 +85,104 @@
    + + + + org.apache.maven.plugins + maven-site-plugin + + + org.sentrysoftware.maven + maven-skin-tools + 1.7.00 + + + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + aggregate-javadocs + + false + + aggregate + + + + + protected + + none + + true + false + true + ${jdk.version} + + Dataverse SPI Plugin API Reference + Dataverse SPI Plugin API Reference + + custom.css + + + + ← Back to Dataverse SPI Plugin API homepage + + ]]> + + Version ${project.version} -  + Copyright by ${project.organization.name} -  + Licensed under ${project.license.name} |  + Dataverse SPI Plugin API |  + The Dataverse Project + + ]]> + + + + apiNote + a + API Note: + + + implSpec + a + Implementation Requirements: + + + implNote + a + Implementation Note: + + + + + + + org.apache.maven.plugins + maven-project-info-reports-plugin + 3.9.0 + + + root-project-info + + false + + team + + + + + + + diff --git a/src/main/javadoc/custom.css b/src/main/javadoc/custom.css new file mode 100644 index 0000000..07de306 --- /dev/null +++ b/src/main/javadoc/custom.css @@ -0,0 +1,19 @@ +.top-nav-backlink { + padding: 0.75rem 1rem; + font-size: 1rem; + font-weight: 600; +} + +.top-nav-backlink a { + text-decoration: none; +} + +.top-nav-backlink a:hover { + text-decoration: underline; +} + +.doc-version { + margin-left: 1rem; + color: #57606a; + font-weight: 500; +} diff --git a/src/site/markdown/examples.md b/src/site/markdown/examples.md new file mode 100644 index 0000000..2305855 --- /dev/null +++ b/src/site/markdown/examples.md @@ -0,0 +1,24 @@ +# Examples + +This page should contain practical examples for plugin authors. + +## Suggested examples to add + +- creating a minimal plugin +- implementing a service provider +- packaging and deployment +- handling metadata or export extension points + +## Documentation style + +For each example, consider using this structure: + +1. **Goal** +2. **Prerequisites** +3. **Code** +4. **Explanation** +5. 
 **Related API links** + +## API Reference + +When relevant, link directly to classes in the [Javadocs](apidocs/index.html). \ No newline at end of file diff --git a/src/site/markdown/index.md b/src/site/markdown/index.md new file mode 100644 index 0000000..e693045 --- /dev/null +++ b/src/site/markdown/index.md @@ -0,0 +1,59 @@ +# Dataverse SPI Plugin API + +This site provides documentation for the SPI (Service Provider Interface) module for Dataverse. + +## Project Contents + +This project offers a universal Java module to create plugins for Dataverse: + +- It provides *API contracts* a plugin author implements to create a new plugin. +- It provides an easy-to-use *annotation* `@DataversePlugin`, marking an implementation class as such a plugin. +- It generates *plugin metadata* automatically to make a plugin author's life as convenient as possible. +- It allows coordinated data exchange with the core using a *core provider* concept. +- It helps the core to detect any plugins an administrator adds to their Dataverse installation and validate their compatibility. + +## Audience + +This documentation is intended for developers who want to: + +- build Dataverse plugins +- understand the available SPI contracts +- explore integration points +- browse the API reference + +## Maven Coordinates + +The (current) artifact is published as `io.gdcc:dataverse-spi` to *Maven Central*. +Use it in Maven like this: + +```xml + + io.gdcc + dataverse-spi + x.y.z + +``` + +Note: if you're using the GDCC Maven Parent, you may omit the version. + +## Documentation + +- [Examples](examples.html) +- [Modules](modules.html) +- [Javadocs](apidocs/index.html) + +## License + +Licensed under the same terms as the Dataverse core project: [${project.license.name}](${project.license.url}). + +## Context & History + +This module did not appear out of thin air. +Before it was moved to this project with an independent release cycle and potential governance, it was part of the Dataverse core. 
+ +You can find the first ever commit that started it all here: [IQSS/dataverse@e560a34e](https://github.com/IQSS/dataverse/commit/e560a34e89b12a08b0e936e0cc8bd429f7a8c7c5). +In an effort back in 2022, funded by DANS and undertaken by Jim Myers, this package was originally formed as a separate Maven module. +You can find the history and context in core pull request [IQSS/dataverse#9175](https://github.com/IQSS/dataverse/pull/9175). + +In 2026, it was decided within the [Dataverse Core Dev Team](https://dataverse.org/about) to move the Maven module into a separate repository, enabling an independent release cycle, tags, the works. +If you are interested in any commit history that happened before the initial Maven module creation, you can dig your way back from [IQSS/dataverse@fa0e2812](https://github.com/IQSS/dataverse/tree/fa0e28124a15b0db8042959b9fee536591f26f8d/modules/dataverse-spi). \ No newline at end of file diff --git a/src/site/markdown/modules.md b/src/site/markdown/modules.md new file mode 100644 index 0000000..ad53db9 --- /dev/null +++ b/src/site/markdown/modules.md @@ -0,0 +1,23 @@ +# Modules + +This project is split into several Maven modules. + +## api + +Core API contracts intended for plugin developers. + +## core + +Shared core functionality supporting the SPI-based extension model. + +## export + +Types related to export-oriented extension points. + +## meta + +Metadata-related SPI support. + +## How to choose + +Start with the module that exposes the contract you need, then use the [Javadocs](apidocs/index.html) to inspect the types in detail. 
\ No newline at end of file diff --git a/src/site/resources/images/dataverse-logo.png b/src/site/resources/images/dataverse-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..3c7c0da34a80053be6c47a46906122ff7b36a282 GIT binary patch literal 4800 zcmaJ_c{G&m-&Z1JU$SJGhV082`$V=G48}6Zh+>SfkD12E$d)YGB6|`-WJs1G`;vXh znq6dTvE)hF8tSKhPtW`Q@jmZ;&V66s>pI`h=evE*xz4#`EX)j9m;{(;XlPjAMo{GO zdgJ(&WS~F(%80V2A20kQ7>Z7 zMM5D=As9SP9{tNlo{S?LqiJX~waElD#tTaVIAh&Be6)aD4J|-`hl>`_Rs{h<5cIL` z9!58bSgV_6)|i`K7!4Po_9cKO8FEa3!;;VdGS1t_7edwo{^o@o+rN$#fPmjFBrh%C zf0IHXECBj=A{L+`uPTQDsj30gG~^Xk)YO#KWC381qKX1YNkIuD2Udb8gCNRaz~2w> zI2zH#6@r8s{2l9frv-E;kq8h41%H2kd4DB&Jkd=-QA0yR0R&b6gXNArh8`GZ zpsb>-uBZr8Q`gml=_!N3AgI0~NDZw12Mfhx{BT$w(jTnLKUf3MzhZw00!KKG48;;X zZeU#uh0V-5W>{}J!;<{$ZE zeU3FxJQjCQHnpFIhD{z0)wL$iY}f|`S&#Ev!n=Z}{dIIWUU5RtCgq>dEwrGVbha5T zv3YUDdd*zdf!!Ooe!0wMEvmNQ9ld+!?b?$}w}A{yRXsp-_eQTv=D?`$d{CmG2tFD9 z=!llMO%2%^oLxBjY`1lngY^#FWpH*Yrz4BGOWxp&0i9@^<-mT}1IcQSGRpXzRgxXt z=KElHak_A(T=7TW$5-UjYK-q5KC4-;0x>*T{VD3E^rM{|ITE9IPw{r8`V8F_%u6S# z9EK5dx*%0CvohQAF2`v0b>5KvFEN&m5`y^^7ZdcZGeYw$l(Y~Tdweo{lS_N526)t1 zi27L@j9d)nao*dUFC5E)QCZiSmaiFHSPtkai~pLxy|$ZNT9dqVrp9V2Ys%Do3_9m5 zy=*!Aqk1Z5@}AU+6UjY|?86RCdsjF_7aQbpPw-F9b$vSJ*SHr_?)aPAP>;Y6X z&kM&M($$<*FD~6`@ zkDFSKVI{u@U3&Unf!{GaZguDTl#{bSz{79?WfI(<{znQSfR zcoCTO=%y-r8YzUbUB|Lh#lE?yh5l+?c+XWksMN*gqL~%%iTREw1|AI-_)dSkws1j& zvB=L8Ul49x=gb1!X@NTt0Y>^QI^PeJoc)R+0x(7sv9VrN5alvP`yMYt=_ zn7|f^&@M(n$qOBN(&ufAIr~GIz1HOZN`w!qe_D<=a0t~}rh8G?^$ydBauk-qcDUiv zia#+tH>uq6H+4$jKk*#TBJO(2!fHLmnM`S@cst!1u-#@@pPv*I@nVyZJ4qk1eg0ef z-f$xdO5dSGPVD6qJCl^>ty8mvdNh2Y%X<8prxNY=!_Y805dfQjYr|GMcV-gbtp!`f zdAGyf8}qPuEoSY3I8@x1wb}v5Yu5pqYuLHok3(cGsw;TVdU8%TiC-qOtk2C9*=?r7 zgnnxdaTmbwEZ;cC?<P1ryoWq+L?)Z%)KA_5=fN;JPm zlAf-3+5-H!S~Qat`99pOYMMPAzr&rGUA3a+T}ZejxH(NwV{<03wzf9wT$P~pVT=yo zThQWyzC2-BnkV)0oj@gVVq=n8+!fOp=^xB6R6)OET`Tq}(WmyOzaqOAqR(NK;4Kea zu}9B{+^rVmw%9aP0nh5TGn1wm{N}4m-YyhLnJ)_TpUP%N%5jNjUr-WQUps8DYe`So 
zeDe9L5CqB4$WOf%zRPO!c%F4Bw;i83xpC?wwU~;d&zpFc=39T;TCYNswkD&#E7sdk z8Y!_9*j4huF4*eUQMrM$WH(8GA^n?Qg#j{Wo$+KDO;rITq`H1crUqYY zR;^8{ec6Qi$ui)F=@`>-AXebBkge4WS$Vrnahpe;j*Xl37PZtG?zMd{eLvW1;%eul+*KOkSWuO0Y(>dW)WeqT^{XP(i3cAm@AmUrS)g*Qejw z34CNknw<4d!TlZ2);AYV=ELufKAadvG2Y0j@PK3J5p;(LHqy6Zdeb+0{VNqKNSF5% z2~!XDh4E2U_7ZOo*n{@q#58jptz?_Rq@ z3Q?Bpzr49KHHdtCEjD2-Z?LqS+lGGZjV+z&SxR}hwMlr{9&uW>hx5hh^R}{9%`ce( zo@*uZ9!Lazs5MC4y3|SY;5NgN#GQ{V%Es^Zm2>v?4A|WtrEmBZbJ#DW+wNDEnOxB|<+Is8<2#woMCW^GDIb<9)})MMyv6^@P>ZJqc_ zd)KKwd6$)T5a^buf9Ryv`I!Edv-@8ms~NqQUEDtmC#ET~L&Cm@&Q~&RiEbSPatB8N zZROf3gGYQG#|TB`-%@p)oKp6s*CW3K_C<;v61!4roY0`dB&@JpuSLj+u-1HdAi|KN z9k?uezw@5HCk${kE+m7h+x6+ApcW{q^lO5?ZU;qBo%INvw4y(|Hup(&Z0h*#tRRYv ztT=yLFNl8Us22!nUZ)Exf3qzT2VWiEFsnGxvQN+HX(IeQjmqG0`+nlQDoZ0r^JtC} zUgx^=tuk-8^!DI$Iv=~(IWP|{A*%ej#O9>>od1)OkWGU64>e|xj7W%{&@7#m7t6rd z8~@kQNdB3-VLSx&8$wJHs}%-WLW7?o{MFfb%DdM^$D5rFSs|pOim*XPvpdhqGH>xV zoz$2yb`QVb31fJ6wvEINoRi`^aBfzmcn*Lcv!~>x9n_lL&Z7f_@EVOa+e3l@PrFaE zE=Y=dUxe5*vB1#nJ7J|Y-_M$3Ip|Wrw0X518IpobQztKK^-Sb1Z-uo4bAIsFgPE{R zC=ztlUdg59Nvob`qV(488DOU$y2!|`c@@7dPsnFT5C~KsxzZROx@`a#dCImuZbd_h zU;J@5%|(2i*VfYM*@onn<}GB11AIPT=>2!u%vk0IP1Kpq^XAyfI%cEG@%PtTN{o9i z_>Kf=-n*a4X=++}uk$8Rd%AB&7Z2v2;8SG&vAA8gT0dhng1GG2C*UQ@{ibag^vw_( zzrbN&oWfJuC7kf4<387wrP2>5Z2VrLT?EMAFLTJge$wIlbNok~^}Gk*!scPr(iwk? 
z(P!1E-`hFGAI&(tllQ6*5~5oAl{y&D?pM~y*OwsFws#=8aI>Nph}_FP4liVBWVJjP z4v5>5UOR%u^OdoTgUY?;VOe}@+B!X z4J8qtRTqbI(EeF7%UG73^bLOA8+BXP4}!fKRd}op8of&GNe>*SiyA0)-c<-^&OM(; zGv6kj)?GCn+mM@+{}FVOy1Ll;#_WGP_Jdqo5igqidD8e(I4*vCd4A(JdWa}7sYgv)6%Fz!fwxclgCr5 z$k`)87uvEitr_26bFz9Hc|OTQm&oNJL&?Im9-lf)OAX@{9j+tj0RS*F65JEf?{QcKzf(JH3E&W%ms00laA z9pdwHZf(h;5vv-7Z4pL4jp{U85uJ|g+eWsKn+&kC4-tBWI6-N%G7h1{C@Dd5ADOA5%vwo1QUPLe=m2zFey~k?9>4gTFdpIg^n^G46qt{ z{VZ;vw$@ePgm%_vt)jSAms97}vL~Yznoi8_tzZ1~asQ6MXUP#GM>B2dPb#mN)5Qwe zLwW7I-KNTopcI0N@^y-|a_adjSL14I36{O=S(VcndsnB%86r&L0kjl}6#%je4C_|yyXaX;=1-H*~8p>3`c k^cZXGx+lHuc!HjWFtNCE=Gtk#U;mWgFf-^gJ*V6M0dSd_CjbBd literal 0 HcmV?d00001 diff --git a/src/site/resources/images/gdcc-logo.png b/src/site/resources/images/gdcc-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..6ffdaf9a8ccf7aabf97450547696eeefc263dc42 GIT binary patch literal 5257 zcmaJ_cQ71W*I(_?t!}ljQ4=MrL?^7WM2HqaqCYx8bP;v+wg?h6i;(EOhUi%}dheZ8 zR$0Bg-tU?D=KcQo-ZOL0IlteW-`qKK&z-q*BegVP;_yD;+bC!#000&7q}S#I_ZY`b2L=U{4zg_D19nSQ9Ss1$mkR(03;_Tx?(X9~ z?*Rb-cFX_(sm}lagL9&3m&(1aO6!H5^4;Ct%6~QZ|JZ%Wef&@VpYR|4pUQvyJ@fnb z?|Xl5|1hJYqyH=U&&vOj|0B=L%$%E>yU*w6YNsZnW@n3*m#6MkLxZq^KB2*WnVIRY z<74h4!^Vq?{Tmw_=jZ45O>q8(Am&3Yu1H$JcCfoCM_gf>8$hGCE(=ofMs;bR} z_NyV))z#ILardK%fT8-zg{jHufxf;hKlQ5%Q~UKH*sq%Bsgft(40qc;W2$`4 zGL()AEf34yZH#s7bmwkuZAC>z4G$0h`t^&)_Y8i2)DrGGnz{gXvp^S!*3c4V`OsGk zCBj8s9`Y5l(OQl$TVj{V#GbSUwrB}@&G$!8iR^!|E`0b(sj2g-GQvULLGItPW1$XH z0z?q7s@?-)7ga+y0DzR{-wOwjmH`3)r~=iL6!g4j_KYtSS?cJl8yF1U^;VpisZY+C ztUg%fq>&~h?RzIFUSX$^C^cY324ZHbc6DU&>$019){w?%r!r#eK7goU{rHR1Y#_>K zu|@Z#j&Wwtf44T~IK7XGik7-1cO-Zb8}xn34vHmr)OdmJ>Fc@5it01c_BVey(~p$; zzV$|z)GgxN4e*MLn9s6i*7!$?6B`GH)h#v?xoiu-V6Byl5r4&Ee+Z5;cs#M*IuJKfE2Y)$IdVh z_8se-I#hFPD~r7ZExcz*kl%jE(H)*3oS0P-m4W6JZ_~S=4h-tLVNSDX|4&^m0&g zeu?y=EF$5o704IOnW8^QuPR{Y`2a?H;+q}jzgvoRXc62xe@WOY_wr85CC2ujo_J5)8Iy1SXIe+vI1{=&Nm zXZ$Uw(p*H&_R;U+Xq}Hu^6qMOk)U0Gp$=F33G5I@%eZT7%{ibT)0-#8|F`{3ZJFE- 
zE3rvKe1KK&Z!k&UUttKB2*Tv$B9&jYTVkEKcsM!!Xx)Oc*D!fO96Ob6| zz{wU!1qQ!u-!U`l5~e=+Qlh ze0j|`bt0GjWFnRK+#}S;@yZr!$|G{j)+L<(9lsI;H+I6i#PljT)UpEP=NLfj&lZB{ z+Tl*8vaPPH@jrM?FY6dx(@c&l@Izvf%D)0c5mjWR2Lto4i1}wmZ}39peR-ebDagMv z;=9r=tac>Cd*5V*KEo#Z(_Qw_1~O2Jdjw`K{qAL|Hnk;ds7-(RxE+Umje{F3AD0lRT7%y)!`$@YN*qXe>O9|~IrbJ| ze>%Sy+1kf|T6#XQ+`145(-^*UazB!}t zo_hBFBQXkFw-iNZUR{~rjlxkKFDrj5C8BNWI%a7T|1w;?$HO2Sjx=D9TaPdtSZ_7V zy5x2&|4dT3i}F0nM^H?TWQ3Pb5Q=AWas56b-YQJo)N5?Dg_bog!0S_9 zWo57GlpFWadh7f{?DnhdxR)(1X^bF0358u`hOc-1Dx`uWmVV({+I~(77~n$Sx$eMo z-F#WolxI>}c_Bgl0oa}-VU2zh7Dd*^t&4MXam_eZB#YxO!G*JSoMcN6>Cwx{^%!C+ z4EXf$WK}hiBCv^qgN!cdU8YS{{;`NBsHz&@+9_R<|CzwqkuJ{q)M!u4gi=&~ZvW%s zQZxpK@dsQFKLwQQG%*Kf4Hd!XsY+Pgt+G&~%XyG3%kZIEtWk#PXQR`%pEaFdJ7UX3 z&oQ8j-gKPBiP5$i!_C1dnT!OE&IDT1m)^kaFHzz~Y$ktrtCU4i&)hEprKo$A&1wg) zL-Ge_{FWB44cn%sZPhf~?4Hp$xRZ^27+IN^$xItVrac3?kWwYKS?`@Jgw%ZM3vdf$ zR7)a&!M-H99ToG%$KjPldnFaIx2ElV?`-0H$Dx7=rFt^RL~$p3{E&FI>f8kku9fZa zJCJPC4F%<{vhVdo&C+bHd5&DLA1fW#7Zg)*_dDALz(c)QS9%*uUKVxpe4*3U6q;O4sJ=P zHA&R*8Wx7;n2*>S{ZU+|!iS3W_G>Mq_u&tJZN@2*06jO>wl_g-{`v5W-4q%}*^3I_ z_;N^n^+V<{khU7;z(H5|S?~Ec5kUi8Dk6j#IX5Jo61Mx9R4rBb){R{esUy2W^_l#I z;=YRu2TsOXa4v8Y3v(fctWTe5E>*b23ZW!10lGmlAd0?(q(6EDC8#-Dq zN{~)ng&G>Ja`8QY>Xq!63i@yvwXv*5^yimbKYb4>#fp`?^>^Ei7Pb?8)YZ=acqTj9 zH`Nx(izh+#^p8vcZq2Zf3v_;!frnql69g{SpRU~JR6w;+WHR}h)7O@3PKQBXKy7Vg zy0Ty=$*-A9@N9IK50hxbva~%)?4k6e)BKLfy?o~uAG}nHo*2e1Q9?h~6-*yTyeFy| zma<*OEDHrcm7t|jh04t3xM5AwC@bwBGB&ypA?1crxwZ?H67IVs7sai(cA<_fpl=b$ z%WI~9uI8%~dBVAO-7x|A?dm;rY!AR|sUH$0-wZsWox5guXRET9?WL0U5}R&qf-1PL zgVnQee^S$55PH<_lk-)~h&3by`$82J?_b0c;akR|jA{YHaoCd%Lg6yUtNeL7J zLMe#&B%WLU+D_uOzUH@3#MKMI-ZZP*+HSeL-<1y{i6twBgeW3ug2S5)6A^AN4!gi` zKYH!{e6=`4XNL(nl==r&s=n+KD2JAx0^IrQV)5MwBJ#PleeTOlg#C!Qv!y6__n|Ti z6qQU?{S%t9u1tqhGhCply5d9B!V{DtU`U$R=M7)`MilutPWf62*FN{vFeiFU+uER_ zxaqLLr9zzw%D{j|4dU;wG|+sJcV~kHIlk#lqxc ze4MYM28H`3#l?OUT+5GAP_3j%xqv|hhtX9C4f}b~nD~iCGB2dqg_&~xgsquMNfpJ3 z)mDN*Sf7VWsgd{;GaB5tz{HQn$&NpWywP(=@sK8#dpDxNQ+(>>dobq1fBK4@^W~^Q 
zmm|{J*y4-j;{$wsf`Q>)mlT%^KE3z|H%)!&$E{6WUe{?_DtwPDvquc49AvY3+W!zi zAG)fH31?@AtSDb)kv8Uf=ym$GFRuzUESIMt2Bh+giHfEhHLIJoQYm{e44p4&-C;#V zs(cWm;RY0q@717D|DNuHIZ(7jP*@GCf<*|D9*lgrmxoCRKDD)yD5`&w3-WpIXh^Dr zwh&6tZ{Uz34U)^0+j@Du&jg1njz2;^rmuz-b=d8F?5Q<+#uA?PA+tMas>D!SvH5`N z&CJeV^n!6FrTz~4+*5-gwvyi{AwKf}(7W2x)1~fL3Ay*LoLsKxmRRa0vT-7Fuc(L^ zOK`WXtV%KL#ArX7kYA3Tfx*HCeT5`<8VZHPU8E-tQ~5xxb7sZ~gatbpODGN~Y7Za+ zKDWEyJ_G_R&uoM?#5}6CSxrfU3XXtPw-Se`CRvo49OE*RBD5Cw*Nw!a<8*7eis6Zg zjQ{1xRYF7GkSq$U*_2V29V`7LeUc^} zlIrSS>n^EC_92TypmD~a?O+(DJEiA3a#twyWBNZ&AAg5VRYtV$ez zSa55);Xp;6qr^CE-^#*LrS`DzkSl97(Es1?Zv^;nDa^)zel#-Z&rw?MiO1tgedqhWqasTnG+7ok#MMFc zlQb=$YtT5J!uzX7*v~w2;}s`br3kS{DodGW(lDx)wFQAc1~;30)}7J%`7yx}3GWoF zRBi|6)iK>DX4u*G3)cXkNk$z;pb^rTkY?4-8zJhaB1Yi#;|VgFtRxd%ceZ~ZU0+p) zzB)YWC1DzRIe_z|kkW+JVhIpxgmha6sr59{1%-|zx>=K_13^b=EK1Y?w`6KME529B z>l$e>y^U_O${w;&zbD2QXF<7e~1jiC3?oti*xy=sjQU8*8lr=t>9tvns16U@`M&Tl#;D!^|RiH zcch$e43k9P@_|^tbYt*cz98mVY~(HBw68@LzVJ&TrBDu@eO5}OtyZpbUe&+ zBYu-JJV^W6&EdOXrJ;#nvVIhx=-Y@<12xl7w#7%em5wbxtzl7+s{rUT}l~xl}uIw|!f(s@<_l z&oZjW*&fT&(Y&F&6CFRRVtH;|j+7j@ItO=zGa>LQ2s!P`e`H;Tg_SBWkdu?k%H|MW zR&K3o1PhWb=1nO^@;J5GqPCcCPv zlBTYFvpTIS8tN>ETV(}Yqehq>7|6SNm6-0=C{bo{%M)wDnuK1bwK~SgLRfNLNeVhd zuRG;=Hq-)_CrM+ZU=Wj7-_B0n+xgn6j^35sJy5Hh+1I9tCnuL75b-7EEZwC%F9~&- z3?&ycE%60S)$u{k7}fKn;RGbkQ3J(jOFAvDfUDZ}e(ZIl*Xc%BhjlVAb$0eDwR8*=8UYn`4o_#+JQcFui6{tZF(=oYX^Jh8Qa&NCV-_{S-!Wr4*+3^kc0q4TtHY%Pe@EkL`>@GQ$7ep3Ib`i!HxVs d0w?Fc?W}$Ne?h-g + + + diff --git a/src/site/resources/images/project-logo.png b/src/site/resources/images/project-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..5c46c526b73437bfb517caeea68614d90dbddc59 GIT binary patch literal 19917 zcmd3O^;=YJ)b-Ha-6bKVbcZs4N_Qg-(%nc23@y?M2uKdyUD93B-AH%Gd&cK|ukT;@ ze&FI_IOmRi_St){wbwo2%8IgB=;Y`i5D4r2JFqGU1lI%mK}7~WIWl2;3H(BHe5d0A z0%3Q;e&Bi?3e12H$y}wiUDX^cTs@4P%|RX>9-N=-ZCp%^9nCo%oGpJGiIRgrw4nFk zH|m~g2TNYQ>RWT-C;Q@6h^VNgHRzm)DXF+M#3AxQ#MKUZ)g^9nj2H>?A8JZ8Y^HRJ 
zx^-)Gi)yU*NAKv(Izl;X%ZOHHSe@Q8(f(om`W`icXi?xRUwo>|^V7R(xc3%J=V?Mb z@%#6;H>2z`E(lIXX*{F!P;`2F`aSao0}vjb3cs=G3*fJ?=r{~7#xB!qI(JZo26N{lnhp5mud=s6o$cV?Pt8 zJgVyrP*tG7RL-+dFXpJd<%&mil?T*-l|%2Vbz_VQjQnX2JLe|cY_ke3{H+2#%8@%2n8pN{S9NV~{Uz?c z6X@4cX}2+YTv^!OJV}EADju+>%E_v#nAEY2@JF=(gvC-uscaJ+=41}trp3dzkQ*vs z;!hb|oeLY$0@%OuP(hy;VqWQRF{j|fN3!&uippfO^O!vB3lt3ShK0?cufEjqny)-S z?K+w;coGnB*k2ZR-nps2{_4@IeVTsXu`l)k9IRYKnaz_aI93>h$W2F`d{5`h{9S~|K0S_y2`n*$3q)MPs5b6K)B-rg<$_d#kIa46~3n-!bAx*ZV7fcosfNDgvWGs>sLehmKMb1w?TJ9oNj>ofa_Sibv*~ z*8RG}_RXFV56m5W#*h^*eDm<5!-ZGwnJ;U5y2Z1K#hn8SfxR;GxwRN9WO#m^Z6(3t9EKp&i>v=}LMT;RP~HxhhX#Mt>M%eq zqn=Vt6Re@D98jiKnegm>mw?oDWnfbN(mr_oLfLQgPM%j9#9{*x1<@j|YDmSINR@FV zNGISMa^W`=dmWu_pnJQwU>X$%)?a5`kw{^svtgR$7S}XxI!Zn(A<9xf)IhxQ>B;Jzn?*;wU_46e8K5`m?mCNtNz^TA1zKQ?LydpT>p7Y}wYpTp| zQTct3g3Yck+L+JeFkXccpBkQui7l|j3el7}`%vhjRUSEt*G00jYd%31eSQiX#L5x& zZxw|mu2K#MEB^syp%^rd178j4$?v2u(D`KsNP(qXr=?6cNt2Klbs0mavw0cBZx4!1 znMhf~tdwDQd%D~I?fCn-tw79a@`s0rMc1xMEZl68qRe6h$&Vt4l@aVBh+FG;tkCIO|W?YET-%`Xpp4Q}D`ODqAE|zh)jVJpB5b+u$192w2+x7ytq5YT( zAIdB!Y@-s$VEcS`z1IuxY_IS$uK{~JDCcqzQKb^t%Hrtya#Gm3Y~^NhlQSp1afC;xpf|SOaPJ&~LI2Iu5jL`1MjmD>zw)M6h)!Rpkp{6h zKi=5u#qdWa$hS_w1A-#%MQY(a`O+~5e|v4SI%~gq&44v&95+HkPq8i}pZ0U-y!VAj%0+vhmU^+&8Oj_(`rOkU`hmN&B+rv6 zlV>t&ZfpO@YeM-w7!78#_ce5ns1Fx&I=%9*)xHY78WJy6?mnkLP{C2M_jZa5q|DBN zP4!;X?Gxt$D1eJg$+jOzJ~x;e=3VY0-U8~e9|UA=*|`Cyc7}NeAylu5T5?wM&!62| zsygyME_!{cW>9-tbugmCeGj3ybq>paMwF-Z_>MMlTsM`VZ~TG{Ywi1LGHv|=I0^n* zzSYA#Hk|!m7HhCI? 
zJvd@FQO{z}Psp5I!Kpdi!cLKs)O_f0PAP8P&~6|&6JP;pMhqXX`Tr7-x`@er)M zQgS#@(6LONgTHt8L3>;eHwtbk`_e0Ys+;(TC)Q!RmAi#(&5PvFN{A{w^sgd>fu3L< zDkq+w9ZMFo1h*Pmp)zX6s9=*V6EB@p!und4V-Y@NE;t@(1!GFulOj>SMX_L#I-U>Jz_`sm38gX|AC~Xv zIjZ2pw5fHlj3mNBXW_>sbjzPssraDgXDw_VFV6F}$#|NPHda!y`A3L5*g-#C3tFiQ z#5n$pHT=%<&He&BkxF?zyHz&m87|1ll~%e=_pCUVydqFh?zEJnQNgI2juc_`K4RS} zgc2(gv)xK||F1!#v$>AnJ%?hf6fY2Hve^ksv7XQ)Lt6r+|uMz?IOIO3|&n$J@^LwNimnLH7mnSh1ibVzcuj}+f zSg@K+fIJ@m!*CHlcK;K|(7lNmCOR0SOYdj z`A5~j`8$M@a1~T%^L~O8l@o1B`UqB{fMKaGn3a54B3PjvR>0^#(B! ze!pCg>*Lx)&_Brq0T5kAp<43wGS(4;B1 zhmvNxGcV7BPCD-p(5PiIc17&yO$s>lBqGj=^k3|+(Dn_T_l|%hAf%wSAFqCWMxBy< z8tm|;?*)vECDnq0)8_Z(Hz)C~XIDw5@tg$tg~ImZ6CVR;YL=rqud*#JX3xts9IA?qc z2E0qv!QH%Nc;6l~;0|C);Qwa@bCaBe%_cc%UHt;!KXTyTQ-Q;vD=wl>bIZZ`{H32K zfzg?OEkL{rp%KkS+!IN&`3l>3=J3FHeqE}a1ddt)~Ep_b!0T?=n;=Qpm>r7;nTzNDvwj_wzyn*> zM%M`<t!i9N-Q!oBunN~dnJUPcpl@dA$kU@AK{wRN>& z7{pH+MLFK$B3scR`tJdb8;1Xl=U*2Mif~4NEg*djP0-{t2WtQe7>pOx8Aj{^kG+Ko zTQXb4qXosUbx#OnT%H~UW;;U#JN-wdfEg^AqshCP@Sog(ePEZ8e@%JH7~fbsR3Gq$ z4fDEYXjyR!`#;2BWt2B+ftYNodF14_E@r=IKbRkRv!#@20_a_q+ZH{#qD%} zS)*W=1QxOqK68_O^7^6`oiGKnr80eJUFZg|V`sxU4BBk={5wkd(m|~f`ywpDdH!0R z^4f*$1+ZHc#GXLU(9gl8*=3u-U}+oq#fRtua#5X7$l1u*_X&K8(;70$3FH&*0Ji{l zkT#W;UB>j-ttzrjrg0R2NRexDR7E!)135tG2Ao^wk{P%)KTmF2Vt>{f4O4K=NO?s7 z7vZDNAnL(=SuFiULm^2TA$^5vpzq}0t67Hul-Yn!Am|#ZRVz`pk_Q@MtP%k4c)(%6 zlMaWda%;f`T3uw;BFu4Us!S9epHrfb!`FR4S1wH%W@snP5f9dC9yx1^OwmmS|K;(! zcl!t7C~y2o!q@RZrc0&FST=Oda@ToEyow)d*|*)f{&^azoDv%=Y2DB34m~y%^H;D-SoC@Rs<+$st*7S2;2EX%?i8*IzavMb zU>JWrA!-^rBk>e%13ry6Zq+)|OL{l_r}3&62wauGV;+B9(VS=HtM3~r?e-lQKD-{? 
zp5^DPlbAz^sz2Z?os#Zz z7Ujp}-D(Se-n*%Og;(}YO}Nf90OFyxDZx&oqsM_KU32FUGK!{}d8N*NpbRN-19-{xULXb@XZozWF22W{J`-iZ=m*>ye=Qf)SwH_eKZUs8p zooKRfdKm92fcCl~W)eAcNt{dqt_D<}QIINLK|gQ=lz~n}xJGE*;Zw+=vBe`N^_%#> zv!|Sa#LM#l~qAT_g|-O}jweJduG3?1-*BebyA2N}2rDwfu_ znc5)tzDNdw##Z?jpY(>E=QEPgvwT!OP{5P9{pD+&wJY5Z1Gpx-b$( zzQu|YA|s2`?*{T&=Wo6a2?RkN1gUPsdS=7S8M8@^NuHYSUavomD3V@GJyjZtWmZ=H zF9=NIe1IU~tiHnWr_Iu+Kv84VYYWTfDKW4@m6^U|DsT|79xY&WZM>1cjYPx(78$T( zr8w7xfvpjiroEoPe~g~ofUGM$S-DzI^Gk!2r_S#B>}qqjK!(nq)!9z>US+PHn9gK? zw{e~kJ)ySeQ&PT>J_)W2?GtKnUI%w)$0S0$6%ohwPL#E3GPDwq_SySi0!11nicB!Iy)?iMRMShm z1PK;3xBgnr^6gb<_1KCzQb7617=)5eE`?-StFG{UQ(e3v5^%$?VkoOPD0{_~#ZcPm zO@a7XxzsHgkKbHnii7%^tz?og@0YEL3)Zm8eskD*URdi$I9eMw6+C1X)13tn%-N^- z9uH(36=T4VLvSrBHM@;f+ful#fP#Z=6;{TL z4tn#3WV7rcsb#@70Qs^h@h(Y*kh;R-c^cP0x3mW?7USy9-a?Ul_(2;Ymyo^#fD$5` zbAxaL%pZ%TRS80t3dquO(w<+DrRZ26J&>Tv_R)!ANARm0=f7IakB3&mR9K6y4*Q21 zLt~u*hEedTeh5KZ`PWF;qYl`rXgiWTVEe_fqJX_=k*hq}lMeMRL>MPx+eVCar%l!e zKz_m&P$i>8uue9x)*sADk2(mX{}>*i5Pm{mRheu!6@)5SeuFtqNPg+r6`e({Bv=0 zA-Ft>7~h8bso)=&mfM6B^>dCnU5w(Lj!X?-fDK3Tjfy#cfv1a4Wz`mhJ)Hg_eYNGW zQ__HB1&fGW+bFLf*j3Sxm^5UgFa7dmAu{n^LWS4-m1_z;R1I4SoB(nw9Snxj1P?q>bOCU^*f zHm0VX=qE~``=aZQvMU-ZU&H&j3&Q^JKxYNW3RirmBjw`+Tn19~m#049<&ly4b;qcVCO`OhzM)s4^i zS#f2P$3;=t%U;f-nDD`|$lngr&#;$q+?LT{+Bn~Spxeff&~l-Ch67f{NNUQhB<3lw z6s2*lc#>LZaZI>u14k*|RTo)uR}|X{kvE+8fAy$6Y{MK4lti4Xn-tl$=BAjcj}V$D z6C0?*r&KDzH-GLz3o_u1=s`;LOgC@={^%!`Z*158+?C?UCy0nrQ62-7GKTb;zL~{&)a9S6kJ#nYc|E^*Dh;-8J$}J9kZ>DN}T<(!ddM$;{ zcXuq43RE!(x$5I~+@rOOYi1eS0~6zW9lt;^@{D?cG}Y6H{;v}nzwb~B!9&}~2Yjv# zqw-w4Ejr0R+1i3MV}}> zn}-k(RtnW5FBFajP(^(lUjCj7D-Z`%lXkJ=z2J{Cqq#(FCQbuBvqjTAIE@#nd3C~w zT+kHTBt*HgF_ao94M2(QFR@H`JzE`AvCba6S30E{;B zLZO8Km%~FXU5O4Qcrkt+jb8X-i_V7tgU^oFJ6Gx7hD!*`z7J2rU7rH>Ek!n3Kjdr*MQvf00G$%MGC7fuq@%nN5^SJdIi8X>c##mS_LWB$EUfW(zROh zv#~X$-r;Gu;Uuhf!oRI4XcLy;xy zDur`|6u#-2{_>OUNy@MdwVf-k2Yl?^g)&GnAOCR{N3!LYgq_9VrmXblkjx?PYalYS z*L>P(6ab|>w0^kuHR?`V9fvv>c5s2@kXt4UxR~qPAeeu7quv~p+cmIG3527>D-dT{ 
zyRK+B1H_0*(w^8YoQ?bz+5yecZHnFgA?P8m2UL!+w2(n@fK-vPjA`T_Z2C4+APL5v zZV9kjq!c5Rdv&Lvz^t8W^+65?8STzS$8TriDHeG7KdL594sBkm?tVo1kvONRcSeSH z8`{qW*IYd~t%Z%?A?B1G45R;f&hopy ze#$(q#&iLb$lD!|o}ky22*0NI8ZVDs7|}bhC;Y@;?Xh6NFLBJ!KJZt;rk+awUyL#N z;XPdf&zs9JauvmLL3mJ0$RL`3qpb13^0?=I9yqh#Vv)(LH-ZzTg6YgB#H&(s;Unc` z>SF%a3S{|%y(d)Eo;wx>HWRwNnOFOGr2N_>IVIf3&lfy=UfVCB)2@ZC7h>hiWWZns zf|#B3mTtV<+dJe(H)(_3udS18Ckrm}&s3;psr{nPKtZjNM3BfFdNaLUd3Pmo;h$N; zqztzz%9H4qvm{}CA%-B<(A=1V${*@#E&vgD96$PrvlTteW8n!$HSwM{cy{V=VcbM8 z{=W>#9{V`9JWb1li{R+=i~`G-Uz-ThD^j?-&Et;^=6J>?YgcD*U$ppJt4cv19Gxd4 z^+L}KpR>w-T3+zR8jfO`!{eGW!`w{3T%{Ko^en$6hW@~>PBUjGv!C^}DvA`tqrXfe zg%$Tqwc9_v*G?8N!YgV~-^`^dks!biON)}9E$XQyx87`|Zju87EuQoe8s2Bj1m8Z= zplT%@q3k{cu5iB)VT_5jBR|Q?%_ky)^p%A6R%cx?Dl1%~lY>@ciS@BpQ9Jn#_pV4D z`hlDa0bV(jca%8r#}OZGRsScTy2joaLD5B)>ihMdX!#XnasUM`?quf2{@e8 z39Hvxp(8|6UN-f$uG#PQ7J3-j-=BeghA`SA6Rm}2KL)G*g_-hscTmrZS{DCIpG|6p1*@V&2t59$e9Hbu^keeCF0LE1*a33?aL5)Z&=Pqx0?^`Fw!e{t)&w=jPIM~&J=W&U{91m78s~eKs5(LQN zMIO3?oP5j+Y0xL@Zfq-xk5`44?vWAb;gud{0DBkgnZG}lV5PU?3#7p~K@E76pROPw z3H#1}1j8h8Gl%dHE2~DorjazmHlK~z_1KS0?c)}*-%tjKOAN%vs!B9E$OFm60!Z5> z1}qp_1^-VA0MxR^#Q^y0fbSeyuk zJ4eCoMGU5vv<+Xh^&Fy}b^mP&ooADK)bl-!6f=4J6GvqyZr}8~@dd)h*FnGy&cMaY zpD6~Mhq2+Q^AcM z7uLPV79H~eRSa?eV8!!aD`-OT<_9nd9+ZSaeCbrd-{T^*MQmL3x(^R&9IuKla1caD zLB&_m10Xg(GV1@L+WvNAE_pqI$sNd~#kG9N0}Q?G!J^=JyAE^`%dz9Pi>x>thWBZ58}S(R`oSz=VrqT|t+joZp{5?X)Kn*+ zbpT?w{k}_F)<*NA>6Rl1--smnN-x*ld2pSe{qJ;#`vZ2@@iZqN97 zo7)LVzcf=XqN__Y^Jlkb*`yi5Lj)Den1~NSkf-coPLn%miZdecdd1*RHAN|95;R=E zuo2DfUrT7D1d`R*=2s| zwHP33l1NHLpiSU+7~iBoX_dF6+qeRXKUG?yV6jsJvPuk^mLe(Iv(@Ae z?dw@DPv&%NwyPd1XiOug?TR+F>tM%Xf` zY$G};f%4Zm>ZZq+qTREq(Az_LW4`8*hRkV~f@I$57|o~o1i@4)g8k*kKd0d5FkKt? 
z9gs7$g6rEasIx`^=b4n@OMF;-ijDX#zWleuex(n{FWMR!z5(#((yn#L>`uj8yxWT5 zf9+9l!`PUk5@Kn7A5ih7Z)@w4>XM8y-WddoX%)5(acg}VS=l2f zcM|3IS)Jb2=TT%nL(=bjlGCvu_6v6J-wmcsbb`57^Wd>>%%ozP&8ghjG&u3tE#*k$!&@i z`l}AI#G+%9zN_c(_SKQ|;!IgJP{z0K$@X-JwGf@2m?k!_eWh`q_ z+pbU&YnW1z9k@eqLor_pdcQVb|dWAMsBe6L~nh@rBKnQa{5~V>SpDgO zoJGm@#M&M@)Gek1zx$fD&tI;<5=$n4Og%uM&Yt!-h?7!oanoN+{~!-$)f2X-KBq?g zQtF-Z@2L(YS*H0tOY^COvq2T?K<@3bg3+;%eP{ErdZ$zjM}+AE63jW!VQgL|<8R=ILI&$NC46={|@PoR7wF}-kIiD2%D05<&!n_vhG5Uwg1MV)mt898W+xasP(?+GJmBG42LCoP+ zi~*D2Q>73;xq>Vg>Fl~v09UTma>)|ST-Z~7O-hNV4qtx*F6qw7<2%kxm93_LrLGxW z1LsPJA>1m4j}Yl92P$!QqiNlNSG8JWk`_`-jo3}lEH^r_x2@icH|-DGnmQlfB&yi8 zciQ9u1D0@U*zFQsS+;PR3lY7>{RDe;KcN6ySky0pp#r0Wy|T9++DVTU#W?NzJCZn- zYPI+iXox7Wmqd6VUFn?)@~ex^-U=)EF%P_Yh>1HJ`1awHQdL=Wywa%{;#LebK#!vb$%0sT z#WS<@0{tV65D<%|l1M-{m-W=uF5GA;_9mt|Y15+DH%_fq9u2a%s=K@(X5k5t+6+TG zE76zk9v zx))O#_01_-I1Ga90wDP1K2Xbz(zUL~b5yXxiD9(Av7a$(*9756cF{) zmVOW5V0HFW=hrvbYeq;`^+7022C>27po_9vlQ+_)mORYoxG)N`@&f>LcJYv|yr4J4 znQDury+{U7=Qdd}oCB|~ukp+QRg!s#v;0pmu;D(wj{x#MVbw{8u(S5>k+J}y<*Ng()4Ha>~m{EWv>Wj+Kg1fI0xD4*~ z{Gs<*0G3to%uY)X1qyB|u+12Z8N?#cBK)dyolU0PBcS6w9C0E%tY zW=fJypcFfXhty_~{mi#(#8(CLE9_;*zSD=Q5N9e1hE#!%#kHmF9)$IAbqYMJ$Q@L> z%kFjnHLnO80Vw;;{-eb7PLTpeT$_^C zqPBR&hPQrx?Ls&17Qn!N&;!cDX9WKP_Yi%c(_GE06H%Z+0ArCC5^=WxS#)f`RsW?RUA!f@RE}vl@SVD`&3Nj8MZ~(>OdDmg%jwvW4*6u z(njj`mr`DJ=Xmdp(~c3)jMy<~yik?G-sP{O+sc+K4Wwv*(X;r!)1i+{vuD#ec}o!fx*-bLoT2`?%2q>k`Ea|R8I1{ z|Gl@dq;jm4FHkbK?am0E0~+|>{;yAk0*>QbQM=@q;(aNqkk!byzf@{TJ37K4COB>S z6AK84pL8ykKG!b@tXKcV$XI|01m=Ls2j9Ak{nCFmQUJPYTW14z0*8o$zPVi4c;(+{ zAP)t~tmxRHGF|X1M92DtOUV=cVv(y57=&|gBFwf0x^Tg||0~UtK)Crfm|Kjp8S{4V zy1gPYW97)1k_OCjtD(C%D=x){8tDM1vg+V%h&uWnntEMv7d^t^$X~) z%{;Y3R>fmOif#=^#{QM-6>$<@N+d=un9$){@{3zHL{cf^SpeO}g#EmAge7Zn8{CP7 zt#TYm7WXUlwyV>q=u`%n;MKVIIAk?JKBlN0D5T;RxP_QhYGLzK16>d^9d|us91=~W zqi?&lY+mI@wA+zWfMPFvzMd;sE0#Zi=j~-&7!NPC$~3?ggW6#-Tlf11Hn5{>Xs9nj zwu_V-Rjh?obK14M-Ki6G4LMwbnFV;MxZ(zF3&*`BCG`|iDOGy_gh>3jf|chLGTR6a 
zuDH?imw6Vt20Q0Vtp@Q#p0qp+1A?dC&u706Hqd!^s8!s7xvDjh%0P~qGRSI+Td&yD zoM1?pjO^8@{Ks-*EiXSaPTFL62;*szD^=A}%yN356QmN^As5@lu7>fa%G7I6Tc=bv zUHuDnl7_)Gr_g9XiZoidO9L0>=KJ~$P|jA)o&tA(C^lZ#0xkbv*$Z{g1d|I)4?pf5FdhttJ z$`|eX8m=DO=uMB?XtR6K%ZCdHVfRc#%*-dG@23+EDSC^!`dv?-OMJ z7B)5y)6wB=dhji{bOX3>vaV^kjdI~kVH~TQKVkhVq+d!rRIS3mi#d`nQA8R9Y0*q5~&y6 z7|jZeacQS1D8?_v*2jtaDM;8hRoNn~MxoXAS6Muc&9hrj-qm5{$qnU)NxlU6Bk~WE zD>SO5TQxwPl6AvX+vUHh%pYe1T!zUmR#ITJ_<7k4gzZB6Ip0o?S(CTaU|*7-45lx` zdAh*>VY|_OP7cu?2YLqz*k%L`L0VCr?e&i>E6xQk;e*&_39Ix9J`MXoRi`|9vOh7b zuQ$yH>eJ7hL~xDPXeHg%-E6!Pa;c$}18BKPFAP3`>M2D3!gd@=NR0Q!a=8A5(k(Ee z*Ac?U=7B6dE8tO$it=|ao<8|h&>-wO($3SI_izGDwmv|XbgSqXPEdi~5TS+IU=C}o zmY>?mCONWj0?osO^WKkm<{%1FYCS`}vybSMJ78BYl`jiHTbatHo9~f`g>hSylmef9 zQJllMW1_iY=f;kOaEnZIR(jp`y1>{20rh=TpxW;v8r>dV>_l4#Pq;gUux4_W;IMDy z^gn^NFQ@syd$+?fg!=KoU515eBqb->WR_pEq~{47tqmYi87ZQr+FqaNad~IyI)@o5 zP*jScyHF zEyR--RVS*M&^zq5AI13G|~$j zd(z<-1GvCN6W)g7Dxe5?3tiUhp9?t_WlSvl1*_Mg41TpY^|L<#RhB6=-cOIbQ~7tl z{>l%Cs~nfMAcGfRT1yVmPXnukb1x!3B1hA7FkQvB;yd*Po#f*)JI<;E3K*EfN_>U~^_*y`s zLZj!m1Fwse2_F;$K2}+!3goQmg8mTZI`d>w;`)@~vk3{xz&nN~HD^`RXwzLDGrstc z(Dw;w-e%V{=4gUR$f6bjs4f3GMIaN#A+pE%5fFadOV<{8F$&GqN_@5F`PC``CFvkRIILeVB|4a4dJ=zRgmUz z<5~P_K1q}Ukj-=&S-geGPB3N?9*89o0y;5!{jnwW>V;Qt?73@mWuBLaLj5_zxdWoP zKC*m>?kTKJ8QL!^sonfRf0qJ65#;?s^Yy;7ro-ol<&dLqkWPK2YuoOXogCa_ZE$B) zP#?{_HG+U?tKil1VRiVvCWT>b#mE0uf&l`%6ckuhg>q*h^!-%VOX|K2YuhiUM%XbL zx;(ScOqo}oxjH8Bv}(qn14yoL9!~Q?LQ{oYcX%2N+|8UfY`rqO=?9Y*+!JTNYxEYt zQXM@hI9(*XKf*L*&hM(Hkw=N+_S5^QyL8q{+p%I^jFJq7*1@&EkUc}8zk77FbIedX zMc4Vntz;Rmd(LqdR zzCBB)_s(bDZo2R4E)u{_(ILVN4=IYbt^Eu!peapLrnH`~=eZq-H|jPFu(KCWK#$=i zmigx`l86@m3?zSFlhqx!wnh?u9}3&J$8C7Q3-oeke_z4Mb&wn!yIne!Ad=H<32Kbu zS229*Te5)pKhKChxOhdvCy_78pwywgPwsQSN3;l9%A2|Hkx#Z9HBPFmKc!0|8_2SL zMO&vi7X#J3_~>iW#7{l1;kZk|W%?6mpyyo!a7twxj)<#KG*e`6&WN=Lb~;4;qW67s zHAbxgHPq%~0wC+&iWJT`d3OK?;V^rbye*oymJ^S+y^;h7LCcrQNnM_GUm}D@P#MPw z?=kpfGA?FY3E;lOlWssA&HsKb++^kkC`i%(1!M+=S0CvBiKeM|`6aEI3VJh7%co&} 
zH7}&`^khaZ(<`FEs|-Tx1ImfSJy-|-Atc1|4Ss1$wL0HnzeM)C6uL2=fqjuu%Dp7} ze?>s7j3xZL3y;dOyTidyZW(wPd!g&Oe6OQ82Fz_dp@?WM8q_EQF4U8V!eW542z-|v zawVc6W+@NoEiHsi0S%COtc3l~;I7Nj1HEPL7qCNANXp!^HtL(H24$bm?xm9}3xm;)UVmtoQqs!@g<-9a5eAuOj%(w!WTzv}+|x>X22c zcrQaITOJg8fKqT-j7WyRCx7zr9o7D36(oJ!=#}iTIo#8OpgfKH2_E@lSRvXz`9TZm zgSvN|grXgU)Bt7g%_v8JJ#{sw9!eRT2Kp~q@9wT$WI~wyb;t?-l3auywWV$h^E2&- z?$uE}9L0|(ik12^n&|KG*_?AXRf=zU;Noj&YHs_9Vn~u z!7*Ex;+7`GV(ImsmonX}D{*u9Vxn6{{ z_@K1wx*g@Kwpq(1^<~Owudge35g<@Zulh3$HZY={l44U{AVnOtmYiX#{K7b!;`Xsg z>I3LDp^-Dv)-ChhrcBd)!`V11WA8sZe8R??Qo}B>lyFmT+N7%2E9%sSYXW9a`~tK4 zHV=oEdak^XCFVaEUGSHZioa5`1&$~IS|y>8+96-&8@$9g3ukq>L-Ggd>3*4>d8s#} z1rdl8M!h{fzOQn>Hx(`D&iEv|OENUo9irxb|AARk0^TPS8OAQK_>3(cMbIgc55|W1 z)76N5RTXVFe{E#UTjZ5-8OMym3U>;vjo=;h`7q~&3J07yM65o==ZhFISoICcP7H35 zA(Fu%3B~@*ChV5PP~{e$)(&V$)*WK~B|0?ZKXk2PGZKHbQdrrXp8qzf5RC$Y9ri>u zJK4^;>ry_~Fq<n&*yF zEgW-m8P@X)D!H93RG}|ve^%f5>XGl2D{=B>|p|_fi!ik;~6*C}j*(COFhWi?i-|U)vc)2M2#|b$*f&sJV z(A8R&mY%sB2=OYv@=FF;e7;l^g;d_f5EqCi?7E>h(CJg@la{7^oww}+$RL{_t-^ti zpcjpdO`mP+cAj$++DMjuV7E$y>trT@p37>OJa>T8Mmg1iS3FbQKZE=)CP5?bAFOW` zfI`1#Wb7cC5HGOeC!B^YZ=!Sp8c+*Ka@f93wY*MQ`l>5>H3h=|sl<{}^7MOTdcJ)T z2+8j+&bDe8pNR}7iN&ku*1HfgJbez6H&QP}1;sYLWv z(DCaXRoH@)v~fGxq3`F57cWD*pkvY9Fln)xI~}$*#_STtgsLGR$CM>_>XVthkRLD^E-=E#0(y=RkneBmqP?+|b4+ zP=!;clbTyozYMJmg`DO=ma7P2L&n=!t0aDc6*o%$_|j%z|8n{|nlgj{($9DI_ydJ2 zpZC04U9M7ZHF=OU17+n!i3D zg|!5Qu>+KszF#i2204Rbg$1KZShW(Tq%DLRF0veEsN>rR?b20e`t=GE)Brs91sy;A zBtti&Bz$@lYYs-U0mo`ukym9w?xZnwbT|IHbMDSo#+E(yPOxORtuP+*>dodnI6ngO z4GCE3m4nrD$l7A8N>|iV6Gr*;u16D?3P}D4nadAepg?*~U8#-59sfu>Zro2ynhbos0M=XB_X41?oWwx&Yi?U+ zxw0CunagA(=&i|~JZ9vV_?fvw2l>vx|0McTyIsT@LfOq(k1GJ6n&7|RrM2B}ns0ib zOY{@=V7+iK8FY|>@iFXN=43^ zBQvc3=qzn&6=dMmj|ATArihh8J$$1tZ(G$2S3buUHk%cH_XsMpysv!;%3EmXf`1AB zy7aiM1XTOhO$GlaMAo%FS(~Ag+Nr8dgc=@TDC`1jZ*+Is8Wj7fvplAgYIG?$3S7JPit~GNypr3c;Ig@{KmNr8NJO~Yjy>{i*yx`f{ieg^~(xFM9 zP+M(YJF69lb9Wd~tH_*)^fYH%_%EO+x|_@)oBxEBYOA(@thWyeAdG$>*Dye-Ckc2( zPcv>BLU~?dPzh(>RE1+pDW+P>Fk#}Q_VlF44p7X9x=uDnjO>Wp~& 
z2}ATiUBC>52F&3wbXSz{bu*I>=M+!Cxkb-G6sRm27E$x-n$HSN}$89s` zUlAI>WZgX}!0~_DNr&qUzIQ?c$C1yd?R0rA(ihjTrSPZACBVRYF-)`#YF?MTNCl1x z@KE+KhSmH*j*xatMHm;$K5YU%7!U*y7(Wac5Vkqd>w%ASV8VFj%6_`@ZhY-Fl-Ivq ztKqR=Cu&kh7=cq&rWxng;A5Uv0>v_^MhtX&XWC>+{3CQj7U(1;L=)~au&+2aV5f_l zc_ekHn~Kbp(w7T%8Xd1h;uI`ufNyX>-Q~dUmH^0vVt1AwEI@@F2KTZ0J{h#^L96>G z_`V_5+Fj3ES0!*>^Vi?#-}hGf2e;Up_zj`aPNXS|7wg23#iMui>+RY~#jPGZ)&nvC zT3kl%%TMpqw3@7L9N7WBEx;KAAdijj4@GD_7gAk?E|vXHqesltgaOTO6`XHx)=kRn zomyCBxPE7kpuGCxN(E87QrZAXAI^d`LZaf9+Fp_t3bWLuhvF6>PgFLw3{$%W+Z&rw z%D@f|A(Rhfd8#Catj!xWNZlp?q|CaMU56A%Mwqwj-Vo$h0;4?ZSp}S%HtGGGsT;zIr=UgkE$uGlyo_XggZ3PVx z+cc@r>R<^gbFK>1egA^;b770c_b~s2PlD+UejGJA_F>GlaIZDWpDK)h!W86ceNXF4Pa<6M>8gh&o zIhLGlDzdYE|A+6-@Av!jc|RWS$NQ)E>-8K``H^k^=RyV0YlAq{z_VP{$SXmw(fUmv z4?0GQ?4qLX@-oVDEqo7({?q~^bG#6qb8WH=$=g9x!K?|L2%Qo)^WpxBPd&yGvv`M| z9v?p=SM7p-hbfOxa(V-X0?&~qVacT+h(nWW6}o|i$!;6!_e{XaI1J^MUeSKm$E^gW zyAfPjV@pIy5)s;?`;yZBt+et*%qGd`mzfVLjU%;s-U~!0&-(BZVY|hBh-H-&(eGqb z=Id4K{}tfgf}?<&4N@*alx>}s_#SWN=Z3xs5nFAe?k%F_-@q)z!nc1<0(qF!t^oDj zC7(p-ElFi696Z4`#$H&b_4`^}eZs~BM_t}JV%sRp&_k&|iE;JLjjH5Y|Ky^QNn;gJ zUCiD!UFum^v=T|#LSe7mw8Ep3&yNHYZIAmdZFe5c%Kvm)Yd?OHm9mJ8Bb&VWyjNf= zBg!{$0W7^@M+_L=AeBY8;B$!dgDVkZL8!$t_=9;{dy?c;nan0^3D z1}#}>T34w!o6T{*I+IZ-1^ax)bknT*qx^JH;WrDHB%zeOd(||wUep?ai-Wo3^9`r^ zknrVe_NmI>px?HFwb+eyp*+R=4e?Br@gzscT|v# zN$m(I4Y=$%V_-s(W(#+iTgyTLBWBb&TIELW@zwce91e@lhuTE~c%))n+{0*i*)ZW= z?iOFOuL%;DR3o_jO>5@H+?DgY8Cct)D48;sHE?@Cu_aDnjcCno91|FuC;`=MZKii_ zoJ(b@-axrdMORL{i%?ZKr^XYeTB!ciMc$gbf8L5dz)bbsrLgDC4V=C+%#d2o!2@PY z6ihjuLFC24@oPJd#^hKK!!}65LPX-T%735d_B~l;n`Q)4-c-b%v0X^Be5tc57bIQ$ z_|3GWhJ&2qTbueR-;=e^CcJ12SKx^UwMM*LXVK5w`NGumMES7pDHuXA=+j^wP}gW? 
zQE66S7n|8|LwD`b>$0Vs6)O?XA%z*T!+VMIWG$XF0QDQ4dA79{kv(|4M|NF zzd{qgCL@2%Mz+X!CPZEvPzRhUw=BFd@ef!3d~)<>UsG0T{v2Iz~|PH5iu&!O0D0;Bx^Whq(U0Ux78*#=Axj^{oo> z_q zr7$Kob6hEdWFCTZY(E#fT_iJqKI}W*)>%8{DWuoSZ(If)K^J#9F@NgL$th2p3Vao> z30P%3qYfd^r9C&fRc^EdJw@r8m1CJ?GoZ-9EN$*snXXpW&Q8{mI`#q8&@ zQLaF^^s&QQu0jQ7mV3%oMSP}%4rKHSY;Mn5g6B^no@?JBvwU1x9oMBZ6FUP+E$hQW zhtdM|FyR1&U+K4ja=?98p~At{1aC(&U)KP&0hM<2n)f8yU~q!3batdlMi0Q|*S|+6 zU79XVei&(8@E0;i=IW36bjO3!WB~)GmrLG8%9?9xFR}4%t*qY_;ELS=r44~~x{uVI zmQhGLMmQUoV?48h@DbVsshoupy%DPk+;TCv(mXpi1M8^8PH4m}UWEX!OTK;{&j!F2nYv^iTDP7{MAgWLCwq~<$|#MhG#9Cmr+fdY2sr!@I+2Vn{U@2JjhTF`QG!Adu$h|KMRh$Q%p?qYvflxZraC=k#sAR7w-I UU)zw)N3j9UVVy9wR{m-K0&XVKD*ylh literal 0 HcmV?d00001 diff --git a/src/site/resources/images/project-logo.svg b/src/site/resources/images/project-logo.svg new file mode 100644 index 0000000..afa4650 --- /dev/null +++ b/src/site/resources/images/project-logo.svg @@ -0,0 +1,88 @@ + + + + diff --git a/src/site/site.xml b/src/site/site.xml new file mode 100644 index 0000000..c6e1862 --- /dev/null +++ b/src/site/site.xml @@ -0,0 +1,50 @@ + + + + org.sentrysoftware.maven + sentry-maven-skin + 8.0.00 + + + + ${project.organization.name} + + + The Dataverse Project + + + + + dataverse, plugin, spi, api + sentry-orange + + + + fa-brands fa-github + Check out the project on GitHub + ${project.scm.url} + + + fa-brands fa-zulip + Join us on Zulip + https://dataverse.zulipchat.com + + + + + + + + + + + + + + + + + \ No newline at end of file From 62fc22ac8b7319cd7137c1f2e43564f3d2fe1046 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 1 Apr 2026 17:29:22 +0200 Subject: [PATCH 44/55] feat(docs): parameterize site base URL for flexible Maven site deployment - Add `docs.site.base` property to `pom.xml` for customizable site base URL. - Update workflow and site templates to utilize the parameter for better flexibility in local development and deployment scenarios. 
--- .github/workflows/site.yml | 6 +++++- pom.xml | 5 +++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/site.yml b/.github/workflows/site.yml index d30dac1..445e09b 100644 --- a/.github/workflows/site.yml +++ b/.github/workflows/site.yml @@ -28,8 +28,12 @@ jobs: java-version: '17' cache: maven + # Later on, we need to extend this mechanism to provide both version and snapshot, + # as well as a different case the stable version and "latest". - name: Build and verify project site - run: mvn -B verify site + # By default, docs.site.base is / for local development. For deployment, this needs to be adapted. + # IMPORTANT: make sure the base ends with "/"! + run: mvn -B verify site -Ddocs.site.base=/dataverse-spi/snapshot/ - name: Add .nojekyll run: touch target/site/.nojekyll diff --git a/pom.xml b/pom.xml index d36b8b8..150d1ae 100644 --- a/pom.xml +++ b/pom.xml @@ -54,6 +54,7 @@ 17 Apache License, Version 2.0 https://www.apache.org/licenses/LICENSE-2.0.txt + / @@ -134,7 +135,7 @@ - ← Back to Dataverse SPI Plugin API homepage + ← Back to Dataverse SPI Plugin API homepage ]]> ${project.license.name} |  - Dataverse SPI Plugin API |  + Dataverse SPI Plugin APIThe Dataverse Project ]]> From 05251eb143baee7a45150dc8217ba4e4b884bea3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 1 Apr 2026 18:50:30 +0200 Subject: [PATCH 45/55] test(core): improve test coverage and update interface validation scenarios - Adjust test case naming for clarity in `PluginLoaderTest`. - Add comments to explain intentional bypass of annotation processor in test setup. - Expand interface validation tests to cover runtime checks for invalid cases. - Ensure compatibility with `PluginContract` roles and `Plugin` extensions. 
--- .../io/gdcc/spi/core/loader/PluginLoaderTest.java | 15 +++++++++++---- .../meta/processor/PluginContractProcessor.java | 6 ++++++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java index aea7b20..b8eebba 100644 --- a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java +++ b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java @@ -3,6 +3,7 @@ import io.gdcc.spi.core.test.basic.TestContract; import io.gdcc.spi.meta.annotations.PluginContract; import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.plugin.Plugin; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; @@ -31,13 +32,13 @@ void validatePluginBaseClass_validBaseClass() { } @Test - void validatePluginBaseClass_invalidBaseClass_missingAnnotation() { + void validatePluginBaseClass_invalidBaseClass_unrelated() { // Given - interface MissingAnnotationPlugin { + interface UnrelatedInterfaceNotExtendingPlugin { } // When & Then - assertThrows(IllegalArgumentException.class, () -> PluginLoader.validatePluginBaseClass(MissingAnnotationPlugin.class)); + assertThrows(IllegalArgumentException.class, () -> PluginLoader.validatePluginBaseClass(UnrelatedInterfaceNotExtendingPlugin.class)); } @Test @@ -52,9 +53,15 @@ class NotAnInterfacePlugin { @Test void validatePluginBaseClass_invalidBaseClass_wrongRole() { + + // NOTE: + // This local interface bypasses the annotation processor intentionally. + // The processor would reject this at compile time in real source files. + // This test verifies the runtime validation in PluginLoader. 
+ // Given @PluginContract(role = PluginContract.Role.CAPABILITY) - interface IncorrectRolePlugin { + interface IncorrectRolePlugin extends Plugin { } // When & Then diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 063a868..2814c56 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -49,6 +49,12 @@ *
  • a {@code META-INF/services/...} entry for the base plugin contract when safe to do so.
  • * * + *

    Note: + * The processor enforces contract rules for all top-level and member interfaces in compiled source files. + * Runtime validation of contracts in PluginLoader loaded from external JARs or assembled outside normal compilation + * will catch illegal usage the processor cannot see and validate (like anonymous or method body interfaces). + *

    + * *

    The descriptor captures the build-time view of:

    *
      *
    • the plugin implementation class,
    • From b696c0daef6ff3e0a0a0861c6e91c5811a74ff2c Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 1 Apr 2026 19:19:37 +0200 Subject: [PATCH 46/55] fix(meta): add validation for non-public interfaces in `PluginContractProcessor` - Ensure compilation fails when `CoreProvider`-extending or `Plugin`-extending interfaces are not public. - Add corresponding test cases to verify error reporting for non-public interfaces. --- .../processor/PluginContractProcessor.java | 8 ++++ .../PluginContractProcessorTest.java | 47 +++++++++++++++++++ 2 files changed, 55 insertions(+) diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java index 2814c56..da63fef 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -532,11 +532,19 @@ private void inspectType(TypeElement typeElement) { throw new ProcessorException(); } + if (!typeElement.getModifiers().contains(Modifier.PUBLIC)) { + error(typeElement, "Interfaces extending Plugin must be public"); + } + validateApiLevelConstant(typeElement); validateContractGraph(typeElement); } if (isProviderInterfaceCandidate(typeElement)) { + if (!typeElement.getModifiers().contains(Modifier.PUBLIC)) { + error(typeElement, "Interfaces extending CoreProvider must be public"); + } + validateApiLevelConstant(typeElement); } diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java index fe19fb8..14f668f 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -1764,6 +1764,27 @@ public String identity() { Descriptor descriptor = 
DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); assertEquals(8, descriptor.requiredProviderLevel("test.SharedProvider")); } + + @Test + void failsWhenProviderIsNonPublicInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NonPublicProvider.java", + """ + package test; + + import %s; + + interface NonPublicProvider extends CoreProvider { + int API_LEVEL = 8; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Interfaces extending CoreProvider must be public"); + } } @Nested @@ -1908,6 +1929,32 @@ public interface MissingExtendsPlugin { assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "@PluginContract may only be declared on interfaces extending Plugin"); } + @Test + void failsWhenContractIsOnNonPublicInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NotPublicPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + interface NotPublicPlugin extends Plugin { + int API_LEVEL = 3; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Interfaces extending Plugin must be public"); + } + @Test void failsWhenRequiredContractIsUnannotatedPluginInterface() throws IOException { ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( From a6aebde95d1c0b5302abaf6d72f38d48581d3314 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 1 Apr 2026 21:35:54 +0200 Subject: [PATCH 47/55] fix(meta): improve directory/jar scanning logic in `DescriptorScanner` Correct scanning logic to handle directories and 
jar files separately. Also, update exception message for null path validation for better clarity. (The scanner no longer takes a set of paths, but a single path.) --- .../io/gdcc/spi/meta/descriptor/DescriptorScanner.java | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java index d6562d1..656c76b 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java @@ -32,19 +32,20 @@ private DescriptorScanner() { * a JAR file, its internal entries will be scanned for descriptors. * @return a list of {@code SourcedPluginDescriptor} objects representing plugin descriptors * found at the given path. The list will be empty if no descriptors are found. - * @throws IllegalArgumentException if the provided {@code path} is {@code null}. + * @throws IllegalArgumentException if the provided {@code path} is {@code null} or scanning fails for other reasons caused by user. * @throws IOException if an I/O error occurs while accessing the specified path or its contents. 
*/ public static List scanPath(Path path) throws IOException { List scanResult = new ArrayList<>(); if (path == null) { - throw new IllegalArgumentException("Set of paths may not contain null values"); + throw new IllegalArgumentException("Path may not be null"); } if (Files.isDirectory(path)) { scanDirectory(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin))); + } else { + scanJar(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin))); } - scanJar(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin))); return List.copyOf(scanResult); } From 5417734e7ef94c247dc31ea3aeedf86949cd8e5e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Wed, 1 Apr 2026 21:36:39 +0200 Subject: [PATCH 48/55] test(core): add infrastructure for dynamic plugin compilation and plugin loading tests - Implement `LoaderTestEnvironment` and related builder to enable dynamic compilation and runtime class loading of test plugins. - Add `PluginLoaderIntegrationTest` to validate API level compatibility during plugin loading. - Include utility classes `TestJavaCompiler` and `TestCompilation` for handling source compilation and classpath setups during tests. 
--- .../core/compiler/LoaderTestEnvironment.java | 141 ++++++++++++++ .../spi/core/compiler/TestCompilation.java | 107 +++++++++++ .../spi/core/compiler/TestJavaCompiler.java | 175 ++++++++++++++++++ .../loader/PluginLoaderIntegrationTest.java | 75 ++++++++ 4 files changed, 498 insertions(+) create mode 100644 core/src/test/java/io/gdcc/spi/core/compiler/LoaderTestEnvironment.java create mode 100644 core/src/test/java/io/gdcc/spi/core/compiler/TestCompilation.java create mode 100644 core/src/test/java/io/gdcc/spi/core/compiler/TestJavaCompiler.java create mode 100644 core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java diff --git a/core/src/test/java/io/gdcc/spi/core/compiler/LoaderTestEnvironment.java b/core/src/test/java/io/gdcc/spi/core/compiler/LoaderTestEnvironment.java new file mode 100644 index 0000000..a05ae6d --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/compiler/LoaderTestEnvironment.java @@ -0,0 +1,141 @@ +package io.gdcc.spi.core.compiler; + +import javax.annotation.processing.Processor; +import java.io.IOException; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +/** + * Represents a test environment for managing the compilation and class loading + * of core and plugin components. This environment facilitates scenarios where + * Java code must be compiled, loaded, and tested dynamically. + * + * The `LoaderTestEnvironment` class is immutable and provides access to the core + * and plugin compilations, their respective outputs, and class loaders. It is + * built using the companion `Builder` class. 
+ */ +public final class LoaderTestEnvironment { + + private final TestCompilation coreCompilation; + private final TestCompilation pluginCompilation; + private final Path pluginArtifact; + private final URLClassLoader coreClassLoader; + + private LoaderTestEnvironment( + TestCompilation coreCompilation, + TestCompilation pluginCompilation, + Path pluginArtifact, + URLClassLoader coreClassLoader + ) { + this.coreCompilation = coreCompilation; + this.pluginCompilation = pluginCompilation; + this.pluginArtifact = pluginArtifact; + this.coreClassLoader = coreClassLoader; + } + + public static Builder builder() { + return new Builder(); + } + + public TestCompilation coreCompilation() { + return coreCompilation; + } + + public TestCompilation pluginCompilation() { + return pluginCompilation; + } + + public Path pluginArtifact() { + return pluginArtifact; + } + + public URLClassLoader coreClassLoader() { + return coreClassLoader; + } + + public Path pluginClassesDirectory() { + return pluginCompilation.classOutputDir(); + } + + public static final class Builder { + private final List coreSources = new ArrayList<>(); + private final List pluginSources = new ArrayList<>(); + private final List pluginProcessors = new ArrayList<>(); + + private String release = "17"; + private boolean packagePluginAsJar = false; + private String pluginJarName = "plugin-under-test.jar"; + + private Builder() { + } + + public Builder withRelease(String release) { + this.release = release; + return this; + } + + public Builder addCoreSource(String relativePath, String content) { + this.coreSources.add(TestJavaCompiler.SourceFile.of(relativePath, content)); + return this; + } + + public Builder addPluginSource(String relativePath, String content) { + this.pluginSources.add(TestJavaCompiler.SourceFile.of(relativePath, content)); + return this; + } + + public Builder addPluginProcessor(Processor processor) { + this.pluginProcessors.add(processor); + return this; + } + + public Builder 
packagePluginAsJar(boolean packagePluginAsJar) { + this.packagePluginAsJar = packagePluginAsJar; + return this; + } + + public Builder withPluginJarName(String pluginJarName) { + this.pluginJarName = pluginJarName; + return this; + } + + public LoaderTestEnvironment build() throws IOException { + TestCompilation coreCompilation = TestJavaCompiler.builder() + .withRelease(release) + .build() + .compile(coreSources); + + coreCompilation.assertSuccess(); + + URLClassLoader coreClassLoader = + coreCompilation.newClassLoader(Thread.currentThread().getContextClassLoader()); + + TestJavaCompiler.Builder pluginCompilerBuilder = TestJavaCompiler.builder() + .withRelease(release) + .withClasspathEntry(coreCompilation.classOutputDir()); + + if (!pluginProcessors.isEmpty()) { + pluginCompilerBuilder.withProcessors(pluginProcessors); + } + + TestCompilation pluginCompilation = pluginCompilerBuilder + .build() + .compile(pluginSources); + + pluginCompilation.assertSuccess(); + + Path pluginArtifact = packagePluginAsJar + ? 
pluginCompilation.createJar(pluginJarName) + : pluginCompilation.classOutputDir(); + + return new LoaderTestEnvironment( + coreCompilation, + pluginCompilation, + pluginArtifact, + coreClassLoader + ); + } + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/compiler/TestCompilation.java b/core/src/test/java/io/gdcc/spi/core/compiler/TestCompilation.java new file mode 100644 index 0000000..fe45bce --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/compiler/TestCompilation.java @@ -0,0 +1,107 @@ +package io.gdcc.spi.core.compiler; + +import javax.tools.Diagnostic; +import javax.tools.JavaFileObject; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarOutputStream; +import java.util.stream.Stream; + +/** + * An immutable class that encapsulates the result of a Java source code compilation process. + * Provides access to details such as success status, output directories, diagnostics, and utility + * methods for handling compilation results. 
+ */ +public final class TestCompilation { + + private final boolean success; + private final Path rootDir; + private final Path sourceDir; + private final Path classOutputDir; + private final List> diagnostics; + + TestCompilation( + boolean success, + Path rootDir, + Path sourceDir, + Path classOutputDir, + List> diagnostics + ) { + this.success = success; + this.rootDir = rootDir; + this.sourceDir = sourceDir; + this.classOutputDir = classOutputDir; + this.diagnostics = diagnostics; + } + + public boolean success() { + return success; + } + + public Path rootDir() { + return rootDir; + } + + public Path sourceDir() { + return sourceDir; + } + + public Path classOutputDir() { + return classOutputDir; + } + + public List> diagnostics() { + return diagnostics; + } + + public String diagnosticsAsText() { + return diagnostics.stream() + .map(diagnostic -> diagnostic.getKind() + ": " + diagnostic.getMessage(null)) + .reduce("", (left, right) -> left + right + System.lineSeparator()); + } + + public Path generatedFile(String relativePath) { + return classOutputDir.resolve(relativePath); + } + + public URL[] classpathUrls() { + try { + return new URL[]{classOutputDir.toUri().toURL()}; + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + + public URLClassLoader newClassLoader(ClassLoader parent) { + return URLClassLoader.newInstance(classpathUrls(), parent); + } + + public Path createJar(String fileName) throws IOException { + Path jarPath = rootDir.resolve(fileName); + + try (JarOutputStream jarOut = new JarOutputStream(Files.newOutputStream(jarPath)); + Stream stream = Files.walk(classOutputDir)) { + + for (Path path : stream.filter(Files::isRegularFile).toList()) { + String entryName = classOutputDir.relativize(path).toString().replace('\\', '/'); + jarOut.putNextEntry(new JarEntry(entryName)); + Files.copy(path, jarOut); + jarOut.closeEntry(); + } + } + + return jarPath; + } + + public void assertSuccess() { + if (!success) { + throw new 
IllegalStateException("Compilation failed:\n" + diagnosticsAsText()); + } + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/compiler/TestJavaCompiler.java b/core/src/test/java/io/gdcc/spi/core/compiler/TestJavaCompiler.java new file mode 100644 index 0000000..d6ac5ad --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/compiler/TestJavaCompiler.java @@ -0,0 +1,175 @@ +package io.gdcc.spi.core.compiler; + +import javax.annotation.processing.Processor; +import javax.tools.DiagnosticCollector; +import javax.tools.JavaCompiler; +import javax.tools.JavaFileObject; +import javax.tools.StandardJavaFileManager; +import javax.tools.ToolProvider; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +/** + * A utility class for testing Java compilation using the system Java compiler. + * Provides functionality to configure custom compilation settings + * and compile a set of source files. + * The compiled files are stored in temporary directories during execution. + * This class is immutable and supports configuration through its builder. + */ +public final class TestJavaCompiler { + + private final String release; + private final List classpathEntries; + private final List processors; + private final boolean inheritRuntimeClasspath; + + private TestJavaCompiler(Builder builder) { + this.release = builder.release; + this.classpathEntries = List.copyOf(builder.classpathEntries); + this.processors = List.copyOf(builder.processors); + this.inheritRuntimeClasspath = builder.inheritRuntimeClasspath; + } + + public static Builder builder() { + return new Builder(); + } + + public TestCompilation compile(List sources) throws IOException { + JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); + if (compiler == null) { + throw new IllegalStateException( + "No system Java compiler available. 
Are tests running on a JRE instead of a JDK?" + ); + } + + Path tempDir = Files.createTempDirectory("test-java-compiler"); + Path sourceDir = tempDir.resolve("src"); + Path classOutputDir = tempDir.resolve("classes"); + Files.createDirectories(sourceDir); + Files.createDirectories(classOutputDir); + + List sourcePaths = new ArrayList<>(); + for (SourceFile source : sources) { + Path file = sourceDir.resolve(source.relativePath()); + Files.createDirectories(file.getParent()); + Files.writeString(file, source.content(), StandardCharsets.UTF_8); + sourcePaths.add(file); + } + + DiagnosticCollector diagnostics = new DiagnosticCollector<>(); + + try (StandardJavaFileManager fileManager = + compiler.getStandardFileManager(diagnostics, null, StandardCharsets.UTF_8)) { + + Iterable compilationUnits = + fileManager.getJavaFileObjectsFromPaths(sourcePaths); + + List options = new ArrayList<>(); + options.add("--release"); + options.add(release); + options.add("-d"); + options.add(classOutputDir.toString()); + + String classpath = buildClasspath(); + if (!classpath.isBlank()) { + options.add("-classpath"); + options.add(classpath); + } + + JavaCompiler.CompilationTask task = compiler.getTask( + null, + fileManager, + diagnostics, + options, + null, + compilationUnits + ); + + if (!processors.isEmpty()) { + task.setProcessors(processors); + } + + boolean success = Boolean.TRUE.equals(task.call()); + + return new TestCompilation( + success, + tempDir, + sourceDir, + classOutputDir, + List.copyOf(diagnostics.getDiagnostics()) + ); + } + } + + private String buildClasspath() { + List entries = new ArrayList<>(); + + if (inheritRuntimeClasspath) { + String runtimeClasspath = System.getProperty("java.class.path", ""); + if (!runtimeClasspath.isBlank()) { + entries.add(runtimeClasspath); + } + } + + for (Path classpathEntry : classpathEntries) { + entries.add(classpathEntry.toString()); + } + + return String.join(File.pathSeparator, entries); + } + + public static final class 
Builder { + private String release = "17"; + private final List classpathEntries = new ArrayList<>(); + private final List processors = new ArrayList<>(); + private boolean inheritRuntimeClasspath = true; + + private Builder() { + } + + public Builder withRelease(String release) { + this.release = release; + return this; + } + + public Builder withClasspathEntry(Path path) { + this.classpathEntries.add(path); + return this; + } + + public Builder withClasspathEntries(List paths) { + this.classpathEntries.addAll(paths); + return this; + } + + public Builder withProcessor(Processor processor) { + this.processors.add(processor); + return this; + } + + public Builder withProcessors(List processors) { + this.processors.addAll(processors); + return this; + } + + public Builder withInheritRuntimeClasspath(boolean inheritRuntimeClasspath) { + this.inheritRuntimeClasspath = inheritRuntimeClasspath; + return this; + } + + public TestJavaCompiler build() { + return new TestJavaCompiler(this); + } + } + + public record SourceFile(String relativePath, String content) { + public static SourceFile of(String relativePath, String content) { + return new SourceFile(relativePath, content); + } + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java new file mode 100644 index 0000000..f27db95 --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java @@ -0,0 +1,75 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.compiler.LoaderTestEnvironment; +import io.gdcc.spi.meta.processor.PluginContractProcessor; +import org.junit.jupiter.api.Test; + +import java.nio.file.Path; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class PluginLoaderIntegrationTest { + + @Test + void 
rejectsPluginCompiledAgainstNewerBaseApiLevel() throws Exception { + // Given + + String baseContract = """ + package test.spi; + + import io.gdcc.spi.meta.plugin.Plugin; + import io.gdcc.spi.meta.annotations.PluginContract; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = %s; + } + """; + + LoaderTestEnvironment env = LoaderTestEnvironment.builder() + .addCoreSource( + "test/spi/TestPlugin.java", + baseContract.formatted(1) + ) + .addPluginSource( + "test/spi/TestPlugin.java", + baseContract.formatted(2) + ) + .addPluginSource( + "test/plugins/NewerPlugin.java", + """ + package test.plugins; + + import io.gdcc.spi.meta.annotations.DataversePlugin; + import test.spi.TestPlugin; + + @DataversePlugin + public class NewerPlugin implements TestPlugin { + @Override + public String identity() { + return "newer"; + } + } + """ + ) + .addPluginProcessor(new PluginContractProcessor()) + .packagePluginAsJar(false) + .build(); + + Class pluginContractClass = env.coreClassLoader().loadClass("test.spi.TestPlugin"); + + @SuppressWarnings("unchecked") + Class typedContract = + (Class) pluginContractClass; + + PluginLoader loader = new PluginLoader<>(typedContract, env.coreClassLoader()); + Path pluginLocation = Path.of(env.pluginArtifact().toString()); + + // When + Then + var ex = assertThrows(LoaderException.class, () -> loader.load(pluginLocation)); + assertEquals(1, ex.getProblems().size()); + assertInstanceOf(LoaderProblem.PluginClassApiLevelMismatch.class, ex.getProblems().get(0)); + } +} From e8f131dec5f0c897d50df918025c8c8bd4560ae0 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 2 Apr 2026 12:46:03 +0200 Subject: [PATCH 49/55] test(core): parameterize API level test cases in `PluginLoaderIntegrationTest` - Replace hardcoded API level test case with `@ParameterizedTest` using `@CsvSource` for dynamic API level scenarios. - Add a test to validate plugins compiled against matching API levels. 
--- .../loader/PluginLoaderIntegrationTest.java | 110 +++++++++++++----- 1 file changed, 80 insertions(+), 30 deletions(-) diff --git a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java index f27db95..1efc568 100644 --- a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java +++ b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java @@ -1,8 +1,11 @@ package io.gdcc.spi.core.loader; import io.gdcc.spi.core.compiler.LoaderTestEnvironment; +import io.gdcc.spi.meta.plugin.Plugin; import io.gdcc.spi.meta.processor.PluginContractProcessor; import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; import java.nio.file.Path; @@ -11,58 +14,68 @@ import static org.junit.jupiter.api.Assertions.assertThrows; class PluginLoaderIntegrationTest { - - @Test - void rejectsPluginCompiledAgainstNewerBaseApiLevel() throws Exception { - // Given - - String baseContract = """ - package test.spi; + + final String contractPackage = "test.spi"; + final String contractClass = "TestPlugin"; + + final String pluginPackage = "test.plugins"; + + final String baseContractClassFile = contractPackage.replace(".", "/") + "/" + contractClass + ".java"; + final String baseContractCode = """ + package %s; import io.gdcc.spi.meta.plugin.Plugin; import io.gdcc.spi.meta.annotations.PluginContract; @PluginContract(role = PluginContract.Role.BASE) - public interface TestPlugin extends Plugin { + public interface %s extends Plugin { int API_LEVEL = %s; } """; - - LoaderTestEnvironment env = LoaderTestEnvironment.builder() - .addCoreSource( - "test/spi/TestPlugin.java", - baseContract.formatted(1) - ) - .addPluginSource( - "test/spi/TestPlugin.java", - baseContract.formatted(2) - ) - .addPluginSource( - "test/plugins/NewerPlugin.java", - """ - package test.plugins; + + final String 
pluginCodeTemplate = """ + package %s; import io.gdcc.spi.meta.annotations.DataversePlugin; - import test.spi.TestPlugin; + import %s.%s; @DataversePlugin - public class NewerPlugin implements TestPlugin { + public class %s implements %s { @Override public String identity() { - return "newer"; + return "test"; } } - """ + """; + + final String simplePluginClass = "SimplePlugin"; + final String simplePluginClassFile = pluginPackage.replace(".", "/") + "/" + simplePluginClass + ".java"; + final String simplePluginCode = pluginCodeTemplate.formatted(pluginPackage, contractPackage, contractClass, simplePluginClass, contractClass); + + @ParameterizedTest + @CsvSource({"1,2","2,1"}) + void rejectsPluginCompiledAgainstDifferentBaseApiLevel(int coreLevel, int pluginLevel) throws Exception { + // Given + LoaderTestEnvironment env = LoaderTestEnvironment.builder() + .addCoreSource( + baseContractClassFile, + baseContractCode.formatted(contractPackage, contractClass, coreLevel) + ) + .addPluginSource( + baseContractClassFile, + baseContractCode.formatted(contractPackage, contractClass, pluginLevel) + ) + .addPluginSource( + simplePluginClassFile, + simplePluginCode ) .addPluginProcessor(new PluginContractProcessor()) .packagePluginAsJar(false) .build(); - Class pluginContractClass = env.coreClassLoader().loadClass("test.spi.TestPlugin"); - + Class pluginContractClass = env.coreClassLoader().loadClass(contractPackage + "." 
+ contractClass); @SuppressWarnings("unchecked") - Class typedContract = - (Class) pluginContractClass; + Class typedContract = (Class) pluginContractClass; PluginLoader loader = new PluginLoader<>(typedContract, env.coreClassLoader()); Path pluginLocation = Path.of(env.pluginArtifact().toString()); @@ -72,4 +85,41 @@ public String identity() { assertEquals(1, ex.getProblems().size()); assertInstanceOf(LoaderProblem.PluginClassApiLevelMismatch.class, ex.getProblems().get(0)); } + + @Test + void acceptsPluginCompiledAgainstSameBaseApiLevel() throws Exception { + // Given + int apiLevel = 5; + + LoaderTestEnvironment env = LoaderTestEnvironment.builder() + .addCoreSource( + baseContractClassFile, + baseContractCode.formatted(contractPackage, contractClass, apiLevel) + ) + .addPluginSource( + baseContractClassFile, + baseContractCode.formatted(contractPackage, contractClass, apiLevel) + ) + .addPluginSource( + simplePluginClassFile, + simplePluginCode + ) + .addPluginProcessor(new PluginContractProcessor()) + .packagePluginAsJar(false) + .build(); + + Class pluginContractClass = env.coreClassLoader().loadClass(contractPackage + "." + contractClass); + @SuppressWarnings("unchecked") + Class typedContract = (Class) pluginContractClass; + + PluginLoader loader = new PluginLoader<>(typedContract, env.coreClassLoader()); + Path pluginLocation = Path.of(env.pluginArtifact().toString()); + + // When + var plugins = loader.load(pluginLocation); + + // Then + assertEquals(1, plugins.size()); + assertEquals(pluginPackage + "." + simplePluginClass, plugins.get(0).plugin().getClass().getName()); + } } From 52e750a5778703d9fb314f50b277cdfaf5509039 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 2 Apr 2026 13:58:17 +0200 Subject: [PATCH 50/55] feat(core,meta): add SPI record validation and corresponding tests - Implement `verifyServiceProviderRecords()` in `LoaderHelper` for validating SPI service provider records in plugin descriptors. 
- Add `hasServiceProviderInterfaceRecord()` to `DescriptorScanner` to check for SPI records in directory and JAR sources. - Extend `LoaderProblem` to include `MissingServiceProviderRecord` for improved error reporting. - Add comprehensive unit tests in `LoaderHelperTest` and `DescriptorScannerTest` to verify validation logic. --- .../io/gdcc/spi/core/loader/LoaderHelper.java | 43 +++++ .../gdcc/spi/core/loader/LoaderProblem.java | 9 +- .../io/gdcc/spi/core/loader/PluginLoader.java | 10 +- .../spi/core/loader/LoaderHelperTest.java | 156 +++++++++++++++++- .../meta/descriptor/DescriptorScanner.java | 96 ++++++++++- .../descriptor/DescriptorScannerTest.java | 99 ++++++++++- 6 files changed, 405 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java index b844a43..3a19425 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java @@ -1,12 +1,14 @@ package io.gdcc.spi.core.loader; import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.descriptor.DescriptorScanner; import io.gdcc.spi.meta.descriptor.PluginDescriptor; import io.gdcc.spi.meta.descriptor.SourcedDescriptor; import io.gdcc.spi.meta.plugin.CoreProvider; import io.gdcc.spi.meta.plugin.Plugin; import io.gdcc.spi.meta.processor.ProcessorConstants; +import java.io.IOException; import java.lang.reflect.Field; import java.net.MalformedURLException; import java.net.URL; @@ -224,6 +226,47 @@ static PluginValidationResult identifyNonImplementations( } + /** + * Verifies service provider records in the provided list of descriptors. + * This method examines each descriptor to determine if it contains a valid + * service provider interface (SPI) record. If a descriptor contains an SPI record, + * it is accepted; otherwise, it is rejected with a corresponding list of problems. 
+ * The results of the verification process are returned as a {@code PluginValidationResult}. + * + * @param descriptors a list of {@code SourcedDescriptor} objects to be validated + * @return a {@code PluginValidationResult} containing accepted descriptors and associated rejection details + */ + static PluginValidationResult verifyServiceProviderRecords(List descriptors) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> rejected = new HashMap<>(); + + for (SourcedDescriptor descriptor : descriptors) { + try { + if (DescriptorScanner.hasServiceProviderInterfaceRecord(descriptor)) { + accepted.add(descriptor); + } else { + rejected.put( + descriptor, + List.of(new LoaderProblem.MissingServiceProviderRecord( + descriptor.plugin().klass(), + descriptor.plugin().kind(), + descriptor.sourceLocation()) + )); + } + } catch (IOException | IllegalArgumentException e) { + rejected.put(descriptor, List.of(new LoaderProblem.LocationFailure(descriptor.sourceLocation(), e))); + } + } + + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(rejected), + Map.of() + ); + } + + /** * Validates a list of plugin descriptors to ensure their API levels are compatible with the specified * plugin contract class. 
The method verifies that each plugin both declares and adheres to the required diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java index cf7f477..e238442 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java @@ -5,7 +5,7 @@ import java.nio.file.Path; import java.util.Set; -public sealed interface LoaderProblem permits LoaderProblem.DuplicateIdentity, LoaderProblem.LocationFailure, LoaderProblem.PluginClassApiLevelMismatch, LoaderProblem.PluginClassApiLevelMissing, LoaderProblem.PluginClassMismatch, LoaderProblem.PluginClassNameCollision, LoaderProblem.PluginClassNameCollisionWithCore, LoaderProblem.PluginClassUnsupported, LoaderProblem.SourceFailure { +public sealed interface LoaderProblem permits LoaderProblem.DuplicateIdentity, LoaderProblem.LocationFailure, LoaderProblem.MissingServiceProviderRecord, LoaderProblem.PluginClassApiLevelMismatch, LoaderProblem.PluginClassApiLevelMissing, LoaderProblem.PluginClassMismatch, LoaderProblem.PluginClassNameCollision, LoaderProblem.PluginClassNameCollisionWithCore, LoaderProblem.PluginClassUnsupported, LoaderProblem.SourceFailure { String message(); @@ -40,6 +40,13 @@ public String message() { } } + record MissingServiceProviderRecord(String className, String kind, Path source) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " in " + source + " is missing entry in META-INF/services/" + kind; + } + } + record PluginClassNameCollision(String className, Path source1, Path source2) implements LoaderProblem { @Override public String message() { diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java index f895fe0..ab68dfe 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java +++ 
b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java @@ -251,7 +251,8 @@ List preloadPlugins(Set sources, SourceScanner scanner) logger.debug("Scanning for non-implementations results: {}", implementationResult); // 4. Verify that every plugin class has a service loader entry. Remove any affected from the list. - // TODO: implement here - or make it a part of the resolving process when the descriptors are loaded + var serviceProviderResult = LoaderHelper.verifyServiceProviderRecords(descriptors); + logger.debug("Scanning for SPI record results: {}", serviceProviderResult); // 5. Verify that the API level of the plugin matches the core-expected level(s). var apiLevelResult = LoaderHelper.verifyPluginApiLevels(descriptors, this.pluginClass, this.parentClassLoader); @@ -261,7 +262,12 @@ List preloadPlugins(Set sources, SourceScanner scanner) // TODO: implement // Merge all the different results to receive the final picture which plugins are faulty - var finalResults = PluginValidationResult.merge(collisionResult, implementationResult, apiLevelResult); + var finalResults = PluginValidationResult.merge( + collisionResult, + implementationResult, + serviceProviderResult, + apiLevelResult + ); // Merge all the problems into one large list, to be wrapped in an exception finalResults.rejected().forEach((descriptor, problems) -> sourceProblems.addAll(problems)); diff --git a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java index d92ec47..2417632 100644 --- a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java +++ b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java @@ -2,18 +2,23 @@ import io.gdcc.spi.core.test.basic.TestContract; import io.gdcc.spi.meta.annotations.PluginContract; -import io.gdcc.spi.meta.descriptor.DescriptorFormat; import io.gdcc.spi.meta.descriptor.SourcedDescriptor; import io.gdcc.spi.meta.plugin.CoreProvider; import 
io.gdcc.spi.meta.plugin.Plugin; import org.junit.jupiter.api.Nested; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.nio.file.Path; import java.util.List; import java.util.Map; +import java.util.jar.JarOutputStream; +import java.util.zip.ZipEntry; -import static io.gdcc.spi.meta.descriptor.DescriptorFormat.*; +import static io.gdcc.spi.meta.descriptor.DescriptorFormat.transformClassName; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertInstanceOf; import static org.junit.jupiter.api.Assertions.assertThrows; @@ -250,6 +255,153 @@ void identifyNonImplementations_mixedDescriptorsSeparatesAcceptedAndWarnings() { } } + @Nested + class VerifyServiceProviderRecords { + + @TempDir + Path tempDir; + + @Test + void verifyServiceProviderRecords_directorySourceWithMatchingRecordIsAccepted() throws Exception { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(tempDir.toString()) + .build(); + + Path spiFile = createSpiFile( + tempDir, + descriptor.plugin().kind(), + descriptor.plugin().klass() + ); + + // when + var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertTrue(Files.isRegularFile(spiFile)); + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyServiceProviderRecords_jarSourceWithMatchingRecordIsAccepted() throws Exception { + // given + Path jarPath = tempDir.resolve("plugin.jar"); + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(jarPath.toString()) + .build(); + + createJarWithSpiRecord( + jarPath, + descriptor.plugin().kind(), + descriptor.plugin().klass() + ); + + // when + var results = 
LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyServiceProviderRecords_missingRecordIsRejected() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(tempDir.toString()) + .build(); + + // when + var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + + List problems = results.rejected().get(descriptor); + assertEquals(1, problems.size()); + assertInstanceOf(LoaderProblem.MissingServiceProviderRecord.class, problems.get(0)); + } + + @Test + void verifyServiceProviderRecords_missingSourceIsRejectedAsLocationFailure() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(tempDir.resolve("missing-plugin.jar").toString()) + .build(); + + // when + var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + + List problems = results.rejected().get(descriptor); + assertEquals(1, problems.size()); + assertInstanceOf(LoaderProblem.LocationFailure.class, problems.get(0)); + } + + @Test + void verifyServiceProviderRecords_recordWithWhitespaceAndCommentsIsAccepted() throws Exception { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(tempDir.toString()) + .build(); + + createSpiFile( + tempDir, + descriptor.plugin().kind(), + """ + # service registrations + %s # primary implementation + + com.example.OtherImplementation + """.formatted(descriptor.plugin().klass()) + ); + + // when + 
var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + private Path createSpiFile(Path root, String kind, String content) throws Exception { + Path serviceFile = root.resolve("META-INF").resolve("services").resolve(kind); + Files.createDirectories(serviceFile.getParent()); + Files.writeString(serviceFile, content, StandardCharsets.UTF_8); + return serviceFile; + } + + private void createJarWithSpiRecord(Path jarPath, String kind, String content) throws Exception { + Path parent = jarPath.getParent(); + if (parent != null) { + Files.createDirectories(parent); + } + + try ( + OutputStream fileOut = Files.newOutputStream(jarPath); + JarOutputStream jarOut = new JarOutputStream(fileOut) + ) { + String entryName = "META-INF/services/" + kind; + jarOut.putNextEntry(new ZipEntry(entryName)); + jarOut.write(content.getBytes(StandardCharsets.UTF_8)); + jarOut.closeEntry(); + } + } + } + @Nested class VerifyApiLevels { diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java index 656c76b..0bb83d3 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java @@ -1,7 +1,9 @@ package io.gdcc.spi.meta.descriptor; +import java.io.BufferedReader; import java.io.FileReader; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -130,6 +132,98 @@ static List scanDirectory(Path root) throws IOException { return List.copyOf(descriptors); } - // TODO: a method to check that for a given plugin class and plugin kind there is a service loader entry present + 
/** + * Checks whether the source referenced by the given descriptor contains a Java SPI service + * configuration file for the descriptor's declared kind, and whether that file explicitly + * lists the descriptor's implementation class. + * + *

+ * <p>The source location is expected to point either to a directory root or to a JAR file.
+ * In the directory case, this method looks for a regular file at
+ * {@code META-INF/services/<kind>} below that root. In the JAR case, it looks for the
+ * corresponding JAR entry.</p>
+ *
+ * <p>If the SPI record exists, its contents are interpreted using UTF-8. Blank lines,
+ * leading/trailing whitespace, and comments introduced by {@code #} are ignored in the
+ * same spirit as standard Java service configuration files.</p>
      + * + * @param descriptor the descriptor whose source and implementation metadata should be checked + * @return {@code true} if a matching SPI record exists and contains the descriptor's + * implementation class; {@code false} if no such SPI record exists or the record + * does not list that implementation + * @throws IllegalArgumentException if the descriptor points to a source location that does not exist + * @throws IOException if an I/O error occurs while reading the directory entry or JAR entry + */ + public static boolean hasServiceProviderInterfaceRecord(SourcedDescriptor descriptor) throws IOException { + String spiLocation = "META-INF/services/" + descriptor.plugin().kind(); + Path source = descriptor.sourceLocation(); + + // The descriptor should already be vetted before reaching this point, so we keep validation + // intentionally lightweight here and only reject obviously invalid sources. + if (Files.notExists(source)) { + throw new IllegalArgumentException("Source descriptor contained non-existing source location " + source); + } + + // Strategy: + // - If the source is a directory, open the SPI file directly from the filesystem. + // - Otherwise, treat the source as an archive and look for the SPI record as a JAR entry. + // In both cases we funnel the actual content check through the same InputStream-based helper. + if (Files.isDirectory(source)) { + Path serviceFile = source.resolve(spiLocation); + + // No SPI record file at the expected location means there is nothing to match. + if (!Files.isRegularFile(serviceFile)) { + return false; + } + + // Open the regular file only for the duration of the content check. + try (InputStream serviceRecord = Files.newInputStream(serviceFile)) { + return spiRecordContains(serviceRecord, descriptor.plugin().klass()); + } + } + + // Important: the JAR must stay open for as long as the entry InputStream is being read. 
+ // Therefore, both resources are owned by nested try-with-resources blocks in the same scope. + try (JarFile jar = new JarFile(source.toFile())) { + JarEntry entry = jar.getJarEntry(spiLocation); + + // Missing JAR entry means there is no SPI record for the declared kind. + if (entry == null) { + return false; + } + + // Read the JAR entry while the JAR is still open, then close both resources automatically. + try (InputStream serviceRecord = jar.getInputStream(entry)) { + return spiRecordContains(serviceRecord, descriptor.plugin().klass()); + } + } + } + + /** + * Reads a Java SPI service configuration stream and checks whether it declares the given implementation class. + * + *

+ * <p>Lines are normalized in a tolerant way: comments beginning with {@code #} are stripped,
+ * surrounding whitespace is trimmed, and empty lines are ignored.</p>
      + */ + private static boolean spiRecordContains(InputStream serviceRecord, String implementationClass) throws IOException { + // This helper intentionally contains the shared parsing logic so that directory-based + // and JAR-based SPI records are interpreted in exactly the same way. + try ( + InputStreamReader streamReader = new InputStreamReader(serviceRecord, StandardCharsets.UTF_8); + BufferedReader reader = new BufferedReader(streamReader) + ) { + return reader.lines() + // Strip inline comments to support standard SPI syntax. + .map(line -> { + int commentStart = line.indexOf('#'); + return commentStart >= 0 ? line.substring(0, commentStart) : line; + }) + // Normalize whitespace so that indented or padded entries still match. + .map(String::trim) + // Skip blank lines after normalization. + .filter(line -> !line.isEmpty()) + // Finally, look for the implementation class declared by the descriptor. + .anyMatch(line -> line.equals(implementationClass)); + } + } } diff --git a/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java index eee0998..689aebb 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java @@ -14,14 +14,15 @@ import java.util.jar.JarOutputStream; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.junit.jupiter.api.Assertions.assertTrue; class DescriptorScannerTest { - + @TempDir Path tempDir; - + @Nested class Directory { @@ -174,6 +175,100 @@ void scanJar_RejectsNonJarFile() throws IOException { assertTrue(ex.getMessage().contains("not a readable JAR file")); } } + + @Nested + class ServiceProviderInterfaceExistance { + + @Test + void 
hasServiceProviderInterfaceRecord_ReturnTrue_ForExistingSPIRecordInJar() throws IOException { + Path jar = createJar(Map.of( + "META-INF/services/test.BasePlugin", + "test.Plugin", + "META-INF/dataverse/plugins/test.Plugin.properties", + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=2 + """ + )); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(jar, descriptor); + + assertTrue(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + + @Test + void hasServiceProviderInterfaceRecord_ReturnsFalse_WhenSPIRecordDoesNotExistInJar() throws IOException { + Path jar = createJar(Map.of( + "META-INF/dataverse/plugins/test.Plugin.properties", + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=2 + """ + )); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(jar, descriptor); + + assertFalse(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + + @Test + void hasServiceProviderInterfaceRecord_RejectsInvalidSourcedDescriptorJar() { + Path invalidPath = tempDir.resolve("invalid.jar"); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor invalidDescriptor = new SourcedDescriptor(invalidPath, descriptor); + + IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, + () -> DescriptorScanner.hasServiceProviderInterfaceRecord(invalidDescriptor)); + + assertTrue(ex.getMessage().contains("invalid.jar")); + } + + @Test + void hasServiceProviderInterfaceRecord_ReturnsTrue_ForExistingSPIRecordInDirectory() throws IOException { + Path tmpDir = Files.createTempDirectory("descriptor-scanner"); + Path recordsDir = 
tmpDir.resolve(Path.of("META-INF/services")); + + Files.createDirectories(recordsDir); + Files.writeString(recordsDir.resolve(Path.of("test.BasePlugin")), "test.Plugin", StandardCharsets.UTF_8); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(tmpDir, descriptor); + + assertTrue(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + + @Test + void hasServiceProviderInterfaceRecord_ReturnsFalse_ForNonexistingSPIRecordInDirectory() throws IOException { + Path tmpDir = Files.createTempDirectory("descriptor-scanner"); + Path recordsDir = tmpDir.resolve(Path.of("META-INF/services")); + + Files.createDirectories(recordsDir); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(tmpDir, descriptor); + + assertFalse(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + + @Test + void hasServiceProviderInterfaceRecord_ReturnsFalse_ForNonmatchingSPIRecordInDirectory() throws IOException { + Path tmpDir = Files.createTempDirectory("descriptor-scanner"); + Path recordsDir = tmpDir.resolve(Path.of("META-INF/services")); + + Files.createDirectories(recordsDir); + Files.writeString(recordsDir.resolve(Path.of("test.BasePlugin")), "test.OtherPlugin", StandardCharsets.UTF_8); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(tmpDir, descriptor); + + assertFalse(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + } private Path createJar(Map entries) throws IOException { Path jar = Files.createTempFile(tempDir, "plugin-plugin-test-", ".jar"); From d3bfa9b64ea96def38784527aeaa492a0be9d177 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 2 Apr 2026 23:44:18 +0200 Subject: [PATCH 
51/55] fix(meta): validate plugin and provider levels to prevent negative values - Enforce validation in `DescriptorFormat` to reject negative values for plugin and provider levels. - Add unit tests to ensure proper error handling for invalid level values. --- .../spi/meta/descriptor/DescriptorFormat.java | 5 ++- .../meta/descriptor/DescriptorFormatTest.java | 40 +++++++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java index 4eed2bd..d20b3cb 100644 --- a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java @@ -195,7 +195,10 @@ private static int parseLevel(String value, String key) { } try { - return Integer.parseInt(value); + int level = Integer.parseInt(value); + if (level < 1) + throw new IllegalArgumentException("Invalid integer value for property " + key + " may not be < 1, but is: " + value); + return level; } catch (NumberFormatException e) { throw new IllegalArgumentException("Invalid integer value for property " + key + ": " + value, e); } diff --git a/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java index 5dfaa26..8007fe9 100644 --- a/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java +++ b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java @@ -306,11 +306,31 @@ void read_FailsWhenContractLevelIsNotAnInteger() { ); } + @Test + void read_FailsWhenContractLevelIsNegative() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=-1 + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorFormat.read(new 
StringReader(serialized)) + ); + + assertEquals( + "Invalid integer value for property plugin.implements.io.gdcc.example.ExportPlugin.level may not be < 1, but is: -1", + ex.getMessage() + ); + } + @Test void read_FailsWhenRequiredProviderLevelIsNotAnInteger() { String serialized = """ plugin.class=io.gdcc.example.MyPlugin plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=1 plugin.requires.io.gdcc.example.ExportProvider.level=nope """; @@ -325,6 +345,26 @@ void read_FailsWhenRequiredProviderLevelIsNotAnInteger() { ); } + @Test + void read_FailsWhenProviderLevelIsNegative() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=1 + plugin.requires.io.gdcc.example.ExportProvider.level=-1 + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals( + "Invalid integer value for property plugin.requires.io.gdcc.example.ExportProvider.level may not be < 1, but is: -1", + ex.getMessage() + ); + } + @Test void read_RoundTripsWithWrite() throws IOException { Descriptor original = new Descriptor( From 1cae9a58df062d6ff980f879a6e0b093e91bd3fa Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 2 Apr 2026 23:45:54 +0200 Subject: [PATCH 52/55] feat(core): extend LoaderHelper for determination of provider API levels - Introduce `determineCoreApiLevel_providerClass` test in `LoaderHelperTest` to validate API level extraction for `TestProvider`. - Include `TestProvider` test interface with a defined `API_LEVEL`. - Update exception messages in `LoaderHelper` for improved clarity and consistency. 
--- .../main/java/io/gdcc/spi/core/loader/LoaderHelper.java | 6 +++--- .../java/io/gdcc/spi/core/loader/LoaderHelperTest.java | 9 +++++++++ .../java/io/gdcc/spi/core/test/basic/TestProvider.java | 7 +++++++ 3 files changed, 19 insertions(+), 3 deletions(-) create mode 100644 core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java index 3a19425..e995b60 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java @@ -80,14 +80,14 @@ static boolean isClassPresent(String fqcn, ClassLoader classLoader) { static int determineCoreApiLevel(Class pluginClass) { // Looking up the plugin contract API level is only ever valid on SPI interfaces but never on implementations. if (!pluginClass.isInterface()) { - throw new IllegalArgumentException("Plugin class must be an interface"); + throw new IllegalArgumentException("Class must be an interface"); } try { // Retrieve the field from exactly this class (we don't want to search any superclasses here!) 
Field apiLevel = pluginClass.getDeclaredField(ProcessorConstants.API_LEVEL_FIELD_NAME); return apiLevel.getInt(pluginClass); } catch (NoSuchFieldException | IllegalAccessException e) { - throw new IllegalStateException("Plugin contract class must have an (accessible) " + ProcessorConstants.API_LEVEL_FIELD_NAME + " field"); + throw new IllegalStateException("Contract class must have an (accessible) " + ProcessorConstants.API_LEVEL_FIELD_NAME + " field"); } } @@ -116,7 +116,7 @@ static Class resolveClass(String className, ClassLoader classLoader) { try { return Class.forName(className, false, classLoader); } catch (ClassNotFoundException e) { - throw new IllegalArgumentException("Plugin class not found: " + className, e); + throw new IllegalArgumentException("Class " + className + " could not be found in core", e); } } diff --git a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java index 2417632..bfa3333 100644 --- a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java +++ b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java @@ -1,6 +1,7 @@ package io.gdcc.spi.core.loader; import io.gdcc.spi.core.test.basic.TestContract; +import io.gdcc.spi.core.test.basic.TestProvider; import io.gdcc.spi.meta.annotations.PluginContract; import io.gdcc.spi.meta.descriptor.SourcedDescriptor; import io.gdcc.spi.meta.plugin.CoreProvider; @@ -53,6 +54,14 @@ void determineCoreApiLevel_invalidFQCN() { () -> LoaderHelper.determineCoreApiLevel("foo.Bar", classLoader) ); } + + @Test + void determineCoreApiLevel_providerClass() { + assertEquals( + TestProvider.API_LEVEL, + LoaderHelper.determineCoreApiLevel(transformClassName(TestProvider.class), classLoader) + ); + } } @Nested diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java new file mode 100644 index 0000000..dc52639 --- /dev/null +++ 
b/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java @@ -0,0 +1,7 @@ +package io.gdcc.spi.core.test.basic; + +import io.gdcc.spi.meta.plugin.CoreProvider; + +public interface TestProvider extends CoreProvider { + int API_LEVEL = 1; +} From a60d249f31296f6e9dd2f90e1c07eece9e89d719 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Thu, 2 Apr 2026 23:46:59 +0200 Subject: [PATCH 53/55] feat(core): add provider API level preloading validation in PluginLoader - Implement `verifyProviderApiLevels` in `LoaderHelper` to validate required provider API levels against core system levels. - Update `PluginLoader` to incorporate provider API level validation during plugin preloading. - Extend `LoaderProblem` with `ProviderApiLevelMismatch` and `ProviderClassUnsupported` for detailed error reporting. - Add comprehensive unit tests in `LoaderHelperTest` for provider level validation, covering various edge cases. --- .../io/gdcc/spi/core/loader/LoaderHelper.java | 78 +++++++++++ .../gdcc/spi/core/loader/LoaderProblem.java | 16 ++- .../io/gdcc/spi/core/loader/PluginLoader.java | 8 +- .../spi/core/loader/LoaderHelperTest.java | 127 ++++++++++++++++++ 4 files changed, 225 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java index e995b60..1bad1ae 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java @@ -359,6 +359,84 @@ static PluginValidationResult verifyPluginApiLevels(List} instance. The result contains the sets of + * accepted and rejected plugins, with detailed reasons for rejection. 
+ */ + static PluginValidationResult verifyProviderApiLevels(List descriptors, ClassLoader classLoader) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> rejected = new HashMap<>(); + + // Save a few CPU cycles by not using the classloader over and over again for the same provider class + Map lookedUpProviders = new HashMap<>(); + + // Check all the plugins + for (SourcedDescriptor descriptor : descriptors) { + // Note: the way how we create the descriptors rules out we see any null keys or values in this map. + Map requiredProviders = descriptor.plugin().requiredProviders(); + // Save all the problems identified during validation + List problems = new ArrayList<>(); + + // Iterate over all the providers required by the plugin + for (String provider : requiredProviders.keySet()) { + int pluginLevel = requiredProviders.get(provider); + + // Look up the API level for required provider within the core + int coreLevel; + try { + if (lookedUpProviders.containsKey(provider)) { + coreLevel = lookedUpProviders.get(provider); + } else { + coreLevel = determineCoreApiLevel(provider, classLoader); + lookedUpProviders.put(provider, coreLevel); + } + // In case the core does not support the provider (contract not even found), the plugin must be rejected. 
+ } catch (IllegalArgumentException e) { + problems.add(new LoaderProblem.ProviderClassUnsupported( + descriptor.plugin().klass(), + descriptor.sourceLocation(), + provider + )); + // Skip the rest and continue with the next provider + continue; + } + + // Now match the API level against the plugin's requirements + if (coreLevel != pluginLevel) { + problems.add(new LoaderProblem.ProviderApiLevelMismatch( + descriptor.plugin().klass(), + descriptor.sourceLocation(), + provider, + coreLevel, + pluginLevel + )); + } + } + + if (problems.isEmpty()) { + accepted.add(descriptor); + } else { + rejected.put(descriptor, List.copyOf(problems)); + } + } + + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(rejected), + Map.of() + ); + } + + /** * Converts a {@link SourcedDescriptor} and a plugin instance into a {@link PluginDescriptor}. * diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java index e238442..6245797 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java @@ -5,7 +5,7 @@ import java.nio.file.Path; import java.util.Set; -public sealed interface LoaderProblem permits LoaderProblem.DuplicateIdentity, LoaderProblem.LocationFailure, LoaderProblem.MissingServiceProviderRecord, LoaderProblem.PluginClassApiLevelMismatch, LoaderProblem.PluginClassApiLevelMissing, LoaderProblem.PluginClassMismatch, LoaderProblem.PluginClassNameCollision, LoaderProblem.PluginClassNameCollisionWithCore, LoaderProblem.PluginClassUnsupported, LoaderProblem.SourceFailure { +public sealed interface LoaderProblem permits LoaderProblem.DuplicateIdentity, LoaderProblem.LocationFailure, LoaderProblem.MissingServiceProviderRecord, LoaderProblem.PluginClassApiLevelMismatch, LoaderProblem.PluginClassApiLevelMissing, LoaderProblem.PluginClassMismatch, 
LoaderProblem.PluginClassNameCollision, LoaderProblem.PluginClassNameCollisionWithCore, LoaderProblem.PluginClassUnsupported, LoaderProblem.ProviderApiLevelMismatch, LoaderProblem.ProviderClassUnsupported, LoaderProblem.SourceFailure { String message(); @@ -88,4 +88,18 @@ public String message() { return "Class " + classname + " in " + source + " uses API level " + pluginLevel + " but core expects " + coreLevel; } } + + record ProviderApiLevelMismatch(String classname, Path source, String provider, int coreLevel, int pluginLevel) implements LoaderProblem { + @Override + public String message() { + return "Class " + classname + " in " + source + " requires API level " + pluginLevel + " for provider " + provider + ", but core provides " + coreLevel; + } + } + + record ProviderClassUnsupported(String className, Path source, String provider) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " in " + source + " requires unsupported provider " + provider; + } + } } diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java index ab68dfe..b2ffd55 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java @@ -256,17 +256,19 @@ List preloadPlugins(Set sources, SourceScanner scanner) // 5. Verify that the API level of the plugin matches the core-expected level(s). var apiLevelResult = LoaderHelper.verifyPluginApiLevels(descriptors, this.pluginClass, this.parentClassLoader); - logger.debug("Scanning for API level matches results: {}", apiLevelResult); + logger.debug("Scanning for plugin API level matches results: {}", apiLevelResult); // 6. 
Verify all the provider requirements by the plugin are met - // TODO: implement + var providerLevelsResult = LoaderHelper.verifyProviderApiLevels(descriptors, this.parentClassLoader); + logger.debug("Scanning for provider API level matches results: {}", providerLevelsResult); // Merge all the different results to receive the final picture which plugins are faulty var finalResults = PluginValidationResult.merge( collisionResult, implementationResult, serviceProviderResult, - apiLevelResult + apiLevelResult, + providerLevelsResult ); // Merge all the problems into one large list, to be wrapped in an exception finalResults.rejected().forEach((descriptor, problems) -> sourceProblems.addAll(problems)); diff --git a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java index bfa3333..dc39fc0 100644 --- a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java +++ b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java @@ -511,6 +511,133 @@ void verifyApiLevels_reportsMultipleProblemsForSingleDescriptor() { } } + @Nested + class VerifyProviderApiLevels { + + @Test + void verifyProviderApiLevels_happyPath() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of(transformClassName(TestProvider.class), TestProvider.API_LEVEL)) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyProviderApiLevels_providerLevelMismatch() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of(transformClassName(TestProvider.class), TestProvider.API_LEVEL + 1)) + .build(); + + // when + var results = 
LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + assertInstanceOf(LoaderProblem.ProviderApiLevelMismatch.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyProviderApiLevels_unsupportedProvider() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of("com.example.MissingProvider", 7)) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + assertInstanceOf(LoaderProblem.ProviderClassUnsupported.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyProviderApiLevels_reportsMultipleProblemsForSingleDescriptor() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of( + transformClassName(TestProvider.class), TestProvider.API_LEVEL + 1, + "com.example.MissingProvider", 7 + )) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + + List problems = results.rejected().get(descriptor); + assertEquals(2, problems.size()); + assertTrue(problems.stream().anyMatch( + problem -> problem.getClass().equals(LoaderProblem.ProviderApiLevelMismatch.class) + )); + assertTrue(problems.stream().anyMatch( + problem -> 
problem.getClass().equals(LoaderProblem.ProviderClassUnsupported.class) + )); + } + + @Test + void verifyProviderApiLevels_descriptorWithoutRequiredProvidersIsAccepted() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of()) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyProviderApiLevels_mixedDescriptorsSeparatesAcceptedAndRejected() { + // given + SourcedDescriptor matching = DescriptorBuilder.aDescriptor() + .withSource("matching.jar") + .withRequiredProviders(Map.of(transformClassName(TestProvider.class), TestProvider.API_LEVEL)) + .build(); + + SourcedDescriptor mismatching = DescriptorBuilder.aDescriptor() + .withSource("mismatching.jar") + .withRequiredProviders(Map.of(transformClassName(TestProvider.class), TestProvider.API_LEVEL + 1)) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(matching, mismatching), classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(matching)); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(mismatching)); + } + } + @Nested class ToPluginDescriptor { From bf8d9a42819d6ca7795c83797fffed54ddd3942e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 3 Apr 2026 01:57:58 +0200 Subject: [PATCH 54/55] refactor(core): simplify `LoaderConfiguration` API and update usages - Replace `LoaderConfiguration` record with a class for immutability and extendability. - Introduce `defaults()` and `permissive()` factory methods for common configurations. 
- Replace direct configuration instantiation in tests with factory methods to simplify setup. - Update all references to deprecated methods and fields with new getters. --- .../spi/core/loader/LoaderConfiguration.java | 213 +++++++++++++++--- .../io/gdcc/spi/core/loader/LoaderHelper.java | 4 +- .../io/gdcc/spi/core/loader/PluginLoader.java | 10 +- .../spi/core/loader/LoaderHelperTest.java | 30 +-- .../spi/core/loader/PluginLoaderTest.java | 16 +- 5 files changed, 198 insertions(+), 75 deletions(-) diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java index 5b415f9..48d3602 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java @@ -1,41 +1,196 @@ package io.gdcc.spi.core.loader; + /** - * @param ENFORCE_SINGLE_SOURCE_MATCHING_PLUGINS_ONLY - * When activated, any source may only provide plugins for a single given base plugin contract interface. - * In case any non-compliant plugin is found within, the loader will refrain from loading from the source entirely. - * When deactivated, any non-matching plugins will simply be ignored. - * Default: false - * @param EMIT_WARNINGS_ON_MULTI_PLUGIN_SOURCE - * If {@link #ENFORCE_SINGLE_SOURCE_MATCHING_PLUGINS_ONLY} is deactivated, either log a warning (true) or not (false). - * Default: false - * @param ABORT_ON_COMPATIBILITY_PROBLEMS - * Abort loading when detecting any problems before actually loading classes (API level verification, etc.). - * Default: true - * @param ABORT_ON_DUPLICATED_IDENTITIES - * Abort loading when detecting any plugins with duplicated identities, making them undistinguishable for users. - * Default: true - * @param ENFORCE_UNAMBIGUOUS_PLUGIN_IDENTITIES - * When activated, any plugin must have a unique identity within its source, ensuring unambiguous identification. 
- * Any plugin's identity that differs by case or special chars only will be seen as a duplicate. - * Default: true + * Immutable configuration controlling the behavior of the plugin loader. + * + *

      Use {@link #defaults()} to start from the standard configuration and then + * adjust individual options with the fluent {@code with...} methods.

      + * + *

      Example:

      + *
      {@code
      + * LoaderConfiguration configuration = LoaderConfiguration.defaults()
      + *     .withEmitWarningsOnMultiPluginSource(true)
      + *     .withAbortOnCompatibilityProblems(false);
      + * }
      */ -public record LoaderConfiguration( - boolean ENFORCE_SINGLE_SOURCE_MATCHING_PLUGINS_ONLY, - boolean EMIT_WARNINGS_ON_MULTI_PLUGIN_SOURCE, - boolean ABORT_ON_COMPATIBILITY_PROBLEMS, - boolean ABORT_ON_DUPLICATED_IDENTITIES, - boolean ENFORCE_UNAMBIGUOUS_PLUGIN_IDENTITIES -) { - - public static LoaderConfiguration defaultConfiguration() { +public final class LoaderConfiguration { + + private final boolean enforceSingleSourceMatchingPluginsOnly; + private final boolean emitWarningsOnMultiPluginSource; + private final boolean abortOnCompatibilityProblems; + private final boolean abortOnDuplicatedIdentities; + private final boolean enforceUnambiguousPluginIdentities; + + private LoaderConfiguration( + boolean enforceSingleSourceMatchingPluginsOnly, + boolean emitWarningsOnMultiPluginSource, + boolean abortOnCompatibilityProblems, + boolean abortOnDuplicatedIdentities, + boolean enforceUnambiguousPluginIdentities + ) { + this.enforceSingleSourceMatchingPluginsOnly = enforceSingleSourceMatchingPluginsOnly; + this.emitWarningsOnMultiPluginSource = emitWarningsOnMultiPluginSource; + this.abortOnCompatibilityProblems = abortOnCompatibilityProblems; + this.abortOnDuplicatedIdentities = abortOnDuplicatedIdentities; + this.enforceUnambiguousPluginIdentities = enforceUnambiguousPluginIdentities; + } + + /** + * Returns the standard loader configuration (which is strictly enforcing). + * + *
        + *
      • {@code enforceSingleSourceMatchingPluginsOnly = true}
      • + *
      • {@code emitWarningsOnMultiPluginSource = false}
      • + *
      • {@code abortOnCompatibilityProblems = true}
      • + *
      • {@code abortOnDuplicatedIdentities = true}
      • + *
      • {@code enforceUnambiguousPluginIdentities = true}
      • + *
      + */ + public static LoaderConfiguration defaults() { return new LoaderConfiguration( - false, + true, false, true, true, true ); } - + + /** + * Returns a permissive loader configuration with all strict validation features disabled. + * It has package private visibility as the only permissive usage is in a testing context. + * + *

      + * The configuration has the following properties: + *

        + *
      • {@code enforceSingleSourceMatchingPluginsOnly = false}
      • + *
      • {@code emitWarningsOnMultiPluginSource = false}
      • + *
      • {@code abortOnCompatibilityProblems = false}
      • + *
      • {@code abortOnDuplicatedIdentities = false}
      • + *
      • {@code enforceUnambiguousPluginIdentities = false}
      • + *
      + *

      + * + * @return a {@code LoaderConfiguration} instance with permissive settings. + */ + static LoaderConfiguration permissive() { + return new LoaderConfiguration( + false, + false, + false, + false, + false + ); + } + + /** + * When enabled, a source may only provide plugins for a single requested base contract. + * If any non-matching plugin is found, loading from that source is aborted entirely. + * + *

      When disabled, non-matching plugins are ignored.

      + */ + public boolean enforceSingleSourceMatchingPluginsOnly() { + return enforceSingleSourceMatchingPluginsOnly; + } + + /** + * Returns a copy with {@link #enforceSingleSourceMatchingPluginsOnly()} updated. + */ + public LoaderConfiguration withEnforceSingleSourceMatchingPluginsOnly(boolean value) { + return new LoaderConfiguration( + value, + emitWarningsOnMultiPluginSource, + abortOnCompatibilityProblems, + abortOnDuplicatedIdentities, + enforceUnambiguousPluginIdentities + ); + } + + /** + * When {@link #enforceSingleSourceMatchingPluginsOnly()} is disabled, controls whether + * multi-plugin-contract sources should emit warnings. + */ + public boolean emitWarningsOnMultiPluginSource() { + return emitWarningsOnMultiPluginSource; + } + + /** + * Returns a copy with {@link #emitWarningsOnMultiPluginSource()} updated. + */ + public LoaderConfiguration withEmitWarningsOnMultiPluginSource(boolean value) { + return new LoaderConfiguration( + enforceSingleSourceMatchingPluginsOnly, + value, + abortOnCompatibilityProblems, + abortOnDuplicatedIdentities, + enforceUnambiguousPluginIdentities + ); + } + + /** + * When enabled, plugin loading aborts on discovered compatibility problems (for example, API level mismatches). + * No classes are actually loaded, problems are detected using plugin metadata only. + */ + public boolean abortOnCompatibilityProblems() { + return abortOnCompatibilityProblems; + } + + /** + * Returns a copy with {@link #abortOnCompatibilityProblems()} updated. + */ + public LoaderConfiguration withAbortOnCompatibilityProblems(boolean value) { + return new LoaderConfiguration( + enforceSingleSourceMatchingPluginsOnly, + emitWarningsOnMultiPluginSource, + value, + abortOnDuplicatedIdentities, + enforceUnambiguousPluginIdentities + ); + } + + /** + * When enabled, loading aborts if duplicate plugin identities are detected. + * + *

      Note: duplicated identities make plugins indistinguishable for users.

      + */ + public boolean abortOnDuplicatedIdentities() { + return abortOnDuplicatedIdentities; + } + + /** + * Returns a copy with {@link #abortOnDuplicatedIdentities()} updated. + */ + public LoaderConfiguration withAbortOnDuplicatedIdentities(boolean value) { + return new LoaderConfiguration( + enforceSingleSourceMatchingPluginsOnly, + emitWarningsOnMultiPluginSource, + abortOnCompatibilityProblems, + value, + enforceUnambiguousPluginIdentities + ); + } + + /** + * When enabled, plugin identities must be unique within a source. + * Any plugin's identity that differs by case or special chars only will be seen as a duplicate. + */ + public boolean enforceUnambiguousPluginIdentities() { + return enforceUnambiguousPluginIdentities; + } + + /** + * Returns a copy with {@link #enforceUnambiguousPluginIdentities()} updated. + */ + public LoaderConfiguration withEnforceUnambiguousPluginIdentities(boolean value) { + return new LoaderConfiguration( + enforceSingleSourceMatchingPluginsOnly, + emitWarningsOnMultiPluginSource, + abortOnCompatibilityProblems, + abortOnDuplicatedIdentities, + value + ); + } } + + diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java index 1bad1ae..64ea06b 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java @@ -208,7 +208,7 @@ static PluginValidationResult identifyNonImplementations( pluginClass.getCanonicalName() )); - if (configuration.ENFORCE_SINGLE_SOURCE_MATCHING_PLUGINS_ONLY()) + if (configuration.enforceSingleSourceMatchingPluginsOnly()) rejected.put(descriptor, problems); else { warning.put(descriptor, problems); @@ -548,7 +548,7 @@ static PluginValidationResult> verifyUniqueId } } - if (configuration.ENFORCE_UNAMBIGUOUS_PLUGIN_IDENTITIES()) { + if (configuration.enforceUnambiguousPluginIdentities()) { // Return duplicates as rejected return new 
PluginValidationResult<>( Set.copyOf(accepted), diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java index b2ffd55..edea2cd 100644 --- a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java +++ b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java @@ -59,7 +59,7 @@ public class PluginLoader { * The parent ClassLoader is set to the current thread's context ClassLoader, which allows * plugins to access classes and resources on the core's classpath. * It uses the system default configuration for plugin loading behaviors, - * see {@link LoaderConfiguration#defaultConfiguration()}. + * see {@link LoaderConfiguration#defaults()}. * * @param pluginClass the Class object representing the plugin type {@code T} to load */ @@ -70,13 +70,13 @@ public PluginLoader(Class pluginClass) { /** * Constructs a new PluginLoader that will load plugins of the specified type {@code T}. * It uses the system default configuration for plugin loading behaviors, - * see {@link LoaderConfiguration#defaultConfiguration()}. + * see {@link LoaderConfiguration#defaults()}. * * @param pluginClass the Class object representing the plugin type {@code T} to load * @param parentClassLoader the ClassLoader to be used as the parent for class loading of plugins */ public PluginLoader(Class pluginClass, ClassLoader parentClassLoader) { - this(pluginClass, LoaderConfiguration.defaultConfiguration(), parentClassLoader); + this(pluginClass, LoaderConfiguration.defaults(), parentClassLoader); } /** @@ -275,7 +275,7 @@ List preloadPlugins(Set sources, SourceScanner scanner) // By default, we should abort now. In case we are asked to keep going by configuration, // let the logs show any found problems as warnings. 
- if (configuration.ABORT_ON_COMPATIBILITY_PROBLEMS() && (!sourceProblems.isEmpty() || !finalResults.rejected().isEmpty())) { + if (configuration.abortOnCompatibilityProblems() && (!sourceProblems.isEmpty() || !finalResults.rejected().isEmpty())) { throw new LoaderException(sourceProblems); } @@ -364,7 +364,7 @@ List> load(List descriptors, Map // By default, we should abort now. In case we are asked to keep going by configuration, // let the logs show any found problems as warnings. - if (configuration.ABORT_ON_COMPATIBILITY_PROBLEMS() && (!sourceProblems.isEmpty() || !finalResults.rejected().isEmpty())) { + if (configuration.abortOnCompatibilityProblems() && (!sourceProblems.isEmpty() || !finalResults.rejected().isEmpty())) { throw new LoaderException(sourceProblems); } diff --git a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java index dc39fc0..00f9f76 100644 --- a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java +++ b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java @@ -154,7 +154,7 @@ void identifyNonImplementations_matchingDescriptorIsAccepted() { var results = LoaderHelper.identifyNonImplementations( descriptors, TestContract.class, - enforcingConfiguration() + LoaderConfiguration.defaults() ); // then @@ -174,7 +174,7 @@ void identifyNonImplementations_nonMatchingDescriptorIsRejectedWhenEnforced() { var results = LoaderHelper.identifyNonImplementations( List.of(descriptor), TestContract.class, - enforcingConfiguration() + LoaderConfiguration.defaults() ); // then @@ -198,7 +198,7 @@ void identifyNonImplementations_nonMatchingDescriptorIsWarningWhenNotEnforced() var results = LoaderHelper.identifyNonImplementations( List.of(descriptor), TestContract.class, - permissiveConfiguration() + LoaderConfiguration.permissive() ); // then @@ -226,7 +226,7 @@ void identifyNonImplementations_mixedDescriptorsSeparatesAcceptedAndRejected() { var results = 
LoaderHelper.identifyNonImplementations( List.of(matching, nonMatching), TestContract.class, - enforcingConfiguration() + LoaderConfiguration.defaults() ); // then @@ -252,7 +252,7 @@ void identifyNonImplementations_mixedDescriptorsSeparatesAcceptedAndWarnings() { var results = LoaderHelper.identifyNonImplementations( List.of(matching, nonMatching), TestContract.class, - permissiveConfiguration() + LoaderConfiguration.permissive() ); // then @@ -812,24 +812,4 @@ void toPluginDescriptor_failsWhenProviderClassCannotBeResolved() { ); } } - - static LoaderConfiguration enforcingConfiguration() { - return new LoaderConfiguration( - true, - false, - true, - true, - true - ); - } - - static LoaderConfiguration permissiveConfiguration() { - return new LoaderConfiguration( - false, - false, - true, - true, - false - ); - } } diff --git a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java index b8eebba..be21706 100644 --- a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java +++ b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java @@ -118,21 +118,9 @@ void findSources_NoDirectory() { @Nested class Preload { - LoaderConfiguration enforcingConfig = new LoaderConfiguration( - true, - false, - true, - true, - true - ); + LoaderConfiguration enforcingConfig = LoaderConfiguration.defaults(); - LoaderConfiguration permissiveConfig = new LoaderConfiguration( - false, - false, - false, - false, - false - ); + LoaderConfiguration permissiveConfig = LoaderConfiguration.permissive(); @Test void preLoad_throwsOnNormalProblemsWhenEnforcing() { From fb05a21d1aedc53a751db2a215cf7d460ba16fd3 Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 3 Apr 2026 02:17:02 +0200 Subject: [PATCH 55/55] ci: re-enable site workflow for `main` branch only Un-comment `branches: main` and `workflow_dispatch` triggers in `site.yml` to activate the workflow trigger for the `main` branch only --- 
.github/workflows/site.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/site.yml b/.github/workflows/site.yml index 445e09b..ff370ab 100644 --- a/.github/workflows/site.yml +++ b/.github/workflows/site.yml @@ -2,9 +2,9 @@ name: Publish Maven Site on: push: -# branches: -# - main -# workflow_dispatch: + branches: + - main + workflow_dispatch: permissions: contents: write @@ -28,8 +28,8 @@ jobs: java-version: '17' cache: maven - # Later on, we need to extend this mechanism to provide both version and snapshot, - # as well as a different case the stable version and "latest". + # TODO: Later on, we need to extend this mechanism to provide both version and snapshot, + # as well as a different case the stable version and "latest". - name: Build and verify project site # By default, docs.site.base is / for local development. For deployment, this needs to be adapted. # IMPORTANT: make sure the base ends with "/"!