pluginSources = plugins.keySet().stream().toList();
+ assertEquals(1, pluginSources.size());
+ PluginOrigin source = pluginSources.get(0);
+ assertEquals(rootClassPath, source.location());
+ assertEquals(TestPlugin.class.getName(), source.className());
+ assertEquals(new TestPlugin().identity(), source.identity());
+
+ */
+ }
+}
\ No newline at end of file
diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java
new file mode 100644
index 0000000..50248a5
--- /dev/null
+++ b/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java
@@ -0,0 +1,12 @@
+package io.gdcc.spi.core.test.basic;
+
+import io.gdcc.spi.meta.annotations.PluginContract;
+import io.gdcc.spi.meta.plugin.Plugin;
+
+@PluginContract(role = PluginContract.Role.BASE)
+public interface TestContract extends Plugin {
+
+ int API_LEVEL = 1;
+
+ void test();
+}
diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java
new file mode 100644
index 0000000..7148d0c
--- /dev/null
+++ b/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java
@@ -0,0 +1,15 @@
+package io.gdcc.spi.core.test.basic;
+
+import io.gdcc.spi.meta.plugin.Plugin;
+
+public class TestPlugin implements TestContract {
+ @Override
+ public String identity() {
+ return "test";
+ }
+
+ @Override
+ public void test() {
+ /* Intentionally left blank */
+ }
+}
diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java
new file mode 100644
index 0000000..dc52639
--- /dev/null
+++ b/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java
@@ -0,0 +1,7 @@
+package io.gdcc.spi.core.test.basic;
+
+import io.gdcc.spi.meta.plugin.CoreProvider;
+
+public interface TestProvider extends CoreProvider {
+ int API_LEVEL = 1;
+}
diff --git a/export/pom.xml b/export/pom.xml
new file mode 100644
index 0000000..8c035ad
--- /dev/null
+++ b/export/pom.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.gdcc.spi</groupId>
+        <artifactId>parent</artifactId>
+        <version>2.1.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>export</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>io.gdcc.spi</groupId>
+            <artifactId>meta</artifactId>
+        </dependency>
+
+        <dependency>
+            <groupId>jakarta.json</groupId>
+            <artifactId>jakarta.json-api</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>jakarta.ws.rs</groupId>
+            <artifactId>jakarta.ws.rs-api</artifactId>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/export/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java b/export/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java
new file mode 100644
index 0000000..6fd4b48
--- /dev/null
+++ b/export/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java
@@ -0,0 +1,206 @@
+package io.gdcc.spi.export;
+
+import java.util.Collection;
+import java.util.HashSet;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.Set;
+
+/**
+ * Defines what dataset metadata to retrieve and at what level of detail
+ * for dataset-oriented export operations.
+ *
+ * This is a pure data-shape specification: it answers which aspects of a dataset
+ * should be included in an export, and optionally how file metadata nested within
+ * that dataset should be shaped. It deliberately does not address which
+ * datasets to operate on (that is a selection concern at a higher level), nor
+ * how much data to retrieve per call — pagination is a separate,
+ * orthogonal concern expressed via a {@code PageRequest} at the method level.
+ *
+ * File metadata shaping is optional: if no {@link FileExportQuery} is provided,
+ * methods that include file metadata will apply their own defaults. Methods that
+ * do not return file metadata will ignore any nested {@link FileExportQuery}.
+ *
+ * Instances are immutable and must be constructed via {@link #builder()}.
+ * Use {@link #defaults()} for the standard query with no special filtering.
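+ *
+ * A minimal usage sketch (illustrative only; the chosen predicates are examples):
+ * {@code
+ * DatasetExportQuery query = DatasetExportQuery.builder()
+ *     .fileQuery(FileExportQuery.builder()
+ *         .addFilePredicate(FileMetadataPredicates.ONLY_PUBLIC_FILES)
+ *         .build())
+ *     .build();
+ * }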
+ *
+ * @see FileExportQuery
+ * @see DatasetMetadataPredicates
+ */
+public final class DatasetExportQuery {
+
+ private final Set<DatasetMetadataPredicates> datasetPredicates;
+ private final FileExportQuery fileQuery;
+
+ /**
+ * Default query, including all dataset metadata and applying file metadata defaults.
+ */
+ private static final DatasetExportQuery DEFAULT = builder().build();
+
+ private DatasetExportQuery(Builder builder) {
+ this.datasetPredicates = Set.copyOf(builder.datasetPredicates);
+ this.fileQuery = builder.fileQuery;
+ }
+
+ /**
+ * Returns a builder for creating new queries.
+ *
+ * @return a new {@link Builder} instance
+ */
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ /**
+ * Returns the default query, which includes all dataset metadata with no special
+ * filtering, and defers file metadata shaping to method-level defaults.
+ *
+ * @return the shared default {@link DatasetExportQuery} instance
+ */
+ public static DatasetExportQuery defaults() {
+ return DEFAULT;
+ }
+
+ /**
+ * Builder for {@link DatasetExportQuery}.
+ *
+ * Obtain an instance via {@link DatasetExportQuery#builder()} or
+ * {@link Builder#from(DatasetExportQuery)} to derive a new query from an existing one.
+ */
+ public static class Builder {
+ private final Set<DatasetMetadataPredicates> datasetPredicates = new HashSet<>();
+ private FileExportQuery fileQuery = null;
+
+ private Builder() {
+ // Hiding constructor to enforce use of the static factory method
+ }
+
+ /**
+ * Sets the dataset metadata predicates, replacing any previously set predicates.
+ *
+ * @param predicates the dataset metadata predicates to set
+ * @return this builder instance
+ */
+ public Builder datasetPredicates(DatasetMetadataPredicates... predicates) {
+ this.datasetPredicates.clear();
+ this.datasetPredicates.addAll(Set.of(predicates));
+ return this;
+ }
+
+ /**
+ * Sets the dataset metadata predicates, replacing any previously set predicates.
+ *
+ * @param predicates the dataset metadata predicates to set
+ * @return this builder instance
+ */
+ public Builder datasetPredicates(Collection<DatasetMetadataPredicates> predicates) {
+ this.datasetPredicates.clear();
+ this.datasetPredicates.addAll(predicates);
+ return this;
+ }
+
+ /**
+ * Adds a dataset metadata predicate to the builder's collection of predicates.
+ *
+ * @param predicate the dataset metadata predicate to add
+ * @return this builder instance
+ */
+ public Builder addDatasetPredicate(DatasetMetadataPredicates predicate) {
+ this.datasetPredicates.add(predicate);
+ return this;
+ }
+
+ /**
+ * Sets the {@link FileExportQuery} to use for shaping file metadata nested
+ * within this dataset query. Replaces any previously set file query.
+ *
+ * If not set, methods that include file metadata will apply their own defaults.
+ *
+ * @param fileQuery the file export query to compose into this dataset query
+ * @return this builder instance
+ */
+ public Builder fileQuery(FileExportQuery fileQuery) {
+ this.fileQuery = fileQuery;
+ return this;
+ }
+
+ /**
+ * Builds an immutable {@link DatasetExportQuery}.
+ *
+ * @return a new, validated {@link DatasetExportQuery}
+ * @throws IllegalArgumentException if the predicate combination is invalid,
+ * e.g. due to conflicting predicates
+ */
+ public DatasetExportQuery build() {
+ return new DatasetExportQuery(this);
+ }
+
+ /**
+ * Creates a new {@link Builder} pre-populated with the state of the given query,
+ * useful for deriving a modified copy without altering the original.
+ *
+ * @param source the {@link DatasetExportQuery} instance to copy from
+ * @return a new {@code Builder} with the same predicates and file query as {@code source}
+ */
+ public Builder from(DatasetExportQuery source) {
+ return new Builder()
+ .datasetPredicates(source.datasetPredicates)
+ .fileQuery(source.fileQuery);
+ }
+ }
+
+ // Getters
+
+ /**
+ * Returns the dataset metadata predicates that control which aspects of the dataset
+ * are included in the export.
+ *
+ * @return an unmodifiable set of {@link DatasetMetadataPredicates}; never {@code null}
+ */
+ public Set<DatasetMetadataPredicates> getDatasetPredicates() {
+ return datasetPredicates;
+ }
+
+ /**
+ * Returns the optional {@link FileExportQuery} that controls how file metadata
+ * nested within this dataset export should be shaped.
+ *
+ * An empty {@link Optional} means no explicit file query was specified; methods
+ * that include file metadata will apply their own defaults in that case.
+ *
+ * @return an {@link Optional} containing the file export query, or empty if not set
+ */
+ public Optional<FileExportQuery> getFileQuery() {
+ return Optional.ofNullable(fileQuery);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ DatasetExportQuery that = (DatasetExportQuery) o;
+ return datasetPredicates.equals(that.datasetPredicates)
+ && Objects.equals(fileQuery, that.fileQuery);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(datasetPredicates, fileQuery);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("DatasetExportQuery{");
+
+ if (!datasetPredicates.isEmpty()) {
+ sb.append("datasetPredicates=").append(datasetPredicates).append(", ");
+ }
+ if (fileQuery != null) {
+ sb.append("fileQuery=").append(fileQuery);
+ } else {
+ sb.append("fileQuery=");
+ }
+ sb.append("}");
+ return sb.toString();
+ }
+}
diff --git a/export/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java b/export/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java
new file mode 100644
index 0000000..74456e4
--- /dev/null
+++ b/export/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java
@@ -0,0 +1,76 @@
+package io.gdcc.spi.export;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Predicates for controlling which dataset metadata is included in an export
+ * and at what level of detail.
+ *
+ * These predicates are used in a {@link DatasetExportQuery} to shape dataset-level
+ * retrieval. They are exclusively concerned with dataset-level concerns — file-level
+ * filtering is handled separately by {@link FileMetadataPredicates} via
+ * {@link FileExportQuery}.
+ *
+ * Predicates may conflict with each other; use {@link #checkConflicts(Set)} to
+ * validate a combination before use.
+ *
+ * @see DatasetExportQuery
+ * @see FileMetadataPredicates
+ */
+public enum DatasetMetadataPredicates {
+ // NOTE: We can only define backward conflicts, as forward conflicts would lead
+ // to circular dependencies disallowed by the Java compiler.
+
+ // Placeholder — dataset-level predicates to be added here as requirements emerge.
+ // Examples of future candidates:
+ // PUBLISHED_DATASETS_ONLY — restrict to published versions
+ // DRAFT_INCLUDED — include draft versions
+ // METADATA_BLOCKS_ONLY — exclude file metadata entirely
+ ;
+
+ final Set<DatasetMetadataPredicates> conflicts;
+
+ DatasetMetadataPredicates(DatasetMetadataPredicates... predicates) {
+ this.conflicts = Set.of(predicates);
+ }
+
+ /**
+ * Returns {@code true} if this predicate conflicts with the given predicate.
+ *
+ * @param p the predicate to check against; {@code null} is safe and returns {@code false}
+ * @return {@code true} if a conflict exists, {@code false} otherwise
+ */
+ public boolean conflictsWith(DatasetMetadataPredicates p) {
+ if (p == null) {
+ return false;
+ }
+ return conflicts.contains(p);
+ }
+
+ /**
+ * Checks for conflicts among the given set of dataset metadata predicates.
+ * A predicate is considered conflicting if it has a conflict relationship with
+ * any other predicate in the set.
+ *
+ * @param predicates the set of predicates to check for conflicts
+ * @return an unmodifiable set of predicates from the input that conflict with at
+ * least one other predicate; empty if no conflicts exist
+ */
+ @SuppressWarnings("java:S2259")
+ public static Set<DatasetMetadataPredicates> checkConflicts(Set<DatasetMetadataPredicates> predicates) {
+ Set<DatasetMetadataPredicates> foundConflicts = new HashSet<>();
+
+ for (DatasetMetadataPredicates predicate : predicates) {
+ for (DatasetMetadataPredicates compare : predicates) {
+ if (predicate.conflictsWith(compare) || compare.conflictsWith(predicate)) {
+ foundConflicts.add(predicate);
+ foundConflicts.add(compare);
+ }
+ }
+ }
+
+ return Collections.unmodifiableSet(foundConflicts);
+ }
+}
\ No newline at end of file
diff --git a/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java
new file mode 100644
index 0000000..aef1293
--- /dev/null
+++ b/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java
@@ -0,0 +1,275 @@
+
+package io.gdcc.spi.export;
+
+import io.gdcc.spi.meta.plugin.CoreProvider;
+import jakarta.json.JsonArray;
+import jakarta.json.JsonObject;
+import org.w3c.dom.Document;
+
+import java.io.InputStream;
+import java.util.Optional;
+import java.util.stream.Stream;
+
+/**
+ * Provides dataset metadata that can be used by an {@link Exporter} to create
+ * new metadata export formats.
+ *
+ * This interface offers multiple methods for retrieving dataset metadata in various
+ * formats and levels of detail. Exporters should choose the method that best fits
+ * their needs, considering the completeness of metadata and performance implications.
+ *
+ *
+ * <h2>Implementation Guide</h2>
+ * Implementers must override the context-accepting versions of all data retrieval
+ * methods. No-argument convenience methods are provided as default implementations
+ * for backward compatibility but are deprecated and will be removed in a future version.
+ *
+ * <h2>Context Handling</h2>
+ * Implementations should respect context options where applicable.
+ * Not all methods support all context options; see the individual method documentation for details.
+ * All methods require a non-null {@link DatasetExportQuery} or {@link FileExportQuery}.
+ * Passing null will result in a {@link NullPointerException}.
+ * Callers should use {@link DatasetExportQuery#defaults()} or {@link FileExportQuery#defaults()}, respectively, instead of passing null.
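+ *
+ * A consumption sketch for paged file details (illustrative only; {@code dataProvider} stands for
+ * the provider instance handed to an {@link Exporter}):
+ * {@code
+ * Stream<JsonObject> files = dataProvider.getDatasetFileDetails(
+ *     FileExportQuery.defaults(), PageRequest.of(0, 100));
+ * files.forEach(System.out::println);
+ * }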
+ *
+ * @see Exporter
+ * @see DatasetExportQuery
+ * @see FileExportQuery
+ */
+public interface ExportDataProvider extends CoreProvider {
+
+ int API_LEVEL = 2;
+
+ /**
+ * Returns complete dataset metadata in Dataverse's standard JSON format.
+ *
+ * This format includes comprehensive dataset-level metadata along with basic
+ * metadata for each file in the dataset. It is the same JSON format used in
+ * the Dataverse API and available as a metadata export option in the UI.
+ *
+ * @param query specification for data retrieval
+ * @return dataset metadata in Dataverse JSON format
+ * @throws ExportException if metadata retrieval fails
+ * @throws NullPointerException if the query is null
+ * @since 2.1.0
+ * @apiNote While no formal JSON schema exists for this format, it is well-documented
+ * in the Dataverse guides. Along with OAI_ORE, this is one of only two export
+ * formats that provide complete dataset and file metadata.
+ * @implNote Implementations must respect a nested {@link FileExportQuery} whose predicates
+ * include {@link FileMetadataPredicates#SKIP_FILES}. In that case, file-level metadata
+ * should be excluded to optimize performance for datasets with large numbers of files.
+ * Other context options do not apply to this method and should be ignored.
+ */
+ JsonObject getDatasetJson(DatasetExportQuery query);
+
+ /**
+ * Returns complete dataset metadata using default options.
+ *
+ * @return dataset metadata in Dataverse JSON format
+ * @throws ExportException if metadata retrieval fails
+ * @since 1.0.0
+ * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetJson(DatasetExportQuery)} instead.
+ */
+ @Deprecated(since = "2.1.0", forRemoval = true)
+ default JsonObject getDatasetJson() {
+ return getDatasetJson(DatasetExportQuery.defaults());
+ }
+
+ /**
+ * Returns dataset metadata in JSON-LD-based OAI-ORE format.
+ *
+ * OAI-ORE (Open Archives Initiative Object Reuse and Exchange) provides a structured way to describe
+ * aggregations of web resources. This format is used in Dataverse's archival bag export mechanism
+ * and available via UI and API.
+ *
+ * @param query specification for data retrieval
+ * @return dataset metadata in OAI-ORE format
+ * @throws ExportException if metadata retrieval fails
+ * @throws NullPointerException if the query is null
+ * @since 2.1.0
+ * @apiNote Along with the standard JSON format, this is one of only two export
+ * formats that provide complete dataset-level metadata along with basic
+ * file metadata for each file in the dataset.
+ * @implNote Implementations must respect a nested {@link FileExportQuery} whose predicates
+ * include {@link FileMetadataPredicates#SKIP_FILES}, excluding file-level metadata in that case.
+ * Other context options do not apply and should be ignored.
+ */
+ JsonObject getDatasetORE(DatasetExportQuery query);
+
+ /**
+ * Returns dataset metadata in OAI-ORE format using default options.
+ *
+ * @return dataset metadata in OAI-ORE format
+ * @throws ExportException if metadata retrieval fails
+ * @since 1.0.0
+ * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetORE(DatasetExportQuery)} instead.
+ */
+ @Deprecated(since = "2.1.0", forRemoval = true)
+ default JsonObject getDatasetORE() {
+ return getDatasetORE(DatasetExportQuery.defaults());
+ }
+
+ /**
+ * Returns detailed metadata for files in the dataset.
+ *
+ * For tabular files that have been successfully ingested, this may include
+ * DDI-centric metadata extracted during the ingest process. This detailed
+ * metadata is not available through other methods in this interface.
+ *
+ * The query may specify filters to skip certain files or how much metadata details should be included.
+ * The resulting stream will contain a limited number of elements only, specified by a {@code PageRequest},
+ * avoiding huge memory allocations in the provider.
+ *
+ *
+ * @param query specification for file data retrieval
+ * @param request the page request containing pagination information such as page offset and page size
+ * @return JSON array with one entry per dataset file (both tabular and non-tabular)
+ * @throws ExportException if metadata retrieval fails
+ * @throws NullPointerException if the query or request is null
+ * @since 2.1.0
+ * @apiNote No formal JSON schema is available for this output. The format is not
+ * extensively documented; implementers may wish to examine the DDIExporter
+ * and JSONPrinter classes in the Dataverse codebase for usage examples.
+ */
+ Stream<JsonObject> getDatasetFileDetails(FileExportQuery query, PageRequest request);
+
+ /**
+ * Returns detailed metadata for files in the dataset.
+ *
+ * For tabular files that have been successfully ingested, this may include
+ * DDI-centric metadata extracted during the ingest process. This detailed
+ * metadata is not available through other methods in this interface.
+ *
+ * The query may specify filters to skip certain files or how much metadata details should be included.
+ * The resulting stream will contain all matching files for consumption.
+ * In cases with large metadata quantities use {@link #getDatasetFileDetails(FileExportQuery,PageRequest)}
+ * for a stream containing a limited number of elements only, avoiding huge memory allocations in the provider.
+ *
+ *
+ * @param query specification for file data retrieval
+ * @return JSON array with one entry per dataset file (both tabular and non-tabular)
+ * @throws ExportException if metadata retrieval fails
+ * @throws NullPointerException if the query is null
+ * @since 2.1.0
+ * @apiNote No formal JSON schema is available for this output. The format is not
+ * extensively documented; implementers may wish to examine the DDIExporter
+ * and JSONPrinter classes in the Dataverse codebase for usage examples.
+ */
+ Stream<JsonObject> getDatasetFileDetails(FileExportQuery query);
+
+ /**
+ * Returns detailed metadata for all files using default options.
+ *
+ * Note that this method will serialize all file metadata into one large JSON array.
+ * This can be memory-intensive for large datasets and should be used judiciously.
+ * There have been reports of unexportable large datasets in production installations.
+ * Using {@link #getDatasetFileDetails(FileExportQuery)} instead is advised.
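+ *
+ * A migration sketch (illustrative only):
+ * {@code
+ * // before, deprecated and memory heavy:
+ * JsonArray allDetails = getDatasetFileDetails();
+ * // after, streamed with an explicit query:
+ * Stream<JsonObject> details = getDatasetFileDetails(FileExportQuery.defaults());
+ * }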
+ *
+ *
+ * @return JSON array with one JSON object entry per dataset file
+ * @throws ExportException if metadata retrieval fails
+ * @since 1.0.0
+ * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetFileDetails(FileExportQuery)}
+ * or {@link #getDatasetFileDetails(FileExportQuery, PageRequest)} instead.
+ */
+ @Deprecated(since = "2.1.0", forRemoval = true)
+ JsonArray getDatasetFileDetails();
+
+ /**
+ * Returns dataset metadata conforming to the schema.org standard.
+ *
+ * This metadata subset is used in dataset page headers to improve discoverability by search engines.
+ * It provides structured data markup (JSON-LD) following the schema.org vocabulary.
+ *
+ * @param query specification for data retrieval
+ * @return dataset metadata in schema.org format
+ * @throws ExportException if metadata retrieval fails
+ * @throws NullPointerException if the query is null
+ * @since 2.1.0
+ * @apiNote This metadata export is not complete. It should only be used as a starting
+ * point for an Exporter if it simplifies implementation compared to using
+ * the complete JSON or OAI_ORE exports.
+ * @implNote All context options are ignored by this method.
+ */
+ JsonObject getDatasetSchemaDotOrg(DatasetExportQuery query);
+
+ /**
+ * Returns dataset metadata in schema.org format using default options.
+ *
+ * @return dataset metadata in schema.org format
+ * @throws ExportException if metadata retrieval fails
+ * @since 1.0.0
+ * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetSchemaDotOrg(DatasetExportQuery)} instead.
+ */
+ @Deprecated(since = "2.1.0", forRemoval = true)
+ default JsonObject getDatasetSchemaDotOrg() {
+ return getDatasetSchemaDotOrg(DatasetExportQuery.defaults());
+ }
+
+ /**
+ * Returns dataset metadata conforming to the DataCite standard as XML.
+ *
+ * This is the same metadata format sent to DataCite when DataCite DOIs are used.
+ * It provides citation metadata following the DataCite Metadata Schema.
+ *
+ * Note: the returned XML document can easily be queried using XPath and other techniques
+ *
+ *
+ * @param query specification for data retrieval
+ * @return dataset metadata as DataCite XML string
+ * @throws ExportException if metadata retrieval fails
+ * @throws NullPointerException if the query is null
+ * @since 2.1.0
+ * @apiNote This metadata export is not complete. It should only be used as a starting
+ * point for an Exporter if it simplifies implementation compared to using
+ * the complete JSON or OAI_ORE exports.
+ * @implNote All context options are ignored by this method.
+ */
+ Document getDataCiteXml(DatasetExportQuery query);
+
+ /**
+ * Returns dataset metadata in DataCite XML format using default options.
+ *
+ * @return dataset metadata as DataCite XML string
+ * @throws ExportException if metadata retrieval fails
+ * @since 1.0.0
+ * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDataCiteXml(DatasetExportQuery)} instead.
+ */
+ @Deprecated(since = "2.1.0", forRemoval = true)
+ String getDataCiteXml();
+
+ /**
+ * Returns metadata in the format specified by an Exporter's prerequisite.
+ *
+ * Some Exporters transform metadata from one standard format to another (e.g.,
+ * DDI XML to DDI HTML). Such Exporters declare a prerequisite format via
+ * {@link Exporter#getPrerequisiteFormatName()}, and this method provides access
+ * to that prerequisite metadata.
+ *
+ * @param query specification passed to the prerequisite exporter
+ * @return metadata in the prerequisite format, or empty if no prerequisite is configured
+ * @throws ExportException if metadata retrieval fails
+ * @throws NullPointerException if the query is null
+ * @since 2.1.0
+ * @apiNote This is useful for creating alternate representations of the same metadata
+ * (e.g., XML, HTML, PDF versions of a standard like DDI), especially when
+ * conversion libraries exist. Note that if a third-party Exporter replaces
+ * the internal exporter you depend on, this method may return unexpected results.
+ * @implNote The default implementation returns empty. Override only if your provider
+ * supports prerequisite format chaining. The prerequisite exporter receives
+ * the same context as specified in this call.
+ */
+ default Optional<InputStream> getPrerequisiteInputStream(DatasetExportQuery query) {
+ return Optional.empty();
+ }
+
+ /**
+ * Returns metadata in the prerequisite format using default options.
+ *
+ * @return metadata in the prerequisite format, or empty if no prerequisite is configured
+ * @throws ExportException if metadata retrieval fails
+ * @since 1.0.0
+ * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getPrerequisiteInputStream(DatasetExportQuery)} instead.
+ */
+ @Deprecated(since = "2.1.0", forRemoval = true)
+ default Optional<InputStream> getPrerequisiteInputStream() {
+ return getPrerequisiteInputStream(DatasetExportQuery.defaults());
+ }
+}
diff --git a/src/main/java/io/gdcc/spi/export/ExportException.java b/export/src/main/java/io/gdcc/spi/export/ExportException.java
similarity index 72%
rename from src/main/java/io/gdcc/spi/export/ExportException.java
rename to export/src/main/java/io/gdcc/spi/export/ExportException.java
index c816a60..57a6a1b 100644
--- a/src/main/java/io/gdcc/spi/export/ExportException.java
+++ b/export/src/main/java/io/gdcc/spi/export/ExportException.java
@@ -1,8 +1,6 @@
package io.gdcc.spi.export;
-import java.io.IOException;
-
-public class ExportException extends IOException {
+public class ExportException extends RuntimeException {
public ExportException(String message) {
super(message);
}
diff --git a/src/main/java/io/gdcc/spi/export/Exporter.java b/export/src/main/java/io/gdcc/spi/export/Exporter.java
similarity index 92%
rename from src/main/java/io/gdcc/spi/export/Exporter.java
rename to export/src/main/java/io/gdcc/spi/export/Exporter.java
index 7132e74..fc439a9 100644
--- a/src/main/java/io/gdcc/spi/export/Exporter.java
+++ b/export/src/main/java/io/gdcc/spi/export/Exporter.java
@@ -1,5 +1,9 @@
package io.gdcc.spi.export;
+import io.gdcc.spi.meta.annotations.PluginContract;
+import io.gdcc.spi.meta.annotations.RequiredProvider;
+import io.gdcc.spi.meta.plugin.Plugin;
+
import java.io.OutputStream;
import java.util.Locale;
import java.util.Optional;
@@ -10,8 +14,17 @@
* deploying new classes that implement this Exporter interface.
*/
-public interface Exporter {
-
+@PluginContract(
+ role = PluginContract.Role.BASE,
+ providers = @RequiredProvider(ExportDataProvider.class)
+)
+public interface Exporter extends Plugin {
+
+ int API_LEVEL = 2;
+
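+ /**
+ * By default, the plugin identity of an exporter is its format name.
+ */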
+ default String identity() {
+ return this.getFormatName();
+ }
/**
* When this method is called, the Exporter should write the metadata to the given OutputStream.
diff --git a/export/src/main/java/io/gdcc/spi/export/FileExportQuery.java b/export/src/main/java/io/gdcc/spi/export/FileExportQuery.java
new file mode 100644
index 0000000..5ebcacd
--- /dev/null
+++ b/export/src/main/java/io/gdcc/spi/export/FileExportQuery.java
@@ -0,0 +1,162 @@
+package io.gdcc.spi.export;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Objects;
+import java.util.Set;
+
+import static io.gdcc.spi.export.FileMetadataPredicates.*;
+
+/**
+ * Defines what file metadata to retrieve and at what level of detail
+ * for file-oriented export operations.
+ *
+ * This is a pure data-shape specification: it answers which files should be included
+ * and how much detail about them should be fetched. It deliberately does not address
+ * how much data to retrieve per call — pagination is a separate,
+ * orthogonal concern expressed via a {@code PageRequest} at the method level.
+ *
+ * A {@code FileExportQuery} may be used standalone in file-centric export methods,
+ * or composed inside a {@code DatasetExportQuery} to specify how file metadata
+ * should be shaped within a dataset export.
+ *
+ * Instances are immutable and must be constructed via {@link #builder()}.
+ * Use {@link #defaults()} for the standard all-files query with no special filtering.
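+ *
+ * A minimal usage sketch (illustrative only; the chosen predicates are examples):
+ * {@code
+ * FileExportQuery query = FileExportQuery.builder()
+ *     .filePredicates(FileMetadataPredicates.ONLY_TABULAR_FILES,
+ *         FileMetadataPredicates.INCLUDE_TABULAR_DATA_VARIABLES)
+ *     .build();
+ * }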
+ *
+ * @see FileMetadataPredicates
+ */
+public final class FileExportQuery {
+
+ private final Set<FileMetadataPredicates> filePredicates;
+
+ /**
+ * Default query with no special options.
+ */
+ private static final FileExportQuery DEFAULT = builder().addFilePredicate(ALL_FILES).build();
+
+ private FileExportQuery(Builder builder) {
+ this.filePredicates = Set.copyOf(builder.filePredicates);
+ }
+
+ /**
+ * Returns a builder for creating new queries.
+ */
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ /**
+ * Returns a default query, which includes all files without filtering or detail restrictions.
+ */
+ public static FileExportQuery defaults() {
+ return DEFAULT;
+ }
+
+ /**
+ * Builder for {@link FileExportQuery}.
+ *
+ * Obtain an instance via {@link FileExportQuery#builder()} or
+ * {@link Builder#from(FileExportQuery)} to derive a new query from an existing one.
+ */
+ public static class Builder {
+ private final Set<FileMetadataPredicates> filePredicates = new HashSet<>();
+
+ private Builder() {
+ // Hiding constructor to enforce use of the static factory method
+ }
+
+ /**
+ * Sets the file metadata predicates, replacing any previously set predicates.
+ *
+ * @param predicates the file metadata predicates to set
+ * @return this builder instance
+ */
+ public Builder filePredicates(FileMetadataPredicates... predicates) {
+ this.filePredicates.clear();
+ this.filePredicates.addAll(Set.of(predicates));
+ return this;
+ }
+
+ /**
+ * Sets the file metadata predicates, replacing any previously set predicates.
+ *
+ * @param predicates the file metadata predicates to set
+ * @return this builder instance
+ */
+ public Builder filePredicates(Collection<FileMetadataPredicates> predicates) {
+ this.filePredicates.clear();
+ this.filePredicates.addAll(predicates);
+ return this;
+ }
+
+ /**
+ * Adds a file metadata predicate to the builder's collection of predicates.
+ *
+ * @param predicate the file metadata predicate to add
+ * @return this builder instance
+ */
+ public Builder addFilePredicate(FileMetadataPredicates predicate) {
+ this.filePredicates.add(predicate);
+ return this;
+ }
+
+ /**
+ * Builds an immutable {@link FileExportQuery}.
+ *
+ * @return a new, validated {@link FileExportQuery}
+ * @throws IllegalArgumentException if validation fails
+ */
+ public FileExportQuery build() {
+ return new FileExportQuery(this);
+ }
+
+ /**
+ * Copies the properties from the given {@link FileExportQuery} instance into a new {@code Builder}.
+ *
+ * @param source the {@code FileExportQuery} instance from which to copy properties
+ * @return a new {@code Builder} instance with properties copied from the provided query
+ */
+ public Builder from(FileExportQuery source) {
+ return new Builder()
+ .filePredicates(source.filePredicates);
+ }
+ }
+
+ // Getters
+
+ /**
+ * Returns the file metadata predicates that control which files are included
+ * and what level of detail is fetched for each.
+ *
+ * @return an unmodifiable set of {@link FileMetadataPredicates}; never {@code null}
+ */
+ public Set<FileMetadataPredicates> getFilePredicates() {
+ return Collections.unmodifiableSet(filePredicates);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ FileExportQuery that = (FileExportQuery) o;
+ return filePredicates.equals(that.filePredicates);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(filePredicates);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("FileExportQuery{");
+
+ if (!filePredicates.isEmpty()) {
+ sb.append("filePredicates=").append(filePredicates);
+ }
+
+ sb.append("}");
+ return sb.toString();
+ }
+}
diff --git a/export/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java b/export/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java
new file mode 100644
index 0000000..81f6a65
--- /dev/null
+++ b/export/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java
@@ -0,0 +1,84 @@
+package io.gdcc.spi.export;
+
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+/**
+ * Enum representing predicates for filtering file metadata during export operations.
+ * Each predicate defines criteria for including or excluding specific types of files.
+ * Conflicts between predicates are explicitly defined to prevent ambiguous or contradictory filtering rules.
+ * Any predicates should follow the pattern (SKIP|ONLY|INCLUDE)_[ADJECTIVE]_[OBJECT].
+ */
+public enum FileMetadataPredicates {
+ // NOTE: We can only define backward conflicts, as forward conflicts would lead
+ // to circular dependencies disallowed by the Java compiler.
+
+ /**
+ * Includes metadata for all files without restriction.
+ * Conflicts with any other predicate selecting files.
+ */
+ ALL_FILES(),
+ /**
+ * Excludes metadata for all files.
+ * Conflicts with any other file selecting predicate.
+ */
+ SKIP_FILES(ALL_FILES),
+ /**
+ * Only include files with public visibility.
+ * Conflicts with {@link #ALL_FILES} and {@link #SKIP_FILES}.
+ */
+ ONLY_PUBLIC_FILES(ALL_FILES, SKIP_FILES),
+ /**
+ * Only include tabular data files.
+ * Conflicts with {@link #ALL_FILES} and {@link #SKIP_FILES}.
+ */
+ ONLY_TABULAR_FILES(ALL_FILES, SKIP_FILES),
+ /**
+ * For tabular data files, control if variable details are included or not.
+ * (That can be huge and heterogeneous data with slow DB queries!)
+ * It has no conflicting predicates, as it is about detail inclusion, not file selection.
+ */
+ INCLUDE_TABULAR_DATA_VARIABLES()
+ ;
+
+ final Set<FileMetadataPredicates> conflicts;
+
+ FileMetadataPredicates(FileMetadataPredicates... predicates) {
+ this.conflicts = Set.of(predicates);
+ }
+
+ public boolean conflictsWith(FileMetadataPredicates p) {
+ if (p == null) {
+ return false;
+ }
+ return conflicts.contains(p);
+ }
+
+ /**
+ * Checks for conflicts among the given set of export file predicates.
+ * A predicate is considered conflicting if it has a conflict relationship with
+ * any other predicate defined in the {@link FileMetadataPredicates} enum.
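+ *
+ * A validation sketch (illustrative only):
+ * {@code
+ * Set<FileMetadataPredicates> requested = Set.of(ALL_FILES, ONLY_PUBLIC_FILES);
+ * Set<FileMetadataPredicates> conflicts = FileMetadataPredicates.checkConflicts(requested);
+ * // conflicts now contains both predicates, as ONLY_PUBLIC_FILES excludes ALL_FILES
+ * }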
+ *
+ * @param predicates the set of predicates to check for conflicts
+ * @return an unmodifiable set of predicates from the input that conflict with at least one other predicate (empty if no conflict)
+ */
+ @SuppressWarnings("java:S2259")
+ public static Set<FileMetadataPredicates> checkConflicts(Set<FileMetadataPredicates> predicates) {
+ Set<FileMetadataPredicates> foundConflicts = new HashSet<>();
+
+ // Iterate via O(n^2) through all predicates to check any existing predicate for a conflict.
+ // This way, a forward check is enough, as we iterate through the cartesian product.
+ for (FileMetadataPredicates predicate : predicates) {
+ for (FileMetadataPredicates compare : predicates) {
+ if (predicate.conflictsWith(compare) || compare.conflictsWith(predicate)) {
+ foundConflicts.add(predicate);
+ foundConflicts.add(compare);
+ }
+ }
+ }
+
+ return Collections.unmodifiableSet(foundConflicts);
+ }
+
+}
diff --git a/export/src/main/java/io/gdcc/spi/export/PageRequest.java b/export/src/main/java/io/gdcc/spi/export/PageRequest.java
new file mode 100644
index 0000000..335e8ed
--- /dev/null
+++ b/export/src/main/java/io/gdcc/spi/export/PageRequest.java
@@ -0,0 +1,67 @@
+package io.gdcc.spi.export;
+
+import java.util.Objects;
+
+/**
+ * Defines pagination parameters for data retrieval methods that return
+ * potentially large collections of results.
+ *
+ * Use {@link #unpaged()} for requests that should return all results in a single batch.
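+ *
+ * A usage sketch (illustrative only):
+ * {@code
+ * PageRequest firstHundred = PageRequest.of(0, 100);    // results 0..99
+ * PageRequest secondHundred = PageRequest.of(100, 100); // results 100..199
+ * PageRequest everything = PageRequest.unpaged();
+ * }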
+ */
+public final class PageRequest {
+
+ private static final PageRequest UNPAGED = new PageRequest(0, Integer.MAX_VALUE);
+
+ private final int offset;
+ private final int limit;
+
+ private PageRequest(int offset, int limit) {
+ if (offset < 0) throw new IllegalArgumentException("Offset must be >= 0, was: " + offset);
+ if (limit < 1) throw new IllegalArgumentException("Limit must be >= 1, was: " + limit);
+ this.offset = offset;
+ this.limit = limit;
+ }
+
+ /**
+ * Creates a page request with the given offset and limit.
+ *
+ * @param offset zero-based index of the first result to return
+ * @param limit maximum number of results to return
+ * @return a new PageRequest
+ */
+ public static PageRequest of(int offset, int limit) {
+ return new PageRequest(offset, limit);
+ }
+
+ /**
+ * Returns a request for all results (no pagination).
+ */
+ public static PageRequest unpaged() {
+ return UNPAGED;
+ }
+
+ public int getOffset() { return offset; }
+ public int getLimit() { return limit; }
+
+ public boolean isPaged() { return !this.equals(UNPAGED); }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+ PageRequest that = (PageRequest) o;
+ return offset == that.offset && limit == that.limit;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(offset, limit);
+ }
+
+ @Override
+ public String toString() {
+ return isPaged()
+ ? "PageRequest{offset=" + offset + ", limit=" + limit + "}"
+ : "PageRequest{unpaged}";
+ }
+}
diff --git a/src/main/java/io/gdcc/spi/export/XMLExporter.java b/export/src/main/java/io/gdcc/spi/export/XMLExporter.java
similarity index 82%
rename from src/main/java/io/gdcc/spi/export/XMLExporter.java
rename to export/src/main/java/io/gdcc/spi/export/XMLExporter.java
index 3c3fa35..08b7cc9 100644
--- a/src/main/java/io/gdcc/spi/export/XMLExporter.java
+++ b/export/src/main/java/io/gdcc/spi/export/XMLExporter.java
@@ -1,12 +1,19 @@
package io.gdcc.spi.export;
+import io.gdcc.spi.meta.annotations.PluginContract;
import jakarta.ws.rs.core.MediaType;
/**
* XML Exporter is an extension of the base Exporter interface that adds the
* additional methods needed for generating XML metadata export formats.
*/
+@PluginContract(
+ role = PluginContract.Role.CAPABILITY,
+ requires = Exporter.class
+)
public interface XMLExporter extends Exporter {
+
+ int API_LEVEL = 2;
/**
* @implNote for the ddi exporter, this method returns "ddi:codebook:2_5"
@@ -31,7 +38,8 @@ public interface XMLExporter extends Exporter {
/**
* @return - should always be MediaType.APPLICATION_XML
*/
- public default String getMediaType() {
+ @Override
+ default String getMediaType() {
return MediaType.APPLICATION_XML;
- };
+ }
}
diff --git a/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java b/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java
new file mode 100644
index 0000000..bc0c4fd
--- /dev/null
+++ b/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java
@@ -0,0 +1,105 @@
+
+package io.gdcc.spi.export;
+
+import io.gdcc.spi.export.fixtures.StubDdiExporter;
+import io.gdcc.spi.export.fixtures.StubJsonExporter;
+import io.gdcc.spi.meta.descriptor.Descriptor;
+import io.gdcc.spi.meta.descriptor.DescriptorFormat;
+import org.junit.jupiter.api.Test;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+/**
+ * Verifies that the annotation processor generates correct descriptors and service files
+ * when compiling real Exporter SPI implementations.
+ *
+ * The test implementation classes in this package are compiled with the processor on the
+ * classpath. The processor writes descriptors and service files into {@code target/test-classes/},
+ * which this test reads at runtime to verify correctness.
+ */
+class ExporterImplTest {
+
+ @Test
+ void generatesDescriptorAndServiceFileForBaseExporterImplementation() throws IOException {
+ Class<?> implClass = StubJsonExporter.class;
+
+ Descriptor descriptor = readDescriptor(implClass);
+ assertNotNull(descriptor, "Descriptor should be generated for " + implClass);
+
+ assertEquals(DescriptorFormat.transformClassName(implClass), descriptor.klass());
+ assertEquals(Exporter.class.getCanonicalName(), descriptor.kind());
+ assertEquals(Exporter.API_LEVEL, descriptor.contractLevel(Exporter.class.getCanonicalName()));
+ assertEquals(ExportDataProvider.API_LEVEL, descriptor.requiredProviderLevel(ExportDataProvider.class.getCanonicalName()));
+
+ String serviceFile = readServiceFile(Exporter.class);
+ assertNotNull(serviceFile, "Service file should be generated for Exporter");
+ assertTrue(serviceFile.contains(DescriptorFormat.transformClassName(implClass)), "Service file should contain " + implClass);
+ }
+
+ @Test
+ void generatesDescriptorWithBaseAndCapabilityForXmlExporterImplementation() throws IOException {
+ Class> implClass = StubDdiExporter.class;
+
+ Descriptor descriptor = readDescriptor(implClass);
+ assertNotNull(descriptor, "Descriptor should be generated for " + implClass);
+
+ assertEquals(DescriptorFormat.transformClassName(implClass), descriptor.klass());
+ assertEquals(Exporter.class.getCanonicalName(), descriptor.kind());
+ assertEquals(Exporter.API_LEVEL, descriptor.contractLevel(Exporter.class.getCanonicalName()));
+ assertEquals(XMLExporter.API_LEVEL, descriptor.contractLevel(XMLExporter.class.getCanonicalName()));
+ assertEquals(ExportDataProvider.API_LEVEL, descriptor.requiredProviderLevel(ExportDataProvider.class.getCanonicalName()));
+
+ String serviceFile = readServiceFile(Exporter.class);
+ assertNotNull(serviceFile, "Service file should be generated for Exporter");
+ assertTrue(serviceFile.contains(DescriptorFormat.transformClassName(implClass)), "Service file should contain " + implClass);
+ }
+
+ @Test
+ void doesNotGenerateServiceFileForXmlExporterCapability() {
+ String serviceFile = readServiceFile(XMLExporter.class);
+ assertNull(serviceFile, "Service file must never be generated for capability contract XMLExporter");
+ }
+
+ @Test
+ void xmlExporterDefaultMediaTypeSatisfiesBaseContract() throws IOException {
+ // StubDdiExporter implements XMLExporter (which extends Exporter) and does NOT
+ // override getMediaType(). Because XMLExporter extends Exporter in the Java type
+ // hierarchy, the default on XMLExporter satisfies the abstract declaration on Exporter.
+ // If this were not the case, compilation would have failed and no descriptor would exist.
+ Class<?> implClass = StubDdiExporter.class;
+
+ Descriptor descriptor = readDescriptor(implClass);
+ assertNotNull(descriptor, "Descriptor should exist, proving compilation succeeded without explicit getMediaType() override");
+ }
+
+ // ── Helpers ─────────────────────────────────────────────────────────────────
+
+ private Descriptor readDescriptor(Class<?> implClass) throws IOException {
+ String resourcePath = DescriptorFormat.toPath(implClass);
+ try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) {
+ if (is == null) {
+ return null;
+ }
+ return DescriptorFormat.read(new String(is.readAllBytes(), StandardCharsets.UTF_8));
+ }
+ }
+
+ private String readServiceFile(Class<?> serviceType) {
+ String resourcePath = "META-INF/services/" + serviceType.getName();
+ try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) {
+ if (is == null) {
+ return null;
+ }
+ return new String(is.readAllBytes(), StandardCharsets.UTF_8);
+ } catch (IOException e) {
+ return null;
+ }
+ }
+}
\ No newline at end of file
diff --git a/export/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java b/export/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java
new file mode 100644
index 0000000..4157b95
--- /dev/null
+++ b/export/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java
@@ -0,0 +1,32 @@
+package io.gdcc.spi.export;
+
+import org.junit.jupiter.api.Test;
+
+import java.util.Set;
+
+import static io.gdcc.spi.export.FileMetadataPredicates.*;
+import static org.junit.jupiter.api.Assertions.*;
+
+class FileMetadataPredicatesTest {
+
+ @Test
+ void checkForwardAndBackwardConflicts_All_None() {
+ Set<FileMetadataPredicates> predicates = Set.of(ALL_FILES, SKIP_FILES);
+ Set<FileMetadataPredicates> conflicts = FileMetadataPredicates.checkConflicts(predicates);
+ assertEquals(2, conflicts.size(), conflicts::toString);
+ }
+
+ @Test
+ void checkForwardAndBackwardConflicts_All_SthElse() {
+ Set<FileMetadataPredicates> predicates = Set.of(ALL_FILES, ONLY_PUBLIC_FILES);
+ Set<FileMetadataPredicates> conflicts = FileMetadataPredicates.checkConflicts(predicates);
+ assertEquals(2, conflicts.size(), conflicts::toString);
+ }
+
+ @Test
+ void checkNoConflicts_Public_Tabular() {
+ Set<FileMetadataPredicates> predicates = Set.of(ONLY_PUBLIC_FILES, ONLY_TABULAR_FILES);
+ Set<FileMetadataPredicates> conflicts = FileMetadataPredicates.checkConflicts(predicates);
+ assertTrue(conflicts.isEmpty(), conflicts::toString);
+ }
+}
\ No newline at end of file
diff --git a/export/src/test/java/io/gdcc/spi/export/PageRequestTest.java b/export/src/test/java/io/gdcc/spi/export/PageRequestTest.java
new file mode 100644
index 0000000..917625d
--- /dev/null
+++ b/export/src/test/java/io/gdcc/spi/export/PageRequestTest.java
@@ -0,0 +1,64 @@
+package io.gdcc.spi.export;
+
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+class PageRequestTest {
+
+ /**
+ * Tests for the `of` method in the `PageRequest` class.
+ * The `of` method is responsible for creating a new `PageRequest` object with the given offset and limit values.
+ * This test class ensures various scenarios for valid and invalid inputs are handled correctly.
+ */
+
+ @Test
+ void testOf_createsValidPageRequest() {
+ // Arrange & Act
+ PageRequest pageRequest = PageRequest.of(10, 20);
+
+ // Assert
+ assertEquals(10, pageRequest.getOffset());
+ assertEquals(20, pageRequest.getLimit());
+ assertTrue(pageRequest.isPaged());
+ }
+
+ @Test
+ void testOf_withZeroOffsetAndValidLimit() {
+ // Arrange & Act
+ PageRequest pageRequest = PageRequest.of(0, 5);
+
+ // Assert
+ assertEquals(0, pageRequest.getOffset());
+ assertEquals(5, pageRequest.getLimit());
+ assertTrue(pageRequest.isPaged());
+ }
+
+ @Test
+ void testOf_throwsExceptionForNegativeOffset() {
+ // Arrange & Act & Assert
+ IllegalArgumentException exception = assertThrows(
+ IllegalArgumentException.class,
+ () -> PageRequest.of(-1, 10)
+ );
+ assertEquals("Offset must be >= 0, was: -1", exception.getMessage());
+ }
+
+ @Test
+ void testOf_throwsExceptionForZeroOrNegativeLimit() {
+ // Arrange & Act & Assert
+ IllegalArgumentException exception1 = assertThrows(
+ IllegalArgumentException.class,
+ () -> PageRequest.of(5, 0)
+ );
+ assertEquals("Limit must be >= 1, was: 0", exception1.getMessage());
+
+ IllegalArgumentException exception2 = assertThrows(
+ IllegalArgumentException.class,
+ () -> PageRequest.of(5, -1)
+ );
+ assertEquals("Limit must be >= 1, was: -1", exception2.getMessage());
+ }
+}
\ No newline at end of file
diff --git a/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java b/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java
new file mode 100644
index 0000000..973a091
--- /dev/null
+++ b/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java
@@ -0,0 +1,57 @@
+package io.gdcc.spi.export.fixtures;
+
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.XMLExporter;
+import io.gdcc.spi.meta.annotations.DataversePlugin;
+
+import java.io.OutputStream;
+import java.util.Locale;
+
+/**
+ * Minimal XMLExporter implementation for processor integration testing.
+ *
+ * Does NOT override {@code getMediaType()} — the default from {@link XMLExporter}
+ * satisfies the abstract declaration on {@link io.gdcc.spi.export.Exporter} because XMLExporter extends Exporter.
+ */
+@DataversePlugin
+public class StubDdiExporter implements XMLExporter {
+ @Override
+ public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) {
+ /* Intentionally left blank for test class */
+ }
+
+ @Override
+ public String getFormatName() {
+ return "stub-ddi";
+ }
+
+ @Override
+ public String getDisplayName(Locale locale) {
+ return "Stub DDI";
+ }
+
+ @Override
+ public Boolean isHarvestable() {
+ return true;
+ }
+
+ @Override
+ public Boolean isAvailableToUsers() {
+ return true;
+ }
+
+ @Override
+ public String getXMLNameSpace() {
+ return "ddi:codebook:2_5";
+ }
+
+ @Override
+ public String getXMLSchemaLocation() {
+ return "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd";
+ }
+
+ @Override
+ public String getXMLSchemaVersion() {
+ return "2.5";
+ }
+}
diff --git a/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java b/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java
new file mode 100644
index 0000000..b273c39
--- /dev/null
+++ b/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java
@@ -0,0 +1,44 @@
+package io.gdcc.spi.export.fixtures;
+
+import io.gdcc.spi.export.ExportDataProvider;
+import io.gdcc.spi.export.Exporter;
+import io.gdcc.spi.meta.annotations.DataversePlugin;
+
+import java.io.OutputStream;
+import java.util.Locale;
+
+/**
+ * Minimal base-only Exporter implementation for processor integration testing.
+ */
+@DataversePlugin
+public class StubJsonExporter implements Exporter {
+ @Override
+ public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) {
+ /* Intentionally left blank for test class */
+ }
+
+ @Override
+ public String getFormatName() {
+ return "stub-json";
+ }
+
+ @Override
+ public String getDisplayName(Locale locale) {
+ return "Stub JSON";
+ }
+
+ @Override
+ public Boolean isHarvestable() {
+ return false;
+ }
+
+ @Override
+ public Boolean isAvailableToUsers() {
+ return true;
+ }
+
+ @Override
+ public String getMediaType() {
+ return "application/json";
+ }
+}
diff --git a/meta/pom.xml b/meta/pom.xml
new file mode 100644
index 0000000..c489747
--- /dev/null
+++ b/meta/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>io.gdcc.spi</groupId>
+        <artifactId>parent</artifactId>
+        <version>2.1.0-SNAPSHOT</version>
+    </parent>
+
+    <artifactId>meta</artifactId>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-compiler-plugin</artifactId>
+                <configuration>
+                    <release>${jdk.version}</release>
+                    <compilerArgument>${compilerArgument}</compilerArgument>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>default-compile</id>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>compile</goal>
+                        </goals>
+                        <configuration>
+                            <proc>none</proc>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>default-testCompile</id>
+                        <phase>test-compile</phase>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java
new file mode 100644
index 0000000..59989a4
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java
@@ -0,0 +1,39 @@
+package io.gdcc.spi.meta.annotations;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Marks a concrete plugin implementation class for metadata generation.
+ *
+ * Plugin authors should place this annotation on every concrete plugin implementation
+ * class that is meant to be discovered and loaded by Dataverse.
+ *
+ * Annotated classes are validated by the {@link io.gdcc.spi.meta.processor.PluginContractProcessor annotation processor}
+ * and contribute generated compatibility metadata used during plugin loading.
+ *
+ * Implementation rules:
+ *
+ * - the annotated type must be a {@code public}, non-abstract class,
+ * - it must implement exactly one {@link PluginContract.Role#BASE base contract},
+ * - it may additionally implement any number of {@link PluginContract.Role#CAPABILITY capability contracts}.
+ *
+ *
+ * A capability contract is never loadable on its own. A plugin implementing a capability
+ * must also implement the capability's required base contract. The base contract is the single hook
+ * the Dataverse core uses to discover and load your plugin.
+ *
+ * @implNote Example where {@code Exporter} is a base contract and {@code FooExporter} a capability:
+ * {@code
+ * @DataversePlugin
+ * public class MyBarExporter implements Exporter, FooExporter {
+ * // Your implementation goes here...
+ * }
+ * }
+ */
+@Retention(RetentionPolicy.SOURCE)
+@Target(ElementType.TYPE)
+public @interface DataversePlugin {
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java
new file mode 100644
index 0000000..c52124e
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java
@@ -0,0 +1,103 @@
+package io.gdcc.spi.meta.annotations;
+
+import io.gdcc.spi.meta.plugin.Plugin;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Declares a versioned plugin contract interface.
+ *
+ * A plugin contract defines either a directly loadable plugin kind
+ * ({@link Role#BASE}) or an additional, non-loadable capability
+ * ({@link Role#CAPABILITY}).
+ *
+ * The annotated type must be an {@code interface} extending {@link Plugin}
+ * and must declare a compile-time constant primitive {@code int API_LEVEL} field.
+ *
+ *
+ * General contract rules:
+ *
+ * - Plugin contracts may only be declared on interfaces.
+ * - Plugin contracts must extend {@link Plugin}.
+ * - Plugin contracts may not extend other plugin contracts. (One exception, see below.)
+ * - A plugin implementation may implement exactly one {@link Role#BASE base contract}.
+ *
+ *
+ * Base contracts are used as the unique service-loading identity of a plugin.
+ * Capability contracts are never loaded directly; they add optional functionality
+ * and are discovered through generated plugin metadata.
+ *
+ * Capability rules:
+ *
+ * - A capability contract must declare {@link #requires()}.
+ * - A capability must require exactly one base contract.
+ * - A capability may extend the required base contract to provide default implementations.
+ * - For now, requiring or extending another capability is not supported.
+ * - A plugin implementing a capability must also implement its required base contract.
+ *
+ *
+ * Note: this annotation cannot be used repeatedly on the same type.
+ *
+ * @implNote Example base contract:
+ * {@code
+ * @PluginContract(role = PluginContract.Role.BASE)
+ * public interface FooBar extends Plugin {
+ * int API_LEVEL = 1;
+ * }
+ * }
+ * Example capability contract:
+ * {@code
+ * @PluginContract(
+ * role = PluginContract.Role.CAPABILITY,
+ * requires = { FooBar.class }
+ * )
+ * public interface BarBeque extends Plugin {
+ * int API_LEVEL = 1;
+ *
+ * default String getMediaType() {
+ * return "application/bbq";
+ * }
+ * }
+ * }
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE)
+public @interface PluginContract {
+
+ /**
+ * Declares whether this contract is a directly loadable base contract or an additional capability contract.
+ */
+ Role role();
+
+ /**
+ * Other plugin contracts that must also be implemented when this contract is implemented.
+ *
+ * For {@link Role#CAPABILITY capabilities}, this must currently contain exactly one
+ * required {@link Role#BASE base contract}. Capabilities are not directly loadable and
+ * therefore must always be paired with their base contract.
+ */
+ Class<? extends Plugin>[] requires() default {};
+
+ /**
+ * Core provider contracts required by this plugin contract.
+ */
+ RequiredProvider[] providers() default {};
+
+ /**
+ * Distinguishes directly loadable base contracts from additional capability contracts.
+ */
+ enum Role {
+ /**
+ * A directly loadable plugin contract.
+ */
+ BASE,
+
+ /**
+ * An additional plugin capability that refines behavior but is not directly loadable.
+ */
+ CAPABILITY
+ }
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java
new file mode 100644
index 0000000..7760776
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java
@@ -0,0 +1,17 @@
+package io.gdcc.spi.meta.annotations;
+
+import io.gdcc.spi.meta.plugin.CoreProvider;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * Declares that a {@link PluginContract} requires a specific core provider contract.
+ *
+ * The provider API level is taken from the provider interface's
+ * {@code API_LEVEL} constant at compile time by the annotation processor.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+public @interface RequiredProvider {
+ Class<? extends CoreProvider> value();
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java
new file mode 100644
index 0000000..bac2b22
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java
@@ -0,0 +1,56 @@
+/**
+ * Annotations used to declare Dataverse plugin contracts, plugin implementations,
+ * and required core providers.
+ *
+ * This package defines the author-facing SPI model:
+ *
+ * - a {@linkplain io.gdcc.spi.meta.annotations.PluginContract.Role#BASE base contract}
+ * is the unique, directly loadable identity of a plugin,
+ * - a {@linkplain io.gdcc.spi.meta.annotations.PluginContract.Role#CAPABILITY capability contract}
+ * adds optional functionality but is never loaded directly,
+ * - a {@linkplain io.gdcc.spi.meta.annotations.DataversePlugin plugin implementation}
+ * must implement exactly one base contract and may additionally implement compatible capabilities,
+ * - a {@linkplain io.gdcc.spi.meta.annotations.RequiredProvider required provider}
+ * declares Dataverse infrastructure contracts needed by a plugin contract.
+ *
+ *
+ * Only base contracts are used as plugin loading identities.
+ *
+ * Contract interfaces must extend {@link io.gdcc.spi.meta.plugin.Plugin}, declare
+ * {@link io.gdcc.spi.meta.annotations.PluginContract}, and provide a compile-time
+ * {@code int API_LEVEL} constant. Plugin contracts must not extend other plugin contracts
+ * (with the single exception of a capability extending a required base contract).
+ *
+ * Capabilities are attached to a plugin through normal Java interface implementation.
+ * This allows SPI authors to provide additional methods and default implementations
+ * without introducing ambiguity into plugin loading. If multiple implemented interfaces
+ * contribute conflicting default methods, the plugin implementation class must resolve
+ * that conflict explicitly.
+ *
+ * Example with extending base contract:
+ * {@code
+ * @PluginContract(role = PluginContract.Role.BASE)
+ * public interface FooBar extends Plugin {
+ * int API_LEVEL = 1;
+ * String getMediaType();
+ * }
+ *
+ * @PluginContract(
+ * role = PluginContract.Role.CAPABILITY,
+ * requires = { FooBar.class }
+ * )
+ * public interface BarBeque extends FooBar {
+ * int API_LEVEL = 1;
+ *
+ * default String getMediaType() {
+ * return "application/bbq";
+ * }
+ * }
+ *
+ * @DataversePlugin
+ * public class Grill implements FooBar, BarBeque {
+ * // no override needed unless another default conflicts
+ * }
+ * }
+ */
+package io.gdcc.spi.meta.annotations;
diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java
new file mode 100644
index 0000000..a4431f2
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java
@@ -0,0 +1,142 @@
+package io.gdcc.spi.meta.descriptor;
+
+import java.util.Map;
+import java.util.Objects;
+import java.util.OptionalInt;
+
+/**
+ * In-memory representation of raw metadata de/serialized from a Dataverse Plugin Metadata file.
+ *
+ * @param klass implementation class name
+ * @param kind fully qualified base contract name
+ * @param contracts map of implemented contract names to API levels. May not contain null keys or values.
+ * @param requiredProviders map of required provider names to API levels. May not contain null keys or values.
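+ *
+ * A minimal usage sketch against this record's accessors; the contract names reuse the illustrative
+ * FooBar/BarBeque examples from the annotation docs:
+ * {@code
+ * Descriptor descriptor = DescriptorFormat.read(content);
+ * if (descriptor.isOfKind(FooBar.class) && descriptor.implementsContract(BarBeque.class)) {
+ * int level = descriptor.contractLevel(BarBeque.class);
+ * }
+ * }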
+ */
+public record Descriptor(
+ String klass,
+ String kind,
+ Map<String, Integer> contracts,
+ Map<String, Integer> requiredProviders
+) {
+
+ /**
+ * Creates a new descriptor and defensively copies the contract/provider maps.
+ *
+ * This ensures the descriptor remains immutable even if callers pass in
+ * mutable maps. All arguments must be non-null.
+ *
+ * Please note: the provided maps may not contain any null keys or values.
+ */
+ public Descriptor {
+ Objects.requireNonNull(klass);
+ Objects.requireNonNull(kind);
+ contracts = Map.copyOf(Objects.requireNonNull(contracts));
+ requiredProviders = Map.copyOf(Objects.requireNonNull(requiredProviders));
+ }
+
+ /**
+ * Determines whether this plugin is of a specified base contract kind.
+ * Checks both {@code kind} and implemented {@code contracts}.
+ *
+ * @param kindFqcn the fully qualified class name of the kind to check
+ * @return {@code true} if the plugin's kind matches the given class name and its contract is implemented,
+ * otherwise {@code false}
+ */
+ public boolean isOfKind(String kindFqcn) {
+ Objects.requireNonNull(kindFqcn);
+ return kind.equals(kindFqcn) && implementsContract(kindFqcn);
+ }
+
+ public boolean isOfKind(Class<?> kind) {
+ Objects.requireNonNull(kind);
+ return isOfKind(DescriptorFormat.transformClassName(kind));
+ }
+
+ /**
+ * Checks whether this plugin declares the given implemented contract.
+ *
+ * @param contractFqcn the fully qualified contract class name
+ * @return {@code true} if the contract is present in this plugin
+ */
+ public boolean implementsContract(String contractFqcn) {
+ Objects.requireNonNull(contractFqcn);
+ return contracts.containsKey(contractFqcn);
+ }
+
+ /**
+ * Checks whether this plugin declares the given implemented contract.
+ *
+ * @param contractClass the contract class
+ * @return {@code true} if the contract is present in this plugin
+ */
+ public boolean implementsContract(Class<?> contractClass) {
+ Objects.requireNonNull(contractClass);
+ return implementsContract(DescriptorFormat.transformClassName(contractClass));
+ }
+
+ /**
+ * Returns the declared API level for the given implemented contract.
+ * The contract must be declared by this descriptor; check with {@link #implementsContract(String)} first.
+ *
+ * @param contractFqcn the fully qualified contract class name
+ * @return the declared API level
+ */
+ public int contractLevel(String contractFqcn) {
+ Objects.requireNonNull(contractFqcn);
+ return contracts.get(contractFqcn);
+ }
+
+ /**
+ * Returns the declared API level for the given implemented contract.
+ * The contract must be declared by this descriptor; check with {@link #implementsContract(Class)} first.
+ *
+ * @param contractClass the contract class
+ * @return the declared API level
+ */
+ public int contractLevel(Class<?> contractClass) {
+ Objects.requireNonNull(contractClass);
+ return contractLevel(DescriptorFormat.transformClassName(contractClass));
+ }
+
+ /**
+ * Checks whether this plugin declares the given required provider.
+ *
+ * @param providerFqcn the fully qualified provider class name
+ * @return {@code true} if the provider is present in this plugin
+ */
+ public boolean requiresProvider(String providerFqcn) {
+ Objects.requireNonNull(providerFqcn);
+ return requiredProviders.containsKey(providerFqcn);
+ }
+
+ /**
+ * Checks whether this plugin declares the given required provider.
+ *
+ * @param providerClass the provider class
+ * @return {@code true} if the provider is present in this plugin
+ */
+ public boolean requiresProvider(Class<?> providerClass) {
+ Objects.requireNonNull(providerClass);
+ return requiresProvider(DescriptorFormat.transformClassName(providerClass));
+ }
+
+ /**
+ * Returns the declared required API level for the given provider.
+ * The provider must be declared by this descriptor; check with {@link #requiresProvider(String)} first.
+ *
+ * @param providerFqcn the fully qualified provider class name
+ * @return the required provider API level
+ */
+ public int requiredProviderLevel(String providerFqcn) {
+ Objects.requireNonNull(providerFqcn);
+ return requiredProviders.get(providerFqcn);
+ }
+
+ /**
+ * Returns the declared required API level for the given provider.
+ * The provider must be declared by this descriptor; check with {@link #requiresProvider(Class)} first.
+ *
+ * @param providerClass the provider class
+ * @return the required provider API level
+ */
+ public int requiredProviderLevel(Class<?> providerClass) {
+ Objects.requireNonNull(providerClass);
+ return requiredProviderLevel(DescriptorFormat.transformClassName(providerClass));
+ }
+}
\ No newline at end of file
diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java
new file mode 100644
index 0000000..d20b3cb
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java
@@ -0,0 +1,206 @@
+package io.gdcc.spi.meta.descriptor;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.io.Writer;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Properties;
+
+/**
+ * Defines constants for the format and structure of plugin descriptor files.
+ * These descriptors provide metadata about plugins, including their
+ * implementation class, type, contracts, and required providers.
+ *
+ *
+ * - DESCRIPTOR_DIRECTORY: Specifies the directory where plugin descriptor files are located.
+ * - PLUGIN_CLASS_FIELD: Defines the key used to identify the plugin's implementation class.
+ * - PLUGIN_KIND_FIELD: Defines the key used to specify the base contract type of the plugin.
+ * - CONTRACT_PREFIX and CONTRACT_SUFFIX: Define the keys used to denote the contracts
+ * implemented by the plugin and their associated API levels.
+ * - REQUIRED_PROVIDER_PREFIX and REQUIRED_PROVIDER_SUFFIX: Define the keys used to represent
+ * required providers and their associated API levels.
+ *
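+ * For illustration, a descriptor serialized with these constants would look roughly like the
+ * following properties file (class names are hypothetical):
+ * {@code
+ * plugin.class=org.example.Grill
+ * plugin.kind=org.example.FooBar
+ * plugin.implements.org.example.FooBar.level=1
+ * plugin.implements.org.example.BarBeque.level=1
+ * plugin.requires.org.example.SettingsProvider.level=1
+ * }
+ *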
+ */
+public final class DescriptorFormat {
+ public static final String DESCRIPTOR_DIRECTORY = "META-INF/dataverse/plugins/";
+ public static final String DESCRIPTOR_EXTENSION = ".properties";
+
+ public static final String PLUGIN_CLASS_FIELD = "plugin.class";
+ public static final String PLUGIN_KIND_FIELD = "plugin.kind";
+ public static final String CONTRACT_PREFIX = "plugin.implements.";
+ public static final String CONTRACT_SUFFIX = ".level";
+ public static final String REQUIRED_PROVIDER_PREFIX = "plugin.requires.";
+ public static final String REQUIRED_PROVIDER_SUFFIX = ".level";
+
+ private DescriptorFormat() {
+ /* Intentionally left blank for helper class */
+ }
+
+ /**
+ * Transforms the provided class into its fully qualified name representation.
+ * Reusable in different places to keep serialization from class to FQCN aligned.
+ *
+ * @param klass the {@link Class} object whose name is to be returned
+ * @return the fully qualified name of the specified class, as returned by {@link Class#getName()}.
+ * Note: the canonical name is deliberately not used to avoid issues with inner classes and de/serialization.
+ */
+ public static String transformClassName(Class> klass) {
+ return klass.getName();
+ }
+
+ public static String toFilename(Class> klass) {
+ return toFilename(transformClassName(klass));
+ }
+
+ public static String toFilename(String fqcn) {
+ // The FQCN may contain "$" from inner classes. This would be bad for filenames.
+ return fqcn.replace('$', '.') + DESCRIPTOR_EXTENSION;
+ }
+
+ public static String toPath(Class> klass) {
+ return toPath(transformClassName(klass));
+ }
+
+ public static String toPath(String fqcn) {
+ return DESCRIPTOR_DIRECTORY + toFilename(fqcn);
+ }
+
+ public static String toContractLevel(Class> contractClass) {
+ return toContractLevel(transformClassName(contractClass));
+ }
+
+ public static String toContractLevel(String contractFQCN) {
+ return CONTRACT_PREFIX + contractFQCN + CONTRACT_SUFFIX;
+ }
+
+ public static String toRequiredProviderLevel(Class> providerClass) {
+ return toRequiredProviderLevel(transformClassName(providerClass));
+ }
+
+ public static String toRequiredProviderLevel(String providerFQCN) {
+ return REQUIRED_PROVIDER_PREFIX + providerFQCN + REQUIRED_PROVIDER_SUFFIX;
+ }
+
+ /**
+ * Serializes the provided {@link Descriptor} into the given {@link Writer}
+ * in the form of a properties file, encoding plugin metadata such as plugin class,
+ * plugin kind, implemented contracts, and required providers.
+ *
+ * @param descriptor the {@link Descriptor} containing the plugin metadata to be serialized
+ * @param writer the {@link Writer} where the plugin properties will be written
+ * @throws IOException if an I/O error occurs while writing to the {@link Writer}
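+ *
+ * A minimal usage sketch, assuming the caller owns a hypothetical {@code outputPath}:
+ * {@code
+ * try (Writer out = Files.newBufferedWriter(outputPath, StandardCharsets.UTF_8)) {
+ * DescriptorFormat.write(descriptor, out);
+ * }
+ * }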
+ */
+ public static void write(Descriptor descriptor, Writer writer) throws IOException {
+ Properties properties = new Properties();
+ properties.setProperty(PLUGIN_CLASS_FIELD, descriptor.klass());
+ properties.setProperty(PLUGIN_KIND_FIELD, descriptor.kind());
+
+ descriptor.contracts().forEach((contract, level) ->
+ properties.setProperty(toContractLevel(contract), Integer.toString(level)));
+
+ descriptor.requiredProviders().forEach((provider, level) ->
+ properties.setProperty(toRequiredProviderLevel(provider), Integer.toString(level)));
+
+ properties.store(writer, "Generated plugin contract metadata");
+ }
+
+ /**
+ * Reads a plugin descriptor from the serialized properties format.
+ *
+ * The returned descriptor contains the mandatory plugin class and base contract fields,
+ * plus all parsed contract/provider API levels found in the input.
+ *
+ * @param reader the character stream containing serialized descriptor properties
+ * @return the parsed descriptor.
+ * @throws IOException if the properties cannot be read
+ * @throws IllegalArgumentException if mandatory fields are missing or if any level value
+ * cannot be parsed as an integer
+ */
+ public static Descriptor read(Reader reader) throws IOException {
+ Properties properties = new Properties();
+ properties.load(reader);
+
+ String pluginClass = properties.getProperty(PLUGIN_CLASS_FIELD);
+ if (pluginClass == null || pluginClass.isBlank()) {
+ throw new IllegalArgumentException("Missing required property " + PLUGIN_CLASS_FIELD);
+ }
+
+ String pluginKind = properties.getProperty(PLUGIN_KIND_FIELD);
+ if (pluginKind == null || pluginKind.isBlank()) {
+ throw new IllegalArgumentException("Missing required property " + PLUGIN_KIND_FIELD);
+ }
+
+ Map<String, Integer> contracts = new LinkedHashMap<>();
+ Map<String, Integer> requiredProviders = new LinkedHashMap<>();
+
+ for (String key : properties.stringPropertyNames()) {
+ if (PLUGIN_CLASS_FIELD.equals(key) || PLUGIN_KIND_FIELD.equals(key)) {
+ continue;
+ }
+
+ if (key.startsWith(CONTRACT_PREFIX) && key.endsWith(CONTRACT_SUFFIX)) {
+ String contractName = key.substring(
+ CONTRACT_PREFIX.length(),
+ key.length() - CONTRACT_SUFFIX.length()
+ );
+ contracts.put(contractName, parseLevel(properties.getProperty(key), key));
+ }
+
+ if (key.startsWith(REQUIRED_PROVIDER_PREFIX) && key.endsWith(REQUIRED_PROVIDER_SUFFIX)) {
+ String providerName = key.substring(
+ REQUIRED_PROVIDER_PREFIX.length(),
+ key.length() - REQUIRED_PROVIDER_SUFFIX.length()
+ );
+ requiredProviders.put(providerName, parseLevel(properties.getProperty(key), key));
+ }
+ }
+
+ return new Descriptor(
+ pluginClass,
+ pluginKind,
+ contracts,
+ requiredProviders
+ );
+ }
+
+ /**
+ * Reads a plugin descriptor from the given string content.
+ *
+ * This method parses the input string into a {@link Descriptor} object. It internally utilizes
+ * a {@link StringReader} to read the string and expects the content to be in a properties-based serialized format.
+ *
+ * @param content the string content containing serialized plugin properties
+ * @return the parsed {@link Descriptor}
+ * @throws RuntimeException if an I/O error occurs
+ * @throws IllegalArgumentException if mandatory fields are missing
+ */
+ public static Descriptor read(String content) {
+ Descriptor descriptor = null;
+
+ try (StringReader reader = new StringReader(content)) {
+ descriptor = read(reader);
+ } catch (IOException e) {
+ // As we read from an in-memory string, this seems highly unlikely to happen.
+ throw new RuntimeException(e);
+ }
+
+ return descriptor;
+ }
+
+ private static int parseLevel(String value, String key) {
+ if (value == null || value.isBlank()) {
+ throw new IllegalArgumentException("Missing level value for property " + key);
+ }
+
+ try {
+ int level = Integer.parseInt(value);
+ if (level < 1)
+ throw new IllegalArgumentException("Invalid integer value for property " + key + " may not be < 1, but is: " + value);
+ return level;
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException("Invalid integer value for property " + key + ": " + value, e);
+ }
+ }
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java
new file mode 100644
index 0000000..0bb83d3
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java
@@ -0,0 +1,229 @@
+package io.gdcc.spi.meta.descriptor;
+
+import java.io.BufferedReader;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+
+import static io.gdcc.spi.meta.descriptor.DescriptorFormat.DESCRIPTOR_DIRECTORY;
+import static io.gdcc.spi.meta.descriptor.DescriptorFormat.DESCRIPTOR_EXTENSION;
+import static io.gdcc.spi.meta.descriptor.DescriptorFormat.read;
+
+public final class DescriptorScanner {
+
+ private DescriptorScanner() {
+ /* Intentionally private constructor for helper class without instances */
+ }
+
+ /**
+ * Scans the specified path to identify plugin descriptors. The path can either be a directory
+ * or a JAR file. The method determines the type of the path and invokes the appropriate
+ * scanning logic to extract plugin descriptors.
+ *
+ * @param path the path to be scanned; must not be null. If the path represents a directory,
+ * descriptor files contained within it will be scanned. If the path represents
+ * a JAR file, its internal entries will be scanned for descriptors.
+ * @return a list of {@link SourcedDescriptor} objects representing plugin descriptors
+ * found at the given path. The list will be empty if no descriptors are found.
+ * @throws IllegalArgumentException if the provided {@code path} is {@code null} or scanning fails for other caller-induced reasons.
+ * @throws IOException if an I/O error occurs while accessing the specified path or its contents.
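+ *
+ * A minimal usage sketch (the scanned path is hypothetical):
+ * {@code
+ * List<SourcedDescriptor> found = DescriptorScanner.scanPath(Path.of("plugins/my-plugin.jar"));
+ * for (SourcedDescriptor sourced : found) {
+ * boolean registered = DescriptorScanner.hasServiceProviderInterfaceRecord(sourced);
+ * }
+ * }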
+ */
+ public static List<SourcedDescriptor> scanPath(Path path) throws IOException {
+ List<SourcedDescriptor> scanResult = new ArrayList<>();
+
+ if (path == null) {
+ throw new IllegalArgumentException("Path may not be null");
+ }
+ if (Files.isDirectory(path)) {
+ scanDirectory(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin)));
+ } else {
+ scanJar(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin)));
+ }
+
+ return List.copyOf(scanResult);
+ }
+
+ /**
+ * Scans the specified JAR file for plugin descriptor entries and extracts them into a list of
+ * {@link Descriptor} objects. The method looks for descriptor files based on predefined
+ * directory and file extension constants.
+ *
+ * @param jarPath the path to the JAR file to be scanned; must be a valid, readable, and regular file
+ * with a ".jar" extension. Usage of symbolic links is allowed.
+ * @return a list of {@link Descriptor} objects extracted from the JAR file. If no plugin
+ * descriptors are found, the returned list will be empty.
+ * @throws IllegalArgumentException if the provided {@code jarPath} is {@code null}, does not exist,
+ * is unreadable, is not a regular file, or does not have a ".jar"
+ * extension.
+ * @throws IOException if an I/O error occurs while reading the JAR file or its entries.
+ */
+ static List<Descriptor> scanJar(Path jarPath) throws IOException {
+ if (jarPath == null || !Files.exists(jarPath) || !Files.isReadable(jarPath) ||
+ !Files.isRegularFile(jarPath) || !jarPath.getFileName().toString().toLowerCase().endsWith(".jar")) {
+ throw new IllegalArgumentException("jarPath '" + jarPath + "' is not a readable JAR file");
+ }
+
+ // Iterate over the entries in the JAR file, read the ones we know to be plugin descriptors
+ List<Descriptor> descriptors = new ArrayList<>();
+ try (var jarFile = new JarFile(jarPath.toFile())) {
+ for (Iterator<JarEntry> it = jarFile.entries().asIterator(); it.hasNext(); ) {
+ JarEntry entry = it.next();
+ String name = entry.getName();
+
+ if (name.startsWith(DESCRIPTOR_DIRECTORY) && name.endsWith(DESCRIPTOR_EXTENSION)) {
+ try (InputStreamReader reader = new InputStreamReader(jarFile.getInputStream(entry), StandardCharsets.UTF_8)) {
+ Descriptor descriptor = read(reader);
+ descriptors.add(descriptor);
+ }
+ }
+ }
+ }
+
+ return List.copyOf(descriptors);
+ }
+
+ /**
+ * Scans the specified directory for plugin descriptor files and extracts them into a list of
+ * {@link Descriptor} objects. The method searches for descriptor files in a predefined
+ * subdirectory and processes files with a specific file extension.
+ *
+ * @param root the root directory to be scanned; must be a valid, readable, and existing directory.
+ * @return a list of {@link Descriptor} objects extracted from the directory. If no plugin
+ * descriptors are found, the returned list will be empty.
+ * @throws IllegalArgumentException if the provided {@code root} is {@code null}, does not exist,
+ * is unreadable, or is not a directory.
+ * @throws IOException if an I/O error occurs while reading the directory or its contents.
+ */
+ static List<Descriptor> scanDirectory(Path root) throws IOException {
+ if (root == null || !Files.exists(root) || !Files.isReadable(root) || !Files.isDirectory(root)) {
+ throw new IllegalArgumentException("directory '" + root + "' is not a readable directory");
+ }
+
+ // Look up the plugin metadata directory - if it does not exist, there are no plugins here.
+ Path descriptorDir = root.resolve(DescriptorFormat.DESCRIPTOR_DIRECTORY);
+ if (!Files.isDirectory(descriptorDir)) {
+ return List.of();
+ }
+
+ // Scan the directory for plugin metadata, read it, and add it to a list
+ List<Descriptor> descriptors = new ArrayList<>();
+ try (var paths = Files.list(descriptorDir)) {
+ for (Path path : paths.toList()) {
+ String name = path.getFileName().toString();
+
+ if (name.endsWith(DESCRIPTOR_EXTENSION)) {
+ try (FileReader reader = new FileReader(path.toFile(), StandardCharsets.UTF_8)) {
+ Descriptor descriptor = read(reader);
+ descriptors.add(descriptor);
+ }
+ }
+ }
+ }
+
+ return List.copyOf(descriptors);
+ }
+
+
+ /**
+ * Checks whether the source referenced by the given descriptor contains a Java SPI service
+ * configuration file for the descriptor's declared kind, and whether that file explicitly
+ * lists the descriptor's implementation class.
+ *
+ * The source location is expected to point either to a directory root or to a JAR file.
+ * In the directory case, this method looks for a regular file at
+ * {@code META-INF/services/} below that root. In the JAR case, it looks for the
+ * corresponding JAR entry.
+ *
+ * If the SPI record exists, its contents are interpreted using UTF-8. Blank lines,
+ * leading/trailing whitespace, and comments introduced by {@code #} are ignored in the
+ * same spirit as standard Java service configuration files.
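+ *
+ * For illustration (names are hypothetical): a matching SPI record for kind {@code org.example.FooBar}
+ * would be the file {@code META-INF/services/org.example.FooBar} containing the line
+ * {@code org.example.Grill}; comments after {@code #} and surrounding whitespace are ignored.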
+ *
+ * @param descriptor the descriptor whose source and implementation metadata should be checked
+ * @return {@code true} if a matching SPI record exists and contains the descriptor's
+ * implementation class; {@code false} if no such SPI record exists or the record
+ * does not list that implementation
+ * @throws IllegalArgumentException if the descriptor points to a source location that does not exist
+ * @throws IOException if an I/O error occurs while reading the directory entry or JAR entry
+ */
+ public static boolean hasServiceProviderInterfaceRecord(SourcedDescriptor descriptor) throws IOException {
+ String spiLocation = "META-INF/services/" + descriptor.plugin().kind();
+ Path source = descriptor.sourceLocation();
+
+ // The descriptor should already be vetted before reaching this point, so we keep validation
+ // intentionally lightweight here and only reject obviously invalid sources.
+ if (Files.notExists(source)) {
+ throw new IllegalArgumentException("Source descriptor contained non-existing source location " + source);
+ }
+
+ // Strategy:
+ // - If the source is a directory, open the SPI file directly from the filesystem.
+ // - Otherwise, treat the source as an archive and look for the SPI record as a JAR entry.
+ // In both cases we funnel the actual content check through the same InputStream-based helper.
+ if (Files.isDirectory(source)) {
+ Path serviceFile = source.resolve(spiLocation);
+
+ // No SPI record file at the expected location means there is nothing to match.
+ if (!Files.isRegularFile(serviceFile)) {
+ return false;
+ }
+
+ // Open the regular file only for the duration of the content check.
+ try (InputStream serviceRecord = Files.newInputStream(serviceFile)) {
+ return spiRecordContains(serviceRecord, descriptor.plugin().klass());
+ }
+ }
+
+ // Important: the JAR must stay open for as long as the entry InputStream is being read.
+ // Therefore, both resources are owned by nested try-with-resources blocks in the same scope.
+ try (JarFile jar = new JarFile(source.toFile())) {
+ JarEntry entry = jar.getJarEntry(spiLocation);
+
+ // Missing JAR entry means there is no SPI record for the declared kind.
+ if (entry == null) {
+ return false;
+ }
+
+ // Read the JAR entry while the JAR is still open, then close both resources automatically.
+ try (InputStream serviceRecord = jar.getInputStream(entry)) {
+ return spiRecordContains(serviceRecord, descriptor.plugin().klass());
+ }
+ }
+ }
+
+ /**
+ * Reads a Java SPI service configuration stream and checks whether it declares the given implementation class.
+ *
+ * Lines are normalized in a tolerant way: comments beginning with {@code #} are stripped,
+ * surrounding whitespace is trimmed, and empty lines are ignored.
+ */
+ private static boolean spiRecordContains(InputStream serviceRecord, String implementationClass) throws IOException {
+ // This helper intentionally contains the shared parsing logic so that directory-based
+ // and JAR-based SPI records are interpreted in exactly the same way.
+ try (
+ InputStreamReader streamReader = new InputStreamReader(serviceRecord, StandardCharsets.UTF_8);
+ BufferedReader reader = new BufferedReader(streamReader)
+ ) {
+ return reader.lines()
+ // Strip inline comments to support standard SPI syntax.
+ .map(line -> {
+ int commentStart = line.indexOf('#');
+ return commentStart >= 0 ? line.substring(0, commentStart) : line;
+ })
+ // Normalize whitespace so that indented or padded entries still match.
+ .map(String::trim)
+ // Skip blank lines after normalization.
+ .filter(line -> !line.isEmpty())
+ // Finally, look for the implementation class declared by the descriptor.
+ .anyMatch(line -> line.equals(implementationClass));
+ }
+ }
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java
new file mode 100644
index 0000000..ecea6bd
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java
@@ -0,0 +1,134 @@
+package io.gdcc.spi.meta.descriptor;
+
+import io.gdcc.spi.meta.plugin.CoreProvider;
+import io.gdcc.spi.meta.plugin.Plugin;
+
+import java.nio.file.Path;
+import java.util.Map;
+import java.util.Objects;
+import java.util.OptionalInt;
+
+/**
+ * Runtime-facing descriptor of a resolved and loaded plugin implementation.
+ *
+ * This descriptor represents a plugin after metadata has been interpreted in the context of the
+ * running application and the relevant Java types have been resolved. Unlike the build-time or
+ * serialized descriptor form, this model uses actual {@link Class} references for the plugin
+ * implementation, its base contract, all implemented plugin contracts, and all required core
+ * providers.
+ *
+ * The generic type parameter {@code T} represents the base plugin contract under which the plugin
+ * was resolved and loaded. The {@code pluginClass} is therefore guaranteed to implement that base
+ * contract, while {@code kindClass} denotes the concrete base plugin contract itself.
+ *
+ * The {@code contracts} map contains all plugin contracts implemented by the plugin together with
+ * their declared API levels. This includes the base contract as well as any optional capability
+ * contracts. The {@code requiredProviders} map contains all core providers required by the plugin's
+ * implemented contracts, again paired with their declared API levels.
+ *
+ * @param <T> the base plugin contract type under which this plugin was resolved
+ * @param sourceLocation the source location from which the plugin was loaded, such as a JAR file or
+ * exploded classpath directory
+ * @param identity the logical plugin identity reported by the plugin instance; intended for
+ * distinguishing plugins at runtime
+ * @param pluginClass the concrete implementation class of the plugin
+ * @param kindClass the resolved base plugin contract implemented by the plugin
+ * @param contracts all resolved plugin contracts implemented by the plugin, mapped to their
+ * declared API levels
+ * @param requiredProviders all resolved core providers required by the plugin, mapped to their
+ * required API levels
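+ *
+ * A minimal usage sketch, assuming {@code resolved} is a {@code PluginDescriptor<FooBar>} obtained from
+ * the loader (contract and provider names are illustrative):
+ * {@code
+ * OptionalInt barbequeLevel = resolved.contractLevel(BarBeque.class);
+ * boolean needsSettings = resolved.requiresProvider(SettingsProvider.class);
+ * }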
+ */
+public record PluginDescriptor<T extends Plugin>(
+ Path sourceLocation,
+ String identity,
+ Class<? extends T> pluginClass,
+ Class<T> kindClass,
+ Map<Class<? extends Plugin>, Integer> contracts,
+ Map<Class<? extends CoreProvider>, Integer> requiredProviders
+) {
+
+ public PluginDescriptor {
+ Objects.requireNonNull(sourceLocation);
+ Objects.requireNonNull(identity);
+ Objects.requireNonNull(pluginClass);
+ Objects.requireNonNull(kindClass);
+ Objects.requireNonNull(contracts);
+ Objects.requireNonNull(requiredProviders);
+
+ // Immutability is key
+ contracts = Map.copyOf(contracts);
+ requiredProviders = Map.copyOf(requiredProviders);
+
+ // Sane structure checks
+ if (identity.isBlank())
+ throw new IllegalArgumentException("Plugin identity cannot be blank");
+ if (contracts.isEmpty())
+ throw new IllegalArgumentException("Plugin must implement at least one contract (the kindClass one)");
+ }
+
+ public boolean implementsContract(Class<? extends Plugin> contractClass) {
+ return this.contracts.containsKey(contractClass);
+ }
+
+ public OptionalInt contractLevel(Class<? extends Plugin> contractClass) {
+ return implementsContract(contractClass)
+ ? OptionalInt.of(this.contracts.get(contractClass))
+ : OptionalInt.empty();
+ }
+
+ public boolean requiresProvider(Class<? extends CoreProvider> providerClass) {
+ return this.requiredProviders.containsKey(providerClass);
+ }
+
+ public OptionalInt requiredProviderLevel(Class<? extends CoreProvider> providerClass) {
+ return requiresProvider(providerClass)
+ ? OptionalInt.of(this.requiredProviders.get(providerClass))
+ : OptionalInt.empty();
+ }
+
+ /**
+ * Returns the normalized identity string of this plugin.
+ * The normalization process converts the identity to lowercase
+ * and removes special characters such as "/\\-_:.#~*", ensuring
+ * a consistent format for comparison purposes.
+ *
+ * @return the normalized identity string
+ */
+ public String normalizedIdentity() {
+ return normalizeIdentity(this.identity);
+ }
+
+ /**
+ * Normalizes the given identity string for comparison purposes by converting it to lowercase
+ * and removing all occurrences of the characters "/\-_:.#~*", which are commonly used to separate words.
+ * This avoids having multiple plugins targeting the same thing, like an export format with a slightly different
+ * case or special characters.
+ *
+ * @param identity the identity string to normalize
+ * @return the normalized identity string, or null if the input is null
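+ *
+ * For example, an identity of {@code "Geo-JSON_v2"} would normalize to {@code "geojsonv2"} (illustrative values).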
+ */
+ private String normalizeIdentity(String identity) {
+ if (identity == null) return null;
+ return identity.toLowerCase().replaceAll("[/\\\\_\\-:.#~*]+", "");
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) return true;
+ if (obj == null || getClass() != obj.getClass()) return false;
+
+ PluginDescriptor<?> that = (PluginDescriptor<?>) obj;
+
+ return Objects.equals(sourceLocation, that.sourceLocation) &&
+ Objects.equals(pluginClass, that.pluginClass) &&
+ (
+ Objects.equals(identity, that.identity) ||
+ Objects.equals(normalizeIdentity(identity), normalizeIdentity(that.identity))
+ );
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(sourceLocation, pluginClass, normalizeIdentity(identity));
+ }
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java
new file mode 100644
index 0000000..a94bddb
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java
@@ -0,0 +1,23 @@
+package io.gdcc.spi.meta.descriptor;
+
+import java.nio.file.Path;
+import java.util.Objects;
+
+/**
+ * A record representing a descriptor that is sourced from a specific location.
+ * Combines information about a descriptor and its source location.
+ *
+ * @param sourceLocation the path to the source location of the descriptor, must not be null
+ * @param plugin the {@link Descriptor} representing the plugin information, must not be null
+ */
+public record SourcedDescriptor(Path sourceLocation, Descriptor plugin) {
+
+ public SourcedDescriptor {
+ Objects.requireNonNull(sourceLocation);
+ Objects.requireNonNull(plugin);
+ }
+
+ public boolean isOfKind(Class<?> contractClass) {
+ return plugin.isOfKind(contractClass);
+ }
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java b/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java
new file mode 100644
index 0000000..5b70630
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java
@@ -0,0 +1,15 @@
+package io.gdcc.spi.meta.plugin;
+
+/**
+ * Represents a provider interface for core functionality within the plugin system.
+ * CoreProvider serves as a marker or extension point within the framework to be implemented
+ * by classes that provide essential services or functionality to the core system.
+ *
+ * Implementations of this interface are expected to integrate with the broader plugin system,
+ * potentially enabling the core system to interface with specific features or subsystems.
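+ *
+ * A minimal sketch of a provider contract, assuming a hypothetical settings lookup service:
+ * {@code
+ * public interface SettingsProvider extends CoreProvider {
+ * int API_LEVEL = 1;
+ * Optional<String> lookup(String key);
+ * }
+ * }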
+ *
+ * @see Plugin
+ */
+public interface CoreProvider {
+
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java b/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java
new file mode 100644
index 0000000..8edb989
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java
@@ -0,0 +1,26 @@
+package io.gdcc.spi.meta.plugin;
+
+/**
+ * Represents the contract for plugins in the system. Implementations of this interface serve
+ * as modular components that can be dynamically loaded and integrated into the broader application.
+ *
+ * Each plugin must provide a unique, machine-readable identifier to ensure proper identification
+ * and usage within the system.
+ *
+ * Implementers are required to define the {@link #identity()} method to specify their unique
+ * identifier.
+ *
+ * @see CoreProvider
+ */
+public interface Plugin {
+
+ /**
+ * Returns the unique, machine-readable identifier for this plugin.
+ * This will be the primary key within the core to identify a specific plugin implementation.
+ *
+ * @return the plugin's identity string, which must be non-null, non-blank, and URL compatible.
+ * @implSpec This method must be overridden by any plugin implementation and return a non-null, non-blank,
+ * URL-compatible string. No plugin interface may provide a default implementation.
+ */
+ String identity();
+}
diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java
new file mode 100644
index 0000000..da63fef
--- /dev/null
+++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java
@@ -0,0 +1,1587 @@
+package io.gdcc.spi.meta.processor;
+
+import io.gdcc.spi.meta.annotations.PluginContract;
+import io.gdcc.spi.meta.descriptor.Descriptor;
+import io.gdcc.spi.meta.descriptor.DescriptorFormat;
+import io.gdcc.spi.meta.plugin.Plugin;
+
+import javax.annotation.processing.AbstractProcessor;
+import javax.annotation.processing.ProcessingEnvironment;
+import javax.annotation.processing.RoundEnvironment;
+import javax.lang.model.SourceVersion;
+import javax.lang.model.element.AnnotationMirror;
+import javax.lang.model.element.AnnotationValue;
+import javax.lang.model.element.Element;
+import javax.lang.model.element.ElementKind;
+import javax.lang.model.element.ExecutableElement;
+import javax.lang.model.element.Modifier;
+import javax.lang.model.element.TypeElement;
+import javax.lang.model.element.VariableElement;
+import javax.lang.model.type.DeclaredType;
+import javax.lang.model.type.TypeKind;
+import javax.lang.model.type.TypeMirror;
+import javax.lang.model.util.Elements;
+import javax.lang.model.util.Types;
+import javax.tools.Diagnostic;
+import javax.tools.FileObject;
+import javax.tools.StandardLocation;
+import java.io.IOException;
+import java.io.Writer;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.Deque;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+/**
+ * Annotation processor generating build-time metadata for plugin implementations.
+ *
+ * This processor scans classes marked with {@code @DataversePlugin}, discovers all implemented
+ * plugin contracts annotated with {@code @PluginContract}, validates the contract graph, and emits:
+ *
+ *
+ * - a per-plugin descriptor under {@value DescriptorFormat#DESCRIPTOR_DIRECTORY}, and
+ * - a {@code META-INF/services/...} entry for the base plugin contract when safe to do so.
+ *
+ *
+ * Note:
+ * The processor enforces contract rules for all top-level and member interfaces in compiled source files.
+ * Runtime validation in the PluginLoader will catch illegal usage in contracts loaded from external JARs
+ * or assembled outside normal compilation that the processor cannot see and validate (like anonymous or method-local interfaces).
+ *
+ *
+ * The descriptor captures the build-time view of:
+ *
+ * - the plugin implementation class,
+ * - the plugin's base contract,
+ * - all implemented contract API levels,
+ * - all required provider API levels.
+ *
+ *
+ * Contract graph rules
+ *
+ * The processor enforces a strict contract hierarchy:
+ *
+ * - A {@link PluginContract.Role#BASE base contract} is the unique, directly loadable identity
+ * of a plugin. Base contracts may not extend other contracts and may not declare
+ * {@code requires}.
+ * - A {@link PluginContract.Role#CAPABILITY capability contract} adds optional functionality.
+ * It must declare exactly one base contract in {@code requires}. A capability may optionally
+ * extend its required base contract in the Java type hierarchy to provide default
+ * implementations for methods declared by the base. A capability may not extend another
+ * capability.
+ *
+ *
+ * Service registration
+ *
+ * Service registration generation is intentionally cautious. If any implementation of a given base
+ * contract uses {@code @AutoService}, this processor suppresses generated service output for that
+ * entire contract to avoid two processors writing the same {@code META-INF/services/...} file.
+ *
+ * Errors are reported against the offending source element and then converted into a local
+ * {@link ProcessorException}. This aborts processing of the current implementation only, allowing
+ * the processor to continue and surface additional problems in the same compilation run.
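+ *
+ * For illustration (names are hypothetical): compiling a class {@code org.example.Grill} annotated with
+ * {@code @DataversePlugin} and implementing the base contract {@code org.example.FooBar} would emit a
+ * descriptor under {@code META-INF/dataverse/plugins/} and, absent {@code @AutoService}, a service file
+ * {@code META-INF/services/org.example.FooBar} listing {@code org.example.Grill}.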
+ */
+public final class PluginContractProcessor extends AbstractProcessor {
+
+ /**
+ * Output directory for generated ServiceLoader files.
+ */
+ private static final String SERVICES_DIRECTORY = "META-INF/services/";
+
+ /**
+ * Cached utility for type operations such as assignability checks.
+ */
+ private Types types;
+
+ /**
+ * Cached utility for element lookup and annotation default resolution.
+ */
+ private Elements elements;
+
+ /**
+ * Descriptor models accumulated during processing, keyed by implementation class name.
+ *
+ * Descriptors are written only after processing is over, which keeps resource generation
+ * deterministic and avoids partial aggregate state.
+ */
+ private final Map<String, Descriptor> descriptors = new LinkedHashMap<>();
+
+ /**
+ * Service registrations grouped by base contract name.
+ *
+ * Each map entry corresponds to one future {@code META-INF/services/} file. A sorted
+ * set is used to make generated output stable across compiler runs.
+ */
+ private final Map<String, Set<String>> serviceImplementationsByContract = new LinkedHashMap<>();
+
+ /**
+ * Base contract names for which service file generation must be skipped.
+ *
+ * If any implementation of a base contract uses {@code @AutoService}, that service type is
+ * considered externally managed and this processor suppresses its own output for the same path.
+ * This way, we do not have a race condition / conflict over one service file.
+ */
+ private final Set<String> serviceTypesManagedExternally = new LinkedHashSet<>();
+
+ /**
+ * Types already inspected during the current compilation.
+ *
+ * The processor performs additional model-wide validation beyond explicit {@code @DataversePlugin}
+ * usages. Since the same type may reappear through multiple roots or hierarchy traversals, this set
+ * keeps those checks idempotent and avoids duplicate diagnostics.
+ */
+ private final Set<String> inspectedTypes = new LinkedHashSet<>();
+
+ /**
+ * Plugin implementations already converted into generated output models.
+ *
+ * This is needed because implementations may be processed either explicitly through
+ * {@code @DataversePlugin} or implicitly when they are discovered as plain {@code Plugin}
+ * implementations during hierarchy inspection.
+ */
+ private final Set<String> processedImplementations = new LinkedHashSet<>();
+
+ /**
+ * Initializes compiler utility helpers from the processing environment.
+ *
+ * @param processingEnv the active annotation processing environment
+ */
+ @Override
+ public synchronized void init(ProcessingEnvironment processingEnv) {
+ super.init(processingEnv);
+ this.types = processingEnv.getTypeUtils();
+ this.elements = processingEnv.getElementUtils();
+ }
+
+ /**
+ * Returns the annotation types directly claimed by this processor.
+ *
+ * The processor claims all annotations because it does not only react to explicitly annotated
+ * {@code @DataversePlugin} classes. It also performs project-wide validation for plugin contracts,
+ * provider contracts, and unannotated plugin implementations discovered in the type model.
+ *
+ * @return the supported top-level annotation types
+ */
+ @Override
+ public Set getSupportedAnnotationTypes() {
+ return Set.of("*");
+ }
+
+ /**
+ * Advertises support for the latest source version understood by the current compiler.
+ *
+ * This is preferred over a hard-coded release because the processor mainly operates on the
+ * annotation/type model and should remain usable across newer Java releases automatically.
+ *
+ * @return the latest source version supported by the running compiler
+ */
+ @Override
+ public SourceVersion getSupportedSourceVersion() {
+ return SourceVersion.latestSupported();
+ }
+
+ /**
+ * Main processor entry point for each annotation processing round.
+ *
+ * During normal rounds, this processor performs two tasks:
+ *
+ * - it inspects all root types and their hierarchies for project-wide contract validation,
+ * - it processes explicitly annotated {@code @DataversePlugin} classes.
+ *
+ *
+ * During the final round, all accumulated descriptor and service models are written to the
+ * compiler output.
+ *
+ * @param annotations the annotations requested for this round
+ * @param roundEnv the current round environment
+ * @return {@code false} so other processors may continue to participate normally
+ */
+ @Override
+ public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
+ TypeElement markerAnnotation = elements.getTypeElement(ProcessorConstants.PLUGIN_IMPLEMENTATION_ANNOTATION);
+ if (markerAnnotation == null) {
+ // If the marker annotation itself cannot be resolved, something is wrong with the
+ // processor classpath. Returning false leaves room for other processors to continue.
+ return false;
+ }
+
+ // Inspect all roots, not just annotated types. This enables strict enforcement for
+ // plugin/provider contracts and lets us discover plain Plugin implementations that
+ // should have used @DataversePlugin.
+ for (Element root : roundEnv.getRootElements()) {
+ if (root instanceof TypeElement typeElement) {
+ try {
+ inspectTypeHierarchy(typeElement);
+ } catch (ProcessorException ignored) {
+ // The concrete error has already been reported with source location.
+ // Continue with remaining roots to surface as many problems as possible.
+ }
+ }
+ }
+
+ for (Element element : roundEnv.getElementsAnnotatedWith(markerAnnotation)) {
+ if (!(element instanceof TypeElement implementation)) {
+ error(element, "@DataversePlugin may only be used on classes");
+ continue;
+ }
+
+ try {
+ processImplementation(implementation);
+ } catch (ProcessorException ignored) {
+ // A concrete error has already been reported with source location.
+ // Continue with the next implementation so the user gets more than one error per run.
+ }
+ }
+
+ if (roundEnv.processingOver()) {
+ writeAllGeneratedResources();
+ }
+
+ return false;
+ }
+
+ // ── Implementation processing ───────────────────────────────────────────────
+
+ /**
+ * Processes one plugin implementation class.
+ *
+ * The workflow is:
+ *
+ * - validate the class structurally,
+ * - discover all implemented contracts in the full type hierarchy,
+ * - identify exactly one base contract,
+ * - collect contract and provider API levels,
+ * - record descriptor output,
+ * - record ServiceLoader output unless {@code @AutoService} takes over.
+ *
+ *
+ * @param implementation the plugin implementation class
+ */
+ private void processImplementation(TypeElement implementation) {
+ String implementationClassName = implementation.getQualifiedName().toString();
+ if (!processedImplementations.add(implementationClassName)) {
+ // The implementation was already processed earlier in this compilation, for example
+ // when discovered implicitly during type hierarchy inspection.
+ return;
+ }
+
+ validateImplementationClass(implementation);
+
+ Set<TypeElement> contracts = collectImplementedContracts(implementation);
+ if (contracts.isEmpty()) {
+ error(
+ implementation,
+ "No implemented plugin contracts found; " +
+ "implementations must implement a specific @PluginContract interface " +
+ "which itself extends " + Plugin.class.getName()
+ );
+ throw new ProcessorException();
+ }
+
+ TypeElement baseContract = null;
+ Map<String, Integer> contractLevels = new LinkedHashMap<>();
+ Map<String, Integer> providerLevels = new LinkedHashMap<>();
+
+ for (TypeElement contract : sortByQualifiedName(contracts)) {
+ PluginContractModel model = readPluginContractModel(contract);
+
+ if (model.role() == PluginContract.Role.BASE) {
+ if (baseContract != null) {
+ error(
+ implementation,
+ "Implementation must implement exactly one Role.BASE @PluginContract, but implements: "
+ + baseContract.getQualifiedName() + " and " + contract.getQualifiedName()
+ );
+ throw new ProcessorException();
+ }
+ baseContract = contract;
+ }
+
+ validateRequiredContracts(implementation, contract, contracts, model);
+
+ // The API level is intentionally read from the compile-time constant present on the
+ // contract interface visible during this compilation. This preserves the build-time
+ // contract snapshot we later need at runtime.
+ int contractApiLevel = readIntConstant(contract, ProcessorConstants.API_LEVEL_FIELD_NAME);
+ String contractFQCN = contract.getQualifiedName().toString();
+ // The following is just a precaution. As we look into these during compile time, it's hard to imagine
+ // a scenario where the levels ever actually differ.
+ if (contractLevels.containsKey(contractFQCN) && contractLevels.get(contractFQCN) != contractApiLevel) {
+ error(implementation, "Conflicting API levels on contract implementation: " + contractFQCN);
+ } else {
+ contractLevels.put(contractFQCN, contractApiLevel);
+ }
+
+ // Provider requirements accumulate across all implemented contracts/capabilities.
+ // Conflicting requirements are rejected below.
+ Map<String, Integer> requiredProviders = readProviderLevels(model.providers(), implementation);
+ mergeProviderLevels(providerLevels, requiredProviders, implementation);
+ }
+
+ if (baseContract == null) {
+ error(implementation, "Implementation must implement exactly one Role.BASE @PluginContract");
+ throw new ProcessorException();
+ }
+
+ String baseContractName = baseContract.getQualifiedName().toString();
+
+ descriptors.put(
+ implementationClassName,
+ new Descriptor(
+ implementationClassName,
+ baseContractName,
+ contractLevels,
+ providerLevels
+ )
+ );
+
+ if (hasAutoServiceAnnotation(implementation)) {
+ // Skip generated META-INF/services output for the entire base contract to avoid
+ // resource collisions with AutoService, which writes the same aggregate file path.
+ serviceTypesManagedExternally.add(baseContractName);
+ warning(
+ implementation,
+ "@AutoService detected; generated META-INF/services entry for "
+ + baseContractName
+ + " will be skipped to avoid conflicts"
+ );
+ } else {
+ serviceImplementationsByContract
+ .computeIfAbsent(baseContractName, ignored -> new TreeSet<>())
+ .add(implementationClassName);
+ }
+ }
+
+ /**
+ * Validates the basic structural requirements for a plugin implementation.
+ *
+ * A valid plugin implementation must be a public, non-abstract class. These constraints
+ * ensure that the ServiceLoader can instantiate the class at runtime.
+ *
+ * @param implementation the implementation class to validate
+ */
+ private void validateImplementationClass(TypeElement implementation) {
+ if (implementation.getKind() != ElementKind.CLASS) {
+ error(implementation, "@DataversePlugin may only be used on classes");
+ throw new ProcessorException();
+ }
+
+ if (!implementation.getModifiers().contains(Modifier.PUBLIC)) {
+ error(implementation, "@DataversePlugin implementations must be public");
+ throw new ProcessorException();
+ }
+
+ if (implementation.getModifiers().contains(Modifier.ABSTRACT)) {
+ error(implementation, "@DataversePlugin implementations must not be abstract");
+ throw new ProcessorException();
+ }
+ }
+
+ /**
+ * Collects all plugin contracts implemented by the given class, including inherited ones.
+ *
+ * The traversal walks the full type hierarchy breadth-first across both superclasses and
+ * interfaces so that indirectly inherited contracts and capability interfaces are discovered too.
+ *
+ * @param implementation the implementation class to inspect
+ * @return all implemented types recognized as plugin contracts
+ */
+ private Set<TypeElement> collectImplementedContracts(TypeElement implementation) {
+ Set<TypeElement> result = new LinkedHashSet<>();
+ Set<String> visited = new LinkedHashSet<>();
+ Deque<TypeMirror> queue = new ArrayDeque<>();
+ queue.addLast(implementation.asType());
+
+ while (!queue.isEmpty()) {
+ TypeMirror current = queue.removeFirst();
+ if (current.getKind() == TypeKind.NONE) {
+ continue;
+ }
+ if (!(current instanceof DeclaredType declaredType)) {
+ continue;
+ }
+
+ Element currentElement = declaredType.asElement();
+ if (!(currentElement instanceof TypeElement currentType)) {
+ continue;
+ }
+
+ String qualifiedName = currentType.getQualifiedName().toString();
+ if (!visited.add(qualifiedName)) {
+ continue;
+ }
+
+ if (isPluginContract(currentType)) {
+ result.add(currentType);
+ }
+
+ for (TypeMirror iface : currentType.getInterfaces()) {
+ queue.addLast(iface);
+ }
+
+ TypeMirror superclass = currentType.getSuperclass();
+ if (superclass != null && superclass.getKind() != TypeKind.NONE) {
+ queue.addLast(superclass);
+ }
+ }
+
+ return result;
+ }
+
+ /**
+ * Validates that all contracts required by the current contract are also implemented
+ * by the plugin implementation class.
+ *
+ * This ensures that a plugin implementing a capability also implements the capability's
+ * required base contract, which is the only loadable identity for the plugin.
+ *
+ * @param implementation the concrete plugin implementation
+ * @param contract the contract currently being validated
+ * @param allImplementedContracts all discovered contracts of the implementation
+ * @param model the parsed model of the current contract
+ */
+ private void validateRequiredContracts(
+ TypeElement implementation,
+ TypeElement contract,
+ Set<TypeElement> allImplementedContracts,
+ PluginContractModel model
+ ) {
+ Set<String> implementedNames = new LinkedHashSet<>();
+ for (TypeElement implemented : allImplementedContracts) {
+ implementedNames.add(implemented.getQualifiedName().toString());
+ }
+
+ for (TypeElement requiredContract : model.requiredContracts()) {
+ String requiredName = requiredContract.getQualifiedName().toString();
+ if (!implementedNames.contains(requiredName)) {
+ error(
+ implementation,
+ "Implementation of contract " + contract.getQualifiedName()
+ + " also requires contract " + requiredName
+ );
+ throw new ProcessorException();
+ }
+ }
+ }
+
+ // ── Type hierarchy inspection ───────────────────────────────────────────────
+
+ /**
+ * Traverses a type hierarchy and applies project-wide validation rules.
+ *
+ * This method exists because the processor validates more than explicitly annotated
+ * implementations. It also enforces that:
+ *
+ * - plugin interfaces carry {@code @PluginContract},
+ * - provider interfaces declare {@code API_LEVEL},
+ * - concrete plugin implementations use {@code @DataversePlugin}, or at least trigger a warning.
+ *
+ *
+ * @param typeElement the root type to inspect
+ */
+ private void inspectTypeHierarchy(TypeElement typeElement) {
+ Deque<TypeElement> queue = new ArrayDeque<>();
+ queue.addLast(typeElement);
+
+ while (!queue.isEmpty()) {
+ TypeElement current = queue.removeFirst();
+ String qualifiedName = current.getQualifiedName().toString();
+ if (!inspectedTypes.add(qualifiedName)) {
+ continue;
+ }
+
+ inspectType(current);
+
+ for (TypeMirror iface : current.getInterfaces()) {
+ TypeElement interfaceType = asTypeElement(iface);
+ if (interfaceType != null) {
+ queue.addLast(interfaceType);
+ }
+ }
+
+ TypeMirror superclass = current.getSuperclass();
+ TypeElement superType = asTypeElement(superclass);
+ if (superType != null && superclass.getKind() != TypeKind.NONE) {
+ queue.addLast(superType);
+ }
+ }
+ }
+
+ /**
+ * Applies validation rules to a single type discovered during hierarchy inspection.
+ *
+ * This method dispatches to specialized validators based on the nature of the type:
+ * contract interfaces, provider interfaces, and plugin implementation candidates each
+ * have their own set of rules.
+ *
+ * @param typeElement the type to inspect
+ */
+ private void inspectType(TypeElement typeElement) {
+ validatePluginContractUsage(typeElement);
+ validateDirectBaseTypeImplementations(typeElement);
+
+ if (isPluginInterfaceCandidate(typeElement)) {
+ if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) {
+ error(typeElement, "Interfaces extending Plugin must declare @PluginContract");
+ throw new ProcessorException();
+ }
+
+ if (!typeElement.getModifiers().contains(Modifier.PUBLIC)) {
+ error(typeElement, "Interfaces extending Plugin must be public");
+ }
+
+ validateApiLevelConstant(typeElement);
+ validateContractGraph(typeElement);
+ }
+
+ if (isProviderInterfaceCandidate(typeElement)) {
+ if (!typeElement.getModifiers().contains(Modifier.PUBLIC)) {
+ error(typeElement, "Interfaces extending CoreProvider must be public");
+ }
+
+ validateApiLevelConstant(typeElement);
+ }
+
+ if (isPluginImplementationCandidate(typeElement)
+ && findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_IMPLEMENTATION_ANNOTATION) == null) {
+ warning(
+ typeElement,
+ "Plugin implementation should declare @DataversePlugin; processing it implicitly"
+ );
+
+ // Even without the annotation, we still process the implementation. This keeps the
+ // migration path smooth and ensures metadata generation does not depend solely on
+ // authors remembering one annotation.
+ processImplementation(typeElement);
+ }
+ }
+
+ // ── Contract graph validation ───────────────────────────────────────────────
+
+ /**
+ * Validates the contract graph rules for a {@code @PluginContract}-annotated interface.
+ *
+ * This is the central method enforcing the structural rules of the contract hierarchy.
+ * It reads the contract's role and delegates to role-specific validation:
+ *
+ *
+ * - BASE contracts may not declare {@code requires} and may not extend
+ * other contracts.
+ * - CAPABILITY contracts must declare exactly one base contract in
+ * {@code requires}. They may optionally extend their required base contract in the
+ * Java type hierarchy (to provide default implementations), but may not extend another
+ * capability.
+ *
+ *
+ * Additionally, for capabilities, this method enforces package locality: the capability
+ * must reside in the same package or a subpackage of its required base contract.
+ *
+ * @param contract the contract interface to validate
+ */
+ private void validateContractGraph(TypeElement contract) {
+ AnnotationMirror annotation = findAnnotationMirror(contract, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION);
+ if (annotation == null) {
+ return;
+ }
+
+ PluginContract.Role role = readContractRole(annotation, contract);
+ List<TypeElement> requiredContracts = readClassArrayAnnotationValue(annotation, "requires");
+
+ // Collect all parent types that are themselves plugin contracts. This is used to enforce
+ // the extension rules: which contracts may extend which other contracts.
+ List<TypeElement> extendedContracts = findExtendedPluginContracts(contract);
+
+ if (role == PluginContract.Role.BASE) {
+ validateBaseContractGraph(contract, requiredContracts, extendedContracts);
+ } else {
+ validateCapabilityContractGraph(contract, requiredContracts, extendedContracts);
+ }
+ }
+
+ /**
+ * Validates the graph rules specific to a {@link PluginContract.Role#BASE base contract}.
+ *
+ * Base contracts are the loading identities of plugins. They form the roots of the
+ * contract graph and therefore:
+ * <ul>
+ * <li>must not declare {@code requires}; they do not depend on other contracts,</li>
+ * <li>must not extend other contracts; there can only be one loading identity per plugin.</li>
+ * </ul>
+ *
+ * @param contract the base contract being validated
+ * @param requiredContracts the contracts listed in the {@code requires} attribute
+ * @param extendedContracts parent contracts found in the Java type hierarchy
+ */
+ private void validateBaseContractGraph(
+ TypeElement contract,
+ List<TypeElement> requiredContracts,
+ List<TypeElement> extendedContracts
+ ) {
+ // Base contracts are self-contained loading identities and may not require other contracts.
+ if (!requiredContracts.isEmpty()) {
+ error(
+ contract,
+ "Base contract " + contract.getQualifiedName()
+ + " may not require other contracts; only capabilities may declare requires"
+ );
+ throw new ProcessorException();
+ }
+
+ // Base contracts must not extend other contracts. Allowing this would create ambiguous
+ // loading identities — the plugin loader would not know which base to register under.
+ if (!extendedContracts.isEmpty()) {
+ TypeElement parent = extendedContracts.get(0);
+ error(
+ contract,
+ "Contract " + parent.getQualifiedName()
+ + " may not be extended by base contract " + contract.getQualifiedName()
+ + "; base contracts must not extend other contracts"
+ );
+ throw new ProcessorException();
+ }
+ }
+
+ /**
+ * Validates the graph rules specific to a {@link PluginContract.Role#CAPABILITY capability contract}.
+ *
+ * Capabilities are non-loadable extensions. The rules ensure a clean, unambiguous graph:
+ * <ul>
+ * <li>A capability must require exactly one base contract.</li>
+ * <li>A capability may extend its required base contract to provide default implementations
+ * for methods declared by the base.</li>
+ * <li>A capability may not extend another capability contract.</li>
+ * <li>A capability must reside in the same package or a subpackage of its required base.</li>
+ * </ul>
+ *
+ * @param contract the capability contract being validated
+ * @param requiredContracts the contracts listed in the {@code requires} attribute
+ * @param extendedContracts parent contracts found in the Java type hierarchy
+ */
+ private void validateCapabilityContractGraph(
+ TypeElement contract,
+ List<TypeElement> requiredContracts,
+ List<TypeElement> extendedContracts
+ ) {
+ // A capability must require exactly one base contract. This links the capability to its
+ // loading identity and ensures the plugin loader can always resolve the plugin's kind.
+ TypeElement requiredBase = validateCapabilityRequires(contract, requiredContracts);
+
+ // Validate the Java extends hierarchy: a capability may extend its required base, but
+ // must not extend any other contract (especially not another capability).
+ validateCapabilityExtensions(contract, extendedContracts, requiredBase);
+
+ // Capabilities must be co-located with their base contract so that SPI authors maintain
+ // a cohesive package structure.
+ validatePackageLocality(contract, requiredBase);
+ }
+
+ /**
+ * Validates the {@code requires} attribute of a capability contract.
+ *
+ * A capability must require exactly one entry, and that entry must be a base contract
+ * interface — not a capability, not a class, and not the capability itself.
+ *
+ * @param contract the capability contract being validated
+ * @param requiredContracts the contracts listed in the {@code requires} attribute
+ * @return the single required base contract
+ */
+ private TypeElement validateCapabilityRequires(
+ TypeElement contract,
+ List<TypeElement> requiredContracts
+ ) {
+ String errorMessage = "Capability contract %s must require single base @PluginContract interface".formatted(contract.getQualifiedName());
+
+ // Exactly one entry is required. Zero entries, multiple entries, or entries that are
+ // not base contracts all fail with the same message.
+ if (requiredContracts.size() != 1) {
+ error(contract, errorMessage);
+ throw new ProcessorException();
+ }
+
+ TypeElement required = requiredContracts.get(0);
+
+ // The required type must be an interface (not a class) and must carry @PluginContract.
+ if (required.getKind() != ElementKind.INTERFACE) {
+ error(contract, errorMessage);
+ throw new ProcessorException();
+ }
+
+ // Self-references are meaningless and would create a cycle.
+ if (required.getQualifiedName().contentEquals(contract.getQualifiedName())) {
+ error(contract, errorMessage);
+ throw new ProcessorException();
+ }
+
+ // The required contract must be annotated with @PluginContract.
+ AnnotationMirror requiredAnnotation = findAnnotationMirror(
+ required, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION
+ );
+ if (requiredAnnotation == null) {
+ error(contract, errorMessage);
+ throw new ProcessorException();
+ }
+
+ // The required contract must have the BASE role. Capabilities requiring other capabilities
+ // are not supported.
+ PluginContract.Role requiredRole = readContractRole(requiredAnnotation, required);
+ if (requiredRole != PluginContract.Role.BASE) {
+ error(contract, errorMessage);
+ throw new ProcessorException();
+ }
+
+ return required;
+ }
+
+ /**
+ * Validates the Java {@code extends} hierarchy of a capability contract.
+ *
+ * A capability may extend its required base contract — this is the mechanism that allows
+ * the capability to provide default implementations for methods declared by the base, and
+ * Java's type system will correctly resolve them without requiring bridge methods in the
+ * plugin implementation class.
+ *
+ * However, a capability may not extend another capability contract. Capability-to-capability
+ * inheritance is not supported in the current model.
+ *
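+ * As a sketch with hypothetical names (assuming the base contract {@code Export} declares a
+ * {@code String mediaType()} method), the allowed extension pattern looks like:
+ *
+ * <pre>
+ * &#64;PluginContract(role = PluginContract.Role.CAPABILITY, requires = Export.class)
+ * public interface XmlExport extends Export {
+ *     int API_LEVEL = 1;
+ *     default String mediaType() { return "application/xml"; }
+ * }
+ * </pre>
+ *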
+ * @param contract the capability contract being validated
+ * @param extendedContracts all parent types that are plugin contracts
+ * @param requiredBase the single required base contract from the {@code requires} attribute
+ */
+ private void validateCapabilityExtensions(
+ TypeElement contract,
+ List<TypeElement> extendedContracts,
+ TypeElement requiredBase
+ ) {
+ for (TypeElement parent : extendedContracts) {
+ AnnotationMirror parentAnnotation = findAnnotationMirror(
+ parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION
+ );
+ if (parentAnnotation == null) {
+ // Should not happen since findExtendedPluginContracts only returns annotated types,
+ // but guard defensively.
+ continue;
+ }
+
+ PluginContract.Role parentRole = readContractRole(parentAnnotation, parent);
+
+ if (parentRole == PluginContract.Role.CAPABILITY) {
+ // Capability-to-capability extension is not allowed. Each capability is a
+ // standalone extension point attached to a base contract.
+ error(
+ contract,
+ "Contract " + parent.getQualifiedName()
+ + " may not be extended by capability contract " + contract.getQualifiedName()
+ + "; capabilities may not extend other capabilities"
+ );
+ throw new ProcessorException();
+ }
+
+ if (parentRole == PluginContract.Role.BASE) {
+ // A capability may extend a base contract, but only if that base is the same one
+ // declared in requires. Extending an unrelated base would silently introduce a
+ // second loading identity into the hierarchy.
+ if (!parent.getQualifiedName().contentEquals(requiredBase.getQualifiedName())) {
+ error(
+ contract,
+ "Capability contract " + contract.getQualifiedName()
+ + " extends base contract " + parent.getQualifiedName()
+ + " but requires " + requiredBase.getQualifiedName()
+ + "; the extended base must match the required base"
+ );
+ throw new ProcessorException();
+ }
+ // Extension matches requires — this is the allowed case.
+ }
+ }
+
+ // Defensive re-check: any base contract that appears in the Java extends hierarchy must be
+ // the one declared in requires. The loop above already rejects mismatches, and a capability
+ // that extends a base without declaring requires at all is rejected by validateCapabilityRequires.
+ for (TypeElement parent : extendedContracts) {
+ AnnotationMirror parentAnnotation = findAnnotationMirror(
+ parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION
+ );
+ if (parentAnnotation == null) {
+ continue;
+ }
+ PluginContract.Role parentRole = readContractRole(parentAnnotation, parent);
+ if (parentRole == PluginContract.Role.BASE
+ && !parent.getQualifiedName().contentEquals(requiredBase.getQualifiedName())) {
+ error(
+ contract,
+ "Capability contract " + contract.getQualifiedName()
+ + " must require extended base contract interface " + parent.getQualifiedName()
+ );
+ throw new ProcessorException();
+ }
+ }
+ }
+
+ /**
+ * Finds all direct parent interfaces of the given contract that are themselves plugin contracts.
+ *
+ * This only looks at the directly declared {@code extends} clause of the interface, not at
+ * transitive ancestors. The common super-interface {@code Plugin} is excluded because it is
+ * a framework marker, not a contract.
+ *
+ * @param contract the contract interface to inspect
+ * @return parent types that carry {@code @PluginContract}, in declaration order
+ */
+ private List<TypeElement> findExtendedPluginContracts(TypeElement contract) {
+ List<TypeElement> result = new ArrayList<>();
+ for (TypeMirror iface : contract.getInterfaces()) {
+ TypeElement parent = asTypeElement(iface);
+ if (parent == null) {
+ continue;
+ }
+
+ // Skip the Plugin marker interface — it is a framework type, not a contract.
+ if (parent.getQualifiedName().contentEquals(ProcessorConstants.PLUGIN_INTERFACE)) {
+ continue;
+ }
+
+ // Only consider interfaces that are annotated with @PluginContract.
+ if (findAnnotationMirror(parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) != null) {
+ result.add(parent);
+ }
+ }
+ return result;
+ }
+
+ /**
+ * Validates that the capability resides in the same package or a subpackage of its required
+ * base contract.
+ *
+ * This rule enforces package locality for SPI cohesion: capabilities should be defined
+ * close to their base contract so that related contracts form a coherent API surface.
+ *
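+ * For example, with hypothetical package names: a capability in {@code io.example.export.xml}
+ * may require a base contract in {@code io.example.export}, while a capability in
+ * {@code io.example.other} may not.
+ *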
+ * @param capability the capability contract being validated
+ * @param requiredBase the required base contract
+ */
+ private void validatePackageLocality(TypeElement capability, TypeElement requiredBase) {
+ String capabilityPackage = getPackageName(capability);
+ String basePackage = getPackageName(requiredBase);
+
+ // The capability must be in the same package or a subpackage of the base.
+ // "test.export.xml".startsWith("test.export.") covers subpackages.
+ // Direct equality covers the same-package case.
+ boolean sameOrSubpackage = capabilityPackage.equals(basePackage)
+ || capabilityPackage.startsWith(basePackage + ".");
+
+ if (!sameOrSubpackage) {
+ error(
+ capability,
+ "Capability contract " + capability.getQualifiedName()
+ + " and its required base contract " + requiredBase.getQualifiedName()
+ + " must share same package path; " + capabilityPackage
+ + " is not within " + basePackage
+ );
+ throw new ProcessorException();
+ }
+ }
+
+ // ── Direct base type validation ─────────────────────────────────────────────
+
+ /**
+ * Rejects direct implementations of the foundational base types {@code Plugin} and
+ * {@code CoreProvider}.
+ *
+ * These two types are infrastructure-level marker/base interfaces only. Loadable plugins
+ * and concrete providers must instead implement a specific contract interface extending one
+ * of these base types. Otherwise, no meaningful compatibility contract can be derived.
+ *
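+ * For example (hypothetical names): {@code class CsvExport implements Plugin} is rejected,
+ * whereas {@code class CsvExport implements Export}, where {@code Export} is a contract
+ * interface extending {@code Plugin}, is accepted.
+ *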
+ * @param typeElement the type currently being inspected
+ */
+ private void validateDirectBaseTypeImplementations(TypeElement typeElement) {
+ if (typeElement.getKind() != ElementKind.CLASS) {
+ return;
+ }
+
+ if (directlyImplementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)) {
+ error(
+ typeElement,
+ "Plugin implementations must implement a specific plugin contract interface, not Plugin directly"
+ );
+ throw new ProcessorException();
+ }
+
+ if (directlyImplementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE)) {
+ error(
+ typeElement,
+ "Core provider implementations must implement a specific provider interface, not CoreProvider directly"
+ );
+ throw new ProcessorException();
+ }
+ }
+
+ /**
+ * Checks whether a type directly declares the given interface in its {@code implements} clause.
+ *
+ * This is stricter than assignability: it only matches explicit direct implementation and is
+ * used to reject classes that target the framework base interfaces {@code Plugin} or
+ * {@code CoreProvider} directly.
+ *
+ * @param typeElement the type to inspect
+ * @param targetTypeName the fully qualified interface name to look for
+ * @return {@code true} if the type directly implements the target interface
+ */
+ private boolean directlyImplementsType(TypeElement typeElement, String targetTypeName) {
+ for (TypeMirror interfaceType : typeElement.getInterfaces()) {
+ TypeElement interfaceElement = asTypeElement(interfaceType);
+ if (interfaceElement != null && interfaceElement.getQualifiedName().contentEquals(targetTypeName)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ // ── @PluginContract usage validation ────────────────────────────────────────
+
+ /**
+ * Verifies that {@code @PluginContract} is only used on interfaces that extend {@code Plugin}.
+ *
+ * Although the annotation targets {@code ElementType.TYPE}, Java's annotation target model
+ * cannot express "interfaces extending Plugin only". This processor therefore enforces the
+ * rule explicitly and fails compilation when the annotation is placed on classes, enums,
+ * records, or interfaces that do not extend {@code Plugin}.
+ *
+ * @param typeElement the type currently being inspected
+ */
+ private void validatePluginContractUsage(TypeElement typeElement) {
+ if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) {
+ return;
+ }
+
+ // @PluginContract must be on an interface that extends Plugin.
+ if (typeElement.getKind() != ElementKind.INTERFACE
+ || !implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)) {
+ error(
+ typeElement,
+ "@PluginContract may only be declared on interfaces extending Plugin"
+ );
+ throw new ProcessorException();
+ }
+ }
+
+ // ── Contract model reading ──────────────────────────────────────────────────
+
+ /**
+ * Reads and validates the metadata of one plugin contract interface.
+ *
+ * This extracts the role, required contracts, and provider dependencies from the
+ * {@code @PluginContract} annotation. It also validates the presence and correctness
+ * of the {@code API_LEVEL} compile-time constant.
+ *
+ * @param contract the contract interface
+ * @return the extracted in-memory contract model
+ */
+ private PluginContractModel readPluginContractModel(TypeElement contract) {
+ AnnotationMirror annotation = findAnnotationMirror(contract, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION);
+ if (annotation == null) {
+ error(contract, "Missing @PluginContract");
+ throw new ProcessorException();
+ }
+
+ validateApiLevelConstant(contract);
+
+ PluginContract.Role role = readContractRole(annotation, contract);
+ List<TypeElement> requiredContracts = readClassArrayAnnotationValue(annotation, "requires");
+ List<TypeElement> providers = readRequiredProviders(annotation);
+
+ return new PluginContractModel(role, List.copyOf(requiredContracts), List.copyOf(providers));
+ }
+
+ /**
+ * Reads the {@code role} member of a {@code @PluginContract} annotation.
+ *
+ * During annotation processing, enum-valued annotation members appear as
+ * {@link VariableElement} instances. This method extracts the constant name and
+ * maps it back to the {@link PluginContract.Role} enum.
+ *
+ * @param annotation the contract annotation mirror
+ * @param contract the annotated contract, used for diagnostics
+ * @return the parsed contract role
+ */
+ private PluginContract.Role readContractRole(AnnotationMirror annotation, TypeElement contract) {
+ AnnotationValue value = getAnnotationValue(annotation, "role");
+ if (value == null) {
+ error(contract, "@PluginContract.role is required");
+ throw new ProcessorException();
+ }
+
+ Object raw = value.getValue();
+ if (!(raw instanceof VariableElement enumConstant)) {
+ error(contract, "@PluginContract.role must be an enum constant");
+ throw new ProcessorException();
+ }
+
+ try {
+ return PluginContract.Role.valueOf(enumConstant.getSimpleName().toString());
+ } catch (IllegalArgumentException ex) {
+ error(contract, "Unsupported @PluginContract.role: " + enumConstant.getSimpleName());
+ throw new ProcessorException();
+ }
+ }
+
+ // ── API level validation ────────────────────────────────────────────────────
+
+ /**
+ * Verifies that the given type declares a valid compile-time constant {@code API_LEVEL} field.
+ *
+ * @param contract the contract or provider type to validate
+ */
+ private void validateApiLevelConstant(TypeElement contract) {
+ readIntConstant(contract, ProcessorConstants.API_LEVEL_FIELD_NAME);
+ }
+
+ /**
+ * Reads a compile-time {@code int} constant from a type.
+ *
+ * The field must be a primitive {@code int} with a compile-time constant value.
+ * Boxed {@code Integer} fields or fields initialized with method calls do not qualify
+ * because their values are not available to the annotation processor at compile time.
+ *
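+ * As a sketch, assuming a hypothetical {@code computeLevel()} helper, these alternative
+ * declarations illustrate what qualifies:
+ *
+ * <pre>
+ * int API_LEVEL = 2;              // accepted: primitive compile-time constant
+ * Integer API_LEVEL = 2;          // rejected: boxed type carries no constant value
+ * int API_LEVEL = computeLevel(); // rejected: initializer is not a compile-time constant
+ * </pre>
+ *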
+ * @param type the owning type
+ * @param fieldName the field to locate
+ * @return the constant value
+ */
+ private int readIntConstant(TypeElement type, String fieldName) {
+ for (Element enclosed : type.getEnclosedElements()) {
+ if (enclosed instanceof VariableElement variable
+ && variable.getSimpleName().contentEquals(fieldName)) {
+ Object value = variable.getConstantValue();
+ if (value instanceof Integer intValue) {
+ return intValue;
+ }
+
+ error(type, type.getQualifiedName() + "." + fieldName + " must be a compile-time int constant");
+ throw new ProcessorException();
+ }
+ }
+
+ error(type, type.getQualifiedName() + " must declare int " + fieldName);
+ throw new ProcessorException();
+ }
+
+ // ── Provider handling ───────────────────────────────────────────────────────
+
+ /**
+ * Resolves the API levels of all providers required by the current contract.
+ *
+ * Each provider type referenced in a {@code @RequiredProvider} annotation must be an
+ * interface extending {@code CoreProvider} and must declare a compile-time {@code API_LEVEL}
+ * constant.
+ *
+ * @param providerTypes the provider interfaces referenced by the contract annotation
+ * @param implementation the concrete implementation being processed, used for diagnostics
+ * @return a map from provider class name to required API level
+ */
+ private Map<String, Integer> readProviderLevels(List<TypeElement> providerTypes, TypeElement implementation) {
+ Map<String, Integer> result = new LinkedHashMap<>();
+ TypeElement coreProviderType = elements.getTypeElement(ProcessorConstants.CORE_PROVIDER_INTERFACE);
+ if (coreProviderType == null) {
+ error(implementation, "Cannot resolve " + ProcessorConstants.CORE_PROVIDER_INTERFACE);
+ throw new ProcessorException();
+ }
+
+ for (TypeElement providerType : providerTypes) {
+ if (!types.isAssignable(
+ types.erasure(providerType.asType()),
+ types.erasure(coreProviderType.asType())
+ )) {
+ error(
+ implementation,
+ "Required provider " + providerType.getQualifiedName()
+ + " does not implement " + ProcessorConstants.CORE_PROVIDER_INTERFACE
+ );
+ throw new ProcessorException();
+ }
+
+ int apiLevel = readIntConstant(providerType, ProcessorConstants.API_LEVEL_FIELD_NAME);
+ result.put(providerType.getQualifiedName().toString(), apiLevel);
+ }
+
+ return result;
+ }
+
+ /**
+ * Merges provider API level requirements from one contract into the accumulated set.
+ *
+ * If the same provider is required with different API levels by different contracts,
+ * processing fails because the resulting runtime expectation would be ambiguous.
+ *
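+ * For example (hypothetical names): if one contract requires {@code JsonProvider} at API level 1
+ * and another contract on the same implementation requires it at level 2, processing fails with
+ * a conflict error rather than silently picking one of the two levels.
+ *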
+ * @param merged the accumulated provider requirements
+ * @param additional the provider requirements from the current contract
+ * @param implementation the concrete implementation, used for diagnostics
+ */
+ private void mergeProviderLevels(
+ Map<String, Integer> merged,
+ Map<String, Integer> additional,
+ TypeElement implementation
+ ) {
+ additional.forEach((providerName, apiLevel) -> {
+ Integer existing = merged.putIfAbsent(providerName, apiLevel);
+ if (existing != null && existing.intValue() != apiLevel) {
+ error(
+ implementation,
+ "Conflicting API levels for provider " + providerName
+ + ": " + existing + " vs " + apiLevel
+ );
+ throw new ProcessorException();
+ }
+ });
+ }
+
+ // ── AutoService detection ───────────────────────────────────────────────────
+
+ /**
+ * Checks whether the implementation class uses {@code @AutoService}.
+ *
+ * The processor does not depend on AutoService directly. It merely detects the annotation
+ * by name so it can avoid generating conflicting ServiceLoader resources.
+ *
+ * @param implementation the implementation class
+ * @return {@code true} if {@code @AutoService} is present
+ */
+ private boolean hasAutoServiceAnnotation(TypeElement implementation) {
+ return findAnnotationMirror(implementation, ProcessorConstants.AUTO_SERVICE_ANNOTATION) != null;
+ }
+
+ // ── Resource generation ─────────────────────────────────────────────────────
+
+ /**
+ * Writes all accumulated generated resources after processing is complete.
+ *
+ * Descriptors are always written. ServiceLoader files are written only for service types that
+ * are not externally managed via {@code @AutoService}.
+ */
+ private void writeAllGeneratedResources() {
+ for (Descriptor descriptor : descriptors.values()) {
+ writeDescriptor(descriptor);
+ }
+
+ for (Map.Entry<String, Set<String>> entry : serviceImplementationsByContract.entrySet()) {
+ String serviceType = entry.getKey();
+ if (serviceTypesManagedExternally.contains(serviceType)) {
+ continue;
+ }
+ writeServiceFile(serviceType, entry.getValue());
+ }
+ }
+
+ /**
+ * Writes one generated plugin descriptor file.
+ *
+ * @param descriptor the plugin descriptor model to serialize
+ */
+ private void writeDescriptor(Descriptor descriptor) {
+ String resourceName = DescriptorFormat.toPath(descriptor.klass());
+
+ try {
+ FileObject resource = processingEnv
+ .getFiler()
+ .createResource(StandardLocation.CLASS_OUTPUT, "", resourceName);
+
+ try (Writer writer = resource.openWriter()) {
+ DescriptorFormat.write(descriptor, writer);
+ }
+ } catch (IOException e) {
+ processingEnv.getMessager().printMessage(
+ Diagnostic.Kind.ERROR,
+ "Failed to write descriptor for " + descriptor.klass() + ": " + e.getMessage()
+ );
+ }
+ }
+
+ /**
+ * Writes one ServiceLoader registration file for a base contract.
+ *
+ * This replaces the need for {@code @AutoService} on plugin implementations. The generated
+ * file follows the standard {@code META-INF/services/} convention.
+ *
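+ * For example, with hypothetical names, registering {@code org.example.CsvExporter} under the
+ * base contract {@code org.example.Export} produces a resource
+ * {@code META-INF/services/org.example.Export} containing:
+ *
+ * <pre>
+ * org.example.CsvExporter
+ * </pre>
+ *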
+ * @param serviceTypeName the fully qualified name of the service interface
+ * @param implementations the implementation class names to register
+ */
+ private void writeServiceFile(String serviceTypeName, Set<String> implementations) {
+ String resourceName = SERVICES_DIRECTORY + serviceTypeName;
+
+ try {
+ FileObject resource = processingEnv.getFiler()
+ .createResource(StandardLocation.CLASS_OUTPUT, "", resourceName);
+
+ try (Writer writer = resource.openWriter()) {
+ for (String implementation : implementations) {
+ writer.write(implementation);
+ writer.write(System.lineSeparator());
+ }
+ }
+ } catch (IOException e) {
+ processingEnv.getMessager().printMessage(
+ Diagnostic.Kind.ERROR,
+ "Failed to write service file for " + serviceTypeName + ": " + e.getMessage()
+ );
+ }
+ }
+
+ // ── Type candidate checks ───────────────────────────────────────────────────
+
+ /**
+ * Determines whether a type qualifies as an implementation candidate for a plugin.
+ *
+ * A candidate is a concrete (non-abstract) class that implements {@code Plugin} through
+ * some contract interface. Abstract base classes are intentionally excluded — they may exist
+ * as shared implementation helpers without needing {@code @DataversePlugin}.
+ *
+ * @param typeElement the type to inspect
+ * @return {@code true} if the type is a concrete class implementing {@code Plugin}
+ */
+ private boolean isPluginImplementationCandidate(TypeElement typeElement) {
+ if (typeElement.getKind() != ElementKind.CLASS) {
+ return false;
+ }
+ if (typeElement.getModifiers().contains(Modifier.ABSTRACT)) {
+ return false;
+ }
+ return implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)
+ && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE);
+ }
+
+ /**
+ * Determines whether a type is a plugin interface candidate that must declare {@code @PluginContract}.
+ *
+ * Any interface extending {@code Plugin} (other than {@code Plugin} itself) is expected to
+ * be a contract interface and must carry the {@code @PluginContract} annotation.
+ *
+ * @param typeElement the type to inspect
+ * @return {@code true} if the type is an interface extending {@code Plugin}
+ */
+ private boolean isPluginInterfaceCandidate(TypeElement typeElement) {
+ return typeElement.getKind() == ElementKind.INTERFACE
+ && implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)
+ && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE);
+ }
+
+ /**
+ * Determines whether a type is a provider interface candidate that must declare {@code API_LEVEL}.
+ *
+ * @param typeElement the type to inspect
+ * @return {@code true} if the type is an interface extending {@code CoreProvider}
+ */
+ private boolean isProviderInterfaceCandidate(TypeElement typeElement) {
+ return typeElement.getKind() == ElementKind.INTERFACE
+ && implementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE)
+ && !isExactType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE);
+ }
+
+ /**
+ * Determines whether the given type is a plugin contract.
+ *
+ * A type qualifies as a plugin contract only when it is annotated with
+ * {@code @PluginContract} and is assignable to the common plugin super-interface.
+ *
+ * @param typeElement the type to test
+ * @return {@code true} if the type is a plugin contract
+ */
+ private boolean isPluginContract(TypeElement typeElement) {
+ if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) {
+ return false;
+ }
+
+ TypeElement pluginType = elements.getTypeElement(ProcessorConstants.PLUGIN_INTERFACE);
+ if (pluginType == null) {
+ return false;
+ }
+
+ return types.isAssignable(
+ types.erasure(typeElement.asType()),
+ types.erasure(pluginType.asType())
+ );
+ }
+
+ // ── Type system helpers ─────────────────────────────────────────────────────
+
+ /**
+ * Tests whether the given type is assignable to another type identified by fully qualified name.
+ *
+ * @param typeElement the source type
+ * @param targetTypeName the fully qualified target type name
+ * @return {@code true} if the source type is assignable to the target type
+ */
+ private boolean implementsType(TypeElement typeElement, String targetTypeName) {
+ TypeElement targetType = elements.getTypeElement(targetTypeName);
+ if (targetType == null) {
+ return false;
+ }
+
+ return types.isAssignable(
+ types.erasure(typeElement.asType()),
+ types.erasure(targetType.asType())
+ );
+ }
+
+ /**
+ * Checks whether the given type is exactly the named type itself, not merely a subtype.
+ *
+ * @param typeElement the type to inspect
+ * @param targetTypeName the fully qualified target type name
+ * @return {@code true} if both names are identical
+ */
+ private boolean isExactType(TypeElement typeElement, String targetTypeName) {
+ return typeElement.getQualifiedName().contentEquals(targetTypeName);
+ }
+
+ /**
+ * Extracts the package name from a type element.
+ *
+ * @param typeElement the type whose package to determine
+ * @return the fully qualified package name
+ */
+ private String getPackageName(TypeElement typeElement) {
+ return elements.getPackageOf(typeElement).getQualifiedName().toString();
+ }
+
+ // ── Annotation mirror helpers ───────────────────────────────────────────────
+
+ /**
+ * Finds an annotation mirror on the given element by fully qualified annotation type name.
+ *
+ * Annotation mirrors are the compile-time representation of annotations. Unlike
+ * {@code getAnnotation()}, mirrors work reliably during annotation processing even when
+ * the annotation class is being compiled in the same round.
+ *
+ * @param element the annotated element
+ * @param annotationTypeName the fully qualified annotation type name
+ * @return the matching annotation mirror, or {@code null} if absent
+ */
+ private AnnotationMirror findAnnotationMirror(Element element, String annotationTypeName) {
+ for (AnnotationMirror mirror : element.getAnnotationMirrors()) {
+ Element annotationElement = mirror.getAnnotationType().asElement();
+ if (annotationElement instanceof TypeElement annotationType
+ && annotationType.getQualifiedName().contentEquals(annotationTypeName)) {
+ return mirror;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Resolves one annotation member value, including defaults.
+ *
+ * Uses {@code Elements.getElementValuesWithDefaults()} so that annotation members with
+ * default values are visible even when not explicitly set by the author.
+ *
+ * @param annotation the annotation mirror
+ * @param memberName the member to resolve
+ * @return the resolved annotation value, or {@code null} if not found
+ */
+ private AnnotationValue getAnnotationValue(AnnotationMirror annotation, String memberName) {
+ Map<? extends ExecutableElement, ? extends AnnotationValue> values =
+ elements.getElementValuesWithDefaults(annotation);
+
+ for (Map.Entry<? extends ExecutableElement, ? extends AnnotationValue> entry : values.entrySet()) {
+ if (entry.getKey().getSimpleName().contentEquals(memberName)) {
+ return entry.getValue();
+ }
+ }
+
+ return null;
+ }
+
+ /**
+ * Reads an annotation member containing an array of class literals.
+ *
+ * During annotation processing, class-valued members are represented as {@link TypeMirror}s
+ * within {@link AnnotationValue}s. This helper converts them into {@link TypeElement}s.
+ *
+ * @param annotation the annotation mirror
+ * @param memberName the member containing class literals
+ * @return the referenced types, preserving declaration order
+ */
+ private List<TypeElement> readClassArrayAnnotationValue(AnnotationMirror annotation, String memberName) {
+ AnnotationValue value = getAnnotationValue(annotation, memberName);
+ if (value == null) {
+ return List.of();
+ }
+
+ Object raw = value.getValue();
+ if (!(raw instanceof List<?> values)) {
+ return List.of();
+ }
+
+ List<TypeElement> result = new ArrayList<>();
+ for (Object entry : values) {
+ if (!(entry instanceof AnnotationValue annotationValue)) {
+ continue;
+ }
+
+ Object classValue = annotationValue.getValue();
+ if (!(classValue instanceof TypeMirror typeMirror)) {
+ continue;
+ }
+
+ TypeElement typeElement = asTypeElement(typeMirror);
+ if (typeElement != null) {
+ result.add(typeElement);
+ }
+ }
+
+ return List.copyOf(result);
+ }
+
+ /**
+ * Reads the nested {@code providers()} member of a {@code @PluginContract} annotation.
+ *
+ * The provider information is stored as nested {@code @RequiredProvider} annotations. This helper
+ * unwraps those nested annotations and returns the referenced provider types.
+ *
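+ * A sketch of the expected shape, using a hypothetical {@code HttpClientProvider}:
+ *
+ * <pre>
+ * &#64;PluginContract(
+ *     role = PluginContract.Role.BASE,
+ *     providers = &#64;RequiredProvider(HttpClientProvider.class)
+ * )
+ * </pre>
+ *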
+ * @param pluginContractAnnotation the plugin contract annotation mirror
+ * @return provider types referenced by the contract
+ */
+ private List<TypeElement> readRequiredProviders(AnnotationMirror pluginContractAnnotation) {
+ AnnotationValue providersValue = getAnnotationValue(pluginContractAnnotation, "providers");
+ if (providersValue == null) {
+ return List.of();
+ }
+
+ Object raw = providersValue.getValue();
+ if (!(raw instanceof List<?> values)) {
+ return List.of();
+ }
+
+ List<TypeElement> result = new ArrayList<>();
+ for (Object entry : values) {
+ if (!(entry instanceof AnnotationValue annotationValue)) {
+ continue;
+ }
+
+ Object nested = annotationValue.getValue();
+ if (!(nested instanceof AnnotationMirror providerAnnotation)) {
+ continue;
+ }
+
+ TypeElement providerAnnotationType = asTypeElement(providerAnnotation.getAnnotationType());
+ if (providerAnnotationType == null
+ || !providerAnnotationType.getQualifiedName().contentEquals(ProcessorConstants.REQUIRED_PROVIDER_ANNOTATION)) {
+ continue;
+ }
+
+ AnnotationValue providerClassValue = getAnnotationValue(providerAnnotation, "value");
+ if (providerClassValue == null) {
+ continue;
+ }
+
+ Object providerRaw = providerClassValue.getValue();
+ if (!(providerRaw instanceof TypeMirror providerTypeMirror)) {
+ continue;
+ }
+
+ TypeElement providerType = asTypeElement(providerTypeMirror);
+ if (providerType != null) {
+ result.add(providerType);
+ }
+ }
+
+ return List.copyOf(result);
+ }
+
+ /**
+ * Converts a declared type mirror into its corresponding type element.
+ *
+ * During annotation processing, types are represented as {@link TypeMirror} instances.
+ * This utility extracts the underlying {@link TypeElement} when the mirror represents a
+ * declared (class/interface) type.
+ *
+ * @param typeMirror the type mirror to convert
+ * @return the type element, or {@code null} if the mirror is not a declared type
+ */
+ private TypeElement asTypeElement(TypeMirror typeMirror) {
+ if (!(typeMirror instanceof DeclaredType declaredType)) {
+ return null;
+ }
+
+ Element element = declaredType.asElement();
+ return element instanceof TypeElement typeElement ? typeElement : null;
+ }
+
+ // ── Ordering helpers ────────────────────────────────────────────────────────
+
+ /**
+ * Returns the given types sorted by fully qualified name for deterministic processing order.
+ *
+ * Sorting ensures that error messages and generated output are stable across compiler
+ * runs regardless of the order in which the compiler discovers types.
+ *
+ * @param typesToSort the types to sort
+ * @return a sorted list view
+ */
+ private List<TypeElement> sortByQualifiedName(Set