diff --git a/.github/workflows/site.yml b/.github/workflows/site.yml new file mode 100644 index 0000000..ff370ab --- /dev/null +++ b/.github/workflows/site.yml @@ -0,0 +1,48 @@ +name: Publish Maven Site + +on: + push: + branches: + - main + workflow_dispatch: + +permissions: + contents: write + +concurrency: + group: publish-maven-site-${{ github.ref }} + cancel-in-progress: true + +jobs: + publish-site: + runs-on: ubuntu-latest + + steps: + - name: Check out source + uses: actions/checkout@v6 + + - name: Set up Java 17 + uses: actions/setup-java@v5 + with: + distribution: temurin + java-version: '17' + cache: maven + + # TODO: Later on, we need to extend this mechanism to provide both version and snapshot, + # as well as a different case the stable version and "latest". + - name: Build and verify project site + # By default, docs.site.base is / for local development. For deployment, this needs to be adapted. + # IMPORTANT: make sure the base ends with "/"! + run: mvn -B verify site -Ddocs.site.base=/dataverse-spi/snapshot/ + + - name: Add .nojekyll + run: touch target/site/.nojekyll + + - name: Deploy to gh-pages + uses: JamesIves/github-pages-deploy-action@v4 + with: + branch: gh-pages + folder: target/site + target-folder: snapshot + clean: true + single-commit: true diff --git a/api/pom.xml b/api/pom.xml new file mode 100644 index 0000000..d1e554c --- /dev/null +++ b/api/pom.xml @@ -0,0 +1,63 @@ + + + 4.0.0 + + io.gdcc.spi + parent + 2.1.0-SNAPSHOT + + + io.gdcc + dataverse-spi + + + + + jar + + + + + io.gdcc.spi + meta + + + io.gdcc.spi + core + + + io.gdcc.spi + export + + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.6.2 + + + package + + shade + + + false + + + + + + + + \ No newline at end of file diff --git a/core/pom.xml b/core/pom.xml new file mode 100644 index 0000000..3f1b1df --- /dev/null +++ b/core/pom.xml @@ -0,0 +1,34 @@ + + + 4.0.0 + + io.gdcc.spi + parent + 2.1.0-SNAPSHOT + + + core + + + + + + io.gdcc.spi + meta + + + org.slf4j + 
slf4j-api + provided + + + + org.slf4j + slf4j-simple + test + + + + \ No newline at end of file diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java new file mode 100644 index 0000000..48d3602 --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderConfiguration.java @@ -0,0 +1,196 @@ +package io.gdcc.spi.core.loader; + + +/** + * Immutable configuration controlling the behavior of the plugin loader. + * + *

Use {@link #defaults()} to start from the standard configuration and then + * adjust individual options with the fluent {@code with...} methods.

+ * + *

Example:

+ *
{@code
+ * LoaderConfiguration configuration = LoaderConfiguration.defaults()
+ *     .withEmitWarningsOnMultiPluginSource(true)
+ *     .withAbortOnCompatibilityProblems(false);
+ * }
+ */ +public final class LoaderConfiguration { + + private final boolean enforceSingleSourceMatchingPluginsOnly; + private final boolean emitWarningsOnMultiPluginSource; + private final boolean abortOnCompatibilityProblems; + private final boolean abortOnDuplicatedIdentities; + private final boolean enforceUnambiguousPluginIdentities; + + private LoaderConfiguration( + boolean enforceSingleSourceMatchingPluginsOnly, + boolean emitWarningsOnMultiPluginSource, + boolean abortOnCompatibilityProblems, + boolean abortOnDuplicatedIdentities, + boolean enforceUnambiguousPluginIdentities + ) { + this.enforceSingleSourceMatchingPluginsOnly = enforceSingleSourceMatchingPluginsOnly; + this.emitWarningsOnMultiPluginSource = emitWarningsOnMultiPluginSource; + this.abortOnCompatibilityProblems = abortOnCompatibilityProblems; + this.abortOnDuplicatedIdentities = abortOnDuplicatedIdentities; + this.enforceUnambiguousPluginIdentities = enforceUnambiguousPluginIdentities; + } + + /** + * Returns the standard loader configuration (which is strictly enforcing). + * + * + */ + public static LoaderConfiguration defaults() { + return new LoaderConfiguration( + true, + false, + true, + true, + true + ); + } + + /** + * Returns a permissive loader configuration with all strict validation features disabled. + * It has package private visibility as the only permissive usage is in a testing context. + * + *

+ * The configuration has the following properties: + *

+ *

+ * + * @return a {@code LoaderConfiguration} instance with permissive settings. + */ + static LoaderConfiguration permissive() { + return new LoaderConfiguration( + false, + false, + false, + false, + false + ); + } + + /** + * When enabled, a source may only provide plugins for a single requested base contract. + * If any non-matching plugin is found, loading from that source is aborted entirely. + * + *

When disabled, non-matching plugins are ignored.

+ */ + public boolean enforceSingleSourceMatchingPluginsOnly() { + return enforceSingleSourceMatchingPluginsOnly; + } + + /** + * Returns a copy with {@link #enforceSingleSourceMatchingPluginsOnly()} updated. + */ + public LoaderConfiguration withEnforceSingleSourceMatchingPluginsOnly(boolean value) { + return new LoaderConfiguration( + value, + emitWarningsOnMultiPluginSource, + abortOnCompatibilityProblems, + abortOnDuplicatedIdentities, + enforceUnambiguousPluginIdentities + ); + } + + /** + * When {@link #enforceSingleSourceMatchingPluginsOnly()} is disabled, controls whether + * multi-plugin-contract sources should emit warnings. + */ + public boolean emitWarningsOnMultiPluginSource() { + return emitWarningsOnMultiPluginSource; + } + + /** + * Returns a copy with {@link #emitWarningsOnMultiPluginSource()} updated. + */ + public LoaderConfiguration withEmitWarningsOnMultiPluginSource(boolean value) { + return new LoaderConfiguration( + enforceSingleSourceMatchingPluginsOnly, + value, + abortOnCompatibilityProblems, + abortOnDuplicatedIdentities, + enforceUnambiguousPluginIdentities + ); + } + + /** + * When enabled, plugin loading aborts on discovered compatibility problems (for example, API level mismatches). + * No classes are actually loaded, problems are detected using plugin metadata only. + */ + public boolean abortOnCompatibilityProblems() { + return abortOnCompatibilityProblems; + } + + /** + * Returns a copy with {@link #abortOnCompatibilityProblems()} updated. + */ + public LoaderConfiguration withAbortOnCompatibilityProblems(boolean value) { + return new LoaderConfiguration( + enforceSingleSourceMatchingPluginsOnly, + emitWarningsOnMultiPluginSource, + value, + abortOnDuplicatedIdentities, + enforceUnambiguousPluginIdentities + ); + } + + /** + * When enabled, loading aborts if duplicate plugin identities are detected. + * + *

Note: duplicated identities make plugins indistinguishable for users.

+ */ + public boolean abortOnDuplicatedIdentities() { + return abortOnDuplicatedIdentities; + } + + /** + * Returns a copy with {@link #abortOnDuplicatedIdentities()} updated. + */ + public LoaderConfiguration withAbortOnDuplicatedIdentities(boolean value) { + return new LoaderConfiguration( + enforceSingleSourceMatchingPluginsOnly, + emitWarningsOnMultiPluginSource, + abortOnCompatibilityProblems, + value, + enforceUnambiguousPluginIdentities + ); + } + + /** + * When enabled, plugin identities must be unique within a source. + * Any plugin's identity that differs by case or special chars only will be seen as a duplicate. + */ + public boolean enforceUnambiguousPluginIdentities() { + return enforceUnambiguousPluginIdentities; + } + + /** + * Returns a copy with {@link #enforceUnambiguousPluginIdentities()} updated. + */ + public LoaderConfiguration withEnforceUnambiguousPluginIdentities(boolean value) { + return new LoaderConfiguration( + enforceSingleSourceMatchingPluginsOnly, + emitWarningsOnMultiPluginSource, + abortOnCompatibilityProblems, + abortOnDuplicatedIdentities, + value + ); + } +} + + diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderException.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderException.java new file mode 100644 index 0000000..9975004 --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderException.java @@ -0,0 +1,17 @@ +package io.gdcc.spi.core.loader; + +import java.util.List; + +public class LoaderException extends RuntimeException { + + private final List problems; + + public LoaderException(List problems) { + super("Multiple problems have been detected by the loader, accessible from getProblems()."); + this.problems = List.copyOf(problems); + } + + public List getProblems() { + return problems; + } +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java new file mode 100644 index 0000000..64ea06b --- /dev/null +++ 
b/core/src/main/java/io/gdcc/spi/core/loader/LoaderHelper.java @@ -0,0 +1,568 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.descriptor.DescriptorScanner; +import io.gdcc.spi.meta.descriptor.PluginDescriptor; +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; +import io.gdcc.spi.meta.processor.ProcessorConstants; + +import java.io.IOException; +import java.lang.reflect.Field; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +/** + * Provides utility methods for handling plugin loading and validation operations, + * such as converting file paths to URLs, checking class presence, determining core API levels, + * and validating plugin descriptors. It's intended for internal use only. + */ +final class LoaderHelper { + + private LoaderHelper() { + // Intentionally left blank for helper singleton + } + + /** + * Converts a {@link Path} object to a {@link URL} with optional prefix and suffix. + * + * @param path the {@link Path} to convert to a {@link URL}; must not be null + * @param urlPrefix an optional String to prepend to the URL (e.g., a scheme); can be null or blank + * @param urlSuffix an optional String to append to the URL; can be null or blank + * @return the constructed {@link URL} based on the provided path, prefix, and suffix + * @throws MalformedURLException if the constructed URL is invalid + */ + static URL pathToUrl(Path path, String urlPrefix, String urlSuffix) throws MalformedURLException { + return new URL( + (urlPrefix == null || urlPrefix.isBlank() ? 
"" : urlPrefix + ":" ) + + path.toUri().toURL() + + ( urlSuffix == null || urlSuffix.isBlank() ? "" : urlSuffix ) + ); + } + + /** + * Checks if a class with the specified fully qualified class name (FQCN) is present and + * accessible using the given {@link ClassLoader}. + * + * @param fqcn the fully qualified name of the class to be checked + * @param classLoader the {@link ClassLoader} to use for detecting the class + * @return true if the class is present and accessible; false otherwise + */ + static boolean isClassPresent(String fqcn, ClassLoader classLoader) { + try { + Class.forName(fqcn, false, classLoader); + return true; + } catch (ClassNotFoundException | LinkageError e) { + return false; + } + } + + + /** + * Determines the core API level of the given plugin class. This method checks for a static field + * named "API_LEVEL" in the specified class and returns its integer value if present and accessible. + * + * @param pluginClass the plugin class to check for the API level. The class must be an interface. + * @return an {@code OptionalInt} containing the API level if the field exists and is accessible; + * {@code OptionalInt.empty()} otherwise. + * @throws IllegalArgumentException if the provided class is not an interface. + */ + static int determineCoreApiLevel(Class pluginClass) { + // Looking up the plugin contract API level is only ever valid on SPI interfaces but never on implementations. + if (!pluginClass.isInterface()) { + throw new IllegalArgumentException("Class must be an interface"); + } + try { + // Retrieve the field from exactly this class (we don't want to search any superclasses here!) 
+ Field apiLevel = pluginClass.getDeclaredField(ProcessorConstants.API_LEVEL_FIELD_NAME); + return apiLevel.getInt(pluginClass); + } catch (NoSuchFieldException | IllegalAccessException e) { + throw new IllegalStateException("Contract class must have an (accessible) " + ProcessorConstants.API_LEVEL_FIELD_NAME + " field"); + } + } + + /** + * Determines the core API level of a given plugin class by its fully qualified class name. + * + * @param className the fully qualified name of the class to evaluate + * @return the core API level of the specified plugin class + * @throws IllegalArgumentException if the specified class cannot be found + */ + static int determineCoreApiLevel(String className, ClassLoader classLoader) { + Class pluginClass = resolveClass(className, classLoader); + return determineCoreApiLevel(pluginClass); + } + + /** + * Resolves and returns the {@link Class} object for the specified fully qualified class name + * using the provided {@link ClassLoader}. + * + * @param className the fully qualified name of the class to resolve; must not be null or empty + * @param classLoader the {@link ClassLoader} to use for loading the class; must not be null + * @return the {@link Class} object representing the loaded class + * @throws IllegalArgumentException if the class cannot be found or loaded + */ + static Class resolveClass(String className, ClassLoader classLoader) { + try { + return Class.forName(className, false, classLoader); + } catch (ClassNotFoundException e) { + throw new IllegalArgumentException("Class " + className + " could not be found in core", e); + } + } + + /** + * Validates a list of plugin descriptors to ensure there are no class name collisions either between + * plugins or with the core Java classpath. The method identifies plugins with conflicting class names + * and records the issues for further analysis. + * + * @param descriptors a list of {@code SourcedDescriptor} objects representing the plugins to validate. 
+ * Each descriptor contains information about the plugin's source location and associated class. + * @param classLoader the {@code ClassLoader} used to check for class name conflicts with the core system. + * @return a {@code PluginValidationResult} object that contains two categories: + * - {@code accepted}: A list of plugins that passed validation without conflicts. + * - {@code rejected}: A map of rejected plugins to a list of {@code LoaderProblem} objects detailing + * the specific reasons for rejection. + */ + static PluginValidationResult verifyNoClassCollisions(List descriptors, ClassLoader classLoader) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> rejected = new HashMap<>(); + + // Just an ephemeral scratch space to save what we've already seen as plugins from where + Map classToSourcePath = new HashMap<>(); + + // Iterate through all the discovered plugins + descriptors.forEach(descriptor -> { + + String className = descriptor.plugin().klass(); + Path source = descriptor.sourceLocation(); + List problems = new ArrayList<>(); + + // Check if the classname was already provided from a different location + if (classToSourcePath.containsKey(className)) { + problems.add(new LoaderProblem.PluginClassNameCollision(className, classToSourcePath.get(className), source)); + } else { + classToSourcePath.put(className, source); + } + + // Check if the classname is already present on the current class path (the core) + if (isClassPresent(className, classLoader)) { + problems.add(new LoaderProblem.PluginClassNameCollisionWithCore(className, source)); + } + + // Let the record show... 
+ if (problems.isEmpty()) { + accepted.add(descriptor); + } else { + rejected.put(descriptor, problems); + } + }); + + return new PluginValidationResult<>(Set.copyOf(accepted), PluginValidationResult.copyProblemMap(rejected), Map.of()); + } + + + /** + * Identifies and classifies plugin descriptors that are not implementations of the specified plugin + * contract class. The method evaluates each descriptor to determine whether its associated plugin + * adheres to the given plugin contract. Plugins are then categorized as accepted, rejected, or + * warning-based, depending on their compatibility and the loader configuration. + * + * @param descriptors a list of {@code SourcedDescriptor} objects representing the plugins to be evaluated. + * Each descriptor contains information about the plugin's source location and associated class. + * @param pluginClass the {@code Class} object representing the plugin contract that the plugins + * should implement. + * @param configuration a {@code LoaderConfiguration} object that dictates specific validation behaviors + * and enforcement rules when classifying the plugins. + * @return a {@code PluginValidationResult} object containing: + * - {@code accepted}: A set of plugins that fully adhere to the specified plugin contract. + * - {@code warning}: A map of plugins with potential issues or warnings that do not warrant rejection. + * - {@code rejected}: A map of plugins that were rejected due to failing to meet the plugin contract + * or violating enforced loader rules. 
+ */ + static PluginValidationResult identifyNonImplementations( + List descriptors, Class pluginClass, LoaderConfiguration configuration) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> warning = new HashMap<>(); + Map> rejected = new HashMap<>(); + + // Check if the plugin to be loaded is an implementation of the plugin contract the loader was signed up for + descriptors.forEach(descriptor -> { + List problems = new ArrayList<>(); + + if (!descriptor.isOfKind(pluginClass)) { + problems.add(new LoaderProblem.PluginClassMismatch( + descriptor.plugin().klass(), + descriptor.sourceLocation(), + pluginClass.getCanonicalName() + )); + + if (configuration.enforceSingleSourceMatchingPluginsOnly()) + rejected.put(descriptor, problems); + else { + warning.put(descriptor, problems); + } + } else { + accepted.add(descriptor); + } + }); + + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(rejected), + PluginValidationResult.copyProblemMap(warning) + ); + } + + + /** + * Verifies service provider records in the provided list of descriptors. + * This method examines each descriptor to determine if it contains a valid + * service provider interface (SPI) record. If a descriptor contains an SPI record, + * it is accepted; otherwise, it is rejected with a corresponding list of problems. + * The results of the verification process are returned as a {@code PluginValidationResult}. 
+ * + * @param descriptors a list of {@code SourcedDescriptor} objects to be validated + * @return a {@code PluginValidationResult} containing accepted descriptors and associated rejection details + */ + static PluginValidationResult verifyServiceProviderRecords(List descriptors) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> rejected = new HashMap<>(); + + for (SourcedDescriptor descriptor : descriptors) { + try { + if (DescriptorScanner.hasServiceProviderInterfaceRecord(descriptor)) { + accepted.add(descriptor); + } else { + rejected.put( + descriptor, + List.of(new LoaderProblem.MissingServiceProviderRecord( + descriptor.plugin().klass(), + descriptor.plugin().kind(), + descriptor.sourceLocation()) + )); + } + } catch (IOException | IllegalArgumentException e) { + rejected.put(descriptor, List.of(new LoaderProblem.LocationFailure(descriptor.sourceLocation(), e))); + } + } + + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(rejected), + Map.of() + ); + } + + + /** + * Validates a list of plugin descriptors to ensure their API levels are compatible with the specified + * plugin contract class. The method verifies that each plugin both declares and adheres to the required + * API level for its declared contracts. Any discrepancies, such as missing contracts, mismatched API levels, + * or unsupported contracts, are recorded as validation problems. + * + * @param descriptors a list of {@code SourcedDescriptor} objects representing the plugins to validate. + * Each descriptor contains information about the plugin's source location and associated plugin details, + * including the contracts and API levels it supports. + * @param pluginClass the {@code Class} object representing the plugin contract that the plugins must + * comply with. The desired API level for this contract will be determined and used as part of + * the validation process. 
+ * @return a {@code PluginValidationResult} object containing: + * - {@code accepted}: A set of plugins that fully adhere to the API level requirements for the + * specified plugin contract. + * - {@code rejected}: A map of plugins that failed validation, paired with a list of + * {@code LoaderProblem} objects describing the specific issues encountered. + */ + static PluginValidationResult verifyPluginApiLevels(List descriptors, Class pluginClass, ClassLoader classLoader) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> rejected = new HashMap<>(); + + // Determine the plugin contract's name once as a string for comparisons + String desiredPluginClass = DescriptorFormat.transformClassName(pluginClass); + int desiredPluginApiLevel = determineCoreApiLevel(pluginClass); + + // Iterate over all the plugins + for (SourcedDescriptor descriptor : descriptors) { + // Need to track if the base contract appears in the contracts including a level + boolean hasBaseClassContract = false; + // Save all the problems identified during validation + List problems = new ArrayList<>(); + + String pluginImplementationClass = descriptor.plugin().klass(); + + // Iterate all the implemented capabilities and check their levels + for (Map.Entry contract : descriptor.plugin().contracts().entrySet()) { + String pluginContractClass = contract.getKey(); + int pluginContractApiLevel = contract.getValue(); + + // Let the record show that this plugin does claim to implement the base contract for this loader + if (desiredPluginClass.equals(pluginContractClass)) { + hasBaseClassContract = true; + } + + try { + // Extract the API level the core is expecting for this contract + int coreContractApiLevel = determineCoreApiLevel(pluginContractClass, classLoader); + + // Compare base class core levels: they must match exactly, otherwise record a problem + if (coreContractApiLevel != pluginContractApiLevel) { + problems.add(new LoaderProblem.PluginClassApiLevelMismatch( + 
pluginImplementationClass, + descriptor.sourceLocation(), + coreContractApiLevel, + pluginContractApiLevel) + ); + } + } catch (IllegalArgumentException e) { + problems.add(new LoaderProblem.PluginClassUnsupported( + pluginImplementationClass, + descriptor.sourceLocation(), + pluginContractClass) + ); + } + } + + // If the plugin did not provide a level for the base contract this loader expects, this is a serious problem... + if (!hasBaseClassContract) { + problems.add(new LoaderProblem.PluginClassApiLevelMissing( + pluginImplementationClass, + descriptor.sourceLocation(), + desiredPluginClass, + desiredPluginApiLevel) + ); + } + + if (problems.isEmpty()) { + accepted.add(descriptor); + } else { + rejected.put(descriptor, List.copyOf(problems)); + } + } + + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(rejected), + Map.of() + ); + } + + /** + * Verifies the API levels of required providers declared by plugins against the available API levels in the core system. + * This method validates all plugins in the provided list and determines whether they are compatible with the + * API levels exposed by the core system. It returns a validation result containing accepted plugins and details + * of rejected plugins with their associated problems. + * + * @param descriptors the list of plugin descriptors to validate. Each descriptor contains information about + * the plugin and its source location. + * @param classLoader the class loader used to resolve provider classes and determine their API levels. + * @return a {@code PluginValidationResult} instance. The result contains the sets of + * accepted and rejected plugins, with detailed reasons for rejection. 
+ */ + static PluginValidationResult verifyProviderApiLevels(List descriptors, ClassLoader classLoader) { + // Scratch spaces to build the result + Set accepted = new HashSet<>(); + Map> rejected = new HashMap<>(); + + // Save a few CPU cycles by not using the classloader over and over again for the same provider class + Map lookedUpProviders = new HashMap<>(); + + // Check all the plugins + for (SourcedDescriptor descriptor : descriptors) { + // Note: the way how we create the descriptors rules out we see any null keys or values in this map. + Map requiredProviders = descriptor.plugin().requiredProviders(); + // Save all the problems identified during validation + List problems = new ArrayList<>(); + + // Iterate over all the providers required by the plugin + for (String provider : requiredProviders.keySet()) { + int pluginLevel = requiredProviders.get(provider); + + // Look up the API level for required provider within the core + int coreLevel; + try { + if (lookedUpProviders.containsKey(provider)) { + coreLevel = lookedUpProviders.get(provider); + } else { + coreLevel = determineCoreApiLevel(provider, classLoader); + lookedUpProviders.put(provider, coreLevel); + } + // In case the core does not support the provider (contract not even found), the plugin must be rejected. 
+ } catch (IllegalArgumentException e) { + problems.add(new LoaderProblem.ProviderClassUnsupported( + descriptor.plugin().klass(), + descriptor.sourceLocation(), + provider + )); + // Skip the rest and continue with the next provider + continue; + } + + // Now match the API level against the plugin's requirements + if (coreLevel != pluginLevel) { + problems.add(new LoaderProblem.ProviderApiLevelMismatch( + descriptor.plugin().klass(), + descriptor.sourceLocation(), + provider, + coreLevel, + pluginLevel + )); + } + } + + if (problems.isEmpty()) { + accepted.add(descriptor); + } else { + rejected.put(descriptor, List.copyOf(problems)); + } + } + + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(rejected), + Map.of() + ); + } + + + /** + * Converts a {@link SourcedDescriptor} and a plugin instance into a {@link PluginDescriptor}. + * + * @param The type of the plugin, constrained to extend {@link Plugin}. + * @param sourceDescriptor The descriptor providing metadata about the source and configuration of the plugin. + * Must not be null. + * @param plugin The actual plugin instance to be described. Must not be null and must have a valid identity. + * @param classLoader The {@link ClassLoader} used to resolve any required classes. Must not be null. + * @return A {@link PluginDescriptor} object that encapsulates the metadata, identity, and other properties + * of the provided plugin. + * @throws NullPointerException If any of the input parameters is null. + * @throws IllegalArgumentException If the plugin's identity is null or blank, or any classes cannot be resolved. 
+ */ + static PluginDescriptor toPluginDescriptor( + SourcedDescriptor sourceDescriptor, + T plugin, + ClassLoader classLoader + ) { + Objects.requireNonNull(sourceDescriptor); + Objects.requireNonNull(plugin); + Objects.requireNonNull(classLoader); + + String identity = plugin.identity(); + if (identity == null || identity.isBlank()) { + throw new IllegalArgumentException("Plugin identity may not be null or blank"); + } + + // The cast is necessary as getClass() returns T, but the actual implementation class is required + // At this point, we know that "plugin" is an instance of an implementation, so this operation is safe. + // T is an interface, as checked by the plugin loader during construction. + @SuppressWarnings("unchecked") + Class pluginClass = (Class) plugin.getClass(); + + // When reaching this point, the metadata verification already made sure that the plugin kind equals T. + // Casting here is safe. + @SuppressWarnings("unchecked") + Class kindClass = (Class) resolveClass(sourceDescriptor.plugin().kind(), classLoader); + + Map, Integer> contracts = new HashMap<>(); + sourceDescriptor.plugin().contracts().forEach((contractName, apiLevel) -> { + // Again, the metadata was already vetted to contain valid contract classes. Casting is safe here. + @SuppressWarnings("unchecked") + Class contractClass = (Class) resolveClass(contractName, classLoader); + contracts.put(contractClass, apiLevel); + }); + + Map, Integer> requiredProviders = new HashMap<>(); + sourceDescriptor.plugin().requiredProviders().forEach((providerName, apiLevel) -> { + // Again, the metadata was already vetted to contain valid provider requirements. Casting is safe here. 
+ @SuppressWarnings("unchecked") + Class providerClass = (Class) resolveClass(providerName, classLoader); + requiredProviders.put(providerClass, apiLevel); + }); + + return new PluginDescriptor<>( + sourceDescriptor.sourceLocation(), + identity, + pluginClass, + kindClass, + contracts, + requiredProviders + ); + } + + /** + * Verifies the uniqueness of plugin identities within the provided set of plugins. + * Identifies duplicates based on the normalized identity of each plugin descriptor + * and returns a validation result categorizing acceptable and problematic plugins. + * + * If the provided configuration enforces unambiguous identities, duplicates will + * be treated as rejected. Otherwise, duplicates will be reported as warnings. + * + * @param The type of the plugin. + * @param plugins The set of plugin descriptors to validate. + * @param configuration The loader configuration that dictates validation behavior. + * @return A {@link PluginValidationResult} containing the accepted plugins, + * rejected duplicates if enforcement is enabled, or warnings for duplicates + * if enforcement is disabled. 
+ */ + static PluginValidationResult> verifyUniqueIdentities(List> plugins, LoaderConfiguration configuration) { + // Group all descriptors by normalized identity + Map>> groups = plugins.stream() + .collect(Collectors.groupingBy( + entry -> entry.descriptor().normalizedIdentity(), + Collectors.toList()) + ); + + Set> accepted = new HashSet<>(); + Map, List> duplicates = new HashMap<>(); + + for (Map.Entry>> group : groups.entrySet()) { + String normalizedIdentity = group.getKey(); + List> members = group.getValue(); + + // Get all acceptable plugins that have no duplicates (single entry sets) + if (members.size() == 1) { + accepted.add(members.get(0)); + // There are no empty sets possible, so else equals size>1 + } else { + for (PluginHandle member : members) { + // Generate the list of duplication problems but skip for the current plugin + List problems = members.stream() + .filter(handle -> !handle.equals(member)) + .map(handle -> new LoaderProblem.DuplicateIdentity(normalizedIdentity, member.descriptor(), handle.descriptor())) + .collect(Collectors.toList()); + + duplicates.put(member, problems); + } + } + } + + if (configuration.enforceUnambiguousPluginIdentities()) { + // Return duplicates as rejected + return new PluginValidationResult<>( + Set.copyOf(accepted), + PluginValidationResult.copyProblemMap(duplicates), + Map.of() + ); + } + + // Configuration says we only warn about duplicates + return new PluginValidationResult<>( + Set.copyOf(accepted), + Map.of(), + PluginValidationResult.copyProblemMap(duplicates) + ); + } + +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java new file mode 100644 index 0000000..6245797 --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/LoaderProblem.java @@ -0,0 +1,105 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.meta.descriptor.PluginDescriptor; + +import java.nio.file.Path; +import java.util.Set; + +public 
sealed interface LoaderProblem permits LoaderProblem.DuplicateIdentity, LoaderProblem.LocationFailure, LoaderProblem.MissingServiceProviderRecord, LoaderProblem.PluginClassApiLevelMismatch, LoaderProblem.PluginClassApiLevelMissing, LoaderProblem.PluginClassMismatch, LoaderProblem.PluginClassNameCollision, LoaderProblem.PluginClassNameCollisionWithCore, LoaderProblem.PluginClassUnsupported, LoaderProblem.ProviderApiLevelMismatch, LoaderProblem.ProviderClassUnsupported, LoaderProblem.SourceFailure { + + String message(); + + record SourceFailure(Throwable cause) implements LoaderProblem { + @Override + public String message() { + return cause.getClass().getSimpleName() + ": " + cause.getMessage(); + } + } + + record LocationFailure(Path location, Throwable cause) implements LoaderProblem { + @Override + public String message() { + return "Loading from " + location + " failed: " + cause.getMessage(); + } + } + + record DuplicateIdentity(String normalizedIdentity, PluginDescriptor source, PluginDescriptor duplicate) implements LoaderProblem { + @Override + public String message() { + return """ + Plugin %s ( %s @ %s) normalized identity %s collides with Plugin %s ( %s @ %s) + """.formatted( + source.pluginClass().getCanonicalName(), + source.identity(), + source.sourceLocation(), + normalizedIdentity, + duplicate.pluginClass().getCanonicalName(), + duplicate.identity(), + duplicate.sourceLocation() + ); + } + } + + record MissingServiceProviderRecord(String className, String kind, Path source) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " in " + source + " is missing entry in META-INF/services/" + kind; + } + } + + record PluginClassNameCollision(String className, Path source1, Path source2) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " is defined in both " + source1 + " and " + source2; + } + } + + record PluginClassNameCollisionWithCore(String className, Path 
source) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " is defined in both core and " + source; + } + } + + record PluginClassMismatch(String className, Path source, String pluginKind) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " in " + source + " does not implement " + pluginKind; + } + } + + record PluginClassUnsupported(String className, Path source, String pluginContract) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " in " + source + " implements unsupported plugin contract " + pluginContract; + } + } + + record PluginClassApiLevelMissing(String classname, Path source, String contractClass, int coreLevel) implements LoaderProblem { + @Override + public String message() { + return "Class " + classname + " in " + source + " provides no API level for " + contractClass + ", but core expects " + coreLevel; + } + } + + record PluginClassApiLevelMismatch(String classname, Path source, int coreLevel, int pluginLevel) implements LoaderProblem { + @Override + public String message() { + return "Class " + classname + " in " + source + " uses API level " + pluginLevel + " but core expects " + coreLevel; + } + } + + record ProviderApiLevelMismatch(String classname, Path source, String provider, int coreLevel, int pluginLevel) implements LoaderProblem { + @Override + public String message() { + return "Class " + classname + " in " + source + " requires API level " + pluginLevel + " for provider " + provider + ", but core provides " + coreLevel; + } + } + + record ProviderClassUnsupported(String className, Path source, String provider) implements LoaderProblem { + @Override + public String message() { + return "Class " + className + " in " + source + " requires unsupported provider " + provider; + } + } +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginHandle.java 
b/core/src/main/java/io/gdcc/spi/core/loader/PluginHandle.java new file mode 100644 index 0000000..6ecfd07 --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/PluginHandle.java @@ -0,0 +1,30 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.meta.descriptor.PluginDescriptor; +import io.gdcc.spi.meta.plugin.Plugin; + +import java.util.Objects; + +/** + * Encapsulates a plugin and its corresponding descriptor, providing a unified representation + * of a resolved plugin and its metadata in the runtime context. + * + *

The {@code PluginHandle} is an immutable record that binds a concrete plugin instance + * with its associated {@link PluginDescriptor}. This ensures both the metadata and the + * operational plugin instance are accessible and linked together, facilitating plugin management + * and execution.

+ * + * @param the type of the plugin instance, which must extend {@link Plugin} + * @param descriptor the runtime descriptor containing metadata and implementation details + * about the plugin; must not be null + * @param plugin the actual plugin instance associated with the descriptor; must not be null + */ +public record PluginHandle( + PluginDescriptor descriptor, + T plugin +) { + public PluginHandle { + Objects.requireNonNull(descriptor, "Plugin descriptor cannot be null"); + Objects.requireNonNull(plugin, "Plugin instance cannot be null"); + } +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java new file mode 100644 index 0000000..edea2cd --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/PluginLoader.java @@ -0,0 +1,387 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.descriptor.DescriptorScanner; +import io.gdcc.spi.meta.descriptor.PluginDescriptor; +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; +import io.gdcc.spi.meta.plugin.Plugin; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.ServiceConfigurationError; +import java.util.ServiceLoader; +import java.util.Set; +import java.util.regex.PatternSyntaxException; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * Loads plugins of a specified type from JAR files in a given directory using the Java ServiceLoader mechanism. + *

+ * Each plugin must implement the {@link Plugin} interface and provide a non-null, non-blank identity via the + * {@link Plugin#identity()} method. Plugins are loaded from individual JAR files, each loaded in its own + * {@link URLClassLoader}, enabling isolated class loading for plugin dependencies. + *

+ *

+ * This class supports custom ClassLoader hierarchies to accommodate complex deployment environments, and + * aggregates errors encountered during plugin discovery and loading into a single {@link LoaderException} + * if no plugins are successfully loaded. + *

+ *

+ * It supports one-time classloading, but no reloading of changed JARs at runtime. + * An application restart is required to pick up changes to plugin JARs. + *

+ * + * @param the type of plugin to load, constrained to implement the {@link Plugin} interface + */ +public class PluginLoader { + + private static final Logger logger = LoggerFactory.getLogger(PluginLoader.class); + + private final Class pluginClass; + private final ClassLoader parentClassLoader; + private final LoaderConfiguration configuration; + + /** + * Constructs a new PluginLoader that will load plugins of the specified type {@code T}. + * The parent ClassLoader is set to the current thread's context ClassLoader, which allows + * plugins to access classes and resources on the core's classpath. + * It uses the system default configuration for plugin loading behaviors, + * see {@link LoaderConfiguration#defaults()}. + * + * @param pluginClass the Class object representing the plugin type {@code T} to load + */ + public PluginLoader(Class pluginClass) { + this(pluginClass, Thread.currentThread().getContextClassLoader()); + } + + /** + * Constructs a new PluginLoader that will load plugins of the specified type {@code T}. + * It uses the system default configuration for plugin loading behaviors, + * see {@link LoaderConfiguration#defaults()}. + * + * @param pluginClass the Class object representing the plugin type {@code T} to load + * @param parentClassLoader the ClassLoader to be used as the parent for class loading of plugins + */ + public PluginLoader(Class pluginClass, ClassLoader parentClassLoader) { + this(pluginClass, LoaderConfiguration.defaults(), parentClassLoader); + } + + /** + * Constructs a new PluginLoader that will load plugins of the specified type {@code T}. + * The parent ClassLoader is set to the current thread's context ClassLoader, which allows + * plugins to access classes and resources on the core's classpath. 
+ * + * @param pluginClass the Class object representing the type of plugin {@code T} to load + * @param configuration the LoaderConfiguration specifying custom plugin loading behaviors + */ + public PluginLoader(Class pluginClass, LoaderConfiguration configuration) { + this(pluginClass, configuration, Thread.currentThread().getContextClassLoader()); + } + + /** + * Constructs a new instance of the PluginLoader, which is responsible for loading plugins of the specified type {@code T}. + * + * @param pluginClass the Class object representing the type of plugin {@code T} to load + * @param configuration the LoaderConfiguration specifying custom plugin loading behaviors + * @param parentClassLoader the ClassLoader to be used as the parent for loading plugin classes and resources + */ + public PluginLoader(Class pluginClass, LoaderConfiguration configuration, ClassLoader parentClassLoader) { + + // Basic Verification + Objects.requireNonNull(pluginClass); + Objects.requireNonNull(configuration); + Objects.requireNonNull(parentClassLoader); + this.pluginClass = pluginClass; + this.configuration = configuration; + this.parentClassLoader = parentClassLoader; + + // Check that the plugin class is a base plugin contract + validatePluginBaseClass(pluginClass); + } + + /** + * Validates that the provided class is a valid Dataverse Plugin Interface. + * The class must be an interface and annotated with {@link PluginContract}. 
+ * + * @param pluginClass the class to validate + * @throws IllegalArgumentException if the class is not an interface or is not annotated with {@code @PluginContract} + */ + static void validatePluginBaseClass(Class pluginClass) { + if (!pluginClass.isInterface() || + !pluginClass.isAnnotationPresent(PluginContract.class) || + pluginClass.getDeclaredAnnotationsByType(PluginContract.class)[0].role() != PluginContract.Role.BASE) + throw new IllegalArgumentException("Class argument must be a Dataverse Plugin Interface annotated with @PluginContract and have a role of BASE"); + } + + /** + *

Loads all plugins of type {@code T} from JAR files located in the specified directory. + * Each JAR file is loaded using a dedicated {@link URLClassLoader}, and plugins are + * discovered via the Java {@link ServiceLoader} mechanism (META-INF/services/package.plus.service.ClassName file). + * + *

For each discovered plugin, its {@link Plugin#identity()} must be non-null and non-blank; + * otherwise, it is skipped and an error is recorded. + *

+ * + * @param pluginJarsLocation the directory containing JAR files to scan for plugins + * @return a list of {@link PluginHandle}, linking each plugin's metadata to the corresponding plugin instance + * @throws LoaderException if one or more errors occur during loading, if no plugins + * could be successfully loaded, or if there are any duplicates. + * Note: The exception may contain multiple causes, each associated with a specific file or failure point + */ + public List> load(Path pluginJarsLocation) { + + // Find all potential sources within the given location + Map sources = findSources(pluginJarsLocation); + + // Preload the plugins (already validating via metadata before handing off to any classloader) + List descriptors = preloadPlugins(sources.keySet()); + + // Load the pre-validated plugins + return load(descriptors, sources); + } + + /** + * Locates plugin files within the given directory (but not subdirectories) and constructs + * corresponding URL arrays for class loading. + * + * @param pluginsLocation the root directory path to search for plugins + * @return a map where each key is a path to a root classpath (a JAR file or directory) and the corresponding value is + * a single-element array containing the generated URL for that location. 
+ * Note: for JARs, the URL is of the form "jar:!/" as required by {@code URLClassLoader} + * @throws LoaderException if one or more errors occur during directory scanning or URL construction + * and no valid mappings could be produced; the exception may contain multiple causes + * each associated with a specific file or failure point + */ + Map findSources(Path pluginsLocation) { + // Collect as many problems as possible before throwing an exception + List problems = new ArrayList<>(); + Map classRoots = new HashMap<>(); + + // Find all JAR files at the given location (ignoring potential subdirectories) + try (DirectoryStream stream = Files.newDirectoryStream(pluginsLocation, "*.jar")) { + // Using the foreach loop here to enable catching the URI/URL exceptions + for (Path path : stream) { + try { + // The URL[] is necessary as classloaders can deal with multiple locations at once. + // Note: "jar:!/" is the special syntax required to scan a complete JAR file for classes + classRoots.put(path, new URL[]{LoaderHelper.pathToUrl(path, "jar", "!/")}); + // This is not likely to happen, as we construct the URL from a valid path only + } catch (MalformedURLException e) { + problems.add(new LoaderProblem.LocationFailure(path, e)); + } + } + + // In addition: put the directory itself to enable loading exploded archives (mostly useful for testing). + // The location must be a browsable directory as otherwise exceptions would have been raised. + classRoots.put(pluginsLocation, new URL[]{LoaderHelper.pathToUrl(pluginsLocation, "", "")}); + + // NotDirectoryException | AccessDeniedException is a subset of IOException and covers these cases. + } catch (PatternSyntaxException | IOException e) { + problems.add(new LoaderProblem.SourceFailure(e)); + } + + if (problems.isEmpty()) { + return classRoots; + } + throw new LoaderException(problems); + } + + /** + * Preloads plugins by scanning the provided source paths and retrieving their descriptors. 
+ * Uses {@link DescriptorScanner#scanPath(Path)} to scan each source path. + * @see PluginLoader#preloadPlugins(Set, SourceScanner) for more details. + * + * @param sources A set of file paths representing the sources to scan for plugins. + * @return A list of SourcedDescriptor objects representing the preloaded plugins. + * @throws LoaderException If an error occurs while scanning the paths or loading plugin descriptors. + */ + List preloadPlugins(Set sources) throws LoaderException { + return preloadPlugins(sources, DescriptorScanner::scanPath); + } + + /** + * Preloads plugin descriptors from the given list of source paths. This method scans the provided + * sources for plugin descriptors, and validates them against various criteria (such as class name collisions, + * proper implementations of the desired plugin class, and API compatibility). It returns a set of plugin + * descriptors either valid or associated with a warning-level incompatibility. + * + *

Problems encountered during the loading process either trigger an exception to abort loading or + * are logged only based on configuration settings.

+ * + * @param sources the list of paths to scan for plugin descriptors + * @return a list of valid plugin descriptors that were successfully scanned and passed all validation checks + * @throws LoaderException if validation problems are encountered and the configuration mandates an abort + */ + List preloadPlugins(Set sources, SourceScanner scanner) throws LoaderException { + // Try to continue as long as possible before erroring out, catching as many problems as possible at once. + List sourceProblems = new ArrayList<>(); + // Scratch space to collect descriptors from the given sources. + List descriptors = new ArrayList<>(); + + // 1. Grab all the plugin descriptors from the given sources + for (Path source : sources) { + try { + descriptors.addAll(scanner.scanPath(source)); + } catch (IOException e) { + sourceProblems.add(new LoaderProblem.LocationFailure(source, e)); + logger.debug("Failed to scan source: {}", source, e); + } + } + logger.debug("Scanning for plugin descriptors found {} plugins: {}", descriptors.size(), descriptors); + + // 2. Verify that no class name collisions exist any plugins to be loaded. + var collisionResult = LoaderHelper.verifyNoClassCollisions(descriptors, this.parentClassLoader); + logger.debug("Scanning for class name collisions results: {}", collisionResult); + + // 3. Filter the descriptors to only include those that implement the desired plugin (base) class. + var implementationResult = LoaderHelper.identifyNonImplementations(descriptors, this.pluginClass, this.configuration); + logger.debug("Scanning for non-implementations results: {}", implementationResult); + + // 4. Verify that every plugin class has a service loader entry. Remove any affected from the list. + var serviceProviderResult = LoaderHelper.verifyServiceProviderRecords(descriptors); + logger.debug("Scanning for SPI record results: {}", serviceProviderResult); + + // 5. Verify that the API level of the plugin matches the core-expected level(s). 
+ var apiLevelResult = LoaderHelper.verifyPluginApiLevels(descriptors, this.pluginClass, this.parentClassLoader); + logger.debug("Scanning for plugin API level matches results: {}", apiLevelResult); + + // 6. Verify all the provider requirements by the plugin are met + var providerLevelsResult = LoaderHelper.verifyProviderApiLevels(descriptors, this.parentClassLoader); + logger.debug("Scanning for provider API level matches results: {}", apiLevelResult); + + // Merge all the different results to receive the final picture which plugins are faulty + var finalResults = PluginValidationResult.merge( + collisionResult, + implementationResult, + serviceProviderResult, + apiLevelResult, + providerLevelsResult + ); + // Merge all the problems into one large list, to be wrapped in an exception + finalResults.rejected().forEach((descriptor, problems) -> sourceProblems.addAll(problems)); + + // By default, we should abort now. In case we are asked to keep going by configuration, + // let the logs show any found problems as warnings. 
+ if (configuration.abortOnCompatibilityProblems() && (!sourceProblems.isEmpty() || !finalResults.rejected().isEmpty())) { + throw new LoaderException(sourceProblems); + } + + logger.warn("Pre-loading validation failed for {} plugins with {} problems, continuing with {} valid and {} warning plugins as requested.", + finalResults.rejected().size() + finalResults.warning().size(), + sourceProblems.size(), + finalResults.accepted().size(), + finalResults.warning().size()); + sourceProblems.forEach(problem -> logger.warn(problem.message())); + finalResults.warning().forEach((descriptor, problems) -> + logger.warn("Plugin {} has {} potential compatibility problems: {}", + descriptor, + problems.size(), + problems.stream().map(LoaderProblem::message).collect(Collectors.joining(", ")) + )); + + return Stream.concat(finalResults.accepted().stream(), finalResults.warning().keySet().stream()).toList(); + } + + + /** + * Loads plugins of type {@code T} from the specified mapping of locations to JAR URLs. + * Each location is processed by creating a dedicated {@link URLClassLoader}, and plugins are + * discovered via the Java {@link ServiceLoader} mechanism using the configured plugin class. + * + * For each discovered plugin, its {@link Plugin#identity()} must be non-null and non-blank; + * otherwise, it is skipped and an error is recorded. + * + * The returned map's keys describe the source of each loaded plugin via {@link PluginDescriptor}, + * associating the plugin's logical identity, class name, and JAR file location. It is the + * caller's responsibility to verify no duplicates (by class name or identity) exist before + * handing the plugins to the core. 
+ * + * @param sources a mapping from (JAR) file paths to their corresponding URLs used for class loading + * @return a map from {@link PluginDescriptor} metadata to the corresponding plugin instance + * @throws LoaderException if one or more errors occur during loading and no plugins + * could be successfully loaded; the exception may contain multiple causes, + * each associated with a specific JAR file or failure point + */ + List> load(List descriptors, Map sources) { + List sourceProblems = new ArrayList<>(); + List> loadedPlugins = new ArrayList<>(); + + // Create URLClassLoader for each file and load the plugin + descriptors.forEach(descriptor -> { + URL[] sourceUrl = sources.get(descriptor.sourceLocation()); + try (URLClassLoader classLoader = URLClassLoader.newInstance(sourceUrl, this.parentClassLoader)) { + // Load all plugins that can be found within the source for type T + ServiceLoader loader = ServiceLoader.load(this.pluginClass, classLoader); + + // Iterate over all found plugins and add to the plugin map, including source information + loader.forEach(plugin -> { + String identity = plugin.identity(); + if (identity == null || identity.isBlank()) { + sourceProblems.add(new LoaderProblem.LocationFailure( + descriptor.sourceLocation(), + new IllegalArgumentException(plugin.getClass().getCanonicalName() + "'s identity cannot be null or blank"))); + return; + } + + // Save the plugin and its metadata to the set of already loaded plugins + loadedPlugins.add( + new PluginHandle<>( + LoaderHelper.toPluginDescriptor( + descriptor, + plugin, + this.parentClassLoader), + plugin) + ); + }); + } catch (IOException | NoSuchMethodError | ServiceConfigurationError | UnsupportedClassVersionError e) { + sourceProblems.add(new LoaderProblem.LocationFailure(descriptor.sourceLocation(), e)); + } + }); + logger.debug("Loader was able to load {} plugins from {} sources.", loadedPlugins.size(), sources.size()); + + // Make sure there are no duplicate plugin identities + 
PluginValidationResult> duplicationChecks = LoaderHelper.verifyUniqueIdentities(loadedPlugins, configuration); + + // Merge all the different results to receive the final picture which plugins are faulty + // (For now, we only have a single check at this stage) + var finalResults = PluginValidationResult.merge(duplicationChecks); + + // Merge all the problems into one large list, to be wrapped in an exception + finalResults.rejected().forEach((descriptor, problems) -> sourceProblems.addAll(problems)); + + // By default, we should abort now. In case we are asked to keep going by configuration, + // let the logs show any found problems as warnings. + if (configuration.abortOnCompatibilityProblems() && (!sourceProblems.isEmpty() || !finalResults.rejected().isEmpty())) { + throw new LoaderException(sourceProblems); + } + + logger.warn("Validation after loading failed for {} plugins with {} problems, continuing with {} valid and {} warning plugins as requested.", + finalResults.rejected().size() + finalResults.warning().size(), + sourceProblems.size(), + finalResults.accepted().size(), + finalResults.warning().size()); + sourceProblems.forEach(problem -> logger.warn(problem.message())); + finalResults.warning().forEach((descriptor, problems) -> + logger.warn("Plugin {} has {} potential problems: {}", + descriptor, + problems.size(), + problems.stream().map(LoaderProblem::message).collect(Collectors.joining(", ")) + )); + + return Stream.concat(finalResults.accepted().stream(), finalResults.warning().keySet().stream()).toList(); + } + +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/PluginValidationResult.java b/core/src/main/java/io/gdcc/spi/core/loader/PluginValidationResult.java new file mode 100644 index 0000000..f55106e --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/PluginValidationResult.java @@ -0,0 +1,98 @@ +package io.gdcc.spi.core.loader; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import 
java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +/** + * Represents the result of validating a set of plugins. + * It's intended for internal use only. + * + * @param The type of the plugin descriptors being validated. + * @param accepted The set of plugins that were successfully validated and accepted. + * @param rejected A mapping of plugins to a list of {@link LoaderProblem} instances providing detailed + * reasons for their rejection. + * @param warning A mapping of plugins to a list of {@link LoaderProblem} instances providing detailed + * reasons why they would usually be rejected but are included by configuration choice. + */ +record PluginValidationResult( + Set accepted, + Map> rejected, + Map> warning +) { + + /** + * Combines multiple {@code PluginValidationResult} instances into a single result by merging their + * accepted keys, rejected problems, and warnings. Accepted keys that are also present in either + * rejected or warning collections are excluded from the final result. + * + * @param the type of keys in the validation results + * @param results the array of {@code PluginValidationResult} instances to combine; may include null values + * @return a new {@code PluginValidationResult} instance where all provided results are merged, + * ensuring no overlap between accepted, rejected, and warning entries + */ + @SafeVarargs + static PluginValidationResult merge(PluginValidationResult... 
results) { + Set mergedAccepted = new HashSet<>(); + Map> mergedRejected = new HashMap<>(); + Map> mergedWarning = new HashMap<>(); + + Arrays.stream(results) + .filter(Objects::nonNull) + .forEach(result -> { + if (result.accepted() != null) { + mergedAccepted.addAll(result.accepted()); + } + + if (result.rejected() != null) { + result.rejected().forEach((key, value) -> + mergedRejected.merge(key, new ArrayList<>(value), (left, right) -> { + left.addAll(right); + return left; + }) + ); + } + + if (result.warning() != null) { + result.warning().forEach((key, value) -> + mergedWarning.merge(key, new ArrayList<>(value), (left, right) -> { + left.addAll(right); + return left; + }) + ); + } + }); + + // Warnings and rejection win over acceptance. + // Note: Keep warnings around even if a rejection also exists for the same descriptor, + // because the diagnostic information is still useful. + mergedAccepted.removeAll(mergedWarning.keySet()); + mergedAccepted.removeAll(mergedRejected.keySet()); + + return new PluginValidationResult<>( + Set.copyOf(mergedAccepted), + copyProblemMap(mergedRejected), + copyProblemMap(mergedWarning) + ); + } + + /** + * Creates a defensive copy of the provided map where each key-value mapping is preserved, and the lists of + * {@link LoaderProblem} are converted into immutable copies. 
+ * + * @param the type of the keys in the map + * @param input the map containing keys and lists of {@link LoaderProblem} that needs to be copied + * @return a new map where the original map's structure is maintained, and all lists are immutable + */ + static Map> copyProblemMap(Map> input) { + Map> copy = new HashMap<>(); + input.forEach((key, value) -> copy.put(key, List.copyOf(value))); + return Map.copyOf(copy); + } + +} diff --git a/core/src/main/java/io/gdcc/spi/core/loader/SourceScanner.java b/core/src/main/java/io/gdcc/spi/core/loader/SourceScanner.java new file mode 100644 index 0000000..6a5d32d --- /dev/null +++ b/core/src/main/java/io/gdcc/spi/core/loader/SourceScanner.java @@ -0,0 +1,25 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; + +import java.io.IOException; +import java.nio.file.Path; +import java.util.List; + +/** + * Functional interface for scanning a specified path on the filesystem + * to discover and retrieve plugin descriptors. + * + * Implementations of this interface are responsible for performing the + * scanning operation on the provided {@link Path} and returning a list + * of descriptors that represent the discovered plugins. + * + * For now, this is mostly used to allow injecting custom scanners for + * testing purposes. As such, it is kept package-private. 
+ * + * @see io.gdcc.spi.meta.descriptor.DescriptorScanner + */ +@FunctionalInterface +interface SourceScanner { + List scanPath(Path source) throws IOException; +} diff --git a/core/src/test/java/io/gdcc/spi/core/compiler/LoaderTestEnvironment.java b/core/src/test/java/io/gdcc/spi/core/compiler/LoaderTestEnvironment.java new file mode 100644 index 0000000..a05ae6d --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/compiler/LoaderTestEnvironment.java @@ -0,0 +1,141 @@ +package io.gdcc.spi.core.compiler; + +import javax.annotation.processing.Processor; +import java.io.IOException; +import java.net.URLClassLoader; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +/** + * Represents a test environment for managing the compilation and class loading + * of core and plugin components. This environment facilitates scenarios where + * Java code must be compiled, loaded, and tested dynamically. + * + * The `LoaderTestEnvironment` class is immutable and provides access to the core + * and plugin compilations, their respective outputs, and class loaders. It is + * built using the companion `Builder` class. 
+ */ +public final class LoaderTestEnvironment { + + private final TestCompilation coreCompilation; + private final TestCompilation pluginCompilation; + private final Path pluginArtifact; + private final URLClassLoader coreClassLoader; + + private LoaderTestEnvironment( + TestCompilation coreCompilation, + TestCompilation pluginCompilation, + Path pluginArtifact, + URLClassLoader coreClassLoader + ) { + this.coreCompilation = coreCompilation; + this.pluginCompilation = pluginCompilation; + this.pluginArtifact = pluginArtifact; + this.coreClassLoader = coreClassLoader; + } + + public static Builder builder() { + return new Builder(); + } + + public TestCompilation coreCompilation() { + return coreCompilation; + } + + public TestCompilation pluginCompilation() { + return pluginCompilation; + } + + public Path pluginArtifact() { + return pluginArtifact; + } + + public URLClassLoader coreClassLoader() { + return coreClassLoader; + } + + public Path pluginClassesDirectory() { + return pluginCompilation.classOutputDir(); + } + + public static final class Builder { + private final List coreSources = new ArrayList<>(); + private final List pluginSources = new ArrayList<>(); + private final List pluginProcessors = new ArrayList<>(); + + private String release = "17"; + private boolean packagePluginAsJar = false; + private String pluginJarName = "plugin-under-test.jar"; + + private Builder() { + } + + public Builder withRelease(String release) { + this.release = release; + return this; + } + + public Builder addCoreSource(String relativePath, String content) { + this.coreSources.add(TestJavaCompiler.SourceFile.of(relativePath, content)); + return this; + } + + public Builder addPluginSource(String relativePath, String content) { + this.pluginSources.add(TestJavaCompiler.SourceFile.of(relativePath, content)); + return this; + } + + public Builder addPluginProcessor(Processor processor) { + this.pluginProcessors.add(processor); + return this; + } + + public Builder 
packagePluginAsJar(boolean packagePluginAsJar) { + this.packagePluginAsJar = packagePluginAsJar; + return this; + } + + public Builder withPluginJarName(String pluginJarName) { + this.pluginJarName = pluginJarName; + return this; + } + + public LoaderTestEnvironment build() throws IOException { + TestCompilation coreCompilation = TestJavaCompiler.builder() + .withRelease(release) + .build() + .compile(coreSources); + + coreCompilation.assertSuccess(); + + URLClassLoader coreClassLoader = + coreCompilation.newClassLoader(Thread.currentThread().getContextClassLoader()); + + TestJavaCompiler.Builder pluginCompilerBuilder = TestJavaCompiler.builder() + .withRelease(release) + .withClasspathEntry(coreCompilation.classOutputDir()); + + if (!pluginProcessors.isEmpty()) { + pluginCompilerBuilder.withProcessors(pluginProcessors); + } + + TestCompilation pluginCompilation = pluginCompilerBuilder + .build() + .compile(pluginSources); + + pluginCompilation.assertSuccess(); + + Path pluginArtifact = packagePluginAsJar + ? 
pluginCompilation.createJar(pluginJarName) + : pluginCompilation.classOutputDir(); + + return new LoaderTestEnvironment( + coreCompilation, + pluginCompilation, + pluginArtifact, + coreClassLoader + ); + } + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/compiler/TestCompilation.java b/core/src/test/java/io/gdcc/spi/core/compiler/TestCompilation.java new file mode 100644 index 0000000..fe45bce --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/compiler/TestCompilation.java @@ -0,0 +1,107 @@ +package io.gdcc.spi.core.compiler; + +import javax.tools.Diagnostic; +import javax.tools.JavaFileObject; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.net.URL; +import java.net.URLClassLoader; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.List; +import java.util.jar.JarEntry; +import java.util.jar.JarOutputStream; +import java.util.stream.Stream; + +/** + * An immutable class that encapsulates the result of a Java source code compilation process. + * Provides access to details such as success status, output directories, diagnostics, and utility + * methods for handling compilation results. 
/**
 * Immutable result of a single Java compilation: success flag, the temporary
 * directory layout (root, sources, class output) and the collected diagnostics.
 * Offers helpers to build class loaders, package the output into a JAR and
 * assert overall success.
 */
public final class TestCompilation {

    private final boolean success;
    private final Path rootDir;
    private final Path sourceDir;
    private final Path classOutputDir;
    private final List<Diagnostic<? extends JavaFileObject>> diagnostics;

    TestCompilation(
            boolean success,
            Path rootDir,
            Path sourceDir,
            Path classOutputDir,
            List<Diagnostic<? extends JavaFileObject>> diagnostics) {
        this.success = success;
        this.rootDir = rootDir;
        this.sourceDir = sourceDir;
        this.classOutputDir = classOutputDir;
        // Defensive copy keeps this instance immutable even if the caller's list changes.
        this.diagnostics = List.copyOf(diagnostics);
    }

    /** Whether the compiler reported overall success. */
    public boolean success() {
        return success;
    }

    /** Root of the temporary directory tree used for this compilation. */
    public Path rootDir() {
        return rootDir;
    }

    /** Directory the source files were written to. */
    public Path sourceDir() {
        return sourceDir;
    }

    /** Directory the compiled classes were written to. */
    public Path classOutputDir() {
        return classOutputDir;
    }

    /** All diagnostics emitted by the compiler, in emission order. */
    public List<Diagnostic<? extends JavaFileObject>> diagnostics() {
        return diagnostics;
    }

    /** Renders the diagnostics one per line, each terminated by the platform line separator. */
    public String diagnosticsAsText() {
        // Collectors.joining avoids the quadratic string concatenation of a reduce-based
        // build-up while producing the exact same output (trailing separator included).
        return diagnostics.stream()
            .map(diagnostic -> diagnostic.getKind() + ": " + diagnostic.getMessage(null)
                + System.lineSeparator())
            .collect(Collectors.joining());
    }

    /** Resolves a path below the class output directory, e.g. for generated resources. */
    public Path generatedFile(String relativePath) {
        return classOutputDir.resolve(relativePath);
    }

    /** The class output directory as a single-element URL array, suitable for a URLClassLoader. */
    public URL[] classpathUrls() {
        try {
            return new URL[]{classOutputDir.toUri().toURL()};
        } catch (MalformedURLException e) {
            // Narrowest checked type Path.toUri().toURL() can throw.
            throw new UncheckedIOException(e);
        }
    }

    /** Creates a fresh class loader over the compiled classes. The caller must close it. */
    public URLClassLoader newClassLoader(ClassLoader parent) {
        return URLClassLoader.newInstance(classpathUrls(), parent);
    }

    /**
     * Packages all compiled class files into a JAR inside the root directory.
     *
     * @param fileName name of the JAR file, created below {@link #rootDir()}
     * @return path to the created JAR
     * @throws IOException if walking the class directory or writing the JAR fails
     */
    public Path createJar(String fileName) throws IOException {
        Path jarPath = rootDir.resolve(fileName);

        try (JarOutputStream jarOut = new JarOutputStream(Files.newOutputStream(jarPath));
             Stream<Path> stream = Files.walk(classOutputDir)) {

            for (Path path : stream.filter(Files::isRegularFile).toList()) {
                // JAR entry names always use forward slashes, regardless of platform.
                String entryName = classOutputDir.relativize(path).toString().replace('\\', '/');
                jarOut.putNextEntry(new JarEntry(entryName));
                Files.copy(path, jarOut);
                jarOut.closeEntry();
            }
        }

        return jarPath;
    }

    /**
     * Throws an {@link IllegalStateException} carrying all diagnostics when the
     * compilation was not successful; does nothing otherwise.
     */
    public void assertSuccess() {
        if (!success) {
            throw new IllegalStateException("Compilation failed:\n" + diagnosticsAsText());
        }
    }
}
IllegalStateException("Compilation failed:\n" + diagnosticsAsText()); + } + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/compiler/TestJavaCompiler.java b/core/src/test/java/io/gdcc/spi/core/compiler/TestJavaCompiler.java new file mode 100644 index 0000000..d6ac5ad --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/compiler/TestJavaCompiler.java @@ -0,0 +1,175 @@ +package io.gdcc.spi.core.compiler; + +import javax.annotation.processing.Processor; +import javax.tools.DiagnosticCollector; +import javax.tools.JavaCompiler; +import javax.tools.JavaFileObject; +import javax.tools.StandardJavaFileManager; +import javax.tools.ToolProvider; +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +/** + * A utility class for testing Java compilation using the system Java compiler. + * Provides functionality to configure custom compilation settings + * and compile a set of source files. + * The compiled files are stored in temporary directories during execution. + * This class is immutable and supports configuration through its builder. + */ +public final class TestJavaCompiler { + + private final String release; + private final List classpathEntries; + private final List processors; + private final boolean inheritRuntimeClasspath; + + private TestJavaCompiler(Builder builder) { + this.release = builder.release; + this.classpathEntries = List.copyOf(builder.classpathEntries); + this.processors = List.copyOf(builder.processors); + this.inheritRuntimeClasspath = builder.inheritRuntimeClasspath; + } + + public static Builder builder() { + return new Builder(); + } + + public TestCompilation compile(List sources) throws IOException { + JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); + if (compiler == null) { + throw new IllegalStateException( + "No system Java compiler available. 
Are tests running on a JRE instead of a JDK?" + ); + } + + Path tempDir = Files.createTempDirectory("test-java-compiler"); + Path sourceDir = tempDir.resolve("src"); + Path classOutputDir = tempDir.resolve("classes"); + Files.createDirectories(sourceDir); + Files.createDirectories(classOutputDir); + + List sourcePaths = new ArrayList<>(); + for (SourceFile source : sources) { + Path file = sourceDir.resolve(source.relativePath()); + Files.createDirectories(file.getParent()); + Files.writeString(file, source.content(), StandardCharsets.UTF_8); + sourcePaths.add(file); + } + + DiagnosticCollector diagnostics = new DiagnosticCollector<>(); + + try (StandardJavaFileManager fileManager = + compiler.getStandardFileManager(diagnostics, null, StandardCharsets.UTF_8)) { + + Iterable compilationUnits = + fileManager.getJavaFileObjectsFromPaths(sourcePaths); + + List options = new ArrayList<>(); + options.add("--release"); + options.add(release); + options.add("-d"); + options.add(classOutputDir.toString()); + + String classpath = buildClasspath(); + if (!classpath.isBlank()) { + options.add("-classpath"); + options.add(classpath); + } + + JavaCompiler.CompilationTask task = compiler.getTask( + null, + fileManager, + diagnostics, + options, + null, + compilationUnits + ); + + if (!processors.isEmpty()) { + task.setProcessors(processors); + } + + boolean success = Boolean.TRUE.equals(task.call()); + + return new TestCompilation( + success, + tempDir, + sourceDir, + classOutputDir, + List.copyOf(diagnostics.getDiagnostics()) + ); + } + } + + private String buildClasspath() { + List entries = new ArrayList<>(); + + if (inheritRuntimeClasspath) { + String runtimeClasspath = System.getProperty("java.class.path", ""); + if (!runtimeClasspath.isBlank()) { + entries.add(runtimeClasspath); + } + } + + for (Path classpathEntry : classpathEntries) { + entries.add(classpathEntry.toString()); + } + + return String.join(File.pathSeparator, entries); + } + + public static final class 
Builder { + private String release = "17"; + private final List classpathEntries = new ArrayList<>(); + private final List processors = new ArrayList<>(); + private boolean inheritRuntimeClasspath = true; + + private Builder() { + } + + public Builder withRelease(String release) { + this.release = release; + return this; + } + + public Builder withClasspathEntry(Path path) { + this.classpathEntries.add(path); + return this; + } + + public Builder withClasspathEntries(List paths) { + this.classpathEntries.addAll(paths); + return this; + } + + public Builder withProcessor(Processor processor) { + this.processors.add(processor); + return this; + } + + public Builder withProcessors(List processors) { + this.processors.addAll(processors); + return this; + } + + public Builder withInheritRuntimeClasspath(boolean inheritRuntimeClasspath) { + this.inheritRuntimeClasspath = inheritRuntimeClasspath; + return this; + } + + public TestJavaCompiler build() { + return new TestJavaCompiler(this); + } + } + + public record SourceFile(String relativePath, String content) { + public static SourceFile of(String relativePath, String content) { + return new SourceFile(relativePath, content); + } + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/loader/DescriptorBuilder.java b/core/src/test/java/io/gdcc/spi/core/loader/DescriptorBuilder.java new file mode 100644 index 0000000..4b3d65f --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/loader/DescriptorBuilder.java @@ -0,0 +1,191 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.test.basic.TestContract; +import io.gdcc.spi.core.test.basic.TestPlugin; +import io.gdcc.spi.meta.descriptor.Descriptor; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; + +import java.nio.file.Path; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.function.IntUnaryOperator; +import java.util.function.UnaryOperator; + +final class DescriptorBuilder { + 
private Path source; + private String className; + private String kind; + private Map contracts; + private Map requiredProviders; + + private DescriptorBuilder() { + } + + static DescriptorBuilder aDescriptor() { + DescriptorBuilder builder = new DescriptorBuilder(); + builder.source = Path.of("target", "test-classes"); + builder.className = DescriptorFormat.transformClassName(TestPlugin.class); + builder.kind = DescriptorFormat.transformClassName(TestContract.class); + builder.contracts = new LinkedHashMap<>( + Map.of( + DescriptorFormat.transformClassName(TestContract.class), + TestContract.API_LEVEL + )); + builder.requiredProviders = new LinkedHashMap<>(); + return builder; + } + + DescriptorBuilder but() { + DescriptorBuilder copy = new DescriptorBuilder(); + copy.source = this.source; + copy.className = this.className; + copy.kind = this.kind; + copy.contracts = new LinkedHashMap<>(this.contracts); + copy.requiredProviders = new LinkedHashMap<>(this.requiredProviders); + return copy; + } + + DescriptorBuilder withSource(Path source) { + this.source = source; + return this; + } + + DescriptorBuilder withSource(String first, String... more) { + this.source = Path.of(first, more); + return this; + } + + DescriptorBuilder mapSource(UnaryOperator mapper) { + this.source = mapper.apply(this.source); + return this; + } + + DescriptorBuilder withClassName(String className) { + this.className = className; + return this; + } + + DescriptorBuilder withClassName(Class implementationClass) { + this.className = DescriptorFormat.transformClassName(implementationClass); + return this; + } + + DescriptorBuilder mapClassName(UnaryOperator mapper) { + this.className = mapper.apply(this.className); + return this; + } + + DescriptorBuilder withClassPackage(String packageName) { + int lastDot = className.lastIndexOf('.'); + String simpleName = lastDot >= 0 ? className.substring(lastDot + 1) : className; + this.className = packageName == null || packageName.isBlank() + ? 
simpleName + : packageName + "." + simpleName; + return this; + } + + DescriptorBuilder withKind(String kind) { + this.kind = kind; + return this; + } + + DescriptorBuilder withKind(Class kindClass) { + this.kind = DescriptorFormat.transformClassName(kindClass); + return this; + } + + DescriptorBuilder mapKind(UnaryOperator mapper) { + this.kind = mapper.apply(this.kind); + return this; + } + + DescriptorBuilder withContracts(Map contracts) { + this.contracts = new LinkedHashMap<>(contracts); + return this; + } + + DescriptorBuilder withoutContracts() { + this.contracts.clear(); + return this; + } + + DescriptorBuilder withContract(String contract, int level) { + this.contracts.put(contract, level); + return this; + } + + DescriptorBuilder withContract(Class contractClass, int level) { + return withContract(DescriptorFormat.transformClassName(contractClass), level); + } + + DescriptorBuilder withoutContract(String contract) { + this.contracts.remove(contract); + return this; + } + + DescriptorBuilder withoutContract(Class contractClass) { + return withoutContract(DescriptorFormat.transformClassName(contractClass)); + } + + DescriptorBuilder mapContract(String contract, IntUnaryOperator mapper) { + Integer current = this.contracts.get(contract); + if (current == null) { + throw new IllegalArgumentException("Contract not present: " + contract); + } + this.contracts.put(contract, mapper.applyAsInt(current)); + return this; + } + + DescriptorBuilder mapContract(Class contractClass, IntUnaryOperator mapper) { + return mapContract(DescriptorFormat.transformClassName(contractClass), mapper); + } + + DescriptorBuilder withRequiredProviders(Map requiredProviders) { + this.requiredProviders = new LinkedHashMap<>(requiredProviders); + return this; + } + + DescriptorBuilder withRequiredProvider(String provider, int level) { + this.requiredProviders.put(provider, level); + return this; + } + + DescriptorBuilder withRequiredProvider(Class providerClass, int level) { + return 
withRequiredProvider(DescriptorFormat.transformClassName(providerClass), level); + } + + DescriptorBuilder withoutRequiredProvider(String provider) { + this.requiredProviders.remove(provider); + return this; + } + + DescriptorBuilder withoutRequiredProvider(Class providerClass) { + return withoutRequiredProvider(DescriptorFormat.transformClassName(providerClass)); + } + + DescriptorBuilder mapRequiredProvider(String provider, IntUnaryOperator mapper) { + Integer current = this.requiredProviders.get(provider); + if (current == null) { + throw new IllegalArgumentException("Required provider not present: " + provider); + } + this.requiredProviders.put(provider, mapper.applyAsInt(current)); + return this; + } + + DescriptorBuilder mapRequiredProvider(Class providerClass, IntUnaryOperator mapper) { + return mapRequiredProvider(DescriptorFormat.transformClassName(providerClass), mapper); + } + + SourcedDescriptor build() { + return new SourcedDescriptor( + source, + new Descriptor( + className, + kind, + Map.copyOf(contracts), + Map.copyOf(requiredProviders) + ) + ); + } +} \ No newline at end of file diff --git a/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java new file mode 100644 index 0000000..00f9f76 --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/loader/LoaderHelperTest.java @@ -0,0 +1,815 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.test.basic.TestContract; +import io.gdcc.spi.core.test.basic.TestProvider; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.descriptor.SourcedDescriptor; +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import 
java.util.List; +import java.util.Map; +import java.util.jar.JarOutputStream; +import java.util.zip.ZipEntry; + +import static io.gdcc.spi.meta.descriptor.DescriptorFormat.transformClassName; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class LoaderHelperTest { + + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + + @Nested + class DetermineApiLevel { + + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + + @Test + void determineCoreApiLevel_validClass() { + assertEquals(TestContract.API_LEVEL, LoaderHelper.determineCoreApiLevel(TestContract.class)); + } + + @Test + void determineCoreApiLevel_validFQCN() { + assertEquals( + TestContract.API_LEVEL, + LoaderHelper.determineCoreApiLevel(transformClassName(TestContract.class), classLoader) + ); + } + + @Test + void determineCoreApiLevel_invalidFQCN() { + assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.determineCoreApiLevel("foo.Bar", classLoader) + ); + } + + @Test + void determineCoreApiLevel_providerClass() { + assertEquals( + TestProvider.API_LEVEL, + LoaderHelper.determineCoreApiLevel(transformClassName(TestProvider.class), classLoader) + ); + } + } + + @Nested + class NoClassCollisions { + + @Test + void noCollisions_happyPath() { + // given + List descriptors = List.of( + DescriptorBuilder.aDescriptor().withClassPackage("com.example").build(), + DescriptorBuilder.aDescriptor().withClassPackage("com.foobar").build() + ); + + // when + var results = LoaderHelper.verifyNoClassCollisions(descriptors, classLoader); + + System.out.println(results); + + // then + assertEquals(2, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void 
noCollisions_pluginClassesCollide() { + // given + // Same FQCN (com.foobar.TestPlugin), but from different sources. + List descriptors = List.of( + DescriptorBuilder.aDescriptor() + .withSource("foobar.jar") + .withClassPackage("com.foobar") + .build(), + DescriptorBuilder.aDescriptor() + .withSource("example.jar") + .withClassPackage("com.foobar") + .build() + ); + + // when + var results = LoaderHelper.verifyNoClassCollisions(descriptors, classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + for (Map.Entry> result : results.rejected().entrySet()) { + List problems = result.getValue(); + assertTrue(problems.stream().allMatch( + problem -> problem.getClass().equals(LoaderProblem.PluginClassNameCollision.class) + )); + } + } + + @Test + void noCollisions_pluginClassesCollideWithCore() { + // given + // Same FQCN as TestPlugin, but from different source + List descriptors = List.of( + DescriptorBuilder.aDescriptor().withSource("foobar.jar").build() + ); + + // when + var results = LoaderHelper.verifyNoClassCollisions(descriptors, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + for (Map.Entry> result : results.rejected().entrySet()) { + List problems = result.getValue(); + assertTrue(problems.stream().allMatch( + problem -> problem.getClass().equals(LoaderProblem.PluginClassNameCollisionWithCore.class) + )); + } + } + } + + @Nested + class IdentifyNonImplementations { + + @Test + void identifyNonImplementations_matchingDescriptorIsAccepted() { + // given + List descriptors = List.of( + DescriptorBuilder.aDescriptor().build() + ); + + // when + var results = LoaderHelper.identifyNonImplementations( + descriptors, + TestContract.class, + LoaderConfiguration.defaults() + ); + + // then + assertEquals(1, results.accepted().size()); + 
assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void identifyNonImplementations_nonMatchingDescriptorIsRejectedWhenEnforced() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withKind("com.example.OtherContract") + .build(); + + // when + var results = LoaderHelper.identifyNonImplementations( + List.of(descriptor), + TestContract.class, + LoaderConfiguration.defaults() + ); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + + List problems = results.rejected().get(descriptor); + assertEquals(1, problems.size()); + assertInstanceOf(LoaderProblem.PluginClassMismatch.class, problems.get(0)); + } + + @Test + void identifyNonImplementations_nonMatchingDescriptorIsWarningWhenNotEnforced() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withKind("com.example.OtherContract") + .build(); + + // when + var results = LoaderHelper.identifyNonImplementations( + List.of(descriptor), + TestContract.class, + LoaderConfiguration.permissive() + ); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(1, results.warning().size()); + assertEquals(0, results.rejected().size()); + + List problems = results.warning().get(descriptor); + assertEquals(1, problems.size()); + assertInstanceOf(LoaderProblem.PluginClassMismatch.class, problems.get(0)); + } + + @Test + void identifyNonImplementations_mixedDescriptorsSeparatesAcceptedAndRejected() { + // given + SourcedDescriptor matching = DescriptorBuilder.aDescriptor() + .withSource("matching.jar") + .build(); + SourcedDescriptor nonMatching = DescriptorBuilder.aDescriptor() + .withSource("non-matching.jar") + .withKind("com.example.OtherContract") + .build(); + + // when + var results = LoaderHelper.identifyNonImplementations( + List.of(matching, nonMatching), + TestContract.class, + 
LoaderConfiguration.defaults() + ); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(matching)); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(nonMatching)); + } + + @Test + void identifyNonImplementations_mixedDescriptorsSeparatesAcceptedAndWarnings() { + // given + SourcedDescriptor matching = DescriptorBuilder.aDescriptor() + .withSource("matching.jar") + .build(); + SourcedDescriptor nonMatching = DescriptorBuilder.aDescriptor() + .withSource("non-matching.jar") + .withKind("com.example.OtherContract") + .build(); + + // when + var results = LoaderHelper.identifyNonImplementations( + List.of(matching, nonMatching), + TestContract.class, + LoaderConfiguration.permissive() + ); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(matching)); + assertEquals(1, results.warning().size()); + assertTrue(results.warning().containsKey(nonMatching)); + assertEquals(0, results.rejected().size()); + } + } + + @Nested + class VerifyServiceProviderRecords { + + @TempDir + Path tempDir; + + @Test + void verifyServiceProviderRecords_directorySourceWithMatchingRecordIsAccepted() throws Exception { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(tempDir.toString()) + .build(); + + Path spiFile = createSpiFile( + tempDir, + descriptor.plugin().kind(), + descriptor.plugin().klass() + ); + + // when + var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertTrue(Files.isRegularFile(spiFile)); + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyServiceProviderRecords_jarSourceWithMatchingRecordIsAccepted() throws Exception { + // given + Path jarPath = 
tempDir.resolve("plugin.jar"); + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(jarPath.toString()) + .build(); + + createJarWithSpiRecord( + jarPath, + descriptor.plugin().kind(), + descriptor.plugin().klass() + ); + + // when + var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyServiceProviderRecords_missingRecordIsRejected() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(tempDir.toString()) + .build(); + + // when + var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + + List problems = results.rejected().get(descriptor); + assertEquals(1, problems.size()); + assertInstanceOf(LoaderProblem.MissingServiceProviderRecord.class, problems.get(0)); + } + + @Test + void verifyServiceProviderRecords_missingSourceIsRejectedAsLocationFailure() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withSource(tempDir.resolve("missing-plugin.jar").toString()) + .build(); + + // when + var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + + List problems = results.rejected().get(descriptor); + assertEquals(1, problems.size()); + assertInstanceOf(LoaderProblem.LocationFailure.class, problems.get(0)); + } + + @Test + void verifyServiceProviderRecords_recordWithWhitespaceAndCommentsIsAccepted() throws Exception { + // given + SourcedDescriptor descriptor = 
DescriptorBuilder.aDescriptor() + .withSource(tempDir.toString()) + .build(); + + createSpiFile( + tempDir, + descriptor.plugin().kind(), + """ + # service registrations + %s # primary implementation + + com.example.OtherImplementation + """.formatted(descriptor.plugin().klass()) + ); + + // when + var results = LoaderHelper.verifyServiceProviderRecords(List.of(descriptor)); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + private Path createSpiFile(Path root, String kind, String content) throws Exception { + Path serviceFile = root.resolve("META-INF").resolve("services").resolve(kind); + Files.createDirectories(serviceFile.getParent()); + Files.writeString(serviceFile, content, StandardCharsets.UTF_8); + return serviceFile; + } + + private void createJarWithSpiRecord(Path jarPath, String kind, String content) throws Exception { + Path parent = jarPath.getParent(); + if (parent != null) { + Files.createDirectories(parent); + } + + try ( + OutputStream fileOut = Files.newOutputStream(jarPath); + JarOutputStream jarOut = new JarOutputStream(fileOut) + ) { + String entryName = "META-INF/services/" + kind; + jarOut.putNextEntry(new ZipEntry(entryName)); + jarOut.write(content.getBytes(StandardCharsets.UTF_8)); + jarOut.closeEntry(); + } + } + } + + @Nested + class VerifyApiLevels { + + @Test + void verifyApiLevels_happyPath() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor().build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyApiLevels_baseContractLevelMismatch() { + // given + 
SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .mapContract(TestContract.class, level -> level + 1) + .build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + assertInstanceOf(LoaderProblem.PluginClassApiLevelMismatch.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyApiLevels_missingBaseContract() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withoutContract(TestContract.class) + .build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + assertInstanceOf(LoaderProblem.PluginClassApiLevelMissing.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyApiLevels_unsupportedContract() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withContract("com.example.UnsupportedContract", 7) + .build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + assertInstanceOf(LoaderProblem.PluginClassUnsupported.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyApiLevels_reportsMultipleProblemsForSingleDescriptor() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + 
.withoutContract(TestContract.class) + .withContract("com.example.UnsupportedContract", 7) + .build(); + + // when + var results = LoaderHelper.verifyPluginApiLevels(List.of(descriptor), TestContract.class, classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + + List problems = results.rejected().get(descriptor); + assertEquals(2, problems.size()); + assertTrue(problems.stream().anyMatch( + problem -> problem.getClass().equals(LoaderProblem.PluginClassUnsupported.class) + )); + assertTrue(problems.stream().anyMatch( + problem -> problem.getClass().equals(LoaderProblem.PluginClassApiLevelMissing.class) + )); + } + } + + @Nested + class VerifyProviderApiLevels { + + @Test + void verifyProviderApiLevels_happyPath() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of(transformClassName(TestProvider.class), TestProvider.API_LEVEL)) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyProviderApiLevels_providerLevelMismatch() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of(transformClassName(TestProvider.class), TestProvider.API_LEVEL + 1)) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + 
assertInstanceOf(LoaderProblem.ProviderApiLevelMismatch.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyProviderApiLevels_unsupportedProvider() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of("com.example.MissingProvider", 7)) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + assertInstanceOf(LoaderProblem.ProviderClassUnsupported.class, results.rejected().get(descriptor).get(0)); + } + + @Test + void verifyProviderApiLevels_reportsMultipleProblemsForSingleDescriptor() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of( + transformClassName(TestProvider.class), TestProvider.API_LEVEL + 1, + "com.example.MissingProvider", 7 + )) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(0, results.accepted().size()); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(descriptor)); + + List problems = results.rejected().get(descriptor); + assertEquals(2, problems.size()); + assertTrue(problems.stream().anyMatch( + problem -> problem.getClass().equals(LoaderProblem.ProviderApiLevelMismatch.class) + )); + assertTrue(problems.stream().anyMatch( + problem -> problem.getClass().equals(LoaderProblem.ProviderClassUnsupported.class) + )); + } + + @Test + void verifyProviderApiLevels_descriptorWithoutRequiredProvidersIsAccepted() { + // given + SourcedDescriptor descriptor = DescriptorBuilder.aDescriptor() + .withRequiredProviders(Map.of()) + .build(); + + // when + var results = 
LoaderHelper.verifyProviderApiLevels(List.of(descriptor), classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(descriptor)); + assertEquals(0, results.warning().size()); + assertEquals(0, results.rejected().size()); + } + + @Test + void verifyProviderApiLevels_mixedDescriptorsSeparatesAcceptedAndRejected() { + // given + SourcedDescriptor matching = DescriptorBuilder.aDescriptor() + .withSource("matching.jar") + .withRequiredProviders(Map.of(transformClassName(TestProvider.class), TestProvider.API_LEVEL)) + .build(); + + SourcedDescriptor mismatching = DescriptorBuilder.aDescriptor() + .withSource("mismatching.jar") + .withRequiredProviders(Map.of(transformClassName(TestProvider.class), TestProvider.API_LEVEL + 1)) + .build(); + + // when + var results = LoaderHelper.verifyProviderApiLevels(List.of(matching, mismatching), classLoader); + + // then + assertEquals(1, results.accepted().size()); + assertTrue(results.accepted().contains(matching)); + assertEquals(0, results.warning().size()); + assertEquals(1, results.rejected().size()); + assertTrue(results.rejected().containsKey(mismatching)); + } + } + + @Nested + class ToPluginDescriptor { + + interface TestProvider extends CoreProvider { + int API_LEVEL = 7; + } + + @PluginContract(role = PluginContract.Role.BASE) + interface TestBasePlugin extends Plugin { + int API_LEVEL = 3; + } + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { TestBasePlugin.class } + ) + interface TestCapabilityPlugin extends Plugin { + int API_LEVEL = 5; + } + + static class GoodPlugin implements TestBasePlugin, TestCapabilityPlugin { + @Override + public String identity() { + return "good-plugin"; + } + } + + static class NullIdentityPlugin implements TestBasePlugin { + @Override + public String identity() { + return null; + } + } + + static class BlankIdentityPlugin implements TestBasePlugin { + @Override + public String identity() { + return " "; + } + 
} + + @Test + void toPluginDescriptor_happyPath() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withSource("plugins", "good-plugin.jar") + .withClassName(GoodPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + transformClassName(TestBasePlugin.class), TestBasePlugin.API_LEVEL, + transformClassName(TestCapabilityPlugin.class), TestCapabilityPlugin.API_LEVEL + )) + .withRequiredProviders(Map.of( + transformClassName(TestProvider.class), TestProvider.API_LEVEL + )) + .build(); + GoodPlugin plugin = new GoodPlugin(); + + // when + var result = LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader); + + // then + assertEquals(Path.of("plugins", "good-plugin.jar"), result.sourceLocation()); + assertEquals("good-plugin", result.identity()); + assertEquals(GoodPlugin.class, result.pluginClass()); + assertEquals(TestBasePlugin.class, result.kindClass()); + assertEquals(2, result.contracts().size()); + assertEquals(TestBasePlugin.API_LEVEL, result.contracts().get(TestBasePlugin.class)); + assertEquals(TestCapabilityPlugin.API_LEVEL, result.contracts().get(TestCapabilityPlugin.class)); + assertEquals(1, result.requiredProviders().size()); + assertEquals(TestProvider.API_LEVEL, result.requiredProviders().get(TestProvider.class)); + } + + @Test + void toPluginDescriptor_rejectsNullIdentity() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withSource("plugins", "null-identity.jar") + .withClassName(NullIdentityPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + transformClassName(TestBasePlugin.class), TestBasePlugin.API_LEVEL + )) + .build(); + NullIdentityPlugin plugin = new NullIdentityPlugin(); + + // when + then + var exception = assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + assertTrue(exception.getMessage().contains("Plugin identity may not be")); 
+ } + + @Test + void toPluginDescriptor_rejectsBlankIdentity() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withSource("plugins", "blank-identity.jar") + .withClassName(BlankIdentityPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + transformClassName(TestBasePlugin.class), TestBasePlugin.API_LEVEL + )) + .build(); + BlankIdentityPlugin plugin = new BlankIdentityPlugin(); + + // when + then + var exception = assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + assertTrue(exception.getMessage().contains("Plugin identity may not be")); + } + + @Test + void toPluginDescriptor_failsWhenContractClassCannotBeResolved() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withClassName(GoodPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + "com.example.DoesNotExist", 99 + )) + .build(); + GoodPlugin plugin = new GoodPlugin(); + + // when + then + assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + } + + @Test + void toPluginDescriptor_failsWhenContractClassEmpty() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withClassName(GoodPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of()) + .withRequiredProviders(Map.of()) + .build(); + GoodPlugin plugin = new GoodPlugin(); + + // when + then + assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + } + + @Test + void toPluginDescriptor_failsWhenProviderClassCannotBeResolved() { + // given + SourcedDescriptor sourceDescriptor = DescriptorBuilder.aDescriptor() + .withClassName(GoodPlugin.class) + .withKind(TestBasePlugin.class) + .withContracts(Map.of( + transformClassName(TestBasePlugin.class), 
TestBasePlugin.API_LEVEL + )) + .withRequiredProviders(Map.of( + "com.example.MissingProvider", 42 + )) + .build(); + GoodPlugin plugin = new GoodPlugin(); + + // when + then + assertThrows( + IllegalArgumentException.class, + () -> LoaderHelper.toPluginDescriptor(sourceDescriptor, plugin, classLoader) + ); + } + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java new file mode 100644 index 0000000..1efc568 --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderIntegrationTest.java @@ -0,0 +1,125 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.compiler.LoaderTestEnvironment; +import io.gdcc.spi.meta.plugin.Plugin; +import io.gdcc.spi.meta.processor.PluginContractProcessor; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; + +import java.nio.file.Path; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; + +class PluginLoaderIntegrationTest { + + final String contractPackage = "test.spi"; + final String contractClass = "TestPlugin"; + + final String pluginPackage = "test.plugins"; + + final String baseContractClassFile = contractPackage.replace(".", "/") + "/" + contractClass + ".java"; + final String baseContractCode = """ + package %s; + + import io.gdcc.spi.meta.plugin.Plugin; + import io.gdcc.spi.meta.annotations.PluginContract; + + @PluginContract(role = PluginContract.Role.BASE) + public interface %s extends Plugin { + int API_LEVEL = %s; + } + """; + + final String pluginCodeTemplate = """ + package %s; + + import io.gdcc.spi.meta.annotations.DataversePlugin; + import %s.%s; + + @DataversePlugin + public class %s implements %s { + @Override + public String identity() { + return 
"test"; + } + } + """; + + final String simplePluginClass = "SimplePlugin"; + final String simplePluginClassFile = pluginPackage.replace(".", "/") + "/" + simplePluginClass + ".java"; + final String simplePluginCode = pluginCodeTemplate.formatted(pluginPackage, contractPackage, contractClass, simplePluginClass, contractClass); + + @ParameterizedTest + @CsvSource({"1,2","2,1"}) + void rejectsPluginCompiledAgainstDifferentBaseApiLevel(int coreLevel, int pluginLevel) throws Exception { + // Given + LoaderTestEnvironment env = LoaderTestEnvironment.builder() + .addCoreSource( + baseContractClassFile, + baseContractCode.formatted(contractPackage, contractClass, coreLevel) + ) + .addPluginSource( + baseContractClassFile, + baseContractCode.formatted(contractPackage, contractClass, pluginLevel) + ) + .addPluginSource( + simplePluginClassFile, + simplePluginCode + ) + .addPluginProcessor(new PluginContractProcessor()) + .packagePluginAsJar(false) + .build(); + + Class pluginContractClass = env.coreClassLoader().loadClass(contractPackage + "." 
+ contractClass); + @SuppressWarnings("unchecked") + Class typedContract = (Class) pluginContractClass; + + PluginLoader loader = new PluginLoader<>(typedContract, env.coreClassLoader()); + Path pluginLocation = Path.of(env.pluginArtifact().toString()); + + // When + Then + var ex = assertThrows(LoaderException.class, () -> loader.load(pluginLocation)); + assertEquals(1, ex.getProblems().size()); + assertInstanceOf(LoaderProblem.PluginClassApiLevelMismatch.class, ex.getProblems().get(0)); + } + + @Test + void acceptsPluginCompiledAgainstSameBaseApiLevel() throws Exception { + // Given + int apiLevel = 5; + + LoaderTestEnvironment env = LoaderTestEnvironment.builder() + .addCoreSource( + baseContractClassFile, + baseContractCode.formatted(contractPackage, contractClass, apiLevel) + ) + .addPluginSource( + baseContractClassFile, + baseContractCode.formatted(contractPackage, contractClass, apiLevel) + ) + .addPluginSource( + simplePluginClassFile, + simplePluginCode + ) + .addPluginProcessor(new PluginContractProcessor()) + .packagePluginAsJar(false) + .build(); + + Class pluginContractClass = env.coreClassLoader().loadClass(contractPackage + "." + contractClass); + @SuppressWarnings("unchecked") + Class typedContract = (Class) pluginContractClass; + + PluginLoader loader = new PluginLoader<>(typedContract, env.coreClassLoader()); + Path pluginLocation = Path.of(env.pluginArtifact().toString()); + + // When + var plugins = loader.load(pluginLocation); + + // Then + assertEquals(1, plugins.size()); + assertEquals(pluginPackage + "." 
+ simplePluginClass, plugins.get(0).plugin().getClass().getName()); + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java new file mode 100644 index 0000000..be21706 --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/loader/PluginLoaderTest.java @@ -0,0 +1,200 @@ +package io.gdcc.spi.core.loader; + +import io.gdcc.spi.core.test.basic.TestContract; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.plugin.Plugin; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.URL; +import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class PluginLoaderTest { + + @Nested + class ValidateBaseClass { + @Test + void validatePluginBaseClass_validBaseClass() { + assertDoesNotThrow(() -> PluginLoader.validatePluginBaseClass(TestContract.class)); + } + + @Test + void validatePluginBaseClass_invalidBaseClass_unrelated() { + // Given + interface UnrelatedInterfaceNotExtendingPlugin { + } + + // When & Then + assertThrows(IllegalArgumentException.class, () -> PluginLoader.validatePluginBaseClass(UnrelatedInterfaceNotExtendingPlugin.class)); + } + + @Test + void validatePluginBaseClass_invalidBaseClass_wrongType() { + // Given + class NotAnInterfacePlugin { + } + + // When & Then + assertThrows(IllegalArgumentException.class, () -> 
PluginLoader.validatePluginBaseClass(NotAnInterfacePlugin.class)); + } + + @Test + void validatePluginBaseClass_invalidBaseClass_wrongRole() { + + // NOTE: + // This local interface bypasses the annotation processor intentionally. + // The processor would reject this at compile time in real source files. + // This test verifies the runtime validation in PluginLoader. + + // Given + @PluginContract(role = PluginContract.Role.CAPABILITY) + interface IncorrectRolePlugin extends Plugin { + } + + // When & Then + assertThrows(IllegalArgumentException.class, () -> PluginLoader.validatePluginBaseClass(IncorrectRolePlugin.class)); + } + } + + @Nested + class FindSources { + @Test + void findSources_HappyPath() { + // Given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut); + Path directory = Path.of("target/test-classes/" + sut.getPackageName().replaceAll("\\.", "/")); + + // When + Map sources = assertDoesNotThrow(() -> loader.findSources(directory)); + + // Then + assertFalse(sources.isEmpty()); + assertTrue(sources.containsKey(directory)); + } + + @Test + void findSources_NoSuchFile() { + // Given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut); + Path directory = Path.of("nosuchdir"); + + // When + LoaderException ex = assertThrows(LoaderException.class, () -> loader.findSources(directory)); + + // Then + assertEquals("NoSuchFileException: nosuchdir", ex.getProblems().get(0).message()); + } + + @Test + void findSources_NoDirectory() { + // Given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut); + Path notDirectory = Path.of("target/test-classes/" + DescriptorFormat.transformClassName(sut).replaceAll("\\.", "/") + ".class"); + + // When + LoaderException ex = assertThrows(LoaderException.class, () -> loader.findSources(notDirectory)); + + // Then + assertEquals("NotDirectoryException: " + notDirectory, ex.getProblems().get(0).message()); + } + } + + @Nested + class Preload 
{ + + LoaderConfiguration enforcingConfig = LoaderConfiguration.defaults(); + + LoaderConfiguration permissiveConfig = LoaderConfiguration.permissive(); + + @Test + void preLoad_throwsOnNormalProblemsWhenEnforcing() { + // given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut, enforcingConfig); + Path rootClassPath = Path.of("target/test-classes/"); + // Should generate class name conflict with core + SourceScanner scanner = source -> List.of(DescriptorBuilder.aDescriptor().build()); + + // when + var exception = assertThrows(LoaderException.class, () -> loader.preloadPlugins(Set.of(rootClassPath), scanner)); + + // then + assertInstanceOf(LoaderProblem.PluginClassNameCollisionWithCore.class, exception.getProblems().get(0)); + } + + @Test + void preLoad_throwsOnIOExceptionsWhenEnforcing() { + // given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut, enforcingConfig); + Path rootClassPath = Path.of("target/test-classes/"); + // Should generate class name conflict with core + SourceScanner scanner = source -> { + throw new IOException("Test exception"); + }; + + // when + var exception = assertThrows(LoaderException.class, () -> loader.preloadPlugins(Set.of(rootClassPath), scanner)); + + // then + assertInstanceOf(LoaderProblem.LocationFailure.class, exception.getProblems().get(0)); + } + + @Test + void preLoad_continuesOnProblemsWhenPermissive() { + // given + Class sut = TestContract.class; + PluginLoader loader = new PluginLoader<>(sut, permissiveConfig); + Path rootClassPath = Path.of("target/test-classes/"); + // Should generate class name conflict with core + SourceScanner scanner = source -> List.of(DescriptorBuilder.aDescriptor().build()); + + // when + var result = loader.preloadPlugins(Set.of(rootClassPath), scanner); + + // then + assertEquals(0, result.size()); + } + } + + @Test + void load() throws MalformedURLException { + // Given + Class sut = TestContract.class; + PluginLoader loader = 
new PluginLoader<>(sut); + Path rootClassPath = Path.of("target/test-classes/"); + + /* + Map sources = Map.of(rootClassPath, new URL[]{PluginLoader.pathToUrl(rootClassPath, null, null)}); + + // When + Map plugins = loader.load(sources); + + // Then + assertFalse(plugins.isEmpty()); + List pluginSources = plugins.keySet().stream().toList(); + assertEquals(1, pluginSources.size()); + PluginOrigin source = pluginSources.get(0); + assertEquals(rootClassPath, source.location()); + assertEquals(TestPlugin.class.getName(), source.className()); + assertEquals(new TestPlugin().identity(), source.identity()); + + */ + } +} \ No newline at end of file diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java new file mode 100644 index 0000000..50248a5 --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/test/basic/TestContract.java @@ -0,0 +1,12 @@ +package io.gdcc.spi.core.test.basic; + +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.plugin.Plugin; + +@PluginContract(role = PluginContract.Role.BASE) +public interface TestContract extends Plugin { + + int API_LEVEL = 1; + + void test(); +} diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java new file mode 100644 index 0000000..7148d0c --- /dev/null +++ b/core/src/test/java/io/gdcc/spi/core/test/basic/TestPlugin.java @@ -0,0 +1,15 @@ +package io.gdcc.spi.core.test.basic; + +import io.gdcc.spi.meta.plugin.Plugin; + +public class TestPlugin implements TestContract { + @Override + public String identity() { + return "test"; + } + + @Override + public void test() { + /* Intentionally left blank */ + } +} diff --git a/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java b/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java new file mode 100644 index 0000000..dc52639 --- /dev/null +++ 
b/core/src/test/java/io/gdcc/spi/core/test/basic/TestProvider.java @@ -0,0 +1,7 @@ +package io.gdcc.spi.core.test.basic; + +import io.gdcc.spi.meta.plugin.CoreProvider; + +public interface TestProvider extends CoreProvider { + int API_LEVEL = 1; +} diff --git a/export/pom.xml b/export/pom.xml new file mode 100644 index 0000000..8c035ad --- /dev/null +++ b/export/pom.xml @@ -0,0 +1,37 @@ + + + 4.0.0 + + io.gdcc.spi + parent + 2.1.0-SNAPSHOT + + + export + + + + + + + io.gdcc.spi + meta + + + + + jakarta.json + jakarta.json-api + provided + + + + jakarta.ws.rs + jakarta.ws.rs-api + provided + + + + \ No newline at end of file diff --git a/export/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java b/export/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java new file mode 100644 index 0000000..6fd4b48 --- /dev/null +++ b/export/src/main/java/io/gdcc/spi/export/DatasetExportQuery.java @@ -0,0 +1,206 @@ +package io.gdcc.spi.export; + +import java.util.Collection; +import java.util.HashSet; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; + +/** + * Defines what dataset metadata to retrieve and at what level of detail + * for dataset-oriented export operations. + *

+ * This is a pure data-shape specification: it answers which aspects of a dataset + * should be included in an export, and optionally how file metadata nested within + * that dataset should be shaped. It deliberately does not address which + * datasets to operate on (that is a selection concern at a higher level), nor + * how much data to retrieve per call — pagination is a separate, + * orthogonal concern expressed via a {@code PageRequest} at the method level. + *

+ * File metadata shaping is optional: if no {@link FileExportQuery} is provided, + * methods that include file metadata will apply their own defaults. Methods that + * do not return file metadata will ignore any nested {@link FileExportQuery}. + *

+ * Instances are immutable and must be constructed via {@link #builder()}. + * Use {@link #defaults()} for the standard query with no special filtering. + * + * @see FileExportQuery + * @see DatasetMetadataPredicates + */ +public final class DatasetExportQuery { + + private final Set datasetPredicates; + private final FileExportQuery fileQuery; + + /** + * Default query, including all dataset metadata and applying file metadata defaults. + */ + private static final DatasetExportQuery DEFAULT = builder().build(); + + private DatasetExportQuery(Builder builder) { + this.datasetPredicates = Set.copyOf(builder.datasetPredicates); + this.fileQuery = builder.fileQuery; + } + + /** + * Returns a builder for creating new queries. + * + * @return a new {@link Builder} instance + */ + public static Builder builder() { + return new Builder(); + } + + /** + * Returns the default query, which includes all dataset metadata with no special + * filtering, and defers file metadata shaping to method-level defaults. + * + * @return the shared default {@link DatasetExportQuery} instance + */ + public static DatasetExportQuery defaults() { + return DEFAULT; + } + + /** + * Builder for {@link DatasetExportQuery}. + *

+ * Obtain an instance via {@link DatasetExportQuery#builder()} or + * {@link Builder#from(DatasetExportQuery)} to derive a new query from an existing one. + */ + public static class Builder { + private final Set datasetPredicates = new HashSet<>(); + private FileExportQuery fileQuery = null; + + private Builder() { + // Hiding constructor to enforce use of the static factory method + } + + /** + * Sets the dataset metadata predicates, replacing any previously set predicates. + * + * @param predicates the dataset metadata predicates to set + * @return this builder instance + */ + public Builder datasetPredicates(DatasetMetadataPredicates... predicates) { + this.datasetPredicates.clear(); + this.datasetPredicates.addAll(Set.of(predicates)); + return this; + } + + /** + * Sets the dataset metadata predicates, replacing any previously set predicates. + * + * @param predicates the dataset metadata predicates to set + * @return this builder instance + */ + public Builder datasetPredicates(Collection predicates) { + this.datasetPredicates.clear(); + this.datasetPredicates.addAll(predicates); + return this; + } + + /** + * Adds a dataset metadata predicate to the builder's collection of predicates. + * + * @param predicate the dataset metadata predicate to add + * @return this builder instance + */ + public Builder addDatasetPredicate(DatasetMetadataPredicates predicate) { + this.datasetPredicates.add(predicate); + return this; + } + + /** + * Sets the {@link FileExportQuery} to use for shaping file metadata nested + * within this dataset query. Replaces any previously set file query. + *

+ * If not set, methods that include file metadata will apply their own defaults. + * + * @param fileQuery the file export query to compose into this dataset query + * @return this builder instance + */ + public Builder fileQuery(FileExportQuery fileQuery) { + this.fileQuery = fileQuery; + return this; + } + + /** + * Builds an immutable {@link DatasetExportQuery}. + * + * @return a new, validated {@link DatasetExportQuery} + * @throws IllegalArgumentException if the predicate combination is invalid, + * e.g. due to conflicting predicates + */ + public DatasetExportQuery build() { + return new DatasetExportQuery(this); + } + + /** + * Creates a new {@link Builder} pre-populated with the state of the given query, + * useful for deriving a modified copy without altering the original. + * + * @param source the {@link DatasetExportQuery} instance to copy from + * @return a new {@code Builder} with the same predicates and file query as {@code source} + */ + public Builder from(DatasetExportQuery source) { + return new Builder() + .datasetPredicates(source.datasetPredicates) + .fileQuery(source.fileQuery); + } + } + + // Getters + + /** + * Returns the dataset metadata predicates that control which aspects of the dataset + * are included in the export. + * + * @return an unmodifiable set of {@link DatasetMetadataPredicates}; never {@code null} + */ + public Set getDatasetPredicates() { + return datasetPredicates; + } + + /** + * Returns the optional {@link FileExportQuery} that controls how file metadata + * nested within this dataset export should be shaped. + *

+ * An empty {@link Optional} means no explicit file query was specified; methods + * that include file metadata will apply their own defaults in that case. + * + * @return an {@link Optional} containing the file export query, or empty if not set + */ + public Optional getFileQuery() { + return Optional.ofNullable(fileQuery); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DatasetExportQuery that = (DatasetExportQuery) o; + return datasetPredicates.equals(that.datasetPredicates) + && Objects.equals(fileQuery, that.fileQuery); + } + + @Override + public int hashCode() { + return Objects.hash(datasetPredicates, fileQuery); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("DatasetExportQuery{"); + + if (!datasetPredicates.isEmpty()) { + sb.append("datasetPredicates=").append(datasetPredicates).append(", "); + } + if (fileQuery != null) { + sb.append("fileQuery=").append(fileQuery); + } else { + sb.append("fileQuery="); + } + sb.append("}"); + return sb.toString(); + } +} diff --git a/export/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java b/export/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java new file mode 100644 index 0000000..74456e4 --- /dev/null +++ b/export/src/main/java/io/gdcc/spi/export/DatasetMetadataPredicates.java @@ -0,0 +1,76 @@ +package io.gdcc.spi.export; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * Predicates for controlling which dataset metadata is included in an export + * and at what level of detail. + *

+ * These predicates are used in a {@link DatasetExportQuery} to shape dataset-level + * retrieval. They are exclusively concerned with dataset-level concerns — file-level + * filtering is handled separately by {@link FileMetadataPredicates} via + * {@link FileExportQuery}. + *

+ * Predicates may conflict with each other; use {@link #checkConflicts(Set)} to + * validate a combination before use. + * + * @see DatasetExportQuery + * @see FileMetadataPredicates + */ +public enum DatasetMetadataPredicates { + // NOTE: We can only define backward conflicts, as forward conflicts would lead + // to circular dependencies disallowed by the Java compiler. + + // Placeholder — dataset-level predicates to be added here as requirements emerge. + // Examples of future candidates: + // PUBLISHED_DATASETS_ONLY — restrict to published versions + // DRAFT_INCLUDED — include draft versions + // METADATA_BLOCKS_ONLY — exclude file metadata entirely + ; + + final Set conflicts; + + DatasetMetadataPredicates(DatasetMetadataPredicates... predicates) { + this.conflicts = Set.of(predicates); + } + + /** + * Returns {@code true} if this predicate conflicts with the given predicate. + * + * @param p the predicate to check against; {@code null} is safe and returns {@code false} + * @return {@code true} if a conflict exists, {@code false} otherwise + */ + public boolean conflictsWith(DatasetMetadataPredicates p) { + if (p == null) { + return false; + } + return conflicts.contains(p); + } + + /** + * Checks for conflicts among the given set of dataset metadata predicates. + * A predicate is considered conflicting if it has a conflict relationship with + * any other predicate in the set. 
+ * + * @param predicates the set of predicates to check for conflicts + * @return an unmodifiable set of predicates from the input that conflict with at + * least one other predicate; empty if no conflicts exist + */ + @SuppressWarnings("java:S2259") + public static Set checkConflicts(Set predicates) { + Set foundConflicts = new HashSet<>(); + + for (DatasetMetadataPredicates predicate : predicates) { + for (DatasetMetadataPredicates compare : predicates) { + if (predicate.conflictsWith(compare) || compare.conflictsWith(predicate)) { + foundConflicts.add(predicate); + foundConflicts.add(compare); + } + } + } + + return Collections.unmodifiableSet(foundConflicts); + } +} \ No newline at end of file diff --git a/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java new file mode 100644 index 0000000..aef1293 --- /dev/null +++ b/export/src/main/java/io/gdcc/spi/export/ExportDataProvider.java @@ -0,0 +1,275 @@ + +package io.gdcc.spi.export; + +import io.gdcc.spi.meta.plugin.CoreProvider; +import jakarta.json.JsonArray; +import jakarta.json.JsonObject; +import org.w3c.dom.Document; + +import java.io.InputStream; +import java.util.Optional; +import java.util.stream.Stream; + +/** + * Provides dataset metadata that can be used by an {@link Exporter} to create + * new metadata export formats. + *

+ * This interface offers multiple methods for retrieving dataset metadata in various + * formats and levels of detail. Exporters should choose the method that best fits + * their needs, considering the completeness of metadata and performance implications. + * + *

Implementation Guide

+ * Implementers must override the context-accepting versions of all data retrieval + * methods. No-argument convenience methods are provided as default implementations + * for backward compatibility but are deprecated and will be removed in a future version. + * + *

Context Handling

+ * Implementations should respect context options where applicable. + * Not all methods support all context options - see individual method documentation for details. + * All methods require a non-null {@link DatasetExportQuery} or {@link FileExportQuery}. + * Passing null will result in a {@link NullPointerException}. + * Callers should use {@link DatasetExportQuery#defaults()} or {@link FileExportQuery#defaults()}, respectively, instead of passing null. + * + * @see Exporter + * @see DatasetExportQuery + * @see FileExportQuery + */ +public interface ExportDataProvider extends CoreProvider { + + int API_LEVEL = 2; + + /** + * Returns complete dataset metadata in Dataverse's standard JSON format. + *

+ * This format includes comprehensive dataset-level metadata along with basic + * metadata for each file in the dataset. It is the same JSON format used in + * the Dataverse API and available as a metadata export option in the UI. + * + * @param query specification for data retrieval + * @return dataset metadata in Dataverse JSON format + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if the query is null + * @since 2.1.0 + * @apiNote While no formal JSON schema exists for this format, it is well-documented + * in the Dataverse guides. Along with OAI_ORE, this is one of only two export + * formats that provide complete dataset and file metadata. + * @implNote Implementations must respect the {@code datasetMetadataOnly} flag. + * When true, file-level metadata should be excluded to optimize performance + * for datasets with large numbers of files. Other context options + * (publicFilesOnly, offset, length) do not apply and should be ignored. + */ + JsonObject getDatasetJson(DatasetExportQuery query); + + /** + * Returns complete dataset metadata using default options. + * + * @return dataset metadata in Dataverse JSON format + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetJson(DatasetExportQuery)} instead. + */ + @Deprecated(since = "2.1.0", forRemoval = true) + default JsonObject getDatasetJson() { + return getDatasetJson(DatasetExportQuery.defaults()); + } + + /** + * Returns dataset metadata in JSON-LD-based OAI-ORE format. + *

+ * OAI-ORE (Open Archives Initiative Object Reuse and Exchange) provides a structured way to describe + * aggregations of web resources. This format is used in Dataverse's archival bag export mechanism + * and available via UI and API. + * + * @param query specification for data retrieval + * @return dataset metadata in OAI-ORE format + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if the query is null + * @since 2.1.0 + * @apiNote Along with the standard JSON format, this is one of only two export + * formats that provide complete dataset-level metadata along with basic + * file metadata for each file in the dataset. + * @implNote Implementations must respect the {@code datasetMetadataOnly} flag. + * Other context options do not apply and should be ignored. + */ + JsonObject getDatasetORE(DatasetExportQuery query); + + /** + * Returns dataset metadata in OAI-ORE format using default options. + * + * @return dataset metadata in OAI-ORE format + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetORE(DatasetExportQuery)} instead. + */ + @Deprecated(since = "2.1.0", forRemoval = true) + default JsonObject getDatasetORE() { + return getDatasetORE(DatasetExportQuery.defaults()); + } + + /** + * Returns detailed metadata for files in the dataset. + *

+ * For tabular files that have been successfully ingested, this may include + * DDI-centric metadata extracted during the ingest process. This detailed + * metadata is not available through other methods in this interface. + *

+ * The query may specify filters to skip certain files or how much metadata details should be included. + * The resulting stream will contain a limited number of elements only, specified by a {@code PageRequest}, + * avoiding huge memory allocations in the provider. + *

+ * + * @param query specification for file data retrieval + * @param request the page request containing pagination information such as page offset and page size + * @return JSON array with one entry per dataset file (both tabular and non-tabular) + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if the query or request is null + * @since 2.1.0 + * @apiNote No formal JSON schema is available for this output. The format is not + * extensively documented; implementers may wish to examine the DDIExporter + * and JSONPrinter classes in the Dataverse codebase for usage examples. + */ + Stream getDatasetFileDetails(FileExportQuery query, PageRequest request); + + /** + * Returns detailed metadata for files in the dataset. + *

+ * For tabular files that have been successfully ingested, this may include + * DDI-centric metadata extracted during the ingest process. This detailed + * metadata is not available through other methods in this interface. + *

+ * The query may specify filters to skip certain files or how much metadata details should be included. + * The resulting stream will contain all matching files for consumption. + * In cases with large metadata quantities use {@link #getDatasetFileDetails(FileExportQuery,PageRequest)} + * for a stream containing a limited number of elements only, avoiding huge memory allocations in the provider. + *

+ * + * @param query specification for file data retrieval + * @return JSON array with one entry per dataset file (both tabular and non-tabular) + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if the query is null + * @since 2.1.0 + * @apiNote No formal JSON schema is available for this output. The format is not + * extensively documented; implementers may wish to examine the DDIExporter + * and JSONPrinter classes in the Dataverse codebase for usage examples. + */ + Stream getDatasetFileDetails(FileExportQuery query); + + /** + * Returns detailed metadata for all files using default options. + *

+ * Note that this method will serialize all file metadata into one large JSON array. + * This can be memory-intensive for large datasets and should be used judiciously. + * There have been reports of unexportable large datasets in production installations. + * Using {@link #getDatasetFileDetails(FileExportQuery)} instead is advised. + *

+ * + * @return JSON array with one JSON object entry per dataset file + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetFileDetails(FileExportQuery)} + * or {@link #getDatasetFileDetails(FileExportQuery, PageRequest)} instead. + */ + @Deprecated(since = "2.1.0", forRemoval = true) + JsonArray getDatasetFileDetails(); + + /** + * Returns dataset metadata conforming to the schema.org standard. + *

+ * This metadata subset is used in dataset page headers to improve discoverability by search engines. + * It provides structured data markup (JSON-LD) following the schema.org vocabulary. + * + * @param query specification for data retrieval + * @return dataset metadata in schema.org format + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if the query is null + * @since 2.1.0 + * @apiNote This metadata export is not complete. It should only be used as a starting + * point for an Exporter if it simplifies implementation compared to using + * the complete JSON or OAI_ORE exports. + * @implNote All context options are ignored by this method. + */ + JsonObject getDatasetSchemaDotOrg(DatasetExportQuery query); + + /** + * Returns dataset metadata in schema.org format using default options. + * + * @return dataset metadata in schema.org format + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDatasetSchemaDotOrg(DatasetExportQuery)} instead. + */ + @Deprecated(since = "2.1.0", forRemoval = true) + default JsonObject getDatasetSchemaDotOrg() { + return getDatasetSchemaDotOrg(DatasetExportQuery.defaults()); + } + + /** + * Returns dataset metadata conforming to the DataCite standard as XML. + *

+ * This is the same metadata format sent to DataCite when DataCite DOIs are used. + * It provides citation metadata following the DataCite Metadata Schema. + *

+ * Note: the returned XML document can easily be queried using XPath and other techniques + *

+ * + * @param query specification for data retrieval + * @return dataset metadata as a DataCite XML {@link Document} + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if the query is null + * @since 2.1.0 + * @apiNote This metadata export is not complete. It should only be used as a starting + * point for an Exporter if it simplifies implementation compared to using + * the complete JSON or OAI_ORE exports. + * @implNote All context options are ignored by this method. + */ + Document getDataCiteXml(DatasetExportQuery query); + + /** + * Returns dataset metadata in DataCite XML format using default options. + * + * @return dataset metadata as DataCite XML string + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getDataCiteXml(DatasetExportQuery)} instead. + */ + @Deprecated(since = "2.1.0", forRemoval = true) + String getDataCiteXml(); + + /** + * Returns metadata in the format specified by an Exporter's prerequisite. + *

+ * Some Exporters transform metadata from one standard format to another (e.g., + * DDI XML to DDI HTML). Such Exporters declare a prerequisite format via + * {@link Exporter#getPrerequisiteFormatName()}, and this method provides access + * to that prerequisite metadata. + * + * @param query specification passed to the prerequisite exporter + * @return metadata in the prerequisite format, or empty if no prerequisite is configured + * @throws ExportException if metadata retrieval fails + * @throws NullPointerException if the query is null + * @since 2.1.0 + * @apiNote This is useful for creating alternate representations of the same metadata + * (e.g., XML, HTML, PDF versions of a standard like DDI), especially when + * conversion libraries exist. Note that if a third-party Exporter replaces + * the internal exporter you depend on, this method may return unexpected results. + * @implNote The default implementation returns empty. Override only if your provider + * supports prerequisite format chaining. The prerequisite exporter receives + * the same context as specified in this call. + */ + default Optional getPrerequisiteInputStream(DatasetExportQuery query) { + return Optional.empty(); + } + + /** + * Returns metadata in the prerequisite format using default options. + * + * @return metadata in the prerequisite format, or empty if no prerequisite is configured + * @throws ExportException if metadata retrieval fails + * @since 1.0.0 + * @deprecated since 2.1.0, for removal in 3.0.0. Use {@link #getPrerequisiteInputStream(DatasetExportQuery)} instead. 
+ */ + @Deprecated(since = "2.1.0", forRemoval = true) + default Optional getPrerequisiteInputStream() { + return getPrerequisiteInputStream(DatasetExportQuery.defaults()); + } +} diff --git a/src/main/java/io/gdcc/spi/export/ExportException.java b/export/src/main/java/io/gdcc/spi/export/ExportException.java similarity index 72% rename from src/main/java/io/gdcc/spi/export/ExportException.java rename to export/src/main/java/io/gdcc/spi/export/ExportException.java index c816a60..57a6a1b 100644 --- a/src/main/java/io/gdcc/spi/export/ExportException.java +++ b/export/src/main/java/io/gdcc/spi/export/ExportException.java @@ -1,8 +1,6 @@ package io.gdcc.spi.export; -import java.io.IOException; - -public class ExportException extends IOException { +public class ExportException extends RuntimeException { public ExportException(String message) { super(message); } diff --git a/src/main/java/io/gdcc/spi/export/Exporter.java b/export/src/main/java/io/gdcc/spi/export/Exporter.java similarity index 92% rename from src/main/java/io/gdcc/spi/export/Exporter.java rename to export/src/main/java/io/gdcc/spi/export/Exporter.java index 7132e74..fc439a9 100644 --- a/src/main/java/io/gdcc/spi/export/Exporter.java +++ b/export/src/main/java/io/gdcc/spi/export/Exporter.java @@ -1,5 +1,9 @@ package io.gdcc.spi.export; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.annotations.RequiredProvider; +import io.gdcc.spi.meta.plugin.Plugin; + import java.io.OutputStream; import java.util.Locale; import java.util.Optional; @@ -10,8 +14,17 @@ * deploying new classes that implement this Exporter interface. 
*/ -public interface Exporter { - +@PluginContract( + role = PluginContract.Role.BASE, + providers = @RequiredProvider(ExportDataProvider.class) +) +public interface Exporter extends Plugin { + + int API_LEVEL = 2; + + default String identity() { + return this.getFormatName(); + } /** * When this method is called, the Exporter should write the metadata to the given OutputStream. diff --git a/export/src/main/java/io/gdcc/spi/export/FileExportQuery.java b/export/src/main/java/io/gdcc/spi/export/FileExportQuery.java new file mode 100644 index 0000000..5ebcacd --- /dev/null +++ b/export/src/main/java/io/gdcc/spi/export/FileExportQuery.java @@ -0,0 +1,162 @@ +package io.gdcc.spi.export; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashSet; +import java.util.Objects; +import java.util.Set; + +import static io.gdcc.spi.export.FileMetadataPredicates.*; + +/** + * Defines what file metadata to retrieve and at what level of detail + * for file-oriented export operations. + *

+ * This is a pure data-shape specification: it answers which files should be included + * and how much detail about them should be fetched. It deliberately does not address + * how much data to retrieve per call — pagination is a separate, + * orthogonal concern expressed via a {@code PageRequest} at the method level. + *

+ * A {@code FileExportQuery} may be used standalone in file-centric export methods, + * or composed inside a {@code DatasetExportQuery} to specify how file metadata + * should be shaped within a dataset export. + *

+ * Instances are immutable and must be constructed via {@link #builder()}. + * Use {@link #defaults()} for the standard all-files query with no special filtering. + * + * @see FileMetadataPredicates + */ +public final class FileExportQuery { + + private final Set filePredicates; + + /** + * Default query with no special options. + */ + private static final FileExportQuery DEFAULT = builder().addFilePredicate(ALL_FILES).build(); + + private FileExportQuery(Builder builder) { + this.filePredicates = builder.filePredicates; + } + + /** + * Returns a builder for creating new queries. + */ + public static Builder builder() { + return new Builder(); + } + + /** + * Returns a default query, which includes all files without filtering or detail restrictions. + */ + public static FileExportQuery defaults() { + return DEFAULT; + } + + /** + * Builder for {@link FileExportQuery}. + *

+ * Obtain an instance via {@link FileExportQuery#builder()} or + * {@link Builder#from(FileExportQuery)} to derive a new query from an existing one. + */ + public static class Builder { + private final Set filePredicates = new HashSet<>(); + + private Builder() { + // Hiding constructor to enforce use of the static factory method + } + + /** + * Sets the file metadata predicates, replacing any previously set predicates. + * + * @param predicates the file metadata predicates to set + * @return this builder instance + */ + public Builder filePredicates(FileMetadataPredicates... predicates) { + this.filePredicates.clear(); + this.filePredicates.addAll(Set.of(predicates)); + return this; + } + + /** + * Sets the file metadata predicates, replacing any previously set predicates. + * + * @param predicates the file metadata predicates to set + * @return this builder instance + */ + public Builder filePredicates(Collection predicates) { + this.filePredicates.clear(); + this.filePredicates.addAll(predicates); + return this; + } + + /** + * Adds a file metadata predicate to the builder's collection of predicates. + * + * @param predicate the file metadata predicate to add + * @return this builder instance + */ + public Builder addFilePredicate(FileMetadataPredicates predicate) { + this.filePredicates.add(predicate); + return this; + } + + /** + * Builds an immutable {@link FileExportQuery}. + * + * @return validated context + * @throws IllegalArgumentException if validation fails + */ + public FileExportQuery build() { + return new FileExportQuery(this); + } + + /** + * Copies the properties from the given {@link FileExportQuery} instance into a new {@code Builder}. 
+ * + * @param source the {@code FileExportQuery} instance from which to copy properties + * @return a new {@code Builder} instance with properties copied from the provided query + */ + public Builder from(FileExportQuery source) { + return new Builder() + .filePredicates(source.filePredicates); + } + } + + // Getters + + /** + * Returns the file metadata predicates that control which files are included + * and what level of detail is fetched for each. + * + * @return an unmodifiable set of {@link FileMetadataPredicates}; never {@code null} + */ + public Set getFilePredicates() { + return Collections.unmodifiableSet(filePredicates); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FileExportQuery that = (FileExportQuery) o; + return filePredicates.equals(that.filePredicates); + } + + @Override + public int hashCode() { + return Objects.hash(filePredicates); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder("FileExportQuery{"); + + if (!filePredicates.isEmpty()) { + sb.append("filePredicates=").append(filePredicates); + } + + sb.append("}"); + return sb.toString(); + } +} diff --git a/export/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java b/export/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java new file mode 100644 index 0000000..81f6a65 --- /dev/null +++ b/export/src/main/java/io/gdcc/spi/export/FileMetadataPredicates.java @@ -0,0 +1,84 @@ +package io.gdcc.spi.export; + +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +/** + * Enum representing predicates for filtering file metadata during export operations. + * Each predicate defines criteria for including or excluding specific types of files. + * Conflicts between predicates are explicitly defined to prevent ambiguous or contradictory filtering rules. 
+ * Any predicates should follow the pattern (SKIP|ONLY|INCLUDE)_[ADJECTIVE]_[OBJECT]. + */ +public enum FileMetadataPredicates { + // NOTE: We can only define backward conflicts, as forward conflicts would lead + // to circular dependencies disallowed by the Java compiler. + + /** + * Includes metadata for all files without restriction. + * Conflicts with any other predicate selecting files. + */ + ALL_FILES(), + /** + * Excludes metadata for all files. + * Conflicts with any other file selecting predicate. + */ + SKIP_FILES(ALL_FILES), + /** + * Only include files with public visibility. + * Conflicts with {@link #ALL_FILES} and {@link #SKIP_FILES}. + */ + ONLY_PUBLIC_FILES(ALL_FILES, SKIP_FILES), + /** + * Only include tabular data files. + * Conflicts with {@link #ALL_FILES} and {@link #SKIP_FILES}. + */ + ONLY_TABULAR_FILES(ALL_FILES, SKIP_FILES), + /** + * For tabular data files, control if variable details are included or not. + * (That can be huge and heterogeneous data with slow DB queries!) + * It has no conflicting predicates, as it is about detail inclusion, not file selection. + */ + INCLUDE_TABULAR_DATA_VARIABLES() + ; + + final Set conflicts; + + FileMetadataPredicates(FileMetadataPredicates... predicates) { + this.conflicts = Set.of(predicates); + } + + public boolean conflictsWith(FileMetadataPredicates p) { + if (p == null) { + return false; + } + return conflicts.contains(p); + } + + /** + * Checks for conflicts among the given set of export file predicates. + * A predicate is considered conflicting if it has a conflict relationship with + * any other predicate defined in the {@link FileMetadataPredicates} enum. 
+ * + * @param predicates the set of predicates to check for conflicts + * @return an unmodifiable set of predicates from the input that conflict with at least one other predicate (empty if no conflict) + */ + @SuppressWarnings("java:S2259") + public static Set checkConflicts(Set predicates) { + Set foundConflicts = new HashSet<>(); + + // Iterate via O(n^2) through all predicates to check any existing predicate for a conflict. + // This way, a forward check is enough, as we iterate through the cartesian product. + for (FileMetadataPredicates predicate : predicates) { + for (FileMetadataPredicates compare : predicates) { + if (predicate.conflictsWith(compare) || compare.conflictsWith(predicate)) { + foundConflicts.add(predicate); + foundConflicts.add(compare); + } + } + } + + return Collections.unmodifiableSet(foundConflicts); + } + +} diff --git a/export/src/main/java/io/gdcc/spi/export/PageRequest.java b/export/src/main/java/io/gdcc/spi/export/PageRequest.java new file mode 100644 index 0000000..335e8ed --- /dev/null +++ b/export/src/main/java/io/gdcc/spi/export/PageRequest.java @@ -0,0 +1,67 @@ +package io.gdcc.spi.export; + +import java.util.Objects; + +/** + * Defines pagination parameters for data retrieval methods that return + * potentially large collections of results. + * + *

Use {@link #unpaged()} for requests that should return all results in a single batch.

+ */ +public final class PageRequest { + + private static final PageRequest UNPAGED = new PageRequest(0, Integer.MAX_VALUE); + + private final int offset; + private final int limit; + + private PageRequest(int offset, int limit) { + if (offset < 0) throw new IllegalArgumentException("Offset must be >= 0, was: " + offset); + if (limit < 1) throw new IllegalArgumentException("Limit must be >= 1, was: " + limit); + this.offset = offset; + this.limit = limit; + } + + /** + * Creates a page request with the given offset and limit. + * + * @param offset zero-based index of the first result to return + * @param limit maximum number of results to return + * @return a new PageRequest + */ + public static PageRequest of(int offset, int limit) { + return new PageRequest(offset, limit); + } + + /** + * Returns a request for all results (no pagination). + */ + public static PageRequest unpaged() { + return UNPAGED; + } + + public int getOffset() { return offset; } + public int getLimit() { return limit; } + + public boolean isPaged() { return !this.equals(UNPAGED); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + PageRequest that = (PageRequest) o; + return offset == that.offset && limit == that.limit; + } + + @Override + public int hashCode() { + return Objects.hash(offset, limit); + } + + @Override + public String toString() { + return isPaged() + ? 
"PageRequest{offset=" + offset + ", limit=" + limit + "}" + : "PageRequest{unpaged}"; + } +} diff --git a/src/main/java/io/gdcc/spi/export/XMLExporter.java b/export/src/main/java/io/gdcc/spi/export/XMLExporter.java similarity index 82% rename from src/main/java/io/gdcc/spi/export/XMLExporter.java rename to export/src/main/java/io/gdcc/spi/export/XMLExporter.java index 3c3fa35..08b7cc9 100644 --- a/src/main/java/io/gdcc/spi/export/XMLExporter.java +++ b/export/src/main/java/io/gdcc/spi/export/XMLExporter.java @@ -1,12 +1,19 @@ package io.gdcc.spi.export; +import io.gdcc.spi.meta.annotations.PluginContract; import jakarta.ws.rs.core.MediaType; /** * XML Exporter is an extension of the base Exporter interface that adds the * additional methods needed for generating XML metadata export formats. */ +@PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = Exporter.class +) public interface XMLExporter extends Exporter { + + int API_LEVEL = 2; /** * @implNote for the ddi exporter, this method returns "ddi:codebook:2_5" @@ -31,7 +38,8 @@ public interface XMLExporter extends Exporter { /** * @return - should always be MediaType.APPLICATION_XML */ - public default String getMediaType() { + @Override + default String getMediaType() { return MediaType.APPLICATION_XML; - }; + } } diff --git a/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java b/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java new file mode 100644 index 0000000..bc0c4fd --- /dev/null +++ b/export/src/test/java/io/gdcc/spi/export/ExporterImplTest.java @@ -0,0 +1,105 @@ + +package io.gdcc.spi.export; + +import io.gdcc.spi.export.fixtures.StubDdiExporter; +import io.gdcc.spi.export.fixtures.StubJsonExporter; +import io.gdcc.spi.meta.descriptor.Descriptor; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; + +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * Verifies that the annotation processor generates correct descriptors and service files + * when compiling real Exporter SPI implementations. + * + *

The test implementation classes in this package are compiled with the processor on the + * classpath. The processor writes descriptors and service files into {@code target/test-classes/}, + * which this test reads at runtime to verify correctness.

+ */ +class ExporterImplTest { + + @Test + void generatesDescriptorAndServiceFileForBaseExporterImplementation() throws IOException { + Class implClass = StubJsonExporter.class; + + Descriptor descriptor = readDescriptor(implClass); + assertNotNull(descriptor, "Descriptor should be generated for " + implClass); + + assertEquals(DescriptorFormat.transformClassName(implClass), descriptor.klass()); + assertEquals(Exporter.class.getCanonicalName(), descriptor.kind()); + assertEquals(Exporter.API_LEVEL, descriptor.contractLevel(Exporter.class.getCanonicalName())); + assertEquals(ExportDataProvider.API_LEVEL, descriptor.requiredProviderLevel(ExportDataProvider.class.getCanonicalName())); + + String serviceFile = readServiceFile(Exporter.class); + assertNotNull(serviceFile, "Service file should be generated for Exporter"); + assertTrue(serviceFile.contains(DescriptorFormat.transformClassName(implClass)), "Service file should contain " + implClass); + } + + @Test + void generatesDescriptorWithBaseAndCapabilityForXmlExporterImplementation() throws IOException { + Class implClass = StubDdiExporter.class; + + Descriptor descriptor = readDescriptor(implClass); + assertNotNull(descriptor, "Descriptor should be generated for " + implClass); + + assertEquals(DescriptorFormat.transformClassName(implClass), descriptor.klass()); + assertEquals(Exporter.class.getCanonicalName(), descriptor.kind()); + assertEquals(Exporter.API_LEVEL, descriptor.contractLevel(Exporter.class.getCanonicalName())); + assertEquals(XMLExporter.API_LEVEL, descriptor.contractLevel(XMLExporter.class.getCanonicalName())); + assertEquals(ExportDataProvider.API_LEVEL, descriptor.requiredProviderLevel(ExportDataProvider.class.getCanonicalName())); + + String serviceFile = readServiceFile(Exporter.class); + assertNotNull(serviceFile, "Service file should be generated for Exporter"); + assertTrue(serviceFile.contains(DescriptorFormat.transformClassName(implClass)), "Service file should contain " + implClass); + } + 
+ @Test + void doesNotGenerateServiceFileForXmlExporterCapability() { + String serviceFile = readServiceFile(XMLExporter.class); + assertNull(serviceFile, "Service file must never be generated for capability contract XMLExporter"); + } + + @Test + void xmlExporterDefaultMediaTypeSatisfiesBaseContract() throws IOException { + // StubDdiExporter implements XMLExporter (which extends Exporter) and does NOT + // override getMediaType(). Because XMLExporter extends Exporter in the Java type + // hierarchy, the default on XMLExporter satisfies the abstract declaration on Exporter. + // If this were not the case, compilation would have failed and no descriptor would exist. + Class implClass = StubDdiExporter.class; + + Descriptor descriptor = readDescriptor(implClass); + assertNotNull(descriptor, "Descriptor should exist, proving compilation succeeded without explicit getMediaType() override"); + } + + // ── Helpers ───────────────────────────────────────────────────────────────── + + private Descriptor readDescriptor(Class implClass) throws IOException { + String resourcePath = DescriptorFormat.toPath(implClass); + try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) { + if (is == null) { + return null; + } + return DescriptorFormat.read(new String(is.readAllBytes(), StandardCharsets.UTF_8)); + } + } + + private String readServiceFile(Class serviceType) { + String resourcePath = "META-INF/services/" + serviceType.getName(); + try (InputStream is = getClass().getClassLoader().getResourceAsStream(resourcePath)) { + if (is == null) { + return null; + } + return new String(is.readAllBytes(), StandardCharsets.UTF_8); + } catch (IOException e) { + return null; + } + } +} \ No newline at end of file diff --git a/export/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java b/export/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java new file mode 100644 index 0000000..4157b95 --- /dev/null +++ 
b/export/src/test/java/io/gdcc/spi/export/FileMetadataPredicatesTest.java @@ -0,0 +1,32 @@ +package io.gdcc.spi.export; + +import org.junit.jupiter.api.Test; + +import java.util.Set; + +import static io.gdcc.spi.export.FileMetadataPredicates.*; +import static org.junit.jupiter.api.Assertions.*; + +class FileMetadataPredicatesTest { + + @Test + void checkForwardAndBackwardConflicts_All_None() { + Set predicates = Set.of(ALL_FILES, SKIP_FILES); + Set conflicts = FileMetadataPredicates.checkConflicts(predicates); + assertEquals(2, conflicts.size(), conflicts::toString); + } + + @Test + void checkForwardAndBackwardConflicts_All_SthElse() { + Set predicates = Set.of(ALL_FILES, ONLY_PUBLIC_FILES); + Set conflicts = FileMetadataPredicates.checkConflicts(predicates); + assertEquals(2, conflicts.size(), conflicts::toString); + } + + @Test + void checkNoConflicts_Public_Tabular() { + Set predicates = Set.of(ONLY_PUBLIC_FILES, ONLY_TABULAR_FILES); + Set conflicts = FileMetadataPredicates.checkConflicts(predicates); + assertTrue(conflicts.isEmpty(), conflicts::toString); + } +} \ No newline at end of file diff --git a/export/src/test/java/io/gdcc/spi/export/PageRequestTest.java b/export/src/test/java/io/gdcc/spi/export/PageRequestTest.java new file mode 100644 index 0000000..917625d --- /dev/null +++ b/export/src/test/java/io/gdcc/spi/export/PageRequestTest.java @@ -0,0 +1,64 @@ +package io.gdcc.spi.export; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class PageRequestTest { + + /** + * Tests for the `of` method in the `PageRequest` class. + * The `of` method is responsible for creating a new `PageRequest` object with the given offset and limit values. + * This test class ensures various scenarios for valid and invalid inputs are handled correctly. 
+ */ + + @Test + void testOf_createsValidPageRequest() { + // Arrange & Act + PageRequest pageRequest = PageRequest.of(10, 20); + + // Assert + assertEquals(10, pageRequest.getOffset()); + assertEquals(20, pageRequest.getLimit()); + assertTrue(pageRequest.isPaged()); + } + + @Test + void testOf_withZeroOffsetAndValidLimit() { + // Arrange & Act + PageRequest pageRequest = PageRequest.of(0, 5); + + // Assert + assertEquals(0, pageRequest.getOffset()); + assertEquals(5, pageRequest.getLimit()); + assertTrue(pageRequest.isPaged()); + } + + @Test + void testOf_throwsExceptionForNegativeOffset() { + // Arrange & Act & Assert + IllegalArgumentException exception = assertThrows( + IllegalArgumentException.class, + () -> PageRequest.of(-1, 10) + ); + assertEquals("Offset must be >= 0, was: -1", exception.getMessage()); + } + + @Test + void testOf_throwsExceptionForZeroOrNegativeLimit() { + // Arrange & Act & Assert + IllegalArgumentException exception1 = assertThrows( + IllegalArgumentException.class, + () -> PageRequest.of(5, 0) + ); + assertEquals("Limit must be >= 1, was: 0", exception1.getMessage()); + + IllegalArgumentException exception2 = assertThrows( + IllegalArgumentException.class, + () -> PageRequest.of(5, -1) + ); + assertEquals("Limit must be >= 1, was: -1", exception2.getMessage()); + } +} \ No newline at end of file diff --git a/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java b/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java new file mode 100644 index 0000000..973a091 --- /dev/null +++ b/export/src/test/java/io/gdcc/spi/export/fixtures/StubDdiExporter.java @@ -0,0 +1,57 @@ +package io.gdcc.spi.export.fixtures; + +import io.gdcc.spi.export.ExportDataProvider; +import io.gdcc.spi.export.XMLExporter; +import io.gdcc.spi.meta.annotations.DataversePlugin; + +import java.io.OutputStream; +import java.util.Locale; + +/** + * Minimal XMLExporter implementation for processor integration testing. + * + *

Does NOT override {@code getMediaType()} — the default from {@link XMLExporter} + * satisfies the abstract declaration on {@link io.gdcc.spi.export.Exporter} because XMLExporter extends Exporter.

+ */ +@DataversePlugin +public class StubDdiExporter implements XMLExporter { + @Override + public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) { + /* Intentionally left blank for test class */ + } + + @Override + public String getFormatName() { + return "stub-ddi"; + } + + @Override + public String getDisplayName(Locale locale) { + return "Stub DDI"; + } + + @Override + public Boolean isHarvestable() { + return true; + } + + @Override + public Boolean isAvailableToUsers() { + return true; + } + + @Override + public String getXMLNameSpace() { + return "ddi:codebook:2_5"; + } + + @Override + public String getXMLSchemaLocation() { + return "https://ddialliance.org/Specification/DDI-Codebook/2.5/XMLSchema/codebook.xsd"; + } + + @Override + public String getXMLSchemaVersion() { + return "2.5"; + } +} diff --git a/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java b/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java new file mode 100644 index 0000000..b273c39 --- /dev/null +++ b/export/src/test/java/io/gdcc/spi/export/fixtures/StubJsonExporter.java @@ -0,0 +1,44 @@ +package io.gdcc.spi.export.fixtures; + +import io.gdcc.spi.export.ExportDataProvider; +import io.gdcc.spi.export.Exporter; +import io.gdcc.spi.meta.annotations.DataversePlugin; + +import java.io.OutputStream; +import java.util.Locale; + +/** + * Minimal base-only Exporter implementation for processor integration testing. 
+ */ +@DataversePlugin +public class StubJsonExporter implements Exporter { + @Override + public void exportDataset(ExportDataProvider dataProvider, OutputStream outputStream) { + /* Intentionally left blank for test class */ + } + + @Override + public String getFormatName() { + return "stub-json"; + } + + @Override + public String getDisplayName(Locale locale) { + return "Stub JSON"; + } + + @Override + public Boolean isHarvestable() { + return false; + } + + @Override + public Boolean isAvailableToUsers() { + return true; + } + + @Override + public String getMediaType() { + return "application/json"; + } +} diff --git a/meta/pom.xml b/meta/pom.xml new file mode 100644 index 0000000..c489747 --- /dev/null +++ b/meta/pom.xml @@ -0,0 +1,56 @@ + + + 4.0.0 + + io.gdcc.spi + parent + 2.1.0-SNAPSHOT + + + meta + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${jdk.version} + ${compilerArgument} + + + + + default-compile + compile + + compile + + + + none + + + + default-testCompile + test-compile + + testCompile + + + + + + + + + + + \ No newline at end of file diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java new file mode 100644 index 0000000..59989a4 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/DataversePlugin.java @@ -0,0 +1,39 @@ +package io.gdcc.spi.meta.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Marks a concrete plugin implementation class for metadata generation. + * + *

Plugin authors should place this annotation on every concrete plugin implementation + * class that is meant to be discovered and loaded by Dataverse.

+ * + *

Annotated classes are validated by the {@link io.gdcc.spi.meta.processor.PluginContractProcessor annotation processor} + * and contribute generated compatibility metadata used during plugin loading.

+ * + *

Implementation rules:

+ *
    + *
  • the annotated type must be a {@code public}, non-abstract class,
  • + *
  • it must implement exactly one {@link PluginContract.Role#BASE base contract},
  • + *
  • it may additionally implement any number of {@link PluginContract.Role#CAPABILITY capability contracts}.
  • + *
+ * + *

A capability contract is never loadable on its own. A plugin implementing a capability + * must also implement the capability's required base contract. The base contract is the single hook + * the Dataverse core uses to discover and load your plugin.

+ * + * @implNote Example where {@code Exporter} is a base contract and {@code FooExporter} a capability: + *
{@code
+ * @DataversePlugin
+ * public class MyBarExporter implements Exporter, FooExporter {
+ *     // Your implementation goes here...
+ * }
+ * }
+ */ +@Retention(RetentionPolicy.SOURCE) +@Target(ElementType.TYPE) +public @interface DataversePlugin { +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java new file mode 100644 index 0000000..c52124e --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/PluginContract.java @@ -0,0 +1,103 @@ +package io.gdcc.spi.meta.annotations; + +import io.gdcc.spi.meta.plugin.Plugin; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Declares a versioned plugin contract interface. + * + *

A plugin contract defines either a directly loadable plugin kind + * ({@link Role#BASE}) or an additional, non-loadable capability + * ({@link Role#CAPABILITY}).

+ * + *

The annotated type must be an {@code interface} extending {@link Plugin} + * and must declare a compile-time constant primitive {@code int API_LEVEL} field. + *

+ * + *

General contract rules:

+ *
    + *
  1. Plugin contracts may only be declared on interfaces.
  2. + *
  3. Plugin contracts must extend {@link Plugin}.
  4. + *
  5. Plugin contracts may not extend other plugin contracts. (One exception, see below.)
  6. + *
  7. A plugin implementation may implement exactly one {@link Role#BASE base contract}.
  8. + *
+ * + *

Base contracts are used as the unique service-loading identity of a plugin. + * Capability contracts are never loaded directly; they add optional functionality + * and are discovered through generated plugin metadata.

+ * + *

Capability rules:

+ *
    + *
  1. A capability contract must declare {@link #requires()}.
  2. + *
  3. A capability must require exactly one base contract.
  4. + *
  5. A capability may extend the required base contract to provide default implementations.
  6. + *
  7. For now, requiring or extending another capability is not supported.
  8. + *
  9. A plugin implementing a capability must also implement its required base contract.
  10. + *
+ * + * Note: this annotation cannot be used repeatedly on the same type. + * + * @implNote Example base contract: + *
{@code
+ * @PluginContract(role = PluginContract.Role.BASE)
+ * public interface FooBar extends Plugin {
+ *     int API_LEVEL = 1;
+ * }
+ * }
+ * Example capability contract: + *
{@code
+ * @PluginContract(
+ *     role = PluginContract.Role.CAPABILITY,
+ *     requires = { FooBar.class }
+ * )
+ * public interface BarBeque extends Plugin {
+ *     int API_LEVEL = 1;
+ *
+ *     default String getMediaType() {
+ *         return "application/bbq";
+ *     }
+ * }
+ * }
+ */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE) +public @interface PluginContract { + + /** + * Declares whether this contract is a directly loadable base contract or an additional capability contract. + */ + Role role(); + + /** + * Other plugin contracts that must also be implemented when this contract is implemented. + * + *

For {@link Role#CAPABILITY capabilities}, this must currently contain exactly one + * required {@link Role#BASE base contract}. Capabilities are not directly loadable and + * therefore must always be paired with their base contract.

+ */ + Class[] requires() default {}; + + /** + * Core provider contracts required by this plugin contract. + */ + RequiredProvider[] providers() default {}; + + /** + * Distinguishes directly loadable base contracts from additional capability contracts. + */ + enum Role { + /** + * A directly loadable plugin contract. + */ + BASE, + + /** + * An additional plugin capability that refines behavior but is not directly loadable. + */ + CAPABILITY + } +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java new file mode 100644 index 0000000..7760776 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/RequiredProvider.java @@ -0,0 +1,17 @@ +package io.gdcc.spi.meta.annotations; + +import io.gdcc.spi.meta.plugin.CoreProvider; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +/** + * Declares that a {@link PluginContract} requires a specific core provider contract. + * + *

The provider API level is taken from the provider interface's + * {@code API_LEVEL} constant at compile time by the annotation processor.

+ */ +@Retention(RetentionPolicy.RUNTIME) +public @interface RequiredProvider { + Class value(); +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java b/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java new file mode 100644 index 0000000..bac2b22 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/annotations/package-info.java @@ -0,0 +1,56 @@ +/** + * Annotations used to declare Dataverse plugin contracts, plugin implementations, + * and required core providers. + * + *

This package defines the author-facing SPI model:

+ *
    + *
  • a {@linkplain io.gdcc.spi.meta.annotations.PluginContract.Role#BASE base contract} + * is the unique, directly loadable identity of a plugin,
  • + *
  • a {@linkplain io.gdcc.spi.meta.annotations.PluginContract.Role#CAPABILITY capability contract} + * adds optional functionality but is never loaded directly,
  • + *
  • a {@linkplain io.gdcc.spi.meta.annotations.DataversePlugin plugin implementation} + * must implement exactly one base contract and may additionally implement compatible capabilities,
  • + *
  • a {@linkplain io.gdcc.spi.meta.annotations.RequiredProvider required provider} + * declares Dataverse infrastructure contracts needed by a plugin contract.
  • + *
+ * + *

Only base contracts are used as plugin loading identities.

+ * + *

Contract interfaces must extend {@link io.gdcc.spi.meta.plugin.Plugin}, declare + * {@link io.gdcc.spi.meta.annotations.PluginContract}, and provide a compile-time + * {@code int API_LEVEL} constant. Plugin contracts must not extend other plugin contracts + * (with the single exception of a capability extending a required base contract).

+ * + *

Capabilities are attached to a plugin through normal Java interface implementation. + * This allows SPI authors to provide additional methods and default implementations + * without introducing ambiguity into plugin loading. If multiple implemented interfaces + * contribute conflicting default methods, the plugin implementation class must resolve + * that conflict explicitly.

+ * + *

Example with extending base contract:

+ *
{@code
+ * @PluginContract(role = PluginContract.Role.BASE)
+ * public interface FooBar extends Plugin {
+ *     int API_LEVEL = 1;
+ *     String getMediaType();
+ * }
+ *
+ * @PluginContract(
+ *     role = PluginContract.Role.CAPABILITY,
+ *     requires = { FooBar.class }
+ * )
+ * public interface BarBeque extends FooBar {
+ *     int API_LEVEL = 1;
+ *
+ *     default String getMediaType() {
+ *         return "application/bbq";
+ *     }
+ * }
+ *
+ * @DataversePlugin
+ * public class Grill implements FooBar, BarBeque {
+ *     // no override needed unless another default conflicts
+ * }
+ * }
+ */ +package io.gdcc.spi.meta.annotations; diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java new file mode 100644 index 0000000..a4431f2 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/Descriptor.java @@ -0,0 +1,142 @@ +package io.gdcc.spi.meta.descriptor; + +import java.util.Map; +import java.util.Objects; +import java.util.OptionalInt; + +/** + * In-memory representation of raw metadata de/serialized from a Dataverse Plugin Metadata file. + * + * @param klass implementation class name + * @param kind fully qualified base contract name + * @param contracts map of implemented contract names to API levels. May not contain null keys or values. + * @param requiredProviders map of required provider names to API levels. May not contain null keys or values. + */ +public record Descriptor( + String klass, + String kind, + Map contracts, + Map requiredProviders +) { + + /** + * Creates a new plugin and defensively copies the contract/provider maps. + * + *

This ensures the descriptor remains immutable even if callers pass in + * mutable maps. All arguments must be non-null.

+ * + *

Please note: the provided maps must not contain any null keys or values.

+ */ + public Descriptor { + Objects.requireNonNull(klass); + Objects.requireNonNull(kind); + contracts = Map.copyOf(Objects.requireNonNull(contracts)); + requiredProviders = Map.copyOf(Objects.requireNonNull(requiredProviders)); + } + + /** + * Determines whether this plugin is of a specified base contract kind. + * Checks both {@code kind} and implemented {@code contracts}. + * + * @param kindFqcn the fully qualified class name of the kind to check + * @return {@code true} if the plugin's kind matches the given class name and its contract is implemented, + * otherwise {@code false} + */ + public boolean isOfKind(String kindFqcn) { + Objects.requireNonNull(kindFqcn); + return kind.equals(kindFqcn) && implementsContract(kindFqcn); + } + + public boolean isOfKind(Class kind) { + Objects.requireNonNull(kind); + return isOfKind(DescriptorFormat.transformClassName(kind)); + } + + /** + * Checks whether this plugin declares the given implemented contract. + * + * @param contractFqcn the fully qualified contract class name + * @return {@code true} if the contract is present in this plugin + */ + public boolean implementsContract(String contractFqcn) { + Objects.requireNonNull(contractFqcn); + return contracts.containsKey(contractFqcn); + } + + /** + * Checks whether this plugin declares the given implemented contract. + * + * @param contractClass the contract class + * @return {@code true} if the contract is present in this plugin + */ + public boolean implementsContract(Class contractClass) { + Objects.requireNonNull(contractClass); + return implementsContract(DescriptorFormat.transformClassName(contractClass)); + } + + /** + * Returns the declared API level for the given implemented contract, if present. 
+ * + * @param contractFqcn the fully qualified contract class name + * @return the declared API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public int contractLevel(String contractFqcn) { + Objects.requireNonNull(contractFqcn); + return contracts.get(contractFqcn); + } + + /** + * Returns the declared API level for the given implemented contract, if present. + * + * @param contractClass the contract class + * @return the declared API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public int contractLevel(Class contractClass) { + Objects.requireNonNull(contractClass); + return contractLevel(DescriptorFormat.transformClassName(contractClass)); + } + + /** + * Checks whether this plugin declares the given required provider. + * + * @param providerFqcn the fully qualified provider class name + * @return {@code true} if the provider is present in this plugin + */ + public boolean requiresProvider(String providerFqcn) { + Objects.requireNonNull(providerFqcn); + return requiredProviders.containsKey(providerFqcn); + } + + /** + * Checks whether this plugin declares the given required provider. + * + * @param providerClass the provider class + * @return {@code true} if the provider is present in this plugin + */ + public boolean requiresProvider(Class providerClass) { + Objects.requireNonNull(providerClass); + return requiresProvider(DescriptorFormat.transformClassName(providerClass)); + } + + /** + * Returns the declared required API level for the given provider, if present. + * + * @param providerFqcn the fully qualified provider class name + * @return the required provider API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public int requiredProviderLevel(String providerFqcn) { + Objects.requireNonNull(providerFqcn); + return requiredProviders.get(providerFqcn); + } + + /** + * Returns the declared required API level for the given provider, if present. 
+ * + * @param providerClass the provider class + * @return the required provider API level wrapped in an {@link OptionalInt}, or an empty value if absent + */ + public int requiredProviderLevel(Class providerClass) { + Objects.requireNonNull(providerClass); + return requiredProviderLevel(DescriptorFormat.transformClassName(providerClass)); + } +} \ No newline at end of file diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java new file mode 100644 index 0000000..d20b3cb --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorFormat.java @@ -0,0 +1,206 @@ +package io.gdcc.spi.meta.descriptor; + +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.io.Writer; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Properties; + +/** + * Defines constants for the format and structure of plugin plugin files. + * These descriptors provide metadata about plugins, including their + * implementation class, type, contracts, and required providers. + * + *
    + * - DESCRIPTOR_DIRECTORY: Specifies the directory where plugin plugin files are located. + * - PLUGIN_CLASS_FIELD: Defines the key used to identify the plugin's implementation class. + * - PLUGIN_KIND_FIELD: Defines the key used to specify the base contract type of the plugin. + * - CONTRACT_PREFIX and CONTRACT_SUFFIX: Define the keys used to denote the contracts + * implemented by the plugin and their associated API levels. + * - REQUIRED_PROVIDER_PREFIX and REQUIRED_PROVIDER_SUFFIX: Define the keys used to represent + * required providers and their associated API levels. + *
+ */ +public final class DescriptorFormat { + public static final String DESCRIPTOR_DIRECTORY = "META-INF/dataverse/plugins/"; + public static final String DESCRIPTOR_EXTENSION = ".properties"; + + public static final String PLUGIN_CLASS_FIELD = "plugin.class"; + public static final String PLUGIN_KIND_FIELD = "plugin.kind"; + public static final String CONTRACT_PREFIX = "plugin.implements."; + public static final String CONTRACT_SUFFIX = ".level"; + public static final String REQUIRED_PROVIDER_PREFIX = "plugin.requires."; + public static final String REQUIRED_PROVIDER_SUFFIX = ".level"; + + private DescriptorFormat() { + /* Intentionally left blank for helper class */ + } + + /** + * Transforms the provided class into its canonical name representation. + * Reusable in different places to keep serialization from class to FQCN aligned. + * + * @param klass the {@link Class} object whose canonical name is to be returned + * @return the name of the specified class, or null if the class does not have a name. + * Note: not using the canonical name to avoid issues with inner classes and de/serialization. + */ + public static String transformClassName(Class klass) { + return klass.getName(); + } + + public static String toFilename(Class klass) { + return toFilename(transformClassName(klass)); + } + + public static String toFilename(String fqcn) { + // The FQCN may contain "$" from inner classes. This would be bad for filenames. 
+ return fqcn.replace('$', '.') + DESCRIPTOR_EXTENSION; + } + + public static String toPath(Class klass) { + return toPath(transformClassName(klass)); + } + + public static String toPath(String fqcn) { + return DESCRIPTOR_DIRECTORY + toFilename(fqcn); + } + + public static String toContractLevel(Class contractClass) { + return toContractLevel(transformClassName(contractClass)); + } + + public static String toContractLevel(String contractFQCN) { + return CONTRACT_PREFIX + contractFQCN + CONTRACT_SUFFIX; + } + + public static String toRequiredProviderLevel(Class providerClass) { + return toRequiredProviderLevel(transformClassName(providerClass)); + } + + public static String toRequiredProviderLevel(String providerFQCN) { + return REQUIRED_PROVIDER_PREFIX + providerFQCN + REQUIRED_PROVIDER_SUFFIX; + } + + /** + * Serializes the provided {@link Descriptor} into the given {@link Writer} + * in the form of a properties file, encoding plugin metadata such as plugin class, + * plugin kind, implemented contracts, and required providers. 
+ * + * @param descriptor the {@link Descriptor} containing the plugin metadata to be serialized + * @param writer the {@link Writer} where the plugin properties will be written + * @throws IOException if an I/O error occurs while writing to the {@link Writer} + */ + public static void write(Descriptor descriptor, Writer writer) throws IOException { + Properties properties = new Properties(); + properties.setProperty(PLUGIN_CLASS_FIELD, descriptor.klass()); + properties.setProperty(PLUGIN_KIND_FIELD, descriptor.kind()); + + descriptor.contracts().forEach((contract, level) -> + properties.setProperty(toContractLevel(contract), Integer.toString(level))); + + descriptor.requiredProviders().forEach((provider, level) -> + properties.setProperty(toRequiredProviderLevel(provider), Integer.toString(level))); + + properties.store(writer, "Generated plugin contract metadata"); + } + + /** + * Reads a plugin plugin from the serialized properties format. + * + *

The returned descriptor contains the mandatory plugin class and base contract fields, + * plus all parsed contract/provider API levels found in the input.

+ * + * @param reader the character stream containing plugin properties + * @return the parsed plugin. + * @throws IOException if the properties cannot be read + * @throws IllegalArgumentException if mandatory fields are missing or if any level value + * cannot be parsed as an integer + */ + public static Descriptor read(Reader reader) throws IOException { + Properties properties = new Properties(); + properties.load(reader); + + String pluginClass = properties.getProperty(PLUGIN_CLASS_FIELD); + if (pluginClass == null || pluginClass.isBlank()) { + throw new IllegalArgumentException("Missing required property " + PLUGIN_CLASS_FIELD); + } + + String pluginKind = properties.getProperty(PLUGIN_KIND_FIELD); + if (pluginKind == null || pluginKind.isBlank()) { + throw new IllegalArgumentException("Missing required property " + PLUGIN_KIND_FIELD); + } + + Map contracts = new LinkedHashMap<>(); + Map requiredProviders = new LinkedHashMap<>(); + + for (String key : properties.stringPropertyNames()) { + if (PLUGIN_CLASS_FIELD.equals(key) || PLUGIN_KIND_FIELD.equals(key)) { + continue; + } + + if (key.startsWith(CONTRACT_PREFIX) && key.endsWith(CONTRACT_SUFFIX)) { + String contractName = key.substring( + CONTRACT_PREFIX.length(), + key.length() - CONTRACT_SUFFIX.length() + ); + contracts.put(contractName, parseLevel(properties.getProperty(key), key)); + } + + if (key.startsWith(REQUIRED_PROVIDER_PREFIX) && key.endsWith(REQUIRED_PROVIDER_SUFFIX)) { + String providerName = key.substring( + REQUIRED_PROVIDER_PREFIX.length(), + key.length() - REQUIRED_PROVIDER_SUFFIX.length() + ); + requiredProviders.put(providerName, parseLevel(properties.getProperty(key), key)); + } + } + + return new Descriptor( + pluginClass, + pluginKind, + contracts, + requiredProviders + ); + } + + /** + * Reads a plugin plugin from the given string content. + * + * This method parses the input string into a {@link Descriptor} object. 
It internally utilizes + * a {@link StringReader} to read the string and expects the content to be in a properties-based serialized format. + * + * @param content the string content containing serialized plugin properties + * @return the parsed {@link Descriptor} + * @throws RuntimeException if an I/O error occurs + * @throws IllegalArgumentException if mandatory fields are missing + */ + public static Descriptor read(String content) { + Descriptor descriptor = null; + + try (StringReader reader = new StringReader(content)) { + descriptor = read(reader); + } catch (IOException e) { + // As we read from an in-memory string, this seems highly unlikely to happen. + throw new RuntimeException(e); + } + + return descriptor; + } + + private static int parseLevel(String value, String key) { + if (value == null || value.isBlank()) { + throw new IllegalArgumentException("Missing level value for property " + key); + } + + try { + int level = Integer.parseInt(value); + if (level < 1) + throw new IllegalArgumentException("Invalid integer value for property " + key + " may not be < 1, but is: " + value); + return level; + } catch (NumberFormatException e) { + throw new IllegalArgumentException("Invalid integer value for property " + key + ": " + value, e); + } + } +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java new file mode 100644 index 0000000..0bb83d3 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/DescriptorScanner.java @@ -0,0 +1,229 @@ +package io.gdcc.spi.meta.descriptor; + +import java.io.BufferedReader; +import java.io.FileReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.jar.JarEntry; +import 
java.util.jar.JarFile; + +import static io.gdcc.spi.meta.descriptor.DescriptorFormat.DESCRIPTOR_DIRECTORY; +import static io.gdcc.spi.meta.descriptor.DescriptorFormat.DESCRIPTOR_EXTENSION; +import static io.gdcc.spi.meta.descriptor.DescriptorFormat.read; + +public final class DescriptorScanner { + + private DescriptorScanner() { + /* Intentionally private constructor for helper class without instances */ + } + + /** + * Scans the specified path to identify plugin descriptors. The path can either be a directory + * or a JAR file. The method determines the type of the path and invokes the appropriate + * scanning logic to extract plugin descriptors. + * + * @param path the path to be scanned; must not be null. If the path represents a directory, + * plugin files contained within it will be scanned. If the path represents + * a JAR file, its internal entries will be scanned for descriptors. + * @return a list of {@code SourcedPluginDescriptor} objects representing plugin descriptors + * found at the given path. The list will be empty if no descriptors are found. + * @throws IllegalArgumentException if the provided {@code path} is {@code null} or scanning fails for other reasons caused by user. + * @throws IOException if an I/O error occurs while accessing the specified path or its contents. + */ + public static List scanPath(Path path) throws IOException { + List scanResult = new ArrayList<>(); + + if (path == null) { + throw new IllegalArgumentException("Path may not be null"); + } + if (Files.isDirectory(path)) { + scanDirectory(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin))); + } else { + scanJar(path).forEach(plugin -> scanResult.add(new SourcedDescriptor(path, plugin))); + } + + return List.copyOf(scanResult); + } + + /** + * Scans the specified JAR file for plugin plugin entries and extracts them into a list of + * {@link Descriptor} objects. 
The method looks for plugin plugin files based on predefined + * directory and file extension constants. + * + * @param jarPath the path to the JAR file to be scanned; must be a valid, readable, and regular file + * with a ".jar" extension. Usage of symbolic links is allowed. + * @return a list of {@code PluginDescriptor} objects extracted from the JAR file. If no plugin + * descriptors are found, the returned list will be empty. + * @throws IllegalArgumentException if the provided {@code jarPath} is {@code null}, does not exist, + * is unreadable, is not a regular file, or does not have a ".jar" + * extension. + * @throws IOException if an I/O error occurs while reading the JAR file or its entries. + */ + static List scanJar(Path jarPath) throws IOException { + if (jarPath == null || !Files.exists(jarPath) || !Files.isReadable(jarPath) || + !Files.isRegularFile(jarPath) || !jarPath.getFileName().toString().toLowerCase().endsWith(".jar")) { + throw new IllegalArgumentException("jarPath '" + jarPath + "' is not a readable JAR file"); + } + + // Iterate over the entries in the JAR file, read the ones we know to be plugin descriptors + List descriptors = new ArrayList<>(); + try (var jarFile = new JarFile(jarPath.toFile())) { + for (Iterator it = jarFile.entries().asIterator(); it.hasNext(); ) { + JarEntry entry = it.next(); + String name = entry.getName(); + + if (name.startsWith(DESCRIPTOR_DIRECTORY) && name.endsWith(DESCRIPTOR_EXTENSION)) { + try(InputStreamReader reader = new InputStreamReader(jarFile.getInputStream(entry), StandardCharsets.UTF_8)) { + Descriptor descriptor = read(reader); + descriptors.add(descriptor); + } + } + } + } + + return List.copyOf(descriptors); + } + + /** + * Scans the specified directory for plugin plugin files and extracts them into a list of + * {@link Descriptor} objects. The method searches for plugin files in a predefined + * subdirectory and processes files with a specific file extension. 
+ * + * @param root the root directory to be scanned; must be a valid, readable, and existing directory. + * @return a list of {@code PluginDescriptor} objects extracted from the directory. If no plugin + * descriptors are found, the returned list will be empty. + * @throws IllegalArgumentException if the provided {@code root} is {@code null}, does not exist, + * is unreadable, or is not a directory. + * @throws IOException if an I/O error occurs while reading the directory or its contents. + */ + static List scanDirectory(Path root) throws IOException { + if (root == null || !Files.exists(root) || !Files.isReadable(root) || !Files.isDirectory(root)) { + throw new IllegalArgumentException("directory '" + root + "' is not a readable directory"); + } + + // Look up the plugin metadata directory - if it does not exist, there are no plugins here. + Path descriptorDir = root.resolve(DescriptorFormat.DESCRIPTOR_DIRECTORY); + if (!Files.isDirectory(descriptorDir)) { + return List.of(); + } + + // Scan the directory for plugin metadata, read it, and add it to a list + List descriptors = new ArrayList<>(); + try (var paths = Files.list(descriptorDir)) { + for (Path path : paths.toList()) { + String name = path.getFileName().toString(); + + if (name.endsWith(DESCRIPTOR_EXTENSION)) { + try (FileReader reader = new FileReader(path.toFile(), StandardCharsets.UTF_8)) { + Descriptor descriptor = read(reader); + descriptors.add(descriptor); + } + } + } + } + + return List.copyOf(descriptors); + } + + + /** + * Checks whether the source referenced by the given descriptor contains a Java SPI service + * configuration file for the descriptor's declared kind, and whether that file explicitly + * lists the descriptor's implementation class. + * + *

The source location is expected to point either to a directory root or to a JAR file. + * In the directory case, this method looks for a regular file at + * {@code META-INF/services/} suffixed with the descriptor kind's fully qualified class name, below that root. In the JAR case, it looks for the + * corresponding JAR entry.

+ * + *

If the SPI record exists, its contents are interpreted using UTF-8. Blank lines, + * leading/trailing whitespace, and comments introduced by {@code #} are ignored in the + * same spirit as standard Java service configuration files.

+ * + * @param descriptor the descriptor whose source and implementation metadata should be checked + * @return {@code true} if a matching SPI record exists and contains the descriptor's + * implementation class; {@code false} if no such SPI record exists or the record + * does not list that implementation + * @throws IllegalArgumentException if the descriptor points to a source location that does not exist + * @throws IOException if an I/O error occurs while reading the directory entry or JAR entry + */ + public static boolean hasServiceProviderInterfaceRecord(SourcedDescriptor descriptor) throws IOException { + String spiLocation = "META-INF/services/" + descriptor.plugin().kind(); + Path source = descriptor.sourceLocation(); + + // The descriptor should already be vetted before reaching this point, so we keep validation + // intentionally lightweight here and only reject obviously invalid sources. + if (Files.notExists(source)) { + throw new IllegalArgumentException("Source descriptor contained non-existing source location " + source); + } + + // Strategy: + // - If the source is a directory, open the SPI file directly from the filesystem. + // - Otherwise, treat the source as an archive and look for the SPI record as a JAR entry. + // In both cases we funnel the actual content check through the same InputStream-based helper. + if (Files.isDirectory(source)) { + Path serviceFile = source.resolve(spiLocation); + + // No SPI record file at the expected location means there is nothing to match. + if (!Files.isRegularFile(serviceFile)) { + return false; + } + + // Open the regular file only for the duration of the content check. + try (InputStream serviceRecord = Files.newInputStream(serviceFile)) { + return spiRecordContains(serviceRecord, descriptor.plugin().klass()); + } + } + + // Important: the JAR must stay open for as long as the entry InputStream is being read. + // Therefore, both resources are owned by nested try-with-resources blocks in the same scope. 
+ try (JarFile jar = new JarFile(source.toFile())) { + JarEntry entry = jar.getJarEntry(spiLocation); + + // Missing JAR entry means there is no SPI record for the declared kind. + if (entry == null) { + return false; + } + + // Read the JAR entry while the JAR is still open, then close both resources automatically. + try (InputStream serviceRecord = jar.getInputStream(entry)) { + return spiRecordContains(serviceRecord, descriptor.plugin().klass()); + } + } + } + + /** + * Reads a Java SPI service configuration stream and checks whether it declares the given implementation class. + * + *

Lines are normalized in a tolerant way: comments beginning with {@code #} are stripped, + * surrounding whitespace is trimmed, and empty lines are ignored.

+ */ + private static boolean spiRecordContains(InputStream serviceRecord, String implementationClass) throws IOException { + // This helper intentionally contains the shared parsing logic so that directory-based + // and JAR-based SPI records are interpreted in exactly the same way. + try ( + InputStreamReader streamReader = new InputStreamReader(serviceRecord, StandardCharsets.UTF_8); + BufferedReader reader = new BufferedReader(streamReader) + ) { + return reader.lines() + // Strip inline comments to support standard SPI syntax. + .map(line -> { + int commentStart = line.indexOf('#'); + return commentStart >= 0 ? line.substring(0, commentStart) : line; + }) + // Normalize whitespace so that indented or padded entries still match. + .map(String::trim) + // Skip blank lines after normalization. + .filter(line -> !line.isEmpty()) + // Finally, look for the implementation class declared by the descriptor. + .anyMatch(line -> line.equals(implementationClass)); + } + } +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java new file mode 100644 index 0000000..ecea6bd --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/PluginDescriptor.java @@ -0,0 +1,134 @@ +package io.gdcc.spi.meta.descriptor; + +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; + +import java.nio.file.Path; +import java.util.Map; +import java.util.Objects; +import java.util.OptionalInt; + +/** + * Runtime-facing descriptor of a resolved and loaded plugin implementation. + * + *

This descriptor represents a plugin after metadata has been interpreted in the context of the + * running application and the relevant Java types have been resolved. Unlike the build-time or + * serialized descriptor form, this model uses actual {@link Class} references for the plugin + * implementation, its base contract, all implemented plugin contracts, and all required core + * providers.

+ * + *

The generic type parameter {@code T} represents the base plugin contract under which the plugin + * was resolved and loaded. The {@code pluginClass} is therefore guaranteed to implement that base + * contract, while {@code kindClass} denotes the concrete base plugin contract itself.

+ * + *

The {@code contracts} map contains all plugin contracts implemented by the plugin together with + * their declared API levels. This includes the base contract as well as any optional capability + * contracts. The {@code requiredProviders} map contains all core providers required by the plugin's + * implemented contracts, again paired with their declared API levels.

+ * + * @param the base plugin contract type under which this plugin was resolved + * @param sourceLocation the source location from which the plugin was loaded, such as a JAR file or + * exploded classpath directory + * @param identity the logical plugin identity reported by the plugin instance; intended for + * distinguishing plugins at runtime + * @param pluginClass the concrete implementation class of the plugin + * @param kindClass the resolved base plugin contract implemented by the plugin + * @param contracts all resolved plugin contracts implemented by the plugin, mapped to their + * declared API levels + * @param requiredProviders all resolved core providers required by the plugin, mapped to their + * required API levels + */ +public record PluginDescriptor( + Path sourceLocation, + String identity, + Class pluginClass, + Class kindClass, + Map, Integer> contracts, + Map, Integer> requiredProviders +) { + + public PluginDescriptor { + Objects.requireNonNull(sourceLocation); + Objects.requireNonNull(identity); + Objects.requireNonNull(pluginClass); + Objects.requireNonNull(kindClass); + Objects.requireNonNull(contracts); + Objects.requireNonNull(requiredProviders); + + // Immutability is key + contracts = Map.copyOf(contracts); + requiredProviders = Map.copyOf(requiredProviders); + + // Sane structure checks + if (identity.isBlank()) + throw new IllegalArgumentException("Plugin identity cannot be blank"); + if (contracts.isEmpty()) + throw new IllegalArgumentException("Plugin must implement at least one contract (the kindClass one)"); + } + + public boolean implementsContract(Class contractClass) { + return this.contracts.containsKey(contractClass); + } + + public OptionalInt contractLevel(Class contractClass) { + return implementsContract(contractClass) + ? 
OptionalInt.of(this.contracts.get(contractClass)) + : OptionalInt.empty(); + } + + public boolean requiresProvider(Class providerClass) { + return this.requiredProviders.containsKey(providerClass); + } + + public OptionalInt requiredProviderLevel(Class providerClass) { + return requiresProvider(providerClass) + ? OptionalInt.of(this.requiredProviders.get(providerClass)) + : OptionalInt.empty(); + } + + /** + * Returns the normalized identity string of this plugin. + * The normalization process converts the identity to lowercase + * and removes special characters such as "/\\-_:.#~*", ensuring + * a consistent format for comparison purposes. + * + * @return the normalized identity string, or null if the original identity is null + */ + public String normalizedIdentity() { + return normalizeIdentity(this.identity); + } + + /** + * Normalizes the given identity string for comparison purposes by converting it to lowercase + * and removing all occurrences of the characters "/\-_:.#~*", which are commonly used to separate words. + * This avoids having multiple plugins targeting the same thing, like an export format with a slightly different + * case or special characters. 
+ * + * @param identity the identity string to normalize + * @return the normalized identity string, or null if the input is null + */ + private String normalizeIdentity(String identity) { + if (identity == null) return null; + return identity.toLowerCase().replaceAll("[/\\\\_\\-:.#~*]+", ""); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + + PluginDescriptor that = (PluginDescriptor) obj; + + return Objects.equals(sourceLocation, that.sourceLocation) && + Objects.equals(pluginClass, that.pluginClass) && + ( + Objects.equals(identity, that.identity) || + Objects.equals(normalizeIdentity(identity), normalizeIdentity(that.identity)) + ); + } + + @Override + public int hashCode() { + return Objects.hash(sourceLocation, pluginClass, normalizeIdentity(identity)); + } +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java b/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java new file mode 100644 index 0000000..a94bddb --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/descriptor/SourcedDescriptor.java @@ -0,0 +1,23 @@ +package io.gdcc.spi.meta.descriptor; + +import java.nio.file.Path; +import java.util.Objects; + +/** + * A record representing a descriptor that is sourced from a specific location. + * Combines information about a descriptor and its source location. 
+ * + * @param sourceLocation the path to the source location of the descriptor, must not be null + * @param plugin the {@link Descriptor} representing the plugin information, must not be null + */ +public record SourcedDescriptor(Path sourceLocation, Descriptor plugin) { + + public SourcedDescriptor { + Objects.requireNonNull(sourceLocation); + Objects.requireNonNull(plugin); + } + + public boolean isOfKind(Class contractClass) { + return plugin.isOfKind(contractClass); + } +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java b/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java new file mode 100644 index 0000000..5b70630 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/plugin/CoreProvider.java @@ -0,0 +1,15 @@ +package io.gdcc.spi.meta.plugin; + +/** + * Represents a provider interface for core functionality within the plugin system. + * CoreProvider serves as a marker or extension point within the framework to be implemented + * by classes that provide essential services or functionality to the core system. + * + * Implementations of this interface are expected to integrate with the broader plugin system, + * potentially enabling the core system to interface with specific features or subsystems. + * + * @see Plugin + */ +public interface CoreProvider { + +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java b/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java new file mode 100644 index 0000000..8edb989 --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/plugin/Plugin.java @@ -0,0 +1,26 @@ +package io.gdcc.spi.meta.plugin; + +/** + * Represents the contract for plugins in the system. Implementations of this interface serve + * as modular components that can be dynamically loaded and integrated into the broader application. + * + * Each plugin must provide a unique, machine-readable identifier to ensure proper identification + * and usage within the system. 
+ * + * Implementers are required to define the {@link #identity()} method to specify their unique + * identifier. + * + * @see CoreProvider + */ +public interface Plugin { + + /** + * Returns the unique, machine-readable identifier for this plugin. + * This will be the primary key within the core to identify a specific plugin implementation. + * + * @return the plugin's identity string, which must be non-null, non-blank, and URL compatible. + * @implSpec This method must be overridden by any plugin implementation and return a non-null, non-blank, + * URL-compatible string. No plugin interface may provide a default implementation. + */ + String identity(); +} diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java new file mode 100644 index 0000000..da63fef --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/PluginContractProcessor.java @@ -0,0 +1,1587 @@ +package io.gdcc.spi.meta.processor; + +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.descriptor.Descriptor; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.plugin.Plugin; + +import javax.annotation.processing.AbstractProcessor; +import javax.annotation.processing.ProcessingEnvironment; +import javax.annotation.processing.RoundEnvironment; +import javax.lang.model.SourceVersion; +import javax.lang.model.element.AnnotationMirror; +import javax.lang.model.element.AnnotationValue; +import javax.lang.model.element.Element; +import javax.lang.model.element.ElementKind; +import javax.lang.model.element.ExecutableElement; +import javax.lang.model.element.Modifier; +import javax.lang.model.element.TypeElement; +import javax.lang.model.element.VariableElement; +import javax.lang.model.type.DeclaredType; +import javax.lang.model.type.TypeKind; +import javax.lang.model.type.TypeMirror; +import javax.lang.model.util.Elements; +import 
javax.lang.model.util.Types; +import javax.tools.Diagnostic; +import javax.tools.FileObject; +import javax.tools.StandardLocation; +import java.io.IOException; +import java.io.Writer; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.Deque; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; + +/** + * Annotation processor generating build-time metadata for plugin implementations. + * + *

+ * <p>This processor scans classes marked with {@code @DataversePlugin}, discovers all implemented
+ * plugin contracts annotated with {@code @PluginContract}, validates the contract graph, and emits:</p>
+ *
+ * <ol>
+ *   <li>a per-plugin descriptor under {@value DescriptorFormat#DESCRIPTOR_DIRECTORY}, and</li>
+ *   <li>a {@code META-INF/services/...} entry for the base plugin contract when safe to do so.</li>
+ * </ol>
+ *
+ * <p><b>Note:</b>
+ * The processor enforces contract rules for all top-level and member interfaces in compiled source files.
+ * Runtime validation of contracts in PluginLoader loaded from external JARs or assembled outside normal compilation
+ * will catch illegal usage the processor cannot see and validate (like anonymous or method body interfaces).</p>
+ *
+ * <p>The descriptor captures the build-time view of:</p>
+ * <ul>
+ *   <li>the plugin implementation class,</li>
+ *   <li>the plugin's base contract,</li>
+ *   <li>all implemented contract API levels,</li>
+ *   <li>all required provider API levels.</li>
+ * </ul>
+ *
+ * <p><b>Contract graph rules</b></p>
+ *
+ * <p>The processor enforces a strict contract hierarchy:</p>
+ * <ul>
+ *   <li>A {@link PluginContract.Role#BASE base contract} is the unique, directly loadable identity
+ *       of a plugin. Base contracts may not extend other contracts and may not declare
+ *       {@code requires}.</li>
+ *   <li>A {@link PluginContract.Role#CAPABILITY capability contract} adds optional functionality.
+ *       It must declare exactly one base contract in {@code requires}. A capability may optionally
+ *       extend its required base contract in the Java type hierarchy to provide default
+ *       implementations for methods declared by the base. A capability may not extend another
+ *       capability.</li>
+ * </ul>
+ *
+ * <p><b>Service registration</b></p>
+ *
+ * <p>Service registration generation is intentionally cautious. If any implementation of a given base
+ * contract uses {@code @AutoService}, this processor suppresses generated service output for that
+ * entire contract to avoid two processors writing the same {@code META-INF/services/...} file.</p>
+ *
+ * <p>Errors are reported against the offending source element and then converted into a local
+ * {@link ProcessorException}. This aborts processing of the current implementation only, allowing
+ * the processor to continue and surface additional problems in the same compilation run.</p>
+ */ +public final class PluginContractProcessor extends AbstractProcessor { + + /** + * Output directory for generated ServiceLoader files. + */ + private static final String SERVICES_DIRECTORY = "META-INF/services/"; + + /** + * Cached utility for type operations such as assignability checks. + */ + private Types types; + + /** + * Cached utility for element lookup and annotation default resolution. + */ + private Elements elements; + + /** + * Descriptor models accumulated during processing, keyed by implementation class name. + * + *

Descriptors are written only after processing is over, which keeps resource generation + * deterministic and avoids partial aggregate state.

+ */ + private final Map descriptors = new LinkedHashMap<>(); + + /** + * Service registrations grouped by base contract name. + * + *

Each map entry corresponds to one future {@code META-INF/services/} file. A sorted + * set is used to make generated output stable across compiler runs.

+ */ + private final Map> serviceImplementationsByContract = new LinkedHashMap<>(); + + /** + * Base contract names for which service file generation must be skipped. + * + *

If any implementation of a base contract uses {@code @AutoService}, that service type is + * considered externally managed and this processor suppresses its own output for the same path. + * This way, we do not have a race condition / conflict over one service file.

+ */ + private final Set serviceTypesManagedExternally = new LinkedHashSet<>(); + + /** + * Types already inspected during the current compilation. + * + *

The processor performs additional model-wide validation beyond explicit {@code @DataversePlugin} + * usages. Since the same type may reappear through multiple roots or hierarchy traversals, this set + * keeps those checks idempotent and avoids duplicate diagnostics.

+ */ + private final Set inspectedTypes = new LinkedHashSet<>(); + + /** + * Plugin implementations already converted into generated output models. + * + *

This is needed because implementations may be processed either explicitly through + * {@code @DataversePlugin} or implicitly when they are discovered as plain {@code Plugin} + * implementations during hierarchy inspection.

+ */ + private final Set processedImplementations = new LinkedHashSet<>(); + + /** + * Initializes compiler utility helpers from the processing environment. + * + * @param processingEnv the active annotation processing environment + */ + @Override + public synchronized void init(ProcessingEnvironment processingEnv) { + super.init(processingEnv); + this.types = processingEnv.getTypeUtils(); + this.elements = processingEnv.getElementUtils(); + } + + /** + * Returns the annotation types directly claimed by this processor. + * + *

The processor claims all annotations because it does not only react to explicitly annotated + * {@code @DataversePlugin} classes. It also performs project-wide validation for plugin contracts, + * provider contracts, and unannotated plugin implementations discovered in the type model.

+ * + * @return the supported top-level annotation types + */ + @Override + public Set getSupportedAnnotationTypes() { + return Set.of("*"); + } + + /** + * Advertises support for the latest source version understood by the current compiler. + * + *

This is preferred over a hard-coded release because the processor mainly operates on the + * annotation/type model and should remain usable across newer Java releases automatically.

+ * + * @return the latest source version supported by the running compiler + */ + @Override + public SourceVersion getSupportedSourceVersion() { + return SourceVersion.latestSupported(); + } + + /** + * Main processor entry point for each annotation processing round. + * + *

During normal rounds, this processor performs two tasks:

+ *
+ * <ol>
+ *   <li>it inspects all root types and their hierarchies for project-wide contract validation,</li>
+ *   <li>it processes explicitly annotated {@code @DataversePlugin} classes.</li>
+ * </ol>
+ * + *

During the final round, all accumulated descriptor and service models are written to the + * compiler output.

+ * + * @param annotations the annotations requested for this round + * @param roundEnv the current round environment + * @return {@code false} so other processors may continue to participate normally + */ + @Override + public boolean process(Set annotations, RoundEnvironment roundEnv) { + TypeElement markerAnnotation = elements.getTypeElement(ProcessorConstants.PLUGIN_IMPLEMENTATION_ANNOTATION); + if (markerAnnotation == null) { + // If the marker annotation itself cannot be resolved, something is wrong with the + // processor classpath. Returning false leaves room for other processors to continue. + return false; + } + + // Inspect all roots, not just annotated types. This enables strict enforcement for + // plugin/provider contracts and lets us discover plain Plugin implementations that + // should have used @DataversePlugin. + for (Element root : roundEnv.getRootElements()) { + if (root instanceof TypeElement typeElement) { + try { + inspectTypeHierarchy(typeElement); + } catch (ProcessorException ignored) { + // The concrete error has already been reported with source location. + // Continue with remaining roots to surface as many problems as possible. + } + } + } + + for (Element element : roundEnv.getElementsAnnotatedWith(markerAnnotation)) { + if (!(element instanceof TypeElement implementation)) { + error(element, "@DataversePlugin may only be used on classes"); + continue; + } + + try { + processImplementation(implementation); + } catch (ProcessorException ignored) { + // A concrete error has already been reported with source location. + // Continue with the next implementation so the user gets more than one error per run. + } + } + + if (roundEnv.processingOver()) { + writeAllGeneratedResources(); + } + + return false; + } + + // ── Implementation processing ─────────────────────────────────────────────── + + /** + * Processes one plugin implementation class. + * + *

The workflow is:

+ *
+ * <ol>
+ *   <li>validate the class structurally,</li>
+ *   <li>discover all implemented contracts in the full type hierarchy,</li>
+ *   <li>identify exactly one base contract,</li>
+ *   <li>collect contract and provider API levels,</li>
+ *   <li>record descriptor output,</li>
+ *   <li>record ServiceLoader output unless {@code @AutoService} takes over.</li>
+ * </ol>
+ * + * @param implementation the plugin implementation class + */ + private void processImplementation(TypeElement implementation) { + String implementationClassName = implementation.getQualifiedName().toString(); + if (!processedImplementations.add(implementationClassName)) { + // The implementation was already processed earlier in this compilation, for example + // when discovered implicitly during type hierarchy inspection. + return; + } + + validateImplementationClass(implementation); + + Set contracts = collectImplementedContracts(implementation); + if (contracts.isEmpty()) { + error( + implementation, + "No implemented plugin contracts found; " + + "implementations must implement a specific @PluginContract interface " + + "which itself extends " + Plugin.class.getName() + ); + throw new ProcessorException(); + } + + TypeElement baseContract = null; + Map contractLevels = new LinkedHashMap<>(); + Map providerLevels = new LinkedHashMap<>(); + + for (TypeElement contract : sortByQualifiedName(contracts)) { + PluginContractModel model = readPluginContractModel(contract); + + if (model.role() == PluginContract.Role.BASE) { + if (baseContract != null) { + error( + implementation, + "Implementation must implement exactly one Role.BASE @PluginContract, but implements: " + + baseContract.getQualifiedName() + " and " + contract.getQualifiedName() + ); + throw new ProcessorException(); + } + baseContract = contract; + } + + validateRequiredContracts(implementation, contract, contracts, model); + + // The API level is intentionally read from the compile-time constant present on the + // contract interface visible during this compilation. This preserves the build-time + // contract snapshot we later need at runtime. + int contractApiLevel = readIntConstant(contract, ProcessorConstants.API_LEVEL_FIELD_NAME); + String contractFQCN = contract.getQualifiedName().toString(); + // The following is just a precaution. 
As we look into these during compile time, it's hard to imagine + // a scenario where the levels ever actually differ. + if (contractLevels.containsKey(contractFQCN) && contractLevels.get(contractFQCN) != contractApiLevel) { + error(implementation, "Conflicting API levels on contract implementation: " + contractFQCN); + } else { + contractLevels.put(contract.getQualifiedName().toString(), contractApiLevel); + } + + // Provider requirements accumulate across all implemented contracts/capabilities. + // Conflicting requirements are rejected below. + Map requiredProviders = readProviderLevels(model.providers(), implementation); + mergeProviderLevels(providerLevels, requiredProviders, implementation); + } + + if (baseContract == null) { + error(implementation, "Implementation must implement exactly one Role.BASE @PluginContract"); + throw new ProcessorException(); + } + + String baseContractName = baseContract.getQualifiedName().toString(); + + descriptors.put( + implementationClassName, + new Descriptor( + implementationClassName, + baseContractName, + contractLevels, + providerLevels + ) + ); + + if (hasAutoServiceAnnotation(implementation)) { + // Skip generated META-INF/services output for the entire base contract to avoid + // resource collisions with AutoService, which writes the same aggregate file path. + serviceTypesManagedExternally.add(baseContractName); + warning( + implementation, + "@AutoService detected; generated META-INF/services entry for " + + baseContractName + + " will be skipped to avoid conflicts" + ); + } else { + serviceImplementationsByContract + .computeIfAbsent(baseContractName, ignored -> new TreeSet<>()) + .add(implementationClassName); + } + } + + /** + * Validates the basic structural requirements for a plugin implementation. + * + *

A valid plugin implementation must be a public, non-abstract class. These constraints + * ensure that the ServiceLoader can instantiate the class at runtime.

+ * + * @param implementation the implementation class to validate + */ + private void validateImplementationClass(TypeElement implementation) { + if (implementation.getKind() != ElementKind.CLASS) { + error(implementation, "@DataversePlugin may only be used on classes"); + throw new ProcessorException(); + } + + if (!implementation.getModifiers().contains(Modifier.PUBLIC)) { + error(implementation, "@DataversePlugin implementations must be public"); + throw new ProcessorException(); + } + + if (implementation.getModifiers().contains(Modifier.ABSTRACT)) { + error(implementation, "@DataversePlugin implementations must not be abstract"); + throw new ProcessorException(); + } + } + + /** + * Collects all plugin contracts implemented by the given class, including inherited ones. + * + *

The traversal walks the full type hierarchy breadth-first across both superclasses and + * interfaces so that indirectly inherited contracts and capability interfaces are discovered too.

+ * + * @param implementation the implementation class to inspect + * @return all implemented types recognized as plugin contracts + */ + private Set collectImplementedContracts(TypeElement implementation) { + Set result = new LinkedHashSet<>(); + Set visited = new LinkedHashSet<>(); + Deque queue = new ArrayDeque<>(); + queue.addLast(implementation.asType()); + + while (!queue.isEmpty()) { + TypeMirror current = queue.removeFirst(); + if (current.getKind() == TypeKind.NONE) { + continue; + } + if (!(current instanceof DeclaredType declaredType)) { + continue; + } + + Element currentElement = declaredType.asElement(); + if (!(currentElement instanceof TypeElement currentType)) { + continue; + } + + String qualifiedName = currentType.getQualifiedName().toString(); + if (!visited.add(qualifiedName)) { + continue; + } + + if (isPluginContract(currentType)) { + result.add(currentType); + } + + for (TypeMirror iface : currentType.getInterfaces()) { + queue.addLast(iface); + } + + TypeMirror superclass = currentType.getSuperclass(); + if (superclass != null && superclass.getKind() != TypeKind.NONE) { + queue.addLast(superclass); + } + } + + return result; + } + + /** + * Validates that all contracts required by the current contract are also implemented + * by the plugin implementation class. + * + *

This ensures that a plugin implementing a capability also implements the capability's + * required base contract, which is the only loadable identity for the plugin.

+ * + * @param implementation the concrete plugin implementation + * @param contract the contract currently being validated + * @param allImplementedContracts all discovered contracts of the implementation + * @param model the parsed model of the current contract + */ + private void validateRequiredContracts( + TypeElement implementation, + TypeElement contract, + Set allImplementedContracts, + PluginContractModel model + ) { + Set implementedNames = new LinkedHashSet<>(); + for (TypeElement implemented : allImplementedContracts) { + implementedNames.add(implemented.getQualifiedName().toString()); + } + + for (TypeElement requiredContract : model.requiredContracts()) { + String requiredName = requiredContract.getQualifiedName().toString(); + if (!implementedNames.contains(requiredName)) { + error( + implementation, + "Implementation of contract " + contract.getQualifiedName() + + " also requires contract " + requiredName + ); + throw new ProcessorException(); + } + } + } + + // ── Type hierarchy inspection ─────────────────────────────────────────────── + + /** + * Traverses a type hierarchy and applies project-wide validation rules. + * + *

This method exists because the processor validates more than explicitly annotated + * implementations. It also enforces that:

+ *
+ * <ul>
+ *   <li>plugin interfaces carry {@code @PluginContract},</li>
+ *   <li>provider interfaces declare {@code API_LEVEL},</li>
+ *   <li>concrete plugin implementations use {@code @DataversePlugin}, or at least trigger a warning.</li>
+ * </ul>
+ * + * @param typeElement the root type to inspect + */ + private void inspectTypeHierarchy(TypeElement typeElement) { + Deque queue = new ArrayDeque<>(); + queue.addLast(typeElement); + + while (!queue.isEmpty()) { + TypeElement current = queue.removeFirst(); + String qualifiedName = current.getQualifiedName().toString(); + if (!inspectedTypes.add(qualifiedName)) { + continue; + } + + inspectType(current); + + for (TypeMirror iface : current.getInterfaces()) { + TypeElement interfaceType = asTypeElement(iface); + if (interfaceType != null) { + queue.addLast(interfaceType); + } + } + + TypeMirror superclass = current.getSuperclass(); + TypeElement superType = asTypeElement(superclass); + if (superType != null && superclass.getKind() != TypeKind.NONE) { + queue.addLast(superType); + } + } + } + + /** + * Applies validation rules to a single type discovered during hierarchy inspection. + * + *

This method dispatches to specialized validators based on the nature of the type: + * contract interfaces, provider interfaces, and plugin implementation candidates each + * have their own set of rules.

+ * + * @param typeElement the type to inspect + */ + private void inspectType(TypeElement typeElement) { + validatePluginContractUsage(typeElement); + validateDirectBaseTypeImplementations(typeElement); + + if (isPluginInterfaceCandidate(typeElement)) { + if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { + error(typeElement, "Interfaces extending Plugin must declare @PluginContract"); + throw new ProcessorException(); + } + + if (!typeElement.getModifiers().contains(Modifier.PUBLIC)) { + error(typeElement, "Interfaces extending Plugin must be public"); + } + + validateApiLevelConstant(typeElement); + validateContractGraph(typeElement); + } + + if (isProviderInterfaceCandidate(typeElement)) { + if (!typeElement.getModifiers().contains(Modifier.PUBLIC)) { + error(typeElement, "Interfaces extending CoreProvider must be public"); + } + + validateApiLevelConstant(typeElement); + } + + if (isPluginImplementationCandidate(typeElement) + && findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_IMPLEMENTATION_ANNOTATION) == null) { + warning( + typeElement, + "Plugin implementation should declare @DataversePlugin; processing it implicitly" + ); + + // Even without the annotation, we still process the implementation. This keeps the + // migration path smooth and ensures metadata generation does not depend solely on + // authors remembering one annotation. + processImplementation(typeElement); + } + } + + // ── Contract graph validation ─────────────────────────────────────────────── + + /** + * Validates the contract graph rules for a {@code @PluginContract}-annotated interface. + * + *

This is the central method enforcing the structural rules of the contract hierarchy. + * It reads the contract's role and delegates to role-specific validation:

+ * + *
+ * <ul>
+ *   <li>BASE contracts may not declare {@code requires} and may not extend
+ *       other contracts.</li>
+ *   <li>CAPABILITY contracts must declare exactly one base contract in
+ *       {@code requires}. They may optionally extend their required base contract in the
+ *       Java type hierarchy (to provide default implementations), but may not extend another
+ *       capability.</li>
+ * </ul>
+ * + *

Additionally, for capabilities, this method enforces package locality: the capability + * must reside in the same package or a subpackage of its required base contract.

+ * + * @param contract the contract interface to validate + */ + private void validateContractGraph(TypeElement contract) { + AnnotationMirror annotation = findAnnotationMirror(contract, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION); + if (annotation == null) { + return; + } + + PluginContract.Role role = readContractRole(annotation, contract); + List requiredContracts = readClassArrayAnnotationValue(annotation, "requires"); + + // Collect all parent types that are themselves plugin contracts. This is used to enforce + // the extension rules: which contracts may extend which other contracts. + List extendedContracts = findExtendedPluginContracts(contract); + + if (role == PluginContract.Role.BASE) { + validateBaseContractGraph(contract, requiredContracts, extendedContracts); + } else { + validateCapabilityContractGraph(contract, requiredContracts, extendedContracts); + } + } + + /** + * Validates the graph rules specific to a {@link PluginContract.Role#BASE base contract}. + * + *

Base contracts are the loading identities of plugins. They form the roots of the + * contract graph and therefore:

+ *
+ * <ul>
+ *   <li>must not declare {@code requires} — they do not depend on other contracts,</li>
+ *   <li>must not extend other contracts — there can only be one loading identity per plugin.</li>
+ * </ul>
+ * + * @param contract the base contract being validated + * @param requiredContracts the contracts listed in the {@code requires} attribute + * @param extendedContracts parent contracts found in the Java type hierarchy + */ + private void validateBaseContractGraph( + TypeElement contract, + List requiredContracts, + List extendedContracts + ) { + // Base contracts are self-contained loading identities and may not require other contracts. + if (!requiredContracts.isEmpty()) { + error( + contract, + "Base contract " + contract.getQualifiedName() + + " may not require other contracts; only capabilities may declare requires" + ); + throw new ProcessorException(); + } + + // Base contracts must not extend other contracts. Allowing this would create ambiguous + // loading identities — the plugin loader would not know which base to register under. + if (!extendedContracts.isEmpty()) { + TypeElement parent = extendedContracts.get(0); + error( + contract, + "Contract " + parent.getQualifiedName() + + " may not be extended by base contract " + contract.getQualifiedName() + + "; base contracts must not extend other contracts" + ); + throw new ProcessorException(); + } + } + + /** + * Validates the graph rules specific to a {@link PluginContract.Role#CAPABILITY capability contract}. + * + *

Capabilities are non-loadable extensions. The rules ensure a clean, unambiguous graph:

+ *
+ * <ul>
+ *   <li>A capability must require exactly one base contract.</li>
+ *   <li>A capability may extend its required base contract to provide default implementations
+ *       for methods declared by the base.</li>
+ *   <li>A capability may not extend another capability contract.</li>
+ *   <li>A capability must reside in the same package or a subpackage of its required base.</li>
+ * </ul>
+ * + * @param contract the capability contract being validated + * @param requiredContracts the contracts listed in the {@code requires} attribute + * @param extendedContracts parent contracts found in the Java type hierarchy + */ + private void validateCapabilityContractGraph( + TypeElement contract, + List requiredContracts, + List extendedContracts + ) { + // A capability must require exactly one base contract. This links the capability to its + // loading identity and ensures the plugin loader can always resolve the plugin's kind. + TypeElement requiredBase = validateCapabilityRequires(contract, requiredContracts); + + // Validate the Java extends hierarchy: a capability may extend its required base, but + // must not extend any other contract (especially not another capability). + validateCapabilityExtensions(contract, extendedContracts, requiredBase); + + // Capabilities must be co-located with their base contract so that SPI authors maintain + // a cohesive package structure. + validatePackageLocality(contract, requiredBase); + } + + /** + * Validates the {@code requires} attribute of a capability contract. + * + *

A capability must require exactly one entry, and that entry must be a base contract + * interface — not a capability, not a class, and not the capability itself.

+ * + * @param contract the capability contract being validated + * @param requiredContracts the contracts listed in the {@code requires} attribute + * @return the single required base contract + */ + private TypeElement validateCapabilityRequires( + TypeElement contract, + List requiredContracts + ) { + String errorMessage = "Capability contract %s must require single base @PluginContract interface".formatted(contract.getQualifiedName()); + + // Exactly one entry is required. Zero entries, multiple entries, or entries that are + // not base contracts all fail with the same message. + if (requiredContracts.size() != 1) { + error(contract, errorMessage); + throw new ProcessorException(); + } + + TypeElement required = requiredContracts.get(0); + + // The required type must be an interface (not a class) and must carry @PluginContract. + if (required.getKind() != ElementKind.INTERFACE) { + error(contract, errorMessage); + throw new ProcessorException(); + } + + // Self-references are meaningless and would create a cycle. + if (required.getQualifiedName().contentEquals(contract.getQualifiedName())) { + error(contract, errorMessage); + throw new ProcessorException(); + } + + // The required contract must be annotated with @PluginContract. + AnnotationMirror requiredAnnotation = findAnnotationMirror( + required, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION + ); + if (requiredAnnotation == null) { + error(contract, errorMessage); + throw new ProcessorException(); + } + + // The required contract must have the BASE role. Capabilities requiring other capabilities + // are not supported. + PluginContract.Role requiredRole = readContractRole(requiredAnnotation, required); + if (requiredRole != PluginContract.Role.BASE) { + error(contract, errorMessage); + throw new ProcessorException(); + } + + return required; + } + + /** + * Validates the Java {@code extends} hierarchy of a capability contract. + * + *

A capability may extend its required base contract — this is the mechanism that allows + * the capability to provide default implementations for methods declared by the base, and + * Java's type system will correctly resolve them without requiring bridge methods in the + * plugin implementation class.

+ * + *

However, a capability may not extend another capability contract. Capability-to-capability + * inheritance is not supported in the current model.

+ * + * @param contract the capability contract being validated + * @param extendedContracts all parent types that are plugin contracts + * @param requiredBase the single required base contract from the {@code requires} attribute + */ + private void validateCapabilityExtensions( + TypeElement contract, + List extendedContracts, + TypeElement requiredBase + ) { + for (TypeElement parent : extendedContracts) { + AnnotationMirror parentAnnotation = findAnnotationMirror( + parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION + ); + if (parentAnnotation == null) { + // Should not happen since findExtendedPluginContracts only returns annotated types, + // but guard defensively. + continue; + } + + PluginContract.Role parentRole = readContractRole(parentAnnotation, parent); + + if (parentRole == PluginContract.Role.CAPABILITY) { + // Capability-to-capability extension is not allowed. Each capability is a + // standalone extension point attached to a base contract. + error( + contract, + "Contract " + parent.getQualifiedName() + + " may not be extended by capability contract " + contract.getQualifiedName() + + "; capabilities may not extend other capabilities" + ); + throw new ProcessorException(); + } + + if (parentRole == PluginContract.Role.BASE) { + // A capability may extend a base contract, but only if that base is the same one + // declared in requires. Extending an unrelated base would silently introduce a + // second loading identity into the hierarchy. + if (!parent.getQualifiedName().contentEquals(requiredBase.getQualifiedName())) { + error( + contract, + "Capability contract " + contract.getQualifiedName() + + " extends base contract " + parent.getQualifiedName() + + " but requires " + requiredBase.getQualifiedName() + + "; the extended base must match the required base" + ); + throw new ProcessorException(); + } + // Extension matches requires — this is the allowed case. 
+ } + } + + // If the capability extends a base contract but does not declare it in requires, the + // contract graph would be inconsistent. Check the reverse: if the capability extends + // a base, that base must appear in requires (already validated above). But if the + // capability extends a base that is NOT in extendedContracts check, we need to also + // check: does the contract extend the required base without declaring requires? + // Actually, this direction is already covered: we validate requires first, and then + // check that any extended base matches requires. The remaining case is: the capability + // extends a base but forgot requires entirely — that's caught by validateCapabilityRequires. + + // Additional check: if the capability extends a base contract in its Java type hierarchy, + // that base MUST be declared in requires. This handles the case where the capability + // extends a base but declares a different (or no) base in requires. + for (TypeElement parent : extendedContracts) { + AnnotationMirror parentAnnotation = findAnnotationMirror( + parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION + ); + if (parentAnnotation == null) { + continue; + } + PluginContract.Role parentRole = readContractRole(parentAnnotation, parent); + if (parentRole == PluginContract.Role.BASE + && !parent.getQualifiedName().contentEquals(requiredBase.getQualifiedName())) { + error( + contract, + "Capability contract " + contract.getQualifiedName() + + " must require extended base contract interface " + parent.getQualifiedName() + ); + throw new ProcessorException(); + } + } + } + + /** + * Finds all direct parent interfaces of the given contract that are themselves plugin contracts. + * + *

This only looks at the directly declared {@code extends} clause of the interface, not at + * transitive ancestors. The common super-interface {@code Plugin} is excluded because it is + * a framework marker, not a contract.

+ * + * @param contract the contract interface to inspect + * @return parent types that carry {@code @PluginContract}, in declaration order + */ + private List findExtendedPluginContracts(TypeElement contract) { + List result = new ArrayList<>(); + for (TypeMirror iface : contract.getInterfaces()) { + TypeElement parent = asTypeElement(iface); + if (parent == null) { + continue; + } + + // Skip the Plugin marker interface — it is a framework type, not a contract. + if (parent.getQualifiedName().contentEquals(ProcessorConstants.PLUGIN_INTERFACE)) { + continue; + } + + // Only consider interfaces that are annotated with @PluginContract. + if (findAnnotationMirror(parent, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) != null) { + result.add(parent); + } + } + return result; + } + + /** + * Validates that the capability resides in the same package or a subpackage of its required + * base contract. + * + *

     * This rule enforces package locality for SPI cohesion: capabilities should be defined
     * close to their base contract so that related contracts form a coherent API surface.

+ * + * @param capability the capability contract being validated + * @param requiredBase the required base contract + */ + private void validatePackageLocality(TypeElement capability, TypeElement requiredBase) { + String capabilityPackage = getPackageName(capability); + String basePackage = getPackageName(requiredBase); + + // The capability must be in the same package or a subpackage of the base. + // "test.export.xml".startsWith("test.export.") covers subpackages. + // Direct equality covers the same-package case. + boolean sameOrSubpackage = capabilityPackage.equals(basePackage) + || capabilityPackage.startsWith(basePackage + "."); + + if (!sameOrSubpackage) { + error( + capability, + "Capability contract " + capability.getQualifiedName() + + " and its required base contract " + requiredBase.getQualifiedName() + + " must share same package path; " + capabilityPackage + + " is not within " + basePackage + ); + throw new ProcessorException(); + } + } + + // ── Direct base type validation ───────────────────────────────────────────── + + /** + * Rejects direct implementations of the foundational base types {@code Plugin} and + * {@code CoreProvider}. + * + *

These two types are infrastructure-level marker/base interfaces only. Loadable plugins + * and concrete providers must instead implement a specific contract interface extending one + * of these base types. Otherwise, no meaningful compatibility contract can be derived.

+ * + * @param typeElement the type currently being inspected + */ + private void validateDirectBaseTypeImplementations(TypeElement typeElement) { + if (typeElement.getKind() != ElementKind.CLASS) { + return; + } + + if (directlyImplementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)) { + error( + typeElement, + "Plugin implementations must implement a specific plugin contract interface, not Plugin directly" + ); + throw new ProcessorException(); + } + + if (directlyImplementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE)) { + error( + typeElement, + "Core provider implementations must implement a specific provider interface, not CoreProvider directly" + ); + throw new ProcessorException(); + } + } + + /** + * Checks whether a type directly declares the given interface in its {@code implements} clause. + * + *

     * This is stricter than assignability: it only matches explicit direct implementation and is
     * used to reject classes that target the framework base interfaces {@code Plugin} or
     * {@code CoreProvider} directly.

+ * + * @param typeElement the type to inspect + * @param targetTypeName the fully qualified interface name to look for + * @return {@code true} if the type directly implements the target interface + */ + private boolean directlyImplementsType(TypeElement typeElement, String targetTypeName) { + for (TypeMirror interfaceType : typeElement.getInterfaces()) { + TypeElement interfaceElement = asTypeElement(interfaceType); + if (interfaceElement != null && interfaceElement.getQualifiedName().contentEquals(targetTypeName)) { + return true; + } + } + return false; + } + + // ── @PluginContract usage validation ──────────────────────────────────────── + + /** + * Verifies that {@code @PluginContract} is only used on interfaces that extend {@code Plugin}. + * + *

Although the annotation targets {@code ElementType.TYPE}, Java's annotation target model + * cannot express "interfaces extending Plugin only". This processor therefore enforces the + * rule explicitly and fails compilation when the annotation is placed on classes, enums, + * records, or interfaces that do not extend {@code Plugin}.

+ * + * @param typeElement the type currently being inspected + */ + private void validatePluginContractUsage(TypeElement typeElement) { + if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { + return; + } + + // @PluginContract must be on an interface that extends Plugin. + if (typeElement.getKind() != ElementKind.INTERFACE + || !implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE)) { + error( + typeElement, + "@PluginContract may only be declared on interfaces extending Plugin" + ); + throw new ProcessorException(); + } + } + + // ── Contract model reading ────────────────────────────────────────────────── + + /** + * Reads and validates the metadata of one plugin contract interface. + * + *

     * This extracts the role, required contracts, and provider dependencies from the
     * {@code @PluginContract} annotation. It also validates the presence and correctness
     * of the {@code API_LEVEL} compile-time constant.

+ * + * @param contract the contract interface + * @return the extracted in-memory contract model + */ + private PluginContractModel readPluginContractModel(TypeElement contract) { + AnnotationMirror annotation = findAnnotationMirror(contract, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION); + if (annotation == null) { + error(contract, "Missing @PluginContract"); + throw new ProcessorException(); + } + + validateApiLevelConstant(contract); + + PluginContract.Role role = readContractRole(annotation, contract); + List requiredContracts = readClassArrayAnnotationValue(annotation, "requires"); + List providers = readRequiredProviders(annotation); + + return new PluginContractModel(role, List.copyOf(requiredContracts), List.copyOf(providers)); + } + + /** + * Reads the {@code role} member of a {@code @PluginContract} annotation. + * + *

During annotation processing, enum-valued annotation members appear as + * {@link VariableElement} instances. This method extracts the constant name and + * maps it back to the {@link PluginContract.Role} enum.

+ * + * @param annotation the contract annotation mirror + * @param contract the annotated contract, used for diagnostics + * @return the parsed contract role + */ + private PluginContract.Role readContractRole(AnnotationMirror annotation, TypeElement contract) { + AnnotationValue value = getAnnotationValue(annotation, "role"); + if (value == null) { + error(contract, "@PluginContract.role is required"); + throw new ProcessorException(); + } + + Object raw = value.getValue(); + if (!(raw instanceof VariableElement enumConstant)) { + error(contract, "@PluginContract.role must be an enum constant"); + throw new ProcessorException(); + } + + try { + return PluginContract.Role.valueOf(enumConstant.getSimpleName().toString()); + } catch (IllegalArgumentException ex) { + error(contract, "Unsupported @PluginContract.role: " + enumConstant.getSimpleName()); + throw new ProcessorException(); + } + } + + // ── API level validation ──────────────────────────────────────────────────── + + /** + * Verifies that the given type declares a valid compile-time constant {@code API_LEVEL} field. + * + * @param contract the contract or provider type to validate + */ + private void validateApiLevelConstant(TypeElement contract) { + readIntConstant(contract, ProcessorConstants.API_LEVEL_FIELD_NAME); + } + + /** + * Reads a compile-time {@code int} constant from a type. + * + *

     * The field must be a primitive {@code int} with a compile-time constant value.
     * Boxed {@code Integer} fields or fields initialized with method calls do not qualify
     * because their values are not available to the annotation processor at compile time.

+ * + * @param type the owning type + * @param fieldName the field to locate + * @return the constant value + */ + private int readIntConstant(TypeElement type, String fieldName) { + for (Element enclosed : type.getEnclosedElements()) { + if (enclosed instanceof VariableElement variable + && variable.getSimpleName().contentEquals(fieldName)) { + Object value = variable.getConstantValue(); + if (value instanceof Integer intValue) { + return intValue; + } + + error(type, type.getQualifiedName() + "." + fieldName + " must be a compile-time int constant"); + throw new ProcessorException(); + } + } + + error(type, type.getQualifiedName() + " must declare int " + fieldName); + throw new ProcessorException(); + } + + // ── Provider handling ─────────────────────────────────────────────────────── + + /** + * Resolves the API levels of all providers required by the current contract. + * + *

Each provider type referenced in a {@code @RequiredProvider} annotation must be an + * interface extending {@code CoreProvider} and must declare a compile-time {@code API_LEVEL} + * constant.

+ * + * @param providerTypes the provider interfaces referenced by the contract annotation + * @param implementation the concrete implementation being processed, used for diagnostics + * @return a map from provider class name to required API level + */ + private Map readProviderLevels(List providerTypes, TypeElement implementation) { + Map result = new LinkedHashMap<>(); + TypeElement coreProviderType = elements.getTypeElement(ProcessorConstants.CORE_PROVIDER_INTERFACE); + if (coreProviderType == null) { + error(implementation, "Cannot resolve " + ProcessorConstants.CORE_PROVIDER_INTERFACE); + throw new ProcessorException(); + } + + for (TypeElement providerType : providerTypes) { + if (!types.isAssignable( + types.erasure(providerType.asType()), + types.erasure(coreProviderType.asType()) + )) { + error( + implementation, + "Required provider " + providerType.getQualifiedName() + + " does not implement " + ProcessorConstants.CORE_PROVIDER_INTERFACE + ); + throw new ProcessorException(); + } + + int apiLevel = readIntConstant(providerType, ProcessorConstants.API_LEVEL_FIELD_NAME); + result.put(providerType.getQualifiedName().toString(), apiLevel); + } + + return result; + } + + /** + * Merges provider API level requirements from one contract into the accumulated set. + * + *

     * If the same provider is required with different API levels by different contracts,
     * processing fails because the resulting runtime expectation would be ambiguous.

+ * + * @param merged the accumulated provider requirements + * @param additional the provider requirements from the current contract + * @param implementation the concrete implementation, used for diagnostics + */ + private void mergeProviderLevels( + Map merged, + Map additional, + TypeElement implementation + ) { + additional.forEach((providerName, apiLevel) -> { + Integer existing = merged.putIfAbsent(providerName, apiLevel); + if (existing != null && existing.intValue() != apiLevel) { + error( + implementation, + "Conflicting API levels for provider " + providerName + + ": " + existing + " vs " + apiLevel + ); + throw new ProcessorException(); + } + }); + } + + // ── AutoService detection ─────────────────────────────────────────────────── + + /** + * Checks whether the implementation class uses {@code @AutoService}. + * + *

The processor does not depend on AutoService directly. It merely detects the annotation + * by name so it can avoid generating conflicting ServiceLoader resources.

+ * + * @param implementation the implementation class + * @return {@code true} if {@code @AutoService} is present + */ + private boolean hasAutoServiceAnnotation(TypeElement implementation) { + return findAnnotationMirror(implementation, ProcessorConstants.AUTO_SERVICE_ANNOTATION) != null; + } + + // ── Resource generation ───────────────────────────────────────────────────── + + /** + * Writes all accumulated generated resources after processing is complete. + * + *

     * Descriptors are always written. ServiceLoader files are written only for service types
     * that are not externally managed via {@code @AutoService}.

+ */ + private void writeAllGeneratedResources() { + for (Descriptor descriptor : descriptors.values()) { + writeDescriptor(descriptor); + } + + for (Map.Entry> entry : serviceImplementationsByContract.entrySet()) { + String serviceType = entry.getKey(); + if (serviceTypesManagedExternally.contains(serviceType)) { + continue; + } + writeServiceFile(serviceType, entry.getValue()); + } + } + + /** + * Writes one generated plugin descriptor file. + * + * @param descriptor the plugin descriptor model to serialize + */ + private void writeDescriptor(Descriptor descriptor) { + String resourceName = DescriptorFormat.toPath(descriptor.klass()); + + try { + FileObject resource = processingEnv + .getFiler() + .createResource(StandardLocation.CLASS_OUTPUT, "", resourceName); + + try (Writer writer = resource.openWriter()) { + DescriptorFormat.write(descriptor, writer); + } + } catch (IOException e) { + processingEnv.getMessager().printMessage( + Diagnostic.Kind.ERROR, + "Failed to write descriptor for " + descriptor.klass() + ": " + e.getMessage() + ); + } + } + + /** + * Writes one ServiceLoader registration file for a base contract. + * + *

This replaces the need for {@code @AutoService} on plugin implementations. The generated + * file follows the standard {@code META-INF/services/} convention.

+ * + * @param serviceTypeName the fully qualified name of the service interface + * @param implementations the implementation class names to register + */ + private void writeServiceFile(String serviceTypeName, Set implementations) { + String resourceName = SERVICES_DIRECTORY + serviceTypeName; + + try { + FileObject resource = processingEnv.getFiler() + .createResource(StandardLocation.CLASS_OUTPUT, "", resourceName); + + try (Writer writer = resource.openWriter()) { + for (String implementation : implementations) { + writer.write(implementation); + writer.write(System.lineSeparator()); + } + } + } catch (IOException e) { + processingEnv.getMessager().printMessage( + Diagnostic.Kind.ERROR, + "Failed to write service file for " + serviceTypeName + ": " + e.getMessage() + ); + } + } + + // ── Type candidate checks ─────────────────────────────────────────────────── + + /** + * Determines whether a type qualifies as an implementation candidate for a plugin. + * + *

     * A candidate is a concrete (non-abstract) class that implements {@code Plugin} through
     * some contract interface. Abstract base classes are intentionally excluded — they may exist
     * as shared implementation helpers without needing {@code @DataversePlugin}.

+ * + * @param typeElement the type to inspect + * @return {@code true} if the type is a concrete class implementing {@code Plugin} + */ + private boolean isPluginImplementationCandidate(TypeElement typeElement) { + if (typeElement.getKind() != ElementKind.CLASS) { + return false; + } + if (typeElement.getModifiers().contains(Modifier.ABSTRACT)) { + return false; + } + return implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE) + && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE); + } + + /** + * Determines whether a type is a plugin interface candidate that must declare {@code @PluginContract}. + * + *

Any interface extending {@code Plugin} (other than {@code Plugin} itself) is expected to + * be a contract interface and must carry the {@code @PluginContract} annotation.

+ * + * @param typeElement the type to inspect + * @return {@code true} if the type is an interface extending {@code Plugin} + */ + private boolean isPluginInterfaceCandidate(TypeElement typeElement) { + return typeElement.getKind() == ElementKind.INTERFACE + && implementsType(typeElement, ProcessorConstants.PLUGIN_INTERFACE) + && !isExactType(typeElement, ProcessorConstants.PLUGIN_INTERFACE); + } + + /** + * Determines whether a type is a provider interface candidate that must declare {@code API_LEVEL}. + * + * @param typeElement the type to inspect + * @return {@code true} if the type is an interface extending {@code CoreProvider} + */ + private boolean isProviderInterfaceCandidate(TypeElement typeElement) { + return typeElement.getKind() == ElementKind.INTERFACE + && implementsType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE) + && !isExactType(typeElement, ProcessorConstants.CORE_PROVIDER_INTERFACE); + } + + /** + * Determines whether the given type is a plugin contract. + * + *

     * A type qualifies as a plugin contract only when it is annotated with
     * {@code @PluginContract} and is assignable to the common plugin super-interface.

+ * + * @param typeElement the type to test + * @return {@code true} if the type is a plugin contract + */ + private boolean isPluginContract(TypeElement typeElement) { + if (findAnnotationMirror(typeElement, ProcessorConstants.PLUGIN_CONTRACT_ANNOTATION) == null) { + return false; + } + + TypeElement pluginType = elements.getTypeElement(ProcessorConstants.PLUGIN_INTERFACE); + if (pluginType == null) { + return false; + } + + return types.isAssignable( + types.erasure(typeElement.asType()), + types.erasure(pluginType.asType()) + ); + } + + // ── Type system helpers ───────────────────────────────────────────────────── + + /** + * Tests whether the given type is assignable to another type identified by fully qualified name. + * + * @param typeElement the source type + * @param targetTypeName the fully qualified target type name + * @return {@code true} if the source type is assignable to the target type + */ + private boolean implementsType(TypeElement typeElement, String targetTypeName) { + TypeElement targetType = elements.getTypeElement(targetTypeName); + if (targetType == null) { + return false; + } + + return types.isAssignable( + types.erasure(typeElement.asType()), + types.erasure(targetType.asType()) + ); + } + + /** + * Checks whether the given type is exactly the named type itself, not merely a subtype. + * + * @param typeElement the type to inspect + * @param targetTypeName the fully qualified target type name + * @return {@code true} if both names are identical + */ + private boolean isExactType(TypeElement typeElement, String targetTypeName) { + return typeElement.getQualifiedName().contentEquals(targetTypeName); + } + + /** + * Extracts the package name from a type element. 
+ * + * @param typeElement the type whose package to determine + * @return the fully qualified package name + */ + private String getPackageName(TypeElement typeElement) { + return elements.getPackageOf(typeElement).getQualifiedName().toString(); + } + + // ── Annotation mirror helpers ─────────────────────────────────────────────── + + /** + * Finds an annotation mirror on the given element by fully qualified annotation type name. + * + *

Annotation mirrors are the compile-time representation of annotations. Unlike + * {@code getAnnotation()}, mirrors work reliably during annotation processing even when + * the annotation class is being compiled in the same round.

+ * + * @param element the annotated element + * @param annotationTypeName the fully qualified annotation type name + * @return the matching annotation mirror, or {@code null} if absent + */ + private AnnotationMirror findAnnotationMirror(Element element, String annotationTypeName) { + for (AnnotationMirror mirror : element.getAnnotationMirrors()) { + Element annotationElement = mirror.getAnnotationType().asElement(); + if (annotationElement instanceof TypeElement annotationType + && annotationType.getQualifiedName().contentEquals(annotationTypeName)) { + return mirror; + } + } + return null; + } + + /** + * Resolves one annotation member value, including defaults. + * + *

     * Uses {@code Elements.getElementValuesWithDefaults()} so that annotation members with
     * default values are visible even when not explicitly set by the author.

+ * + * @param annotation the annotation mirror + * @param memberName the member to resolve + * @return the resolved annotation value, or {@code null} if not found + */ + private AnnotationValue getAnnotationValue(AnnotationMirror annotation, String memberName) { + Map values = + elements.getElementValuesWithDefaults(annotation); + + for (Map.Entry entry : values.entrySet()) { + if (entry.getKey().getSimpleName().contentEquals(memberName)) { + return entry.getValue(); + } + } + + return null; + } + + /** + * Reads an annotation member containing an array of class literals. + * + *

During annotation processing, class-valued members are represented as {@link TypeMirror}s + * within {@link AnnotationValue}s. This helper converts them into {@link TypeElement}s.

+ * + * @param annotation the annotation mirror + * @param memberName the member containing class literals + * @return the referenced types, preserving declaration order + */ + private List readClassArrayAnnotationValue(AnnotationMirror annotation, String memberName) { + AnnotationValue value = getAnnotationValue(annotation, memberName); + if (value == null) { + return List.of(); + } + + Object raw = value.getValue(); + if (!(raw instanceof List values)) { + return List.of(); + } + + List result = new ArrayList<>(); + for (Object entry : values) { + if (!(entry instanceof AnnotationValue annotationValue)) { + continue; + } + + Object classValue = annotationValue.getValue(); + if (!(classValue instanceof TypeMirror typeMirror)) { + continue; + } + + TypeElement typeElement = asTypeElement(typeMirror); + if (typeElement != null) { + result.add(typeElement); + } + } + + return List.copyOf(result); + } + + /** + * Reads the nested {@code providers()} member of a {@code @PluginContract} annotation. + * + *

     * The provider information is stored as nested {@code @RequiredProvider} annotations. This
     * helper unwraps those nested annotations and returns the referenced provider types.

+ * + * @param pluginContractAnnotation the plugin contract annotation mirror + * @return provider types referenced by the contract + */ + private List readRequiredProviders(AnnotationMirror pluginContractAnnotation) { + AnnotationValue providersValue = getAnnotationValue(pluginContractAnnotation, "providers"); + if (providersValue == null) { + return List.of(); + } + + Object raw = providersValue.getValue(); + if (!(raw instanceof List values)) { + return List.of(); + } + + List result = new ArrayList<>(); + for (Object entry : values) { + if (!(entry instanceof AnnotationValue annotationValue)) { + continue; + } + + Object nested = annotationValue.getValue(); + if (!(nested instanceof AnnotationMirror providerAnnotation)) { + continue; + } + + TypeElement providerAnnotationType = asTypeElement(providerAnnotation.getAnnotationType()); + if (providerAnnotationType == null + || !providerAnnotationType.getQualifiedName().contentEquals(ProcessorConstants.REQUIRED_PROVIDER_ANNOTATION)) { + continue; + } + + AnnotationValue providerClassValue = getAnnotationValue(providerAnnotation, "value"); + if (providerClassValue == null) { + continue; + } + + Object providerRaw = providerClassValue.getValue(); + if (!(providerRaw instanceof TypeMirror providerTypeMirror)) { + continue; + } + + TypeElement providerType = asTypeElement(providerTypeMirror); + if (providerType != null) { + result.add(providerType); + } + } + + return List.copyOf(result); + } + + /** + * Converts a declared type mirror into its corresponding type element. + * + *

During annotation processing, types are represented as {@link TypeMirror} instances. + * This utility extracts the underlying {@link TypeElement} when the mirror represents a + * declared (class/interface) type.

+ * + * @param typeMirror the type mirror to convert + * @return the type element, or {@code null} if the mirror is not a declared type + */ + private TypeElement asTypeElement(TypeMirror typeMirror) { + if (!(typeMirror instanceof DeclaredType declaredType)) { + return null; + } + + Element element = declaredType.asElement(); + return element instanceof TypeElement typeElement ? typeElement : null; + } + + // ── Ordering helpers ──────────────────────────────────────────────────────── + + /** + * Returns the given types sorted by fully qualified name for deterministic processing order. + * + *

     * Sorting ensures that error messages and generated output are stable across compiler
     * runs regardless of the order in which the compiler discovers types.

+ * + * @param typesToSort the types to sort + * @return a sorted list view + */ + private List sortByQualifiedName(Set typesToSort) { + return typesToSort.stream() + .sorted(Comparator.comparing(type -> type.getQualifiedName().toString())) + .toList(); + } + + // ── Diagnostic helpers ────────────────────────────────────────────────────── + + /** + * Emits a compiler error message associated with a source element. + * + * @param element the source element to associate with the diagnostic + * @param message the diagnostic text + */ + private void error(Element element, String message) { + processingEnv.getMessager().printMessage(Diagnostic.Kind.ERROR, message, element); + } + + /** + * Emits a compiler warning message associated with a source element. + * + * @param element the source element to associate with the diagnostic + * @param message the diagnostic text + */ + private void warning(Element element, String message) { + processingEnv.getMessager().printMessage(Diagnostic.Kind.WARNING, message, element); + } + + // ── Internal model ────────────────────────────────────────────────────────── + + /** + * Internal in-memory representation of one contract interface's annotation metadata. + * + *

This record captures the parsed state of a {@code @PluginContract} annotation for + * use during validation and descriptor generation.

+ * + * @param role whether the contract is a base contract or a capability + * @param requiredContracts contracts that must also be implemented + * @param providers providers required by this contract + */ + private record PluginContractModel( + PluginContract.Role role, + List requiredContracts, + List providers + ) { + } + + /** + * Local control-flow exception used to abort processing of a single implementation after an error. + * + *

     * This avoids deeply nested conditional code while still allowing the processor to continue
     * with other plugin implementations in the same round.

+ */ + private static final class ProcessorException extends RuntimeException { + } +} \ No newline at end of file diff --git a/meta/src/main/java/io/gdcc/spi/meta/processor/ProcessorConstants.java b/meta/src/main/java/io/gdcc/spi/meta/processor/ProcessorConstants.java new file mode 100644 index 0000000..e1c2d1a --- /dev/null +++ b/meta/src/main/java/io/gdcc/spi/meta/processor/ProcessorConstants.java @@ -0,0 +1,64 @@ +package io.gdcc.spi.meta.processor; + +import io.gdcc.spi.meta.annotations.DataversePlugin; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.annotations.RequiredProvider; +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; + +public class ProcessorConstants { + + private ProcessorConstants() { + /* Intentionally left blank for singleton */ + } + + /** + * Name of the compile-time constant field carrying the contract version. + */ + public static final String API_LEVEL_FIELD_NAME = "API_LEVEL"; + + /** + * Fully qualified name of the implementation marker annotation. + * + *

     * A string constant is used instead of a direct class literal so this processor can stay
     * tolerant during bootstrapping and module boundary changes.

+ * + * @see io.gdcc.spi.meta.annotations.DataversePlugin + */ + public static final String PLUGIN_IMPLEMENTATION_ANNOTATION = DataversePlugin.class.getName(); + + /** + * Fully qualified name of the contract annotation found on plugin contract interfaces. + * + * @see io.gdcc.spi.meta.annotations.PluginContract + */ + public static final String PLUGIN_CONTRACT_ANNOTATION = PluginContract.class.getName(); + + /** + * Fully qualified name of the nested provider requirement annotation used inside + * {@code @PluginContract.providers()}. + * + * @see io.gdcc.spi.meta.annotations.RequiredProvider + */ + public static final String REQUIRED_PROVIDER_ANNOTATION = RequiredProvider.class.getName(); + + /** + * Fully qualified name of {@code @AutoService}. + * + *

     * The processor does not depend on AutoService directly. It merely detects the annotation
     * by name so it can avoid generating conflicting ServiceLoader resources.

+ */ + public static final String AUTO_SERVICE_ANNOTATION = "com.google.auto.service.AutoService"; + + /** + * Fully qualified name of the common plugin super-interface. + * @see io.gdcc.spi.meta.plugin.Plugin + */ + public static final String PLUGIN_INTERFACE = Plugin.class.getName(); + + /** + * Fully qualified name of the common provider super-interface. + * @see io.gdcc.spi.meta.plugin.CoreProvider + */ + public static final String CORE_PROVIDER_INTERFACE = CoreProvider.class.getName(); +} diff --git a/meta/src/main/resources/META-INF/services/javax.annotation.processing.Processor b/meta/src/main/resources/META-INF/services/javax.annotation.processing.Processor new file mode 100644 index 0000000..5bbc814 --- /dev/null +++ b/meta/src/main/resources/META-INF/services/javax.annotation.processing.Processor @@ -0,0 +1 @@ +io.gdcc.spi.meta.processor.PluginContractProcessor \ No newline at end of file diff --git a/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java new file mode 100644 index 0000000..8007fe9 --- /dev/null +++ b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorFormatTest.java @@ -0,0 +1,403 @@ +package io.gdcc.spi.meta.descriptor; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.io.StringReader; +import java.io.StringWriter; +import java.util.Map; +import java.util.Properties; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class DescriptorFormatTest { + + @Nested + class Fields { + @Test + void toFilename_UsesDescriptorExtension_ForString() { + String result = DescriptorFormat.toFilename("io.gdcc.example.MyPlugin"); + + assertEquals("io.gdcc.example.MyPlugin.properties", result); + } + + @Test + void 
toFilename_UsesDescriptorExtension_ForClass() { + String result = DescriptorFormat.toFilename(SamplePlugin.class); + + assertEquals("io.gdcc.spi.meta.descriptor.DescriptorFormatTest.SamplePlugin.properties", result); + } + + @Test + void toPath_PrependsDescriptorDirectory_ForString() { + String result = DescriptorFormat.toPath("io.gdcc.example.MyPlugin"); + + assertEquals( + "META-INF/dataverse/plugins/io.gdcc.example.MyPlugin.properties", + result + ); + } + + @Test + void toPath_PrependsDescriptorDirectory_ForClass() { + String result = DescriptorFormat.toPath(SamplePlugin.class); + + assertEquals( + "META-INF/dataverse/plugins/io.gdcc.spi.meta.descriptor.DescriptorFormatTest.SamplePlugin.properties", + result + ); + } + + @Test + void toContractLevel_CreatesExpectedPropertyKey_ForString() { + String result = DescriptorFormat.toContractLevel("io.gdcc.example.ExportPlugin"); + + assertEquals( + "plugin.implements.io.gdcc.example.ExportPlugin.level", + result + ); + } + + @Test + void toContractLevel_CreatesExpectedPropertyKey_ForClass() { + String result = DescriptorFormat.toContractLevel(SampleContract.class); + + assertEquals( + "plugin.implements.io.gdcc.spi.meta.descriptor.DescriptorFormatTest$SampleContract.level", + result + ); + } + + @Test + void toRequiredProviderLevel_CreatesExpectedPropertyKey_ForString() { + String result = DescriptorFormat.toRequiredProviderLevel("io.gdcc.example.ExportProvider"); + + assertEquals( + "plugin.requires.io.gdcc.example.ExportProvider.level", + result + ); + } + + @Test + void toRequiredProviderLevel_CreatesExpectedPropertyKey_ForClass() { + String result = DescriptorFormat.toRequiredProviderLevel(SampleProvider.class); + + assertEquals( + "plugin.requires.io.gdcc.spi.meta.descriptor.DescriptorFormatTest$SampleProvider.level", + result + ); + } + } + + @Nested + class Write { + @Test + void write_WritesCoreFieldsContractsAndProviders() throws IOException { + Descriptor descriptor = new Descriptor( + 
"io.gdcc.example.MyPlugin", + "io.gdcc.example.ExportPlugin", + Map.of( + "io.gdcc.example.ExportPlugin", 2, + "io.gdcc.example.XmlCapability", 1 + ), + Map.of( + "io.gdcc.example.ExportProvider", 5 + ) + ); + + StringWriter writer = new StringWriter(); + + DescriptorFormat.write(descriptor, writer); + + Properties properties = loadProperties(writer.toString()); + + assertEquals("io.gdcc.example.MyPlugin", properties.getProperty(DescriptorFormat.PLUGIN_CLASS_FIELD)); + assertEquals("io.gdcc.example.ExportPlugin", properties.getProperty(DescriptorFormat.PLUGIN_KIND_FIELD)); + assertEquals("2", properties.getProperty("plugin.implements.io.gdcc.example.ExportPlugin.level")); + assertEquals("1", properties.getProperty("plugin.implements.io.gdcc.example.XmlCapability.level")); + assertEquals("5", properties.getProperty("plugin.requires.io.gdcc.example.ExportProvider.level")); + } + + @Test + void write_WritesCoreFields_WhenContractsAndProvidersAreEmpty() throws IOException { + Descriptor descriptor = new Descriptor( + "io.gdcc.example.MinimalPlugin", + "io.gdcc.example.ExportPlugin", + Map.of(), + Map.of() + ); + + StringWriter writer = new StringWriter(); + + DescriptorFormat.write(descriptor, writer); + + Properties properties = loadProperties(writer.toString()); + + assertEquals("io.gdcc.example.MinimalPlugin", properties.getProperty(DescriptorFormat.PLUGIN_CLASS_FIELD)); + assertEquals("io.gdcc.example.ExportPlugin", properties.getProperty(DescriptorFormat.PLUGIN_KIND_FIELD)); + assertEquals(2, properties.size(), "Only the two mandatory core fields should be present"); + } + + @Test + void write_UsesHelperGeneratedPropertyKeys() throws IOException { + Descriptor descriptor = new Descriptor( + "io.gdcc.example.MyPlugin", + "io.gdcc.example.ExportPlugin", + Map.of("io.gdcc.example.ExportPlugin", 7), + Map.of("io.gdcc.example.ExportProvider", 11) + ); + + StringWriter writer = new StringWriter(); + + DescriptorFormat.write(descriptor, writer); + + String serialized = 
writer.toString(); + + assertTrue(serialized.contains(DescriptorFormat.toContractLevel("io.gdcc.example.ExportPlugin") + "=7")); + assertTrue(serialized.contains(DescriptorFormat.toRequiredProviderLevel("io.gdcc.example.ExportProvider") + "=11")); + } + + @Test + void stringAndClassOverloadsProduceEquivalentResults() { + assertEquals( + DescriptorFormat.toFilename(SamplePlugin.class), + DescriptorFormat.toFilename(SamplePlugin.class.getName()) + ); + + assertEquals( + DescriptorFormat.toPath(SamplePlugin.class), + DescriptorFormat.toPath(SamplePlugin.class.getName()) + ); + + assertEquals( + DescriptorFormat.toContractLevel(SampleContract.class), + DescriptorFormat.toContractLevel(SampleContract.class.getName()) + ); + + assertEquals( + DescriptorFormat.toRequiredProviderLevel(SampleProvider.class), + DescriptorFormat.toRequiredProviderLevel(SampleProvider.class.getName()) + ); + } + + private static Properties loadProperties(String text) throws IOException { + Properties properties = new Properties(); + properties.load(new java.io.StringReader(text)); + return properties; + } + } + + @Nested + class Read { + @Test + void read_ReadsMandatoryFieldsAndEmptyMaps() throws IOException { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + """; + + Descriptor descriptor = DescriptorFormat.read(new StringReader(serialized)); + + assertEquals("io.gdcc.example.MyPlugin", descriptor.klass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.kind()); + assertEquals(Map.of(), descriptor.contracts()); + assertEquals(Map.of(), descriptor.requiredProviders()); + } + + @Test + void read_ReadsContractsAndRequiredProviders() throws IOException { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=2 + plugin.implements.io.gdcc.example.XmlCapability.level=1 + 
plugin.requires.io.gdcc.example.ExportProvider.level=5 + plugin.requires.io.gdcc.example.BatchProvider.level=9 + """; + + Descriptor descriptor = DescriptorFormat.read(new StringReader(serialized)); + + assertEquals("io.gdcc.example.MyPlugin", descriptor.klass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.kind()); + assertEquals( + Map.of( + "io.gdcc.example.ExportPlugin", 2, + "io.gdcc.example.XmlCapability", 1 + ), + descriptor.contracts() + ); + assertEquals( + Map.of( + "io.gdcc.example.ExportProvider", 5, + "io.gdcc.example.BatchProvider", 9 + ), + descriptor.requiredProviders() + ); + } + + @Test + void read_IgnoresUnknownProperties() throws IOException { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=2 + plugin.requires.io.gdcc.example.ExportProvider.level=5 + plugin.something.unrelated=value + unrelated.field=42 + """; + + Descriptor descriptor = DescriptorFormat.read(new StringReader(serialized)); + + assertEquals("io.gdcc.example.MyPlugin", descriptor.klass()); + assertEquals("io.gdcc.example.ExportPlugin", descriptor.kind()); + assertEquals(Map.of("io.gdcc.example.ExportPlugin", 2), descriptor.contracts()); + assertEquals(Map.of("io.gdcc.example.ExportProvider", 5), descriptor.requiredProviders()); + } + + @Test + void read_FailsWhenPluginClassIsMissing() { + String serialized = """ + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=2 + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals("Missing required property plugin.class", ex.getMessage()); + } + + @Test + void read_FailsWhenPluginKindIsMissing() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=2 + """; + + IllegalArgumentException ex 
= assertThrows( + IllegalArgumentException.class, + () -> DescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals("Missing required property plugin.kind", ex.getMessage()); + } + + @Test + void read_FailsWhenContractLevelIsNotAnInteger() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=not-a-number + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals( + "Invalid integer value for property plugin.implements.io.gdcc.example.ExportPlugin.level: not-a-number", + ex.getMessage() + ); + } + + @Test + void read_FailsWhenContractLevelIsNegative() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=-1 + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals( + "Invalid integer value for property plugin.implements.io.gdcc.example.ExportPlugin.level may not be < 1, but is: -1", + ex.getMessage() + ); + } + + @Test + void read_FailsWhenRequiredProviderLevelIsNotAnInteger() { + String serialized = """ + plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=1 + plugin.requires.io.gdcc.example.ExportProvider.level=nope + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals( + "Invalid integer value for property plugin.requires.io.gdcc.example.ExportProvider.level: nope", + ex.getMessage() + ); + } + + @Test + void read_FailsWhenProviderLevelIsNegative() { + String serialized = """ + 
plugin.class=io.gdcc.example.MyPlugin + plugin.kind=io.gdcc.example.ExportPlugin + plugin.implements.io.gdcc.example.ExportPlugin.level=1 + plugin.requires.io.gdcc.example.ExportProvider.level=-1 + """; + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorFormat.read(new StringReader(serialized)) + ); + + assertEquals( + "Invalid integer value for property plugin.requires.io.gdcc.example.ExportProvider.level may not be < 1, but is: -1", + ex.getMessage() + ); + } + + @Test + void read_RoundTripsWithWrite() throws IOException { + Descriptor original = new Descriptor( + "io.gdcc.example.MyPlugin", + "io.gdcc.example.ExportPlugin", + Map.of( + "io.gdcc.example.ExportPlugin", 2, + "io.gdcc.example.XmlCapability", 1 + ), + Map.of( + "io.gdcc.example.ExportProvider", 5 + ) + ); + + StringWriter writer = new StringWriter(); + DescriptorFormat.write(original, writer); + + Descriptor reread = DescriptorFormat.read(new StringReader(writer.toString())); + + assertEquals(original.klass(), reread.klass()); + assertEquals(original.kind(), reread.kind()); + assertEquals(original.contracts(), reread.contracts()); + assertEquals(original.requiredProviders(), reread.requiredProviders()); + } + } + + + private static final class SamplePlugin { + } + + private interface SampleContract { + } + + private interface SampleProvider { + } +} \ No newline at end of file diff --git a/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java new file mode 100644 index 0000000..689aebb --- /dev/null +++ b/meta/src/test/java/io/gdcc/spi/meta/descriptor/DescriptorScannerTest.java @@ -0,0 +1,287 @@ +package io.gdcc.spi.meta.descriptor; + +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import 
java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.jar.JarEntry; +import java.util.jar.JarOutputStream; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class DescriptorScannerTest { + + @TempDir + Path tempDir; + + @Nested + class Directory { + + @Test + void scanDirectory_ReturnsEmpty_WhenDescriptorDirectoryDoesNotExist() throws IOException { + List descriptors = DescriptorScanner.scanDirectory(tempDir); + + assertTrue(descriptors.isEmpty()); + } + + @Test + void scanDirectory_ReadsSingleDescriptor() throws IOException { + Path descriptorDir = tempDir.resolve(DescriptorFormat.DESCRIPTOR_DIRECTORY); + Files.createDirectories(descriptorDir); + + Path descriptorFile = descriptorDir.resolve("test.Plugin.properties"); + Files.writeString( + descriptorFile, + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=1 + plugin.requires.test.Provider.level=2 + """, + StandardCharsets.UTF_8 + ); + + List descriptors = DescriptorScanner.scanDirectory(tempDir); + + assertEquals(1, descriptors.size()); + Descriptor descriptor = descriptors.get(0); + assertEquals("test.Plugin", descriptor.klass()); + assertEquals("test.BasePlugin", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.BasePlugin")); + assertEquals(2, descriptor.requiredProviderLevel("test.Provider")); + } + + @Test + void scanDirectory_IgnoresNonPropertyFiles() throws IOException { + Path descriptorDir = tempDir.resolve(DescriptorFormat.DESCRIPTOR_DIRECTORY); + Files.createDirectories(descriptorDir); + + Files.writeString( + descriptorDir.resolve("test.Plugin.properties"), + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + """, + StandardCharsets.UTF_8 + ); + 
Files.writeString(descriptorDir.resolve("README.txt"), "ignore me", StandardCharsets.UTF_8); + + List descriptors = DescriptorScanner.scanDirectory(tempDir); + + assertEquals(1, descriptors.size()); + assertEquals("test.Plugin", descriptors.get(0).klass()); + } + + @Test + void scanDirectory_RejectsNonDirectory() throws IOException { + Path file = Files.createTempFile(tempDir, "not-a-directory", ".txt"); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorScanner.scanDirectory(file) + ); + + assertTrue(ex.getMessage().contains("not a readable directory")); + } + } + + @Nested + class Jar { + + @Test + void scanJar_ReadsSingleDescriptor() throws IOException { + Path jar = createJar(Map.of( + "META-INF/dataverse/plugins/test.Plugin.properties", + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=3 + plugin.requires.test.Provider.level=4 + """, + "META-INF/services/test.BasePlugin", + "test.Plugin" + )); + + List descriptors = DescriptorScanner.scanJar(jar); + + assertEquals(1, descriptors.size()); + Descriptor descriptor = descriptors.get(0); + assertEquals("test.Plugin", descriptor.klass()); + assertEquals("test.BasePlugin", descriptor.kind()); + assertEquals(3, descriptor.contractLevel("test.BasePlugin")); + assertEquals(4, descriptor.requiredProviderLevel("test.Provider")); + } + + @Test + void scanJar_ReadsMultipleDescriptors() throws IOException { + Path jar = createJar(Map.of( + "META-INF/dataverse/plugins/test.A.properties", + """ + plugin.class=test.A + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=1 + """, + "META-INF/dataverse/plugins/test.B.properties", + """ + plugin.class=test.B + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=2 + """ + )); + + List descriptors = DescriptorScanner.scanJar(jar); + + assertEquals(2, descriptors.size()); + } + + @Test + void scanJar_IgnoresNonDescriptorEntries() throws 
IOException { + Path jar = createJar(Map.of( + "META-INF/dataverse/plugins/test.Plugin.properties", + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + """, + "META-INF/dataverse/plugins/README.txt", + "ignore me", + "some/other/resource.txt", + "ignore me too" + )); + + List descriptors = DescriptorScanner.scanJar(jar); + + assertEquals(1, descriptors.size()); + assertEquals("test.Plugin", descriptors.get(0).klass()); + } + + @Test + void scanJar_RejectsNonJarFile() throws IOException { + Path file = Files.createTempFile(tempDir, "not-a-jar", ".txt"); + + IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> DescriptorScanner.scanJar(file) + ); + + assertTrue(ex.getMessage().contains("not a readable JAR file")); + } + } + + @Nested + class ServiceProviderInterfaceExistance { + + @Test + void hasServiceProviderInterfaceRecord_ReturnTrue_ForExistingSPIRecordInJar() throws IOException { + Path jar = createJar(Map.of( + "META-INF/services/test.BasePlugin", + "test.Plugin", + "META-INF/dataverse/plugins/test.Plugin.properties", + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=2 + """ + )); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(jar, descriptor); + + assertTrue(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + + @Test + void hasServiceProviderInterfaceRecord_ReturnsFalse_WhenSPIRecordDoesNotExistInJar() throws IOException { + Path jar = createJar(Map.of( + "META-INF/dataverse/plugins/test.Plugin.properties", + """ + plugin.class=test.Plugin + plugin.kind=test.BasePlugin + plugin.implements.test.BasePlugin.level=2 + """ + )); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(jar, descriptor); + + 
assertFalse(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + + @Test + void hasServiceProviderInterfaceRecord_RejectsInvalidSourcedDescriptorJar() { + Path invalidPath = tempDir.resolve("invalid.jar"); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor invalidDescriptor = new SourcedDescriptor(invalidPath, descriptor); + + IllegalArgumentException ex = assertThrows(IllegalArgumentException.class, + () -> DescriptorScanner.hasServiceProviderInterfaceRecord(invalidDescriptor)); + + assertTrue(ex.getMessage().contains("invalid.jar")); + } + + @Test + void hasServiceProviderInterfaceRecord_ReturnsTrue_ForExistingSPIRecordInDirectory() throws IOException { + Path tmpDir = Files.createTempDirectory("descriptor-scanner"); + Path recordsDir = tmpDir.resolve(Path.of("META-INF/services")); + + Files.createDirectories(recordsDir); + Files.writeString(recordsDir.resolve(Path.of("test.BasePlugin")), "test.Plugin", StandardCharsets.UTF_8); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(tmpDir, descriptor); + + assertTrue(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + + @Test + void hasServiceProviderInterfaceRecord_ReturnsFalse_ForNonexistingSPIRecordInDirectory() throws IOException { + Path tmpDir = Files.createTempDirectory("descriptor-scanner"); + Path recordsDir = tmpDir.resolve(Path.of("META-INF/services")); + + Files.createDirectories(recordsDir); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(tmpDir, descriptor); + + assertFalse(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + + @Test + void hasServiceProviderInterfaceRecord_ReturnsFalse_ForNonmatchingSPIRecordInDirectory() throws IOException { 
+ Path tmpDir = Files.createTempDirectory("descriptor-scanner"); + Path recordsDir = tmpDir.resolve(Path.of("META-INF/services")); + + Files.createDirectories(recordsDir); + Files.writeString(recordsDir.resolve(Path.of("test.BasePlugin")), "test.OtherPlugin", StandardCharsets.UTF_8); + + Descriptor descriptor = new Descriptor("test.Plugin", "test.BasePlugin", Map.of(), Map.of()); + SourcedDescriptor sourcedDescriptor = new SourcedDescriptor(tmpDir, descriptor); + + assertFalse(DescriptorScanner.hasServiceProviderInterfaceRecord(sourcedDescriptor)); + } + } + + private Path createJar(Map entries) throws IOException { + Path jar = Files.createTempFile(tempDir, "plugin-plugin-test-", ".jar"); + + try (JarOutputStream out = new JarOutputStream(Files.newOutputStream(jar))) { + for (Map.Entry entry : entries.entrySet()) { + JarEntry jarEntry = new JarEntry(entry.getKey()); + out.putNextEntry(jarEntry); + out.write(entry.getValue().getBytes(StandardCharsets.UTF_8)); + out.closeEntry(); + } + } + + return jar; + } +} diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java new file mode 100644 index 0000000..14f668f --- /dev/null +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/PluginContractProcessorTest.java @@ -0,0 +1,3155 @@ +package io.gdcc.spi.meta.processor; + +import io.gdcc.spi.meta.annotations.DataversePlugin; +import io.gdcc.spi.meta.annotations.PluginContract; +import io.gdcc.spi.meta.annotations.RequiredProvider; +import io.gdcc.spi.meta.descriptor.Descriptor; +import io.gdcc.spi.meta.descriptor.DescriptorFormat; +import io.gdcc.spi.meta.plugin.CoreProvider; +import io.gdcc.spi.meta.plugin.Plugin; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; + +import javax.tools.Diagnostic; +import java.io.IOException; +import java.nio.file.Files; +import java.util.List; + +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; + +class PluginContractProcessorTest { + + private final ProcessorTestCompiler compiler = new ProcessorTestCompiler(); + + @Nested + class ImplementationContractGraphRules { + @Test + void compilesWhenImplementationOverridesConflictingDefaultMethodsFromCapabilities() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + + String getMediaType(); + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; + + default String getMediaType() { + return "application/xml"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/TextCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface TextCapability extends Plugin { + int API_LEVEL = 3; + + default String getMediaType() { + return "text/plain"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/MultiCapabilityExporter.java", + """ + package test.export; + + import %s; + + @DataversePlugin + public class MultiCapabilityExporter implements BaseExporter, XmlCapability, 
TextCapability { + @Override + public String identity() { + return "multi"; + } + + @Override + public String getMediaType() { + return XmlCapability.super.getMediaType(); + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.MultiCapabilityExporter"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.export.BaseExporter", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.export.BaseExporter")); + assertEquals(2, descriptor.contractLevel("test.export.XmlCapability")); + assertEquals(3, descriptor.contractLevel("test.export.TextCapability")); + } + + @Test + void failsWhenNoBaseContractIsImplemented() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/CapabilityPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.CAPABILITY) + public interface CapabilityPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityOnlyImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class CapabilityOnlyImpl implements CapabilityPlugin { + @Override + public String identity() { + return "capability-only"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one Role.BASE @PluginContract"); + } + + @Test + void failsWhenMultipleBaseContractsAreImplemented() throws IOException { + ProcessorTestCompiler.CompilationResult result = 
compiler.compile(List.of( + source( + "test/BasePluginA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BasePluginB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BadPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class BadPlugin implements BasePluginA, BasePluginB { + @Override + public String identity() { + return "bad"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "exactly one Role.BASE @PluginContract"); + } + + @Test + void failsWhenRequiredContractIsMissing() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BasePlugin.class } + ) + public interface CapabilityPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + PluginContract.class.getCanonicalName() 
+ ) + ), + source( + "test/MissingBasePluginImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MissingBasePluginImpl implements CapabilityPlugin { + @Override + public String identity() { + return "missing-base"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "also requires contract test.BasePlugin"); + } + + @Test + void failsWhenImplementationIsNotPublic() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/HiddenPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + class HiddenPlugin implements TestPlugin { + @Override + public String identity() { + return "hidden"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be public"); + } + + @Test + void failsWhenImplementationIsAbstract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AbstractPluginImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public abstract class AbstractPluginImpl implements 
TestPlugin { + @Override + public String identity() { + return "abstract"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must not be abstract"); + } + + @Test + void failsWhenDataversePluginAnnotatedClassIsNotAPluginImplementation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NotAPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NotAPlugin { + public String identity() { + return "not-a-plugin"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "No implemented plugin contracts found"); + } + + @Test + void failsWhenImplementationDirectlyImplementsPlugin() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/RawPluginImpl.java", + """ + package test; + + import %s; + import %s; + + @DataversePlugin + public class RawPluginImpl implements Plugin { + @Override + public String identity() { + return "raw-plugin"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + DataversePlugin.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains( + result, + Diagnostic.Kind.ERROR, + "must implement a specific plugin contract interface, not Plugin directly" + ); + } + + @Test + // because we want to allow base classes, as long as concrete classes are annotated @DataversePlugin + void doesNotWarnForAbstractUnannotatedPluginBaseClass() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = 
PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AbstractBasePlugin.java", + """ + package test; + + public abstract class AbstractBasePlugin implements TestPlugin { + @Override + public String identity() { + return "base"; + } + } + """ + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + assertDiagnosticDoesNotContain(result, Diagnostic.Kind.WARNING, "@DataversePlugin"); + } + } + + @Nested + class DescriptorFileGeneration { + @Test + void generatesDescriptorAndServiceFileForValidPlugin() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestProvider.java", + """ + package test; + + import %s; + + public interface TestProvider extends CoreProvider { + int API_LEVEL = 7; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(TestProvider.class) } + ) + public interface TestPlugin extends Plugin { + int API_LEVEL = 3; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/GoodPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class GoodPlugin implements TestPlugin { + @Override + public String identity() { + return "good"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.GoodPlugin"); + String servicePath = "META-INF/services/test.TestPlugin"; + + 
assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.GoodPlugin", descriptor.klass()); + assertEquals("test.TestPlugin", descriptor.kind()); + assertEquals(3, descriptor.contractLevel("test.TestPlugin")); + assertEquals(7, descriptor.requiredProviderLevel("test.TestProvider")); + + String serviceFile = Files.readString(result.generatedFile(servicePath)); + assertEquals("test.GoodPlugin", serviceFile.trim()); + } + + @Test + void compilesWhenPluginImplementsOneBaseAndOneCapability() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + + String getMediaType(); + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends BaseExporter { + int API_LEVEL = 2; + + default String getMediaType() { + return "application/xml"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlExporterImpl.java", + """ + package test.export; + + import %s; + + @DataversePlugin + public class XmlExporterImpl implements XmlCapability { + @Override + public String identity() { + return "xml"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + 
assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.XmlExporterImpl"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.export.XmlExporterImpl", descriptor.klass()); + assertEquals("test.export.BaseExporter", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.export.BaseExporter")); + assertEquals(2, descriptor.contractLevel("test.export.XmlCapability")); + } + + @Test + void compilesWhenPluginImplementsOneBaseAndTwoCapabilities() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/PrettyPrintCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface PrettyPrintCapability extends Plugin { + int API_LEVEL = 3; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlPrettyExporterImpl.java", + """ + package test.export; + 
+ import %s; + + @DataversePlugin + public class XmlPrettyExporterImpl implements BaseExporter, XmlCapability, PrettyPrintCapability { + @Override + public String identity() { + return "xml-pretty"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.XmlPrettyExporterImpl"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.export.BaseExporter", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.export.BaseExporter")); + assertEquals(2, descriptor.contractLevel("test.export.XmlCapability")); + assertEquals(3, descriptor.contractLevel("test.export.PrettyPrintCapability")); + } + + @Test + void mergesProviderRequirementsFromBaseAndCapability() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseProvider.java", + """ + package test.export; + + import %s; + + public interface BaseProvider extends CoreProvider { + int API_LEVEL = 10; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/export/XmlProvider.java", + """ + package test.export; + + import %s; + + public interface XmlProvider extends CoreProvider { + int API_LEVEL = 20; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(BaseProvider.class) } + ) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + 
RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class }, + providers = { @RequiredProvider(XmlProvider.class) } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/export/XmlExporterImpl.java", + """ + package test.export; + + import %s; + + @DataversePlugin + public class XmlExporterImpl implements BaseExporter, XmlCapability { + @Override + public String identity() { + return "xml"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.XmlExporterImpl"); + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + + assertEquals(10, descriptor.requiredProviderLevel("test.export.BaseProvider")); + assertEquals(20, descriptor.requiredProviderLevel("test.export.XmlProvider")); + } + + @Test + void warnsWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ImplicitPlugin.java", + """ + package test; + + public class ImplicitPlugin implements TestPlugin { + @Override + public String identity() { + return "implicit"; + } + } + """ + ) + )); + + 
assertTrue(result.success(), result.diagnosticsAsText()); + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@DataversePlugin"); + } + + @Test + void createsDescriptorEvenWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 4; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ImplicitPlugin.java", + """ + package test; + + public class ImplicitPlugin implements TestPlugin { + @Override + public String identity() { + return "implicit"; + } + } + """ + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.ImplicitPlugin"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.ImplicitPlugin", descriptor.klass()); + assertEquals("test.TestPlugin", descriptor.kind()); + assertEquals(4, descriptor.contractLevel("test.TestPlugin")); + } + + @Test + void usesBaseContractAsDescriptorKindWhenCapabilityIsAlsoImplemented() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import 
%s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlExporterImpl.java", + """ + package test.export; + + import %s; + + @DataversePlugin + public class XmlExporterImpl implements BaseExporter, XmlCapability { + @Override + public String identity() { + return "xml"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.export.XmlExporterImpl"); + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should be generated"); + + Descriptor descriptor = DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals("test.export.BaseExporter", descriptor.kind()); + assertEquals(1, descriptor.contractLevel("test.export.BaseExporter")); + assertEquals(2, descriptor.contractLevel("test.export.XmlCapability")); + } + } + + @Nested + class AutoServiceFileGeneration { + @Test + void suppressesGeneratedServiceFileForWholeContractWhenAutoServiceIsMixedWithNormalImplementations() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "com/google/auto/service/AutoService.java", + """ + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import java.lang.annotation.Target; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface AutoService { + Class[] value(); + } + """ + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface 
TestPlugin extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AutoServiceImpl.java", + """ + package test; + + import com.google.auto.service.AutoService; + import %s; + + @DataversePlugin + @AutoService(TestPlugin.class) + public class AutoServiceImpl implements TestPlugin { + @Override + public String identity() { + return "auto"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ), + source( + "test/NormalImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NormalImpl implements TestPlugin { + @Override + public String identity() { + return "normal"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String autoDescriptorPath = DescriptorFormat.toPath("test.AutoServiceImpl"); + String normalDescriptorPath = DescriptorFormat.toPath("test.NormalImpl"); + String servicePath = "META-INF/services/test.TestPlugin"; + + assertTrue(Files.exists(result.generatedFile(autoDescriptorPath)), "AutoService descriptor should be generated"); + assertTrue(Files.exists(result.generatedFile(normalDescriptorPath)), "Normal descriptor should be generated"); + assertFalse(Files.exists(result.generatedFile(servicePath)), "Service file should be suppressed for the whole contract"); + + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@AutoService detected"); + } + + @Test + void createsServiceFileEvenWhenPluginImplementationOmitsDataversePluginAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 5; + } + """.formatted( + Plugin.class.getCanonicalName(), + 
PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ImplicitPlugin.java", + """ + package test; + + public class ImplicitPlugin implements TestPlugin { + @Override + public String identity() { + return "implicit"; + } + } + """ + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should still be generated"); + + String serviceFile = Files.readString(result.generatedFile(servicePath)); + assertEquals("test.ImplicitPlugin", serviceFile.trim()); + } + + @Test + void suppressesGeneratedServiceFileWhenAutoServiceIsPresent() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "com/google/auto/service/AutoService.java", + """ + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import java.lang.annotation.Target; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface AutoService { + Class[] value(); + } + """ + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AutoServicePlugin.java", + """ + package test; + + import com.google.auto.service.AutoService; + import %s; + + @DataversePlugin + @AutoService(TestPlugin.class) + public class AutoServicePlugin implements TestPlugin { + @Override + public String identity() { + return "auto"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = 
DescriptorFormat.toPath("test.AutoServicePlugin"); + String servicePath = "META-INF/services/test.TestPlugin"; + + assertTrue(Files.exists(result.generatedFile(descriptorPath)), "Descriptor should still be generated"); + assertFalse(Files.exists(result.generatedFile(servicePath)), "Service file should be suppressed"); + + assertDiagnosticContains(result, Diagnostic.Kind.WARNING, "@AutoService detected"); + } + + @Test + void suppressesServiceGenerationOnlyForPluginKindManagedByAutoService() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "com/google/auto/service/AutoService.java", + """ + package com.google.auto.service; + + import java.lang.annotation.ElementType; + import java.lang.annotation.Retention; + import java.lang.annotation.RetentionPolicy; + import java.lang.annotation.Target; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface AutoService { + Class[] value(); + } + """ + ), + source( + "test/PluginTypeA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface PluginTypeA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/PluginTypeB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface PluginTypeB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/AutoManagedA.java", + """ + package test; + + import com.google.auto.service.AutoService; + import %s; + + @DataversePlugin + @AutoService(PluginTypeA.class) + public class AutoManagedA implements PluginTypeA { + @Override + public String identity() { + return "a"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ), + source( + 
"test/NormalB.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NormalB implements PluginTypeB { + @Override + public String identity() { + return "b"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePathA = "META-INF/services/test.PluginTypeA"; + String servicePathB = "META-INF/services/test.PluginTypeB"; + + assertFalse(Files.exists(result.generatedFile(servicePathA)), "PluginTypeA service file should be suppressed"); + assertTrue(Files.exists(result.generatedFile(servicePathB)), "PluginTypeB service file should still be generated"); + + String serviceFileB = Files.readString(result.generatedFile(servicePathB)); + assertEquals("test.NormalB", serviceFileB.trim()); + } + + @Test + void doesNotGenerateServiceFileForCapabilityContracts() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlExporterImpl.java", + """ + package test.export; + + import %s; + + @DataversePlugin + public class XmlExporterImpl implements BaseExporter, XmlCapability { + @Override + public String identity() { + return "xml"; + } + } + 
""".formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String baseServicePath = "META-INF/services/test.export.BaseExporter"; + String capabilityServicePath = "META-INF/services/test.export.XmlCapability"; + + assertTrue(Files.exists(result.generatedFile(baseServicePath)), "Base service file should be generated"); + assertFalse(Files.exists(result.generatedFile(capabilityServicePath)), "Capability service file must never be generated"); + + String serviceFile = Files.readString(result.generatedFile(baseServicePath)); + assertEquals("test.export.XmlExporterImpl", serviceFile.trim()); + } + + @Test + void doesNotProcessAnnotatedImplementationTwice() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 6; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/OncePlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class OncePlugin implements TestPlugin { + @Override + public String identity() { + return "once"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + assertTrue(Files.exists(result.generatedFile(servicePath)), "Service file should be generated"); + + List lines = Files.readAllLines(result.generatedFile(servicePath)) + .stream() + .filter(line -> !line.isBlank()) + .toList(); + + assertEquals(1, lines.size(), "Implementation should only be registered once"); + assertEquals("test.OncePlugin", lines.get(0)); + } + + @Test + void aggregatesMultipleImplementationsIntoOneServiceFile() throws 
IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/APlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class APlugin implements TestPlugin { + @Override + public String identity() { + return "a"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ), + source( + "test/BPlugin.java", + """ + package test; + + import %s; + + @DataversePlugin + public class BPlugin implements TestPlugin { + @Override + public String identity() { + return "b"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String servicePath = "META-INF/services/test.TestPlugin"; + assertTrue(Files.exists(result.generatedFile(servicePath)), "Aggregated service file should be generated"); + + List lines = Files.readAllLines(result.generatedFile(servicePath)) + .stream() + .filter(line -> !line.isBlank()) + .toList(); + + assertEquals(List.of("test.APlugin", "test.BPlugin"), lines); + } + } + + @Nested + class Providers { + @Test + void compilesWhenUnusedProviderInterfaceHasApiLevel() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/LonelyProvider.java", + """ + package test; + + import %s; + + public interface LonelyProvider extends CoreProvider { + int API_LEVEL = 42; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void failsWhenRequiredProviderIsNotACoreProvider() throws IOException { + ProcessorTestCompiler.CompilationResult result = 
compiler.compile(List.of( + source( + "test/NotAProvider.java", + """ + package test; + + public interface NotAProvider { + int API_LEVEL = 1; + } + """ + ), + source( + "test/InvalidProviderPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(NotAProvider.class) } + ) + public interface InvalidProviderPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/InvalidProviderImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class InvalidProviderImpl implements InvalidProviderPlugin { + @Override + public String identity() { + return "invalid-provider"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "cannot be converted"); + } + + @Test + void failsWhenProviderApiLevelIsMissing() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingProviderApiLevel.java", + """ + package test; + + import %s; + + public interface MissingProviderApiLevel extends CoreProvider { + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(MissingProviderApiLevel.class) } + ) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/MissingProviderApiLevelImpl.java", + """ + package test; + + import %s; + + 
@DataversePlugin + public class MissingProviderApiLevelImpl implements TestPlugin { + @Override + public String identity() { + return "missing-provider-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must declare int API_LEVEL"); + } + + @Test + void failsWhenProviderApiLevelIsNotCompileTimeConstant() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NonConstantProviderApiLevel.java", + """ + package test; + + import %s; + + public interface NonConstantProviderApiLevel extends CoreProvider { + Integer API_LEVEL = Integer.valueOf(9); + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/TestPlugin.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + providers = { @RequiredProvider(NonConstantProviderApiLevel.class) } + ) + public interface TestPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/NonConstantProviderApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NonConstantProviderApiLevelImpl implements TestPlugin { + @Override + public String identity() { + return "non-constant-provider-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be a compile-time int constant"); + } + + @Test + void mergesDuplicateProviderRequirementsWhenLevelsMatch() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/SharedProvider.java", 
+ """ + package test; + + import %s; + + public interface SharedProvider extends CoreProvider { + int API_LEVEL = 8; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ), + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityOne.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BasePlugin.class }, + providers = { @RequiredProvider(SharedProvider.class) } + ) + public interface CapabilityOne extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/CapabilityTwo.java", + """ + package test; + + import %s; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BasePlugin.class }, + providers = { @RequiredProvider(SharedProvider.class) } + ) + public interface CapabilityTwo extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName(), + RequiredProvider.class.getCanonicalName() + ) + ), + source( + "test/MergedProviderImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MergedProviderImpl implements BasePlugin, CapabilityOne, CapabilityTwo { + @Override + public String identity() { + return "merged-provider"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + + String descriptorPath = DescriptorFormat.toPath("test.MergedProviderImpl"); + Descriptor descriptor = 
DescriptorFormat.read(Files.readString(result.generatedFile(descriptorPath))); + assertEquals(8, descriptor.requiredProviderLevel("test.SharedProvider")); + } + + @Test + void failsWhenProviderIsNonPublicInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NonPublicProvider.java", + """ + package test; + + import %s; + + interface NonPublicProvider extends CoreProvider { + int API_LEVEL = 8; + } + """.formatted(CoreProvider.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Interfaces extending CoreProvider must be public"); + } + } + + @Nested + class ContractApiLevels { + @Test + void failsWhenContractApiLevelIsMissing() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingApiLevelPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface MissingApiLevelPlugin extends Plugin { + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/MissingApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class MissingApiLevelImpl implements MissingApiLevelPlugin { + @Override + public String identity() { + return "missing-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must declare int API_LEVEL"); + } + + @Test + void failsWhenContractApiLevelIsNotCompileTimeConstant() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NonConstantApiLevelPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = 
PluginContract.Role.BASE) + public interface NonConstantApiLevelPlugin extends Plugin { + Integer API_LEVEL = Integer.valueOf(2); + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/NonConstantApiLevelImpl.java", + """ + package test; + + import %s; + + @DataversePlugin + public class NonConstantApiLevelImpl implements NonConstantApiLevelPlugin { + @Override + public String identity() { + return "non-constant-api-level"; + } + } + """.formatted(DataversePlugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must be a compile-time int constant"); + } + } + + @Nested + class InvalidOrMissingAnnotationTargets { + @Test + void failsWhenContractIsPlacedOnClass() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/InvalidContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public class InvalidContract implements Plugin { + @Override + public String identity() { + return "invalid"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "@PluginContract may only be declared on interfaces extending Plugin"); + } + + @Test + void failsWhenContractIsOnNonPluginInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingExtendsPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface MissingExtendsPlugin { + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + 
assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "@PluginContract may only be declared on interfaces extending Plugin"); + } + + @Test + void failsWhenContractIsOnNonPublicInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/NotPublicPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + interface NotPublicPlugin extends Plugin { + int API_LEVEL = 3; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Interfaces extending Plugin must be public"); + } + + @Test + void failsWhenRequiredContractIsUnannotatedPluginInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/ImplicitPluginType.java", + """ + package test; + + import %s; + + public interface ImplicitPluginType extends Plugin { + int API_LEVEL = 1; + } + """.formatted(Plugin.class.getCanonicalName()) + ), + source( + "test/BadCapability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { ImplicitPluginType.class } + ) + public interface BadCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Interfaces extending Plugin must declare @PluginContract"); + } + + @Test + void failsWhenContractIsInDifferentPackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/foobar/BasePlugin.java", + """ 
+ package test.foobar; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/barbeque/CapabilityPluginA.java", + """ + package test.barbeque; + + import %s; + import %s; + import test.foobar.BasePlugin; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface CapabilityPluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ))); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "share same package path"); + } + + @Test + void compilesWhenCapabilityRequiresBaseInSamePackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 1; + + default String getMediaType() { + return "application/xml"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void compilesWhenCapabilityRequiresBaseInSubpackage() throws IOException { + 
ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/xml/XmlCapability.java", + """ + package test.export.xml; + + import test.export.BaseExporter; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 1; + + default String getMediaType() { + return "application/xml"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void failsWhenCapabilityRequiresBaseInSiblingPackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/BaseExporter.java", + """ + package test.export; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/importing/XmlCapability.java", + """ + package test.importing; + + import test.export.BaseExporter; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, 
Diagnostic.Kind.ERROR, "same package path"); + } + + @Test + void failsWhenCapabilityRequiresBaseInParentPackage() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/export/xml/BaseExporter.java", + """ + package test.export.xml; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BaseExporter extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/export/XmlCapability.java", + """ + package test.export; + + import test.export.xml.BaseExporter; + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BaseExporter.class } + ) + public interface XmlCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "same package path"); + } + + @Test + void failsWhenBaseContractLacksPluginContractAnnotation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/UndeclaredPluginContract.java", + """ + package test; + + import %s; + + public interface UndeclaredPluginContract extends Plugin { + int API_LEVEL = 1; + } + """.formatted(Plugin.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "Interfaces extending Plugin must declare @PluginContract"); + } + } + + @Nested + class BaseContractGraphRules { + @Test + void compilesWhenAnnotatedPluginInterfaceHasNoImplementation() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/LonelyPluginContract.java", + """ + package 
test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface LonelyPluginContract extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), result.diagnosticsAsText()); + } + + @Test + void failsWhenBaseContractRequiresSomething() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE + ) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/InvalidPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE, + requires = BasePlugin.class + ) + public interface InvalidPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "may not require"); + } + + @Test + void failsWhenBaseContractIsExtended() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/DerivedBase.java", + """ + package test; + + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface DerivedBase extends BasePlugin { + int API_LEVEL = 2; + } + 
""".formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "may not be extended"); + } + + @Test + void failsWhenBaseContractIsExtendedByCapabilityButMissingRequired() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/MissingRequiredBaseCapability.java", + """ + package test; + + import %s; + + @PluginContract(role = PluginContract.Role.CAPABILITY) + public interface MissingRequiredBaseCapability extends BasePlugin { + int API_LEVEL = 2; + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenContractRequiresMultipleBaseContracts() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePluginA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BasePluginB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + 
source( + "test/BadDerivedContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BasePluginA.class, BasePluginB.class } + ) + public interface BadDerivedContract extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void compilesWhenBaseContractHasUnrelatedIntermediateInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BaseSupertype.java", + """ + package test; + + import %s; + + public interface BaseSupertype { + String test(); + } + """.formatted(Plugin.class.getCanonicalName()) + ), + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.BASE + ) + public interface BasePlugin extends BaseSupertype, Plugin { + int API_LEVEL = 1; + default String test() { + return "test"; + } + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertTrue(result.success(), "Compilation should not fail"); + } + } + + @Nested + class CapabilityContractGraphRules { + @Test + void compilesWhenCapabilityContractExtendsRequiredBaseContract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ExtendingCapability.java", + """ + package 
test; + + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface ExtendingCapability extends BasePlugin { + int API_LEVEL = 2; + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), "Compilation should not fail"); + } + + @Test + void failsWhenCapabilityContractExtendsNonRequiredBaseContract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePluginA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BasePluginB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePluginB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ExtendingCapability.java", + """ + package test; + + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePluginA.class + ) + public interface ExtendingCapability extends BasePluginB { + int API_LEVEL = 2; + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "extended base must match the required base"); + } + + @Test + void compilesWhenCapabilityContractExtendsUnrelatedIntermediateInterface() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface 
BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/Intermediate.java", + """ + package test; + + import %s; + import %s; + + public interface Intermediate { + String test(); + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/ExtendsIntermediate.java", + """ + package test; + + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface ExtendsIntermediate extends BasePlugin, Intermediate { + int API_LEVEL = 2; + + default String test() { + return "test"; + } + } + """.formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertTrue(result.success(), "Compilation should not fail"); + } + + @Test + void failsWhenCapabilityContractIsExtendedByCapability() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/Capability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface Capability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/DerivedCapability.java", + """ + package test; + + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface DerivedCapability extends Capability { + int API_LEVEL = 2; + } + 
""".formatted(PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "may not be extended"); + } + + @Test + void failsWhenCapabilityDoesNotDeclareRequiredBaseContract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/MissingRequiresPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.CAPABILITY) + public interface MissingRequiresPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenCapabilityDeclaresEmptyRequiredBaseContract() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/EmptyRequiresPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = {} + ) + public interface EmptyRequiresPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenCapabilityRequiresItself() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/SelfReferencingCapability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { SelfReferencingCapability.class } + ) + 
public interface SelfReferencingCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenCapabilityContractRequiresCapabilityNoBase() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface CapabilityPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BadRequiringContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { CapabilityPlugin.class } + ) + public interface BadRequiringContract extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenContractRequiresMultipleCapabilityContracts() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + 
""" + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityPluginA.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface CapabilityPluginA extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/CapabilityPluginB.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = BasePlugin.class + ) + public interface CapabilityPluginB extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BadCapabilityContract.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { CapabilityPluginA.class, CapabilityPluginB.class } + ) + public interface BadCapabilityContract extends Plugin { + int API_LEVEL = 2; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName()) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenCapabilityRequiresSameBaseTwice() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( 
+ Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/BadCapability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { BasePlugin.class, BasePlugin.class } + ) + public interface BadCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenContractRequiresConcretePluginClass() throws IOException { + ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/BasePlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + public interface BasePlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ), + source( + "test/SomePluginImpl.java", + """ + package test; + + public class SomePluginImpl implements BasePlugin { + @Override + public String identity() { + return "impl"; + } + } + """ + ), + source( + "test/BadCapability.java", + """ + package test; + + import %s; + import %s; + + @PluginContract( + role = PluginContract.Role.CAPABILITY, + requires = { SomePluginImpl.class } + ) + public interface BadCapability extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "must require single base @PluginContract interface"); + } + + @Test + void failsWhenContractDeclaresMultiplePluginContractAnnotations() throws IOException { + 
ProcessorTestCompiler.CompilationResult result = compiler.compile(List.of( + source( + "test/DuplicateAnnotatedPlugin.java", + """ + package test; + + import %s; + import %s; + + @PluginContract(role = PluginContract.Role.BASE) + @PluginContract(role = PluginContract.Role.CAPABILITY) + public interface DuplicateAnnotatedPlugin extends Plugin { + int API_LEVEL = 1; + } + """.formatted( + Plugin.class.getCanonicalName(), + PluginContract.class.getCanonicalName() + ) + ) + )); + + assertFalse(result.success(), "Compilation should fail"); + assertDiagnosticContains(result, Diagnostic.Kind.ERROR, "PluginContract is not a repeatable annotation type"); + } + } + + private static ProcessorTestCompiler.SourceFile source(String relativePath, String content) { + return new ProcessorTestCompiler.SourceFile(relativePath, content); + } + + private static void assertDiagnosticContains( + ProcessorTestCompiler.CompilationResult result, + Diagnostic.Kind kind, + String fragment + ) { + boolean found = result.diagnostics().stream() + .filter(diagnostic -> diagnostic.getKind() == kind) + .map(diagnostic -> diagnostic.getMessage(null)) + .anyMatch(message -> message.contains(fragment)); + + assertTrue( + found, + () -> "Expected diagnostic containing '%s' but got:%n%s".formatted(fragment, result.diagnosticsAsText()) + ); + } + + private static void assertDiagnosticDoesNotContain( + ProcessorTestCompiler.CompilationResult result, + Diagnostic.Kind kind, + String fragment + ) { + boolean found = result.diagnostics().stream() + .filter(diagnostic -> diagnostic.getKind() == kind) + .map(diagnostic -> diagnostic.getMessage(null)) + .anyMatch(message -> message.contains(fragment)); + + assertFalse( + found, + () -> "Did not expect diagnostic containing '%s' but got:%n%s".formatted(fragment, result.diagnosticsAsText()) + ); + } + + private static int countOccurrences(String text, String fragment) { + int count = 0; + int index = 0; + while ((index = text.indexOf(fragment, index)) >= 0) { + 
count++; + index += fragment.length(); + } + return count; + } +} \ No newline at end of file diff --git a/meta/src/test/java/io/gdcc/spi/meta/processor/ProcessorTestCompiler.java b/meta/src/test/java/io/gdcc/spi/meta/processor/ProcessorTestCompiler.java new file mode 100644 index 0000000..f41aa80 --- /dev/null +++ b/meta/src/test/java/io/gdcc/spi/meta/processor/ProcessorTestCompiler.java @@ -0,0 +1,93 @@ +package io.gdcc.spi.meta.processor; + +import javax.tools.Diagnostic; +import javax.tools.DiagnosticCollector; +import javax.tools.JavaCompiler; +import javax.tools.JavaFileObject; +import javax.tools.StandardJavaFileManager; +import javax.tools.ToolProvider; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +/** + * A utility class for compiling Java source files during tests, leveraging an in-memory + * approach to simulate compilation and validation of Java code. This is typically used + * in scenarios where processor-based code validation is needed. + * + * This class uses the Java Compiler API to compile source files provided as input and + * returns a result encapsulating success state, diagnostics information, and the path + * to generated class files. + */ +final class ProcessorTestCompiler { + + CompilationResult compile(List sources) throws IOException { + JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); + if (compiler == null) { + throw new IllegalStateException("No system Java compiler available. 
Are tests running on a JRE instead of a JDK?"); + } + + Path tempDir = Files.createTempDirectory("plugin-contract-processor-test"); + Path sourceDir = tempDir.resolve("src"); + Path classOutputDir = tempDir.resolve("classes"); + Files.createDirectories(sourceDir); + Files.createDirectories(classOutputDir); + + List sourcePaths = new ArrayList<>(); + for (SourceFile source : sources) { + Path file = sourceDir.resolve(source.relativePath()); + Files.createDirectories(file.getParent()); + Files.writeString(file, source.content(), StandardCharsets.UTF_8); + sourcePaths.add(file); + } + + DiagnosticCollector diagnostics = new DiagnosticCollector<>(); + + try (StandardJavaFileManager fileManager = compiler.getStandardFileManager(diagnostics, null, StandardCharsets.UTF_8)) { + Iterable compilationUnits = + fileManager.getJavaFileObjectsFromPaths(sourcePaths); + + List options = List.of( + "--release", "17", + "-classpath", System.getProperty("java.class.path"), + "-d", classOutputDir.toString() + ); + + JavaCompiler.CompilationTask task = compiler.getTask( + null, + fileManager, + diagnostics, + options, + null, + compilationUnits + ); + + task.setProcessors(List.of(new PluginContractProcessor())); + + boolean success = task.call(); + return new CompilationResult(success, List.copyOf(diagnostics.getDiagnostics()), classOutputDir); + } + } + + record SourceFile(String relativePath, String content) { + } + + record CompilationResult( + boolean success, + List> diagnostics, + Path classOutputDir + ) { + String diagnosticsAsText() { + return diagnostics.stream() + .map(diagnostic -> diagnostic.getKind() + ": " + diagnostic.getMessage(null)) + .reduce("", (left, right) -> left + right + System.lineSeparator()); + } + + Path generatedFile(String relativePath) { + return classOutputDir.resolve(relativePath); + } + } +} diff --git a/pom.xml b/pom.xml index 031e27d..150d1ae 100644 --- a/pom.xml +++ b/pom.xml @@ -10,13 +10,20 @@ 0.13.1 - io.gdcc - dataverse-spi + io.gdcc.spi + 
parent + 2.1.0-SNAPSHOT - jar + pom Dataverse SPI Plugin API https://dataverse.org + + api + core + export + meta + A package to create out-of-tree Java code for Dataverse Software. Plugin projects can use this package as an API dependency just like Jakarta EE APIs if they want to create external plugins. These will be loaded @@ -31,19 +38,152 @@ + + + ${project.license.name} + ${project.license.url} + repo + + + + + https://github.com/gdcc/dataverse-spi + + + + 17 + Apache License, Version 2.0 + https://www.apache.org/licenses/LICENSE-2.0.txt + / + + + + + + io.gdcc.spi + meta + ${project.version} + + + io.gdcc.spi + core + ${project.version} + + + io.gdcc.spi + export + ${project.version} + + + + + - jakarta.json - jakarta.json-api - provided - - - - jakarta.ws.rs - jakarta.ws.rs-api - provided - + org.junit.jupiter + junit-jupiter + test + + + + org.apache.maven.plugins + maven-site-plugin + + + org.sentrysoftware.maven + maven-skin-tools + 1.7.00 + + + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + + + aggregate-javadocs + + false + + aggregate + + + + + protected + + none + + true + false + true + ${jdk.version} + + Dataverse SPI Plugin API Reference + Dataverse SPI Plugin API Reference + + custom.css + + + + ← Back to Dataverse SPI Plugin API homepage + + ]]> + + Version ${project.version} -  + Copyright by ${project.organization.name} -  + Licensed under ${project.license.name} |  + Dataverse SPI Plugin API |  + The Dataverse Project + + ]]> + + + + apiNote + a + API Note: + + + implSpec + a + Implementation Requirements: + + + implNote + a + Implementation Note: + + + + + + + org.apache.maven.plugins + maven-project-info-reports-plugin + 3.9.0 + + + root-project-info + + false + + team + + + + + + + diff --git a/src/main/java/io/gdcc/spi/export/ExportDataContext.java b/src/main/java/io/gdcc/spi/export/ExportDataContext.java deleted file mode 100644 index 9478d39..0000000 --- a/src/main/java/io/gdcc/spi/export/ExportDataContext.java +++ 
/dev/null @@ -1,61 +0,0 @@ -package io.gdcc.spi.export; - -/** - * - * @author landreev - * Provides an optional mechanism for defining various data retrieval options - * for the export subsystem in a way that should allow us adding support for - * more options going forward with minimal or no changes to the already - * implemented export plugins. - */ -public class ExportDataContext { - private boolean datasetMetadataOnly = false; - private boolean publicFilesOnly = false; - private Integer offset = null; - private Integer length = null; - - private ExportDataContext() { - - } - - public static ExportDataContext context() { - ExportDataContext context = new ExportDataContext(); - return context; - } - - public ExportDataContext withDatasetMetadataOnly() { - this.datasetMetadataOnly = true; - return this; - } - - public ExportDataContext withPublicFilesOnly() { - this.publicFilesOnly = true; - return this; - } - - public ExportDataContext withOffset(Integer offset) { - this.offset = offset; - return this; - } - - public ExportDataContext withLength(Integer length) { - this.length = length; - return this; - } - - public boolean isDatasetMetadataOnly() { - return datasetMetadataOnly; - } - - public boolean isPublicFilesOnly() { - return publicFilesOnly; - } - - public Integer getOffset() { - return offset; - } - - public Integer getLength() { - return length; - } -} diff --git a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java b/src/main/java/io/gdcc/spi/export/ExportDataProvider.java deleted file mode 100644 index 4197d97..0000000 --- a/src/main/java/io/gdcc/spi/export/ExportDataProvider.java +++ /dev/null @@ -1,123 +0,0 @@ -package io.gdcc.spi.export; - -import java.io.InputStream; -import java.util.Optional; - -import jakarta.json.JsonArray; -import jakarta.json.JsonObject; - -/** - * Provides all the metadata Dataverse has about a given dataset that can then - * be used by an @see Exporter to create a new metadata export format. 
- * - */ -public interface ExportDataProvider { - - /** - * @return - dataset metadata in the standard Dataverse JSON format used in the - * API and available as the JSON metadata export via the user interface. - * @apiNote - there is no JSON schema defining this output, but the format is - * well documented in the Dataverse online guides. This, and the - * OAI_ORE export are the only two that provide 'complete' - * dataset-level metadata along with basic file metadata for each file - * in the dataset. - * @param context - supplies optional parameters. Needs to support - * context.isDatasetMetadataOnly(). In a situation where we - * need to generate a format like DC that has no use for the - * file-level metadata, it makes sense to skip retrieving and - * formatting it, since there can be a very large number of - * files in a dataset. - */ - JsonObject getDatasetJson(ExportDataContext... context); - - /** - * - * @return - dataset metadata in the JSON-LD based OAI_ORE format used in - * Dataverse's archival bag export mechanism and as available in the - * user interface and by API. - * @apiNote - THis, and the JSON format are the only two that provide complete - * dataset-level metadata along with basic file metadata for each file - * in the dataset. - * @param context - supplies optional parameters. - */ - JsonObject getDatasetORE(ExportDataContext... context); - - /** - * Dataverse is capable of extracting DDI-centric metadata from tabular - * datafiles. This detailed metadata, which is only available for successfully - * "ingested" tabular files, is not included in the output of any other methods - * in this interface. - * - * @return - a JSONArray with one entry per ingested tabular dataset file. - * @apiNote - there is no JSON schema available for this output and the format - * is not well documented. 
Implementers may wish to expore the @see - * edu.harvard.iq.dataverse.export.DDIExporter and the @see - * edu.harvard.iq.dataverse.util.json.JSONPrinter classes where this - * output is used/generated (respectively). - * @param context - supplies optional parameters. - */ - JsonArray getDatasetFileDetails(ExportDataContext... context); - - /** - * Similar to the above, but - * a) retrieves the information for the ingested/tabular data files _only_ - * b) provides an option for retrieving this stuff in batches - * c) provides an option for skipping restricted/embargoed etc. files. - * Intended for datasets with massive numbers of tabular files and datavariables. - * @param context - supplies optional parameters. - * current (2.1.0) known use cases: - * context.isPublicFilesOnly(); - * context.getOffset(); - * context.getLength(); - * @return json array containing the datafile/filemetadata->datatable->datavariable metadata - * @throws ExportException - */ - JsonArray getTabularDataDetails(ExportDataContext ... context) throws ExportException; - - /** - * - * @return - the subset of metadata conforming to the schema.org standard as - * available in the user interface and as included as header metadata in - * dataset pages (for use by search engines) - * @apiNote - as this metadata export is not complete, it should only be used as - * a starting point for an Exporter if it simplifies your exporter - * relative to using the JSON or OAI_ORE exports. - * @param context - supplies optional parameters. - */ - JsonObject getDatasetSchemaDotOrg(ExportDataContext... context); - - /** - * - * @return - the subset of metadata conforming to the DataCite standard as - * available in the Dataverse user interface and as sent to DataCite when DataCite DOIs are used. - * @apiNote - as this metadata export is not complete, it should only be used as - * a starting point for an Exporter if it simplifies your exporter - * relative to using the JSON or OAI_ORE exports. 
- * @param context - supplies optional parameters. - */ - String getDataCiteXml(ExportDataContext... context); - - /** - * If an Exporter has specified a prerequisite format name via the - * getPrerequisiteFormatName() method, it can call this method to retrieve - * metadata in that format. - * - * @return - metadata in the specified prerequisite format (if available from - * another internal or added Exporter) as an Optional - * @apiNote - This functionality is intended as way to easily generate alternate - * formats of the ~same metadata, e.g. to support download as XML, - * HTML, PDF for a specific metadata standard (e.g. DDI). It can be - * particularly useful, reative to starting from the output of one of - * the getDataset* methods above, if there are existing libraries that - * can convert between these formats. Note that, since Exporters can be - * replaced, relying on this method could cause your Exporter to - * malfunction, e.g. if you depend on format "ddi" and a third party - * Exporter is configured to replace the internal ddi Exporter in - * Dataverse. - * @param context - supplies optional parameters. - */ - default Optional getPrerequisiteInputStream(ExportDataContext... context) { - return Optional.empty(); - } - - } diff --git a/src/main/javadoc/custom.css b/src/main/javadoc/custom.css new file mode 100644 index 0000000..07de306 --- /dev/null +++ b/src/main/javadoc/custom.css @@ -0,0 +1,19 @@ +.top-nav-backlink { + padding: 0.75rem 1rem; + font-size: 1rem; + font-weight: 600; +} + +.top-nav-backlink a { + text-decoration: none; +} + +.top-nav-backlink a:hover { + text-decoration: underline; +} + +.doc-version { + margin-left: 1rem; + color: #57606a; + font-weight: 500; +} diff --git a/src/site/markdown/examples.md b/src/site/markdown/examples.md new file mode 100644 index 0000000..2305855 --- /dev/null +++ b/src/site/markdown/examples.md @@ -0,0 +1,24 @@ +# Examples + +This page should contain practical examples for plugin authors. 
+ +## Suggested examples to add + +- creating a minimal plugin +- implementing a service provider +- packaging and deployment +- handling metadata or export extension points + +## Documentation style + +For each example, consider using this structure: + +1. **Goal** +2. **Prerequisites** +3. **Code** +4. **Explanation** +5. **Related API links** + +## API Reference + +When relevant, link directly to classes in the [Javadocs](apidocs/index.html). \ No newline at end of file diff --git a/src/site/markdown/index.md b/src/site/markdown/index.md new file mode 100644 index 0000000..e693045 --- /dev/null +++ b/src/site/markdown/index.md @@ -0,0 +1,59 @@ +# Dataverse SPI Plugin API + +This site provides documentation for the SPI (Service Provider Interface) module for Dataverse. + +## Project Contents + +This project offers a universal Java module to create plugins for Dataverse: + +- It provides *API contracts* a plugin author implements to create a new plugin. +- It provides an easy-to-use *annotation* `@DataversePlugin`, marking an implementation class as such a plugin. +- It generates *plugin metadata* automatically to make a plugin author's life as convenient as possible. +- It allows coordinated data exchange with the core using a *core provider* concept. +- It helps the core to detect any plugins an administrator adds to their Dataverse installation and validate their compatibility. + +## Audience + +This documentation is intended for developers who want to: + +- build Dataverse plugins +- understand the available SPI contracts +- explore integration points +- browse the API reference + +## Maven Coordinates + +The (current) artifact is published as `io.gdcc:dataverse-spi` to *Maven Central*. +Use it in Maven like this: + +```xml + + io.gdcc + dataverse-spi + x.y.z + +``` + +Note: if you're using the GDCC Maven Parent, you may omit the version. 
+ +## Documentation + +- [Examples](examples.html) +- [Modules](modules.html) +- [Javadocs](apidocs/index.html) + +## License + +Licensed under the same terms as the Dataverse core project: [${project.license.name}](${project.license.url}). + +## Context & History + +This module did not appear out of thin air. +Before it was moved to this project with an independent release cycle and potential governance, it was part of the Dataverse core. + +You can find the first ever commit that started it all here: [IQSS/dataverse@e560a34e](https://github.com/IQSS/dataverse/commit/e560a34e89b12a08b0e936e0cc8bd429f7a8c7c5). +In an effort back in 2022, funded by DANS and undertaken by Jim Myers, this package originally formed as a separate Maven module. +You can find the history and context in core pull request [IQSS/dataverse#9175](https://github.com/IQSS/dataverse/pull/9175). + +In 2026, it was decided within the [Dataverse Core Dev Team](https://dataverse.org/about) to move the Maven module into a separate repository, enabling an independent release cycle, tags, the works. +If you are interested in any commit history that happened before the initial Maven module creation, you can dig your way back from [IQSS/dataverse@fa0e2812](https://github.com/IQSS/dataverse/tree/fa0e28124a15b0db8042959b9fee536591f26f8d/modules/dataverse-spi) \ No newline at end of file diff --git a/src/site/markdown/modules.md b/src/site/markdown/modules.md new file mode 100644 index 0000000..ad53db9 --- /dev/null +++ b/src/site/markdown/modules.md @@ -0,0 +1,23 @@ +# Modules + +This project is split into several Maven modules. + +## api + +Core API contracts intended for plugin developers. + +## core + +Shared core functionality supporting the SPI-based extension model. + +## export + +Types related to export-oriented extension points. + +## meta + +Metadata-related SPI support. 
+ +## How to choose + +Start with the module that exposes the contract you need, then use the [Javadocs](apidocs/index.html) to inspect the types in detail. \ No newline at end of file diff --git a/src/site/resources/images/dataverse-logo.png b/src/site/resources/images/dataverse-logo.png new file mode 100644 index 0000000..3c7c0da Binary files /dev/null and b/src/site/resources/images/dataverse-logo.png differ diff --git a/src/site/resources/images/gdcc-logo.png b/src/site/resources/images/gdcc-logo.png new file mode 100644 index 0000000..6ffdaf9 Binary files /dev/null and b/src/site/resources/images/gdcc-logo.png differ diff --git a/src/site/resources/images/project-logo-editable.svg b/src/site/resources/images/project-logo-editable.svg new file mode 100644 index 0000000..bac8f03 --- /dev/null +++ b/src/site/resources/images/project-logo-editable.svg @@ -0,0 +1,92 @@ + + + + diff --git a/src/site/resources/images/project-logo.png b/src/site/resources/images/project-logo.png new file mode 100644 index 0000000..5c46c52 Binary files /dev/null and b/src/site/resources/images/project-logo.png differ diff --git a/src/site/resources/images/project-logo.svg b/src/site/resources/images/project-logo.svg new file mode 100644 index 0000000..afa4650 --- /dev/null +++ b/src/site/resources/images/project-logo.svg @@ -0,0 +1,88 @@ + + + + diff --git a/src/site/site.xml b/src/site/site.xml new file mode 100644 index 0000000..c6e1862 --- /dev/null +++ b/src/site/site.xml @@ -0,0 +1,50 @@ + + + + org.sentrysoftware.maven + sentry-maven-skin + 8.0.00 + + + + ${project.organization.name} + + + The Dataverse Project + + + + + dataverse, plugin, spi, api + sentry-orange + + + + fa-brands fa-github + Check out the project on GitHub + ${project.scm.url} + + + fa-brands fa-zulip + Join us on Zulip + https://dataverse.zulipchat.com + + + + + + + + + + + + + + + + + \ No newline at end of file