From bb487a5b4f68f342c097ffc42c36c5c7815cd77d Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Mon, 20 Apr 2026 12:52:58 -0400 Subject: [PATCH 01/13] KMS-663: Add scripts to download and process past keyword versions --- archive-processor/README.md | 67 +++ .../schemes-rdf/schemes_published.rdf | 239 +++++++++ .../scripts/download-rdf-from-S3.js | 299 +++++++++++ archive-processor/scripts/process-rdf.js | 506 ++++++++++++++++++ archive-processor/scripts/scripts-config.sh | 73 +++ archive-processor/scripts/upload-csv-to-S3.js | 271 ++++++++++ package.json | 5 +- 7 files changed, 1459 insertions(+), 1 deletion(-) create mode 100644 archive-processor/README.md create mode 100644 archive-processor/schemes-rdf/schemes_published.rdf create mode 100644 archive-processor/scripts/download-rdf-from-S3.js create mode 100644 archive-processor/scripts/process-rdf.js create mode 100644 archive-processor/scripts/scripts-config.sh create mode 100644 archive-processor/scripts/upload-csv-to-S3.js diff --git a/archive-processor/README.md b/archive-processor/README.md new file mode 100644 index 00000000..ff4142df --- /dev/null +++ b/archive-processor/README.md @@ -0,0 +1,67 @@ +"# Archive Processor Scripts + +This directory contains a set of Node.js scripts designed to manage the lifecycle of KMS data, from downloading RDF files from an S3 bucket to processing them and uploading the resulting CSV files back to S3. + +## Scripts + +- `download-rdf-from-S3.js`: Downloads versioned RDF files from the configured S3 bucket into the local `archive-processor/downloaded-rdf` directory. +- `process-rdf.js`: Orchestrates a complex workflow that uses the downloaded RDF files. For each version, it loads the RDF data into an RDF4J repository and then triggers a local KMS application to generate corresponding CSV files in the `archive-processor/local-kms-csv` directory. 
+- `upload-csv-to-S3.js`: Uploads the generated CSV files from the `archive-processor/local-kms-csv` directory to the configured S3 bucket. + +## Configuration + +All scripts are configured via a single, centralized shell script: + +`archive-processor/scripts/scripts-config.sh` + +The settings in this file are loaded automatically when you use the `npm run` commands for these scripts. + +This file allows you to set: +- The S3 bucket name and AWS region. +- Your AWS profile. +- Delays between S3 API calls to prevent rate-limiting. +- Specific lists of versions to download, upload, or process. + +## Recommended Workflow + +The scripts are designed to be run in a specific sequence. Ensure all prerequisites are met before starting. + +### Prerequisites + +- **Node.js and Dependencies**: You must have Node.js installed and have run `npm install` from the project root. +- **AWS Credentials**: Your AWS credentials must be configured locally, typically via the `~/.aws/credentials` file. Ensure you have access to the target S3 bucket. +- **Running Services**: The `process-rdf` script requires running instances of **RDF4J** and **Redis** that are accessible to the script. The default URLs are `http://127.0.0.1:8081` for RDF4J and `redis://localhost:6380` for Redis. + +### Step 1: Configure Your Environment + +1. Open `archive-processor/scripts/scripts-config.sh` in your editor. +2. Set the `S3_BUCKET_NAME` and `AWS_PROFILE` if they differ from the defaults. +3. Optionally, specify which versions you want to work with using the `TO_BE_DOWNLOADED_VERSIONS`, `TO_BE_PROCESSED_VERSIONS`, and `TO_BE_UPLOADED_VERSIONS` variables. If you leave them empty, the scripts will process all available versions. + +### Step 2: Download RDF Files from S3 + +This step populates the `archive-processor/downloaded-rdf` directory with the master RDF files from S3. + +```shell +npm run download-rdf +``` + +### Step 3: Process RDFs and Generate CSVs + +This is the main processing step. 
It reads the downloaded RDF files, loads them into RDF4J, and then uses a local application endpoint to generate CSV files in the `archive-processor/local-kms-csv` directory. + +**Important**: Ensure your RDF4J and Redis services are running before executing this command. + +```shell +npm run process-rdf +``` + +### Step 4: Upload Generated CSVs to S3 + +This final step takes the newly created CSV files from the `local-kms-csv` directory and uploads them to the correct versioned path in your S3 bucket. + +```shell +npm run upload-csv +``` + +By following these steps, you can perform a full download-process-upload cycle for your KMS data." \ No newline at end of file diff --git a/archive-processor/schemes-rdf/schemes_published.rdf b/archive-processor/schemes-rdf/schemes_published.rdf new file mode 100644 index 00000000..c751cd54 --- /dev/null +++ b/archive-processor/schemes-rdf/schemes_published.rdf @@ -0,0 +1,239 @@ + + + Chronostratigraphic Units + chronounits + 2025-07-25 + Eon,Era,Period,Epoch,Age,Sub-Age,UUID + + + Locations + locations + 2025-07-25 + Location_Category,Location_Type,Location_Subregion1,Location_Subregion2,Location_Subregion3,Location_Subregion4,UUID + + + Providers + providers + 2025-07-28 + Bucket_Level0,Bucket_Level1,Bucket_Level2,Bucket_Level3,Short_Name,Long_Name,Data_Center_URL,UUID + + + Platforms + platforms + 2025-07-25 + Basis,Category,Sub_Category,Short_Name,Long_Name,UUID + + + Instruments + instruments + 2025-07-28 + Category,Class,Type,Subtype,Short_Name,Long_Name,UUID + + + Projects + projects + 2025-07-28 + Bucket,Short_Name,Long_Name,UUID + + + Disciplines + discipline + 2025-07-25 + Discipline_Name,Subdiscipline,UUID + + + IDN Nodes + idnnode + 2025-07-25 + Short_Name,Long_Name,UUID + + + ISO Topic Categories + isotopiccategory + 2025-07-25 + ISO_Topic_Category,UUID + + + Temporal Resolution Ranges + temporalresolutionrange + 2025-07-25 + Temporal_Resolution_Range,UUID + + + Vertical Resolution Ranges + verticalresolutionrange 
+ 2025-07-25 + Vertical_Resolution_Range,UUID + + + Horizontal Resolution Ranges + horizontalresolutionrange + 2025-07-25 + Horizontal_Resolution_Range,UUID + + + Personnel Role + PersonnelRole + 2025-07-25 + + + Dataset Language + DatasetLanguage + 2025-07-25 + + + Product Flag + ProductFlag + 2025-07-25 + + + Dataset Progress + DatasetProgress + 2025-07-25 + + + Collection Data Type + CollectionDataType + 2025-07-25 + + + Platform Type + PlatformType + 2025-07-25 + + + Private + Private + 2025-07-25 + + + Metadata Association Type + MetadataAssociationType + 2025-07-25 + + + Organization Personnel Role + OrganizationPersonnelRole + 2025-07-25 + + + Duration Unit + DurationUnit + 2025-07-25 + + + Phone Type + PhoneType + 2025-07-25 + + + Persistent Identifier + PersistentIdentifier + 2025-07-25 + + + Organization Type + OrganizationType + 2025-07-25 + + + Granule Spatial Representation + GranuleSpatialRepresentation + 2025-07-25 + + + Coordinate System + CoordinateSystem + 2025-07-25 + + + Product Level Id + ProductLevelId + 2025-07-25 + + + Spatial Coverage Type + SpatialCoverageType + 2025-07-25 + + + Contact Type + ContactType + 2025-07-25 + + + Multimedia Format + MultimediaFormat + 2025-07-25 + + + Mime Type + MimeType + 2025-07-25 + + + Distribution Size Unit + DistributionSizeUnit + 2025-07-25 + + + Related URL Content Types + rucontenttype + 2025-07-25 + URLContentType,Type,Subtype,UUID + + + Metadata Language + MetadataLanguage + 2025-07-25 + + + Data Format + DataFormat + 2025-07-25 + Short_Name,Long_Name,UUID + + + Measurement Name + MeasurementName + 2025-07-25 + Context_Medium,Object,Quantity,UUID + + + Projection Name + ProjectionName + 2025-07-25 + + + Science Keywords + sciencekeywords + 2025-07-25 + Category,Topic,Term,Variable_Level_1,Variable_Level_2,Variable_Level_3,Detailed_Variable,UUID + + + Operations + Operations + 2025-07-25 + + + Projection Authority + ProjectionAuthority + 2025-07-25 + + + Chained Operations + ChainedOperations + 
2025-07-25 + + + Projection Datum Names + ProjectionDatumNames + 2025-07-25 + + + 23.3 + published + 2026-02-13 + 2026-02-13 + + diff --git a/archive-processor/scripts/download-rdf-from-S3.js b/archive-processor/scripts/download-rdf-from-S3.js new file mode 100644 index 00000000..d8fef69a --- /dev/null +++ b/archive-processor/scripts/download-rdf-from-S3.js @@ -0,0 +1,299 @@ +import { createWriteStream, mkdirSync } from 'fs' +import { dirname, join } from 'path' +import { pipeline } from 'stream/promises' +import { fileURLToPath } from 'url' + +import { + GetObjectCommand, + ListObjectsV2Command, + S3Client +} from '@aws-sdk/client-s3' + +const scriptPath = fileURLToPath(import.meta.url) +const scriptDir = dirname(scriptPath) + +/** + * AWS Region for the S3 bucket + * @type {string} + */ +const region = process.env.AWS_REGION || 'us-east-1' + +/** + * S3 bucket name to download files from + * @type {string} + */ +const bucketName = process.env.S3_BUCKET_NAME || 'kms-rdf-backup-sit' + +/** + * AWS Profile to use (optional) + * @type {string|undefined} + */ +const awsProfile = process.env.AWS_PROFILE + +/** + * Output directory for downloaded files + * @type {string} + */ +const outputDir = join(scriptDir, '..', 'downloaded-rdf') + +/** + * Delay in milliseconds between downloads to avoid rate limiting + * Default: 100ms (configurable via DOWNLOAD_DELAY_MS environment variable) + * Set to 0 to disable delay + * @type {number} + */ +const downloadDelayMs = parseInt(process.env.DOWNLOAD_DELAY_MS || '100', 10) + +/** + * Optional comma-separated list of specific versions to download. + * If empty, the script will download all RDF files. + * These should be the S3 key prefixes (e.g., "10.0", "KMS-654-Testing"). 
+ * @type {string} + */ +const toBeDownloadedVersions = process.env.TO_BE_DOWNLOADED_VERSIONS || '' + +/** + * S3 Client configured for the specified region + * @type {S3Client} + */ +const s3Client = new S3Client({ + region, + ...(awsProfile && { credentials: undefined }) +}) + +/** + * Extracts the version name from an S3 object key + * Examples: + * "10.0/rdf.xml" -> "10.0" + * "draft/2026/03/16/rdf.xml" -> "draft-2026-03-16" + * "KMS-654-Testing/rdf.xml" -> "KMS-654-Testing" + * + * @param {string} key - S3 object key + * @returns {string} Version name suitable for use as a filename + */ +const extractVersionName = (key) => { + // Remove the trailing "/rdf.xml" + const versionPath = key.replace('/rdf.xml', '') + + // Replace slashes with hyphens for flat file structure + return versionPath.replace(/\//g, '-') +} + +/** + * Delays execution for the specified number of milliseconds + * + * @param {number} ms - Milliseconds to delay + * @returns {Promise} + */ +const delay = (ms) => new Promise((resolve) => { + setTimeout(resolve, ms) +}) + +/** + * Downloads a single RDF file from S3 + * + * @param {string} key - S3 object key + * @param {number} index - Current file index (for progress display) + * @param {number} total - Total number of files to download + * @returns {Promise<{success: boolean, key: string, outputPath?: string, error?: Error}>} + */ +const downloadRdfFile = async (key, index, total) => { + const versionName = extractVersionName(key) + const fileName = `${versionName}.rdf.xml` + const outputPath = join(outputDir, fileName) + + try { + console.log(`[${index}/${total}] Downloading: ${key} -> ${fileName}`) + + const command = new GetObjectCommand({ + Bucket: bucketName, + Key: key + }) + + const response = await s3Client.send(command) + + if (!response.Body) { + throw new Error('No data returned from S3') + } + + // Ensure output directory exists + mkdirSync(dirname(outputPath), { recursive: true }) + + // Stream the file to disk + const 
writeStream = createWriteStream(outputPath) + await pipeline(response.Body, writeStream) + + console.log(`[${index}/${total}] ✓ Downloaded: ${fileName}`) + + return { + success: true, + key, + outputPath + } + } catch (error) { + console.error(`[${index}/${total}] ✗ Failed to download ${key}:`, error.message) + + return { + success: false, + key, + error + } + } +} + +/** + * Lists all RDF objects from S3 bucket, excluding drafts + * + * @returns {Promise>} Array of S3 object keys + */ +const listS3Objects = async () => { + console.log(`Listing objects from bucket: ${bucketName}...`) + + const allObjectKeys = [] + let continuationToken + let pageCount = 0 + + /* eslint-disable no-await-in-loop */ + do { + pageCount += 1 + console.log(`Fetching page ${pageCount}...`) + + const command = new ListObjectsV2Command({ + Bucket: bucketName, + ContinuationToken: continuationToken + }) + + const response = await s3Client.send(command) + + // Extract object keys from the current page, excluding drafts + if (response.Contents) { + const keys = response.Contents + .map((obj) => obj.Key) + .filter(Boolean) + .filter((key) => !key.startsWith('draft/')) // Exclude draft objects + .filter((key) => key.endsWith('/rdf.xml')) // Only include main RDF files + allObjectKeys.push(...keys) + console.log(`Found ${keys.length} objects in page ${pageCount} (draft objects excluded)`) + } + + // Check if there are more pages to fetch + continuationToken = response.NextContinuationToken + } while (continuationToken) + /* eslint-enable no-await-in-loop */ + + console.log(`\nTotal objects found: ${allObjectKeys.length}\n`) + + return allObjectKeys +} + +/** + * Downloads all RDF files from S3 + * + * @returns {Promise} + */ +const downloadAllRdfFiles = async () => { + // List all objects from S3 + let objects + + const versionList = toBeDownloadedVersions.split(',').map((v) => v.trim()).filter(Boolean) + + // If a non-empty list of versions is provided, construct the object keys + if 
(versionList.length > 0) { + console.log(`\nFound ${versionList.length} specific versions to download.`) + objects = versionList.map((version) => `${version}/rdf.xml`) + } else { + // Otherwise, list all objects from S3 + objects = await listS3Objects() + } + + const totalFiles = objects.length + + console.log(`Starting download of ${totalFiles} RDF files...`) + console.log(`Output directory: ${outputDir}\n`) + + const results = [] + + // Download files sequentially to avoid overwhelming the connection + /* eslint-disable no-await-in-loop */ + for (let i = 0; i < objects.length; i += 1) { + const key = objects[i] + const result = await downloadRdfFile(key, i + 1, totalFiles) + results.push(result) + + // Add delay between downloads if configured + if (downloadDelayMs > 0 && i < objects.length - 1) { + await delay(downloadDelayMs) + } + } + /* eslint-enable no-await-in-loop */ + + // Summary + const successful = results.filter((r) => r.success).length + const failed = results.filter((r) => !r.success).length + + console.log(`\n${'='.repeat(60)}`) + console.log('Download Summary') + console.log('='.repeat(60)) + console.log(`Total files: ${totalFiles}`) + console.log(`✓ Successful: ${successful}`) + console.log(`✗ Failed: ${failed}`) + console.log(`Output directory: ${outputDir}`) + + if (failed > 0) { + console.log('\nFailed downloads:') + results.filter((r) => !r.success).forEach((r) => { + console.log(` - ${r.key}: ${r.error.message}`) + }) + } +} + +/** + * Main execution function + */ +const main = async () => { + console.log('AWS S3 RDF Files Downloader') + console.log('===========================\n') + + // Ensure required directories exist before starting + try { + mkdirSync(outputDir, { recursive: true }) + } catch (error) { + console.error('✗ Failed to create the output directory:', error.message) + process.exit(1) + } + + console.log('Configuration loaded from environment variables.') + console.log('You can set these in 
archive-processor/scripts/scripts-config.sh and run `source archive-processor/scripts/scripts-config.sh`\n') + console.log(`Bucket: ${bucketName}`) + console.log(`Region: ${region}`) + console.log(`Output: ${outputDir}`) + console.log(`Delay between downloads: ${downloadDelayMs}ms`) + + if (awsProfile) { + console.log(`Profile: ${awsProfile}`) + } + + if (toBeDownloadedVersions) { + console.log(`Versions to download: ${toBeDownloadedVersions}`) + } else { + console.log('Versions to download: All versions') + } + + try { + await downloadAllRdfFiles() + console.log('\n✓ Download completed successfully!') + } catch (error) { + console.error('\n✗ Failed to download RDF files:', error.message) + + if (error.name === 'InvalidAccessKeyId' || error.name === 'CredentialsProviderError') { + console.error('\n⚠️ AWS Credentials Error:') + console.error('Please configure your AWS credentials. Run with:') + console.error(' AWS_PROFILE=kms-sit node scripts/downloadRdfFiles.js') + } + + process.exit(1) + } +} + +// Execute the script +main() diff --git a/archive-processor/scripts/process-rdf.js b/archive-processor/scripts/process-rdf.js new file mode 100644 index 00000000..eaa2c7bf --- /dev/null +++ b/archive-processor/scripts/process-rdf.js @@ -0,0 +1,506 @@ +/* eslint-disable no-console */ +/* eslint-disable @typescript-eslint/no-require-imports */ +/** + * Combines the RDF upload and CSV download processes for all RDF files in the data directory. + * + * For each RDF file found in `../data`: + * 1. It uploads the RDF file along with `schemes_published.rdf` to an RDF4J repository, + * after clearing the specified context and the Redis cache. + * 2. It then triggers a download process for the same version, parsing the RDF file + * to find concept schemes and downloading each as a CSV. + * + * This script is controlled by environment variables for services like RDF4J and Redis. 
+ * + * Usage: + * node archive-processor/scripts/process-rdf.js + */ +import { execSync } from 'node:child_process' +import { + appendFileSync, + existsSync, + mkdirSync, + readFileSync, + writeFileSync +} from 'node:fs' +import { readdir, readFile } from 'node:fs/promises' +import { dirname, join } from 'node:path' +import { fileURLToPath } from 'url' + +import { createClient } from 'redis' + +const scriptPath = fileURLToPath(import.meta.url) +const scriptDir = dirname(scriptPath) + +// --- Configuration --- + +// RDF4J and Redis configuration from environment variables +const serviceUrl = process.env.RDF4J_SERVICE_URL || 'http://127.0.0.1:8081' +const baseUrl = `${serviceUrl}/rdf4j-server` +const repoId = process.env.RDF4J_REPOSITORY_ID || 'kms' +const rdf4jStatementsUrl = `${baseUrl}/repositories/${repoId}/statements` +const username = process.env.RDF4J_USER_NAME || 'rdf4j' +const password = process.env.RDF4J_PASSWORD || 'rdf4j' +const base64Credentials = Buffer.from(`${username}:${password}`).toString('base64') +const serverCheckAttempts = Number(process.env.RDF4J_SERVER_CHECK_ATTEMPTS || '60') +const serverCheckDelayMs = Number(process.env.RDF4J_SERVER_CHECK_DELAY_MS || '1000') +const uploadContext = process.env.RDF4J_UPLOAD_CONTEXT || 'published' +const redisUrl = process.env.REDIS_URL || 'redis://localhost:6380' + +// Download configuration +const KMS_APP_BASE_URL = 'http://localhost:3013' +const CONCEPTS_BASE_URL = `${KMS_APP_BASE_URL}/concepts/concept_scheme` +const OUTPUT_BASE_DIR = join(scriptDir, '..', 'local-kms-csv') + +// Delays +const csvDownloadDelayMs = Number(process.env.PROCESS_CSV_DOWNLOAD_DELAY_MS || '1000') +const versionProcessingDelayMs = Number(process.env.PROCESS_VERSION_DELAY_MS || '5000') + +// --- RDF4J Upload Functions --- + +/** + * Builds the HTTP Basic auth header for RDF4J admin/repository calls. + * @returns {string} Basic auth header value. + */ +const getAuthHeader = () => `Basic ${base64Credentials}` + +/** + * Sleeps for the provided delay. 
+ * @param {number} ms - Milliseconds to wait. + * @returns {Promise} + */ +const sleep = (ms) => new Promise((resolve) => { + setTimeout(resolve, ms) +}) + +/** + * Polls RDF4J protocol endpoint until the server is ready or attempts are exhausted. + * @returns {Promise} + * @throws {Error} If server is not reachable within configured attempts. + */ +const waitForServer = async () => { + const checkAttempt = async (attempt) => { + if (attempt > serverCheckAttempts) { + throw new Error(`RDF4J server not ready after ${serverCheckAttempts} attempts`) + } + + try { + console.log(`Checking RDF4J server (${attempt}/${serverCheckAttempts}) at ${baseUrl}`) + const response = await fetch(`${baseUrl}/protocol`, { + headers: { Authorization: getAuthHeader() } + }) + if (response.ok) { + console.log('RDF4J server is up and running') + + return + } + } catch (error) { + const code = error?.cause?.code || error?.code || error?.message || 'unknown' + console.log(`RDF4J server check failed (${attempt}/${serverCheckAttempts}): ${code}`) + } + + await sleep(serverCheckDelayMs) + await checkAttempt(attempt + 1) + } + + await checkAttempt(1) +} + +/** + * Executes a SPARQL query to count statements in a specific graph. + * @param {string} graphUri - The URI of the graph to query. + * @returns {Promise} The number of statements in the graph. 
+ */ +const getGraphStatementCount = async (graphUri) => { + const sparqlQuery = `SELECT (COUNT(*) AS ?count) WHERE { GRAPH <${graphUri}> { ?s ?p ?o } }` + const url = new URL(`${baseUrl}/repositories/${repoId}`) + url.searchParams.append('query', sparqlQuery) + + try { + const response = await fetch(url, { + method: 'GET', + headers: { + Accept: 'application/sparql-results+json', + Authorization: getAuthHeader() + } + }) + + if (!response.ok) { + const text = await response.text() + throw new Error(`SPARQL count query failed: ${response.status} ${response.statusText} ${text}`) + } + + const result = await response.json() + const count = parseInt(result.results.bindings[0]?.count?.value || '0', 10) + + return count + } catch (error) { + // If the query fails (e.g., repo not ready), assume count is 0 but log it. + console.warn(`Warning: Could not retrieve statement count for ${graphUri}. Returning 0. Error: ${error.message}`) + + return 0 + } +} + +/** + * Clears a graph context in the repository for a given version. + * @param {string} version - Graph version (for example `published` or `draft`). + * @returns {Promise} + * @throws {Error} If context delete fails. + */ +const clearContext = async (version) => { + console.log(`Clearing context: ${version}`) + const graphUri = `https://gcmd.earthdata.nasa.gov/kms/version/${version}` + + // 1. Get a statement count before clearing. + const preClearCount = await getGraphStatementCount(graphUri) + console.log(`Found ${preClearCount} statements in context <${graphUri}> before clearing.`) + + if (preClearCount === 0) { + console.log('Context is already empty. No deletion needed.') + + return // No need to delete if it's already empty. + } + + // 2. Perform the deletion. 
+ const url = new URL(rdf4jStatementsUrl) + url.searchParams.append('context', `<${graphUri}>`) + + const response = await fetch(url, { + method: 'DELETE', + headers: { Authorization: getAuthHeader() } + }) + + if (!response.ok && response.status !== 404) { + const text = await response.text() + throw new Error(`Failed to clear context ${version}: ${response.status} ${response.statusText} ${text}`) + } + + // 3. Verify that the context is now empty. + const postClearCount = await getGraphStatementCount(graphUri) + if (postClearCount > 0) { + throw new Error(`Context clearing verification failed. Found ${postClearCount} statements in <${graphUri}> after deletion.`) + } + + console.log(`Context '${version}' cleared and verified successfully (statement count is 0).`) +} + +/** + * Clears the Redis cache. + * @returns {Promise} + * @throws {Error} If Redis connection or flush fails. + */ +const clearRedisCache = async () => { + if (!redisUrl) { + console.log('REDIS_URL not set, skipping cache clearing.') + + return + } + + console.log(`Connecting to Redis at ${redisUrl}`) + const client = createClient({ url: redisUrl }) + + try { + await client.connect() + + // Set a dummy key to verify the flush operation + const dummyKey = `cache-flush-test-${Date.now()}` + await client.set(dummyKey, '1') + + console.log('Flushing Redis cache...') + await client.flushDb() + + // Verify the flush + const keyExists = await client.exists(dummyKey) + if (keyExists) { + throw new Error('Redis cache flush verification failed. Dummy key still exists.') + } + + console.log('Redis cache cleared and verified successfully.') + } catch (error) { + throw new Error(`Failed to clear Redis cache: ${error.message}`) + } finally { + if (client.isOpen) { + await client.disconnect() + } + } +} + +/** + * Loads a local RDF/XML file into the RDF4J statements endpoint for the target context. + * @param {string} filePath - Local path to RDF/XML file. 
+ * @param {string} version - Context version (for example `published` or `draft`). + * @returns {Promise} + * @throws {Error} If file read or POST load fails. + */ +const loadRdfFile = async (filePath, version) => { + const xmlData = await readFile(filePath, 'utf8') + console.log(`Read ${xmlData.length} bytes from file: ${filePath}`) + + const graphUri = `https://gcmd.earthdata.nasa.gov/kms/version/${version}` + const postUrl = new URL(rdf4jStatementsUrl) + postUrl.searchParams.append('context', `<${graphUri}>`) + + const response = await fetch(postUrl, { + method: 'POST', + body: xmlData, + headers: { + 'Content-Type': 'application/rdf+xml', + Authorization: getAuthHeader() + } + }) + if (!response.ok) { + const responseText = await response.text() + throw new Error(`Failed loading ${filePath}: ${response.status} ${response.statusText} ${responseText}`) + } + + console.log(`Successfully loaded ${filePath} into context '${version}'`) +} + +/** + * Reads the schemes RDF file, replaces the version name with the provided version, + * and then uploads it to the RDF4J statements endpoint. + * @param {string} filePath - Local path to the schemes RDF/XML file. + * @param {string} targetContext - Context version for the upload (e.g., `published`). + * @param {string} fileContentVersion - The version string to inject into the file's content. + * @returns {Promise} + * @throws {Error} If file read, modification, or POST load fails. + */ +const loadModifiedSchemesFile = async (filePath, targetContext, fileContentVersion) => { + let xmlData = await readFile(filePath, 'utf8') + console.log(`Read and modifying schemes file: ${filePath} for version ${fileContentVersion}`) + + // Replace the version name in the file content. + const versionTagRegex = /.*<\/gcmd:versionName>/ + const newVersionTag = `${fileContentVersion}` + + if (versionTagRegex.test(xmlData)) { + xmlData = xmlData.replace(versionTagRegex, newVersionTag) + console.log(`Replaced version tag. 
New tag: ${newVersionTag}`) + } else { + // This is not a fatal error, as some schemes files may not have this tag. + console.warn(`Warning: Could not find tag in ${filePath}. Uploading file as-is.`) + } + + const graphUri = `https://gcmd.earthdata.nasa.gov/kms/version/${targetContext}` + const postUrl = new URL(rdf4jStatementsUrl) + postUrl.searchParams.append('context', `<${graphUri}>`) + + const response = await fetch(postUrl, { + method: 'POST', + body: xmlData, + headers: { + 'Content-Type': 'application/rdf+xml', + Authorization: getAuthHeader() + } + }) + + if (!response.ok) { + const responseText = await response.text() + throw new Error(`Failed loading modified schemes file ${filePath}: ${response.status} ${response.statusText} ${responseText}`) + } + + console.log(`Successfully loaded modified ${filePath} into context '${targetContext}'`) +} + +// --- CSV Download Function --- + +/** + * Downloads all concept scheme CSV files for a given version. + * @param {string} version - The version identifier, derived from the filename. + * @param {string} rdfFilePath - The full path to the RDF file to parse for schemes. + */ +const downloadCsvForVersion = async (version, rdfFilePath) => { + // 1. Create the output directory and define log path + // Note: Assumes this script is run from the `archive-processor` directory. + const outputDir = join(OUTPUT_BASE_DIR, version) + const logFilePath = join(outputDir, 'log.txt') + console.log(`Creating output directory: ${outputDir}`) + mkdirSync(outputDir, { recursive: true }) + + // 2. Create a logging function to write errors to log.txt + const logError = (message, errorDetails) => { + const timestamp = new Date().toISOString() + let details = 'No additional error information available.' 
+ if (typeof errorDetails === 'string') { + details = errorDetails + } else if (errorDetails) { + details = errorDetails.stderr?.toString() + || errorDetails.message + || JSON.stringify(errorDetails) + } + + const logEntry = `${timestamp} - ${message}\nDetails: ${details}\n\n` + + try { + appendFileSync(logFilePath, logEntry) + console.error(`Error: ${message} Check ${logFilePath} for details.`) + } catch (logWriteError) { + console.error('--- CRITICAL: FAILED TO WRITE TO LOG FILE ---') + console.error('Original Error:', logEntry) + console.error('Log Write Error:', logWriteError.message) + } + } + + try { + // 3. Read the RDF file to find the list of schemes + console.log(`Reading schemes from ${rdfFilePath}...`) + + if (!existsSync(rdfFilePath)) { + logError(`RDF file not found at ${rdfFilePath}`, 'File does not exist.') + + return // Stop processing for this version + } + + const rdfXml = readFileSync(rdfFilePath, 'utf-8') + + // 4. Parse scheme names from the RDF/XML + const resourceUrlRegex = //g + const urlMatches = rdfXml.match(resourceUrlRegex) || [] + const schemeUrls = urlMatches.map((tag) => tag.slice(34, -3)) + + let schemeNames = [ + ...new Set(schemeUrls.map((url) => url.substring(url.lastIndexOf('/') + 1))) + ].filter((name) => name !== 'Trash') + + // Handle mapping 'GranuleDataFormat' to 'DataFormat' + if (schemeNames.includes('GranuleDataFormat')) { + console.log("Mapping scheme 'GranuleDataFormat' to 'DataFormat' for download.") + schemeNames = schemeNames.map((name) => (name === 'GranuleDataFormat' ? 'DataFormat' : name)) + // Re-apply Set to handle cases where 'DataFormat' already existed, ensuring no duplicates. + schemeNames = [...new Set(schemeNames)] + } + + if (schemeNames.length === 0) { + console.warn(`No schemes found in ${rdfFilePath}. Exiting download step.`) + + return + } + + console.log(`Found schemes: ${schemeNames.join(', ')}`) + + // 5. 
Download each scheme's CSV file + await schemeNames.reduce(async (previousPromise, schemeName) => { + await previousPromise + + const csvUrl = `${CONCEPTS_BASE_URL}/${schemeName}?format=csv` + const outputPath = join(outputDir, `${schemeName}.csv`) + console.log(`Downloading ${schemeName}.csv from ${csvUrl}...`) + + try { + const curlCommand = `curl -s -f -H "Cache-Control: no-cache" -H "Pragma: no-cache" "${csvUrl}"` + const responseBody = execSync(curlCommand).toString() + + if (responseBody.trim().startsWith('{"error"')) { + logError(`Server returned an error for ${schemeName}.csv`, responseBody.trim()) + } else { + writeFileSync(outputPath, responseBody) + console.log(` -> Successfully saved to ${outputPath}`) + } + } catch (e) { + logError(`Failed to download ${schemeName}.csv`, e) + } + + console.log(`Pausing for ${csvDownloadDelayMs / 1000} seconds after download...`) + await sleep(csvDownloadDelayMs) + }, Promise.resolve()) + + console.log(`\nDownload process for version ${version} completed.`) + } catch (error) { + logError(`An unexpected error occurred during the download for version ${version}`, error) + } +} + +// --- Main Execution --- + +/** + * Executes the combined upload and download process. + */ +const main = async () => { + try { + // Note: Assumes being run from `archive-processor`, so paths are relative from there. 
+ const dataDir = join(scriptDir, '..', 'downloaded-rdf') + const schemesFile = join(scriptDir, '..', 'schemes-rdf', 'schemes_published.rdf') + + if (!existsSync(schemesFile)) { + console.error(`Fatal: Schemes file not found at ${schemesFile}`) + process.exit(1) + } + + // Read the list of versions to process from the environment variable + const versionsToProcess = (process.env.TO_BE_PROCESSED_VERSIONS || '' + ).split(',').map((v) => v.trim()).filter(Boolean) + + const allFiles = await readdir(dataDir) + let rdfFiles = allFiles.filter((f) => f.endsWith('.rdf') || f.endsWith('.rdf.xml')) + + // If a specific list of versions is provided, filter the files + if (versionsToProcess.length > 0) { + console.log(`Processing only specified versions: ${versionsToProcess.join(', ')}`) + rdfFiles = rdfFiles.filter((filename) => { + const version = filename.replace('.rdf.xml', '').replace('.rdf', '') + + return versionsToProcess.includes(version) + }) + } else { + console.log('Processing all available versions.') + } + + if (rdfFiles.length === 0) { + console.log('No RDF files found to process for the specified versions.') + + return + } + + console.log(`Found ${rdfFiles.length} RDF files to process.`) + + // Wait for RDF4J server to be ready before starting the loop + await waitForServer() + + await rdfFiles.reduce(async (previousPromise, rdfFilename, index) => { + await previousPromise + + // Pause before processing the next file, but not before the first one. 
+ if (index > 0) { + console.log(`\nPausing for ${versionProcessingDelayMs / 1000} seconds before the next version...`) + await sleep(versionProcessingDelayMs) + } + + const version = rdfFilename.replace('.rdf.xml', '').replace('.rdf', '') + const conceptsFile = join(dataDir, rdfFilename) + + console.log(`\n--- Processing version: ${version} (File ${index + 1}/${rdfFiles.length}) ---\n`) + + try { + // --- UPLOAD --- + console.log(`Starting RDF upload for ${rdfFilename}`) + await clearRedisCache() + await clearContext(uploadContext) + await loadRdfFile(conceptsFile, uploadContext) + + // The schemes file is uploaded with every concept, unless it's the concept itself + if (conceptsFile !== schemesFile) { + // When uploading the schemes file, modify its content to include the current version. + await loadModifiedSchemesFile(schemesFile, uploadContext, version) + } + + console.log(`Upload for ${version} completed successfully.`) + + // --- DOWNLOAD --- + console.log(`\nStarting CSV download for ${version}`) + await downloadCsvForVersion(version, conceptsFile) + + console.log(`\n--- Finished processing version: ${version} ---`) + } catch (error) { + console.error(`Failed to process version ${version}:`, error) + // Continue to the next file + } + }, Promise.resolve()) + + console.log('\nAll versions processed successfully.') + } catch (error) { + console.error('A fatal error occurred in the main process:', error) + process.exit(1) + } +} + +main() diff --git a/archive-processor/scripts/scripts-config.sh b/archive-processor/scripts/scripts-config.sh new file mode 100644 index 00000000..73c714cb --- /dev/null +++ b/archive-processor/scripts/scripts-config.sh @@ -0,0 +1,73 @@ +#!/bin/bash + +# ----------------------------------------------------------------------------- +# Unified Configuration for S3 Scripts +# ----------------------------------------------------------------------------- +# This file contains settings for both downloading RDF files from S3 and +# 
uploading CSV files to S3. +# +# To use, load these environment variables into your session before running +# a script, for example: +# +# source archive-processor/scripts/scripts_config.sh +# npm run download-rdf +# +# ----------------------------------------------------------------------------- + + +# ============================================================================= +# Common AWS & S3 Settings (Used by all scripts) +# ============================================================================= + +# AWS S3 bucket name for all operations +export S3_BUCKET_NAME="kms-rdf-backup-sit" + +# AWS Region for the S3 bucket +export AWS_REGION="us-east-1" + +# Optional: Specify an AWS profile to use for authentication. +# If this is commented out, the script will use default credential resolution. +# export AWS_PROFILE="your-aws-profile" + + + +# ============================================================================= +# RDF Downloader Settings (for download-rdf-from-S3.js) +# ============================================================================= + +# Delay in milliseconds between downloads to avoid rate limiting. +export DOWNLOAD_DELAY_MS="100" + +# Optional: Comma-separated list of specific versions to download. +# If this string is empty, the script will download all RDF files. +# Example: export TO_BE_DOWNLOADED_VERSIONS="10.0,11.0,KMS-654-Testing" +export TO_BE_DOWNLOADED_VERSIONS="7.0,8.0" + + +# ============================================================================= +# RDF Processor Settings (for process-rdf.js) +# ============================================================================= + +# Optional: Comma-separated list of specific versions to process from the +# 'downloaded-rdf' directory. If empty, all found RDF files will be processed. +# Example: export TO_BE_PROCESSED_VERSIONS="10.0,KMS-123" +export TO_BE_PROCESSED_VERSIONS="7.0,8.0" + +# Delay in milliseconds to wait between downloading each concept scheme CSV file. 
+export PROCESS_CSV_DOWNLOAD_DELAY_MS="100" + +# Delay in milliseconds to wait between processing each version (RDF file). +export PROCESS_VERSION_DELAY_MS="5000" + + +# ============================================================================= +# CSV Uploader Settings (for upload-csv-to-S3.js) +# ============================================================================= + +# Delay in milliseconds between uploads to avoid rate limiting. +export UPLOAD_DELAY_MS="100" + +# Optional: Comma-separated list of specific versions to upload. +# If this string is empty, the script will upload all CSV files from all version folders. +# Example: export TO_BE_UPLOADED_VERSIONS="10.0,11.0" +export TO_BE_UPLOADED_VERSIONS="7.0,8.0" \ No newline at end of file diff --git a/archive-processor/scripts/upload-csv-to-S3.js b/archive-processor/scripts/upload-csv-to-S3.js new file mode 100644 index 00000000..af9a5c58 --- /dev/null +++ b/archive-processor/scripts/upload-csv-to-S3.js @@ -0,0 +1,271 @@ +import { createReadStream, mkdirSync } from 'fs' +import { readdir, stat } from 'fs/promises' +import { + dirname, + join, + relative +} from 'path' +import { fileURLToPath } from 'url' + +import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3' + +const scriptPath = fileURLToPath(import.meta.url) +const scriptDir = dirname(scriptPath) + +/** + * AWS Region for the S3 bucket + * @type {string} + */ +const region = process.env.AWS_REGION || 'us-east-1' + +/** + * S3 bucket name to upload files to + * @type {string} + */ +const bucketName = process.env.S3_BUCKET_NAME || 'kms-rdf-backup-sit' + +/** + * AWS Profile to use (optional) + * @type {string|undefined} + */ +const awsProfile = process.env.AWS_PROFILE + +/** + * Input directory for CSV files + * @type {string} + */ +const inputDir = join(scriptDir, '..', 'local-kms-csv') + +/** + * Delay in milliseconds between uploads to avoid rate limiting + * Default: 100ms (configurable via UPLOAD_DELAY_MS environment variable) + * Set 
to 0 to disable delay + * @type {number} + */ +const uploadDelayMs = parseInt(process.env.UPLOAD_DELAY_MS || '100', 10) + +/** + * Optional comma-separated list of specific versions to upload. + * If empty, the script will upload all CSV files from all version folders. + * @type {string} + */ +const toBeUploadedVersions = process.env.TO_BE_UPLOADED_VERSIONS || '' + +/** + * S3 Client configured for the specified region + * @type {S3Client} + */ +const s3Client = new S3Client({ + region, + ...(awsProfile && { credentials: undefined }) +}) + +/** + * Delays execution for the specified number of milliseconds + * + * @param {number} ms - Milliseconds to delay + * @returns {Promise} + */ +const delay = (ms) => new Promise((resolve) => { + setTimeout(resolve, ms) +}) + +/** + * Uploads a single CSV file to S3 + * + * @param {string} filePath - Path to the local CSV file + * @param {number} index - Current file index (for progress display) + * @param {number} total - Total number of files to upload + * @returns {Promise<{success: boolean, filePath: string, s3Key?: string, error?: Error}>} + */ +const uploadCsvFile = async (filePath, index, total) => { + // Construct the S3 key from the file path relative to the input directory + // e.g., local-kms-csv/10.0/file.csv -> 10.0/file.csv + const s3Key = relative(inputDir, filePath).replace(/\\/g, '/') + + try { + console.log(`[${index}/${total}] Uploading: ${filePath} -> s3://${bucketName}/${s3Key}`) + + const fileStream = createReadStream(filePath) + + const command = new PutObjectCommand({ + Bucket: bucketName, + Key: s3Key, + Body: fileStream + }) + + await s3Client.send(command) + + console.log(`[${index}/${total}] ✓ Uploaded: ${s3Key}`) + + return { + success: true, + filePath, + s3Key + } + } catch (error) { + console.error(`[${index}/${total}] ✗ Failed to upload ${filePath}:`, error.message) + + return { + success: false, + filePath, + error + } + } +} + +/** + * Finds all CSV files in the specified directory, optionally 
filtered by version. + * + * @param {string} dir - The base directory to search in. + * @param {string[]} versionList - An array of specific versions to look for. If empty, searches all subdirectories. + * @returns {Promise} A list of file paths. + */ +const findCsvFiles = async (dir, versionList) => { + let dirsToSearch = [] + + if (versionList.length > 0) { + // If specific versions are provided, only look in those directories + dirsToSearch = versionList.map((v) => join(dir, v)) + } else { + // Otherwise, list all items in the base directory + const topLevelItems = await readdir(dir) + // Assume all items in the base directory are version folders + dirsToSearch = await Promise.all(topLevelItems.map(async (item) => { + const fullPath = join(dir, item) + const stats = await stat(fullPath) + + return stats.isDirectory() ? fullPath : null + })).then((results) => results.filter(Boolean)) + } + + console.log(`Searching for CSV files in: ${dirsToSearch.join(', ')}`) + + const filePromises = dirsToSearch.map(async (versionDir) => { + try { + const dirItems = await readdir(versionDir) + + return dirItems + .filter((item) => item.endsWith('.csv')) + .map((item) => join(versionDir, item)) + } catch (err) { + if (err.code === 'ENOENT') { + console.warn(`Warning: Directory not found, skipping: ${versionDir}`) + + return [] // Return an empty array for non-existent directories + } + + throw err // Re-throw other errors + } + }) + + const filesByDir = await Promise.all(filePromises) + + return filesByDir.flat() +} + +/** + * Main function to upload all CSV files + * + * @returns {Promise} + */ +const uploadAllCsvFiles = async () => { + const versionList = toBeUploadedVersions.split(',').map((v) => v.trim()).filter(Boolean) + + console.log('Finding CSV files to upload...') + const filesToUpload = await findCsvFiles(inputDir, versionList) + + if (filesToUpload.length === 0) { + console.log('No CSV files found to upload.') + + return + } + + const totalFiles = 
filesToUpload.length + + console.log(`\nStarting upload of ${totalFiles} CSV files...`) + console.log(`Input directory: ${inputDir}\n`) + + const results = await filesToUpload.reduce(async (previousPromise, filePath, index) => { + const accResults = await previousPromise + + const result = await uploadCsvFile(filePath, index + 1, totalFiles) + + if (uploadDelayMs > 0 && index < totalFiles - 1) { + await delay(uploadDelayMs) + } + + return [...accResults, result] + }, Promise.resolve([])) + + // Summary + const successful = results.filter((r) => r.success).length + const failed = results.filter((r) => !r.success).length + + console.log(`\n${'='.repeat(60)}`) + console.log('Upload Summary') + console.log('='.repeat(60)) + console.log(`Total files: ${totalFiles}`) + console.log(`✓ Successful: ${successful}`) + console.log(`✗ Failed: ${failed}`) + console.log(`Target bucket: ${bucketName}`) + + if (failed > 0) { + console.log('\nFailed uploads:') + results.filter((r) => !r.success).forEach((r) => { + console.log(` - ${r.filePath}: ${r.error.message}`) + }) + } +} + +/** + * Main execution function + */ +const main = async () => { + console.log('AWS S3 CSV Files Uploader') + console.log('========================\n') + + // Ensure required directories exist before starting + try { + mkdirSync(inputDir, { recursive: true }) + } catch (error) { + console.error('✗ Failed to create the input directory:', error.message) + process.exit(1) + } + + console.log('Configuration loaded from environment variables.') + console.log('You can set these in archive-processor/scripts/scripts-config.sh and run `source archive-processor/scripts/scripts-config.sh`\n') + console.log(`Bucket: ${bucketName}`) + console.log(`Region: ${region}`) + console.log(`Input: ${inputDir}`) + console.log(`Delay between uploads: ${uploadDelayMs}ms`) + + if (awsProfile) { + console.log(`Profile: ${awsProfile}`) + } + + if (toBeUploadedVersions) { + console.log(`Versions to upload: ${toBeUploadedVersions}`) + } 
else { + console.log('Versions to upload: All versions') + } + + console.log('\n') + + try { + await uploadAllCsvFiles() + console.log('\n✓ Upload completed successfully!') + } catch (error) { + console.error('\n✗ Failed to upload CSV files:', error.message) + + if (error.name === 'InvalidAccessKeyId' || error.name === 'CredentialsProviderError') { + console.error('\n⚠️ AWS Credentials Error:') + console.error('Please configure your AWS credentials.') + } + + process.exit(1) + } +} + +// Execute the script +main() diff --git a/package.json b/package.json index bba3d566..9f262623 100644 --- a/package.json +++ b/package.json @@ -28,7 +28,10 @@ "redis:memory_used": "bash bin/redis/memory_used.sh", "prime-cache:invoke-local": "bash scripts/local/invoke_prime_concepts_cache.sh", "export-data": "vite-node --config vite.config.js setup/scripts/exportData.js -all", - "create-rdf-files": "vite-node --config vite.config.js setup/scripts/createRdfFiles.js" + "create-rdf-files": "vite-node --config vite.config.js setup/scripts/createRdfFiles.js", + "download-rdf": "source archive-processor/scripts/scripts-config.sh && vite-node archive-processor/scripts/download-rdf-from-S3.js", + "process-rdf": "source archive-processor/scripts/scripts-config.sh && vite-node archive-processor/scripts/process-rdf.js", + "upload-csv": "source archive-processor/scripts/scripts-config.sh && vite-node archive-processor/scripts/upload-csv-to-S3.js" }, "dependencies": { "@aws-sdk/client-eventbridge": "^3.997.0", From 995694e917946ae2336c1060c0d6fb50622201b6 Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Mon, 20 Apr 2026 13:11:15 -0400 Subject: [PATCH 02/13] KMS-663: Clean up logic of concept file and scheme file existence --- archive-processor/scripts/process-rdf.js | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/archive-processor/scripts/process-rdf.js b/archive-processor/scripts/process-rdf.js index eaa2c7bf..16573c2b 100644 --- 
a/archive-processor/scripts/process-rdf.js +++ b/archive-processor/scripts/process-rdf.js @@ -52,7 +52,7 @@ const CONCEPTS_BASE_URL = `${KMS_APP_BASE_URL}/concepts/concept_scheme` const OUTPUT_BASE_DIR = join(scriptDir, '..', 'local-kms-csv') // Delays -const csvDownloadDelayMs = Number(process.env.PROCESS_CSV_DOWNLOAD_DELAY_MS || '1000') +const csvDownloadDelayMs = Number(process.env.PROCESS_CSV_DOWNLOAD_DELAY_MS || '100') const versionProcessingDelayMs = Number(process.env.PROCESS_VERSION_DELAY_MS || '5000') // --- RDF4J Upload Functions --- @@ -477,11 +477,9 @@ const main = async () => { await clearContext(uploadContext) await loadRdfFile(conceptsFile, uploadContext) - // The schemes file is uploaded with every concept, unless it's the concept itself - if (conceptsFile !== schemesFile) { - // When uploading the schemes file, modify its content to include the current version. - await loadModifiedSchemesFile(schemesFile, uploadContext, version) - } + // The schemes file is uploaded with every concept file to ensure the context is complete. + // When uploading the schemes file, its content is modified to include the current version name. + await loadModifiedSchemesFile(schemesFile, uploadContext, version) console.log(`Upload for ${version} completed successfully.`) From 48c1a538352ecd9487070e5da07a7b9ccb426f48 Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Tue, 21 Apr 2026 11:59:03 -0400 Subject: [PATCH 03/13] KMS-663: Set 'Revision' to 'N/A' in csv --- archive-processor/scripts/process-rdf.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/archive-processor/scripts/process-rdf.js b/archive-processor/scripts/process-rdf.js index 16573c2b..ff0d1078 100644 --- a/archive-processor/scripts/process-rdf.js +++ b/archive-processor/scripts/process-rdf.js @@ -281,6 +281,9 @@ const loadModifiedSchemesFile = async (filePath, targetContext, fileContentVersi console.warn(`Warning: Could not find tag in ${filePath}. 
Uploading file as-is.`) } + // Also set created date to 'N/A'. In the scheme csv output, this is 'Revision' + xmlData = xmlData.replace(/.*<\/dcterms:created>/g, 'N/A') + const graphUri = `https://gcmd.earthdata.nasa.gov/kms/version/${targetContext}` const postUrl = new URL(rdf4jStatementsUrl) postUrl.searchParams.append('context', `<${graphUri}>`) From 2756020854742d713445627f25cffb91bfb68c4a Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Tue, 21 Apr 2026 18:56:06 -0400 Subject: [PATCH 04/13] KMS-663: Add csv upload and rdf upload to publisher, remove rdf cron --- bin/localstack/start.sh | 2 +- cdk/app/lib/helper/KmsLambdaFunctions.ts | 38 ---- .../exportRdfToS3/__tests__/handler.test.js | 178 ----------------- serverless/src/exportRdfToS3/handler.js | 118 ------------ serverless/src/publish/handler.js | 5 +- .../src/publisher/__tests__/handler.test.js | 107 +++++++++++ serverless/src/publisher/handler.js | 37 +++- .../src/shared/__tests__/awsClients.test.js | 132 +++++++++++++ .../exportPublishSchemeCsvToS3.test.js | 181 ++++++++++++++++++ .../shared/__tests__/exportRdfToS3.test.js | 143 ++++++++++++++ .../__tests__/publishKeywordEvent.test.js | 6 +- serverless/src/shared/awsClients.js | 50 +++++ .../src/shared/exportPublishSchemeCsvToS3.js | 78 ++++++++ serverless/src/shared/exportRdfToS3.js | 78 ++++++++ serverless/src/shared/publishKeywordEvent.js | 25 +-- 15 files changed, 815 insertions(+), 363 deletions(-) delete mode 100644 serverless/src/exportRdfToS3/__tests__/handler.test.js delete mode 100644 serverless/src/exportRdfToS3/handler.js create mode 100644 serverless/src/shared/__tests__/awsClients.test.js create mode 100644 serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js create mode 100644 serverless/src/shared/__tests__/exportRdfToS3.test.js create mode 100644 serverless/src/shared/awsClients.js create mode 100644 serverless/src/shared/exportPublishSchemeCsvToS3.js create mode 100644 serverless/src/shared/exportRdfToS3.js diff 
--git a/bin/localstack/start.sh b/bin/localstack/start.sh index 28094d2f..297adf51 100755 --- a/bin/localstack/start.sh +++ b/bin/localstack/start.sh @@ -6,7 +6,7 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" # shellcheck source=bin/env/local_env.sh source "${SCRIPT_DIR}/../env/local_env.sh" -REQUIRED_SERVICES="sns,sqs,events" +REQUIRED_SERVICES="sns,sqs,events,s3" if ! docker network inspect "${KMS_DOCKER_NETWORK}" >/dev/null 2>&1; then docker network create "${KMS_DOCKER_NETWORK}" >/dev/null diff --git a/cdk/app/lib/helper/KmsLambdaFunctions.ts b/cdk/app/lib/helper/KmsLambdaFunctions.ts index 21e5dd2c..baaeda66 100644 --- a/cdk/app/lib/helper/KmsLambdaFunctions.ts +++ b/cdk/app/lib/helper/KmsLambdaFunctions.ts @@ -135,7 +135,6 @@ export class LambdaFunctions { this.createNightlyCachePrimeCron(scope) this.createCrudOperationApiLambdas(scope) this.createPublishEventBridgeWiring(scope) - this.createExportRdfCrons(scope) } /** @@ -474,43 +473,6 @@ export class LambdaFunctions { })) } - /** - * Creates Lambda function for exporting RDF to S3 and sets up associated cron jobs - * @param {Construct} scope - The scope in which to define these constructs - * @private - */ - private createExportRdfCrons(scope: Construct) { - // Create the exportRdfToS3 Lambda - const exportRdfToS3Lambda = this.createApiLambda( - scope, - 'exportRdfToS3/handler.js', - 'export-rdf-to-s3', - 'handler', - '/export-rdf', - 'POST', - false, - Duration.minutes(15) - ) - - // Set up cron jobs for exportRdfToS3 - // Runs at 1am nightly. - this.setupCronJob( - scope, - exportRdfToS3Lambda, - 'cron(0 6 * * ? *)', - { version: 'published' }, - 'Published' - ) - - this.setupCronJob( - scope, - exportRdfToS3Lambda, - 'cron(5 6 * * ? *)', - { version: 'draft' }, - 'Draft' - ) - } - /** * Creates the publisher Lambda that analyzes keyword changes. * This Lambda consumes publish events and emits publisher events. 
diff --git a/serverless/src/exportRdfToS3/__tests__/handler.test.js b/serverless/src/exportRdfToS3/__tests__/handler.test.js deleted file mode 100644 index d9103a88..00000000 --- a/serverless/src/exportRdfToS3/__tests__/handler.test.js +++ /dev/null @@ -1,178 +0,0 @@ -import { - CreateBucketCommand, - HeadBucketCommand, - PutObjectCommand, - S3Client -} from '@aws-sdk/client-s3' -import { - beforeEach, - describe, - expect, - vi -} from 'vitest' - -import { getApplicationConfig } from '@/shared/getConfig' -import { getVersionMetadata } from '@/shared/getVersionMetadata' -import { sparqlRequest } from '@/shared/sparqlRequest' - -import { handler } from '../handler' - -// Mock dependencies -vi.mock('@/shared/getConfig') -vi.mock('@/shared/sparqlRequest') -vi.mock('@aws-sdk/client-s3') -vi.mock('@/shared/getVersionMetadata') - -describe('exportRdfToS3 handler', () => { - beforeEach(() => { - vi.resetAllMocks() - vi.useFakeTimers() - vi.spyOn(console, 'error').mockImplementation(() => {}) - vi.spyOn(console, 'log').mockImplementation(() => {}) - - getApplicationConfig.mockReturnValue({ - env: 'test', - defaultResponseHeaders: { 'Content-Type': 'application/json' } - }) - - getVersionMetadata.mockReturnValue({ versionName: '21.4' }) - - sparqlRequest.mockResolvedValue({ - ok: true, - text: vi.fn().mockResolvedValue('Test RDF Data') - }) - - S3Client.prototype.send = vi.fn().mockResolvedValue({}) - }) - - afterEach(() => { - vi.useRealTimers() - }) - - describe('when export process runs', () => { - test('should successfully export RDF data to S3', async () => { - const result = await handler({ version: 'published' }) - - expect(result.statusCode).toBe(200) - expect(JSON.parse(result.body).message).toBe('RDF export process complete for version published') - - expect(sparqlRequest).toHaveBeenCalledWith({ - method: 'GET', - path: '/statements', - accept: 'application/rdf+xml', - version: 'published' - }) - - 
expect(S3Client.prototype.send).toHaveBeenCalledWith(expect.any(HeadBucketCommand)) - expect(S3Client.prototype.send).toHaveBeenCalledWith(expect.any(PutObjectCommand)) - expect(console.log).toHaveBeenCalledWith(expect.stringContaining('RDF data for version published exported successfully')) - }) - - test('should use versionName as S3 key for published version', async () => { - const result = await handler({ version: 'published' }) - - expect(result.statusCode).toBe(200) - expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ - Bucket: 'kms-rdf-backup-test', - Key: '21.4/rdf.xml', - Body: expect.any(String), - ContentType: 'application/rdf+xml' - })) - }) - - test('should use date-based S3 key for non-published versions', async () => { - vi.setSystemTime(new Date('2023-06-01T12:00:00Z')) - - const result = await handler({ version: 'draft' }) - - expect(result.statusCode).toBe(200) - expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ - Bucket: 'kms-rdf-backup-test', - Key: 'draft/2023/06/01/rdf.xml', - Body: expect.any(String), - ContentType: 'application/rdf+xml' - })) - }) - - test('should use default version if not provided', async () => { - const result = await handler({}) - - expect(result.statusCode).toBe(200) - expect(JSON.parse(result.body).message).toBe('RDF export process complete for version published') - expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ - Key: '21.4/rdf.xml' - })) - }) - - test('should create S3 bucket if it does not exist', async () => { - S3Client.prototype.send.mockRejectedValueOnce({ name: 'NotFound' }) - - const result = await handler({ version: 'published' }) - - expect(result.statusCode).toBe(200) - expect(S3Client.prototype.send).toHaveBeenCalledWith(expect.any(CreateBucketCommand)) - }) - - test('should handle S3 upload failure', async () => { - S3Client.prototype.send.mockResolvedValueOnce({}) // HeadBucketCommand - S3Client.prototype.send.mockRejectedValueOnce(new 
Error('S3 upload failed')) // PutObjectCommand - - const result = await handler({ version: 'published' }) - - expect(result.statusCode).toBe(500) // The handler catches the error and still returns 200 - expect(JSON.parse(result.body).message).toBe('Error in RDF export process') - expect(console.error).toHaveBeenCalledWith( - 'Error in export process:', - expect.objectContaining({ - message: 'S3 upload failed' - }) - ) - }) - - test('should handle unexpected S3 errors', async () => { - S3Client.prototype.send.mockRejectedValueOnce(new Error('Unexpected S3 error')) - - const result = await handler({ version: 'published' }) - - expect(result.statusCode).toBe(500) - expect(JSON.parse(result.body).message).toBe('Error in RDF export process') - expect(console.error).toHaveBeenCalledWith( - 'Error in export process:', - expect.objectContaining({ - message: 'Unexpected S3 error' - }) - ) - }) - - test('should handle sparqlRequest HTTP error', async () => { - sparqlRequest.mockResolvedValue({ - ok: false, - status: 503 - }) - - const result = await handler({ version: 'published' }) - - expect(result.statusCode).toBe(500) - expect(JSON.parse(result.body).message).toBe('Error in RDF export process') - expect(console.error).toHaveBeenCalledWith( - 'Error in export process:', - expect.objectContaining({ - message: 'HTTP error! 
status: 503' - }) - ) - }) - }) - - describe('Configuration and settings', () => { - test('should set correct ContentType for S3 upload', async () => { - await handler({ version: 'published' }) - - expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ - Bucket: 'kms-rdf-backup-test', - Key: '21.4/rdf.xml', - Body: expect.any(String), - ContentType: 'application/rdf+xml' - })) - }) - }) -}) diff --git a/serverless/src/exportRdfToS3/handler.js b/serverless/src/exportRdfToS3/handler.js deleted file mode 100644 index 43a901d7..00000000 --- a/serverless/src/exportRdfToS3/handler.js +++ /dev/null @@ -1,118 +0,0 @@ -import { - CreateBucketCommand, - HeadBucketCommand, - PutObjectCommand, - S3Client -} from '@aws-sdk/client-s3' - -import { ensureBucketAndLifecycleRule } from '@/shared/ensureBucketAndLifeCycleRule' -import { getApplicationConfig } from '@/shared/getConfig' -import { getVersionMetadata } from '@/shared/getVersionMetadata' -import { sparqlRequest } from '@/shared/sparqlRequest' - -/** - * Handler for exporting RDF data to Amazon S3. - * - * This function is designed to be invoked on a schedule or on-demand. It exports RDF data - * to an S3 bucket, with different behaviors for 'published' and 'draft' version. - * - * For 'published' version: - * - Creates a file at `{versionName}/rdf.xml` - * - Overwrites existing file if present - * - * For 'draft' version: - * - Creates a new file with a date-based path: `draft/{year}/{month}/{day}/rdf.xml` - * - A new file is created each time, preserving historical versions - * - * The function will create the S3 bucket if it doesn't exist using the stage name - * as part of the bucket name, e.g., kms-rdf-backup-${stage} - * - * @async - * @function handler - * @param {Object} event - The event object containing the export information. - * @param {string} [event.version='published'] - The version of the RDF data to export (e.g., 'published', 'draft'). 
- * @returns {Promise} A promise that resolves to an object containing: - * - statusCode: HTTP status code (200 for success, 500 for error) - * - headers: Response headers - * - body: A JSON string with a message indicating the result of the export process - * @throws Will throw an error if the RDF data fetch fails or if there are issues with S3 operations. - */ -export const handler = async (event) => { - const { defaultResponseHeaders, env: stage } = getApplicationConfig() - const version = event.version || 'published' // Default to 'published' if not specified - - try { - const s3BucketName = `kms-rdf-backup-${stage}` - const s3Client = new S3Client({}) - - // Ensure bucket exists and lifecycle rule is set - await ensureBucketAndLifecycleRule(s3Client, s3BucketName, 30, 'draft/') // Expire after 30 days - - // Fetch RDF data from the repository using sparqlRequest - const response = await sparqlRequest({ - method: 'GET', - path: '/statements', - accept: 'application/rdf+xml', - version - }) - - if (!response.ok) { - console.log('error fetching rdfxml for ', version) - throw new Error(`HTTP error! status: ${response.status}`) - } - - const rdfData = await response.text() - - // Generate the S3 key based on the current date and version - - let s3Key - if (version === 'published') { - const { versionName } = await getVersionMetadata(version) - s3Key = `${versionName}/rdf.xml` - } else { - const currentDate = new Date() - const year = currentDate.getUTCFullYear() - const month = String(currentDate.getUTCMonth() + 1).padStart(2, '0') - const day = String(currentDate.getUTCDate()).padStart(2, '0') - s3Key = `${version}/${year}/${month}/${day}/rdf.xml` - } - - // Check if bucket exists - try { - await s3Client.send(new HeadBucketCommand({ Bucket: s3BucketName })) - } catch (error) { - if (error.name === 'NotFound') { - console.log(`Bucket ${s3BucketName} not found. 
Creating...`) - await s3Client.send(new CreateBucketCommand({ Bucket: s3BucketName })) - } else { - throw error - } - } - - // Upload RDF data to S3 - await s3Client.send(new PutObjectCommand({ - Bucket: s3BucketName, - Key: s3Key, - Body: rdfData, - ContentType: 'application/rdf+xml' - })) - - console.log(`RDF data for version ${version} exported successfully to ${s3Key}`) - - return { - statusCode: 200, - headers: defaultResponseHeaders, - body: JSON.stringify({ message: `RDF export process complete for version ${version}` }) - } - } catch (error) { - console.error('Error in export process:', error) - - return { - statusCode: 500, - headers: defaultResponseHeaders, - body: JSON.stringify({ message: 'Error in RDF export process' }) - } - } -} - -export default handler diff --git a/serverless/src/publish/handler.js b/serverless/src/publish/handler.js index 3ce9e4b8..43cad12e 100644 --- a/serverless/src/publish/handler.js +++ b/serverless/src/publish/handler.js @@ -1,5 +1,6 @@ -import { EventBridgeClient, PutEventsCommand } from '@aws-sdk/client-eventbridge' +import { PutEventsCommand } from '@aws-sdk/client-eventbridge' +import { getEventBridgeClient } from '@/shared/awsClients' import { getApplicationConfig } from '@/shared/getConfig' import { logAnalyticsData } from '@/shared/logAnalyticsData' import { logger } from '@/shared/logger' @@ -7,7 +8,7 @@ import { logger } from '@/shared/logger' const PUBLISH_EVENT_SOURCE = 'kms.publish' const PUBLISH_EVENT_DETAIL_TYPE = 'kms.published.version.changed' -const publishEventClient = new EventBridgeClient({}) +const publishEventClient = getEventBridgeClient() /** * Emits a publish-version-changed event to EventBridge to trigger the publisher handler. 
diff --git a/serverless/src/publisher/__tests__/handler.test.js b/serverless/src/publisher/__tests__/handler.test.js index 17abf458..3bb824ed 100644 --- a/serverless/src/publisher/__tests__/handler.test.js +++ b/serverless/src/publisher/__tests__/handler.test.js @@ -8,6 +8,8 @@ import { import { CsvComparator } from '@/shared/csvComparator' import { downloadConcepts } from '@/shared/downloadConcepts' +import { exportPublishSchemeCsvToS3 } from '@/shared/exportPublishSchemeCsvToS3' +import { exportRdfToS3 } from '@/shared/exportRdfToS3' import { getConceptSchemeDetails } from '@/shared/getConceptSchemeDetails' import { logger } from '@/shared/logger' import { getPublishUpdateQuery } from '@/shared/operations/updates/getPublishUpdateQuery' @@ -48,6 +50,8 @@ vi.mock('@/shared/getConceptSchemeDetails') vi.mock('@/shared/operations/updates/getPublishUpdateQuery') vi.mock('@/shared/publishKeywordEvent') vi.mock('@/shared/sparqlRequest') +vi.mock('@/shared/exportRdfToS3') +vi.mock('@/shared/exportPublishSchemeCsvToS3') vi.mock('@aws-sdk/client-eventbridge', () => ({ EventBridgeClient: vi.fn(() => ({ send: sendEventBridgeMock @@ -60,6 +64,8 @@ describe('publisher handler', () => { vi.resetAllMocks() delete process.env.BLOCK_PUBLISH_ON_KEYWORD_DIFF_FAILURE sendEventBridgeMock.mockResolvedValue({ FailedEntryCount: 0 }) + exportRdfToS3.mockResolvedValue({ s3Key: 'test/rdf.xml' }) + exportPublishSchemeCsvToS3.mockResolvedValue() publishKeywordEvent.mockResolvedValue({ messageId: 'message-1', message: '{}', @@ -951,6 +957,11 @@ describe('publisher handler', () => { }) expect(logger.info).toHaveBeenCalledWith('[publisher] Publish update completed for version=v1.0.0') + + // Verify S3 exports were called + expect(exportRdfToS3).toHaveBeenCalledWith({ version: 'published' }) + expect(exportRdfToS3).toHaveBeenCalledWith({ version: 'draft' }) + expect(exportPublishSchemeCsvToS3).toHaveBeenCalled() expect(publishKeywordEvent).toHaveBeenCalledTimes(1) 
expect(publishKeywordEvent).toHaveBeenCalledWith(expect.objectContaining({ EventType: 'INSERTED', @@ -983,6 +994,102 @@ describe('publisher handler', () => { expect(sendEventBridgeMock).not.toHaveBeenCalled() }) + test('should return partial_success when S3 exports fail', async () => { + const mockSchemes = [{ notation: 'sciencekeywords' }] + getConceptSchemeDetails.mockResolvedValue(mockSchemes) + downloadConcepts.mockResolvedValue('csv content') + + const mockComparison = { + addedKeywords: new Map(), + removedKeywords: new Map(), + changedKeywords: new Map() + } + + const mockComparator = { + compare: vi.fn().mockReturnValue(mockComparison), + getSummary: vi.fn().mockReturnValue({ + addedCount: 0, + removedCount: 0, + changedCount: 0 + }) + } + CsvComparator.mockImplementation(() => mockComparator) + + // Mock exports to fail + exportRdfToS3 + .mockRejectedValueOnce(new Error('S3 RDF published export failed')) + .mockResolvedValue({ s3Key: 'test/rdf.xml' }) // Draft export succeeds + + exportPublishSchemeCsvToS3.mockRejectedValue(new Error('S3 CSV export failed')) + + const result = await publisher(mockEvent) + + expect(result.status).toBe('partial_success') + expect(result.postPublishFailures).toHaveLength(2) + expect(result.postPublishFailures).toContain('Failed to export Published RDF to S3: S3 RDF published export failed') + expect(result.postPublishFailures).toContain('Failed to export Published Scheme CSVs to S3: S3 CSV export failed') + + // Ensure both RDF exports were attempted + expect(exportRdfToS3).toHaveBeenCalledTimes(2) + + // Ensure CSV export was attempted + expect(exportPublishSchemeCsvToS3).toHaveBeenCalledTimes(1) + + // Ensure cache-prime event was still emitted + expect(result.cachePrimeEventEmitted).toBe(true) + expect(sendEventBridgeMock).toHaveBeenCalledTimes(1) + + expect(logger.error).toHaveBeenCalledWith('[publisher] Failed to export Published RDF to S3: S3 RDF published export failed') + 
expect(logger.error).toHaveBeenCalledWith('[publisher] Failed to export Published Scheme CSVs to S3: S3 CSV export failed') + }) + + test('should return partial_success when draft S3 RDF export fails', async () => { + const mockSchemes = [{ notation: 'sciencekeywords' }] + getConceptSchemeDetails.mockResolvedValue(mockSchemes) + downloadConcepts.mockResolvedValue('csv content') + + const mockComparison = { + addedKeywords: new Map(), + removedKeywords: new Map(), + changedKeywords: new Map() + } + const mockComparator = { + compare: vi.fn().mockReturnValue(mockComparison), + getSummary: vi.fn().mockReturnValue({ + addedCount: 0, + removedCount: 0, + changedCount: 0 + }) + } + CsvComparator.mockImplementation(() => mockComparator) + + // Mock exports: published succeeds, draft fails + exportRdfToS3.mockImplementation(async ({ version }) => { + if (version === 'draft') { + throw new Error('S3 Draft export failed') + } + + return { s3Key: 'published/rdf.xml' } + }) + + const result = await publisher(mockEvent) + + expect(result.status).toBe('partial_success') + expect(result.postPublishFailures).toHaveLength(1) + expect(result.postPublishFailures).toContain('Failed to export Draft RDF to S3: S3 Draft export failed') + + expect(exportRdfToS3).toHaveBeenCalledTimes(2) + expect(exportRdfToS3).toHaveBeenCalledWith({ version: 'published' }) + expect(exportRdfToS3).toHaveBeenCalledWith({ version: 'draft' }) + + expect(exportPublishSchemeCsvToS3).toHaveBeenCalledTimes(1) + + expect(result.cachePrimeEventEmitted).toBe(true) + expect(sendEventBridgeMock).toHaveBeenCalledTimes(1) + + expect(logger.error).toHaveBeenCalledWith('[publisher] Failed to export Draft RDF to S3: S3 Draft export failed') + }) + test('should continue publish when keyword diff retries exhaust and blocking is disabled', async () => { vi.useFakeTimers() diff --git a/serverless/src/publisher/handler.js b/serverless/src/publisher/handler.js index 56f66e5d..69e75203 100644 --- 
a/serverless/src/publisher/handler.js +++ b/serverless/src/publisher/handler.js @@ -1,7 +1,10 @@ -import { EventBridgeClient, PutEventsCommand } from '@aws-sdk/client-eventbridge' +import { PutEventsCommand } from '@aws-sdk/client-eventbridge' +import { getEventBridgeClient } from '@/shared/awsClients' import { CsvComparator } from '@/shared/csvComparator' import { downloadConcepts } from '@/shared/downloadConcepts' +import { exportPublishSchemeCsvToS3 } from '@/shared/exportPublishSchemeCsvToS3' +import { exportRdfToS3 } from '@/shared/exportRdfToS3' import { getConceptSchemeDetails } from '@/shared/getConceptSchemeDetails' import { logger } from '@/shared/logger' import { getPublishUpdateQuery } from '@/shared/operations/updates/getPublishUpdateQuery' @@ -12,7 +15,7 @@ const PUBLISHER_EVENT_SOURCE = 'kms.publisher' const PUBLISHER_EVENT_DETAIL_TYPE = 'kms.publisher.analysis.completed' const KEYWORD_EVENT_PUBLISH_RETRIES = 3 -const publisherEventClient = new EventBridgeClient({}) +const publisherEventClient = getEventBridgeClient() const shouldBlockPublishOnKeywordDiffFailure = () => ( process.env.BLOCK_PUBLISH_ON_KEYWORD_DIFF_FAILURE === 'true' @@ -507,6 +510,36 @@ export const publisher = async (event) => { logger.info('[publisher] No keyword events generated, skipping SNS publish') } + // Export RDF and CSV data to S3 after publishing + logger.info('[publisher] Starting S3 exports of RDF and CSV data.') + + try { + await exportRdfToS3({ version: 'published' }) + logger.info('[publisher] Successfully exported Published RDF to S3.') + } catch (exportError) { + const failureMessage = `Failed to export Published RDF to S3: ${exportError.message}` + postPublishFailures.push(failureMessage) + logger.error(`[publisher] ${failureMessage}`) + } + + try { + await exportRdfToS3({ version: 'draft' }) + logger.info('[publisher] Successfully exported Draft RDF to S3.') + } catch (exportError) { + const failureMessage = `Failed to export Draft RDF to S3: 
${exportError.message}` + postPublishFailures.push(failureMessage) + logger.error(`[publisher] ${failureMessage}`) + } + + try { + await exportPublishSchemeCsvToS3() + logger.info('[publisher] Successfully exported Published Scheme CSVs to S3.') + } catch (exportError) { + const failureMessage = `Failed to export Published Scheme CSVs to S3: ${exportError.message}` + postPublishFailures.push(failureMessage) + logger.error(`[publisher] ${failureMessage}`) + } + // Emit event for cache-prime to consume let cachePrimeEventEmitted = false diff --git a/serverless/src/shared/__tests__/awsClients.test.js b/serverless/src/shared/__tests__/awsClients.test.js new file mode 100644 index 00000000..9766f022 --- /dev/null +++ b/serverless/src/shared/__tests__/awsClients.test.js @@ -0,0 +1,132 @@ +import { + afterEach, + beforeEach, + describe, + expect, + test, + vi +} from 'vitest' + +// Mock the AWS SDK clients before any imports +const { s3ClientMock, eventBridgeClientMock, snsClientMock } = vi.hoisted(() => ({ + s3ClientMock: vi.fn(), + eventBridgeClientMock: vi.fn(), + snsClientMock: vi.fn() +})) + +vi.mock('@aws-sdk/client-s3', () => ({ S3Client: s3ClientMock })) +vi.mock('@aws-sdk/client-eventbridge', () => ({ EventBridgeClient: eventBridgeClientMock })) +vi.mock('@aws-sdk/client-sns', () => ({ SNSClient: snsClientMock })) + +describe('awsClients', () => { + beforeEach(() => { + // Reset modules to clear singleton instances between tests + vi.resetModules() + vi.clearAllMocks() + // Ensure a clean environment for each test + delete process.env.AWS_ENDPOINT_URL + }) + + afterEach(() => { + delete process.env.AWS_ENDPOINT_URL + }) + + describe('when AWS_ENDPOINT_URL is not set (standard AWS)', () => { + test('getS3Client should create a client with default config', async () => { + const { getS3Client } = await import('../awsClients') + + getS3Client() + + expect(s3ClientMock).toHaveBeenCalledWith({}) + }) + + test('getEventBridgeClient should create a client with default 
config', async () => { + const { getEventBridgeClient } = await import('../awsClients') + + getEventBridgeClient() + + expect(eventBridgeClientMock).toHaveBeenCalledWith({}) + }) + + test('getSnsClient should create a client with default config', async () => { + const { getSnsClient } = await import('../awsClients') + + getSnsClient() + + expect(snsClientMock).toHaveBeenCalledWith({}) + }) + }) + + describe('when AWS_ENDPOINT_URL is set (LocalStack)', () => { + const localstackEndpoint = 'http://localhost:4566' + const expectedConfig = { + endpoint: localstackEndpoint, + region: 'us-east-1', + credentials: { + accessKeyId: 'test', + secretAccessKey: 'test' + }, + forcePathStyle: true + } + + beforeEach(() => { + process.env.AWS_ENDPOINT_URL = localstackEndpoint + }) + + test('getS3Client should create a client with LocalStack config', async () => { + const { getS3Client } = await import('../awsClients') + + getS3Client() + + expect(s3ClientMock).toHaveBeenCalledWith(expectedConfig) + }) + + test('getEventBridgeClient should create a client with LocalStack config', async () => { + const { getEventBridgeClient } = await import('../awsClients') + + getEventBridgeClient() + + expect(eventBridgeClientMock).toHaveBeenCalledWith(expectedConfig) + }) + + test('getSnsClient should create a client with LocalStack config', async () => { + const { getSnsClient } = await import('../awsClients') + + getSnsClient() + + expect(snsClientMock).toHaveBeenCalledWith(expectedConfig) + }) + }) + + describe('singleton behavior', () => { + test('getS3Client should only create one instance', async () => { + const { getS3Client } = await import('../awsClients') + + const client1 = getS3Client() + const client2 = getS3Client() + + expect(s3ClientMock).toHaveBeenCalledTimes(1) + expect(client1).toBe(client2) + }) + + test('getEventBridgeClient should only create one instance', async () => { + const { getEventBridgeClient } = await import('../awsClients') + + const client1 = 
getEventBridgeClient() + const client2 = getEventBridgeClient() + + expect(eventBridgeClientMock).toHaveBeenCalledTimes(1) + expect(client1).toBe(client2) + }) + + test('getSnsClient should only create one instance', async () => { + const { getSnsClient } = await import('../awsClients') + + const client1 = getSnsClient() + const client2 = getSnsClient() + + expect(snsClientMock).toHaveBeenCalledTimes(1) + expect(client1).toBe(client2) + }) + }) +}) diff --git a/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js b/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js new file mode 100644 index 00000000..002000f4 --- /dev/null +++ b/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js @@ -0,0 +1,181 @@ +import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3' +import { + afterEach, + beforeEach, + describe, + expect, + test, + vi +} from 'vitest' + +import { downloadConcepts } from '@/shared/downloadConcepts' +import { getConceptSchemeDetails } from '@/shared/getConceptSchemeDetails' +import { getVersionMetadata } from '@/shared/getVersionMetadata' +import { logger } from '@/shared/logger' + +// Mock dependencies +vi.mock('@aws-sdk/client-s3') +vi.mock('@/shared/downloadConcepts') +vi.mock('@/shared/getConceptSchemeDetails') +vi.mock('@/shared/getVersionMetadata') +vi.mock('@/shared/logger') + +describe('exportPublishSchemeCsvToS3', () => { + beforeEach(() => { + // Use fake timers to control setTimeout used in the delay function + vi.useFakeTimers() + vi.resetModules() + vi.resetAllMocks() + process.env.NODE_ENV = 'test' + + // Mock S3Client's send method globally + S3Client.prototype.send = vi.fn().mockResolvedValue({}) + + // Mock logger methods to prevent actual logging during tests + vi.spyOn(logger, 'info').mockImplementation(() => {}) + vi.spyOn(logger, 'warn').mockImplementation(() => {}) + vi.spyOn(logger, 'error').mockImplementation(() => {}) + + // Default successful mocks for dependencies + 
getVersionMetadata.mockResolvedValue({ versionName: 'v22.1' }) + getConceptSchemeDetails.mockResolvedValue([ + { notation: 'SCHEME1' }, + { notation: 'SCHEME2' } + ]) + + // Mocking resolved values for each download call + downloadConcepts.mockImplementation(async ({ conceptScheme }) => { + if (conceptScheme === 'SCHEME1') return 'csv,data,for,scheme1' + if (conceptScheme === 'SCHEME2') return 'csv,data,for,scheme2' + + return '' + }) + }) + + afterEach(() => { + delete process.env.NODE_ENV + // Restore real timers after each test + vi.useRealTimers() + }) + + test('should successfully export CSVs for all published schemes', async () => { + const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') + + await Promise.all([exportPublishSchemeCsvToS3(), vi.runAllTimersAsync()]) + + // Verify initial setup calls + expect(getVersionMetadata).toHaveBeenCalledWith('published') + expect(getConceptSchemeDetails).toHaveBeenCalledWith({ version: 'published' }) + + // Verify CSV download for each scheme + expect(downloadConcepts).toHaveBeenCalledTimes(2) + expect(downloadConcepts).toHaveBeenCalledWith({ + conceptScheme: 'SCHEME1', + format: 'csv', + version: 'published' + }) + + expect(downloadConcepts).toHaveBeenCalledWith({ + conceptScheme: 'SCHEME2', + format: 'csv', + version: 'published' + }) + + // Verify S3 upload for each scheme + const S3ClientMock = vi.mocked(S3Client) + const s3SendMock = S3ClientMock.mock.results[0].value.send + expect(s3SendMock).toHaveBeenCalledTimes(2) + expect(PutObjectCommand).toHaveBeenCalledWith({ + Bucket: 'kms-rdf-backup-test', + Key: 'v22.1/SCHEME1.csv', + Body: 'csv,data,for,scheme1', + ContentType: 'text/csv' + }) + + expect(PutObjectCommand).toHaveBeenCalledWith({ + Bucket: 'kms-rdf-backup-test', + Key: 'v22.1/SCHEME2.csv', + Body: 'csv,data,for,scheme2', + ContentType: 'text/csv' + }) + + // Verify logging + expect(logger.info).toHaveBeenCalledWith('Exporting published CSVs for version: v22.1') + 
expect(logger.info).toHaveBeenCalledWith('Uploading SCHEME1.csv to s3://kms-rdf-backup-test/v22.1/SCHEME1.csv') + expect(logger.info).toHaveBeenCalledWith('Finished exporting all published scheme CSVs to S3.') + }) + + test('should log a warning and exit if no schemes are found', async () => { + vi.mocked(getConceptSchemeDetails).mockResolvedValue([]) + + const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') + + await Promise.all([exportPublishSchemeCsvToS3(), vi.runAllTimersAsync()]) + + expect(logger.warn).toHaveBeenCalledWith('No published concept schemes found to export.') + expect(downloadConcepts).not.toHaveBeenCalled() + const S3ClientMock = vi.mocked(S3Client) + if (S3ClientMock.mock.results[0]) { + const s3SendMock = S3ClientMock.mock.results[0].value.send + expect(s3SendMock).not.toHaveBeenCalled() + } + }) + + test('should throw an error if versionName cannot be determined', async () => { + vi.mocked(getVersionMetadata).mockResolvedValue({ versionName: null }) + + const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') + + await expect(exportPublishSchemeCsvToS3()).rejects.toThrow('Could not determine published version name.') + expect(logger.error).toHaveBeenCalledWith('Error in exportPublishSchemeCsvToS3: Could not determine published version name.') + }) + + test('should throw an error if getVersionMetadata rejects', async () => { + vi.mocked(getVersionMetadata).mockRejectedValue(new Error('API failure')) + + const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') + + await expect(exportPublishSchemeCsvToS3()).rejects.toThrow('API failure') + expect(logger.error).toHaveBeenCalledWith('Error in exportPublishSchemeCsvToS3: API failure') + }) + + test('should continue processing other schemes if one fails', async () => { + vi.mocked(downloadConcepts).mockImplementation(async ({ conceptScheme }) => { + if (conceptScheme === 'SCHEME1') throw new Error('Download 
failed') + if (conceptScheme === 'SCHEME2') return 'csv,data,for,scheme2' + + return '' + }) + + const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') + + await Promise.all([exportPublishSchemeCsvToS3(), vi.runAllTimersAsync()]) + + expect(logger.error).toHaveBeenCalledWith('Failed to process scheme SCHEME1: Download failed') + // Check that the second scheme was still processed successfully + expect(downloadConcepts).toHaveBeenCalledTimes(2) + + const S3ClientMock = vi.mocked(S3Client) + const s3SendMock = S3ClientMock.mock.results[0].value.send + expect(s3SendMock).toHaveBeenCalledTimes(1) + expect(PutObjectCommand).toHaveBeenCalledWith({ + Bucket: 'kms-rdf-backup-test', + Key: 'v22.1/SCHEME2.csv', + Body: 'csv,data,for,scheme2', + ContentType: 'text/csv' + }) + + expect(logger.info).toHaveBeenCalledWith('Finished exporting all published scheme CSVs to S3.') + }) + + test('should use "dev" environment if NODE_ENV is not set', async () => { + const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') + delete process.env.NODE_ENV + + await Promise.all([exportPublishSchemeCsvToS3(), vi.runAllTimersAsync()]) + + expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ + Bucket: 'kms-rdf-backup-dev' + })) + }) +}) diff --git a/serverless/src/shared/__tests__/exportRdfToS3.test.js b/serverless/src/shared/__tests__/exportRdfToS3.test.js new file mode 100644 index 00000000..e73c6893 --- /dev/null +++ b/serverless/src/shared/__tests__/exportRdfToS3.test.js @@ -0,0 +1,143 @@ +import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3' +import { + afterEach, + beforeEach, + describe, + expect, + vi +} from 'vitest' + +import { ensureBucketAndLifecycleRule } from '@/shared/ensureBucketAndLifeCycleRule' +import { getVersionMetadata } from '@/shared/getVersionMetadata' +import { sparqlRequest } from '@/shared/sparqlRequest' + +import { exportRdfToS3 } from '../exportRdfToS3' + +// Mock dependencies 
+vi.mock('@/shared/sparqlRequest') +vi.mock('@aws-sdk/client-s3') +vi.mock('@/shared/getVersionMetadata') +vi.mock('@/shared/ensureBucketAndLifeCycleRule') + +describe('exportRdfToS3', () => { + beforeEach(() => { + vi.resetAllMocks() + vi.useFakeTimers() + vi.spyOn(console, 'error').mockImplementation(() => {}) + vi.spyOn(console, 'log').mockImplementation(() => {}) + + process.env.NODE_ENV = 'test' + + getVersionMetadata.mockReturnValue({ versionName: '21.4' }) + + sparqlRequest.mockResolvedValue({ + ok: true, + text: vi.fn().mockResolvedValue('Test RDF Data') + }) + + S3Client.prototype.send = vi.fn().mockResolvedValue({}) + ensureBucketAndLifecycleRule.mockResolvedValue() + }) + + afterEach(() => { + vi.useRealTimers() + }) + + describe('when export process runs', () => { + test('should successfully export RDF data to S3 for published version', async () => { + const result = await exportRdfToS3({ version: 'published' }) + + expect(result.s3Key).toBe('21.4/rdf.xml') + + expect(ensureBucketAndLifecycleRule).toHaveBeenCalledWith(expect.any(S3Client), 'kms-rdf-backup-test', 30, 'draft/') + + expect(sparqlRequest).toHaveBeenCalledWith({ + method: 'GET', + path: '/statements', + accept: 'application/rdf+xml', + version: 'published' + }) + + expect(S3Client.prototype.send).toHaveBeenCalledWith(expect.any(PutObjectCommand)) + expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ + Bucket: 'kms-rdf-backup-test', + Key: '21.4/rdf.xml', + Body: 'Test RDF Data', + ContentType: 'application/rdf+xml' + })) + + expect(console.log).toHaveBeenCalledWith('RDF data for version published exported successfully to 21.4/rdf.xml') + }) + + test('should use date-based S3 key for non-published versions', async () => { + vi.setSystemTime(new Date('2023-06-01T12:00:00Z')) + + const result = await exportRdfToS3({ version: 'draft' }) + + expect(result.s3Key).toBe('draft/2023/06/01/rdf.xml') + + expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ + 
Bucket: 'kms-rdf-backup-test', + Key: 'draft/2023/06/01/rdf.xml', + Body: expect.any(String), + ContentType: 'application/rdf+xml' + })) + }) + + test('should handle S3 upload failure', async () => { + S3Client.prototype.send.mockRejectedValueOnce(new Error('S3 upload failed')) + + await expect(exportRdfToS3({ version: 'published' })).rejects.toThrow('S3 upload failed') + }) + + test('should handle sparqlRequest HTTP error', async () => { + sparqlRequest.mockResolvedValue({ + ok: false, + status: 503 + }) + + await expect(exportRdfToS3({ version: 'published' })).rejects.toThrow('HTTP error! status: 503') + expect(console.log).toHaveBeenCalledWith('error fetching rdfxml for ', 'published') + }) + + test('should handle getVersionMetadata failure', async () => { + getVersionMetadata.mockRejectedValue(new Error('Metadata fetch failed')) + + await expect(exportRdfToS3({ version: 'published' })).rejects.toThrow('Metadata fetch failed') + }) + + test('should handle ensureBucketAndLifecycleRule failure', async () => { + ensureBucketAndLifecycleRule.mockRejectedValue(new Error('Bucket setup failed')) + + await expect(exportRdfToS3({ version: 'published' })).rejects.toThrow('Bucket setup failed') + }) + }) + + describe('Configuration and settings', () => { + test('should set correct ContentType for S3 upload', async () => { + await exportRdfToS3({ version: 'published' }) + + expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ + ContentType: 'application/rdf+xml' + })) + }) + + test('should use correct bucket name based on NODE_ENV', async () => { + process.env.NODE_ENV = 'staging' + await exportRdfToS3({ version: 'published' }) + + expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ + Bucket: 'kms-rdf-backup-staging' + })) + }) + + test('should use dev as default bucket name if NODE_ENV is not set', async () => { + delete process.env.NODE_ENV + await exportRdfToS3({ version: 'published' }) + + 
expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ + Bucket: 'kms-rdf-backup-dev' + })) + }) + }) +}) diff --git a/serverless/src/shared/__tests__/publishKeywordEvent.test.js b/serverless/src/shared/__tests__/publishKeywordEvent.test.js index 18745f5c..05f1c905 100644 --- a/serverless/src/shared/__tests__/publishKeywordEvent.test.js +++ b/serverless/src/shared/__tests__/publishKeywordEvent.test.js @@ -64,7 +64,8 @@ describe('when the keyword event publisher is used', () => { credentials: { accessKeyId: 'test', secretAccessKey: 'test' - } + }, + forcePathStyle: true }) }) @@ -80,7 +81,8 @@ describe('when the keyword event publisher is used', () => { credentials: { accessKeyId: 'test', secretAccessKey: 'test' - } + }, + forcePathStyle: true }) }) }) diff --git a/serverless/src/shared/awsClients.js b/serverless/src/shared/awsClients.js new file mode 100644 index 00000000..b3f764f7 --- /dev/null +++ b/serverless/src/shared/awsClients.js @@ -0,0 +1,50 @@ +import { EventBridgeClient } from '@aws-sdk/client-eventbridge' +import { S3Client } from '@aws-sdk/client-s3' +import { SNSClient } from '@aws-sdk/client-sns' + +const getClientConfig = () => { + const endpoint = process.env.AWS_ENDPOINT_URL + const config = endpoint + ? 
{ + endpoint, + region: process.env.AWS_REGION || 'us-east-1', + credentials: { + accessKeyId: process.env.AWS_ACCESS_KEY_ID || 'test', + secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || 'test' + } + } + : {} + + if (endpoint) { + config.forcePathStyle = true + } + + return config +} + +let s3Client +export const getS3Client = () => { + if (!s3Client) { + s3Client = new S3Client(getClientConfig()) + } + + return s3Client +} + +let eventBridgeClient +export const getEventBridgeClient = () => { + if (!eventBridgeClient) { + eventBridgeClient = new EventBridgeClient(getClientConfig()) + } + + return eventBridgeClient +} + +let snsClient +export const getSnsClient = () => { + if (!snsClient) { + snsClient = new SNSClient(getClientConfig()) + } + + return snsClient +} diff --git a/serverless/src/shared/exportPublishSchemeCsvToS3.js b/serverless/src/shared/exportPublishSchemeCsvToS3.js new file mode 100644 index 00000000..186700de --- /dev/null +++ b/serverless/src/shared/exportPublishSchemeCsvToS3.js @@ -0,0 +1,78 @@ +import { PutObjectCommand } from '@aws-sdk/client-s3' + +import { getS3Client } from '@/shared/awsClients' +import { downloadConcepts } from '@/shared/downloadConcepts' +import { getConceptSchemeDetails } from '@/shared/getConceptSchemeDetails' +import { getVersionMetadata } from '@/shared/getVersionMetadata' +import { logger } from '@/shared/logger' + +const s3 = getS3Client() + +const delay = (ms) => new Promise((resolve) => { setTimeout(resolve, ms) }) + +/** + * For all published schemes, downloads the concepts as CSV and uploads them to an S3 bucket. + * The bucket is determined by the environment, and the key includes the published version name. 
+ * @returns {Promise} + */ +export const exportPublishSchemeCsvToS3 = async () => { + const s3ExportDelayMs = parseInt(process.env.S3_EXPORT_DELAY_MS || '100', 10) + const env = process.env.NODE_ENV || 'dev' + const bucketName = `kms-rdf-backup-${env}` + + try { + const { versionName } = await getVersionMetadata('published') + if (!versionName) { + throw new Error('Could not determine published version name.') + } + + logger.info(`Exporting published CSVs for version: ${versionName}`) + + const schemes = await getConceptSchemeDetails({ version: 'published' }) + if (!schemes || schemes.length === 0) { + logger.warn('No published concept schemes found to export.') + + return + } + + logger.info(`Found ${schemes.length} published schemes to export.`) + + await schemes.reduce((previousPromise, scheme, index) => previousPromise.then(async () => { + const { notation } = scheme + try { + logger.info(`Downloading CSV for scheme: ${notation}`) + const csvData = await downloadConcepts({ + conceptScheme: notation, + format: 'csv', + version: 'published' + }) + + const s3Key = `${versionName}/${notation}.csv` + + logger.info(`Uploading ${notation}.csv to s3://${bucketName}/${s3Key}`) + + await s3.send(new PutObjectCommand({ + Bucket: bucketName, + Key: s3Key, + Body: csvData, + ContentType: 'text/csv' + })) + + logger.info(`Successfully uploaded ${notation}.csv to S3.`) + + if (s3ExportDelayMs > 0 && index < schemes.length - 1) { + await delay(s3ExportDelayMs) + } + } catch (error) { + logger.error(`Failed to process scheme ${notation}: ${error.message}`) + } + }), Promise.resolve()) + + logger.info('Finished exporting all published scheme CSVs to S3.') + } catch (error) { + logger.error(`Error in exportPublishSchemeCsvToS3: ${error.message}`) + throw error + } +} + +export default exportPublishSchemeCsvToS3 diff --git a/serverless/src/shared/exportRdfToS3.js b/serverless/src/shared/exportRdfToS3.js new file mode 100644 index 00000000..476bfe15 --- /dev/null +++ 
b/serverless/src/shared/exportRdfToS3.js @@ -0,0 +1,78 @@ +import { PutObjectCommand } from '@aws-sdk/client-s3' + +import { getS3Client } from '@/shared/awsClients' +import { ensureBucketAndLifecycleRule } from '@/shared/ensureBucketAndLifeCycleRule' +import { getVersionMetadata } from '@/shared/getVersionMetadata' +import { sparqlRequest } from '@/shared/sparqlRequest' + +/** + * Exports RDF data to an S3 bucket. + * + * This function exports RDF data to an S3 bucket, with different behaviors for 'published' and 'draft' versions. + * + * For 'published' version: + * - Creates a file at `{versionName}/rdf.xml` + * - Overwrites existing file if present + * + * For 'draft' version: + * - Creates a new file with a date-based path: `draft/{year}/{month}/{day}/rdf.xml` + * - A new file is created each time, preserving historical versions + * + * The function will create the S3 bucket if it doesn't exist using the stage name + * as part of the bucket name, e.g., kms-rdf-backup-${stage} + * + * @async + * @function exportRdfToS3 + * @param {Object} params - The parameters for the export. + * @param {string} params.version - The version of the RDF data to export (e.g., 'published', 'draft'). + * @returns {Promise<{s3Key: string}>} A promise that resolves to an object containing the S3 key of the exported file. + * @throws Will throw an error if the RDF data fetch fails or if there are issues with S3 operations. 
+ */ +export const exportRdfToS3 = async ({ version }) => { + const env = process.env.NODE_ENV || 'dev' + const s3BucketName = `kms-rdf-backup-${env}` + const s3Client = getS3Client() + + // Ensure bucket exists and lifecycle rule is set + await ensureBucketAndLifecycleRule(s3Client, s3BucketName, 30, 'draft/') // Expire after 30 days + + // Fetch RDF data from the repository using sparqlRequest + const response = await sparqlRequest({ + method: 'GET', + path: '/statements', + accept: 'application/rdf+xml', + version + }) + + if (!response.ok) { + console.log('error fetching rdfxml for ', version) + throw new Error(`HTTP error! status: ${response.status}`) + } + + const rdfData = await response.text() + + // Generate the S3 key based on the current date and version + let s3Key + if (version === 'published') { + const { versionName } = await getVersionMetadata(version) + s3Key = `${versionName}/rdf.xml` + } else { + const currentDate = new Date() + const year = currentDate.getUTCFullYear() + const month = String(currentDate.getUTCMonth() + 1).padStart(2, '0') + const day = String(currentDate.getUTCDate()).padStart(2, '0') + s3Key = `${version}/${year}/${month}/${day}/rdf.xml` + } + + // Upload RDF data to S3 + await s3Client.send(new PutObjectCommand({ + Bucket: s3BucketName, + Key: s3Key, + Body: rdfData, + ContentType: 'application/rdf+xml' + })) + + console.log(`RDF data for version ${version} exported successfully to ${s3Key}`) + + return { s3Key } +} diff --git a/serverless/src/shared/publishKeywordEvent.js b/serverless/src/shared/publishKeywordEvent.js index 3338c57a..69cb5781 100644 --- a/serverless/src/shared/publishKeywordEvent.js +++ b/serverless/src/shared/publishKeywordEvent.js @@ -1,27 +1,8 @@ -import { PublishCommand, SNSClient } from '@aws-sdk/client-sns' +import { PublishCommand } from '@aws-sdk/client-sns' -/** - * Creates an SNS client for either real AWS or a LocalStack endpoint override. 
- * - * @returns {SNSClient} Configured SNS client instance. - */ -const createSnsClient = () => { - const endpoint = process.env.AWS_ENDPOINT_URL - const config = endpoint - ? { - endpoint, - region: process.env.AWS_REGION || 'us-east-1', - credentials: { - accessKeyId: process.env.AWS_ACCESS_KEY_ID || 'test', - secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || 'test' - } - } - : {} - - return new SNSClient(config) -} +import { getSnsClient } from '@/shared/awsClients' -const snsClient = createSnsClient() +const snsClient = getSnsClient() /** * Publishes a keyword event payload to the configured SNS topic. From 295941854106c2099e8c8cfd3868e70c20ccf82e Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Wed, 22 Apr 2026 11:45:04 -0400 Subject: [PATCH 05/13] KMS-663: Fix S3 bucket name --- .../exportPublishSchemeCsvToS3.test.js | 18 +++++++++-------- .../shared/__tests__/exportRdfToS3.test.js | 20 ++++++++----------- .../src/shared/exportPublishSchemeCsvToS3.js | 3 ++- serverless/src/shared/exportRdfToS3.js | 5 +++-- 4 files changed, 23 insertions(+), 23 deletions(-) diff --git a/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js b/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js index 002000f4..5cbd793e 100644 --- a/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js +++ b/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js @@ -10,6 +10,7 @@ import { import { downloadConcepts } from '@/shared/downloadConcepts' import { getConceptSchemeDetails } from '@/shared/getConceptSchemeDetails' +import { getApplicationConfig } from '@/shared/getConfig' import { getVersionMetadata } from '@/shared/getVersionMetadata' import { logger } from '@/shared/logger' @@ -19,6 +20,7 @@ vi.mock('@/shared/downloadConcepts') vi.mock('@/shared/getConceptSchemeDetails') vi.mock('@/shared/getVersionMetadata') vi.mock('@/shared/logger') +vi.mock('@/shared/getConfig') describe('exportPublishSchemeCsvToS3', () => { beforeEach(() 
=> { @@ -26,7 +28,6 @@ describe('exportPublishSchemeCsvToS3', () => { vi.useFakeTimers() vi.resetModules() vi.resetAllMocks() - process.env.NODE_ENV = 'test' // Mock S3Client's send method globally S3Client.prototype.send = vi.fn().mockResolvedValue({}) @@ -37,6 +38,7 @@ describe('exportPublishSchemeCsvToS3', () => { vi.spyOn(logger, 'error').mockImplementation(() => {}) // Default successful mocks for dependencies + getApplicationConfig.mockReturnValue({ env: 'sit' }) getVersionMetadata.mockResolvedValue({ versionName: 'v22.1' }) getConceptSchemeDetails.mockResolvedValue([ { notation: 'SCHEME1' }, @@ -53,7 +55,6 @@ describe('exportPublishSchemeCsvToS3', () => { }) afterEach(() => { - delete process.env.NODE_ENV // Restore real timers after each test vi.useRealTimers() }) @@ -86,14 +87,14 @@ describe('exportPublishSchemeCsvToS3', () => { const s3SendMock = S3ClientMock.mock.results[0].value.send expect(s3SendMock).toHaveBeenCalledTimes(2) expect(PutObjectCommand).toHaveBeenCalledWith({ - Bucket: 'kms-rdf-backup-test', + Bucket: 'kms-rdf-backup-sit', Key: 'v22.1/SCHEME1.csv', Body: 'csv,data,for,scheme1', ContentType: 'text/csv' }) expect(PutObjectCommand).toHaveBeenCalledWith({ - Bucket: 'kms-rdf-backup-test', + Bucket: 'kms-rdf-backup-sit', Key: 'v22.1/SCHEME2.csv', Body: 'csv,data,for,scheme2', ContentType: 'text/csv' @@ -101,7 +102,7 @@ describe('exportPublishSchemeCsvToS3', () => { // Verify logging expect(logger.info).toHaveBeenCalledWith('Exporting published CSVs for version: v22.1') - expect(logger.info).toHaveBeenCalledWith('Uploading SCHEME1.csv to s3://kms-rdf-backup-test/v22.1/SCHEME1.csv') + expect(logger.info).toHaveBeenCalledWith('Uploading SCHEME1.csv to s3://kms-rdf-backup-sit/v22.1/SCHEME1.csv') expect(logger.info).toHaveBeenCalledWith('Finished exporting all published scheme CSVs to S3.') }) @@ -159,7 +160,7 @@ describe('exportPublishSchemeCsvToS3', () => { const s3SendMock = S3ClientMock.mock.results[0].value.send 
expect(s3SendMock).toHaveBeenCalledTimes(1) expect(PutObjectCommand).toHaveBeenCalledWith({ - Bucket: 'kms-rdf-backup-test', + Bucket: 'kms-rdf-backup-sit', Key: 'v22.1/SCHEME2.csv', Body: 'csv,data,for,scheme2', ContentType: 'text/csv' @@ -168,9 +169,10 @@ describe('exportPublishSchemeCsvToS3', () => { expect(logger.info).toHaveBeenCalledWith('Finished exporting all published scheme CSVs to S3.') }) - test('should use "dev" environment if NODE_ENV is not set', async () => { + test('should construct bucket name based on environment from application config', async () => { + vi.mocked(getApplicationConfig).mockReturnValue({ env: 'dev' }) + const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') - delete process.env.NODE_ENV await Promise.all([exportPublishSchemeCsvToS3(), vi.runAllTimersAsync()]) diff --git a/serverless/src/shared/__tests__/exportRdfToS3.test.js b/serverless/src/shared/__tests__/exportRdfToS3.test.js index e73c6893..676a2b87 100644 --- a/serverless/src/shared/__tests__/exportRdfToS3.test.js +++ b/serverless/src/shared/__tests__/exportRdfToS3.test.js @@ -8,6 +8,7 @@ import { } from 'vitest' import { ensureBucketAndLifecycleRule } from '@/shared/ensureBucketAndLifeCycleRule' +import { getApplicationConfig } from '@/shared/getConfig' import { getVersionMetadata } from '@/shared/getVersionMetadata' import { sparqlRequest } from '@/shared/sparqlRequest' @@ -18,6 +19,7 @@ vi.mock('@/shared/sparqlRequest') vi.mock('@aws-sdk/client-s3') vi.mock('@/shared/getVersionMetadata') vi.mock('@/shared/ensureBucketAndLifeCycleRule') +vi.mock('@/shared/getConfig') describe('exportRdfToS3', () => { beforeEach(() => { @@ -26,7 +28,10 @@ describe('exportRdfToS3', () => { vi.spyOn(console, 'error').mockImplementation(() => {}) vi.spyOn(console, 'log').mockImplementation(() => {}) - process.env.NODE_ENV = 'test' + getApplicationConfig.mockReturnValue({ + env: 'test', + defaultResponseHeaders: { 'Content-Type': 'application/json' } + }) 
getVersionMetadata.mockReturnValue({ versionName: '21.4' }) @@ -122,17 +127,8 @@ describe('exportRdfToS3', () => { })) }) - test('should use correct bucket name based on NODE_ENV', async () => { - process.env.NODE_ENV = 'staging' - await exportRdfToS3({ version: 'published' }) - - expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ - Bucket: 'kms-rdf-backup-staging' - })) - }) - - test('should use dev as default bucket name if NODE_ENV is not set', async () => { - delete process.env.NODE_ENV + test('should construct bucket name based on environment from application config', async () => { + vi.mocked(getApplicationConfig).mockReturnValue({ env: 'dev' }) await exportRdfToS3({ version: 'published' }) expect(PutObjectCommand).toHaveBeenCalledWith(expect.objectContaining({ diff --git a/serverless/src/shared/exportPublishSchemeCsvToS3.js b/serverless/src/shared/exportPublishSchemeCsvToS3.js index 186700de..2613fa8d 100644 --- a/serverless/src/shared/exportPublishSchemeCsvToS3.js +++ b/serverless/src/shared/exportPublishSchemeCsvToS3.js @@ -3,6 +3,7 @@ import { PutObjectCommand } from '@aws-sdk/client-s3' import { getS3Client } from '@/shared/awsClients' import { downloadConcepts } from '@/shared/downloadConcepts' import { getConceptSchemeDetails } from '@/shared/getConceptSchemeDetails' +import { getApplicationConfig } from '@/shared/getConfig' import { getVersionMetadata } from '@/shared/getVersionMetadata' import { logger } from '@/shared/logger' @@ -17,7 +18,7 @@ const delay = (ms) => new Promise((resolve) => { setTimeout(resolve, ms) }) */ export const exportPublishSchemeCsvToS3 = async () => { const s3ExportDelayMs = parseInt(process.env.S3_EXPORT_DELAY_MS || '100', 10) - const env = process.env.NODE_ENV || 'dev' + const { env } = getApplicationConfig() const bucketName = `kms-rdf-backup-${env}` try { diff --git a/serverless/src/shared/exportRdfToS3.js b/serverless/src/shared/exportRdfToS3.js index 476bfe15..015f711e 100644 --- 
a/serverless/src/shared/exportRdfToS3.js +++ b/serverless/src/shared/exportRdfToS3.js @@ -2,6 +2,7 @@ import { PutObjectCommand } from '@aws-sdk/client-s3' import { getS3Client } from '@/shared/awsClients' import { ensureBucketAndLifecycleRule } from '@/shared/ensureBucketAndLifeCycleRule' +import { getApplicationConfig } from '@/shared/getConfig' import { getVersionMetadata } from '@/shared/getVersionMetadata' import { sparqlRequest } from '@/shared/sparqlRequest' @@ -19,7 +20,7 @@ import { sparqlRequest } from '@/shared/sparqlRequest' * - A new file is created each time, preserving historical versions * * The function will create the S3 bucket if it doesn't exist using the stage name - * as part of the bucket name, e.g., kms-rdf-backup-${stage} + * as part of the bucket name, e.g., kms-rdf-backup-${env} * * @async * @function exportRdfToS3 @@ -29,7 +30,7 @@ import { sparqlRequest } from '@/shared/sparqlRequest' * @throws Will throw an error if the RDF data fetch fails or if there are issues with S3 operations. */ export const exportRdfToS3 = async ({ version }) => { - const env = process.env.NODE_ENV || 'dev' + const { env } = getApplicationConfig() const s3BucketName = `kms-rdf-backup-${env}` const s3Client = getS3Client() From 3e3d529b6885c19d387fd4b532b5ea93ba66a0f6 Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Wed, 22 Apr 2026 17:15:56 -0400 Subject: [PATCH 06/13] KMS-663: Default to process all versions --- archive-processor/scripts/scripts-config.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/archive-processor/scripts/scripts-config.sh b/archive-processor/scripts/scripts-config.sh index 73c714cb..b38a7308 100644 --- a/archive-processor/scripts/scripts-config.sh +++ b/archive-processor/scripts/scripts-config.sh @@ -41,7 +41,7 @@ export DOWNLOAD_DELAY_MS="100" # Optional: Comma-separated list of specific versions to download. # If this string is empty, the script will download all RDF files. 
# Example: export TO_BE_DOWNLOADED_VERSIONS="10.0,11.0,KMS-654-Testing" -export TO_BE_DOWNLOADED_VERSIONS="7.0,8.0" +export TO_BE_DOWNLOADED_VERSIONS="" # ============================================================================= @@ -51,7 +51,7 @@ export TO_BE_DOWNLOADED_VERSIONS="7.0,8.0" # Optional: Comma-separated list of specific versions to process from the # 'downloaded-rdf' directory. If empty, all found RDF files will be processed. # Example: export TO_BE_PROCESSED_VERSIONS="10.0,KMS-123" -export TO_BE_PROCESSED_VERSIONS="7.0,8.0" +export TO_BE_PROCESSED_VERSIONS="" # Delay in milliseconds to wait between downloading each concept scheme CSV file. export PROCESS_CSV_DOWNLOAD_DELAY_MS="100" @@ -70,4 +70,4 @@ export UPLOAD_DELAY_MS="100" # Optional: Comma-separated list of specific versions to upload. # If this string is empty, the script will upload all CSV files from all version folders. # Example: export TO_BE_UPLOADED_VERSIONS="10.0,11.0" -export TO_BE_UPLOADED_VERSIONS="7.0,8.0" \ No newline at end of file +export TO_BE_UPLOADED_VERSIONS="" \ No newline at end of file From 2bfe49b94f50204a816a6ea17e479ad3dced4170 Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Wed, 22 Apr 2026 17:19:28 -0400 Subject: [PATCH 07/13] KMS-663: Exclude data in archive-processor --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 9e4cf43b..8048c4be 100644 --- a/.gitignore +++ b/.gitignore @@ -45,3 +45,5 @@ loadtest/locust/ scripts/load/hammer_endpoints_sequential.js data/scheme-size* notes/ +archive-processor/downloaded-rdf +archive-processor/local-kms-csv From 9cff289cca9a3889d8680770aaf4ad336138f9a6 Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Wed, 22 Apr 2026 17:24:30 -0400 Subject: [PATCH 08/13] KMS-663: Add comment regards lambda timeout warning --- serverless/src/publisher/handler.js | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/serverless/src/publisher/handler.js 
b/serverless/src/publisher/handler.js index 82ba7783..1a6bf70a 100644 --- a/serverless/src/publisher/handler.js +++ b/serverless/src/publisher/handler.js @@ -558,6 +558,18 @@ export const publisher = async (event) => { logger.info('[publisher] No keyword events generated, skipping SNS publish') } + // ######################################################################### + // ## IMPORTANT: ARCHIVAL EXPORT TIMEOUT CONSIDERATIONS + // ## + // ## The following S3 export operations are part of the critical path for + // ## publish completion. This work MUST stay comfortably under the Lambda + // ## function timeout. + // ## + // ## If S3 exports start getting close to the timeout, we should: + // ## 1. Move this work to a separate, asynchronous Lambda function. + // ## 2. Reconsider emitting the cache-prime event *before* these exports + // ## to ensure downstream processes are not blocked. + // ######################################################################### // Export RDF and CSV data to S3 after publishing logger.info('[publisher] Starting S3 exports of RDF and CSV data.') From d60bb524d0e7287a4b3a95c006096e1f80e66cb2 Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Wed, 22 Apr 2026 18:03:52 -0400 Subject: [PATCH 09/13] KMS-663: Throw error in case scheme export fails --- .../exportPublishSchemeCsvToS3.test.js | 50 +++++++++++++++++-- .../src/shared/exportPublishSchemeCsvToS3.js | 10 ++++ 2 files changed, 57 insertions(+), 3 deletions(-) diff --git a/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js b/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js index 5cbd793e..8e8c061d 100644 --- a/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js +++ b/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js @@ -140,7 +140,7 @@ describe('exportPublishSchemeCsvToS3', () => { expect(logger.error).toHaveBeenCalledWith('Error in exportPublishSchemeCsvToS3: API failure') }) - test('should continue 
processing other schemes if one fails', async () => { + test('should throw an error if one scheme fails to export', async () => { vi.mocked(downloadConcepts).mockImplementation(async ({ conceptScheme }) => { if (conceptScheme === 'SCHEME1') throw new Error('Download failed') if (conceptScheme === 'SCHEME2') return 'csv,data,for,scheme2' @@ -150,7 +150,8 @@ describe('exportPublishSchemeCsvToS3', () => { const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') - await Promise.all([exportPublishSchemeCsvToS3(), vi.runAllTimersAsync()]) + await expect(Promise.all([exportPublishSchemeCsvToS3(), vi.runAllTimersAsync()])) + .rejects.toThrow('Failed to export CSV for schemes: SCHEME1') expect(logger.error).toHaveBeenCalledWith('Failed to process scheme SCHEME1: Download failed') // Check that the second scheme was still processed successfully @@ -166,7 +167,7 @@ describe('exportPublishSchemeCsvToS3', () => { ContentType: 'text/csv' }) - expect(logger.info).toHaveBeenCalledWith('Finished exporting all published scheme CSVs to S3.') + expect(logger.info).not.toHaveBeenCalledWith('Finished exporting all published scheme CSVs to S3.') }) test('should construct bucket name based on environment from application config', async () => { @@ -180,4 +181,47 @@ describe('exportPublishSchemeCsvToS3', () => { Bucket: 'kms-rdf-backup-dev' })) }) + + test('should report all failed schemes in the final error', async () => { + // Add a third scheme for this test + getConceptSchemeDetails.mockResolvedValue([ + { notation: 'SCHEME1' }, + { notation: 'SCHEME2' }, + { notation: 'SCHEME3' } + ]) + + vi.mocked(downloadConcepts).mockImplementation(async ({ conceptScheme }) => { + if (conceptScheme === 'SCHEME1') throw new Error('Download failed') + if (conceptScheme === 'SCHEME2') return 'csv,data,for,scheme2' + if (conceptScheme === 'SCHEME3') throw new Error('Another failure') + + return '' + }) + + const { exportPublishSchemeCsvToS3 } = await 
import('../exportPublishSchemeCsvToS3') + + await expect(Promise.all([exportPublishSchemeCsvToS3(), vi.runAllTimersAsync()])) + .rejects.toThrow('Failed to export CSV for schemes: SCHEME1, SCHEME3') + + // Verify individual errors were logged + expect(logger.error).toHaveBeenCalledWith('Failed to process scheme SCHEME1: Download failed') + expect(logger.error).toHaveBeenCalledWith('Failed to process scheme SCHEME3: Another failure') + + // Check that all schemes were attempted + expect(downloadConcepts).toHaveBeenCalledTimes(3) + + // Check that only the successful scheme was uploaded + const S3ClientMock = vi.mocked(S3Client) + const s3SendMock = S3ClientMock.mock.results[0].value.send + expect(s3SendMock).toHaveBeenCalledTimes(1) + expect(PutObjectCommand).toHaveBeenCalledWith({ + Bucket: 'kms-rdf-backup-sit', + Key: 'v22.1/SCHEME2.csv', + Body: 'csv,data,for,scheme2', + ContentType: 'text/csv' + }) + + // Verify it does NOT log the final success message + expect(logger.info).not.toHaveBeenCalledWith('Finished exporting all published scheme CSVs to S3.') + }) }) diff --git a/serverless/src/shared/exportPublishSchemeCsvToS3.js b/serverless/src/shared/exportPublishSchemeCsvToS3.js index 2613fa8d..69a75ae1 100644 --- a/serverless/src/shared/exportPublishSchemeCsvToS3.js +++ b/serverless/src/shared/exportPublishSchemeCsvToS3.js @@ -38,6 +38,8 @@ export const exportPublishSchemeCsvToS3 = async () => { logger.info(`Found ${schemes.length} published schemes to export.`) + const failedSchemes = [] + await schemes.reduce((previousPromise, scheme, index) => previousPromise.then(async () => { const { notation } = scheme try { @@ -66,9 +68,17 @@ export const exportPublishSchemeCsvToS3 = async () => { } } catch (error) { logger.error(`Failed to process scheme ${notation}: ${error.message}`) + failedSchemes.push({ + notation, + error + }) } }), Promise.resolve()) + if (failedSchemes.length > 0) { + throw new Error(`Failed to export CSV for schemes: ${failedSchemes.map(({ 
notation }) => notation).join(', ')}`) + } + logger.info('Finished exporting all published scheme CSVs to S3.') } catch (error) { logger.error(`Error in exportPublishSchemeCsvToS3: ${error.message}`) From 3657a1350b0432a9a9e57dad613e87445f825f91 Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Thu, 23 Apr 2026 11:44:59 -0400 Subject: [PATCH 10/13] KMS-663: Add local stack switch --- .../scripts/download-rdf-from-S3.js | 31 ++++++++++--------- archive-processor/scripts/scripts-config.sh | 20 ++++++++++++ archive-processor/scripts/upload-csv-to-S3.js | 31 ++++++++++++------- serverless/src/shared/awsClients.js | 4 +-- 4 files changed, 59 insertions(+), 27 deletions(-) diff --git a/archive-processor/scripts/download-rdf-from-S3.js b/archive-processor/scripts/download-rdf-from-S3.js index d8fef69a..05ed143f 100644 --- a/archive-processor/scripts/download-rdf-from-S3.js +++ b/archive-processor/scripts/download-rdf-from-S3.js @@ -3,11 +3,9 @@ import { dirname, join } from 'path' import { pipeline } from 'stream/promises' import { fileURLToPath } from 'url' -import { - GetObjectCommand, - ListObjectsV2Command, - S3Client -} from '@aws-sdk/client-s3' +import { GetObjectCommand, ListObjectsV2Command } from '@aws-sdk/client-s3' + +import { getS3Client } from '@/shared/awsClients' const scriptPath = fileURLToPath(import.meta.url) const scriptDir = dirname(scriptPath) @@ -53,13 +51,10 @@ const downloadDelayMs = parseInt(process.env.DOWNLOAD_DELAY_MS || '100', 10) const toBeDownloadedVersions = process.env.TO_BE_DOWNLOADED_VERSIONS || '' /** - * S3 Client configured for the specified region + * S3 Client from shared configuration * @type {S3Client} */ -const s3Client = new S3Client({ - region, - ...(awsProfile && { credentials: undefined }) -}) +const s3Client = getS3Client() /** * Extracts the version name from an S3 object key @@ -189,7 +184,7 @@ const listS3Objects = async () => { /** * Downloads all RDF files from S3 * - * @returns {Promise} + * @returns {Promise} The 
number of failed downloads. */ const downloadAllRdfFiles = async () => { // List all objects from S3 @@ -269,7 +264,9 @@ const main = async () => { console.log(`Output: ${outputDir}`) console.log(`Delay between downloads: ${downloadDelayMs}ms`) - if (awsProfile) { + if (process.env.AWS_ENDPOINT_URL) { + console.log(`Endpoint: ${process.env.AWS_ENDPOINT_URL}`) + } else if (awsProfile) { console.log(`Profile: ${awsProfile}`) } @@ -280,8 +277,14 @@ const main = async () => { } try { - await downloadAllRdfFiles() - console.log('\n✓ Download completed successfully!') + const failedCount = await downloadAllRdfFiles() + + if (failedCount > 0) { + console.error(`\n✗ Download process concluded with ${failedCount} failure(s).`) + process.exit(1) + } else { + console.log('\n✓ Download completed successfully!') + } } catch (error) { console.error('\n✗ Failed to download RDF files:', error.message) diff --git a/archive-processor/scripts/scripts-config.sh b/archive-processor/scripts/scripts-config.sh index b38a7308..4dba5bab 100644 --- a/archive-processor/scripts/scripts-config.sh +++ b/archive-processor/scripts/scripts-config.sh @@ -31,6 +31,26 @@ export AWS_REGION="us-east-1" +# ============================================================================= +# LocalStack Settings (Optional) +# ============================================================================= + +# Set to "true" to use LocalStack instead of AWS. +# When true, the scripts will target the LOCALSTACK_S3_ENDPOINT. +export USE_LOCALSTACK="true" + +# If USE_LOCALSTACK is true, set the AWS_ENDPOINT_URL to the LocalStack endpoint. +# Otherwise, leave it unset to use the default AWS endpoint. 
+if [ "$USE_LOCALSTACK" = "true" ]; then + export AWS_ENDPOINT_URL="http://localhost:4566" +else + unset AWS_ENDPOINT_URL +fi + + + + + # ============================================================================= # RDF Downloader Settings (for download-rdf-from-S3.js) # ============================================================================= diff --git a/archive-processor/scripts/upload-csv-to-S3.js b/archive-processor/scripts/upload-csv-to-S3.js index af9a5c58..9a895d54 100644 --- a/archive-processor/scripts/upload-csv-to-S3.js +++ b/archive-processor/scripts/upload-csv-to-S3.js @@ -7,7 +7,9 @@ import { } from 'path' import { fileURLToPath } from 'url' -import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3' +import { PutObjectCommand } from '@aws-sdk/client-s3' + +import { getS3Client } from '@/shared/awsClients' const scriptPath = fileURLToPath(import.meta.url) const scriptDir = dirname(scriptPath) @@ -52,13 +54,10 @@ const uploadDelayMs = parseInt(process.env.UPLOAD_DELAY_MS || '100', 10) const toBeUploadedVersions = process.env.TO_BE_UPLOADED_VERSIONS || '' /** - * S3 Client configured for the specified region + * S3 Client from shared configuration * @type {S3Client} */ -const s3Client = new S3Client({ - region, - ...(awsProfile && { credentials: undefined }) -}) +const s3Client = getS3Client() /** * Delays execution for the specified number of milliseconds @@ -167,7 +166,7 @@ const findCsvFiles = async (dir, versionList) => { /** * Main function to upload all CSV files * - * @returns {Promise} + * @returns {Promise} The number of failed uploads. 
*/ const uploadAllCsvFiles = async () => { const versionList = toBeUploadedVersions.split(',').map((v) => v.trim()).filter(Boolean) @@ -178,7 +177,7 @@ const uploadAllCsvFiles = async () => { if (filesToUpload.length === 0) { console.log('No CSV files found to upload.') - return + return 0 } const totalFiles = filesToUpload.length @@ -216,6 +215,8 @@ const uploadAllCsvFiles = async () => { console.log(` - ${r.filePath}: ${r.error.message}`) }) } + + return failed } /** @@ -240,7 +241,9 @@ const main = async () => { console.log(`Input: ${inputDir}`) console.log(`Delay between uploads: ${uploadDelayMs}ms`) - if (awsProfile) { + if (process.env.AWS_ENDPOINT_URL) { + console.log(`Endpoint: ${process.env.AWS_ENDPOINT_URL}`) + } else if (awsProfile) { console.log(`Profile: ${awsProfile}`) } @@ -253,8 +256,14 @@ const main = async () => { console.log('\n') try { - await uploadAllCsvFiles() - console.log('\n✓ Upload completed successfully!') + const failedCount = await uploadAllCsvFiles() + + if (failedCount > 0) { + console.error(`\n✗ Upload process concluded with ${failedCount} failure(s).`) + process.exit(1) + } else { + console.log('\n✓ Upload completed successfully!') + } } catch (error) { console.error('\n✗ Failed to upload CSV files:', error.message) diff --git a/serverless/src/shared/awsClients.js b/serverless/src/shared/awsClients.js index b3f764f7..0db5204e 100644 --- a/serverless/src/shared/awsClients.js +++ b/serverless/src/shared/awsClients.js @@ -9,8 +9,8 @@ const getClientConfig = () => { endpoint, region: process.env.AWS_REGION || 'us-east-1', credentials: { - accessKeyId: process.env.AWS_ACCESS_KEY_ID || 'test', - secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || 'test' + accessKeyId: 'test', + secretAccessKey: 'test' } } : {} From 495513e43996dddfd1ec1db3db4f7c1cfdeea66a Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Thu, 23 Apr 2026 12:32:15 -0400 Subject: [PATCH 11/13] KMS-663: Npm audit fix --- package-lock.json | 1367 
+++++++++++++++++++++++---------------------- 1 file changed, 699 insertions(+), 668 deletions(-) diff --git a/package-lock.json b/package-lock.json index b8a2b4a1..fb6e455d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -349,51 +349,51 @@ } }, "node_modules/@aws-sdk/client-cloudwatch": { - "version": "3.1030.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-cloudwatch/-/client-cloudwatch-3.1030.0.tgz", - "integrity": "sha512-LEPjGvcwAVsfZhVP0kMir9CBwRM0cFjIkSiyJ4tHPkpqIenrYTPEMwn54GfeF/k1IFMGvmCDwsOB3Ht58Oo8OA==", + "version": "3.1035.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-cloudwatch/-/client-cloudwatch-3.1035.0.tgz", + "integrity": "sha512-4fHws7ocdgck5ZYfDG3YWGnvo3F4BFOUOEuwrTBhkbTw6++jDigNZnJXXCg5QoeT6AD+kLGWEWPDOrL/l5qTTw==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/credential-provider-node": "^3.972.30", - "@aws-sdk/middleware-host-header": "^3.972.9", - "@aws-sdk/middleware-logger": "^3.972.9", - "@aws-sdk/middleware-recursion-detection": "^3.972.10", - "@aws-sdk/middleware-user-agent": "^3.972.29", - "@aws-sdk/region-config-resolver": "^3.972.11", - "@aws-sdk/types": "^3.973.7", - "@aws-sdk/util-endpoints": "^3.996.6", - "@aws-sdk/util-user-agent-browser": "^3.972.9", - "@aws-sdk/util-user-agent-node": "^3.973.15", - "@smithy/config-resolver": "^4.4.14", - "@smithy/core": "^3.23.14", - "@smithy/fetch-http-handler": "^5.3.16", - "@smithy/hash-node": "^4.2.13", - "@smithy/invalid-dependency": "^4.2.13", - "@smithy/middleware-compression": "^4.3.43", - "@smithy/middleware-content-length": "^4.2.13", - "@smithy/middleware-endpoint": "^4.4.29", - "@smithy/middleware-retry": "^4.5.0", - "@smithy/middleware-serde": "^4.2.17", - "@smithy/middleware-stack": "^4.2.13", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/node-http-handler": "^4.5.2", - "@smithy/protocol-http": "^5.3.13", - 
"@smithy/smithy-client": "^4.12.9", - "@smithy/types": "^4.14.0", - "@smithy/url-parser": "^4.2.13", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/credential-provider-node": "^3.972.35", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-user-agent": "^3.972.34", + "@aws-sdk/region-config-resolver": "^3.972.13", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.20", + "@smithy/config-resolver": "^4.4.17", + "@smithy/core": "^3.23.16", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/middleware-compression": "^4.3.45", + "@smithy/middleware-content-length": "^4.2.14", + "@smithy/middleware-endpoint": "^4.4.31", + "@smithy/middleware-retry": "^4.5.4", + "@smithy/middleware-serde": "^4.2.19", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": "^4.6.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", - "@smithy/util-defaults-mode-browser": "^4.3.45", - "@smithy/util-defaults-mode-node": "^4.2.49", - "@smithy/util-endpoints": "^3.3.4", - "@smithy/util-middleware": "^4.2.13", - "@smithy/util-retry": "^4.3.0", + "@smithy/util-defaults-mode-browser": "^4.3.48", + "@smithy/util-defaults-mode-node": "^4.2.53", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.3", "@smithy/util-utf8": "^4.2.2", - "@smithy/util-waiter": "^4.2.15", + "@smithy/util-waiter": "^4.2.16", "tslib": "^2.6.2" }, "engines": { @@ -401,15 +401,15 @@ } }, 
"node_modules/@aws-sdk/client-cloudwatch/node_modules/@aws-sdk/util-endpoints": { - "version": "3.996.6", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.6.tgz", - "integrity": "sha512-2nUQ+2ih7CShuKHpGSIYvvAIOHy52dOZguYG36zptBukhw6iFwcvGfG0tes0oZFWQqEWvgZe9HLWaNlvXGdOrg==", + "version": "3.996.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.8.tgz", + "integrity": "sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", - "@smithy/types": "^4.14.0", - "@smithy/url-parser": "^4.2.13", - "@smithy/util-endpoints": "^3.3.4", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-endpoints": "^3.4.2", "tslib": "^2.6.2" }, "engines": { @@ -417,49 +417,50 @@ } }, "node_modules/@aws-sdk/client-eventbridge": { - "version": "3.997.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-eventbridge/-/client-eventbridge-3.997.0.tgz", - "integrity": "sha512-bLyqdVI0L7p1lVYlzqYCUkLvImawx0F+lu0g/oZiom1m6O1jzFDkZCqN7x2m0Jhbq9hUQyq7jijThzHhR9CdpQ==", + "version": "3.1035.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-eventbridge/-/client-eventbridge-3.1035.0.tgz", + "integrity": "sha512-1YJAR0WkwigWq+/KttJ9phAgkq4tNz4yS94iJ/GsPMFj988VuTk+UlHy8VkasA+P1PqXg9jjFF8kCEIZNdC4uQ==", + "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.13", - "@aws-sdk/credential-provider-node": "^3.972.12", - "@aws-sdk/middleware-host-header": "^3.972.4", - "@aws-sdk/middleware-logger": "^3.972.4", - "@aws-sdk/middleware-recursion-detection": "^3.972.4", - "@aws-sdk/middleware-user-agent": "^3.972.13", - "@aws-sdk/region-config-resolver": "^3.972.4", - "@aws-sdk/signature-v4-multi-region": "^3.996.1", - "@aws-sdk/types": "^3.973.2", - 
"@aws-sdk/util-endpoints": "^3.996.1", - "@aws-sdk/util-user-agent-browser": "^3.972.4", - "@aws-sdk/util-user-agent-node": "^3.972.12", - "@smithy/config-resolver": "^4.4.7", - "@smithy/core": "^3.23.4", - "@smithy/fetch-http-handler": "^5.3.10", - "@smithy/hash-node": "^4.2.9", - "@smithy/invalid-dependency": "^4.2.9", - "@smithy/middleware-content-length": "^4.2.9", - "@smithy/middleware-endpoint": "^4.4.18", - "@smithy/middleware-retry": "^4.4.35", - "@smithy/middleware-serde": "^4.2.10", - "@smithy/middleware-stack": "^4.2.9", - "@smithy/node-config-provider": "^4.3.9", - "@smithy/node-http-handler": "^4.4.11", - "@smithy/protocol-http": "^5.3.9", - "@smithy/smithy-client": "^4.11.7", - "@smithy/types": "^4.12.1", - "@smithy/url-parser": "^4.2.9", - "@smithy/util-base64": "^4.3.1", - "@smithy/util-body-length-browser": "^4.2.1", - "@smithy/util-body-length-node": "^4.2.2", - "@smithy/util-defaults-mode-browser": "^4.3.34", - "@smithy/util-defaults-mode-node": "^4.2.37", - "@smithy/util-endpoints": "^3.2.9", - "@smithy/util-middleware": "^4.2.9", - "@smithy/util-retry": "^4.2.9", - "@smithy/util-utf8": "^4.2.1", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/credential-provider-node": "^3.972.35", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-user-agent": "^3.972.34", + "@aws-sdk/region-config-resolver": "^3.972.13", + "@aws-sdk/signature-v4-multi-region": "^3.996.21", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.20", + "@smithy/config-resolver": "^4.4.17", + "@smithy/core": "^3.23.16", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/middleware-content-length": "^4.2.14", + "@smithy/middleware-endpoint": "^4.4.31", + 
"@smithy/middleware-retry": "^4.5.4", + "@smithy/middleware-serde": "^4.2.19", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": "^4.6.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-body-length-browser": "^4.2.2", + "@smithy/util-body-length-node": "^4.2.3", + "@smithy/util-defaults-mode-browser": "^4.3.48", + "@smithy/util-defaults-mode-node": "^4.2.53", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.3", + "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, "engines": { @@ -467,15 +468,16 @@ } }, "node_modules/@aws-sdk/client-eventbridge/node_modules/@aws-sdk/signature-v4-multi-region": { - "version": "3.996.1", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.996.1.tgz", - "integrity": "sha512-Mj4npuEtVHFjGZHTBwhBvBzmgKHY7UsfroZWWzjpVP5YJaMTPeihsotuQLba5uQthEZyaeWs6dTu3Shr0qKFFw==", - "dependencies": { - "@aws-sdk/middleware-sdk-s3": "^3.972.13", - "@aws-sdk/types": "^3.973.2", - "@smithy/protocol-http": "^5.3.9", - "@smithy/signature-v4": "^5.3.9", - "@smithy/types": "^4.12.1", + "version": "3.996.21", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.996.21.tgz", + "integrity": "sha512-3EpT+C0QdmTMB5aVeJ5odWSLt9vg2oGzUXl1xvUazKGlkr9OBYnegNWqhhjGgZdv8RmSi5eS8nqqB+euNP2aqA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-sdk-s3": "^3.972.33", + "@aws-sdk/types": "^3.973.8", + "@smithy/protocol-http": "^5.3.14", + "@smithy/signature-v4": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -483,14 +485,15 @@ } }, "node_modules/@aws-sdk/client-eventbridge/node_modules/@aws-sdk/util-endpoints": { - "version": "3.996.1", - 
"resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.1.tgz", - "integrity": "sha512-7cJyd+M5i0IoqWkJa1KFx8KNCGIx+Ywu+lT53KpqX7ReVwz03DCKUqvZ/y65vdKwo9w9/HptSAeLDluO5MpGIg==", - "dependencies": { - "@aws-sdk/types": "^3.973.2", - "@smithy/types": "^4.12.1", - "@smithy/url-parser": "^4.2.9", - "@smithy/util-endpoints": "^3.2.9", + "version": "3.996.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.8.tgz", + "integrity": "sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-endpoints": "^3.4.2", "tslib": "^2.6.2" }, "engines": { @@ -702,48 +705,48 @@ } }, "node_modules/@aws-sdk/client-sns": { - "version": "3.1022.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sns/-/client-sns-3.1022.0.tgz", - "integrity": "sha512-HuvwJDnI8TGw2O2yof+OF6JIH5fiW5nyHtMllmYVighOtDrcDSzuYavS0GIcY8Y1dWJUJwLzNig+E7GsbKl/SQ==", + "version": "3.1035.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sns/-/client-sns-3.1035.0.tgz", + "integrity": "sha512-eUQKKT2DzAPYLgAwH1/71RP88VvGAFY0h/mEqP4LCEGJ2fBv0arnNuPTYsiltDJxo9ZCde4h8sxM50dbQFfPfQ==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/credential-provider-node": "^3.972.29", - "@aws-sdk/middleware-host-header": "^3.972.8", - "@aws-sdk/middleware-logger": "^3.972.8", - "@aws-sdk/middleware-recursion-detection": "^3.972.9", - "@aws-sdk/middleware-user-agent": "^3.972.28", - "@aws-sdk/region-config-resolver": "^3.972.10", - "@aws-sdk/types": "^3.973.6", - "@aws-sdk/util-endpoints": "^3.996.5", - "@aws-sdk/util-user-agent-browser": "^3.972.8", - "@aws-sdk/util-user-agent-node": "^3.973.14", - "@smithy/config-resolver": "^4.4.13", - 
"@smithy/core": "^3.23.13", - "@smithy/fetch-http-handler": "^5.3.15", - "@smithy/hash-node": "^4.2.12", - "@smithy/invalid-dependency": "^4.2.12", - "@smithy/middleware-content-length": "^4.2.12", - "@smithy/middleware-endpoint": "^4.4.28", - "@smithy/middleware-retry": "^4.4.46", - "@smithy/middleware-serde": "^4.2.16", - "@smithy/middleware-stack": "^4.2.12", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/node-http-handler": "^4.5.1", - "@smithy/protocol-http": "^5.3.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/credential-provider-node": "^3.972.35", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-user-agent": "^3.972.34", + "@aws-sdk/region-config-resolver": "^3.972.13", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.20", + "@smithy/config-resolver": "^4.4.17", + "@smithy/core": "^3.23.16", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/middleware-content-length": "^4.2.14", + "@smithy/middleware-endpoint": "^4.4.31", + "@smithy/middleware-retry": "^4.5.4", + "@smithy/middleware-serde": "^4.2.19", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": "^4.6.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", - "@smithy/util-defaults-mode-browser": "^4.3.44", - "@smithy/util-defaults-mode-node": "^4.2.48", - "@smithy/util-endpoints": "^3.3.3", - 
"@smithy/util-middleware": "^4.2.12", - "@smithy/util-retry": "^4.2.13", + "@smithy/util-defaults-mode-browser": "^4.3.48", + "@smithy/util-defaults-mode-node": "^4.2.53", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.3", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, @@ -752,15 +755,15 @@ } }, "node_modules/@aws-sdk/client-sns/node_modules/@aws-sdk/util-endpoints": { - "version": "3.996.5", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.5.tgz", - "integrity": "sha512-Uh93L5sXFNbyR5sEPMzUU8tJ++Ku97EY4udmC01nB8Zu+xfBPwpIwJ6F7snqQeq8h2pf+8SGN5/NoytfKgYPIw==", + "version": "3.996.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.8.tgz", + "integrity": "sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", - "@smithy/util-endpoints": "^3.3.3", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-endpoints": "^3.4.2", "tslib": "^2.6.2" }, "engines": { @@ -768,50 +771,50 @@ } }, "node_modules/@aws-sdk/client-sqs": { - "version": "3.1022.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sqs/-/client-sqs-3.1022.0.tgz", - "integrity": "sha512-b9MzUJYo+/HKi4LRfoCFvb+aK/PA3BhxokI3YDKMsRS1AnhPanUvSFPe9cV57U/vX+jXuXQZfUBFcqNa3Va+Mg==", + "version": "3.1035.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sqs/-/client-sqs-3.1035.0.tgz", + "integrity": "sha512-GeYRZw5RgyxE05eHdUrRGi4YRtj1kjnvPvCms5Hwhp6Mo73veakUjclmB7LqlezOUWJS9EIEK8zVICiMzcn02w==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.26", - "@aws-sdk/credential-provider-node": "^3.972.29", - 
"@aws-sdk/middleware-host-header": "^3.972.8", - "@aws-sdk/middleware-logger": "^3.972.8", - "@aws-sdk/middleware-recursion-detection": "^3.972.9", - "@aws-sdk/middleware-sdk-sqs": "^3.972.18", - "@aws-sdk/middleware-user-agent": "^3.972.28", - "@aws-sdk/region-config-resolver": "^3.972.10", - "@aws-sdk/types": "^3.973.6", - "@aws-sdk/util-endpoints": "^3.996.5", - "@aws-sdk/util-user-agent-browser": "^3.972.8", - "@aws-sdk/util-user-agent-node": "^3.973.14", - "@smithy/config-resolver": "^4.4.13", - "@smithy/core": "^3.23.13", - "@smithy/fetch-http-handler": "^5.3.15", - "@smithy/hash-node": "^4.2.12", - "@smithy/invalid-dependency": "^4.2.12", - "@smithy/md5-js": "^4.2.12", - "@smithy/middleware-content-length": "^4.2.12", - "@smithy/middleware-endpoint": "^4.4.28", - "@smithy/middleware-retry": "^4.4.46", - "@smithy/middleware-serde": "^4.2.16", - "@smithy/middleware-stack": "^4.2.12", - "@smithy/node-config-provider": "^4.3.12", - "@smithy/node-http-handler": "^4.5.1", - "@smithy/protocol-http": "^5.3.12", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/credential-provider-node": "^3.972.35", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-sdk-sqs": "^3.972.21", + "@aws-sdk/middleware-user-agent": "^3.972.34", + "@aws-sdk/region-config-resolver": "^3.972.13", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.20", + "@smithy/config-resolver": "^4.4.17", + "@smithy/core": "^3.23.16", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/md5-js": "^4.2.14", + "@smithy/middleware-content-length": "^4.2.14", + "@smithy/middleware-endpoint": "^4.4.31", + 
"@smithy/middleware-retry": "^4.5.4", + "@smithy/middleware-serde": "^4.2.19", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": "^4.6.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", - "@smithy/util-defaults-mode-browser": "^4.3.44", - "@smithy/util-defaults-mode-node": "^4.2.48", - "@smithy/util-endpoints": "^3.3.3", - "@smithy/util-middleware": "^4.2.12", - "@smithy/util-retry": "^4.2.13", + "@smithy/util-defaults-mode-browser": "^4.3.48", + "@smithy/util-defaults-mode-node": "^4.2.53", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.3", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, @@ -820,15 +823,15 @@ } }, "node_modules/@aws-sdk/client-sqs/node_modules/@aws-sdk/util-endpoints": { - "version": "3.996.5", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.5.tgz", - "integrity": "sha512-Uh93L5sXFNbyR5sEPMzUU8tJ++Ku97EY4udmC01nB8Zu+xfBPwpIwJ6F7snqQeq8h2pf+8SGN5/NoytfKgYPIw==", + "version": "3.996.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.8.tgz", + "integrity": "sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@smithy/types": "^4.13.1", - "@smithy/url-parser": "^4.2.12", - "@smithy/util-endpoints": "^3.3.3", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-endpoints": "^3.4.2", "tslib": "^2.6.2" }, "engines": { @@ -836,22 +839,23 @@ } }, "node_modules/@aws-sdk/core": { - "version": "3.973.27", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/core/-/core-3.973.27.tgz", - "integrity": "sha512-CUZ5m8hwMCH6OYI4Li/WgMfIEx10Q2PLI9Y3XOUTPGZJ53aZ0007jCv+X/ywsaERyKPdw5MRZWk877roQksQ4A==", + "version": "3.974.4", + "resolved": "https://registry.npmjs.org/@aws-sdk/core/-/core-3.974.4.tgz", + "integrity": "sha512-EbVgyzQ83/Lf6oh1O4vYY47tuYw3Aosthh865LNU77KyotKz+uvEBNmsl/bSVS/vG+IU39mCqcOHrnhmhF4lug==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", - "@aws-sdk/xml-builder": "^3.972.17", - "@smithy/core": "^3.23.14", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/property-provider": "^4.2.13", - "@smithy/protocol-http": "^5.3.13", - "@smithy/signature-v4": "^5.3.13", - "@smithy/smithy-client": "^4.12.9", - "@smithy/types": "^4.14.0", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/xml-builder": "^3.972.18", + "@smithy/core": "^3.23.16", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/signature-v4": "^5.3.14", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", - "@smithy/util-middleware": "^4.2.13", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.3", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, @@ -860,11 +864,12 @@ } }, "node_modules/@aws-sdk/crc64-nvme": { - "version": "3.972.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/crc64-nvme/-/crc64-nvme-3.972.0.tgz", - "integrity": "sha512-ThlLhTqX68jvoIVv+pryOdb5coP1cX1/MaTbB9xkGDCbWbsqQcLqzPxuSoW1DCnAAIacmXCWpzUNOB9pv+xXQw==", + "version": "3.972.7", + "resolved": "https://registry.npmjs.org/@aws-sdk/crc64-nvme/-/crc64-nvme-3.972.7.tgz", + "integrity": "sha512-QUagVVBbC8gODCF6e1aV0mE2TXWB9Opz4k8EJFdNrujUVQm5R4AjJa1mpOqzwOuROBzqJU9zawzig7M96L8Ejg==", + "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.12.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -872,15 +877,15 @@ } }, 
"node_modules/@aws-sdk/credential-provider-env": { - "version": "3.972.25", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.972.25.tgz", - "integrity": "sha512-6QfI0wv4jpG5CrdO/AO0JfZ2ux+tKwJPrUwmvxXF50vI5KIypKVGNF6b4vlkYEnKumDTI1NX2zUBi8JoU5QU3A==", + "version": "3.972.30", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.972.30.tgz", + "integrity": "sha512-dHpeqa29a0cBYq/h59IC2EK3AphLY96nKy4F35kBtiz9GuKDc32UYRTgjZaF8uuJCnqgw9omUZKR+9myyDHC2A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/types": "^3.973.7", - "@smithy/property-provider": "^4.2.13", - "@smithy/types": "^4.14.0", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -888,20 +893,20 @@ } }, "node_modules/@aws-sdk/credential-provider-http": { - "version": "3.972.27", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.972.27.tgz", - "integrity": "sha512-3V3Usj9Gs93h865DqN4M2NWJhC5kXU9BvZskfN3+69omuYlE3TZxOEcVQtBGLOloJB7BVfJKXVLqeNhOzHqSlQ==", + "version": "3.972.32", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-http/-/credential-provider-http-3.972.32.tgz", + "integrity": "sha512-A+ZTT//Mswkf9DFEM6XlngwOtYdD8X4CUcoZ2wdpgI8cCs9mcGeuhgTwbGJvealub/MeONOaUr3FbRPMKmTDjg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/types": "^3.973.7", - "@smithy/fetch-http-handler": "^5.3.16", - "@smithy/node-http-handler": "^4.5.2", - "@smithy/property-provider": "^4.2.13", - "@smithy/protocol-http": "^5.3.13", - "@smithy/smithy-client": "^4.12.9", - "@smithy/types": "^4.14.0", - "@smithy/util-stream": "^4.5.22", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/types": "^3.973.8", + "@smithy/fetch-http-handler": "^5.3.17", + 
"@smithy/node-http-handler": "^4.6.0", + "@smithy/property-provider": "^4.2.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", + "@smithy/util-stream": "^4.5.24", "tslib": "^2.6.2" }, "engines": { @@ -909,14 +914,14 @@ } }, "node_modules/@aws-sdk/credential-provider-http/node_modules/@smithy/util-stream": { - "version": "4.5.22", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.22.tgz", - "integrity": "sha512-3H8iq/0BfQjUs2/4fbHZ9aG9yNzcuZs24LPkcX1Q7Z+qpqaGM8+qbGmE8zo9m2nCRgamyvS98cHdcWvR6YUsew==", + "version": "4.5.25", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.25.tgz", + "integrity": "sha512-/PFpG4k8Ze8Ei+mMKj3oiPICYekthuzePZMgZbCqMiXIHHf4n2aZ4Ps0aSRShycFTGuj/J6XldmC0x0DwednIA==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.16", - "@smithy/node-http-handler": "^4.5.2", - "@smithy/types": "^4.14.0", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", @@ -928,24 +933,24 @@ } }, "node_modules/@aws-sdk/credential-provider-ini": { - "version": "3.972.29", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.972.29.tgz", - "integrity": "sha512-SiBuAnXecCbT/OpAf3vqyI/AVE3mTaYr9ShXLybxZiPLBiPCCOIWSGAtYYGQWMRvobBTiqOewaB+wcgMMZI2Aw==", + "version": "3.972.34", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.972.34.tgz", + "integrity": "sha512-MoRc7tLnx3JpFkV2R826enEfBUVN8o9Cc7y3hnbMwiWzL/VJhgfxRQzHkEL9vWorMWP7tibltsRcLoid9fsVdw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/credential-provider-env": "^3.972.25", - "@aws-sdk/credential-provider-http": "^3.972.27", - 
"@aws-sdk/credential-provider-login": "^3.972.29", - "@aws-sdk/credential-provider-process": "^3.972.25", - "@aws-sdk/credential-provider-sso": "^3.972.29", - "@aws-sdk/credential-provider-web-identity": "^3.972.29", - "@aws-sdk/nested-clients": "^3.996.19", - "@aws-sdk/types": "^3.973.7", - "@smithy/credential-provider-imds": "^4.2.13", - "@smithy/property-provider": "^4.2.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/credential-provider-env": "^3.972.30", + "@aws-sdk/credential-provider-http": "^3.972.32", + "@aws-sdk/credential-provider-login": "^3.972.34", + "@aws-sdk/credential-provider-process": "^3.972.30", + "@aws-sdk/credential-provider-sso": "^3.972.34", + "@aws-sdk/credential-provider-web-identity": "^3.972.34", + "@aws-sdk/nested-clients": "^3.997.2", + "@aws-sdk/types": "^3.973.8", + "@smithy/credential-provider-imds": "^4.2.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -953,18 +958,18 @@ } }, "node_modules/@aws-sdk/credential-provider-login": { - "version": "3.972.29", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.972.29.tgz", - "integrity": "sha512-OGOslTbOlxXexKMqhxCEbBQbUIfuhGxU5UXw3Fm56ypXHvrXH4aTt/xb5Y884LOoteP1QST1lVZzHfcTnWhiPQ==", + "version": "3.972.34", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-login/-/credential-provider-login-3.972.34.tgz", + "integrity": "sha512-XVSklkRRQ/CQDmv3VVFdZRl5hTFgncFhZrLyi0Ai4LZk5o3jpY5HIfuTK7ad7tixPKa+iQmL9+vg9qNyYZB+nw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/nested-clients": "^3.996.19", - "@aws-sdk/types": "^3.973.7", - "@smithy/property-provider": "^4.2.13", - "@smithy/protocol-http": "^5.3.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", + 
"@aws-sdk/core": "^3.974.4", + "@aws-sdk/nested-clients": "^3.997.2", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -972,22 +977,22 @@ } }, "node_modules/@aws-sdk/credential-provider-node": { - "version": "3.972.30", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.972.30.tgz", - "integrity": "sha512-FMnAnWxc8PG+ZrZ2OBKzY4luCUJhe9CG0B9YwYr4pzrYGLXBS2rl+UoUvjGbAwiptxRL6hyA3lFn03Bv1TLqTw==", + "version": "3.972.35", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.972.35.tgz", + "integrity": "sha512-nVrY7AdGfzYgAa/jd9m06p3ES7QQDaB7zN9c+vXnVXxBRkAs9MjRDPB5AKogWuC6phddltfvHGFqLDJmyU9u/A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/credential-provider-env": "^3.972.25", - "@aws-sdk/credential-provider-http": "^3.972.27", - "@aws-sdk/credential-provider-ini": "^3.972.29", - "@aws-sdk/credential-provider-process": "^3.972.25", - "@aws-sdk/credential-provider-sso": "^3.972.29", - "@aws-sdk/credential-provider-web-identity": "^3.972.29", - "@aws-sdk/types": "^3.973.7", - "@smithy/credential-provider-imds": "^4.2.13", - "@smithy/property-provider": "^4.2.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", + "@aws-sdk/credential-provider-env": "^3.972.30", + "@aws-sdk/credential-provider-http": "^3.972.32", + "@aws-sdk/credential-provider-ini": "^3.972.34", + "@aws-sdk/credential-provider-process": "^3.972.30", + "@aws-sdk/credential-provider-sso": "^3.972.34", + "@aws-sdk/credential-provider-web-identity": "^3.972.34", + "@aws-sdk/types": "^3.973.8", + "@smithy/credential-provider-imds": "^4.2.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { 
@@ -995,16 +1000,16 @@ } }, "node_modules/@aws-sdk/credential-provider-process": { - "version": "3.972.25", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.972.25.tgz", - "integrity": "sha512-HR7ynNRdNhNsdVCOCegy1HsfsRzozCOPtD3RzzT1JouuaHobWyRfJzCBue/3jP7gECHt+kQyZUvwg/cYLWurNQ==", + "version": "3.972.30", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.972.30.tgz", + "integrity": "sha512-McJPomNTSEo+C6UA3Zq6pFrcyTUaVsoPPBOvbOHAoIFPc8Z2CMLndqFJOnB+9bVFiBTWQLutlVGmrocBbvv4MQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/types": "^3.973.7", - "@smithy/property-provider": "^4.2.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1012,18 +1017,18 @@ } }, "node_modules/@aws-sdk/credential-provider-sso": { - "version": "3.972.29", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.972.29.tgz", - "integrity": "sha512-HWv4SEq3jZDYPlwryZVef97+U8CxxRos5mK8sgGO1dQaFZpV5giZLzqGE5hkDmh2csYcBO2uf5XHjPTpZcJlig==", + "version": "3.972.34", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.972.34.tgz", + "integrity": "sha512-WngYb2K+/yhkDOmDfAOjoCa9Ja3he0DZiAraboKwgWoVRkajDIcDYBCVbUTxtTUldvQoe7VvHLTrBNxvftN1aQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/nested-clients": "^3.996.19", - "@aws-sdk/token-providers": "3.1026.0", - "@aws-sdk/types": "^3.973.7", - "@smithy/property-provider": "^4.2.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", + "@aws-sdk/core": "^3.974.4", + 
"@aws-sdk/nested-clients": "^3.997.2", + "@aws-sdk/token-providers": "3.1035.0", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1031,17 +1036,17 @@ } }, "node_modules/@aws-sdk/credential-provider-web-identity": { - "version": "3.972.29", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.972.29.tgz", - "integrity": "sha512-PdMBza1WEKEUPFEmMGCfnU2RYCz9MskU2e8JxjyUOsMKku7j9YaDKvbDi2dzC0ihFoM6ods2SbhfAAro+Gwlew==", + "version": "3.972.34", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.972.34.tgz", + "integrity": "sha512-5KLUH+XmSNRj6amJiJSrPsCxU5l/PYDfxyqPa1MxWhHoQC3sxvGPrSib3IE+HQlfRA4e2kO0bnJy7HJdjvpuuA==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/nested-clients": "^3.996.19", - "@aws-sdk/types": "^3.973.7", - "@smithy/property-provider": "^4.2.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/nested-clients": "^3.997.2", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1080,23 +1085,24 @@ } }, "node_modules/@aws-sdk/middleware-flexible-checksums": { - "version": "3.972.3", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.972.3.tgz", - "integrity": "sha512-MkNGJ6qB9kpsLwL18kC/ZXppsJbftHVGCisqpEVbTQsum8CLYDX1Bmp/IvhRGNxsqCO2w9/4PwhDKBjG3Uvr4Q==", + "version": "3.974.12", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.974.12.tgz", + "integrity": 
"sha512-v7n0//P95g+UnmyjCpJkDJFB+EP/9Wx/fQJC5BEiK9Y7VHgmhh6RNPVbqDYz9gsz8mXnxzyYt3tCEVJ1kzo01w==", + "license": "Apache-2.0", "dependencies": { "@aws-crypto/crc32": "5.2.0", "@aws-crypto/crc32c": "5.2.0", "@aws-crypto/util": "5.2.0", - "@aws-sdk/core": "^3.973.5", - "@aws-sdk/crc64-nvme": "3.972.0", - "@aws-sdk/types": "^3.973.1", - "@smithy/is-array-buffer": "^4.2.0", - "@smithy/node-config-provider": "^4.3.8", - "@smithy/protocol-http": "^5.3.8", - "@smithy/types": "^4.12.0", - "@smithy/util-middleware": "^4.2.8", - "@smithy/util-stream": "^4.5.10", - "@smithy/util-utf8": "^4.2.0", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/crc64-nvme": "^3.972.7", + "@aws-sdk/types": "^3.973.8", + "@smithy/is-array-buffer": "^4.2.2", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-stream": "^4.5.24", + "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, "engines": { @@ -1104,17 +1110,18 @@ } }, "node_modules/@aws-sdk/middleware-flexible-checksums/node_modules/@smithy/util-stream": { - "version": "4.5.11", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.11.tgz", - "integrity": "sha512-lKmZ0S/3Qj2OF5H1+VzvDLb6kRxGzZHq6f3rAsoSu5cTLGsn3v3VQBA8czkNNXlLjoFEtVu3OQT2jEeOtOE2CA==", + "version": "4.5.25", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.25.tgz", + "integrity": "sha512-/PFpG4k8Ze8Ei+mMKj3oiPICYekthuzePZMgZbCqMiXIHHf4n2aZ4Ps0aSRShycFTGuj/J6XldmC0x0DwednIA==", + "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.9", - "@smithy/node-http-handler": "^4.4.9", - "@smithy/types": "^4.12.0", - "@smithy/util-base64": "^4.3.0", - "@smithy/util-buffer-from": "^4.2.0", - "@smithy/util-hex-encoding": "^4.2.0", - "@smithy/util-utf8": "^4.2.0", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/types": "^4.14.1", + 
"@smithy/util-base64": "^4.3.2", + "@smithy/util-buffer-from": "^4.2.2", + "@smithy/util-hex-encoding": "^4.2.2", + "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, "engines": { @@ -1122,14 +1129,14 @@ } }, "node_modules/@aws-sdk/middleware-host-header": { - "version": "3.972.9", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.972.9.tgz", - "integrity": "sha512-je5vRdNw4SkuTnmRbFZLdye4sQ0faLt8kwka5wnnSU30q1mHO4X+idGEJOOE+Tn1ME7Oryn05xxkDvIb3UaLaQ==", + "version": "3.972.10", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.972.10.tgz", + "integrity": "sha512-IJSsIMeVQ8MMCPbuh1AbltkFhLBLXn7aejzfX5YKT/VLDHn++Dcz8886tXckE+wQssyPUhaXrJhdakO2VilRhg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", + "@aws-sdk/types": "^3.973.8", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1150,13 +1157,13 @@ } }, "node_modules/@aws-sdk/middleware-logger": { - "version": "3.972.9", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.972.9.tgz", - "integrity": "sha512-HsVgDrruhqI28RkaXALm8grJ7Agc1wF6Et0xh6pom8NdO2VdO/SD9U/tPwUjewwK/pVoka+EShBxyCvgsPCtog==", + "version": "3.972.10", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.972.10.tgz", + "integrity": "sha512-OOuGvvz1Dm20SjZo5oEBePFqxt5nf8AwkNDSyUHvD9/bfNASmstcYxFAHUowy4n6Io7mWUZ04JURZwSBvyQanQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", - "@smithy/types": "^4.14.0", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1164,15 +1171,15 @@ } }, "node_modules/@aws-sdk/middleware-recursion-detection": { - "version": "3.972.10", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.972.10.tgz", - "integrity": "sha512-RVQQbq5orQ/GHUnXvqEOj2HHPBJm+mM+ySwZKS5UaLBwra5ugRtiH09PLUoOZRl7a1YzaOzXSuGbn9iD5j60WQ==", + "version": "3.972.11", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.972.11.tgz", + "integrity": "sha512-+zz6f79Kj9V5qFK2P+D8Ehjnw4AhphAlCAsPjUqEcInA9umtSSKMrHbSagEeOIsDNuvVrH98bjRHcyQukTrhaQ==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", + "@aws-sdk/types": "^3.973.8", "@aws/lambda-invoke-store": "^0.2.2", - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1180,23 +1187,24 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3": { - "version": "3.972.13", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.972.13.tgz", - "integrity": "sha512-rGBz1n6PFxg1+5mnN1/IczesPwx0W39DZt2JPjqPiZAZ7LAqH8FS4AsawSNZqr+UFJfqtTXYpeLQnMfbMAgHhg==", + "version": "3.972.33", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.972.33.tgz", + "integrity": "sha512-n8Eh/+kq3u/EodLr8n6sQupu03QGjf122RHXCTGLaHSkavz/2beSKpRlq2oDgfmJZNkAkWF113xbyaUmyOd+YA==", + "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.13", - "@aws-sdk/types": "^3.973.2", - "@aws-sdk/util-arn-parser": "^3.972.2", - "@smithy/core": "^3.23.4", - "@smithy/node-config-provider": "^4.3.9", - "@smithy/protocol-http": "^5.3.9", - "@smithy/signature-v4": "^5.3.9", - "@smithy/smithy-client": "^4.11.7", - "@smithy/types": "^4.12.1", - "@smithy/util-config-provider": "^4.2.1", - "@smithy/util-middleware": "^4.2.9", - "@smithy/util-stream": "^4.5.14", - "@smithy/util-utf8": "^4.2.1", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-arn-parser": "^3.972.3", + 
"@smithy/core": "^3.23.16", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/signature-v4": "^5.3.14", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", + "@smithy/util-config-provider": "^4.2.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-stream": "^4.5.24", + "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, "engines": { @@ -1204,17 +1212,18 @@ } }, "node_modules/@aws-sdk/middleware-sdk-s3/node_modules/@smithy/util-stream": { - "version": "4.5.15", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.15.tgz", - "integrity": "sha512-OlOKnaqnkU9X+6wEkd7mN+WB7orPbCVDauXOj22Q7VtiTkvy7ZdSsOg4QiNAZMgI4OkvNf+/VLUC3VXkxuWJZw==", - "dependencies": { - "@smithy/fetch-http-handler": "^5.3.11", - "@smithy/node-http-handler": "^4.4.12", - "@smithy/types": "^4.13.0", - "@smithy/util-base64": "^4.3.1", - "@smithy/util-buffer-from": "^4.2.1", - "@smithy/util-hex-encoding": "^4.2.1", - "@smithy/util-utf8": "^4.2.1", + "version": "4.5.25", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.25.tgz", + "integrity": "sha512-/PFpG4k8Ze8Ei+mMKj3oiPICYekthuzePZMgZbCqMiXIHHf4n2aZ4Ps0aSRShycFTGuj/J6XldmC0x0DwednIA==", + "license": "Apache-2.0", + "dependencies": { + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/types": "^4.14.1", + "@smithy/util-base64": "^4.3.2", + "@smithy/util-buffer-from": "^4.2.2", + "@smithy/util-hex-encoding": "^4.2.2", + "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, "engines": { @@ -1222,14 +1231,14 @@ } }, "node_modules/@aws-sdk/middleware-sdk-sqs": { - "version": "3.972.18", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-sqs/-/middleware-sdk-sqs-3.972.18.tgz", - "integrity": "sha512-BdsGFuBJUX5PnuZkEV6JRB5g/6ts7iGmN3pXwyoiGCCM2HHXrlFqjkBs+iPX7yO884WqYeQJpme7nwn4DzU5xw==", + "version": "3.972.21", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/middleware-sdk-sqs/-/middleware-sdk-sqs-3.972.21.tgz", + "integrity": "sha512-INCEC8NvDWBnsnVj8RHbJiKyrD6fLFycwccaatjkJFdO26n2WSBdMkZhNO2SfD3l+Nk6n0g2uY9NJKGJ3770Vw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.6", - "@smithy/smithy-client": "^4.12.8", - "@smithy/types": "^4.13.1", + "@aws-sdk/types": "^3.973.8", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", "@smithy/util-hex-encoding": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" @@ -1252,18 +1261,18 @@ } }, "node_modules/@aws-sdk/middleware-user-agent": { - "version": "3.972.29", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.972.29.tgz", - "integrity": "sha512-f/sIRzuTfEjg6NsbMYvye2VsmnQoNgntntleQyx5uGacUYzszbfIlO3GcI6G6daWUmTm0IDZc11qMHWwF0o0mQ==", + "version": "3.972.34", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.972.34.tgz", + "integrity": "sha512-jrmJHyYlTQocR7H4VhvSFhaoedMb2rmlOTvFWD6tNBQ/EVQhTsrNfQUYFuPiOc2wUGxbm5LgCHtnvVmCPgODHw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/types": "^3.973.7", - "@aws-sdk/util-endpoints": "^3.996.6", - "@smithy/core": "^3.23.14", - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", - "@smithy/util-retry": "^4.3.0", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@smithy/core": "^3.23.16", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", + "@smithy/util-retry": "^4.3.3", "tslib": "^2.6.2" }, "engines": { @@ -1271,15 +1280,15 @@ } }, "node_modules/@aws-sdk/middleware-user-agent/node_modules/@aws-sdk/util-endpoints": { - "version": "3.996.6", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.6.tgz", - "integrity": 
"sha512-2nUQ+2ih7CShuKHpGSIYvvAIOHy52dOZguYG36zptBukhw6iFwcvGfG0tes0oZFWQqEWvgZe9HLWaNlvXGdOrg==", + "version": "3.996.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.8.tgz", + "integrity": "sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", - "@smithy/types": "^4.14.0", - "@smithy/url-parser": "^4.2.13", - "@smithy/util-endpoints": "^3.3.4", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-endpoints": "^3.4.2", "tslib": "^2.6.2" }, "engines": { @@ -1287,47 +1296,48 @@ } }, "node_modules/@aws-sdk/nested-clients": { - "version": "3.996.19", - "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.996.19.tgz", - "integrity": "sha512-uFkmCDXvmQYLanlYdOFS0+MQWkrj9wPMt/ZCc/0J0fjPim6F5jBVBmEomvGY/j77ILW6GTPwN22Jc174Mhkw6Q==", + "version": "3.997.2", + "resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.997.2.tgz", + "integrity": "sha512-uGGQO08YetrqfInOKG5atRMrCDRQWRuZ9gGfKY6svPmuE4K7ac+XcbCkpWpjcA7yCYsBaKB/Nly4XKgPXUO1PA==", "license": "Apache-2.0", "dependencies": { "@aws-crypto/sha256-browser": "5.2.0", "@aws-crypto/sha256-js": "5.2.0", - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/middleware-host-header": "^3.972.9", - "@aws-sdk/middleware-logger": "^3.972.9", - "@aws-sdk/middleware-recursion-detection": "^3.972.10", - "@aws-sdk/middleware-user-agent": "^3.972.29", - "@aws-sdk/region-config-resolver": "^3.972.11", - "@aws-sdk/types": "^3.973.7", - "@aws-sdk/util-endpoints": "^3.996.6", - "@aws-sdk/util-user-agent-browser": "^3.972.9", - "@aws-sdk/util-user-agent-node": "^3.973.15", - "@smithy/config-resolver": "^4.4.14", - "@smithy/core": "^3.23.14", - "@smithy/fetch-http-handler": "^5.3.16", - "@smithy/hash-node": "^4.2.13", - "@smithy/invalid-dependency": "^4.2.13", - 
"@smithy/middleware-content-length": "^4.2.13", - "@smithy/middleware-endpoint": "^4.4.29", - "@smithy/middleware-retry": "^4.5.0", - "@smithy/middleware-serde": "^4.2.17", - "@smithy/middleware-stack": "^4.2.13", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/node-http-handler": "^4.5.2", - "@smithy/protocol-http": "^5.3.13", - "@smithy/smithy-client": "^4.12.9", - "@smithy/types": "^4.14.0", - "@smithy/url-parser": "^4.2.13", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/middleware-host-header": "^3.972.10", + "@aws-sdk/middleware-logger": "^3.972.10", + "@aws-sdk/middleware-recursion-detection": "^3.972.11", + "@aws-sdk/middleware-user-agent": "^3.972.34", + "@aws-sdk/region-config-resolver": "^3.972.13", + "@aws-sdk/signature-v4-multi-region": "^3.996.21", + "@aws-sdk/types": "^3.973.8", + "@aws-sdk/util-endpoints": "^3.996.8", + "@aws-sdk/util-user-agent-browser": "^3.972.10", + "@aws-sdk/util-user-agent-node": "^3.973.20", + "@smithy/config-resolver": "^4.4.17", + "@smithy/core": "^3.23.16", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/hash-node": "^4.2.14", + "@smithy/invalid-dependency": "^4.2.14", + "@smithy/middleware-content-length": "^4.2.14", + "@smithy/middleware-endpoint": "^4.4.31", + "@smithy/middleware-retry": "^4.5.4", + "@smithy/middleware-serde": "^4.2.19", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/node-http-handler": "^4.6.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/smithy-client": "^4.12.12", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", "@smithy/util-body-length-node": "^4.2.3", - "@smithy/util-defaults-mode-browser": "^4.3.45", - "@smithy/util-defaults-mode-node": "^4.2.49", - "@smithy/util-endpoints": "^3.3.4", - "@smithy/util-middleware": "^4.2.13", - "@smithy/util-retry": "^4.3.0", + "@smithy/util-defaults-mode-browser": "^4.3.48", + 
"@smithy/util-defaults-mode-node": "^4.2.53", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.3", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, @@ -1335,16 +1345,33 @@ "node": ">=20.0.0" } }, + "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/signature-v4-multi-region": { + "version": "3.996.21", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.996.21.tgz", + "integrity": "sha512-3EpT+C0QdmTMB5aVeJ5odWSLt9vg2oGzUXl1xvUazKGlkr9OBYnegNWqhhjGgZdv8RmSi5eS8nqqB+euNP2aqA==", + "license": "Apache-2.0", + "dependencies": { + "@aws-sdk/middleware-sdk-s3": "^3.972.33", + "@aws-sdk/types": "^3.973.8", + "@smithy/protocol-http": "^5.3.14", + "@smithy/signature-v4": "^5.3.14", + "@smithy/types": "^4.14.1", + "tslib": "^2.6.2" + }, + "engines": { + "node": ">=20.0.0" + } + }, "node_modules/@aws-sdk/nested-clients/node_modules/@aws-sdk/util-endpoints": { - "version": "3.996.6", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.6.tgz", - "integrity": "sha512-2nUQ+2ih7CShuKHpGSIYvvAIOHy52dOZguYG36zptBukhw6iFwcvGfG0tes0oZFWQqEWvgZe9HLWaNlvXGdOrg==", + "version": "3.996.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.996.8.tgz", + "integrity": "sha512-oOZHcRDihk5iEe5V25NVWg45b3qEA8OpHWVdU/XQh8Zj4heVPAJqWvMphQnU7LkufmUo10EpvFPZuQMiFLJK3g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", - "@smithy/types": "^4.14.0", - "@smithy/url-parser": "^4.2.13", - "@smithy/util-endpoints": "^3.3.4", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-endpoints": "^3.4.2", "tslib": "^2.6.2" }, "engines": { @@ -1352,15 +1379,15 @@ } }, "node_modules/@aws-sdk/region-config-resolver": { - "version": "3.972.11", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.972.11.tgz", - "integrity": "sha512-6Q8B1dcx6BBqUTY1Mc/eROKA0FImEEY5VPSd6AGPEUf0ErjExz4snVqa9kNJSoVDV1rKaNf3qrWojgcKW+SdDg==", + "version": "3.972.13", + "resolved": "https://registry.npmjs.org/@aws-sdk/region-config-resolver/-/region-config-resolver-3.972.13.tgz", + "integrity": "sha512-CvJ2ZIjK/jVD/lbOpowBVElJyC1YxLTIJ13yM0AEo0t2v7swOzGjSA6lJGH+DwZXQhcjUjoYwc8bVYCX5MDr1A==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", - "@smithy/config-resolver": "^4.4.14", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/types": "^4.14.0", + "@aws-sdk/types": "^3.973.8", + "@smithy/config-resolver": "^4.4.17", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1384,17 +1411,17 @@ } }, "node_modules/@aws-sdk/token-providers": { - "version": "3.1026.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.1026.0.tgz", - "integrity": "sha512-Ieq/HiRrbEtrYP387Nes0XlR7H1pJiJOZKv+QyQzMYpvTiDs0VKy2ZB3E2Zf+aFovWmeE7lRE4lXyF7dYM6GgA==", + "version": "3.1035.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.1035.0.tgz", + "integrity": "sha512-E6IO3Cn+OzBe6Sb5pnubd5Y8qSUMAsVKkD5QSwFfIx5fV1g5SkYwUDRDyPlm90RuIVcCo28wpMJU6W8wXH46Aw==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/core": "^3.973.27", - "@aws-sdk/nested-clients": "^3.996.19", - "@aws-sdk/types": "^3.973.7", - "@smithy/property-provider": "^4.2.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", + "@aws-sdk/core": "^3.974.4", + "@aws-sdk/nested-clients": "^3.997.2", + "@aws-sdk/types": "^3.973.8", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1402,12 +1429,12 @@ } }, "node_modules/@aws-sdk/types": { - "version": "3.973.7", - 
"resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.973.7.tgz", - "integrity": "sha512-reXRwoJ6CfChoqAsBszUYajAF8Z2LRE+CRcKocvFSMpIiLOtYU3aJ9trmn6VVPAzbbY5LXF+FfmUslbXk1SYFg==", + "version": "3.973.8", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.973.8.tgz", + "integrity": "sha512-gjlAdtHMbtR9X5iIhVUvbVcy55KnznpC6bkDUWW9z915bi0ckdUr5cjf16Kp6xq0bP5HBD2xzgbL9F9Quv5vUw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -1415,9 +1442,10 @@ } }, "node_modules/@aws-sdk/util-arn-parser": { - "version": "3.972.2", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.972.2.tgz", - "integrity": "sha512-VkykWbqMjlSgBFDyrY3nOSqupMc6ivXuGmvci6Q3NnLq5kC+mKQe2QBZ4nrWRE/jqOxeFP2uYzLtwncYYcvQDg==", + "version": "3.972.3", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-arn-parser/-/util-arn-parser-3.972.3.tgz", + "integrity": "sha512-HzSD8PMFrvgi2Kserxuff5VitNq2sgf3w9qxmskKDiDTThWfVteJxuCS9JXiPIPtmCrp+7N9asfIaVhBFORllA==", + "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" }, @@ -1453,27 +1481,27 @@ } }, "node_modules/@aws-sdk/util-user-agent-browser": { - "version": "3.972.9", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.972.9.tgz", - "integrity": "sha512-sn/LMzTbGjYqCCF24390WxPd6hkpoSptiUn5DzVp4cD71yqw+yGEGm1YCxyEoPXyc8qciM8UzLJcZBFslxo5Uw==", + "version": "3.972.10", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.972.10.tgz", + "integrity": "sha512-FAzqXvfEssGdSIz8ejatan0bOdx1qefBWKF/gWmVBXIP1HkS7v/wjjaqrAGGKvyihrXTXW00/2/1nTJtxpXz7g==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/types": "^3.973.7", - "@smithy/types": "^4.14.0", + "@aws-sdk/types": "^3.973.8", + "@smithy/types": "^4.14.1", "bowser": "^2.11.0", "tslib": "^2.6.2" } }, 
"node_modules/@aws-sdk/util-user-agent-node": { - "version": "3.973.15", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.973.15.tgz", - "integrity": "sha512-fYn3s9PtKdgQkczGZCFMgkNEe8aq1JCVbnRqjqN9RSVW43xn2RV9xdcZ3z01a48Jpkuh/xCmBKJxdLOo4Ozg7w==", + "version": "3.973.20", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.973.20.tgz", + "integrity": "sha512-owEqyKr0z5hWwk+uHwudwNhyFMZ9f9eSWr/k/XD6yeDCI7hHyc56s4UOY1iBQmoramTbdAY4UCuLLEuKmjVXrg==", "license": "Apache-2.0", "dependencies": { - "@aws-sdk/middleware-user-agent": "^3.972.29", - "@aws-sdk/types": "^3.973.7", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/types": "^4.14.0", + "@aws-sdk/middleware-user-agent": "^3.972.34", + "@aws-sdk/types": "^3.973.8", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", "tslib": "^2.6.2" }, @@ -1490,12 +1518,12 @@ } }, "node_modules/@aws-sdk/xml-builder": { - "version": "3.972.17", - "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.972.17.tgz", - "integrity": "sha512-Ra7hjqAZf1OXRRMueB13qex7mFJRDK/pgCvdSFemXBT8KCGnQDPoKzHY1SjN+TjJVmnpSF14W5tJ1vDamFu+Gg==", + "version": "3.972.18", + "resolved": "https://registry.npmjs.org/@aws-sdk/xml-builder/-/xml-builder-3.972.18.tgz", + "integrity": "sha512-BMDNVG1ETXRhl1tnisQiYBef3RShJ1kfZA7x7afivTFMLirfHNTb6U71K569HNXhSXbQZsweHvSDZ6euBw8hPA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "fast-xml-parser": "5.5.8", "tslib": "^2.6.2" }, @@ -4568,16 +4596,16 @@ } }, "node_modules/@smithy/config-resolver": { - "version": "4.4.15", - "resolved": "https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.15.tgz", - "integrity": "sha512-BJdMBY5YO9iHh+lPLYdHv6LbX+J8IcPCYMl1IJdBt2KDWNHwONHrPVHk3ttYBqJd9wxv84wlbN0f7GlQzcQtNQ==", + "version": "4.4.17", + "resolved": 
"https://registry.npmjs.org/@smithy/config-resolver/-/config-resolver-4.4.17.tgz", + "integrity": "sha512-TzDZcAnhTyAHbXVxWZo7/tEcrIeFq20IBk8So3OLOetWpR8EwY/yEqBMBFaJMeyEiREDq4NfEl+qO3OAUD+vbQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.13", - "@smithy/types": "^4.14.0", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", - "@smithy/util-endpoints": "^3.4.0", - "@smithy/util-middleware": "^4.2.13", + "@smithy/util-endpoints": "^3.4.2", + "@smithy/util-middleware": "^4.2.14", "tslib": "^2.6.2" }, "engines": { @@ -4585,18 +4613,18 @@ } }, "node_modules/@smithy/core": { - "version": "3.23.14", - "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.23.14.tgz", - "integrity": "sha512-vJ0IhpZxZAkFYOegMKSrxw7ujhhT2pass/1UEcZ4kfl5srTAqtPU5I7MdYQoreVas3204ykCiNhY1o7Xlz6Yyg==", + "version": "3.23.17", + "resolved": "https://registry.npmjs.org/@smithy/core/-/core-3.23.17.tgz", + "integrity": "sha512-x7BlLbUFL8NWCGjMF9C+1N5cVCxcPa7g6Tv9B4A2luWx3be3oU8hQ96wIwxe/s7OhIzvoJH73HAUSg5JXVlEtQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", - "@smithy/url-parser": "^4.2.13", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "@smithy/util-base64": "^4.3.2", "@smithy/util-body-length-browser": "^4.2.2", - "@smithy/util-middleware": "^4.2.13", - "@smithy/util-stream": "^4.5.22", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-stream": "^4.5.25", "@smithy/util-utf8": "^4.2.2", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" @@ -4606,14 +4634,14 @@ } }, "node_modules/@smithy/core/node_modules/@smithy/util-stream": { - "version": "4.5.22", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.22.tgz", - "integrity": "sha512-3H8iq/0BfQjUs2/4fbHZ9aG9yNzcuZs24LPkcX1Q7Z+qpqaGM8+qbGmE8zo9m2nCRgamyvS98cHdcWvR6YUsew==", + "version": 
"4.5.25", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.25.tgz", + "integrity": "sha512-/PFpG4k8Ze8Ei+mMKj3oiPICYekthuzePZMgZbCqMiXIHHf4n2aZ4Ps0aSRShycFTGuj/J6XldmC0x0DwednIA==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.16", - "@smithy/node-http-handler": "^4.5.2", - "@smithy/types": "^4.14.0", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", @@ -4625,15 +4653,15 @@ } }, "node_modules/@smithy/credential-provider-imds": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.13.tgz", - "integrity": "sha512-wboCPijzf6RJKLOvnjDAiBxGSmSnGXj35o5ZAWKDaHa/cvQ5U3ZJ13D4tMCE8JG4dxVAZFy/P0x/V9CwwdfULQ==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/credential-provider-imds/-/credential-provider-imds-4.2.14.tgz", + "integrity": "sha512-Au28zBN48ZAoXdooGUHemuVBrkE+Ie6RPmGNIAJsFqj33Vhb6xAgRifUydZ2aY+M+KaMAETAlKk5NC5h1G7wpg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.13", - "@smithy/property-provider": "^4.2.13", - "@smithy/types": "^4.14.0", - "@smithy/url-parser": "^4.2.13", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", "tslib": "^2.6.2" }, "engines": { @@ -4706,14 +4734,14 @@ } }, "node_modules/@smithy/fetch-http-handler": { - "version": "5.3.16", - "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.16.tgz", - "integrity": "sha512-nYDRUIvNd4mFmuXraRWt6w5UsZTNqtj4hXJA/iiOD4tuseIdLP9Lq38teH/SZTcIFCa2f+27o7hYpIsWktJKEQ==", + "version": "5.3.17", + "resolved": "https://registry.npmjs.org/@smithy/fetch-http-handler/-/fetch-http-handler-5.3.17.tgz", 
+ "integrity": "sha512-bXOvQzaSm6MnmLaWA1elgfQcAtN4UP3vXqV97bHuoOrHQOJiLT3ds6o9eo5bqd0TJfRFpzdGnDQdW3FACiAVdw==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.13", - "@smithy/querystring-builder": "^4.2.13", - "@smithy/types": "^4.14.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/querystring-builder": "^4.2.14", + "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "tslib": "^2.6.2" }, @@ -4736,12 +4764,12 @@ } }, "node_modules/@smithy/hash-node": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.13.tgz", - "integrity": "sha512-4/oy9h0jjmY80a2gOIo75iLl8TOPhmtx4E2Hz+PfMjvx/vLtGY4TMU/35WRyH2JHPfT5CVB38u4JRow7gnmzJA==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/hash-node/-/hash-node-4.2.14.tgz", + "integrity": "sha512-8ZBDY2DD4wr+GGjTpPtiglEsqr0lUP+KHqgZcWczFf6qeZ/YRjMIOoQWVQlmwu7EtxKTd8YXD8lblmYcpBIA1g==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" @@ -4764,12 +4792,12 @@ } }, "node_modules/@smithy/invalid-dependency": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.13.tgz", - "integrity": "sha512-jvC0RB/8BLj2SMIkY0Npl425IdnxZJxInpZJbu563zIRnVjpDMXevU3VMCRSabaLB0kf/eFIOusdGstrLJ8IDg==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/invalid-dependency/-/invalid-dependency-4.2.14.tgz", + "integrity": "sha512-c21qJiTSb25xvvOp+H2TNZzPCngrvl5vIPqPB8zQ/DmJF4QWXO19x1dWfMJZ6wZuuWUPPm0gV8C0cU3+ifcWuw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4789,12 +4817,12 @@ } }, "node_modules/@smithy/md5-js": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.12.tgz", - 
"integrity": "sha512-W/oIpHCpWU2+iAkfZYyGWE+qkpuf3vEXHLxQQDx9FPNZTTdnul0dZ2d/gUFrtQ5je1G2kp4cjG0/24YueG2LbQ==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/md5-js/-/md5-js-4.2.14.tgz", + "integrity": "sha512-V2v0vx+h0iUSNG1Alt+GNBMSLGCrl9iVsdd+Ap67HPM9PN479x12V8LkuMoKImNZxn3MXeuyUjls+/7ZACZghA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.13.1", + "@smithy/types": "^4.14.1", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" }, @@ -4803,18 +4831,18 @@ } }, "node_modules/@smithy/middleware-compression": { - "version": "4.3.43", - "resolved": "https://registry.npmjs.org/@smithy/middleware-compression/-/middleware-compression-4.3.43.tgz", - "integrity": "sha512-MphcLSNTvBN9G2/ko7NBV2psEfsQRZviXmf612ZwvbSY7dJZNroc2+WPHBf+I9KO2SFl4VFz11rTTueihwWjlQ==", + "version": "4.3.46", + "resolved": "https://registry.npmjs.org/@smithy/middleware-compression/-/middleware-compression-4.3.46.tgz", + "integrity": "sha512-9f4AZ5dKqKRmO49MPhOoxFoQBLfBgxE9YKG8bQ6lsW9xk+Bn8rkfGlpW8OYlvhuarN+8mja9PjhEudFiR8wGFQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.23.14", + "@smithy/core": "^3.23.17", "@smithy/is-array-buffer": "^4.2.2", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "@smithy/util-config-provider": "^4.2.2", - "@smithy/util-middleware": "^4.2.13", + "@smithy/util-middleware": "^4.2.14", "@smithy/util-utf8": "^4.2.2", "fflate": "0.8.1", "tslib": "^2.6.2" @@ -4824,13 +4852,13 @@ } }, "node_modules/@smithy/middleware-content-length": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.13.tgz", - "integrity": "sha512-IPMLm/LE4AZwu6qiE8Rr8vJsWhs9AtOdySRXrOM7xnvclp77Tyh7hMs/FRrMf26kgIe67vFJXXOSmVxS7oKeig==", + "version": "4.2.14", + "resolved": 
"https://registry.npmjs.org/@smithy/middleware-content-length/-/middleware-content-length-4.2.14.tgz", + "integrity": "sha512-xhHq7fX4/3lv5NHxLUk3OeEvl0xZ+Ek3qIbWaCL4f9JwgDZEclPBElljaZCAItdGPQl/kSM4LPMOpy1MYgprpw==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4838,18 +4866,18 @@ } }, "node_modules/@smithy/middleware-endpoint": { - "version": "4.4.29", - "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.29.tgz", - "integrity": "sha512-R9Q/58U+qBiSARGWbAbFLczECg/RmysRksX6Q8BaQEpt75I7LI6WGDZnjuC9GXSGKljEbA7N118LhGaMbfrTXw==", + "version": "4.4.32", + "resolved": "https://registry.npmjs.org/@smithy/middleware-endpoint/-/middleware-endpoint-4.4.32.tgz", + "integrity": "sha512-ZZkgyjnJppiZbIm6Qbx92pbXYi1uzenIvGhBSCDlc7NwuAkiqSgS75j1czAD25ZLs2FjMjYy1q7gyRVWG6JA0Q==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.23.14", - "@smithy/middleware-serde": "^4.2.17", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", - "@smithy/url-parser": "^4.2.13", - "@smithy/util-middleware": "^4.2.13", + "@smithy/core": "^3.23.17", + "@smithy/middleware-serde": "^4.2.20", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", + "@smithy/url-parser": "^4.2.14", + "@smithy/util-middleware": "^4.2.14", "tslib": "^2.6.2" }, "engines": { @@ -4857,19 +4885,19 @@ } }, "node_modules/@smithy/middleware-retry": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.5.1.tgz", - "integrity": "sha512-/zY+Gp7Qj2D2hVm3irkCyONER7E9MiX3cUUm/k2ZmhkzZkrPgwVS4aJ5NriZUEN/M0D1hhjrgjUmX04HhRwdWA==", + "version": "4.5.5", + "resolved": 
"https://registry.npmjs.org/@smithy/middleware-retry/-/middleware-retry-4.5.5.tgz", + "integrity": "sha512-wnYOpB5vATFKWrY2Z9Alb0KhjZI6AbzU6Fbz3Hq2GnURdRYWB4q+qWivQtSTwXcmWUA3MZ6krfwL6Cq5MAbxsA==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.23.14", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/protocol-http": "^5.3.13", - "@smithy/service-error-classification": "^4.2.13", - "@smithy/smithy-client": "^4.12.9", - "@smithy/types": "^4.14.0", - "@smithy/util-middleware": "^4.2.13", - "@smithy/util-retry": "^4.3.1", + "@smithy/core": "^3.23.17", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/service-error-classification": "^4.3.0", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", + "@smithy/util-middleware": "^4.2.14", + "@smithy/util-retry": "^4.3.4", "@smithy/uuid": "^1.1.2", "tslib": "^2.6.2" }, @@ -4878,14 +4906,14 @@ } }, "node_modules/@smithy/middleware-serde": { - "version": "4.2.17", - "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.17.tgz", - "integrity": "sha512-0T2mcaM6v9W1xku86Dk0bEW7aEseG6KenFkPK98XNw0ZhOqOiD1MrMsdnQw9QsL3/Oa85T53iSMlm0SZdSuIEQ==", + "version": "4.2.20", + "resolved": "https://registry.npmjs.org/@smithy/middleware-serde/-/middleware-serde-4.2.20.tgz", + "integrity": "sha512-Lx9JMO9vArPtiChE3wbEZ5akMIDQpWQtlu90lhACQmNOXcGXRbaDywMHDzuDZ2OkZzP+9wQfZi3YJT9F67zTQQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/core": "^3.23.14", - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", + "@smithy/core": "^3.23.17", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4893,12 +4921,12 @@ } }, "node_modules/@smithy/middleware-stack": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.13.tgz", - "integrity": 
"sha512-g72jN/sGDLyTanrCLH9fhg3oysO3f7tQa6eWWsMyn2BiYNCgjF24n4/I9wff/5XidFvjj9ilipAoQrurTUrLvw==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/middleware-stack/-/middleware-stack-4.2.14.tgz", + "integrity": "sha512-2dvkUKLuFdKsCRmOE4Mn63co0Djtsm+JMh0bYZQupN1pJwMeE8FmQmRLLzzEMN0dnNi7CDCYYH8F0EVwWiPBeA==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4906,14 +4934,14 @@ } }, "node_modules/@smithy/node-config-provider": { - "version": "4.3.13", - "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.13.tgz", - "integrity": "sha512-iGxQ04DsKXLckbgnX4ipElrOTk+IHgTyu0q0WssZfYhDm9CQWHmu6cOeI5wmWRxpXbBDhIIfXMWz5tPEtcVqbw==", + "version": "4.3.14", + "resolved": "https://registry.npmjs.org/@smithy/node-config-provider/-/node-config-provider-4.3.14.tgz", + "integrity": "sha512-S+gFjyo/weSVL0P1b9Ts8C/CwIfNCgUPikk3sl6QVsfE/uUuO+QsF+NsE/JkpvWqqyz1wg7HFdiaZuj5CoBMRg==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.13", - "@smithy/shared-ini-file-loader": "^4.4.8", - "@smithy/types": "^4.14.0", + "@smithy/property-provider": "^4.2.14", + "@smithy/shared-ini-file-loader": "^4.4.9", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4921,14 +4949,14 @@ } }, "node_modules/@smithy/node-http-handler": { - "version": "4.5.2", - "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.5.2.tgz", - "integrity": "sha512-/oD7u8M0oj2ZTFw7GkuuHWpIxtWdLlnyNkbrWcyVYhd5RJNDuczdkb0wfnQICyNFrVPlr8YHOhamjNy3zidhmA==", + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/@smithy/node-http-handler/-/node-http-handler-4.6.1.tgz", + "integrity": "sha512-iB+orM4x3xrr57X3YaXazfKnntl0LHlZB1kcXSGzMV1Tt0+YwEjGlbjk/44qEGtBzXAz6yFDzkYTKSV6Pj2HUg==", "license": "Apache-2.0", "dependencies": { - "@smithy/protocol-http": "^5.3.13", - 
"@smithy/querystring-builder": "^4.2.13", - "@smithy/types": "^4.14.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/querystring-builder": "^4.2.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4936,12 +4964,12 @@ } }, "node_modules/@smithy/property-provider": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.13.tgz", - "integrity": "sha512-bGzUCthxRmezuxkbu9wD33wWg9KX3hJpCXpQ93vVkPrHn9ZW6KNNdY5xAUWNuRCwQ+VyboFuWirG1lZhhkcyRQ==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/property-provider/-/property-provider-4.2.14.tgz", + "integrity": "sha512-WuM31CgfsnQ/10i7NYr0PyxqknD72Y5uMfUMVSniPjbEPceiTErb4eIqJQ+pdxNEAUEWrewrGjIRjVbVHsxZiQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4949,12 +4977,12 @@ } }, "node_modules/@smithy/protocol-http": { - "version": "5.3.13", - "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.13.tgz", - "integrity": "sha512-+HsmuJUF4u8POo6s8/a2Yb/AQ5t/YgLovCuHF9oxbocqv+SZ6gd8lC2duBFiCA/vFHoHQhoq7QjqJqZC6xOxxg==", + "version": "5.3.14", + "resolved": "https://registry.npmjs.org/@smithy/protocol-http/-/protocol-http-5.3.14.tgz", + "integrity": "sha512-dN5F8kHx8RNU0r+pCwNmFZyz6ChjMkzShy/zup6MtkRmmix4vZzJdW+di7x//b1LiynIev88FM18ie+wwPcQtQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4962,12 +4990,12 @@ } }, "node_modules/@smithy/querystring-builder": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.13.tgz", - "integrity": "sha512-tG4aOYFCZdPMjbgfhnIQ322H//ojujldp1SrHPHpBSb3NqgUp3dwiUGRJzie87hS1DYwWGqDuPaowoDF+rYCbQ==", + "version": "4.2.14", + "resolved": 
"https://registry.npmjs.org/@smithy/querystring-builder/-/querystring-builder-4.2.14.tgz", + "integrity": "sha512-XYA5Z0IqTeF+5XDdh4BBmSA0HvbgVZIyv4cmOoUheDNR57K1HgBp9ukUMx3Cr3XpDHHpLBnexPE3LAtDsZkj2A==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "@smithy/util-uri-escape": "^4.2.2", "tslib": "^2.6.2" }, @@ -4976,12 +5004,12 @@ } }, "node_modules/@smithy/querystring-parser": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.13.tgz", - "integrity": "sha512-hqW3Q4P+CDzUyQ87GrboGMeD7XYNMOF+CuTwu936UQRB/zeYn3jys8C3w+wMkDfY7CyyyVwZQ5cNFoG0x1pYmA==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/querystring-parser/-/querystring-parser-4.2.14.tgz", + "integrity": "sha512-hr+YyqBD23GVvRxGGrcc/oOeNlK3PzT5Fu4dzrDXxzS1LpFiuL2PQQqKPs87M79aW7ziMs+nvB3qdw77SqE7Lw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -4989,24 +5017,24 @@ } }, "node_modules/@smithy/service-error-classification": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.2.13.tgz", - "integrity": "sha512-a0s8XZMfOC/qpqq7RCPvJlk93rWFrElH6O++8WJKz0FqnA4Y7fkNi/0mnGgSH1C4x6MFsuBA8VKu4zxFrMe5Vw==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/@smithy/service-error-classification/-/service-error-classification-4.3.0.tgz", + "integrity": "sha512-9jKsBYQRPR0xBLgc2415RsA5PIcP2sis4oBdN9s0D13cg1B1284mNTjx9Yc+BEERXzuPm5ObktI96OxsKh8E9A==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0" + "@smithy/types": "^4.14.1" }, "engines": { "node": ">=18.0.0" } }, "node_modules/@smithy/shared-ini-file-loader": { - "version": "4.4.8", - "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.8.tgz", - "integrity": 
"sha512-VZCZx2bZasxdqxVgEAhREvDSlkatTPnkdWy1+Kiy8w7kYPBosW0V5IeDwzDUMvWBt56zpK658rx1cOBFOYaPaw==", + "version": "4.4.9", + "resolved": "https://registry.npmjs.org/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-4.4.9.tgz", + "integrity": "sha512-495/V2I15SHgedSJoDPD23JuSfKAp726ZI1V0wtjB07Wh7q/0tri/0e0DLefZCHgxZonrGKt/OCTpAtP1wE1kQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -5014,16 +5042,16 @@ } }, "node_modules/@smithy/signature-v4": { - "version": "5.3.13", - "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.13.tgz", - "integrity": "sha512-YpYSyM0vMDwKbHD/JA7bVOF6kToVRpa+FM5ateEVRpsTNu564g1muBlkTubXhSKKYXInhpADF46FPyrZcTLpXg==", + "version": "5.3.14", + "resolved": "https://registry.npmjs.org/@smithy/signature-v4/-/signature-v4-5.3.14.tgz", + "integrity": "sha512-1D9Y/nmlVjCeSivCbhZ7hgEpmHyY1h0GvpSZt3l0xcD9JjmjVC1CHOozS6+Gh+/ldMH8JuJ6cujObQqfayAVFA==", "license": "Apache-2.0", "dependencies": { "@smithy/is-array-buffer": "^4.2.2", - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", "@smithy/util-hex-encoding": "^4.2.2", - "@smithy/util-middleware": "^4.2.13", + "@smithy/util-middleware": "^4.2.14", "@smithy/util-uri-escape": "^4.2.2", "@smithy/util-utf8": "^4.2.2", "tslib": "^2.6.2" @@ -5033,17 +5061,17 @@ } }, "node_modules/@smithy/smithy-client": { - "version": "4.12.9", - "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.12.9.tgz", - "integrity": "sha512-ovaLEcTU5olSeHcRXcxV6viaKtpkHZumn6Ps0yn7dRf2rRSfy794vpjOtrWDO0d1auDSvAqxO+lyhERSXQ03EQ==", + "version": "4.12.13", + "resolved": "https://registry.npmjs.org/@smithy/smithy-client/-/smithy-client-4.12.13.tgz", + "integrity": "sha512-y/Pcj1V9+qG98gyu1gvftHB7rDpdh+7kIBIggs55yGm3JdtBV8GT8IFF3a1qxZ79QnaJHX9GXzvBG6tAd+czJA==", "license": "Apache-2.0", "dependencies": 
{ - "@smithy/core": "^3.23.14", - "@smithy/middleware-endpoint": "^4.4.29", - "@smithy/middleware-stack": "^4.2.13", - "@smithy/protocol-http": "^5.3.13", - "@smithy/types": "^4.14.0", - "@smithy/util-stream": "^4.5.22", + "@smithy/core": "^3.23.17", + "@smithy/middleware-endpoint": "^4.4.32", + "@smithy/middleware-stack": "^4.2.14", + "@smithy/protocol-http": "^5.3.14", + "@smithy/types": "^4.14.1", + "@smithy/util-stream": "^4.5.25", "tslib": "^2.6.2" }, "engines": { @@ -5051,14 +5079,14 @@ } }, "node_modules/@smithy/smithy-client/node_modules/@smithy/util-stream": { - "version": "4.5.22", - "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.22.tgz", - "integrity": "sha512-3H8iq/0BfQjUs2/4fbHZ9aG9yNzcuZs24LPkcX1Q7Z+qpqaGM8+qbGmE8zo9m2nCRgamyvS98cHdcWvR6YUsew==", + "version": "4.5.25", + "resolved": "https://registry.npmjs.org/@smithy/util-stream/-/util-stream-4.5.25.tgz", + "integrity": "sha512-/PFpG4k8Ze8Ei+mMKj3oiPICYekthuzePZMgZbCqMiXIHHf4n2aZ4Ps0aSRShycFTGuj/J6XldmC0x0DwednIA==", "license": "Apache-2.0", "dependencies": { - "@smithy/fetch-http-handler": "^5.3.16", - "@smithy/node-http-handler": "^4.5.2", - "@smithy/types": "^4.14.0", + "@smithy/fetch-http-handler": "^5.3.17", + "@smithy/node-http-handler": "^4.6.1", + "@smithy/types": "^4.14.1", "@smithy/util-base64": "^4.3.2", "@smithy/util-buffer-from": "^4.2.2", "@smithy/util-hex-encoding": "^4.2.2", @@ -5070,9 +5098,9 @@ } }, "node_modules/@smithy/types": { - "version": "4.14.0", - "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.14.0.tgz", - "integrity": "sha512-OWgntFLW88kx2qvf/c/67Vno1yuXm/f9M7QFAtVkkO29IJXGBIg0ycEaBTH0kvCtwmvZxRujrgP5a86RvsXJAQ==", + "version": "4.14.1", + "resolved": "https://registry.npmjs.org/@smithy/types/-/types-4.14.1.tgz", + "integrity": "sha512-59b5HtSVrVR/eYNei3BUj3DCPKD/G7EtDDe7OEJE7i7FtQFugYo6MxbotS8mVJkLNVf8gYaAlEBwwtJ9HzhWSg==", "license": "Apache-2.0", "dependencies": { "tslib": "^2.6.2" @@ -5082,13 +5110,13 @@ } }, 
"node_modules/@smithy/url-parser": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.13.tgz", - "integrity": "sha512-2G03yoboIRZlZze2+PT4GZEjgwQsJjUgn6iTsvxA02bVceHR6vp4Cuk7TUnPFWKF+ffNUk3kj4COwkENS2K3vw==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/url-parser/-/url-parser-4.2.14.tgz", + "integrity": "sha512-p06BiBigJ8bTA3MgnOfCtDUWnAMY0YfedO/GRpmc7p+wg3KW8vbXy1xwSu5ASy0wV7rRYtlfZOIKH4XqfhjSQQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/querystring-parser": "^4.2.13", - "@smithy/types": "^4.14.0", + "@smithy/querystring-parser": "^4.2.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -5159,14 +5187,14 @@ } }, "node_modules/@smithy/util-defaults-mode-browser": { - "version": "4.3.45", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.45.tgz", - "integrity": "sha512-ag9sWc6/nWZAuK3Wm9KlFJUnRkXLrXn33RFjIAmCTFThqLHY+7wCst10BGq56FxslsDrjhSie46c8OULS+BiIw==", + "version": "4.3.49", + "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-4.3.49.tgz", + "integrity": "sha512-a5bNrdiONYB/qE2BuKegvUMd/+ZDwdg4vsNuuSzYE8qs2EYAdK9CynL+Rzn29PbPiUqoz/cbpRbcLzD5lEevHw==", "license": "Apache-2.0", "dependencies": { - "@smithy/property-provider": "^4.2.13", - "@smithy/smithy-client": "^4.12.9", - "@smithy/types": "^4.14.0", + "@smithy/property-provider": "^4.2.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -5174,17 +5202,17 @@ } }, "node_modules/@smithy/util-defaults-mode-node": { - "version": "4.2.50", - "resolved": "https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.50.tgz", - "integrity": "sha512-xpjncL5XozFA3No7WypTsPU1du0fFS8flIyO+Wh2nhCy7bpEapvU7BR55Bg+wrfw+1cRA+8G8UsTjaxgzrMzXg==", + "version": "4.2.54", + "resolved": 
"https://registry.npmjs.org/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-4.2.54.tgz", + "integrity": "sha512-g1cvrJvOnzeJgEdf7AE4luI7gp6L8weE0y9a9wQUSGtjb8QRHDbCJYuE4Sy0SD9N8RrnNPFsPltAz/OSoBR9Zw==", "license": "Apache-2.0", "dependencies": { - "@smithy/config-resolver": "^4.4.15", - "@smithy/credential-provider-imds": "^4.2.13", - "@smithy/node-config-provider": "^4.3.13", - "@smithy/property-provider": "^4.2.13", - "@smithy/smithy-client": "^4.12.9", - "@smithy/types": "^4.14.0", + "@smithy/config-resolver": "^4.4.17", + "@smithy/credential-provider-imds": "^4.2.14", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/property-provider": "^4.2.14", + "@smithy/smithy-client": "^4.12.13", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -5192,13 +5220,13 @@ } }, "node_modules/@smithy/util-endpoints": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.4.0.tgz", - "integrity": "sha512-QQHGPKkw6NPcU6TJ1rNEEa201srPtZiX4k61xL163vvs9sTqW/XKz+UEuJ00uvPqoN+5Rs4Ka1UJ7+Mp03IXJw==", + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/@smithy/util-endpoints/-/util-endpoints-3.4.2.tgz", + "integrity": "sha512-a55Tr+3OKld4TTtnT+RhKOQHyPxm3j/xL4OR83WBUhLJaKDS9dnJ7arRMOp3t31dcLhApwG9bgvrRXBHlLdIkg==", "license": "Apache-2.0", "dependencies": { - "@smithy/node-config-provider": "^4.3.13", - "@smithy/types": "^4.14.0", + "@smithy/node-config-provider": "^4.3.14", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -5218,12 +5246,12 @@ } }, "node_modules/@smithy/util-middleware": { - "version": "4.2.13", - "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.13.tgz", - "integrity": "sha512-GTooyrlmRTqvUen4eK7/K1p6kryF7bnDfq6XsAbIsf2mo51B/utaH+XThY6dKgNCWzMAaH/+OLmqaBuLhLWRow==", + "version": "4.2.14", + "resolved": "https://registry.npmjs.org/@smithy/util-middleware/-/util-middleware-4.2.14.tgz", + "integrity": 
"sha512-1Su2vj9RYNDEv/V+2E+jXkkwGsgR7dc4sfHn9Z7ruzQHJIEni9zzw5CauvRXlFJfmgcqYP8fWa0dkh2Q2YaQyw==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -5231,13 +5259,13 @@ } }, "node_modules/@smithy/util-retry": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.3.1.tgz", - "integrity": "sha512-FwmicpgWOkP5kZUjN3y+3JIom8NLGqSAJBeoIgK0rIToI817TEBHCrd0A2qGeKQlgDeP+Jzn4i0H/NLAXGy9uQ==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/@smithy/util-retry/-/util-retry-4.3.4.tgz", + "integrity": "sha512-FY1UQQ1VFmMwiYp1GVS4MeaGD5O0blLNYK0xCRHU+mJgeoH/hSY8Ld8sJWKQ6uznkh14HveRGQJncgPyNl9J+A==", "license": "Apache-2.0", "dependencies": { - "@smithy/service-error-classification": "^4.2.13", - "@smithy/types": "^4.14.0", + "@smithy/service-error-classification": "^4.3.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -5459,12 +5487,12 @@ } }, "node_modules/@smithy/util-waiter": { - "version": "4.2.15", - "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.15.tgz", - "integrity": "sha512-oUt9o7n8hBv3BL56sLSneL0XeigZSuem0Hr78JaoK33D9oKieyCvVP8eTSe3j7g2mm/S1DvzxKieG7JEWNJUNg==", + "version": "4.2.16", + "resolved": "https://registry.npmjs.org/@smithy/util-waiter/-/util-waiter-4.2.16.tgz", + "integrity": "sha512-GtclrKoZ3Lt7jPQ7aTIYKfjY92OgceScftVnkTsG8e1KV8rkvZgN+ny6YSRhd9hxB8rZtwVbmln7NTvE5O3GmQ==", "license": "Apache-2.0", "dependencies": { - "@smithy/types": "^4.14.0", + "@smithy/types": "^4.14.1", "tslib": "^2.6.2" }, "engines": { @@ -6371,9 +6399,9 @@ "license": "MIT" }, "node_modules/@xmldom/xmldom": { - "version": "0.8.12", - "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.12.tgz", - "integrity": "sha512-9k/gHF6n/pAi/9tqr3m3aqkuiNosYTurLLUtc7xQ9sxB/wm7WPygCv8GYa6mS0fLJEHhqMC1ATYhz++U/lRHqg==", + "version": "0.8.13", + "resolved": 
"https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.13.tgz", + "integrity": "sha512-KRYzxepc14G/CEpEGc3Yn+JKaAeT63smlDr+vjB8jRfgTBBI9wRj/nkQEO+ucV8p8I9bfKLWp37uHgFrbntPvw==", "license": "MIT", "engines": { "node": ">=10.0.0" @@ -7208,15 +7236,15 @@ } }, "node_modules/axios": { - "version": "1.13.5", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz", - "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==", + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.15.2.tgz", + "integrity": "sha512-wLrXxPtcrPTsNlJmKjkPnNPK2Ihe0hn0wGSaTEiHRPxwjvJwT3hKmXF4dpqxmPO9SoNb2FsYXj/xEo0gHN+D5A==", "dev": true, "license": "MIT", "dependencies": { "follow-redirects": "^1.15.11", "form-data": "^4.0.5", - "proxy-from-env": "^1.1.0" + "proxy-from-env": "^2.1.0" } }, "node_modules/axobject-query": { @@ -9491,9 +9519,9 @@ "license": "ISC" }, "node_modules/follow-redirects": { - "version": "1.15.11", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", - "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "version": "1.16.0", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.16.0.tgz", + "integrity": "sha512-y5rN/uOsadFT/JfYwhxRS5R7Qce+g3zG97+JrtFZlC9klX/W5hD7iiLzScI4nZqUS7DNUdhPgw4xI8W2LuXlUw==", "dev": true, "funding": [ { @@ -12301,11 +12329,14 @@ "license": "MIT" }, "node_modules/proxy-from-env": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-2.1.0.tgz", + "integrity": "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==", "dev": true, - 
"license": "MIT" + "license": "MIT", + "engines": { + "node": ">=10" + } }, "node_modules/ps-tree": { "version": "1.2.0", From 428615f54d023624091f47ab006fc9105109e97f Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Thu, 23 Apr 2026 13:30:23 -0400 Subject: [PATCH 12/13] KMS-663: Correctly check for the s3 service and recreate the LocalStack container if it's missing --- bin/localstack/start.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/bin/localstack/start.sh b/bin/localstack/start.sh index 4949f71b..aad49fa4 100755 --- a/bin/localstack/start.sh +++ b/bin/localstack/start.sh @@ -27,6 +27,7 @@ if [[ -n "${existing_id}" ]]; then )" if [[ ",${configured_services}," != *",events,"* ]] \ + || [[ ",${configured_services}," != *",s3,"* ]] \ || [[ ",${configured_services}," != *",cloudwatch,"* ]]; then docker rm -f "${LOCALSTACK_CONTAINER_NAME}" >/dev/null echo "Recreating LocalStack container '${LOCALSTACK_CONTAINER_NAME}' to enable services: ${REQUIRED_SERVICES}" From 45f34f195b390e0d2414b115c9b3611260fcf156 Mon Sep 17 00:00:00 2001 From: Hoan-Vu Tran-Ho Date: Thu, 23 Apr 2026 14:33:58 -0400 Subject: [PATCH 13/13] KMS-663: Remove not needed test cleanup --- .../src/shared/__tests__/exportPublishSchemeCsvToS3.test.js | 6 ------ serverless/src/shared/__tests__/exportRdfToS3.test.js | 5 ----- 2 files changed, 11 deletions(-) diff --git a/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js b/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js index 8e8c061d..6fb091bb 100644 --- a/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js +++ b/serverless/src/shared/__tests__/exportPublishSchemeCsvToS3.test.js @@ -1,6 +1,5 @@ import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3' import { - afterEach, beforeEach, describe, expect, @@ -54,11 +53,6 @@ describe('exportPublishSchemeCsvToS3', () => { }) }) - afterEach(() => { - // Restore real timers after each test - vi.useRealTimers() - }) - test('should successfully 
export CSVs for all published schemes', async () => { const { exportPublishSchemeCsvToS3 } = await import('../exportPublishSchemeCsvToS3') diff --git a/serverless/src/shared/__tests__/exportRdfToS3.test.js b/serverless/src/shared/__tests__/exportRdfToS3.test.js index 676a2b87..8264af81 100644 --- a/serverless/src/shared/__tests__/exportRdfToS3.test.js +++ b/serverless/src/shared/__tests__/exportRdfToS3.test.js @@ -1,6 +1,5 @@ import { PutObjectCommand, S3Client } from '@aws-sdk/client-s3' import { - afterEach, beforeEach, describe, expect, @@ -44,10 +43,6 @@ describe('exportRdfToS3', () => { ensureBucketAndLifecycleRule.mockResolvedValue() }) - afterEach(() => { - vi.useRealTimers() - }) - describe('when export process runs', () => { test('should successfully export RDF data to S3 for published version', async () => { const result = await exportRdfToS3({ version: 'published' })