[AUTO] Import OpenAPI to Jentic Public APIs: #202
name: Import OpenAPI Spec to OAK
on:
  issues:
    types: [ opened ]
jobs:
  generate-repo-structure:
    # Only run if the issue body contains the trigger string
    if: contains(github.event.issue.body, 'import_oas_url:')
    runs-on: ubuntu-latest
    permissions:
      contents: write       # To push the new branch and commit files
      pull-requests: write  # To create the pull request
      issues: write         # To comment on the issue (if needed)
    steps:
      - name: Check out the repository
        uses: actions/checkout@v4
      - name: Extract Info from Issue
        id: extract_info
        env:
          # Pass the issue body through an environment variable rather than interpolating
          # ${{ }} directly into the script, so quotes or backticks in the issue cannot break it.
          ISSUE_BODY: ${{ github.event.issue.body }}
        run: |
          # set -x # Enable debugging output
          echo "Parsing REQUIRED OpenAPI URL and Vendor Name from issue..."
          BODY="$ISSUE_BODY"
          BODY=$(echo "$BODY" | tr -d '\r')
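          # Illustrative example of an issue body this step can parse (values are placeholders,
          # not taken from the repository):
          #   import_oas_url: https://example.com/specs/openapi.json
          #   vendor_name: example-vendor
          # Both fields are required.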
          # Extract URL (Required)
          URL=$(echo "$BODY" | grep -oP '(?<=import_oas_url:).*' | head -n 1 | xargs)
          echo "URL after grep/xargs: $URL"
          if [ -z "$URL" ]; then
            echo "::error::Could not extract required 'import_oas_url:' field from issue body."
            echo "extract_failed=true" >> $GITHUB_OUTPUT
            echo "error_message=Could not extract required 'import_oas_url:' field from issue body. Please ensure your issue contains a line with 'import_oas_url: <your-url>'." >> $GITHUB_OUTPUT
            exit 1
          fi
          # Extract Vendor Name (Required)
          VENDOR_NAME=$(echo "$BODY" | grep -oP '(?<=vendor_name:).*' | head -n 1 | xargs)
          echo "Vendor Name after grep/xargs: $VENDOR_NAME"
          if [ -z "$VENDOR_NAME" ]; then
            echo "::error::Could not extract required 'vendor_name:' field from issue body."
            echo "extract_failed=true" >> $GITHUB_OUTPUT
            echo "error_message=Could not extract required 'vendor_name:' field from issue body. Please ensure your issue contains a line with 'vendor_name: <your-vendor-name>'." >> $GITHUB_OUTPUT
            exit 1
          fi
          # Set the Repotools label (always the vendor name now)
          REPOTOOLS_LABEL="$VENDOR_NAME"
          echo "Using provided Vendor Name as Repotools label: $REPOTOOLS_LABEL"
          echo "Extracted URL: $URL"
          echo "Repotools Label: $REPOTOOLS_LABEL"
          echo "import_oas_url=$URL" >> $GITHUB_OUTPUT
          echo "repotools_label=$REPOTOOLS_LABEL" >> $GITHUB_OUTPUT
      - name: Call Repotools Service and Download ZIP
        id: call_repotools
        run: |
          echo "Calling Repotools Service for URL: ${{ steps.extract_info.outputs.import_oas_url }}"
          echo "Using Repotools Label: ${{ steps.extract_info.outputs.repotools_label }}"
          API_ENDPOINT="https://repo-tools.main.us-east-1.jenticprod.net/spec/download/openapi"
          REQUEST_BODY=$(cat <<EOF
          {
            "url": "${{ steps.extract_info.outputs.import_oas_url }}",
            "label": "${{ steps.extract_info.outputs.repotools_label }}"
          }
          EOF
          )
          echo "Request Body:"
          echo "$REQUEST_BODY"
          echo "Calling endpoint: $API_ENDPOINT"
          # curl flags: -s silent, -L follow redirects, -OJ save the response body to a file
          # named by the Content-Disposition header. -w prints the HTTP status code and the
          # effective filename so both can be captured by 'read' below.
          read -r STATUS_CODE FILE_STRUCTURE <<< "$(curl -s -L -OJ -X POST \
            -H "Content-Type: application/json" \
            -d "$REQUEST_BODY" \
            -w "%{http_code} %{filename_effective}" \
            "$API_ENDPOINT")"
          OUTPUT_ZIP=$FILE_STRUCTURE
          if [ "$STATUS_CODE" -ne 200 ]; then
            echo "::error::Repotools API call failed with status code $STATUS_CODE."
            echo "api_failed=true" >> $GITHUB_OUTPUT
            if [ -s "$OUTPUT_ZIP" ]; then
              echo "Response details (if any):"
              ERROR_RESPONSE=$(cat "$OUTPUT_ZIP")
              echo "$ERROR_RESPONSE"
              # Use the multiline GITHUB_OUTPUT syntax in case the error response spans several lines
              {
                echo 'error_response<<ERROR_RESPONSE_EOF'
                echo "$ERROR_RESPONSE"
                echo 'ERROR_RESPONSE_EOF'
              } >> "$GITHUB_OUTPUT"
            else
              echo "error_response=Repotools API call failed with HTTP status code $STATUS_CODE. No additional error details available." >> $GITHUB_OUTPUT
            fi
            exit 1
          else
            # Strip the .zip extension and replace the ':' separators in the filename with '/'
            # to get the subpath of the API content inside the extracted archive
            FILE_STRUCTURE=${FILE_STRUCTURE%.zip}
            ACTUAL_CONTENT_SUBPATH_IN_ZIP=${FILE_STRUCTURE//:/\/}
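            # Illustrative (placeholder names): "example-vendor:example-api.zip"
            # -> FILE_STRUCTURE="example-vendor:example-api"
            # -> ACTUAL_CONTENT_SUBPATH_IN_ZIP="example-vendor/example-api"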
| echo "API call successful (HTTP $STATUS_CODE). ZIP file downloaded to $OUTPUT_ZIP." | |
| # Verify zip file is not empty/tiny (basic check) | |
| if [ ! -s "$OUTPUT_ZIP" ] || [ $(stat -c%s "$OUTPUT_ZIP") -lt 100 ]; then # Check if file exists and is > 100 bytes | |
| echo "::error::Downloaded ZIP file seems empty or too small." | |
| ls -l "$OUTPUT_ZIP" | |
| echo "api_failed=true" >> $GITHUB_OUTPUT | |
| echo "error_response=Downloaded ZIP file from Repotools API is empty or too small. The OpenAPI URL may be invalid or inaccessible." >> $GITHUB_OUTPUT | |
| exit 1 | |
| fi | |
| echo "ZIP file size: $(stat -c%s "$OUTPUT_ZIP") bytes." | |
| fi | |
| echo "zip_file=$OUTPUT_ZIP" >> $GITHUB_OUTPUT | |
| echo "actual_content_subpath_in_zip=$ACTUAL_CONTENT_SUBPATH_IN_ZIP" >> $GITHUB_OUTPUT | |
      - name: Unzip Generated Files and Adjust Structure
        id: unzip_files
        run: |
          # set -x # Enable debugging output
          ZIP_FILE="${{ steps.call_repotools.outputs.zip_file }}"
          ACTUAL_CONTENT_SUBPATH_IN_ZIP="${{ steps.call_repotools.outputs.actual_content_subpath_in_zip }}"
          EXTRACT_TEMP_DIR="openapi_temp_extract"  # Temporary place to unzip at root
          FINAL_OAK_TARGET_BASE="apis/openapi"     # Base for OAK vendor/API dirs in the repo
          echo "Ensuring temporary extraction directory $EXTRACT_TEMP_DIR exists and is empty..."
          rm -rf "$EXTRACT_TEMP_DIR"
          mkdir -p "$EXTRACT_TEMP_DIR"
          echo "Unzipping $ZIP_FILE into $EXTRACT_TEMP_DIR..."
          unzip -o "$ZIP_FILE" -d "$EXTRACT_TEMP_DIR"
          if [ $? -ne 0 ]; then
            echo "::error::Failed to unzip $ZIP_FILE into $EXTRACT_TEMP_DIR."
            exit 1
          fi
          echo "Successfully unzipped files into temporary directory $EXTRACT_TEMP_DIR."
          echo "Contents of $EXTRACT_TEMP_DIR:"
          ls -lA "$EXTRACT_TEMP_DIR"
          # Full path to the source content (meta.json, version dirs) within the unzipped temporary directory
          SOURCE_API_CONTENT_PATH="$EXTRACT_TEMP_DIR/$ACTUAL_CONTENT_SUBPATH_IN_ZIP"
          # Corresponding target path in the OAK repository
          OAK_API_CONTENT_PATH="$FINAL_OAK_TARGET_BASE/$ACTUAL_CONTENT_SUBPATH_IN_ZIP"
          echo "Determined actual content subpath in zip: $ACTUAL_CONTENT_SUBPATH_IN_ZIP"
          echo "Full source API content path (in temp extract): $SOURCE_API_CONTENT_PATH"
          echo "Full OAK API content path (target in repo): $OAK_API_CONTENT_PATH"
          if [ ! -d "$SOURCE_API_CONTENT_PATH" ]; then
            echo "::error::Source API content path '$SOURCE_API_CONTENT_PATH' does not exist after unzipping. Check zip structure and label logic."
            echo "Contents of $EXTRACT_TEMP_DIR:"
            ls -lA "$EXTRACT_TEMP_DIR"
            FIRST_PART_OF_LABEL=$(echo "$ACTUAL_CONTENT_SUBPATH_IN_ZIP" | cut -d'/' -f1)
            echo "Contents of $EXTRACT_TEMP_DIR/$FIRST_PART_OF_LABEL:"
            ls -lA "$EXTRACT_TEMP_DIR/$FIRST_PART_OF_LABEL"
            exit 1
          fi
          echo "Ensuring target OAK API directory '$OAK_API_CONTENT_PATH' exists..."
          mkdir -p "$OAK_API_CONTENT_PATH"
          # Copy version directories from source to OAK target
          echo "Copying version directories from '$SOURCE_API_CONTENT_PATH' to '$OAK_API_CONTENT_PATH'..."
          # Find all directories (version directories like '1.0.0') directly under SOURCE_API_CONTENT_PATH
          find "$SOURCE_API_CONTENT_PATH" -mindepth 1 -maxdepth 1 -type d -print0 | while IFS= read -r -d $'\0' source_version_dir_path; do
            version_dir_name=$(basename "$source_version_dir_path")
            echo "Copying version directory: '$version_dir_name'"
            # Use rsync for a robust copy: create the target version directory if it doesn't exist
            # and sync contents. -a preserves attributes; --checksum compares file contents rather than timestamps.
            rsync -a --checksum "$source_version_dir_path/" "$OAK_API_CONTENT_PATH/$version_dir_name/"
            if [ $? -ne 0 ]; then
              echo "::error::Failed to copy version directory '$version_dir_name' using rsync."
              exit 1
            fi
          done
          echo "Version directories copied successfully."
          # Handle meta.json
          SOURCE_META_JSON="$SOURCE_API_CONTENT_PATH/meta.json"
          OAK_META_JSON="$OAK_API_CONTENT_PATH/meta.json"
          echo "Handling meta.json..."
          echo "Source meta.json: $SOURCE_META_JSON"
          echo "OAK target meta.json: $OAK_META_JSON"
          if [ ! -f "$OAK_META_JSON" ]; then
            echo "$OAK_META_JSON does not exist in OAK. Copying from source..."
            if [ -f "$SOURCE_META_JSON" ]; then
              cp "$SOURCE_META_JSON" "$OAK_META_JSON"
              echo "Copied new meta.json to $OAK_META_JSON."
            else
              echo "::warning::meta.json not found in source at $SOURCE_META_JSON. Cannot copy."
            fi
          else
            echo "Existing meta.json found at $OAK_META_JSON. Not overwriting."
          fi
          # Cleanup temporary files
          echo "Cleaning up temporary extraction directory $EXTRACT_TEMP_DIR..."
          rm -rf "$EXTRACT_TEMP_DIR"
          echo "Cleaning up ZIP file $ZIP_FILE..."
          rm "$ZIP_FILE"
          echo "Cleanup complete."
          echo "oak_api_content_path=$OAK_API_CONTENT_PATH" >> $GITHUB_OUTPUT  # Output the OAK content path
      - name: Check Git Status Before PR Action
        run: |
          echo "--- Git Status After Unzip ---"
          git status
          echo "--- End Git Status ---"
      - name: Find, Prepare, Read, and Remove Repair Log for Commit
        id: prepare_repair_log
        shell: bash
        run: |
          # set -x
          # Search within the path where files were copied into the repo for commit
          SEARCH_PATH="${{ steps.unzip_files.outputs.oak_api_content_path }}"
          if [ -z "$SEARCH_PATH" ]; then
            echo "::warning::Search path for repair log (oak_api_content_path) is empty. Skipping log processing."
            echo "log_found=false" >> $GITHUB_OUTPUT
            echo "log_content_base64=" >> $GITHUB_OUTPUT  # Ensure output is set
            exit 0  # Exit successfully, as this isn't a fatal error for the whole workflow
          fi
          echo "Searching for repair_log.txt in $SEARCH_PATH and its subdirectories..."
          LOG_FILE_CANDIDATE=$(find "$SEARCH_PATH" -name "repair_log.txt" -print -quit)
          if [[ -f "$LOG_FILE_CANDIDATE" ]]; then
            echo "Repair log found at: $LOG_FILE_CANDIDATE"
            echo "--- Start of Repair Log Content ---"
            cat "$LOG_FILE_CANDIDATE"
            echo "--- End of Repair Log Content ---"
            # Read content, encode to base64, and set as output using multiline syntax
            LOG_CONTENT_BASE64=$(base64 -w 0 < "$LOG_FILE_CANDIDATE")
            echo 'log_content_base64<<EOF' >> "$GITHUB_OUTPUT"
            echo "$LOG_CONTENT_BASE64" >> "$GITHUB_OUTPUT"
            echo 'EOF' >> "$GITHUB_OUTPUT"
            echo "Removing original $LOG_FILE_CANDIDATE to prevent it from being committed..."
            rm "$LOG_FILE_CANDIDATE"
            if [ $? -eq 0 ]; then
              echo "Original $LOG_FILE_CANDIDATE removed successfully."
            else
              echo "::warning::Failed to remove original $LOG_FILE_CANDIDATE. It might be committed."
            fi
            echo "log_found=true" >> $GITHUB_OUTPUT
          else
            echo "repair_log.txt not found in $SEARCH_PATH."
            echo "log_found=false" >> $GITHUB_OUTPUT
            echo "log_content_base64=" >> $GITHUB_OUTPUT  # Ensure output is set
          fi
      - name: Create Pull Request
        id: create_pr_step
        uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}  # Make sure this token has write permissions
          commit-message: "feat: Import OpenAPI spec from Issue #${{ github.event.issue.number }}"
          title: "feat: Import OpenAPI spec from Issue #${{ github.event.issue.number }}"
          body: |
            This PR adds/updates Jentic OAK with information from the OpenAPI URL provided in Issue #${{ github.event.issue.number }}.
            **Source URL:** ${{ steps.extract_info.outputs.import_oas_url }}
            Files were automatically generated by the Jentic OpenAPI import service.
            Closes #${{ github.event.issue.number }}
          branch: feat/import-oas-${{ github.event.issue.number }}
          base: main
          delete-branch: true
      - name: Post Repair Log Comment to PR
        if: steps.prepare_repair_log.outputs.log_found == 'true' && steps.create_pr_step.outputs.pull-request-number
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            let logContentBase64 = '${{ steps.prepare_repair_log.outputs.log_content_base64 }}';
            let logContent = '';
            if (logContentBase64) {
              try {
                logContent = Buffer.from(logContentBase64, 'base64').toString('utf8');
              } catch (error) {
                console.error('Error decoding base64 log content:', error);
                logContent = 'Error decoding repair log content.';
              }
            } else if ('${{ steps.prepare_repair_log.outputs.log_found }}' === 'true') {
              logContent = 'Repair log was found but content is missing or could not be retrieved.';
            } else {
              // This branch should not be reached while the step's 'if' condition requires log_found,
              // but it is kept as a safeguard in case that condition is removed.
              console.log('Repair log not found or content unavailable, skipping comment.');
              return; // Exit script gracefully
            }
            const maxLength = 60000; // GitHub comment character limit is ~65536
            let commentBody = `**OpenAPI Spec Repair Log:**\n\n\`\`\`text\n${logContent}\n\`\`\``;
            if (commentBody.length > maxLength) {
              const header = `**OpenAPI Spec Repair Log (truncated):**\n\n\`\`\`text\n`;
              const footer = `\n...\n(Log was truncated due to length.)\n\`\`\``;
              const availableLength = maxLength - header.length - footer.length;
              const truncatedContent = logContent.substring(0, availableLength);
              commentBody = `${header}${truncatedContent}${footer}`;
            }
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: ${{ steps.create_pr_step.outputs.pull-request-number }},
              body: commentBody
            });
      - name: Comment on Issue if Import Failed
        if: failure() && (steps.extract_info.outputs.extract_failed == 'true' || steps.call_repotools.outputs.api_failed == 'true')
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            let errorMessage = "❌ **Failed to import OpenAPI specification**\n\n";
            // Check which step failed and provide an appropriate error message
            if ('${{ steps.extract_info.outputs.extract_failed }}' === 'true') {
              errorMessage += `**Error:** ${{ steps.extract_info.outputs.error_message }}\n\n`;
              errorMessage += "Please check your issue format and ensure it contains the required fields:\n";
              errorMessage += "- \`import_oas_url: <your-openapi-url>\`\n";
              errorMessage += "- \`vendor_name: <your-vendor-name>\`\n\n";
            } else if ('${{ steps.call_repotools.outputs.api_failed }}' === 'true') {
              const errorResponse = `${{ steps.call_repotools.outputs.error_response }}`;
              errorMessage += `**Error:** Import service failed\n\n`;
              try {
                const response = JSON.parse(errorResponse);
                if (response.error) {
                  errorMessage += `**Details:** ${response.error}\n\n`;
                }
                if (response.message) {
                  errorMessage += `**Message:** ${response.message}\n\n`;
                }
              } catch (e) {
                errorMessage += `**Details:** ${errorResponse}\n\n`;
              }
              errorMessage += "Please check that your OpenAPI URL is accessible and contains a valid OpenAPI specification.\n\n";
            }
            errorMessage += "If you believe this is a bug in the import service or need additional support, please contact the development team on the Jentic Community Discord (https://discord.gg/yrxmDZWMqB).\n\n";
            errorMessage += "Alternatively, create a new issue reporting the bug (https://github.com/jentic/jentic-public-apis/issues/new?template=BLANK_ISSUE).";
            await github.rest.issues.createComment({
              issue_number: context.issue.number,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: errorMessage
            });