feat(blog): Add article about advanced porting posix libs #238
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Validate newly added blog articles on pull requests:
# folder/date consistency, summary presence, and author rendering.
name: Check article details

# Run only for PRs targeting main that touch blog content.
on:
  pull_request:
    branches:
      - main
    paths:
      - 'content/blog/**'

env:
  # Upstream repository/branch used as a comparison baseline by the diff step.
  TARGET_REPO_URL: "https://github.com/espressif/developer-portal.git"
  TARGET_BRANCH: "main"
  # Hugo version installed by the author-presence step (pinned for reproducibility).
  HUGO_VERSION: 0.147.5
jobs:
  check-article-details:
    runs-on: ubuntu-latest
    steps:
      # Full-history checkout (fetch-depth: 0) so the three-dot diff against
      # the base branch in the next step can find the merge base; submodules
      # are checked out recursively for the later Hugo build.
      - name: Checkout PR contents
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: recursive
| - name: Get added folders and files | |
| id: added-files | |
| run: | | |
| # Add target remote and fetch its branches | |
| git remote add target "$TARGET_REPO_URL" | |
| git fetch target "$TARGET_BRANCH" | |
| mkdir -p temp | |
| git diff --name-only --diff-filter=A origin/${{ github.base_ref }}...HEAD > temp/added-files.txt | |
| if [ ! -s temp/added-files.txt ]; then | |
| echo "✅ No files added. Nothing to check." | |
| exit 0 | |
| fi | |
| echo "List of added files:" | |
| cat temp/added-files.txt | |
| while IFS= read -r line; do | |
| if [[ "$line" =~ ^content/blog/.*/index.md$ ]]; then | |
| echo "$line" >> temp/index-files.txt | |
| fi | |
| done < temp/added-files.txt | |
| if [ ! -s temp/index-files.txt ]; then | |
| echo "✅ No index.md files added. Nothing to validate." | |
| exit 0 | |
| fi | |
| echo "List of added index files:" | |
| cat temp/index-files.txt | |
      # Parse each new article's path and YAML front matter into a JSON
      # object keyed by article id, consumed by the validation step.
      - name: Extract article details
        if: success() && hashFiles('temp/index-files.txt') != ''
        run: |
          extracted_article_data="[]"
          while IFS= read -r index_file; do
            relative_path=$(dirname "${index_file#content/blog/}")
            # Extract article_id - last directory of the path
            ARTICLE_ID=$(basename "$relative_path")
            FOLDER_YM=""
            # Expected layout is YYYY/MM/<article>; capture "YYYY-MM" when it matches,
            # otherwise FOLDER_YM stays empty and validation reports a wrong folder.
            if [[ "$relative_path" =~ ^([0-9]{4})/([0-9]{2})/[^/]+$ ]]; then
              FOLDER_YM="${BASH_REMATCH[1]}-${BASH_REMATCH[2]}"
            fi
            # 'date:' value from the front matter with quotes stripped.
            # NOTE(review): assumes the date is the second whitespace-separated
            # field on the line — confirm multi-field date values cannot occur.
            ARTICLE_DATE=$(awk '/^date:/ { gsub(/["'\'']/, "", $2); print $2 }' "$index_file")
            [[ -z "$ARTICLE_DATE" ]] && { echo "No valid date in $index_file"; continue; }
            # First 7 characters of an ISO date: "YYYY-MM".
            ARTICLE_YM="${ARTICLE_DATE:0:7}"
            # 'summary:' line with the key, surrounding quotes, and edge
            # whitespace stripped; 'exit' keeps only the first match.
            ARTICLE_SUMMARY_RAW=$(awk '/^summary:/ { $1=""; gsub(/^[: \t'\''"]+|["'\'' \t]+$/, "", $0); print; exit }' "$index_file")
            # If summary is empty or not found, set to empty string
            ARTICLE_SUMMARY_RAW=${ARTICLE_SUMMARY_RAW:-""}
            # Extract up to first 5 words if ARTICLE_SUMMARY_RAW is not empty
            if [[ -n "$ARTICLE_SUMMARY_RAW" ]]; then
              read -r -a words <<< "$ARTICLE_SUMMARY_RAW"
              ARTICLE_SUMMARY="${words[0]:-}"
              for i in {1..4}; do
                [[ -n "${words[i]:-}" ]] && ARTICLE_SUMMARY+=" ${words[i]}"
              done
            else
              ARTICLE_SUMMARY=""
            fi
            # ARTICLE_SUMMARY=$(awk -F': "' '/^summary: "/ {gsub(/"/, "", $2); split($2, a, " "); print a[1], a[2], a[3], a[4], a[5]}' "$index_file")
            # [[ -z "$ARTICLE_SUMMARY" ]] && { echo "No valid summary in $index_file"; continue; }
            # Build this article's record with jq so all values are safely
            # JSON-escaped, then append it to the accumulator array.
            new_item=$(jq -n \
              --arg id "$ARTICLE_ID" \
              --arg fym "$FOLDER_YM" \
              --arg aym "$ARTICLE_YM" \
              --arg ad "$ARTICLE_DATE" \
              --arg as "$ARTICLE_SUMMARY" \
              '{
                article_id: $id,
                folder_ym: $fym,
                article_ym: $aym,
                article_date: $ad,
                article_summary: $as
              }')
            extracted_article_data=$(jq --argjson item "$new_item" '. + [$item]' <<<"$extracted_article_data")
          done < temp/index-files.txt
          # Build final object keyed by article_id
          echo "$extracted_article_data" | jq 'reduce .[] as $item ({}; .[$item.article_id] = {
            folder_ym: $item.folder_ym,
            article_ym: $item.article_ym,
            article_date: $item.article_date,
            article_summary: $item.article_summary
          })' > temp/extracted_article_data.json
          echo "Extracted article data:"
          cat temp/extracted_article_data.json
      # Check every extracted article against the decision tree below and
      # fail the step (exit 1) if any article violates a rule.
      - name: Validate article details
        if: success() && hashFiles('temp/index-files.txt') != ''
        run: |
          job_error=0
          extracted_article_data="temp/extracted_article_data.json"
          YM_REGEX='^[0-9]{4}-[0-9]{2}$'
          DATE_REGEX='^[0-9]{4}-[0-9]{2}-[0-9]{2}$'
          # Get current year and month as strings
          current_ym=$(date +%Y-%m)
          current_date=$(date +%Y-%m-%d)
          while read -r article_key; do
            article_error=0
            folder_ym=$(jq -r --arg key "$article_key" '.[$key].folder_ym' "$extracted_article_data")
            article_ym=$(jq -r --arg key "$article_key" '.[$key].article_ym' "$extracted_article_data")
            article_date=$(jq -r --arg key "$article_key" '.[$key].article_date' "$extracted_article_data")
            article_summary=$(jq -r --arg key "$article_key" '.[$key].article_summary' "$extracted_article_data")
            echo
            echo "Article: $article_key"
            # Report if folder year or month is empty and skip
            if ! [[ "$folder_ym" =~ $YM_REGEX ]]; then
              echo "❌ Wrong folder."
              echo "   Move the article to a present or future 'content/blog/YYYY/MM/' folder and add a matching publishing date in the article's YAML header."
              article_error=1
            fi
            # Report if article date format is invalid and skip
            if [[ ! "$article_date" =~ $DATE_REGEX ]]; then
              echo "❌ Invalid date format in the article's YAML header: $article_date."
              echo "   Use YYYY-MM-DD."
              article_error=1
            fi
            # Skip if folder year/month or article date are empty,
            # or the following code will have issues
            if [ "$article_error" -eq 1 ]; then
              job_error=1
              continue
            fi
            # Check article details against this decision tree
            #
            # Is folder_ym = current_ym?
            # ├── Yes → Is article_date < current_date?
            # │         ├── Yes → Warn: article date is in the past
            # │         └── No  → OK
            # └── No  → Is folder_ym > current_ym?
            #           ├── Yes → Is folder_ym != article_ym?
            #           │         ├── Yes → Warn: Make folder_ym and article_date consistent
            #           │         └── No  → OK
            #           └── No  → Warn: folder_year and folder_month are in the past
            #
            # Lexicographic string comparison (\< and \>) is valid here
            # because both operands are zero-padded ISO date strings.
            if [ "$folder_ym" = "$current_ym" ]; then
              if [ "$article_date" \< "$current_date" ]; then
                echo "❌ Publishing date in the article's YAML header is in the past: $article_date."
                echo "   Once the article is approved, update the date to a present or future day and make sure it matches the article's 'content/blog/YYYY/MM/' folder."
                article_error=1
              fi
            elif [ "$folder_ym" \> "$current_ym" ]; then
              if [ "$folder_ym" != "$article_ym" ]; then
                echo "❌ Article's YYYY/MM folder and publishing date are inconsistent: $folder_ym and $article_date."
                echo "   Move the article to a present or future 'content/blog/YYYY/MM/' folder and add a matching publishing date in the article's YAML header."
                article_error=1
              fi
            else
              echo "❌ Wrong folder: $folder_ym."
              echo "   Move the article to a present or future 'content/blog/YYYY/MM/' folder and add a matching publishing date in the article's YAML header."
              article_error=1
            fi
            # Report if article summary is not provided
            if [ -z "$article_summary" ]; then
              echo "❌ Missing summary."
              echo "   Add the summary in the article's YAML header:"
              echo "   summary: \"This is my summary.\""
              article_error=1
            elif [[ "$article_summary" == Replace\ it\ with* ]]; then
              echo "❌ Placeholder summary found."
              echo "   Update the summary in the article's YAML header."
              article_error=1
            fi
            if [ "$article_error" -eq 0 ]; then
              echo "✅ OK"
            else
              job_error=1
            fi
          # Process substitution (not a pipe) keeps the loop in the current
          # shell so job_error survives the loop.
          done < <(jq -r 'to_entries[] | .key' "$extracted_article_data")
          echo
          exit "$job_error"
| - name: Validate author presence | |
| if: always() && hashFiles('temp/index-files.txt') != '' | |
| run: | | |
| wget --progress=dot:giga -O ${{ runner.temp }}/hugo.deb https://github.com/gohugoio/hugo/releases/download/v${HUGO_VERSION}/hugo_extended_${HUGO_VERSION}_linux-amd64.deb | |
| sudo dpkg -i ${{ runner.temp }}/hugo.deb | |
| job_author_error=0 | |
| HUGO_TEMP_CONFIG="temp/hugo-author-check.toml" | |
| INDEX_LIST="temp/index-files.txt" | |
| mkdir -p temp | |
| cp config/_default/hugo.toml "$HUGO_TEMP_CONFIG" | |
| # Create a temp config that mounts folders with new aricles only | |
| echo '[module]' >> "$HUGO_TEMP_CONFIG" | |
| while IFS= read -r line; do | |
| dir="$(dirname "$line")" | |
| cat <<EOF >> "$HUGO_TEMP_CONFIG" | |
| [[module.mounts]] | |
| source = "$dir" | |
| target = "$dir" | |
| EOF | |
| done < "$INDEX_LIST" | |
| # Build folders with new articles only | |
| hugo \ | |
| --environment preview \ | |
| --config "$HUGO_TEMP_CONFIG" \ | |
| > /dev/null 2>&1 | |
| # Check if html files were built successfully | |
| while IFS= read -r md_file; do | |
| # Strip content/ and .md | |
| relative_path="${md_file#content/}" | |
| html_path="public/${relative_path%.md}.html" | |
| echo | |
| echo "Article: $html_path" | |
| if [[ ! -f "$html_path" ]]; then | |
| echo "❌ File not found: $html_path" | |
| job_author_error=1 | |
| fi | |
| # Scan html div blocks for presence of author and author-extra | |
| # classes; their presence ensures the presence of author entry | |
| author_divs=$(grep -oE '<div[^>]+class="[^"]*\bauthor\b[^"]*"' "$html_path" 2>/dev/null || true) | |
| if [[ -z "$author_divs" ]]; then | |
| echo "❌ Author not found" | |
| job_author_error=1 | |
| elif echo "$author_divs" | grep -q '\bauthor-extra\b'; then | |
| echo "✅ Non-default author is found" | |
| else | |
| echo "✅ Default author is found" | |
| fi | |
| done < "$INDEX_LIST" | |
| echo | |
| exit "$job_author_error" |