diff --git a/.gitea/workflows/test-skins.yml b/.gitea/workflows/test-skins.yml
index deff6f6..486a6be 100644
--- a/.gitea/workflows/test-skins.yml
+++ b/.gitea/workflows/test-skins.yml
@@ -11,133 +11,6 @@ jobs:
       - name: Checkout repository
        uses: actions/checkout@v4
 
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: '20'
-
-      - name: Install link checker
-        run: npm install -g markdown-link-check
-
-      - name: Check external links in all markdown files
-        continue-on-error: true
-        run: |
-          # Create config file to handle custom domains and retries
-          cat > .markdown-link-check.json << 'EOF'
-          {
-            "ignorePatterns": [
-              {
-                "pattern": "^/"
-              }
-            ],
-            "replacementPatterns": [],
-            "httpHeaders": [
-              {
-                "urls": ["https://git.sulej.net"],
-                "headers": {
-                  "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
-                }
-              }
-            ],
-            "timeout": "20s",
-            "retryOn429": true,
-            "retryCount": 3,
-            "fallbackRetryDelay": "30s",
-            "aliveStatusCodes": [200, 206, 301, 302, 307, 308]
-          }
-          EOF
-
-          echo ""
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo "🌐 Checking External Links in All Markdown Files"
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo ""
-
-          # Find all markdown files
-          find . -name "*.md" -type f | sort > markdown_files.txt
-          total_files=$(wc -l < markdown_files.txt)
-
-          echo "📊 Found $total_files markdown files to check"
-          echo ""
-
-          # Extract and group all external links by context
-          > all_broken_links.txt
-
-          while IFS= read -r file; do
-            # Run markdown-link-check and capture only errors
-            markdown-link-check "$file" --config .markdown-link-check.json 2>&1 | grep "^\s*\[✖\]" >> all_broken_links.txt || true
-          done < markdown_files.txt
-
-          # Parse README.md to group links by skin/section
-          if [ -f "README.md" ]; then
-            echo "📋 Grouping broken links by context..."
-            echo ""
-
-            # Count total broken links
-            total_broken=$(wc -l < all_broken_links.txt)
-
-            if [ $total_broken -gt 0 ]; then
-              echo "❌ Found $total_broken broken external links"
-              echo ""
-
-              # Group by category
-              declare -A general_links
-              declare -A skin_links
-              declare -A tag_links
-
-              while IFS= read -r line; do
-                url=$(echo "$line" | grep -oP 'https://[^\s]+' || echo "$line" | grep -oP 'http://[^\s]+')
-
-                # Categorize links
-                if [[ "$url" == *"/osc/skins"* ]]; then
-                  general_links["$url"]=1
-                elif [[ "$url" == *"/src/tag/"* ]] || [[ "$url" == *"/media/tag/"* ]]; then
-                  tag_links["$url"]=1
-                elif [[ "$url" == *"/export/"* ]] || [[ "$url" == *"/media/"* ]]; then
-                  # Extract skin name from URL
-                  skin_name=$(echo "$url" | grep -oP '/export/[^/]+' | sed 's|/export/||' | head -1)
-                  if [ -z "$skin_name" ]; then
-                    skin_name="Unknown"
-                  fi
-                  skin_links["$skin_name"]+="$url"$'\n'
-                else
-                  general_links["$url"]=1
-                fi
-              done < all_broken_links.txt
-
-              # Display grouped results
-              if [ ${#general_links[@]} -gt 0 ]; then
-                echo "🔗 General Links:"
-                for url in "${!general_links[@]}"; do
-                  echo "  ❌ $url"
-                done
-                echo ""
-              fi
-
-              if [ ${#skin_links[@]} -gt 0 ]; then
-                echo "🎨 Skin-specific Links:"
-                for skin in "${!skin_links[@]}"; do
-                  echo ""
-                  echo "  $skin:"
-                  echo "${skin_links[$skin]}" | while IFS= read -r url; do
-                    [ -n "$url" ] && echo "    ❌ $url"
-                  done
-                done
-                echo ""
-              fi
-
-              if [ ${#tag_links[@]} -gt 0 ]; then
-                echo "🏷️  Version Tags:"
-                for url in "${!tag_links[@]}"; do
-                  echo "  ❌ $url"
-                done
-                echo ""
-              fi
-            else
-              echo "✅ All external links are valid!"
-            fi
-          fi
-
       - name: Check all links in markdown files
        run: |
          #!/bin/bash