# skins/.gitea/workflows/test-skins.yml
name: Check Links in README

on:
  workflow_dispatch:

jobs:
  check-links:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install link checker
        run: npm install -g markdown-link-check

      - name: Check external links in all markdown files
        continue-on-error: true
        run: |
          # Create config file to handle custom domains and retries
          cat > .markdown-link-check.json << 'EOF'
          {
            "ignorePatterns": [
              {
                "pattern": "^/"
              }
            ],
            "replacementPatterns": [],
            "httpHeaders": [
              {
                "urls": ["https://git.sulej.net"],
                "headers": {
                  "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
                }
              }
            ],
            "timeout": "20s",
            "retryOn429": true,
            "retryCount": 3,
            "fallbackRetryDelay": "30s",
            "aliveStatusCodes": [200, 206, 301, 302, 307, 308]
          }
          EOF
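          # The "^/" ignore pattern skips root-relative links, which the later
          # steps check against the working tree instead; "aliveStatusCodes"
          # also accepts 206 and the common redirect codes so hosts that answer
          # range requests or redirect are not flagged, and retryOn429 backs
          # off when the server rate-limits.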
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🌐 Checking External Links in All Markdown Files"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
# Find all markdown files
find . -name "*.md" -type f | sort > markdown_files.txt
total_files=$(wc -l < markdown_files.txt)
echo "📊 Found $total_files markdown files to check"
echo ""
# Extract and group all external links by context
> all_broken_links.txt
while IFS= read -r file; do
# Run markdown-link-check and capture only errors
markdown-link-check "$file" --config .markdown-link-check.json 2>&1 | grep "^\s*\[✖\]" >> all_broken_links.txt || true
done < markdown_files.txt
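          # markdown-link-check exits non-zero when a file has dead links, and
          # grep exits non-zero when nothing matches; "|| true" absorbs both so
          # one bad file cannot abort the whole scan.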
          # Parse README.md to group links by skin/section
          if [ -f "README.md" ]; then
            echo "📋 Grouping broken links by context..."
            echo ""

            # Count total broken links
            total_broken=$(wc -l < all_broken_links.txt)
            if [ $total_broken -gt 0 ]; then
              echo "❌ Found $total_broken broken external links"
              echo ""

              # Group by category
              declare -A general_links
              declare -A skin_links
              declare -A tag_links

              while IFS= read -r line; do
                url=$(echo "$line" | grep -oP 'https://[^\s]+' || echo "$line" | grep -oP 'http://[^\s]+')

                # Categorize links
                if [[ "$url" == *"/osc/skins"* ]]; then
                  general_links["$url"]=1
                elif [[ "$url" == *"/src/tag/"* ]] || [[ "$url" == *"/media/tag/"* ]]; then
                  tag_links["$url"]=1
                elif [[ "$url" == *"/export/"* ]] || [[ "$url" == *"/media/"* ]]; then
                  # Extract skin name from URL
                  skin_name=$(echo "$url" | grep -oP '/export/[^/]+' | sed 's|/export/||' | head -1)
                  if [ -z "$skin_name" ]; then
                    skin_name="Unknown"
                  fi
                  skin_links["$skin_name"]+="$url"$'\n'
                else
                  general_links["$url"]=1
                fi
              done < all_broken_links.txt
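              # general_links and tag_links are keyed by URL, so duplicate hits
              # collapse automatically; skin_links instead accumulates a
              # newline-separated list of URLs per skin name.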
              # Display grouped results
              if [ ${#general_links[@]} -gt 0 ]; then
                echo "🔗 General Links:"
                for url in "${!general_links[@]}"; do
                  echo "  ❌ $url"
                done
                echo ""
              fi

              if [ ${#skin_links[@]} -gt 0 ]; then
                echo "🎨 Skin-specific Links:"
                for skin in "${!skin_links[@]}"; do
                  echo ""
                  echo "  $skin:"
                  echo "${skin_links[$skin]}" | while IFS= read -r url; do
                    [ -n "$url" ] && echo "    ❌ $url"
                  done
                done
                echo ""
              fi

              if [ ${#tag_links[@]} -gt 0 ]; then
                echo "🏷️ Version Tags:"
                for url in "${!tag_links[@]}"; do
                  echo "  ❌ $url"
                done
                echo ""
              fi
            else
              echo "✅ All external links are valid!"
            fi
          fi
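
      # With continue-on-error set above, broken external links are reported
      # in the log but never fail the run; only the internal reference check
      # below is blocking.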
      - name: Check internal markdown file references
        run: |
          #!/bin/bash
          set -e
          echo ""
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          echo "🔍 Checking Internal Markdown File References in All Markdown Files"
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          echo ""

          # Find all markdown files and extract internal links with context
          > internal_links_with_context.txt
          find . -name "*.md" -type f | while IFS= read -r file; do
            # Extract links with the skin name from headers
            current_skin=""
            while IFS= read -r line; do
              # Check if it's a header (skin name)
              if [[ "$line" =~ ^##[[:space:]]+\[([^\]]+)\] ]]; then
                current_skin="${BASH_REMATCH[1]}"
              fi
              # Check for internal markdown links
              if [[ "$line" =~ \]\((/[^)]+\.md)\) ]]; then
                link="${BASH_REMATCH[1]}"
                echo "$current_skin|$link" >> internal_links_with_context.txt
              fi
            done < "$file"
          done
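          # The inner loop tracks the most recent "## [Skin]" heading so each
          # link is attributed to the skin section it appears under. Piping
          # find into while runs the loop in a subshell, which is fine here
          # because results go to a file rather than into shell variables.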
          total_count=$(wc -l < internal_links_with_context.txt)
          echo "📊 Total internal markdown links found: $total_count"
          echo ""

          # Group by skin and check
          declare -A skin_missing_files
          checked_count=0
          while IFS='|' read -r skin link; do
            checked_count=$((checked_count + 1))
            # Decode URL-encoded characters (the original "s/%7D/}//g" had a
            # stray slash that made sed fail and emptied every decoded link)
            decoded_link=$(echo "$link" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g')
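            # The chain above only decodes the percent-escapes that actually
            # occur in this repo's skin names. A generic decoder could replace
            # it (a sketch, assuming every "%" in the link starts a %HH escape):
            #   decoded_link=$(printf '%b' "${link//%/\\x}")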
            # Remove leading slash to make it relative
            file_path="${decoded_link#/}"

            # Check if file exists
            if [ ! -f "$file_path" ]; then
              if [ -z "$skin" ]; then
                skin="General"
              fi
              skin_missing_files["$skin"]+="$file_path"$'\n'
            fi
          done < internal_links_with_context.txt

          # Report results
          echo ""
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          if [ ${#skin_missing_files[@]} -gt 0 ]; then
            total_missing=0
            for skin in "${!skin_missing_files[@]}"; do
              count=$(echo "${skin_missing_files[$skin]}" | grep -c . || true)
              total_missing=$((total_missing + count))
            done
            echo "❌ RESULT: $total_missing of $total_count internal markdown links point to MISSING files"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo ""
            for skin in "${!skin_missing_files[@]}"; do
              echo "📄 $skin:"
              echo "${skin_missing_files[$skin]}" | while IFS= read -r file; do
                [ -n "$file" ] && echo "  ❌ $file"
              done
              echo ""
            done
            exit 1
          else
            echo "✅ RESULT: All $total_count internal markdown links resolve to existing files!"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          fi
      - name: Check for broken image references
        run: |
          #!/bin/bash
          set -e
          echo ""
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          echo "🖼️ Checking Image References in All Markdown Files"
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          echo ""

          # Extract all image links from all markdown files with context
          > image_links_with_context.txt
          find . -name "*.md" -type f | while IFS= read -r file; do
            current_skin=""
            while IFS= read -r line; do
              # Check if it's a header (skin name)
              if [[ "$line" =~ ^##[[:space:]]+\[([^\]]+)\] ]]; then
                current_skin="${BASH_REMATCH[1]}"
              fi
              # Check for image links (POSIX ERE has no lazy ".*?", so the alt
              # text is matched with "[^]]*" instead)
              if [[ "$line" =~ !\[[^]]*\]\(([^)]+\.(png|jpg|jpeg|gif|webp|svg))\) ]]; then
                link="${BASH_REMATCH[1]}"
                # Skip external URLs
                if [[ ! "$link" =~ ^https?:// ]]; then
                  echo "$current_skin|$link" >> image_links_with_context.txt
                fi
              fi
            done < "$file"
          done
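          # Only local paths reach this file; external image URLs were already
          # covered by markdown-link-check in the first step.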
          total_count=$(wc -l < image_links_with_context.txt)
          echo "📊 Total local image references found: $total_count"
          echo ""

          # Group by skin and check
          declare -A skin_missing_images
          checked_count=0
          while IFS='|' read -r skin link; do
            checked_count=$((checked_count + 1))
            # Decode URL-encoded characters
            decoded_link=$(echo "$link" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g')

            # Remove leading slash for absolute paths
            if [[ "$decoded_link" =~ ^/ ]]; then
              decoded_link="${decoded_link#/}"
            fi

            # Check if file exists
            if [ ! -f "$decoded_link" ]; then
              if [ -z "$skin" ]; then
                skin="General"
              fi
              skin_missing_images["$skin"]+="$decoded_link"$'\n'
            fi
          done < image_links_with_context.txt
          # Report results
          echo ""
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          if [ ${#skin_missing_images[@]} -gt 0 ]; then
            total_missing=0
            for skin in "${!skin_missing_images[@]}"; do
              count=$(echo "${skin_missing_images[$skin]}" | grep -c . || true)
              total_missing=$((total_missing + count))
            done
            echo "⚠️ RESULT: $total_missing of $total_count image files are MISSING (non-blocking)"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
            echo ""
            for skin in "${!skin_missing_images[@]}"; do
              echo "🖼️ $skin:"
              echo "${skin_missing_images[$skin]}" | while IFS= read -r file; do
                [ -n "$file" ] && echo "  ❌ $file"
              done
              echo ""
            done
            # Don't fail the workflow for missing images, just warn
            exit 0
          else
            echo "✅ RESULT: All $total_count image files exist!"
            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          fi