diff --git a/.gitea/workflows/test-skins.yml b/.gitea/workflows/test-skins.yml
index 486a6be..c071503 100644
--- a/.gitea/workflows/test-skins.yml
+++ b/.gitea/workflows/test-skins.yml
@@ -39,10 +39,13 @@ jobs:
           file_has_errors=0
 
           # Extract ALL links from the markdown file
-          # Match both []() and direct URLs - using Perl regex for better handling
           {
+            # Markdown links []()
             grep -oP '\]\(([^\)]+)\)' "$md_file" 2>/dev/null | sed 's/](\(.*\))/\1/' || true
+            # Image links ![]()
             grep -oP '!\[[^\]]*\]\(([^\)]+)\)' "$md_file" 2>/dev/null | sed 's/!\[.*\](\(.*\))/\1/' || true
+            # Video src attributes
+            grep -oP '<video[^>]+src="([^"]+)"' "$md_file" 2>/dev/null | sed 's/.*src="\([^"]*\)".*/\1/' || true
           } > /tmp/links_$$.txt
 
           link_count=$(wc -l < /tmp/links_$$.txt 2>/dev/null || echo "0")
@@ -56,6 +59,12 @@ jobs:
           echo " 📊 Found $link_count links to check"
           echo ""
 
+          # Categorize and check links
+          declare -A download_links
+          declare -A media_links
+          declare -A tag_links
+          declare -A video_links
+
           # Check each link
           while IFS= read -r link; do
             [ -z "$link" ] && continue
@@ -63,46 +72,95 @@ jobs:
             # Decode URL-encoded characters
             decoded_link=$(echo "$link" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g')
 
+            # Determine link category and validation status
+            status="✅"
+
             # Check if it's an external URL
             if [[ "$decoded_link" =~ ^https?:// ]]; then
-              # URL-encode the link for curl (convert spaces and special chars)
-              # Use the original encoded link, not the decoded one
-              encoded_url="$link"
-
-              # Check external URL with curl (with User-Agent header for better compatibility)
+              # Check external URL with curl
               http_code=$(curl -s -o /dev/null -w "%{http_code}" --max-time 10 \
                 -H "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" \
-                "$encoded_url" 2>/dev/null || echo "000")
+                "$link" 2>/dev/null || echo "000")
 
               # Accept 2xx and 3xx status codes as valid
-              if [[ "$http_code" =~ ^[23][0-9][0-9]$ ]]; then
-                echo " ✅ $decoded_link"
-              else
-                echo " ❌ $decoded_link (HTTP $http_code)"
+              if ! [[ "$http_code" =~ ^[23][0-9][0-9]$ ]]; then
+                status="❌ (HTTP $http_code)"
                 file_has_errors=1
                 has_errors=1
               fi
+
+              # Categorize external URLs
+              if [[ "$decoded_link" =~ /export/.*\.(osk|osz)$ ]]; then
+                download_links["$decoded_link"]="$status"
+              elif [[ "$decoded_link" =~ /media/gameplay/.*\.(mp4|webm)$ ]]; then
+                video_links["$decoded_link"]="$status"
+              elif [[ "$decoded_link" =~ /src/tag/ ]]; then
+                tag_links["$decoded_link"]="$status"
+              elif [[ "$decoded_link" =~ /media/(panel|icons|thumbnail)/ ]]; then
+                media_links["$decoded_link"]="$status"
+              else
+                download_links["$decoded_link"]="$status"
+              fi
             else
               # Local file - remove leading slash if present
               if [[ "$decoded_link" =~ ^/ ]]; then
                 file_path="${decoded_link#/}"
               else
-                # Relative path - resolve from markdown file location
                 md_dir=$(dirname "$md_file")
                 file_path="$md_dir/$decoded_link"
               fi
 
-              # Check if file exists (using test -f which handles spaces better)
-              if [ -f "$file_path" ]; then
-                echo " ✅ $decoded_link"
-              else
-                echo " ❌ $decoded_link (file not found at: $file_path)"
+              # Check if file exists
+              if ! [ -f "$file_path" ]; then
+                status="❌ (not found)"
                 file_has_errors=1
                 has_errors=1
               fi
+
+              # Categorize local files
+              if [[ "$decoded_link" =~ \.(webp|png|jpg|jpeg)$ ]]; then
+                media_links["$decoded_link"]="$status"
+              elif [[ "$decoded_link" =~ \.md$ ]]; then
+                tag_links["$decoded_link"]="$status"
+              else
+                download_links["$decoded_link"]="$status"
+              fi
             fi
           done < /tmp/links_$$.txt
 
+          # Display categorized results
+          if [ ${#download_links[@]} -gt 0 ]; then
+            echo "download:"
+            for link in "${!download_links[@]}"; do
+              echo " ${download_links[$link]} $link"
+            done
+            echo ""
+          fi
+
+          if [ ${#media_links[@]} -gt 0 ]; then
+            echo "media:"
+            for link in "${!media_links[@]}"; do
+              echo " ${media_links[$link]} $link"
+            done
+            echo ""
+          fi
+
+          if [ ${#video_links[@]} -gt 0 ]; then
+            echo "video:"
+            for link in "${!video_links[@]}"; do
+              echo " ${video_links[$link]} $link"
+            done
+            echo ""
+          fi
+
+          if [ ${#tag_links[@]} -gt 0 ]; then
+            echo "tags:"
+            for link in "${!tag_links[@]}"; do
+              echo " ${tag_links[$link]} $link"
+            done
+            echo ""
+          fi
+
           rm -f /tmp/links_$$.txt
 
           if [ $file_has_errors -eq 0 ]; then