Files
Skins/.gitea/workflows/test-skins.yml
Arlind 94a9b23b39
Some checks failed
Test Skins / link-check (push) Has been cancelled
Generate Skin previews, OSK files and per skin documentation / Full CI/CD Pipeline (push) Successful in 30s
Update .gitea/workflows/test-skins.yml
2025-11-23 13:16:46 +01:00

168 lines
5.4 KiB
YAML

# CI workflow: scan every Markdown file in the repository and fail the job
# when a link or asset target is broken (dead external URL or missing local file).
name: Test Skins
on:
push:
pull_request:
jobs:
link-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Validate links and assets
shell: bash
run: |
# -u: error on unset variables; pipefail: a pipeline fails if any stage fails.
# NOTE(review): -e is omitted, presumably so a single failing curl/grep does
# not abort the whole scan — confirm this is intentional.
set -uo pipefail
# ANSI color codes for the final report
RED="\033[31m"
GREEN="\033[32m"
RESET="\033[0m"
# Accumulated human-readable error messages; a non-empty array fails the job.
ERRORS=()
############################################################
# FIXED: SAFE URL DECODER (no null bytes!)
############################################################
urldecode() {
local url="$1"
url="${url//+/ }"
# Perl decoding avoids raw NUL bytes and handles unicode correctly
printf '%s' "$url" | perl -pe 's/%([A-Fa-f0-9]{2})/chr(hex($1))/eg'
}
############################################################
# FIXED: Gitea "soft 404" detection (HTML body says 404)
############################################################
# Check an external URL. Returns 0 when the link looks alive:
# the server answers 2xx/3xx AND the fetched body is not a
# "soft 404" page (HTTP 200 whose HTML says the path is missing).
# Tries HEAD first, then falls back to GET for servers that
# reject HEAD requests.
check_http() {
  local url="$1"
  echo " → Checking external: $url"
  local method status body
  for method in HEAD GET; do
    if [[ "$method" == HEAD ]]; then
      status=$(curl -Is --max-time 10 "$url" | head -n1 | awk '{print $2}')
    else
      # -o /dev/null -w '%{http_code}': ask curl for the final status code
      # directly. The old fallback used '-Is -X GET', which still performed
      # a header-only transfer and confused some servers.
      status=$(curl -s -o /dev/null --max-time 10 -w '%{http_code}' "$url")
    fi
    # FIXED: the old test was [[ "$status" =~ ^2|3 ]], which ERE parses as
    # (^2)|(3) and therefore accepted ANY status containing a '3'
    # (403, 503, 530, ...). Anchor both digit classes: ^[23].
    if [[ "$status" =~ ^[23] ]]; then
      body=$(curl -Ls --max-time 10 "$url")
      if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
        return 1
      fi
      return 0
    fi
  done
  return 1
}
############################################################
# Local file existence check
############################################################
# Check that a repository-relative link target exists in the checkout.
# Returns 0 when the (url-decoded) path exists, 1 otherwise.
check_local() {
  local path="$1"
  path="${path#/}"   # absolute repo links are relative to the checkout root
  # FIXED: strip a trailing #fragment so "guide.md#install" resolves to the
  # file "guide.md" instead of being reported as missing.
  path="${path%%#*}"
  # A pure in-page anchor ("#section") leaves an empty path — nothing to check.
  [[ -z "$path" ]] && return 0
  local decoded
  decoded=$(urldecode "$path")
  echo " → Checking local: $decoded"
  if [[ ! -e "$decoded" ]]; then
    return 1
  fi
  return 0
}
############################################################
# Extract *all* link types from a Markdown file
############################################################
# Emit every link/asset target found in a Markdown file, one per line.
# Sources: [text](url), ![alt](url), bare http(s) URLs, <img src>, <video src>.
# FIXED: the individual extractors overlap (images match the plain-link
# pattern too, and every markdown URL is re-matched by the raw-URL grep),
# so each target used to be checked — and reported — multiple times.
# De-duplicate the combined stream with sort -u.
extract_links() {
  local f="$1"
  {
    # Markdown links (also matches the ![alt](url) image form)
    grep -oE '\[[^]]*\]\([^)]*\)' "$f" \
      | sed -E 's/.*\((.*)\).*/\1/'
    # Markdown images
    grep -oE '!\[[^]]*\]\([^)]*\)' "$f" \
      | sed -E 's/.*\((.*)\).*/\1/'
    # Raw URLs
    grep -oE 'https?://[^ )"]+' "$f"
    # <img src="">
    grep -oE '<img[^>]*src="[^"]+"' "$f" \
      | sed -E 's/.*src="([^"]*)".*/\1/'
    # <video src="">
    grep -oE '<video[^>]*src="[^"]+"' "$f" \
      | sed -E 's/.*src="([^"]*)".*/\1/'
  } | sort -u
}
echo "🔍 Scanning Markdown files..."
echo
############################################################
# Process all markdown files
############################################################
# FIXED: this loop used to read from a pipe (find | while ...), which runs
# the loop body in a SUBSHELL — every ERRORS+=() was silently discarded and
# the job could never fail. Process substitution keeps the loop (and the
# ERRORS array) in the current shell.
while IFS= read -r mdfile; do
  echo "📄 Checking: $mdfile"
  ############################################
  # For each file, extract and test links
  ############################################
  while IFS= read -r url; do
    [[ -z "$url" ]] && continue
    [[ "$url" == mailto:* ]] && continue
    # Skip versioned /src/tag links inside docs/ (quietly)
    if [[ "$mdfile" == ./docs/* ]] && [[ "$url" == *"/src/tag/"* ]]; then
      continue
    fi
    # External URLs
    if [[ "$url" == http* ]]; then
      if ! check_http "$url"; then
        ERRORS+=("❌ Broken external link: $url (in $mdfile)")
      fi
    # Local paths
    else
      if ! check_local "$url"; then
        ERRORS+=("❌ Missing local file: $url (in $mdfile)")
      fi
    fi
  done < <(extract_links "$mdfile")
  echo
done < <(find . -type f -name '*.md')
############################################################
# Final error reporting
############################################################
# Summarize the scan: green success message when nothing broke,
# otherwise list every collected error and fail the job.
echo
if (( ${#ERRORS[@]} == 0 )); then
  echo -e "${GREEN}✔ All links OK!${RESET}"
else
  echo -e "${RED}✖ Errors found:${RESET}"
  printf "%s\n" "${ERRORS[@]}"
  echo
  echo -e "${RED}❌ Failing job because broken links were found.${RESET}"
  exit 1
fi