Update .gitea/workflows/test-skins.yml
Some checks failed
Generate Skin previews, OSK files and per-skin documentation / Full CI/CD Pipeline (push) Successful in 28s
Test Skins / link-check (push) Has been cancelled

This commit is contained in:
2025-11-23 13:18:31 +01:00
parent 2c1920624d
commit 92a640bad8

View File

@@ -23,17 +23,7 @@ jobs:
ERRORS=()
############################################################
# sanitize_url STR
#   Print STR with every control character removed
#   (NUL, CR, LF, TAB, ... — the whole [:cntrl:] class).
#   Must run BEFORE any decoding/matching so later sed/grep
#   stages never see raw NUL bytes.
#   One tr replaces the old tr|tr|sed pipeline: [:cntrl:]
#   already covers \000 and \r, so three processes were two
#   too many.
############################################################
sanitize_url() {
  printf '%s' "$1" | tr -d '[:cntrl:]'
}
############################################################
# Safe unicode URL decode (no null bytes)
# Safe unicode URL decode (never produces NULL bytes)
############################################################
urldecode() {
local url="$1"
@@ -42,7 +32,7 @@ jobs:
}
############################################################
# Detect Gitea "soft 404" even when status=200 OK
# Detect Gitea "soft 404" even when HTTP=200 OK
############################################################
check_http() {
local url="$1"
@@ -50,21 +40,19 @@ jobs:
local status body
# Try HEAD first
status=$(curl -Is --max-time 10 "$url" | head -n1 | awk '{print $2}')
if [[ "$status" =~ ^2|3 ]]; then
body=$(curl -Ls --max-time 10 "$url")
if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
if echo "$body" | grep -qiE "404 Not Found|doesn't exist|File not found|Not Found"; then
return 1
fi
return 0
fi
# Try GET fallback
status=$(curl -Is --max-time 10 -X GET "$url" | head -n1 | awk '{print $2}')
if [[ "$status" =~ ^2|3 ]]; then
body=$(curl -Ls --max-time 10 "$url")
if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
if echo "$body" | grep -qiE "404 Not Found|doesn't exist|File not found|Not Found"; then
return 1
fi
return 0
@@ -74,7 +62,7 @@ jobs:
}
############################################################
# Local file existence check
# Local file check
############################################################
check_local() {
local path="$1"
@@ -88,27 +76,22 @@ jobs:
}
############################################################
# extract_links FILE
#   Print every link target found in FILE, one per line:
#     - markdown links  [text](url)   (this pattern also
#       matches the tail of images ![alt](url), so no
#       separate image pass is needed — the old one emitted
#       every image URL twice)
#     - bare http(s):// URLs
#     - src attributes of <img> and <video> tags
#   Output is de-duplicated so each target is checked once,
#   even when it appears via several patterns.
############################################################
extract_links() {
  local f="$1"
  {
    # [text](url) — covers ![alt](url) images as well.
    grep -oE '\[[^]]*\]\([^)]*\)' "$f" \
      | sed -E 's/.*\((.*)\).*/\1/'
    # Raw URLs outside markdown/HTML syntax.
    grep -oE 'https?://[^ )"]+' "$f"
    # <img src="..."> and <video src="..."> in one pass.
    grep -oE '<(img|video)[^>]*src="[^"]+"' "$f" \
      | sed -E 's/.*src="([^"]*)".*/\1/'
  } | sort -u
  # Always succeed: "no links found" is not an error, and a
  # non-zero grep status must not trip set -e in the caller.
  return 0
}
@@ -116,6 +99,9 @@ jobs:
echo "🔍 Scanning Markdown files..."
echo
############################################################
# Process all markdown files
############################################################
find . -type f -name '*.md' | while IFS= read -r mdfile; do
echo "📄 Checking: $mdfile"
@@ -123,40 +109,36 @@ jobs:
[[ -z "$url" ]] && continue
[[ "$url" == mailto:* ]] && continue
# Clean input BEFORE use
cleaned=$(sanitize_url "$url")
# Skip tag-version links in docs
if [[ "$mdfile" == ./docs/* ]] && [[ "$cleaned" == *"/src/tag/"* ]]; then
# Skip versioned docs tag links
if [[ "$mdfile" == ./docs/* ]] && [[ "$url" == *"/src/tag/"* ]]; then
continue
fi
# external
if [[ "$cleaned" == http* ]]; then
if ! check_http "$cleaned"; then
ERRORS+=("❌ Broken external link: $cleaned (in $mdfile)")
# External links
if [[ "$url" == http* ]]; then
if ! check_http "$url"; then
ERRORS+=("❌ Broken external link: $url (in $mdfile)")
fi
# local file
else
if ! check_local "$cleaned"; then
ERRORS+=("❌ Missing local file: $cleaned (in $mdfile)")
# Local links
if ! check_local "$url"; then
ERRORS+=("❌ Missing local file: $url (in $mdfile)")
fi
fi
done < <(extract_links "$mdfile")
# 🌟 FULL FIX: sanitize extract_links output BEFORE loop
done < <(extract_links "$mdfile" | tr -d '\000' | tr -d '\r' | sed 's/[[:cntrl:]]//g')
echo
done
############################################################
# Final result
# Final report
############################################################
echo
if (( ${#ERRORS[@]} > 0 )); then
echo -e "${RED}✖ Errors found:${RESET}"
printf "%s\n" "${ERRORS[@]}"
echo
echo -e "${RED}❌ Failing job because broken links were found.${RESET}"
exit 1