Update .gitea/workflows/test-skins.yml
Some checks failed
Generate Skin previews, OSK files and per skin documentation / Full CI/CD Pipeline (push) Successful in 27s
Test Skins / link-check (push) Has been cancelled

2025-11-23 13:17:35 +01:00
parent a07a19fada
commit 1a4b0507bf


@@ -23,43 +23,47 @@ jobs:
 ERRORS=()
 ############################################################
-# FIXED: SAFE URL DECODER (no null bytes!)
+# Remove all NUL/control characters BEFORE any decoding
+############################################################
+sanitize_url() {
+  printf '%s' "$1" \
+    | tr -d '\000' \
+    | tr -d '\r' \
+    | sed 's/[[:cntrl:]]//g'
+}
+############################################################
+# Safe unicode URL decode (no null bytes)
 ############################################################
 urldecode() {
   local url="$1"
   url="${url//+/ }"
+  # Perl decoding avoids raw NUL bytes and handles unicode correctly
   printf '%s' "$url" | perl -pe 's/%([A-Fa-f0-9]{2})/chr(hex($1))/eg'
 }
 ############################################################
-# FIXED: Gitea "soft 404" detection (HTML body says 404)
+# Detect Gitea "soft 404" even when status=200 OK
 ############################################################
 check_http() {
   local url="$1"
   echo " → Checking external: $url"
-  # HEAD
-  local status
+  local status body
+  # Try HEAD first
   status=$(curl -Is --max-time 10 "$url" | head -n1 | awk '{print $2}')
   if [[ "$status" =~ ^2|3 ]]; then
-    local body
     body=$(curl -Ls --max-time 10 "$url")
     if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
       return 1
     fi
     return 0
   fi
-  # GET fallback
+  # Try GET fallback
   status=$(curl -Is --max-time 10 -X GET "$url" | head -n1 | awk '{print $2}')
   if [[ "$status" =~ ^2|3 ]]; then
-    local body
     body=$(curl -Ls --max-time 10 "$url")
     if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
       return 1
     fi
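
For reference, the two helpers this hunk introduces can be exercised on their own. The snippet below is a minimal sketch, not part of the workflow: the sample string is a placeholder, and it assumes bash plus perl are available (the workflow already relies on perl for urldecode).

# Helpers copied from the hunk above, runnable in a plain shell.
sanitize_url() {
  printf '%s' "$1" | tr -d '\000' | tr -d '\r' | sed 's/[[:cntrl:]]//g'
}
urldecode() {
  local url="$1"
  url="${url//+/ }"
  printf '%s' "$url" | perl -pe 's/%([A-Fa-f0-9]{2})/chr(hex($1))/eg'
}
raw=$'docs/My%20Skin.md\r'                 # placeholder value with a stray CR
cleaned=$(sanitize_url "$raw")             # control characters removed first
printf '%s\n' "$(urldecode "$cleaned")"    # prints: docs/My Skin.md

# Soft-404 idea used by check_http above: a server can answer 200 OK while
# the HTML body still says "not found"; grepping the body catches that case.
body='<title>404 Not Found</title>'        # placeholder body, not a real fetch
echo "$body" | grep -qiE "404 Not Found|File not found|does not exist" \
  && echo "soft 404 detected"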
@@ -74,20 +78,17 @@ jobs:
 ############################################################
 check_local() {
   local path="$1"
-  path="${path#/}"
+  path="${path#/}"   # strip leading slash
   local decoded
   decoded=$(urldecode "$path")
   echo " → Checking local: $decoded"
-  if [[ ! -e "$decoded" ]]; then
-    return 1
-  fi
-  return 0
+  [[ -e "$decoded" ]]
 }
 ############################################################
-# Extract *all* link types from a Markdown file
+# Extract URLs from Markdown + HTML
 ############################################################
 extract_links() {
   local f="$1"
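
The shortened check_local leans on a standard shell idiom: a function's return status is the status of its last command, so [[ -e "$decoded" ]] on its own replaces the explicit if / return 1 / return 0 block. A reduced sketch of just that idiom (the function name and path are illustrative, and url-decoding is omitted here):

check_exists() {                 # illustrative name, not from the workflow
  local path="${1#/}"            # strip leading slash, as above
  [[ -e "$path" ]]               # this test's status becomes the return value
}
check_exists "/README.md" && echo "exists" || echo "missing"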
@@ -115,34 +116,31 @@ jobs:
echo "🔍 Scanning Markdown files..." echo "🔍 Scanning Markdown files..."
echo echo
############################################################
# Process all markdown files
############################################################
find . -type f -name '*.md' | while IFS= read -r mdfile; do find . -type f -name '*.md' | while IFS= read -r mdfile; do
echo "📄 Checking: $mdfile" echo "📄 Checking: $mdfile"
############################################
# For each file, extract and test links
############################################
while IFS= read -r url; do while IFS= read -r url; do
[[ -z "$url" ]] && continue [[ -z "$url" ]] && continue
[[ "$url" == mailto:* ]] && continue [[ "$url" == mailto:* ]] && continue
# Skip versioned /src/tag links inside docs/ (quietly) # Clean input BEFORE use
if [[ "$mdfile" == ./docs/* ]] && [[ "$url" == *"/src/tag/"* ]]; then cleaned=$(sanitize_url "$url")
# Skip tag-version links in docs
if [[ "$mdfile" == ./docs/* ]] && [[ "$cleaned" == *"/src/tag/"* ]]; then
continue continue
fi fi
# External URLs # external
if [[ "$url" == http* ]]; then if [[ "$cleaned" == http* ]]; then
if ! check_http "$url"; then if ! check_http "$cleaned"; then
ERRORS+=("❌ Broken external link: $url (in $mdfile)") ERRORS+=("❌ Broken external link: $cleaned (in $mdfile)")
fi fi
# Local paths # local file
else else
if ! check_local "$url"; then if ! check_local "$cleaned"; then
ERRORS+=("❌ Missing local file: $url (in $mdfile)") ERRORS+=("❌ Missing local file: $cleaned (in $mdfile)")
fi fi
fi fi
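
One side note on the surrounding loop, which this hunk leaves in place: find ... | while ... is a pipeline, so default bash runs the while body in a subshell, and ERRORS+= entries appended inside it are not visible once the pipeline ends. The sketch below shows the usual process-substitution form that keeps the array in the parent shell; it is offered only for comparison and is not what this commit does:

ERRORS=()
while IFS= read -r mdfile; do
  ERRORS+=("placeholder entry for $mdfile")   # survives the loop: no subshell
done < <(find . -type f -name '*.md')
echo "collected: ${#ERRORS[@]} entries"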
@@ -152,7 +150,7 @@ jobs:
 done
 ############################################################
-# Final error reporting
+# Final result
 ############################################################
 echo
 if (( ${#ERRORS[@]} > 0 )); then
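
The diff view ends inside this final check, so the remaining lines are not visible here. A typical completion of the pattern looks like the sketch below; the messages and exit code are illustrative, not the workflow's actual text:

if (( ${#ERRORS[@]} > 0 )); then
  printf '%s\n' "${ERRORS[@]}"          # list every broken link collected
  echo "Found ${#ERRORS[@]} broken link(s)"
  exit 1                                # fail the link-check job
fi
echo "✅ All links OK"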