Update .gitea/workflows/test-skins.yml (generated from osc/skins-template)
@@ -22,52 +22,56 @@ jobs:
 ERRORS=()

 ############################################################
 # FIXED: SAFE URL DECODER (no null bytes!)
 ############################################################
 urldecode() {
-  printf '%b' "${1//%/\\x}"
+  local url="$1"
+  url="${url//+/ }"
+  # Perl decoding avoids raw NUL bytes and handles unicode correctly
+  printf '%s' "$url" | perl -pe 's/%([A-Fa-f0-9]{2})/chr(hex($1))/eg'
 }
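The old one-liner was unsafe because printf '%b' "${1//%/\\x}" expands every %XX into a raw byte, so %00 injects a NUL that the shell cannot hold in a variable, and a literal % not followed by two hex digits garbles the output. A quick sanity check of the new decoder (sample paths invented for illustration):

    urldecode 'docs/My%20Skin+v2.md'   # → docs/My Skin v2.md
    urldecode '100%25%20done'          # → 100% done (literal percent survives)
    urldecode 'caf%C3%A9.png'          # → café.png (multi-byte UTF-8 intact)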

-##############################################
-# FIXED: Gitea soft-404 detection
-##############################################
+############################################################
+# FIXED: Gitea "soft 404" detection (HTML body says 404)
+############################################################
 check_http() {
   local url="$1"
   echo " → Checking external: $url"

-  # First try HEAD
+  # HEAD
   local status
   status=$(curl -Is --max-time 10 "$url" | head -n1 | awk '{print $2}')

   if [[ "$status" =~ ^[23] ]]; then
     # Need to check if page content contains a Gitea 404 page
     local body
     body=$(curl -Ls --max-time 10 "$url")

-    if echo "$body" | grep -qiE "404 Not Found|doesn't exist|File not found|File does not exist|Not Found"; then
+    if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
       return 1
     fi

     return 0
   fi

-  # HEAD wasn't 2xx or 3xx → try GET
+  # GET fallback
   status=$(curl -Is --max-time 10 -X GET "$url" | head -n1 | awk '{print $2}')

   if [[ "$status" =~ ^[23] ]]; then
     local body
     body=$(curl -Ls --max-time 10 "$url")

-    if echo "$body" | grep -qiE "404 Not Found|doesn't exist|File not found|File does not exist|Not Found"; then
+    if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
       return 1
     fi

     return 0
   fi

   return 1
 }
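The double probe matters because Gitea can answer 200 OK for a missing path while the HTML body announces the 404, which is why the body grep runs even on a 2xx/3xx status. An illustration with a made-up URL:

    # Hypothetical raw-file URL whose file was deleted: status 200, body says 404.
    if ! check_http "https://git.example.org/osc/skins/raw/branch/main/gone.png"; then
      echo "soft 404 caught"
    fi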

-##############################################
-# Local path check
-##############################################
+############################################################
+# Local file existence check
+############################################################
 check_local() {
   local path="$1"
   path="${path#/}"
@@ -82,9 +86,9 @@ jobs:
   return 0
 }
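check_local's body is mostly cut off by the hunk boundary; the visible lines only strip a leading slash, so absolute links presumably resolve against the repo checkout. A usage sketch under that assumption (paths invented):

    check_local "/assets/preview.png"   # passes iff ./assets/preview.png exists
    check_local "docs/setup.md"         # relative paths are tested as-is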

-##############################################
-# Extract all URL types from a Markdown file
-##############################################
+############################################################
+# Extract *all* link types from a Markdown file
+############################################################
 extract_links() {
   local f="$1"
@@ -99,11 +103,11 @@ jobs:
   # Raw URLs
   grep -oE 'https?://[^ )"]+' "$f"

-  # HTML <img src="">
+  # <img src="">
   grep -oE '<img[^>]*src="[^"]+"' "$f" \
     | sed -E 's/.*src="([^"]*)".*/\1/'

-  # HTML <video src="">
+  # <video src="">
   grep -oE '<video[^>]*src="[^"]+"' "$f" \
     | sed -E 's/.*src="([^"]*)".*/\1/'
 }
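The Markdown [text](url) extraction sits in the lines elided above this hunk; the visible part only adds bare URLs plus <img> and <video> sources. Note that the raw-URL pattern stops at a space, closing paren, or quote so it does not swallow surrounding Markdown. Sample output for an invented file:

    # Given a file containing:
    #   <img src="assets/preview.png">
    #   See https://example.com/skin.zip for the download.
    # extract_links emits one candidate per line:
    #   https://example.com/skin.zip
    #   assets/preview.png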
@@ -111,17 +115,20 @@ jobs:
 echo "🔍 Scanning Markdown files..."
 echo

-##############################################
-# Loop through all Markdown files
-##############################################
+############################################################
+# Process all markdown files
+############################################################
 find . -type f -name '*.md' | while IFS= read -r mdfile; do
   echo "📄 Checking: $mdfile"

   ############################################
   # For each file, extract and test links
   ############################################
   while IFS= read -r url; do
     [[ -z "$url" ]] && continue
     [[ "$url" == mailto:* ]] && continue

-    # Skip tag links inside docs/ (quietly)
+    # Skip versioned /src/tag links inside docs/ (quietly)
     if [[ "$mdfile" == ./docs/* ]] && [[ "$url" == *"/src/tag/"* ]]; then
       continue
     fi
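One bash caveat worth flagging: the loop sits on the right-hand side of a pipe, so it runs in a subshell, and any ERRORS+=() performed inside it cannot reach the final report in the parent shell. A common restructuring (not part of this commit) keeps the loop in the current shell:

    # Process substitution instead of a pipe keeps ERRORS visible after the loop:
    while IFS= read -r mdfile; do
      echo "📄 Checking: $mdfile"
    done < <(find . -type f -name '*.md')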
@@ -132,7 +139,7 @@ jobs:
         ERRORS+=("❌ Broken external link: $url (in $mdfile)")
       fi

-    # Local files
+    # Local paths
     else
       if ! check_local "$url"; then
         ERRORS+=("❌ Missing local file: $url (in $mdfile)")
@@ -144,9 +151,9 @@ jobs:
     echo
   done

-##############################################
-# Final result output
-##############################################
+############################################################
+# Final error reporting
+############################################################
 echo
 if (( ${#ERRORS[@]} > 0 )); then
   echo -e "${RED}✖ Errors found:${RESET}"
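The diff is truncated here; the remainder presumably prints each collected error and fails the job. A typical tail for this pattern (illustrative only, not necessarily what the commit contains):

      # Hypothetical tail: the committed lines are cut off above.
      printf '%s\n' "${ERRORS[@]}"
      exit 1
    fi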