generated from osc/skins-template
Update .gitea/workflows/test-skins.yml
This commit is contained in:
@@ -26,23 +26,48 @@ jobs:
|
||||
printf '%b' "${1//%/\\x}"
|
||||
}
|
||||
|
||||
##############################################
|
||||
# FIXED: Gitea soft-404 detection
|
||||
##############################################
|
||||
##############################################
# check_http — verify that an external URL is reachable AND is not a
# Gitea "soft 404" (Gitea can serve its error page with HTTP 200, so a
# status check alone is not enough — the body must be inspected too).
# Arguments:
#   $1 - URL to check
# Outputs:
#   progress line on stdout
# Returns:
#   0 if the URL resolves to real content, 1 otherwise
##############################################
check_http() {
  local url="$1"
  echo " → Checking external: $url"

  local status
  # First try a cheap HEAD request.
  status=$(curl -Is --max-time 10 "$url" | head -n1 | awk '{print $2}')

  # Some servers reject HEAD (405 etc.) → fall back to a real GET.
  # NOTE: use -w '%{http_code}', not the `-I -X GET` anti-pattern,
  # which asks for headers while forcing a GET method.
  if [[ ! "$status" =~ ^[23] ]]; then
    status=$(curl -s -o /dev/null -w '%{http_code}' --max-time 10 "$url")
  fi

  # Anything outside 2xx/3xx is a hard failure.
  # (Anchored bracket class: "^2|3" would match any status merely
  # containing a 3, e.g. 403 or 503.)
  [[ "$status" =~ ^[23] ]] || return 1

  # Soft-404 detection: fetch the body (following redirects) and look
  # for Gitea's error-page markers.
  local body
  body=$(curl -Ls --max-time 10 "$url")
  if echo "$body" | grep -qiE "404 Not Found|doesn't exist|File not found|File does not exist|Not Found"; then
    return 1
  fi

  return 0
}
|
||||
|
||||
##############################################
|
||||
# Local path check
|
||||
##############################################
|
||||
check_local() {
|
||||
local path="$1"
|
||||
path="${path#/}"
|
||||
@@ -57,20 +82,28 @@ jobs:
|
||||
return 0
|
||||
}
|
||||
|
||||
##############################################
|
||||
# Extract all URL types from a Markdown file
|
||||
##############################################
|
||||
##############################################
# extract_links — print every link/asset reference found in a Markdown
# file, one per line, de-duplicated.
# Handles: Markdown links [t](u) and images ![a](u), raw http(s) URLs,
#          HTML <img src="..."> and <video src="...">.
# Arguments:
#   $1 - path to the Markdown file
# Outputs:
#   unique link targets on stdout (order not significant)
##############################################
extract_links() {
  local f="$1"

  {
    # Markdown links AND images in one pass: the plain-link pattern
    # already matches image syntax (a separate image grep would emit
    # every image URL twice).
    grep -oE '!?\[[^]]*\]\([^)]*\)' "$f" \
      | sed -E 's/.*\((.*)\).*/\1/'

    # Raw URLs
    grep -oE 'https?://[^ )"]+' "$f"

    # HTML <img src="">
    grep -oE '<img[^>]*src="[^"]+"' "$f" \
      | sed -E 's/.*src="([^"]*)".*/\1/'

    # HTML <video src="">
    grep -oE '<video[^>]*src="[^"]+"' "$f" \
      | sed -E 's/.*src="([^"]*)".*/\1/'
  } | sort -u
  # sort -u: the raw-URL grep re-emits external Markdown links, so
  # without dedup each broken external link would be reported twice.
}
|
||||
@@ -78,6 +111,9 @@ jobs:
|
||||
echo "🔍 Scanning Markdown files..."
|
||||
echo
|
||||
|
||||
##############################################
|
||||
# Loop through all Markdown files
|
||||
##############################################
|
||||
find . -type f -name '*.md' | while IFS= read -r mdfile; do
|
||||
echo "📄 Checking: $mdfile"
|
||||
|
||||
@@ -85,15 +121,18 @@ jobs:
|
||||
[[ -z "$url" ]] && continue
|
||||
[[ "$url" == mailto:* ]] && continue
|
||||
|
||||
# Skip tag links inside /docs → but **do NOT print them**
|
||||
# Skip tag links inside docs/ (quietly)
|
||||
if [[ "$mdfile" == ./docs/* ]] && [[ "$url" == *"/src/tag/"* ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
# External URLs
|
||||
if [[ "$url" == http* ]]; then
|
||||
if ! check_http "$url"; then
|
||||
ERRORS+=("❌ Broken external link: $url (in $mdfile)")
|
||||
fi
|
||||
|
||||
# Local files
|
||||
else
|
||||
if ! check_local "$url"; then
|
||||
ERRORS+=("❌ Missing local file: $url (in $mdfile)")
|
||||
@@ -105,6 +144,9 @@ jobs:
|
||||
echo
|
||||
done
|
||||
|
||||
##############################################
|
||||
# Final result output
|
||||
##############################################
|
||||
echo
|
||||
if (( ${#ERRORS[@]} > 0 )); then
|
||||
echo -e "${RED}✖ Errors found:${RESET}"
|
||||
|
||||
Reference in New Issue
Block a user