generated from osc/skins-template
Update .gitea/workflows/test-skins.yml
This commit is contained in:
@@ -26,23 +26,48 @@ jobs:
|
|||||||
printf '%b' "${1//%/\\x}"
|
printf '%b' "${1//%/\\x}"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
##############################################
# FIXED: Gitea soft-404 detection
##############################################

#######################################
# Return 1 if the body looks like a Gitea "soft 404" page
# (HTTP 200 but the content is an error page).
# Arguments: $1 - fetched response body
# Returns:   0 if body looks like a 404 page, 1 otherwise
#######################################
is_soft_404() {
  printf '%s' "$1" \
    | grep -qiE "404 Not Found|doesn't exist|File not found|File does not exist|Not Found"
}

#######################################
# Check an external URL. Tries HEAD first, falls back to GET
# (some servers reject HEAD), then inspects the body for a
# Gitea soft-404 page.
# Arguments: $1 - URL to check
# Returns:   0 if the URL is reachable and not a soft-404,
#            1 otherwise
#######################################
check_http() {
  local url="$1"
  echo " → Checking external: $url"

  local status body

  # First try HEAD. '|| true' so a curl failure (timeout, DNS)
  # doesn't abort the whole script under 'set -e'; status stays empty.
  status=$(curl -Is --max-time 10 "$url" | head -n1 | awk '{print $2}') || true

  # ^[23] = status starts with 2 or 3 (2xx/3xx).
  # The previous pattern '^2|3' was an unanchored alternation:
  # it matched any status containing a '3' (403, 503, 413, ...).
  if [[ "$status" =~ ^[23] ]]; then
    # A 200 can still be a Gitea soft-404; inspect the page content.
    body=$(curl -Ls --max-time 10 "$url") || true
    if is_soft_404 "$body"; then
      return 1
    fi
    return 0
  fi

  # HEAD wasn't 2xx/3xx → retry with GET.
  status=$(curl -Is --max-time 10 -X GET "$url" | head -n1 | awk '{print $2}') || true
  if [[ "$status" =~ ^[23] ]]; then
    body=$(curl -Ls --max-time 10 "$url") || true
    if is_soft_404 "$body"; then
      return 1
    fi
    return 0
  fi

  return 1
}
||||||
|
|
||||||
|
##############################################
|
||||||
|
# Local path check
|
||||||
|
##############################################
|
||||||
check_local() {
|
check_local() {
|
||||||
local path="$1"
|
local path="$1"
|
||||||
path="${path#/}"
|
path="${path#/}"
|
||||||
@@ -57,20 +82,28 @@ jobs:
|
|||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
##############################################
# Extract all URL types from a Markdown file
##############################################

#######################################
# Print every link target found in a Markdown file, one per line,
# deduplicated: Markdown links/images, raw URLs, and HTML
# <img>/<video> src attributes.
# Arguments: $1 - path to the Markdown file
# Outputs:   unique link targets on stdout (sorted)
#######################################
extract_links() {
  local f="$1"

  {
    # Markdown links AND images: the pattern [text](target) also
    # matches inside ![alt](target), so one grep covers both
    # (the old separate image grep only produced duplicates).
    # '|| true': grep exits 1 on no match, which would abort the
    # function under 'set -e' and silently skip later extractors.
    grep -oE '\[[^]]*\]\([^)]*\)' "$f" \
      | sed -E 's/.*\((.*)\).*/\1/' || true

    # Raw URLs
    grep -oE 'https?://[^ )"]+' "$f" || true

    # HTML <img src=""> and <video src="">
    grep -oE '<(img|video)[^>]*src="[^"]+"' "$f" \
      | sed -E 's/.*src="([^"]*)".*/\1/' || true
  } | sort -u
}
||||||
@@ -78,6 +111,9 @@ jobs:
|
|||||||
echo "🔍 Scanning Markdown files..."
|
echo "🔍 Scanning Markdown files..."
|
||||||
echo
|
echo
|
||||||
|
|
||||||
|
##############################################
|
||||||
|
# Loop through all Markdown files
|
||||||
|
##############################################
|
||||||
find . -type f -name '*.md' | while IFS= read -r mdfile; do
|
find . -type f -name '*.md' | while IFS= read -r mdfile; do
|
||||||
echo "📄 Checking: $mdfile"
|
echo "📄 Checking: $mdfile"
|
||||||
|
|
||||||
@@ -85,15 +121,18 @@ jobs:
|
|||||||
[[ -z "$url" ]] && continue
|
[[ -z "$url" ]] && continue
|
||||||
[[ "$url" == mailto:* ]] && continue
|
[[ "$url" == mailto:* ]] && continue
|
||||||
|
|
||||||
# Skip tag links inside /docs → but **do NOT print them**
|
# Skip tag links inside docs/ (quietly)
|
||||||
if [[ "$mdfile" == ./docs/* ]] && [[ "$url" == *"/src/tag/"* ]]; then
|
if [[ "$mdfile" == ./docs/* ]] && [[ "$url" == *"/src/tag/"* ]]; then
|
||||||
continue
|
continue
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# External URLs
|
||||||
if [[ "$url" == http* ]]; then
|
if [[ "$url" == http* ]]; then
|
||||||
if ! check_http "$url"; then
|
if ! check_http "$url"; then
|
||||||
ERRORS+=("❌ Broken external link: $url (in $mdfile)")
|
ERRORS+=("❌ Broken external link: $url (in $mdfile)")
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Local files
|
||||||
else
|
else
|
||||||
if ! check_local "$url"; then
|
if ! check_local "$url"; then
|
||||||
ERRORS+=("❌ Missing local file: $url (in $mdfile)")
|
ERRORS+=("❌ Missing local file: $url (in $mdfile)")
|
||||||
@@ -105,6 +144,9 @@ jobs:
|
|||||||
echo
|
echo
|
||||||
done
|
done
|
||||||
|
|
||||||
|
##############################################
|
||||||
|
# Final result output
|
||||||
|
##############################################
|
||||||
echo
|
echo
|
||||||
if (( ${#ERRORS[@]} > 0 )); then
|
if (( ${#ERRORS[@]} > 0 )); then
|
||||||
echo -e "${RED}✖ Errors found:${RESET}"
|
echo -e "${RED}✖ Errors found:${RESET}"
|
||||||
|
|||||||
Reference in New Issue
Block a user