name: Test Skins
on:
push:
pull_request:
jobs:
link-check:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Validate links and assets
shell: bash
run: |
set -uo pipefail
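# Note: -e is not set, so a single failing check does not abort the scan;
# broken links are collected in the ERRORS array below instead.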
RED="\033[31m"
GREEN="\033[32m"
RESET="\033[0m"
ERRORS=()
############################################################
# Remove all NUL/control characters BEFORE any decoding
############################################################
sanitize_url() {
printf '%s' "$1" \
| tr -d '\000' \
| tr -d '\r' \
| sed 's/[[:cntrl:]]//g'
}
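# Illustrative example (hypothetical input): a path pasted from a Windows
# editor may carry a trailing CR or stray control bytes, e.g.
#   sanitize_url $'skins/dark.md\r'   ->   skins/dark.md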
############################################################
# Safe unicode URL decode (no null bytes)
############################################################
urldecode() {
local url="$1"
url="${url//+/ }"
printf '%s' "$url" | perl -pe 's/%([A-Fa-f0-9]{2})/chr(hex($1))/eg'
}
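# Illustrative example (hypothetical path): rendered Markdown often
# percent-encodes spaces, e.g.
#   urldecode "assets/dark%20theme/preview.png" -> "assets/dark theme/preview.png"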
############################################################
# Detect Gitea "soft 404" even when status=200 OK
############################################################
check_http() {
local url="$1"
echo " → Checking external: $url"
local status body
# Try HEAD first
status=$(curl -Is --max-time 10 "$url" | head -n1 | awk '{print $2}')
if [[ "$status" =~ ^2|3 ]]; then
body=$(curl -Ls --max-time 10 "$url")
if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
return 1
fi
return 0
fi
# Try GET fallback
status=$(curl -s -o /dev/null -w '%{http_code}' --max-time 10 "$url")
if [[ "$status" =~ ^[23] ]]; then
body=$(curl -Ls --max-time 10 "$url")
if echo "$body" | grep -qiE "404 Not Found|Path .* doesn't exist|File not found|does not exist|Not Found"; then
return 1
fi
return 0
fi
return 1
}
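# Illustrative usage (hypothetical URLs): returns 0 for a reachable page and
# 1 for a hard failure or a "soft 404" whose body says the path is missing, e.g.
#   check_http "https://example.com/skins/ok.png"       # -> 0
#   check_http "https://example.com/skins/missing.png"  # -> 1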
############################################################
# Local file existence check
############################################################
check_local() {
local path="$1"
path="${path#/}" # strip leading slash
local decoded
decoded=$(urldecode "$path")
echo " → Checking local: $decoded"
[[ -e "$decoded" ]]
}
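# Illustrative usage (hypothetical repo path):
#   check_local "/skins/Dark%20Mode/preview.png"
# succeeds only if skins/Dark Mode/preview.png exists in the checkout.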
############################################################
# Extract URLs from Markdown + HTML
############################################################
extract_links() {
local f="$1"
# Markdown links
grep -oE '\[[^]]*\]\([^)]*\)' "$f" \
| sed -E 's/.*\((.*)\).*/\1/'
# Markdown images
grep -oE '!\[[^]]*\]\([^)]*\)' "$f" \
| sed -E 's/.*\((.*)\).*/\1/'
# Raw URLs
grep -oE 'https?://[^ )"]+' "$f"
# HTML <img> tags
grep -oE '<img[^>]*src="[^"]+"' "$f" \
| sed -E 's/.*src="([^"]*)".*/\1/'
#