diff --git a/.gitea/workflows/test-skins.yml b/.gitea/workflows/test-skins.yml
deleted file mode 100644
index 2c94af6f..00000000
--- a/.gitea/workflows/test-skins.yml
+++ /dev/null
@@ -1,147 +0,0 @@
-name: Test Skins
-
-on:
- push:
- pull_request:
-
-jobs:
- link-check:
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v4
-
- - name: Validate links and assets
- shell: bash
- run: |
- set -uo pipefail
-
- RED="\033[31m"
- GREEN="\033[32m"
- RESET="\033[0m"
-
- ERRORS=()
-
- ############################################################
- # Safe unicode URL decode (never produces NULL bytes)
- ############################################################
- urldecode() {
- local url="$1"
- url="${url//+/ }"
- printf '%s' "$url" | perl -pe 's/%([A-Fa-f0-9]{2})/chr(hex($1))/eg'
- }
-
- ############################################################
- # Detect Gitea "soft 404" even when HTTP=200 OK
- ############################################################
- check_http() {
- local url="$1"
- echo " → Checking external: $url"
-
- local status body
-
- status=$(curl -Is --max-time 10 "$url" | head -n1 | awk '{print $2}')
- if [[ "$status" =~ ^2|3 ]]; then
- body=$(curl -Ls --max-time 10 "$url")
- if echo "$body" | grep -qiE "404 Not Found|doesn't exist|File not found|Not Found"; then
- return 1
- fi
- return 0
- fi
-
- status=$(curl -Is --max-time 10 -X GET "$url" | head -n1 | awk '{print $2}')
- if [[ "$status" =~ ^2|3 ]]; then
- body=$(curl -Ls --max-time 10 "$url")
- if echo "$body" | grep -qiE "404 Not Found|doesn't exist|File not found|Not Found"; then
- return 1
- fi
- return 0
- fi
-
- return 1
- }
-
- ############################################################
- # Local file check
- ############################################################
- check_local() {
- local path="$1"
- path="${path#/}" # strip leading slash
- local decoded
- decoded=$(urldecode "$path")
-
- echo " → Checking local: $decoded"
-
- [[ -e "$decoded" ]]
- }
-
- ############################################################
- # Extract links from markdown + HTML
- ############################################################
- extract_links() {
- local f="$1"
-
- grep -oE '\[[^]]*\]\([^)]*\)' "$f" \
- | sed -E 's/.*\((.*)\).*/\1/'
-
- grep -oE '!\[[^]]*\]\([^)]*\)' "$f" \
- | sed -E 's/.*\((.*)\).*/\1/'
-
- grep -oE 'https?://[^ )"]+' "$f"
-
- grep -oE '<img[^>]*src="[^"]+"' "$f" \
- | sed -E 's/.*src="([^"]*)".*/\1/'
-
- grep -oE '