Update .gitea/actions/generate-docs/action.yml
@@ -1,242 +1,275 @@
name: "Check Links"
description: "Check all links in markdown files for validity"
name: "Generate Documentation"
description: "Generate README index and per-skin markdown pages"

inputs:
  user_repository:
    description: "Repository path in format owner/repo"
  new_tag:
    description: "The new tag for this build"
    required: true
  readme_path:
    description: "Path to write README.md"
    required: true
  doc_dir:
    description: "Directory to write per-skin markdown pages"
    required: true
  user_repository:
    description: "Path of the repository (relative inside container)"
    required: true
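  # Illustrative call site; the values below are assumptions, not taken from this commit:
  #   - uses: ./.gitea/actions/generate-docs
  #     with:
  #       new_tag: v1.2.3
  #       readme_path: README.md
  #       doc_dir: docs
  #       user_repository: osc/skins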

runs:
  using: "composite"
  steps:
    - name: Check all links in markdown files
    - name: Generate README
      shell: bash
      run: |
        #!/bin/bash
        set -o pipefail

        echo "[Link Checker Job Started]"
        echo ""

        # Find all markdown files
        find . -name "*.md" -type f | sort > all_markdown_files.txt
        total_files=$(wc -l < all_markdown_files.txt)
        echo "Found $total_files markdown files to check"
        echo ""

        # Get repository info from input at the start
        REPO_OWNER="${{ inputs.user_repository }}"
        REPO_OWNER="${REPO_OWNER%/*}"
        REPO_NAME="${{ inputs.user_repository }}"
        REPO_NAME="${REPO_NAME#*/}"
        GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || echo "main")
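        # ${REPO_OWNER%/*} keeps everything before the last "/" and ${REPO_NAME#*/} everything after
        # the first one, so an input such as "osc/skins" (illustrative) splits into "osc" and "skins".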

        has_errors=0
        file_index=1

        # Process each markdown file
        while IFS= read -r md_file; do
        echo "[$file_index/$total_files] Checking: $md_file"

        file_has_errors=0

        # Extract ALL links from the markdown file
        {
        # Markdown links []()
        grep -oP '\]\(([^\)]+)\)' "$md_file" 2>/dev/null | sed 's/](\(.*\))/\1/' || true
        # Image links ![]()
        grep -oP '!\[[^\]]*\]\(([^\)]+)\)' "$md_file" 2>/dev/null | sed 's/!\[.*\](\(.*\))/\1/' || true
        # Video src attributes
        grep -oP '<video[^>]+src="([^"]+)"' "$md_file" 2>/dev/null | sed 's/.*src="\([^"]*\)".*/\1/' || true
        } > /tmp/links_$$.txt
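        # Note: grep -oP relies on GNU grep built with PCRE support; a runner image without it
        # (for example a plain busybox shell) would need these extractions rewritten with sed or awk.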

        link_count=$(wc -l < /tmp/links_$$.txt 2>/dev/null || echo "0")

        if [ $link_count -eq 0 ]; then
        echo " → No links found"
        echo ""
        file_index=$((file_index + 1))
        echo "Generating README index…"

        sanitize_filename() {
        echo "$1" | \
        tr -d '\000-\037' | \
        sed -e 's#[\\/:\*\?"<>|]#-#g' | \
        sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
        }
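        # sanitize_filename strips control characters, replaces \ / : * ? " < > | with "-" and trims
        # surrounding whitespace, e.g. (illustrative) 'My: Skin?' becomes 'My- Skin-'.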

        url_encode_path() {
        local IFS='/'
        local parts=($1)
        local encoded=""
        for part in "${parts[@]}"; do
        [ -n "$encoded" ] && encoded+="/"
        encoded+=$(printf '%s' "$part" | jq -sRr @uri)
        done
        echo "$encoded"
        }
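        # url_encode_path percent-encodes each path segment via jq's @uri while keeping the "/" separators,
        # e.g. (illustrative) url_encode_path "My Skin/My Skin" -> "My%20Skin/My%20Skin".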

        SKINS_JSON_FILE="${{ github.workspace }}/.gitea/workflows/skins.json"
        DESC_FILE=$(mktemp)

        echo "---" > "${{ inputs.readme_path }}"
        echo "gitea: none" >> "${{ inputs.readme_path }}"
        echo "include_toc: true" >> "${{ inputs.readme_path }}"
        echo "---" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"
        echo "# Skins" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"
        echo "<!--" >> "${{ inputs.readme_path }}"
        echo "osuid: $OSU_ID" >> "${{ inputs.readme_path }}"
        echo "-->" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"
        echo "**Go back to [osc/skins]($REGISTRY_URL/osc/skins)**" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"
        echo "**Click on the Skin name to download it, or click on the thumbnail to see more about the skin, including a video preview, screenshots, and mod icons.**" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"

        jq -r '.descriptions | to_entries[] | "\(.key)=\(.value)"' "$SKINS_JSON_FILE" > "$DESC_FILE"
        jq -r '.order[]?' "$SKINS_JSON_FILE" > order.txt
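        # skins.json is therefore expected to look roughly like {"order": ["skin-a", ...], "descriptions": {"skin-a": "text", ...}};
        # the exact schema is an assumption inferred from the two jq queries above.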

        get_desc() {
        grep -F -m1 -- "$1=" "$DESC_FILE" 2>/dev/null | cut -d '=' -f2- || true
        }

        declare -A ordered
        while IFS= read -r skin; do
        [ "$skin" = "default-skin" ] && continue
        ordered["$skin"]=1
        dir="$DANSER_SKINS_DIR/$skin"
        [ ! -d "$dir" ] && continue

        ini_file=$(find "$dir" -maxdepth 1 -iname "skin.ini" | head -n1 || true)
        skin_header="$skin"

        if [ -f "$ini_file" ]; then
        name_line=$(grep -a -i -m1 'Name[[:space:]]*:' "$ini_file" || true)
        if [ -n "$name_line" ]; then
        val="${name_line#*:}"
        val="$(echo "$val" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
        [ -n "$val" ] && skin_header=$(sanitize_filename "$val")
        fi
        else
        continue
        fi

        echo " → Checking $link_count links..."

        # Categorize and check links
        > /tmp/download_$$.txt
        > /tmp/media_$$.txt
        > /tmp/video_$$.txt
        > /tmp/tags_$$.txt

        # Check each link
        while IFS= read -r link; do
        [ -z "$link" ] && continue

        # Decode URL-encoded characters for display
        decoded_link=$(echo "$link" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g' | sed 's/%2C/,/g')
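        # The sed chain above only decodes the escape sequences that actually occur in this repo's links.
        # A generic decoder would be something along the lines of (sketch, not used here): printf '%b' "${link//\%/\\x}"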

        # Determine link category and validation status
        status="✓"

        # Check if it's an external URL
        if [[ "$decoded_link" =~ ^https?:// ]]; then
        # Replace git.sulej.net with internal gitea URL for checking
        check_url="$link"
        if [[ "$link" =~ git\.sulej\.net ]]; then
        check_url="${link//git.sulej.net/gitea:3000}"
        check_url="${check_url//https:/http:}"
        fi
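        # git.sulej.net is assumed to be the public hostname of the same Gitea instance; rewriting it to
        # http://gitea:3000 lets the runner container reach the service directly instead of going out through the public proxy.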

        # Check external URL with curl
        http_code=$(curl -s -o /dev/null -w "%{http_code}" --max-time 10 \
        -H "User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36" \
        "$check_url" 2>/dev/null || echo "000")
        else
        # Local file - convert to full Gitea URL (keep URL encoding)
        if [[ "$link" =~ ^/ ]]; then
        # Absolute path from repo root
        check_url="http://gitea:3000/$REPO_OWNER/$REPO_NAME/raw/branch/$GIT_BRANCH${link}"
        display_url="https://git.sulej.net/$REPO_OWNER/$REPO_NAME/raw/branch/$GIT_BRANCH${link}"
        else
        # Relative path from markdown file
        md_dir=$(dirname "$md_file")
        if [[ "$md_dir" == "." ]]; then
        rel_path="$link"
        else
        rel_path="${md_dir#./}/$link"
        fi
        check_url="http://gitea:3000/$REPO_OWNER/$REPO_NAME/raw/branch/$GIT_BRANCH/$rel_path"
        display_url="https://git.sulej.net/$REPO_OWNER/$REPO_NAME/raw/branch/$GIT_BRANCH/$rel_path"
        fi

        # Use display_url for output instead of decoded_link
        decoded_link="$display_url"

        # Check URL with curl
        http_code=$(curl -s -o /dev/null -w "%{http_code}" --max-time 10 "$check_url" 2>/dev/null || echo "000")
        fi

        # Accept 2xx and 3xx status codes as valid
        if ! [[ "$http_code" =~ ^[23][0-9][0-9]$ ]]; then
        status="✖ (HTTP $http_code)"
        file_has_errors=1
        has_errors=1
        fi

        # Categorize all links
        if [[ "$decoded_link" =~ /export/.*\.(osk|osz)$ ]] || [[ "$decoded_link" =~ ^https?:// && ! "$decoded_link" =~ /media/ && ! "$decoded_link" =~ /src/tag/ ]]; then
        echo " $status $decoded_link" >> /tmp/download_$$.txt
        elif [[ "$decoded_link" =~ /media/gameplay/.*\.(mp4|webm)$ ]]; then
        echo " $status $decoded_link" >> /tmp/video_$$.txt
        elif [[ "$decoded_link" =~ /src/tag/ ]]; then
        echo " $status $decoded_link" >> /tmp/tags_$$.txt
        elif [[ "$decoded_link" =~ \.(webp|png|jpg|jpeg)$ ]] || [[ "$decoded_link" =~ /media/(panel|icons|thumbnail)/ ]]; then
        echo " $status $decoded_link" >> /tmp/media_$$.txt
        elif [[ "$decoded_link" =~ \.md$ ]]; then
        echo " $status $decoded_link" >> /tmp/tags_$$.txt
        else
        echo " $status $decoded_link" >> /tmp/download_$$.txt
        fi
        done < /tmp/links_$$.txt

        # Display categorized results - special handling for README
        if [[ "$md_file" == "./README.md" ]]; then
        # For README, group by skin name
        > /tmp/skins_$$.txt

        # Extract unique skin names from download links and decode them
        if [ -s /tmp/download_$$.txt ]; then
        grep -oP 'export/[^/]+' /tmp/download_$$.txt | sed 's|export/||' | while read -r encoded_name; do
        # Decode the skin name
        echo "$encoded_name" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g' | sed 's/%2C/,/g'
        done | sort -u > /tmp/skins_$$.txt || true
        fi

        # Show general links first (not skin-specific)
        if [ -s /tmp/download_$$.txt ]; then
        general_downloads=$(grep -v '/export/.*\.osk' /tmp/download_$$.txt | sort -u || true)
        if [ -n "$general_downloads" ]; then
        echo " general:"
        echo "$general_downloads"
        echo ""
        fi
        fi

        # Show each skin's links together
        while IFS= read -r skin_name; do
        [ -z "$skin_name" ] && continue

        echo " skin: $skin_name"

        # Download link
        grep "/export/$skin_name/.*\.osk" /tmp/download_$$.txt 2>/dev/null | sort -u || true

        # Thumbnail
        grep "thumbnail/$skin_name/" /tmp/media_$$.txt 2>/dev/null | sort -u || true

        # Docs
        grep "/docs/$skin_name/" /tmp/tags_$$.txt 2>/dev/null | sort -u || true

        echo ""
        done < /tmp/skins_$$.txt

        # Show version tags separately
        if [ -s /tmp/tags_$$.txt ]; then
        version_tags=$(grep '/src/tag/' /tmp/tags_$$.txt | sort -u || true)
        if [ -n "$version_tags" ]; then
        echo " version tags:"
        echo "$version_tags"
        echo ""
        fi
        fi

        rm -f /tmp/skins_$$.txt
        else
        # For other markdown files, show categorized as before
        if [ -s /tmp/download_$$.txt ]; then
        echo " download:"
        sort -u /tmp/download_$$.txt
        echo ""
        fi

        if [ -s /tmp/media_$$.txt ]; then
        echo " media:"
        sort -u /tmp/media_$$.txt
        echo ""
        fi

        if [ -s /tmp/video_$$.txt ]; then
        echo " video:"
        sort -u /tmp/video_$$.txt
        echo ""
        fi

        if [ -s /tmp/tags_$$.txt ]; then
        echo " tags:"
        sort -u /tmp/tags_$$.txt
        echo ""

        raw_path="$(printf "%s/%s" "$skin" "$skin_header" | sed 's/^ *//;s/ *$//')"
        base_path=$(url_encode_path "$raw_path")

        echo "## [$skin_header]($REGISTRY_URL/${{ inputs.user_repository }}/media/tag/${{ inputs.new_tag }}/export/${base_path}.osk)" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"

        desc=$(get_desc "$skin")
        [ -n "$desc" ] && { echo "$desc" >> "${{ inputs.readme_path }}"; echo "" >> "${{ inputs.readme_path }}"; }

        if [ -f "$ini_file" ]; then
        author_line=$(grep -i '^[[:space:]]*Author:' "$ini_file" | head -n1 || true)
        if [ -n "$author_line" ]; then
        author=$(echo "$author_line" | cut -d ':' -f2- | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
        [ -n "$author" ] && { echo "**Author:** $author" >> "${{ inputs.readme_path }}"; echo "" >> "${{ inputs.readme_path }}"; }
        fi
        fi

        rm -f /tmp/download_$$.txt /tmp/media_$$.txt /tmp/video_$$.txt /tmp/tags_$$.txt

        rm -f /tmp/links_$$.txt

        if [ $file_has_errors -eq 0 ]; then
        echo " ✓ All links valid"

        echo "[](/docs/${base_path}.md)" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"
        done < order.txt
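        # The loop below is a catch-up pass (per the ordered[] bookkeeping above): any skin directory that
        # was not listed in order.txt still gets a README entry, appended after the explicitly ordered ones.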

        for dir in "$DANSER_SKINS_DIR"/*; do
        [ -d "$dir" ] || continue
        skin="$(basename "$dir")"
        [ "$skin" = "default-skin" ] && continue
        [[ -n "${ordered[$skin]}" ]] && continue

        ini_file=$(find "$dir" -maxdepth 1 -iname "skin.ini" | head -n1 || true)
        skin_header="$skin"

        if [ -f "$ini_file" ]; then
        name_line=$(grep -a -i -m1 'Name[[:space:]]*:' "$ini_file" || true)
        if [ -n "$name_line" ]; then
        val="${name_line#*:}"
        val="$(echo "$val" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
        [ -n "$val" ] && skin_header=$(sanitize_filename "$val")
        fi
        else
        echo " ✖ Some links broken"
        continue
        fi
        echo ""

        file_index=$((file_index + 1))
        done < all_markdown_files.txt

        echo ""

        if [ $has_errors -eq 0 ]; then
        echo "[Link Checker Complete — all links valid in $total_files files]"
        exit 0
        else
        echo "[Link Checker Complete — found broken links in $total_files files]"
        exit 1

        raw_path="$(printf "%s/%s" "$skin" "$skin_header" | sed 's/^ *//;s/ *$//')"
        base_path=$(url_encode_path "$raw_path")

        echo "## [$skin_header]($REGISTRY_URL/${{ inputs.user_repository }}/media/tag/${{ inputs.new_tag }}/export/${base_path}.osk)" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"

        if [ -f "$ini_file" ]; then
        author_line=$(grep -i '^[[:space:]]*Author:' "$ini_file" | head -n1 || true)
        if [ -n "$author_line" ]; then
        author=$(echo "$author_line" | cut -d ':' -f2- | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
        [ -n "$author" ] && { echo "**Author:** $author" >> "${{ inputs.readme_path }}"; echo "" >> "${{ inputs.readme_path }}"; }
        fi
        fi

        echo "[](/docs/${base_path}.md)" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"
        done

        echo "# Build History" >> "${{ inputs.readme_path }}"
        echo "" >> "${{ inputs.readme_path }}"
        echo "| Version | Date |" >> "${{ inputs.readme_path }}"
        echo "| ------- | ---- |" >> "${{ inputs.readme_path }}"

        current_commit_date=$(TZ="Europe/Zurich" date -d "$(git log -1 --format=%cI)" "+%d.%m.%Y %H:%M:%S")
        echo "| [\`${{ inputs.new_tag }} (Current)\`]($REGISTRY_URL/${{ inputs.user_repository }}/src/tag/${{ inputs.new_tag }}/README.md) | $current_commit_date |" >> "${{ inputs.readme_path }}"

        old_tags=$(git tag --sort=-v:refname | grep -v "^${{ inputs.new_tag }}$" | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || true)
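        # Only semver-style tags (vMAJOR.MINOR.PATCH) are listed, and the tag currently being built is
        # filtered out because it already has its own "(Current)" row above.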
        if [ -n "$old_tags" ]; then
        echo "$old_tags" | while read -r tag; do
        tag_date=$(git log -1 --format=%ci "$tag")
        formatted_date=$(TZ="Europe/Zurich" date -d "$tag_date" "+%d.%m.%Y %H:%M:%S")
        echo "| [\`$tag\`]($REGISTRY_URL/${{ inputs.user_repository }}/src/tag/$tag/README.md) | $formatted_date |" >> "${{ inputs.readme_path }}"
        done
        fi

        echo "README index generated successfully."

    - name: Generate Per-Skin Pages
      shell: bash
      run: |
        echo "Generating detailed per-skin markdown pages…"

        sanitize_filename() {
        echo "$1" | \
        tr -d '\000-\037' | \
        sed -e 's#[\\/:\*\?"<>|]#-#g' | \
        sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
        }

        url_encode_path() {
        local IFS='/'
        local parts=($1)
        local encoded=""
        for part in "${parts[@]}"; do
        [ -n "$encoded" ] && encoded+="/"
        encoded+=$(printf '%s' "$part" | jq -sRr @uri)
        done
        echo "$encoded"
        }

        mkdir -p "${{ inputs.doc_dir }}"

        for dir in "$DANSER_SKINS_DIR"/*; do
        [ -d "$dir" ] || continue

        skin=$(basename "$dir")
        [ "$skin" = "default-skin" ] && continue
        ini_file=$(find "$dir" -maxdepth 1 -iname "skin.ini" | head -n1 || true)
        skin_header="$skin"

        if [ -f "$ini_file" ]; then
        line=$(grep -i '^[[:space:]]*Name:' "$ini_file" | head -n1 || true)
        if [ -n "$line" ]; then
        val="${line#*:}"
        val="$(echo "$val" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
        if [ -n "$val" ]; then
        skin_header=$(sanitize_filename "$val")
        fi
        fi
        fi

        raw_path="${skin}/${skin_header}"
        base_path=$(url_encode_path "$raw_path")
        osk_url="$REGISTRY_URL/${{ inputs.user_repository }}/media/tag/${{ inputs.new_tag }}/export/${base_path}.osk"
        md_file_path="${{ inputs.doc_dir }}/${raw_path}.md"

        mkdir -p "$(dirname "$md_file_path")"

        video_url="$REGISTRY_URL/${{ inputs.user_repository }}/media/tag/${{ inputs.new_tag }}/media/gameplay/${base_path}.mp4"
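        # Filesystem paths ($md_file_path) use the raw skin name, while the URLs ($osk_url, $video_url)
        # use the percent-encoded form, presumably so link targets stay valid while files keep their original names.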

        author=""
        if [ -f "$ini_file" ]; then
        author_line=$(grep -i '^[[:space:]]*Author:' "$ini_file" | head -n1 || true)
        if [ -n "$author_line" ]; then
        author=$(echo "$author_line" | cut -d ':' -f2- | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
        fi
        fi

        {
        echo "# [$skin_header]($osk_url)"
        echo ""
        [ -n "$author" ] && echo "**Author:** $author"
        [ -n "$author" ] && echo ""

        echo "## Hitsounds"
        echo "<video controls autoplay loop muted playsinline src=\"$video_url\" type=\"video/mp4\">"
        echo "</video>"
        echo ""

        echo "## Ranking Panel"
        echo ""
        echo ""

        echo "## Mod Icons"
        echo ""

        echo ""
        echo "## Build History"
        echo ""
        echo "| Version | Date |"
        echo "| ------- | ---- |"

        current_commit_date=$(TZ="Europe/Zurich" date -d "$(git log -1 --format=%cI)" "+%d.%m.%Y %H:%M:%S")
        echo "| [\`${{ inputs.new_tag }} (Current)\`]($REGISTRY_URL/${{ inputs.user_repository }}/src/tag/${{ inputs.new_tag }}/docs/${base_path}.md) | $current_commit_date |"

        old_tags=$(git tag --sort=-v:refname | grep -v "^${{ inputs.new_tag }}$" | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || true)
        if [ -n "$old_tags" ]; then
        echo "$old_tags" | while read -r tag; do
        raw_osk_path="export/${skin}/${skin_header}.osk"
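        # A tag only appears in this skin's build history if the .osk actually existed at that tag,
        # which is what the git ls-tree lookup below verifies.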
        if git ls-tree -r --name-only "$tag" | grep -Fx -- "$raw_osk_path" >/dev/null; then
        tag_date=$(git log -1 --format=%ci "$tag")
        formatted_date=$(TZ="Europe/Zurich" date -d "$tag_date" "+%d.%m.%Y %H:%M:%S")
        echo "| [\`$tag\`]($REGISTRY_URL/${{ inputs.user_repository }}/src/tag/$tag/docs/${base_path}.md) | $formatted_date |"
        fi
        done
        fi

        } > "$md_file_path"

        echo " → Wrote $md_file_path"
        done

        echo "Per-skin markdown pages complete."