Update .gitea/workflows/ci.yml

commit 3867174bf9
parent edf7542308
Date: 2025-11-23 14:16:11 +01:00


@@ -1,114 +1,329 @@
Removed (previous workflow):

name: Generate Skin previews, OSK files and per skin documentation

on:
  push:
    paths:
      - '.gitea/workflows/*'
      - 'Skins/**/*'
  workflow_dispatch:
    inputs:
      force_rebuild:
        description: 'Force rebuild all skins'
        required: false
        default: 'false'
      target_skins:
        description: 'Comma-separated list of skin folder names to rebuild (e.g., "Skin1,Skin2")'
        required: false
        default: ''

env:
  DANSER_DIR: "/app/danser"
  DANSER_VIDEO_DIR: "/app/danser/videos"
  DANSER_SCREENSHOT_DIR: "/app/danser/screenshots"
  SKINS_DIR: "${{ github.workspace }}/Skins"
  DANSER_SKINS_DIR: "/app/danser/skins"
  DEFAULT_SKIN_DIR: "/app/danser/skins/default-skin"
  REPO_SCREENSHOT_DIR: "${{ github.workspace }}/media/gameplay"
  REPO_MOD_ICONS_DIR: "${{ github.workspace }}/media/icons"
  REPO_RANKING_PANEL_DIR: "${{ github.workspace }}/media/panel"
  REPO_THUMBNAIL_DIR: "${{ github.workspace }}/media/thumbnail"
  README_PATH: "${{ github.workspace }}/README.md"
  GAMEPLAY_REPLAY_PATH: "/app/danser/custom-replays/yomi_yori.osr"
  THUMBNAIL_REPLAY_PATH: "/app/danser/custom-replays/combo_colors.osr"
  PANEL_REPLAY_PATH: "/app/danser/custom-replays/2000_gekis.osr"
  OSK_PATH: "${{ github.workspace }}/export"
  IMAGE_NAME: osc/skins-image
  REGISTRY_URL: "https://${{ vars.CONTAINER_REGISTRY }}"
  OSU_ID: ${{ vars.OSUID }}
  DOC_DIR: "${{ github.workspace }}/docs"

permissions:
  contents: write

jobs:
  generate_everything:
    name: Full CI/CD Pipeline
    runs-on: danser
    container:
      image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
      options: >-
        --gpus all
        --privileged
        --env NVIDIA_DRIVER_CAPABILITIES=all
        --env NVIDIA_VISIBLE_DEVICES=all
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          token: ${{ secrets.GITHUB_TOKEN }}
      - name: Discover and Detect Skins
        id: discover
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/discover-skins@main
        with:
          force_rebuild: ${{ github.event.inputs.force_rebuild }}
          target_skins: ${{ github.event.inputs.target_skins }}
      - name: Pull Git LFS
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/pull-lfs@main
        with:
          changed_skins_file: ${{ steps.discover.outputs.changed_skins_file }}
      - name: Prepare Assets
        id: prepare
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/prepare-assets@main
        with:
          all_skins: ${{ steps.discover.outputs.all_skins }}
      - name: Create Tag
        id: tag
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/create-tag@main
      - name: Generate Previews
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/generate-previews@main
        with:
          changed_skins_file: ${{ steps.discover.outputs.changed_skins_file }}
      - name: Generate Mod Icons and Convert Images
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/generate-icons@main
        with:
          changed_skins_file: ${{ steps.discover.outputs.changed_skins_file }}
      - name: Generate OSK
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/generate-osk@main
        with:
          changed_skins_file: ${{ steps.discover.outputs.changed_skins_file }}
      - name: Generate Documentation
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/generate-docs@main
        with:
          new_tag: ${{ steps.tag.outputs.new_tag }}
          readme_path: ${{ env.README_PATH }}
          doc_dir: ${{ env.DOC_DIR }}
          user_repository: ${{ steps.prepare.outputs.user_repository }}
      - name: Cleanup Files
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/cleanup@main
        with:
          all_skins: ${{ steps.discover.outputs.all_skins }}
      - name: Commit and Push
        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/git-commit-push@main
        with:
          new_tag: ${{ steps.tag.outputs.new_tag }}

Added (new workflow):

name: Check Links in README

on:
  workflow_dispatch:

jobs:
  check-links:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '20'

      - name: Install link checker
        run: npm install -g markdown-link-check

      - name: Check external links in all markdown files
        continue-on-error: true
        run: |
          # Create config file to handle custom domains and retries
          cat > .markdown-link-check.json << 'EOF'
          {
            "ignorePatterns": [
              {
                "pattern": "^/"
              }
            ],
            "replacementPatterns": [],
            "httpHeaders": [
              {
                "urls": ["https://git.sulej.net"],
                "headers": {
                  "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
                }
              }
            ],
            "timeout": "20s",
            "retryOn429": true,
            "retryCount": 3,
            "fallbackRetryDelay": "30s",
            "aliveStatusCodes": [200, 206, 301, 302, 307, 308]
          }
          EOF

          echo ""
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          echo "🌐 Checking External Links in All Markdown Files"
          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
          echo ""

          # Find all markdown files
          find . -name "*.md" -type f | sort > markdown_files.txt
          total_files=$(wc -l < markdown_files.txt)
          echo "📊 Found $total_files markdown files to check"
          echo ""

          # Extract and group all external links by context
          > all_broken_links.txt

          while IFS= read -r file; do
            # Run markdown-link-check and capture only errors
            markdown-link-check "$file" --config .markdown-link-check.json 2>&1 | grep "^\s*\[✖\]" >> all_broken_links.txt || true
          done < markdown_files.txt

          # Parse README.md to group links by skin/section
          if [ -f "README.md" ]; then
            echo "📋 Grouping broken links by context..."
            echo ""

            # Count total broken links
            total_broken=$(wc -l < all_broken_links.txt)

            if [ $total_broken -gt 0 ]; then
              echo "❌ Found $total_broken broken external links"
echo ""
# Group by category
declare -A general_links
declare -A skin_links
declare -A tag_links
while IFS= read -r line; do
url=$(echo "$line" | grep -oP 'https://[^\s]+' || echo "$line" | grep -oP 'http://[^\s]+')
# Categorize links
if [[ "$url" == *"/osc/skins"* ]]; then
general_links["$url"]=1
elif [[ "$url" == *"/src/tag/"* ]] || [[ "$url" == *"/media/tag/"* ]]; then
tag_links["$url"]=1
elif [[ "$url" == *"/export/"* ]] || [[ "$url" == *"/media/"* ]]; then
# Extract skin name from URL
skin_name=$(echo "$url" | grep -oP '/export/[^/]+' | sed 's|/export/||' | head -1)
if [ -z "$skin_name" ]; then
skin_name="Unknown"
fi
skin_links["$skin_name"]+="$url"$'\n'
else
general_links["$url"]=1
fi
done < all_broken_links.txt
# Display grouped results
if [ ${#general_links[@]} -gt 0 ]; then
echo "🔗 General Links:"
for url in "${!general_links[@]}"; do
echo " ❌ $url"
done
echo ""
fi
if [ ${#skin_links[@]} -gt 0 ]; then
echo "🎨 Skin-specific Links:"
for skin in "${!skin_links[@]}"; do
echo ""
echo " $skin:"
echo "${skin_links[$skin]}" | while IFS= read -r url; do
[ -n "$url" ] && echo " ❌ $url"
done
done
echo ""
fi
if [ ${#tag_links[@]} -gt 0 ]; then
echo "🏷️ Version Tags:"
for url in "${!tag_links[@]}"; do
echo " ❌ $url"
done
echo ""
fi
else
echo "✅ All external links are valid!"
fi
fi
- name: Check internal markdown file references
run: |
#!/bin/bash
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🔍 Checking Internal Markdown File References in All Markdown Files"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
# Find all markdown files and extract internal links with context
> internal_links_with_context.txt
find . -name "*.md" -type f | while IFS= read -r file; do
# Extract links with the skin name from headers
current_skin=""
while IFS= read -r line; do
# Check if it's a header (skin name)
if [[ "$line" =~ ^##[[:space:]]+\[([^\]]+)\] ]]; then
current_skin="${BASH_REMATCH[1]}"
fi
# Check for internal markdown links
if [[ "$line" =~ \]\((/[^\)]+\.md) ]]; then
link="${BASH_REMATCH[1]}"
echo "$current_skin|$link" >> internal_links_with_context.txt
fi
done < "$file"
done
total_count=$(wc -l < internal_links_with_context.txt)
echo "📊 Total internal markdown links found: $total_count"
echo ""
# Group by skin and check
declare -A skin_all_files
declare -A skin_missing_files
checked_count=0
total_missing=0
while IFS='|' read -r skin link; do
checked_count=$((checked_count + 1))
# Decode URL-encoded characters
decoded_link=$(echo "$link" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g')
# Remove leading slash to make it relative
file_path="${decoded_link#/}"
if [ -z "$skin" ]; then
skin="General"
fi
# Check if file exists
if [ -f "$file_path" ]; then
skin_all_files["$skin"]+="✅ $file_path"$'\n'
else
skin_all_files["$skin"]+="❌ $file_path"$'\n'
skin_missing_files["$skin"]+="$file_path"$'\n'
total_missing=$((total_missing + 1))
fi
done < internal_links_with_context.txt
# Report results
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
if [ $total_missing -gt 0 ]; then
echo "⚠️ RESULT: $total_missing of $total_count internal markdown files are MISSING"
else
echo "✅ RESULT: All $total_count internal markdown files exist!"
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
# Display all links grouped by skin
if [ ${#skin_all_files[@]} -gt 0 ]; then
for skin in "${!skin_all_files[@]}"; do
echo "📄 $skin:"
# Use printf instead of echo with pipe to avoid subshell issues
printf '%s\n' "${skin_all_files[$skin]}" | while IFS= read -r line; do
[ -n "$line" ] && echo " $line"
done
echo ""
done
fi
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
# Exit with error if files are missing
if [ $total_missing -gt 0 ]; then
echo "❌ SUMMARY: Workflow failed due to $total_missing missing markdown file(s)"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
exit 1
else
echo "✅ SUMMARY: All checks passed!"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
fi
- name: Check for broken image references
run: |
#!/bin/bash
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo "🖼️ Checking Image References in All Markdown Files"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
# Extract all image links from all markdown files with context
> image_links_with_context.txt
find . -name "*.md" -type f | while IFS= read -r file; do
current_skin=""
while IFS= read -r line; do
# Check if it's a header (skin name)
if [[ "$line" =~ ^##[[:space:]]+\[([^\]]+)\] ]]; then
current_skin="${BASH_REMATCH[1]}"
fi
# Check for image links
if [[ "$line" =~ !\[.*\]\(([^\)]+\.(png|jpg|jpeg|gif|webp|svg)) ]]; then
link="${BASH_REMATCH[1]}"
# Skip external URLs
if [[ ! "$link" =~ ^https?:// ]]; then
echo "$current_skin|$link" >> image_links_with_context.txt
fi
fi
done < "$file"
done
total_count=$(wc -l < image_links_with_context.txt)
echo "📊 Total local image references found: $total_count"
echo ""
# Group by skin and check
declare -A skin_all_images
declare -A skin_missing_images
checked_count=0
total_missing=0
while IFS='|' read -r skin link; do
checked_count=$((checked_count + 1))
# Decode URL-encoded characters
decoded_link=$(echo "$link" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g')
# Remove leading slash for absolute paths
if [[ "$decoded_link" =~ ^/ ]]; then
decoded_link="${decoded_link#/}"
fi
if [ -z "$skin" ]; then
skin="General"
fi
# Check if file exists
if [ -f "$decoded_link" ]; then
skin_all_images["$skin"]+="✅ $decoded_link"$'\n'
else
skin_all_images["$skin"]+="❌ $decoded_link"$'\n'
skin_missing_images["$skin"]+="$decoded_link"$'\n'
total_missing=$((total_missing + 1))
fi
done < image_links_with_context.txt
# Report results
echo ""
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
if [ $total_missing -gt 0 ]; then
echo "⚠️ RESULT: $total_missing of $total_count image files are MISSING (non-blocking)"
else
echo "✅ RESULT: All $total_count image files exist!"
fi
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
# Display all images grouped by skin
if [ ${#skin_all_images[@]} -gt 0 ]; then
for skin in "${!skin_all_images[@]}"; do
echo "🖼️ $skin:"
# Use printf instead of echo with pipe to avoid subshell issues
printf '%s\n' "${skin_all_images[$skin]}" | while IFS= read -r line; do
[ -n "$line" ] && echo " $line"
done
echo ""
done
fi
# Don't fail the workflow for missing images, just warn
exit 0
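
Note: the external-link step can be reproduced locally before pushing. A minimal sketch, assuming Node.js is installed and the .markdown-link-check.json config shown above has been written to the repository root:

    npm install -g markdown-link-check
    find . -name '*.md' -type f | sort | while IFS= read -r file; do
      # Print only the broken-link lines, mirroring the workflow step above
      markdown-link-check "$file" --config .markdown-link-check.json 2>&1 | grep '^\s*\[✖\]' || true
    done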
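
Note: the long sed chains used to decode percent-encoded paths only handle the escape sequences they list explicitly. A hypothetical generic alternative, assuming python3 is available on the runner (illustrative helper, not part of this commit):

    # urldecode: decode any percent-encoded sequence as UTF-8 instead of
    # enumerating each escape with sed (hypothetical helper for illustration)
    urldecode() {
      python3 -c 'import sys, urllib.parse; print(urllib.parse.unquote(sys.argv[1]))' "$1"
    }

    # Example with a made-up path:
    urldecode "/docs/%E2%9C%A8%20Example%20Skin.md"   # -> /docs/✨ Example Skin.md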