generated from osc/skins-template
Update .gitea/workflows/ci.yml
All checks were successful
Generate Skin previews, OSK files and per skin documentation / Full CI/CD Pipeline (push) Successful in 23s
@@ -1,329 +1,117 @@
-name: Check Links in README
+name: Generate Skin previews, OSK files and per skin documentation
 
 on:
+  push:
+    paths:
+      - '.gitea/workflows/*'
+      - 'Skins/**/*'
   workflow_dispatch:
+    inputs:
+      force_rebuild:
+        description: 'Force rebuild all skins'
+        required: false
+        default: 'false'
+      target_skins:
+        description: 'Comma-separated list of skin folder names to rebuild (e.g., "Skin1,Skin2")'
+        required: false
+        default: ''
 
+env:
+  DANSER_DIR: "/app/danser"
+  DANSER_VIDEO_DIR: "/app/danser/videos"
+  DANSER_SCREENSHOT_DIR: "/app/danser/screenshots"
+  SKINS_DIR: "${{ github.workspace }}/Skins"
+  DANSER_SKINS_DIR: "/app/danser/skins"
+  DEFAULT_SKIN_DIR: "/app/danser/skins/default-skin"
+  REPO_SCREENSHOT_DIR: "${{ github.workspace }}/media/gameplay"
+  REPO_MOD_ICONS_DIR: "${{ github.workspace }}/media/icons"
+  REPO_RANKING_PANEL_DIR: "${{ github.workspace }}/media/panel"
+  REPO_THUMBNAIL_DIR: "${{ github.workspace }}/media/thumbnail"
+  README_PATH: "${{ github.workspace }}/README.md"
+  GAMEPLAY_REPLAY_PATH: "/app/danser/custom-replays/yomi_yori.osr"
+  THUMBNAIL_REPLAY_PATH: "/app/danser/custom-replays/combo_colors.osr"
+  PANEL_REPLAY_PATH: "/app/danser/custom-replays/2000_gekis.osr"
+  OSK_PATH: "${{ github.workspace }}/export"
+  IMAGE_NAME: osc/skins-image
+  REGISTRY_URL: "https://${{ vars.CONTAINER_REGISTRY }}"
+  OSU_ID: ${{ vars.OSUID }}
+  DOC_DIR: "${{ github.workspace }}/docs"
+
+permissions:
+  contents: write
+
 jobs:
-  check-links:
-    runs-on: ubuntu-latest
+  generate_everything:
+    name: Full CI/CD Pipeline
+    runs-on: danser
+    container:
+      image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
+      options: >-
+        --gpus all
+        --privileged
+        --env NVIDIA_DRIVER_CAPABILITIES=all
+        --env NVIDIA_VISIBLE_DEVICES=all
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
-
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
         with:
-          node-version: '20'
+          fetch-depth: 0
+          token: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Install link checker
-        run: npm install -g markdown-link-check
+      - name: Discover and Detect Skins
+        id: discover
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/discover-skins@main
+        with:
+          force_rebuild: ${{ github.event.inputs.force_rebuild }}
+          target_skins: ${{ github.event.inputs.target_skins }}
 
-      - name: Check external links in all markdown files
-        continue-on-error: true
-        run: |
-          # Create config file to handle custom domains and retries
-          cat > .markdown-link-check.json << 'EOF'
-          {
-            "ignorePatterns": [
-              {
-                "pattern": "^/"
-              }
-            ],
-            "replacementPatterns": [],
-            "httpHeaders": [
-              {
-                "urls": ["https://git.sulej.net"],
-                "headers": {
-                  "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
-                }
-              }
-            ],
-            "timeout": "20s",
-            "retryOn429": true,
-            "retryCount": 3,
-            "fallbackRetryDelay": "30s",
-            "aliveStatusCodes": [200, 206, 301, 302, 307, 308]
-          }
-          EOF
+      - name: Pull Git LFS
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/pull-lfs@main
+        with:
+          changed_skins_file: ${{ steps.discover.outputs.changed_skins_file }}
 
-          echo ""
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo "🌐 Checking External Links in All Markdown Files"
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo ""
+      - name: Prepare Assets
+        id: prepare
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/prepare-assets@main
+        with:
+          all_skins: ${{ steps.discover.outputs.all_skins }}
 
-          # Find all markdown files
-          find . -name "*.md" -type f | sort > markdown_files.txt
-          total_files=$(wc -l < markdown_files.txt)
+      - name: Create Tag
+        id: tag
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/create-tag@main
 
-          echo "📊 Found $total_files markdown files to check"
-          echo ""
+      - name: Generate Previews
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/generate-previews@main
+        with:
+          changed_skins_file: ${{ steps.discover.outputs.changed_skins_file }}
 
-          # Extract and group all external links by context
-          > all_broken_links.txt
+      - name: Generate Mod Icons and Convert Images
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/generate-icons@main
+        with:
+          changed_skins_file: ${{ steps.discover.outputs.changed_skins_file }}
 
-          while IFS= read -r file; do
-            # Run markdown-link-check and capture only errors
-            markdown-link-check "$file" --config .markdown-link-check.json 2>&1 | grep "^\s*\[✖\]" >> all_broken_links.txt || true
-          done < markdown_files.txt
+      - name: Generate OSK
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/generate-osk@main
+        with:
+          changed_skins_file: ${{ steps.discover.outputs.changed_skins_file }}
 
-          # Parse README.md to group links by skin/section
-          if [ -f "README.md" ]; then
-            echo "📋 Grouping broken links by context..."
-            echo ""
+      - name: Generate Documentation
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/generate-docs@main
+        with:
+          new_tag: ${{ steps.tag.outputs.new_tag }}
+          readme_path: ${{ env.README_PATH }}
+          doc_dir: ${{ env.DOC_DIR }}
+          user_repository: ${{ steps.prepare.outputs.user_repository }}
 
-            # Count total broken links
-            total_broken=$(wc -l < all_broken_links.txt)
+      - name: Cleanup Files
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/cleanup@main
+        with:
+          all_skins: ${{ steps.discover.outputs.all_skins }}
 
-            if [ $total_broken -gt 0 ]; then
-              echo "❌ Found $total_broken broken external links"
-              echo ""
+      - name: Commit and Push
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/git-commit-push@main
+        with:
+          new_tag: ${{ steps.tag.outputs.new_tag }}
 
-              # Group by category
-              declare -A general_links
-              declare -A skin_links
-              declare -A tag_links
-
-              while IFS= read -r line; do
-                url=$(echo "$line" | grep -oP 'https://[^\s]+' || echo "$line" | grep -oP 'http://[^\s]+')
-
-                # Categorize links
-                if [[ "$url" == *"/osc/skins"* ]]; then
-                  general_links["$url"]=1
-                elif [[ "$url" == *"/src/tag/"* ]] || [[ "$url" == *"/media/tag/"* ]]; then
-                  tag_links["$url"]=1
-                elif [[ "$url" == *"/export/"* ]] || [[ "$url" == *"/media/"* ]]; then
-                  # Extract skin name from URL
-                  skin_name=$(echo "$url" | grep -oP '/export/[^/]+' | sed 's|/export/||' | head -1)
-                  if [ -z "$skin_name" ]; then
-                    skin_name="Unknown"
-                  fi
-                  skin_links["$skin_name"]+="$url"$'\n'
-                else
-                  general_links["$url"]=1
-                fi
-              done < all_broken_links.txt
-
-              # Display grouped results
-              if [ ${#general_links[@]} -gt 0 ]; then
-                echo "🔗 General Links:"
-                for url in "${!general_links[@]}"; do
-                  echo " ❌ $url"
-                done
-                echo ""
-              fi
-
-              if [ ${#skin_links[@]} -gt 0 ]; then
-                echo "🎨 Skin-specific Links:"
-                for skin in "${!skin_links[@]}"; do
-                  echo ""
-                  echo " $skin:"
-                  echo "${skin_links[$skin]}" | while IFS= read -r url; do
-                    [ -n "$url" ] && echo " ❌ $url"
-                  done
-                done
-                echo ""
-              fi
-
-              if [ ${#tag_links[@]} -gt 0 ]; then
-                echo "🏷️ Version Tags:"
-                for url in "${!tag_links[@]}"; do
-                  echo " ❌ $url"
-                done
-                echo ""
-              fi
-            else
-              echo "✅ All external links are valid!"
-            fi
-          fi
-
-      - name: Check internal markdown file references
-        run: |
-          #!/bin/bash
-
-          echo ""
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo "🔍 Checking Internal Markdown File References in All Markdown Files"
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo ""
-
-          # Find all markdown files and extract internal links with context
-          > internal_links_with_context.txt
-          find . -name "*.md" -type f | while IFS= read -r file; do
-            # Extract links with the skin name from headers
-            current_skin=""
-            while IFS= read -r line; do
-              # Check if it's a header (skin name)
-              if [[ "$line" =~ ^##[[:space:]]+\[([^\]]+)\] ]]; then
-                current_skin="${BASH_REMATCH[1]}"
-              fi
-              # Check for internal markdown links
-              if [[ "$line" =~ \]\((/[^\)]+\.md) ]]; then
-                link="${BASH_REMATCH[1]}"
-                echo "$current_skin|$link" >> internal_links_with_context.txt
-              fi
-            done < "$file"
-          done
-
-          total_count=$(wc -l < internal_links_with_context.txt)
-          echo "📊 Total internal markdown links found: $total_count"
-          echo ""
-
-          # Group by skin and check
-          declare -A skin_all_files
-          declare -A skin_missing_files
-          checked_count=0
-          total_missing=0
-
-          while IFS='|' read -r skin link; do
-            checked_count=$((checked_count + 1))
-
-            # Decode URL-encoded characters
-            decoded_link=$(echo "$link" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g')
-
-            # Remove leading slash to make it relative
-            file_path="${decoded_link#/}"
-
-            if [ -z "$skin" ]; then
-              skin="General"
-            fi
-
-            # Check if file exists
-            if [ -f "$file_path" ]; then
-              skin_all_files["$skin"]+="✅ $file_path"$'\n'
-            else
-              skin_all_files["$skin"]+="❌ $file_path"$'\n'
-              skin_missing_files["$skin"]+="$file_path"$'\n'
-              total_missing=$((total_missing + 1))
-            fi
-          done < internal_links_with_context.txt
-
-          # Report results
-          echo ""
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-
-          if [ $total_missing -gt 0 ]; then
-            echo "⚠️ RESULT: $total_missing of $total_count internal markdown files are MISSING"
-          else
-            echo "✅ RESULT: All $total_count internal markdown files exist!"
-          fi
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo ""
-
-          # Display all links grouped by skin
-          if [ ${#skin_all_files[@]} -gt 0 ]; then
-            for skin in "${!skin_all_files[@]}"; do
-              echo "📄 $skin:"
-              # Use printf instead of echo with pipe to avoid subshell issues
-              printf '%s\n' "${skin_all_files[$skin]}" | while IFS= read -r line; do
-                [ -n "$line" ] && echo " $line"
-              done
-              echo ""
-            done
-          fi
-
-          echo ""
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-
-          # Exit with error if files are missing
-          if [ $total_missing -gt 0 ]; then
-            echo "❌ SUMMARY: Workflow failed due to $total_missing missing markdown file(s)"
-            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-            exit 1
-          else
-            echo "✅ SUMMARY: All checks passed!"
-            echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          fi
-
-      - name: Check for broken image references
-        run: |
-          #!/bin/bash
-
-          echo ""
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo "🖼️ Checking Image References in All Markdown Files"
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo ""
-
-          # Extract all image links from all markdown files with context
-          > image_links_with_context.txt
-          find . -name "*.md" -type f | while IFS= read -r file; do
-            current_skin=""
-            while IFS= read -r line; do
-              # Check if it's a header (skin name)
-              if [[ "$line" =~ ^##[[:space:]]+\[([^\]]+)\] ]]; then
-                current_skin="${BASH_REMATCH[1]}"
-              fi
-              # Check for image links
-              if [[ "$line" =~ !\[.*\]\(([^\)]+\.(png|jpg|jpeg|gif|webp|svg)) ]]; then
-                link="${BASH_REMATCH[1]}"
-                # Skip external URLs
-                if [[ ! "$link" =~ ^https?:// ]]; then
-                  echo "$current_skin|$link" >> image_links_with_context.txt
-                fi
-              fi
-            done < "$file"
-          done
-
-          total_count=$(wc -l < image_links_with_context.txt)
-          echo "📊 Total local image references found: $total_count"
-          echo ""
-
-          # Group by skin and check
-          declare -A skin_all_images
-          declare -A skin_missing_images
-          checked_count=0
-          total_missing=0
-
-          while IFS='|' read -r skin link; do
-            checked_count=$((checked_count + 1))
-
-            # Decode URL-encoded characters
-            decoded_link=$(echo "$link" | sed 's/%20/ /g' | sed 's/%23/#/g' | sed 's/%28/(/g' | sed 's/%29/)/g' | sed 's/%E2%80%A2/•/g' | sed 's/%E1%9A%96/ᚖ/g' | sed 's/%E3%80%8A/《/g' | sed 's/%E3%80%8B/》/g' | sed 's/%E3%80%8E/『/g' | sed 's/%E3%80%8F/』/g' | sed 's/%E2%9B%94/⛔/g' | sed 's/%E2%9C%A8/✨/g' | sed 's/%7B/{/g' | sed 's/%7D/}/g' | sed 's/%2B/+/g' | sed 's/%E3%83%86/テ/g' | sed 's/%E3%83%B3/ン/g' | sed 's/%E3%83%8D/ネ/g' | sed 's/%E3%82%B9/ス/g' | sed 's/%E3%82%A4/イ/g' | sed 's/%E3%83%BB/・/g' | sed 's/%E3%83%95/フ/g' | sed 's/%E3%83%AA/リ/g' | sed 's/%E3%83%BC/ー/g' | sed 's/%E3%83%8A/ナ/g' | sed 's/%5B/[/g' | sed 's/%5D/]/g')
-
-            # Remove leading slash for absolute paths
-            if [[ "$decoded_link" =~ ^/ ]]; then
-              decoded_link="${decoded_link#/}"
-            fi
-
-            if [ -z "$skin" ]; then
-              skin="General"
-            fi
-
-            # Check if file exists
-            if [ -f "$decoded_link" ]; then
-              skin_all_images["$skin"]+="✅ $decoded_link"$'\n'
-            else
-              skin_all_images["$skin"]+="❌ $decoded_link"$'\n'
-              skin_missing_images["$skin"]+="$decoded_link"$'\n'
-              total_missing=$((total_missing + 1))
-            fi
-          done < image_links_with_context.txt
-
-          # Report results
-          echo ""
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-
-          if [ $total_missing -gt 0 ]; then
-            echo "⚠️ RESULT: $total_missing of $total_count image files are MISSING (non-blocking)"
-          else
-            echo "✅ RESULT: All $total_count image files exist!"
-          fi
-          echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
-          echo ""
-
-          # Display all images grouped by skin
-          if [ ${#skin_all_images[@]} -gt 0 ]; then
-            for skin in "${!skin_all_images[@]}"; do
-              echo "🖼️ $skin:"
-              # Use printf instead of echo with pipe to avoid subshell issues
-              printf '%s\n' "${skin_all_images[$skin]}" | while IFS= read -r line; do
-                [ -n "$line" ] && echo " $line"
-              done
-              echo ""
-            done
-          fi
-
-          # Don't fail the workflow for missing images, just warn
-          exit 0
+      - name: Test links
+        uses: ${{ env.REGISTRY_URL}}/osc/reusable-actions/.gitea/actions/test-links@main
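The force_rebuild and target_skins inputs added in this commit can also be exercised without pushing a change. A minimal sketch, assuming a Gitea instance recent enough to expose the GitHub-compatible workflow dispatch REST endpoint and a personal access token with repository scope; the host, owner/repo, ref, token and skin names below are placeholders, not values taken from this commit:

    # Hypothetical manual dispatch of ci.yml with the new inputs.
    # GITEA_HOST, OWNER_REPO, TOKEN and the ref are assumptions; substitute your own.
    GITEA_HOST="https://git.example.com"
    OWNER_REPO="osc/skins"
    TOKEN="<personal-access-token>"

    curl -sS -X POST \
      -H "Authorization: token ${TOKEN}" \
      -H "Content-Type: application/json" \
      -d '{"ref": "main", "inputs": {"force_rebuild": "true", "target_skins": "Skin1,Skin2"}}' \
      "${GITEA_HOST}/api/v1/repos/${OWNER_REPO}/actions/workflows/ci.yml/dispatches"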