add conditional action

commit e7170f6148 (parent 8b4deaba9b), 2025-07-30 15:44:00 +02:00


@@ -15,6 +15,10 @@ on:
description: 'Comma-separated list of skin folder names to rebuild (e.g., "Skin1,Skin2")'
required: false
default: ''
soft_run:
description: 'Run doc/regeneration steps even if no skins changed'
required: false
default: 'false'
env:
DANSER_PATH: "/app/danser/danser-cli"
@@ -40,35 +44,29 @@ env:
DOC_DIR: "${{ github.workspace }}/docs"
jobs:
generate_everything:
name: Full CI/CD Pipeline
detect_changed_skins:
name: Detect Changed Skins
runs-on: danser
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
options: >-
--gpus all
--privileged
--security-opt seccomp=unconfined
--security-opt apparmor=unconfined
--cap-add=ALL
--env NVIDIA_DRIVER_CAPABILITIES=all
--env NVIDIA_VISIBLE_DEVICES=all
--user 0:0
outputs:
has_changes: ${{ steps.set.outputs.has_changes }}
soft_run: ${{ github.event.inputs.soft_run || 'false' }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
tags: true
token: ${{ secrets.TOKEN }}
- name: Discover all skins
id: discover
shell: bash
run: |
echo "Discovering all skins in $SKINS_DIR…"
echo "Discovering all skins…"
mapfile -t skins < <(
find "$SKINS_DIR" -mindepth 1 -maxdepth 1 -type d \
| sed 's|'"$SKINS_DIR"'/||'
find "${{ env.SKINS_DIR }}" -mindepth 1 -maxdepth 1 -type d \
| sed 's|.*/||'
)
{
echo 'ALL_SKINS_DIR<<EOF'
@@ -77,102 +75,114 @@ jobs:
done
echo 'EOF'
} >> "$GITHUB_ENV"
echo "→ ALL_SKINS_DIR set (newline-delimited list)"
- name: Detect Changed Skin Directories
- name: Detect changed skins
id: detect
shell: bash
run: |
echo "[Detect Changed Skin Directories Started]"
readarray -t all_skins <<< "$ALL_SKINS_DIR"
force_rebuild="${{ github.event.inputs.force_rebuild }}"
target_skins="${{ github.event.inputs.target_skins }}"
skins=()
deleted_skins=()
echo "→ Force rebuild flag: $force_rebuild"
echo "→ Target skins input: $target_skins"
if [[ "$force_rebuild" == "true" ]]; then
echo "→ Force rebuild is enabled. Using ALL_SKINS_DIR for full list…"
skins=("${all_skins[@]}")
echo " ✓ Found ${#skins[@]} skin directories (from ALL_SKINS_DIR)"
elif [[ -n "$target_skins" ]]; then
echo "→ Target skins specified. Using target_skins input…"
IFS=',' read -r -a input_skins <<< "$target_skins"
for s in "${input_skins[@]}"; do
s="${s#"${s%%[![:space:]]*}"}"
s="${s%"${s##*[![:space:]]}"}"
s="${s## }"; s="${s%% }"
[[ -n "$s" ]] && skins+=("$s")
done
echo " ✓ Found ${#skins[@]} skin(s) from target_skins input"
else
echo "→ No rebuild flags set. Finding latest git tag..."
latest_tag=$(git tag --sort=-creatordate | head -n 1 || true)
if [[ -n "$latest_tag" ]]; then
echo "→ Latest tag found: $latest_tag"
echo "→ Finding added/modified skins since $latest_tag…"
mapfile -t skins < <(
git diff --name-only -z --diff-filter=AM "$latest_tag" HEAD \
| while IFS= read -r -d '' file; do
[[ $file == Skins/* ]] && echo "${file#Skins/}" | cut -d/ -f1
done | sort -u
git diff --name-only -z --diff-filter=AM "$latest_tag" HEAD |
while IFS= read -r -d '' file; do
[[ $file == Skins/* ]] && echo "${file#Skins/}" | cut -d/ -f1
done | sort -u
)
echo " ✓ Found ${#skins[@]} added/modified skins"
echo "→ Finding deleted skins since $latest_tag…"
mapfile -t deleted_skins < <(
git diff --name-only -z --diff-filter=D "$latest_tag" HEAD \
| while IFS= read -r -d '' file; do
[[ $file == Skins/* ]] && echo "${file#Skins/}" | cut -d/ -f1
done | sort -u
)
if [ "${#deleted_skins[@]}" -gt 0 ]; then
for d in "${deleted_skins[@]}"; do
echo "→ Skin '$d' was deleted"
done
else
echo " ✓ No skins deleted"
fi
else
echo "→ No tag found. Falling back to ALL_SKINS_DIR for full list…"
skins=("${all_skins[@]}")
echo " ✓ Found ${#skins[@]} skin directories (from ALL_SKINS_DIR)"
fi
fi
echo ""
echo "[Cleaning Skin Names]"
uniq_skins=()
for skin in "${skins[@]}"; do
skin="${skin#"${skin%%[![:space:]]*}"}"
skin="${skin%"${skin##*[![:space:]]}"}"
skin="${skin## }"; skin="${skin%% }"
[[ -n "$skin" ]] && uniq_skins+=("$skin")
done
echo " ✓ ${#uniq_skins[@]} valid skin names after cleaning"
echo ""
if [ "${#uniq_skins[@]}" -eq 0 ]; then
echo "→ No added/modified skins detected."
if [[ ${#uniq_skins[@]} -eq 0 ]]; then
echo "No skins changed"
echo "CHANGED_SKINS_FILE=" >> "$GITHUB_ENV"
else
echo "[Writing Changed Skins to File]"
changed_skins_file=$(mktemp)
printf "%s\n" "${uniq_skins[@]}" > "$changed_skins_file"
echo " ✓ Skins written to $changed_skins_file"
echo "CHANGED_SKINS_FILE=$changed_skins_file" >> "$GITHUB_ENV"
changed_file="/tmp/changed_skins.txt"
printf "%s\n" "${uniq_skins[@]}" > "$changed_file"
echo "CHANGED_SKINS_FILE=$changed_file" >> "$GITHUB_ENV"
fi
echo ""
echo "[Detect Changed Skin Directories Complete — ${#uniq_skins[@]} skins processed, ${#deleted_skins[@]} skins deleted]"
- name: Set outputs
id: set
shell: bash
run: |
has_changes=false
if [ -n "${CHANGED_SKINS_FILE:-}" ] && [ -s "$CHANGED_SKINS_FILE" ]; then
has_changes=true
fi
echo "has_changes=$has_changes" >> "$GITHUB_OUTPUT"
- name: Pull Git LFS objects for changed skins (and core assets)
calculate_git_tag:
name: Calculate Git Tag
runs-on: danser
needs: detect_changed_skins
if: >-
needs.detect_changed_skins.outputs.has_changes == 'true' ||
github.event.inputs.soft_run == 'true'
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
outputs:
new_tag: ${{ steps.tag.outputs.new_tag }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
tags: true
token: ${{ secrets.TOKEN }}
- name: Calculate new tag
id: tag
shell: bash
run: |
latest_tag=$(git describe --tags $(git rev-list --tags --max-count=1) 2>/dev/null || echo "")
if [ -z "$latest_tag" ]; then
new_tag="v1.0.0"
else
IFS='.' read -r major minor patch <<< "${latest_tag#v}"
minor=$((minor + 1))
patch=0
new_tag="v${major}.${minor}.${patch}"
fi
echo "new_tag=$new_tag" >> "$GITHUB_OUTPUT"
prepare_assets:
name: Prepare Assets
runs-on: danser
needs: detect_changed_skins
if: needs.detect_changed_skins.outputs.has_changes == 'true'
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
tags: true
token: ${{ secrets.TOKEN }}
- name: Pull Git LFS for changed skins and core assets
shell: bash
run: |
if [ -z "${CHANGED_SKINS_FILE:-}" ] || [ ! -s "$CHANGED_SKINS_FILE" ]; then
@@ -193,38 +203,31 @@ jobs:
includes="$includes,$skin_includes"
echo "→ Pulling LFS objects for patterns: $includes"
echo "→ Pulling LFS objects for: $includes"
git lfs pull --include="$includes"
- name: Extract Repository path
shell: bash
run: |
echo "Extracting repository path..."
USER_REPOSITORY="${{ github.workspace }}"
USER_REPOSITORY="${USER_REPOSITORY#/workspace/}"
USER_REPOSITORY="${USER_REPOSITORY%/}"
echo "Repository path extracted: $USER_REPOSITORY"
echo "USER_REPOSITORY=$USER_REPOSITORY" >> $GITHUB_ENV
- name: Set XDG_RUNTIME_DIR
shell: bash
run: |
echo "Setting XDG_RUNTIME_DIR..."
mkdir -p /tmp/xdg_runtime_dir
chmod 0700 /tmp/xdg_runtime_dir
echo "XDG_RUNTIME_DIR=/tmp/xdg_runtime_dir" >> "$GITHUB_ENV"
echo "XDG_RUNTIME_DIR set."
- name: Extract Repository Path
shell: bash
run: |
repo="${{ github.workspace }}"
repo="${repo#/workspace/}"
repo="${repo%/}"
echo "USER_REPOSITORY=$repo" >> "$GITHUB_ENV"
- name: Create directories for assets
shell: bash
run: |
echo "Creating base directories for assets..."
mkdir -p "$REPO_SCREENSHOT_DIR" "$REPO_MOD_ICONS_DIR" "$REPO_RANKING_PANEL_DIR" "$OSK_PATH" "$REPO_THUMBNAIL_DIR"
readarray -t skins <<< "$ALL_SKINS_DIR"
for skin in "${skins[@]}"; do
echo " → Creating subdirs for '$skin'…"
mkdir -p \
"$REPO_SCREENSHOT_DIR/$skin" \
"$REPO_MOD_ICONS_DIR/$skin" \
@@ -233,23 +236,22 @@ jobs:
"$REPO_THUMBNAIL_DIR/$skin"
done
echo "All asset directories created for ${#skins[@]} skins."
- name: Create New Tag
shell: bash
run: |
echo "Computing new tag..."
latest_tag=$(git describe --tags $(git rev-list --tags --max-count=1) 2>/dev/null || echo "")
if [ -z "$latest_tag" ]; then
new_tag="v1.0.0"
else
IFS='.' read -r major minor patch <<< "${latest_tag#v}"
minor=$((minor + 1))
patch=0
new_tag="v${major}.${minor}.${patch}"
fi
echo "new_tag=$new_tag" >> $GITHUB_ENV
echo "Computed new tag: $new_tag"
generate_videos_and_screenshots:
name: Generate Videos and Screenshots
runs-on: danser
needs: prepare_assets
if: needs.detect_changed_skins.outputs.has_changes == 'true'
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
options: >-
--gpus all
--env NVIDIA_DRIVER_CAPABILITIES=all
--env NVIDIA_VISIBLE_DEVICES=all
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.TOKEN }}
- name: Move Skin files to Danser Skins directory
shell: bash
@@ -291,7 +293,7 @@ jobs:
LOGFILE="/tmp/danser_log_$INDEX.txt"
echo " → Generating video..."
if ! xvfb-run -a "$DANSER_DIR/danser-cli" \
if ! xvfb-run -a "$DANSER_PATH" \
-replay "$GAMEPLAY_REPLAY_PATH" -record -skip -start=300 -end=307 -noupdatecheck \
-out="$SKIN_NAME" -skin="$SKIN_NAME" >"$LOGFILE" 2>&1; then
echo " ✖ Video failed for $SKIN_NAME"; cat "$LOGFILE"; INDEX=$((INDEX+1)); continue
@@ -319,7 +321,7 @@ jobs:
fi
echo " → Taking screenshot..."
if ! xvfb-run -a "$DANSER_DIR/danser-cli" \
if ! xvfb-run -a "$DANSER_PATH" \
-replay "$PANEL_REPLAY_PATH" -skip -noupdatecheck -ss 28 \
-out="$SKIN_NAME" -skin="$SKIN_NAME" >>"$LOGFILE" 2>&1; then
echo " ✖ Screenshot failed for $SKIN_NAME"; cat "$LOGFILE"; INDEX=$((INDEX+1)); continue
@@ -334,7 +336,7 @@ jobs:
fi
echo " → Taking thumbnail screenshot..."
if ! xvfb-run -a "$DANSER_DIR/danser-cli" \
if ! xvfb-run -a "$DANSER_PATH" \
-replay "$THUMBNAIL_REPLAY_PATH" -skip -noupdatecheck -ss 1.3 \
-out="${SKIN_NAME}_thumb" -skin="$SKIN_NAME" >>"$LOGFILE" 2>&1; then
echo " ✖ Thumbnail screenshot failed for $SKIN_NAME"; cat "$LOGFILE"; INDEX=$((INDEX+1)); continue
@@ -354,7 +356,20 @@ jobs:
echo ""
echo "[Danser Job Finished — processed $SKIN_COUNT skins]"
- name: Rename Generated Assets Based on skin.ini
rename_assets_from_skin_ini:
name: Rename Assets Based on skin.ini
runs-on: danser
needs: generate_videos_and_screenshots
if: needs.detect_changed_skins.outputs.has_changes == 'true'
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.TOKEN }}
- name: Rename assets based on skin.ini
shell: bash
run: |
echo "[Asset Renaming Job Started]"
@@ -367,24 +382,21 @@ jobs:
mapfile -t skins < "$CHANGED_SKINS_FILE"
[ "${#skins[@]}" -eq 0 ] && { echo "No skins to rename. Exiting."; exit 0; }
sanitize_filename() {
echo "$1" | tr -d '\000-\037' \
| sed -e 's#[\\/:\*\?"<>|]#-#g' \
-e 's/^[[:space:]]*//' \
-e 's/[[:space:]]*$//'
}
SKIN_COUNT=${#skins[@]}
INDEX=1
sanitize_filename() {
echo "$1" | \
tr -d '\000-\037' | \
sed -e 's#[\\/:\*\?"<>|]#-#g' | \
sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
}
for skin_path in "${skins[@]}"; do
[ -z "$skin_path" ] && continue
SKIN_DIR_NAME="$skin_path"
SKIN_DIR="$DANSER_SKINS_DIR/$skin_path"
if [ ! -d "$SKIN_DIR" ]; then
echo "Skipping missing skin directory: $SKIN_DIR"
continue
fi
SKIN_DIR="$SKINS_DIR/$skin_path"
[ ! -d "$SKIN_DIR" ] && { echo "Skipping missing skin directory: $SKIN_DIR"; continue; }
echo "Processing skin $INDEX/$SKIN_COUNT: $SKIN_DIR_NAME"
@@ -407,20 +419,17 @@ jobs:
THUMBNAIL_DIR="$REPO_THUMBNAIL_DIR/$SKIN_DIR_NAME"
if [ -f "$VIDEO_DIR/$SKIN_DIR_NAME.mp4" ] && [ "$SKIN_DIR_NAME" != "$skin_header" ]; then
mv -f "$VIDEO_DIR/$SKIN_DIR_NAME.mp4" \
"$VIDEO_DIR/$skin_header.mp4" || true
mv -f "$VIDEO_DIR/$SKIN_DIR_NAME.mp4" "$VIDEO_DIR/$skin_header.mp4" || true
echo " ✓ Renamed MP4 to $VIDEO_DIR/$skin_header.mp4"
fi
if [ -f "$PNG_DIR/$SKIN_DIR_NAME.png" ] && [ "$SKIN_DIR_NAME" != "$skin_header" ]; then
mv -f "$PNG_DIR/$SKIN_DIR_NAME.png" \
"$PNG_DIR/$skin_header.png" || true
mv -f "$PNG_DIR/$SKIN_DIR_NAME.png" "$PNG_DIR/$skin_header.png" || true
echo " ✓ Renamed PNG to $PNG_DIR/$skin_header.png"
fi
if [ -f "$THUMBNAIL_DIR/$SKIN_DIR_NAME.png" ] && [ "$SKIN_DIR_NAME" != "$skin_header" ]; then
mv -f "$THUMBNAIL_DIR/$SKIN_DIR_NAME.png" \
"$THUMBNAIL_DIR/$skin_header.png" || true
mv -f "$THUMBNAIL_DIR/$SKIN_DIR_NAME.png" "$THUMBNAIL_DIR/$skin_header.png" || true
echo " ✓ Renamed thumbnail to $THUMBNAIL_DIR/$skin_header.png"
fi
@@ -430,7 +439,20 @@ jobs:
echo ""
echo "[Asset Renaming Complete — processed $SKIN_COUNT skins]"
- name: Generate Mod Icons (WEBP)
generate_mod_icons:
name: Generate Mod Icons (WEBP)
runs-on: danser
needs: prepare_assets
if: needs.detect_changed_skins.outputs.has_changes == 'true'
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.TOKEN }}
- name: Generate mod icon montages
shell: bash
run: |
echo "[Mod Icon Generation Job Started]"
@@ -444,95 +466,102 @@ jobs:
[ "${#skin_dirs[@]}" -eq 0 ] && { echo "No skins to process. Exiting."; exit 0; }
sanitize_filename() {
echo "$1" | \
tr -d '\000-\037' | \
sed -e 's#[\\/:\*\?"<>|]#-#g' | \
sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
echo "$1" | tr -d '\000-\037' \
| sed -e 's#[\\/:\*\?"<>|]#-#g' \
-e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
}
ICONS_JSON_FILE="${{ github.workspace }}/.gitea/workflows/icons.json"
group1_icons=$(jq -r '.group1 | join(" ")' "$ICONS_JSON_FILE")
group2_icons=$(jq -r '.group2 | join(" ")' "$ICONS_JSON_FILE")
group3_icons=$(jq -r '.group3 | join(" ")' "$ICONS_JSON_FILE")
group1_icons=$(jq -r '.group1 | join(" ")' "$ICONS_JSON_FILE")
group2_icons=$(jq -r '.group2 | join(" ")' "$ICONS_JSON_FILE")
group3_icons=$(jq -r '.group3 | join(" ")' "$ICONS_JSON_FILE")
BLANK_IMAGE="blank.png"
magick -size "160x160" xc:none "$BLANK_IMAGE"
BLANK_IMAGE=\"blank.png\"
magick -size \"160x160\" xc:none \"$BLANK_IMAGE\"
SKIN_COUNT=${#skin_dirs[@]}
INDEX=1
for skin_path in "${skin_dirs[@]}"; do
SKIN_DIR="$DANSER_SKINS_DIR/$skin_path"
[ ! -d "$SKIN_DIR" ] && { echo "Skipping missing skin directory: $SKIN_DIR"; ((INDEX++)); continue; }
for skin_path in \"${skin_dirs[@]}\"; do
SKIN_DIR=\"$SKINS_DIR/$skin_path\"
[ ! -d \"$SKIN_DIR\" ] && { echo \"Skipping missing skin directory: $SKIN_DIR\"; ((INDEX++)); continue; }
skin_header="$skin_path"
ini_file=$(find "$SKIN_DIR" -maxdepth 1 -iname "skin.ini" | head -n1 || true)
if [ -f "$ini_file" ]; then
name_line=$(grep -i '^[[:space:]]*Name:' "$ini_file" | head -n1 || true)
if [ -n "$name_line" ]; then
val="${name_line#*:}"
val="$(echo "$val" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
if [ -n "$val" ]; then
sanitized="$(sanitize_filename "$val")"
[ -n "$sanitized" ] && skin_header="$sanitized"
fi
skin_header=\"$skin_path\"
ini_file=$(find \"$SKIN_DIR\" -maxdepth 1 -iname \"skin.ini\" | head -n1 || true)
if [ -f \"$ini_file\" ]; then
name_line=$(grep -i '^[[:space:]]*Name:' \"$ini_file\" | head -n1 || true)
if [ -n \"$name_line\" ]; then
val=\"${name_line#*:}\"
val=\"$(echo \"$val\" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')\"
[ -n \"$val\" ] && skin_header=$(sanitize_filename \"$val\")
fi
fi
echo ""
echo "[$INDEX/$SKIN_COUNT] Skin: $skin_header"
echo \"\"
echo \"[$INDEX/$SKIN_COUNT] Skin: $skin_header\"
ICON_FOLDER="$SKIN_DIR"
OUTPUT_DIR="$REPO_MOD_ICONS_DIR/$skin_path"
mkdir -p "$OUTPUT_DIR"
OUTPUT="$OUTPUT_DIR/${skin_header}-mod-icons.webp"
ICON_FOLDER=\"$SKIN_DIR\"
OUTPUT_DIR=\"$REPO_MOD_ICONS_DIR/$skin_path\"
mkdir -p \"$OUTPUT_DIR\"
OUTPUT=\"$OUTPUT_DIR/${skin_header}-mod-icons.webp\"
row_images=()
row_index=1
for group_list in "$group1_icons" "$group2_icons" "$group3_icons"; do
for group_list in \"$group1_icons\" \"$group2_icons\" \"$group3_icons\"; do
montage_files=()
for icon in $group_list; do
file=""
if [ -f "${ICON_FOLDER}/selection-mod-${icon}@2x.png" ]; then
file="${ICON_FOLDER}/selection-mod-${icon}@2x.png"
elif [ -f "${ICON_FOLDER}/selection-mod-${icon}.png" ]; then
file="${ICON_FOLDER}/selection-mod-${icon}.png"
elif [ -f "${DEFAULT_SKIN_DIR}/selection-mod-${icon}@2x.png" ]; then
file="${DEFAULT_SKIN_DIR}/selection-mod-${icon}@2x.png"
file=\"\"
if [ -f \"${ICON_FOLDER}/selection-mod-${icon}@2x.png\" ]; then
file=\"${ICON_FOLDER}/selection-mod-${icon}@2x.png\"
elif [ -f \"${ICON_FOLDER}/selection-mod-${icon}.png\" ]; then
file=\"${ICON_FOLDER}/selection-mod-${icon}.png\"
elif [ -f \"${DEFAULT_SKIN_DIR}/selection-mod-${icon}@2x.png\" ]; then
file=\"${DEFAULT_SKIN_DIR}/selection-mod-${icon}@2x.png\"
fi
[ -n "$file" ] && montage_files+=("$file")
[ -n \"$file\" ] && montage_files+=(\"$file\")
done
while [ "${#montage_files[@]}" -lt 7 ]; do
montage_files+=("$BLANK_IMAGE")
while [ \"${#montage_files[@]}\" -lt 7 ]; do
montage_files+=(\"$BLANK_IMAGE\")
done
magick montage "${montage_files[@]}" \
-tile "7x1" -geometry "160x160+10+10" -background none \
"row_${row_index}.png"
row_images+=("row_${row_index}.png")
magick montage \"${montage_files[@]}\" \
-tile \"7x1\" -geometry \"160x160+10+10\" -background none \
\"row_${row_index}.png\"
row_images+=(\"row_${row_index}.png\")
row_index=$((row_index + 1))
done
magick montage "${row_images[@]}" \
-tile "1x${#row_images[@]}" -geometry "+10+10" -background none \
"temp_combined.png"
magick montage \"${row_images[@]}\" \
-tile \"1x${#row_images[@]}\" -geometry \"+10+10\" -background none \
\"temp_combined.png\"
magick "temp_combined.png" -define webp:lossless=true "$OUTPUT"
magick \"temp_combined.png\" -define webp:lossless=true \"$OUTPUT\"
rm temp_combined.png row_*.png
echo " ✓ Mod Icons Generated at $OUTPUT"
echo \" ✓ Mod Icons Generated at $OUTPUT\"
INDEX=$((INDEX + 1))
done
rm "$BLANK_IMAGE"
rm \"$BLANK_IMAGE\"
echo \"\"
echo \"[Mod Icon Generation Finished — processed $SKIN_COUNT skins]\"
echo ""
echo "[Mod Icon Generation Finished — processed $SKIN_COUNT skins]"
convert_png_to_webp:
name: Convert PNGs to WEBPs
runs-on: danser
needs: generate_mod_icons
if: needs.detect_changed_skins.outputs.has_changes == 'true'
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.TOKEN }}
- name: Convert PNGs to WEBPs
- name: Convert PNG to WEBP format
shell: bash
run: |
echo "[Convert PNG → WEBP Started]"
@@ -566,7 +595,20 @@ jobs:
echo "[Convert PNG → WEBP Finished]"
- name: Create OSK Files
create_osk_files:
name: Create OSK Files
runs-on: danser
needs: prepare_assets
if: needs.detect_changed_skins.outputs.has_changes == 'true'
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.TOKEN }}
- name: Create OSK archives
shell: bash
run: |
echo "[OSK Creation Job Started]"
@@ -580,10 +622,9 @@ jobs:
[ "${#skin_dirs[@]}" -eq 0 ] && { echo "No skins to process. Exiting."; exit 0; }
sanitize_filename() {
echo "$1" | \
tr -d '\000-\037' | \
sed -e 's#[\\/:\*\?"<>|]#-#g' | \
sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
echo "$1" | tr -d '\000-\037' \
| sed -e 's#[\\/:\*\?"<>|]#-#g' \
-e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
}
FIXED_TIMESTAMP="2025-01-01 00:00:00"
@@ -591,7 +632,7 @@ jobs:
INDEX=1
for skin_path in "${skin_dirs[@]}"; do
SKIN_DIR="$DANSER_SKINS_DIR/$skin_path"
SKIN_DIR="$SKINS_DIR/$skin_path"
[ ! -d "$SKIN_DIR" ] && { echo "Skipping missing skin directory: $SKIN_DIR"; ((INDEX++)); continue; }
OUTPUT_DIR="$OSK_PATH/$skin_path"
@@ -627,15 +668,30 @@ jobs:
echo ""
echo "[OSK Creation Job Finished — processed $SKIN_COUNT skins]"
- name: Generate README
generate_readme_index:
name: Generate README
runs-on: danser
needs: calculate_git_tag
if: (needs.detect_changed_skins.outputs.has_changes == 'true') || (github.event.inputs.soft_run == 'true')
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.TOKEN }}
- name: Generate README markdown
shell: bash
run: |
echo "Generating README index…"
mkdir -p /tmp/skins-docs
export README_PATH="/tmp/skins-docs/README.md"
sanitize_filename() {
echo "$1" | \
tr -d '\000-\037' | \
sed -e 's#[\\/:\*\?"<>|]#-#g' -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
echo "$1" | tr -d '\000-\037' \
| sed -e 's#[\\/:\*\?"<>|]#-#g' -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
}
url_encode_path() {
@@ -678,7 +734,7 @@ jobs:
declare -A ordered
while IFS= read -r skin; do
ordered["$skin"]=1
dir="$DANSER_SKINS_DIR/$skin"
dir="$SKINS_DIR/$skin"
[ ! -d "$dir" ] && continue
ini_file=$(find "$dir" -maxdepth 1 -iname "skin.ini" | head -n1 || true)
@@ -690,8 +746,6 @@ jobs:
val="${name_line#*:}"
val="$(echo "$val" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
[ -n "$val" ] && skin_header=$(sanitize_filename "$val")
else
skin_header=$(sanitize_filename "$skin")
fi
else
continue
@@ -700,7 +754,7 @@ jobs:
raw_path="$(printf "%s/%s" "$skin" "$skin_header" | sed 's/^ *//;s/ *$//')"
base_path=$(url_encode_path "$raw_path")
echo "## [$skin_header]($REGISTRY_URL/$USER_REPOSITORY/media/tag/$new_tag/export/${base_path}.osk)" >> "$README_PATH"
echo "## [$skin_header]($REGISTRY_URL/$USER_REPOSITORY/media/tag/$${{ steps.tag.outputs.new_tag }}/export/${base_path}.osk)" >> "$README_PATH"
echo "" >> "$README_PATH"
desc=$(get_desc "$skin")
@@ -718,74 +772,55 @@ jobs:
echo "" >> "$README_PATH"
done < order.txt
for dir in "$DANSER_SKINS_DIR"/*; do
[ -d "$dir" ] || continue
skin="$(basename "$dir")"
[[ -n "${ordered[$skin]}" ]] && continue
ini_file=$(find "$dir" -maxdepth 1 -iname "skin.ini" | head -n1 || true)
skin_header="$skin"
if [ -f "$ini_file" ]; then
name_line=$(grep -a -i -m1 'Name[[:space:]]*:' "$ini_file" || true)
if [ -n "$name_line" ]; then
val="${name_line#*:}"
val="$(echo "$val" | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')"
[ -n "$val" ] && skin_header=$(sanitize_filename "$val")
else
skin_header=$(sanitize_filename "$skin")
fi
else
continue
fi
raw_path="$(printf "%s/%s" "$skin" "$skin_header" | sed 's/^ *//;s/ *$//')"
base_path=$(url_encode_path "$raw_path")
echo "## [$skin_header]($REGISTRY_URL/$USER_REPOSITORY/media/tag/$new_tag/export/${base_path}.osk)" >> "$README_PATH"
echo "" >> "$README_PATH"
if [ -f "$ini_file" ]; then
author_line=$(grep -i '^[[:space:]]*Author:' "$ini_file" | head -n1 || true)
if [ -n "$author_line" ]; then
author=$(echo "$author_line" | cut -d ':' -f2- | sed 's/^[[:space:]]*//;s/[[:space:]]*$//')
[ -n "$author" ] && { echo "**Author:** $author" >> "$README_PATH"; echo "" >> "$README_PATH"; }
fi
fi
echo "[![$skin_header Thumbnail](media/thumbnail/${base_path}.webp)](/docs/${base_path}.md)" >> "$README_PATH"
echo "" >> "$README_PATH"
done
echo "# Build History" >> "$README_PATH"
echo "" >> "$README_PATH"
echo "| Version | Date |" >> "$README_PATH"
echo "| ------- | ---- |" >> "$README_PATH"
current_commit_date=$(TZ="Europe/Zurich" date -d "$(git log -1 --format=%cI)" "+%d.%m.%Y %H:%M:%S")
echo "| [\`$new_tag (Current)\`]($REGISTRY_URL/$USER_REPOSITORY/src/tag/$new_tag/README.md) | $current_commit_date |" >> "$README_PATH"
echo "| [\$${{ steps.tag.outputs.new_tag }} (Current)\]($REGISTRY_URL/$USER_REPOSITORY/src/tag/$${{ steps.tag.outputs.new_tag }}/README.md) | $current_commit_date |" >> "$README_PATH"
old_tags=$(git tag --sort=-v:refname | grep -v "^$new_tag$" | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || true)
old_tags=$(git tag --sort=-v:refname | grep -v "^${{ needs.calculate_git_tag.outputs.new_tag }}$" | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || true)
if [ -n "$old_tags" ]; then
echo "$old_tags" | while read -r tag; do
tag_date=$(git log -1 --format=%ci "$tag")
formatted_date=$(TZ="Europe/Zurich" date -d "$tag_date" "+%d.%m.%Y %H:%M:%S")
echo "| [\`$tag\`]($REGISTRY_URL/$USER_REPOSITORY/src/tag/$tag/README.md) | $formatted_date |" >> "$README_PATH"
echo "| [\$tag\]($REGISTRY_URL/$USER_REPOSITORY/src/tag/$tag/README.md) | $formatted_date |" >> "$README_PATH"
done
fi
echo "README index generated successfully."
- name: Generate Per-Skin Pages
- name: Upload README
uses: actions/upload-artifact@v3
with:
name: updated-index-readme
path: /tmp/skins-docs
generate_per_skin_docs:
name: Generate Per-Skin Docs
runs-on: danser
needs: calculate_git_tag
if: (needs.detect_changed_skins.outputs.has_changes == 'true') || (github.event.inputs.soft_run == 'true')
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.TOKEN }}
- name: Generate individual skin markdown pages
shell: bash
run: |
echo "Generating detailed per-skin markdown pages…"
echo "Generating per-skin docs…"
mkdir -p /tmp/skins-docs
export DOC_DIR="/tmp/skins-docs/"
sanitize_filename() {
echo "$1" | \
tr -d '\000-\037' | \
sed -e 's#[\\/:\*\?"<>|]#-#g' | \
sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
echo "$1" | tr -d '\000-\037' \
| sed -e 's#[\\/:\*\?"<>|]#-#g' -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
}
url_encode_path() {
@@ -801,7 +836,7 @@ jobs:
mkdir -p "$DOC_DIR"
for dir in "$DANSER_SKINS_DIR"/*; do
for dir in "$SKINS_DIR"/*; do
[ -d "$dir" ] || continue
skin=$(basename "$dir")
@@ -821,12 +856,11 @@ jobs:
raw_path="${skin}/${skin_header}"
base_path=$(url_encode_path "$raw_path")
osk_url="$REGISTRY_URL/$USER_REPOSITORY/media/tag/$new_tag/export/${base_path}.osk"
osk_url="$REGISTRY_URL/$USER_REPOSITORY/media/tag/$${{ steps.tag.outputs.new_tag }}/export/${base_path}.osk"
md_file_path="${DOC_DIR}/${raw_path}.md"
mkdir -p "$(dirname "$md_file_path")"
video_url="$REGISTRY_URL/$USER_REPOSITORY/raw/tag/$new_tag/media/gameplay/${base_path}.mp4"
video_url="$REGISTRY_URL/$USER_REPOSITORY/raw/tag/$${{ steps.tag.outputs.new_tag }}/media/gameplay/${base_path}.mp4"
author=""
if [ -f "$ini_file" ]; then
@@ -843,8 +877,7 @@ jobs:
[ -n "$author" ] && echo ""
echo "## Hitsounds"
echo "<video controls autoplay loop muted playsinline src=\"$video_url\" type=\"video/mp4\">"
echo "</video>"
echo "<video controls autoplay loop muted playsinline src=\"$video_url\" type=\"video/mp4\"></video>"
echo ""
echo "## Ranking Panel"
@@ -853,7 +886,6 @@ jobs:
echo "## Mod Icons"
echo "![](/media/icons/${base_path}-mod-icons.webp)"
echo ""
echo "## Build History"
echo ""
@@ -861,20 +893,19 @@ jobs:
echo "| ------- | ---- |"
current_commit_date=$(TZ="Europe/Zurich" date -d "$(git log -1 --format=%cI)" "+%d.%m.%Y %H:%M:%S")
echo "| [\`$new_tag (Current)\`]($REGISTRY_URL/$USER_REPOSITORY/src/tag/$new_tag/docs/${base_path}.md) | $current_commit_date |"
echo "| [\$${{ steps.tag.outputs.new_tag }} (Current)\]($REGISTRY_URL/$USER_REPOSITORY/src/tag/$${{ steps.tag.outputs.new_tag }}/docs/${base_path}.md) | $current_commit_date |"
old_tags=$(git tag --sort=-v:refname | grep -v "^$new_tag$" | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || true)
old_tags=$(git tag --sort=-v:refname | grep -v "^${{ needs.calculate_git_tag.outputs.new_tag }}$" | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || true)
if [ -n "$old_tags" ]; then
echo "$old_tags" | while read -r tag; do
raw_osk_path="export/${skin}/${skin_header}.osk"
if git ls-tree -r --name-only "$tag" | grep -Fx -- "$raw_osk_path" >/dev/null; then
tag_date=$(git log -1 --format=%ci "$tag")
formatted_date=$(TZ="Europe/Zurich" date -d "$tag_date" "+%d.%m.%Y %H:%M:%S")
echo "| [\`$tag\`]($REGISTRY_URL/$USER_REPOSITORY/src/tag/$tag/docs/${base_path}.md) | $formatted_date |"
echo "| [\$tag\]($REGISTRY_URL/$USER_REPOSITORY/src/tag/$tag/docs/${base_path}.md) | $formatted_date |"
fi
done
fi
} > "$md_file_path"
echo " → Wrote $md_file_path"
@@ -882,10 +913,28 @@ jobs:
echo "Per-skin markdown pages complete."
- name: Cleanup Extra Files
- name: Upload per-skin docs
uses: actions/upload-artifact@v3
with:
name: updated-per-skin-readme
path: /tmp/skins-docs
cleanup_extra_files:
name: Cleanup Extra Files
runs-on: danser
needs: detect_changed_skins
if: (needs.detect_changed_skins.outputs.has_changes == 'true') || (github.event.inputs.soft_run == 'true')
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.TOKEN }}
- name: Cleanup redundant and obsolete files
shell: bash
run: |
set -euo pipefail
echo "[Cleanup Extra Files Started]"
[ -f how-to-use.md ] && rm -f how-to-use.md
@@ -941,7 +990,7 @@ jobs:
for skin in "${skins[@]}"; do
header=$(sanitize_filename "$skin")
ini=$(find "$DANSER_SKINS_DIR/$skin" -maxdepth 1 -type f -iname "skin.ini" -print -quit || true)
ini=$(find "$SKINS_DIR/$skin" -maxdepth 1 -type f -iname "skin.ini" -print -quit || true)
if [[ -f "$ini" ]]; then
raw=$(grep -i '^[[:space:]]*Name:' "$ini" | head -n1 || true)
raw="${raw#*:}"
@@ -960,6 +1009,48 @@ jobs:
echo "[Cleanup Extra Files Complete]"
git_commit_and_push:
name: Git Commit and Push
runs-on: danser
needs:
- cleanup_extra_files
- generate_per_skin_docs
- generate_readme_index
- create_osk_files
- convert_png_to_webp
- generate_mod_icons
- rename_assets_from_skin_ini
- generate_videos_and_screenshots
- prepare_assets
- detect_changed_skins
- calculate_git_tag
if: >-
always() && (
(needs.detect_changed_skins.outputs.has_changes == 'true') ||
(github.event.inputs.soft_run == 'true')
)
container:
image: ${{ vars.CONTAINER_REGISTRY }}/${{ env.IMAGE_NAME }}:latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
fetch-depth: 0
token: ${{ secrets.TOKEN }}
- name: Download README from generate_readme_index
uses: actions/download-artifact@v3
with:
name: updated-index-readme
path: .
- name: Download per-skin docs from generate_per_skin_docs
uses: actions/download-artifact@v3
with:
name: updated-per-skin-readme
path: ./docs/
- name: Configure Git
shell: bash
run: |
@@ -984,8 +1075,8 @@ jobs:
run: |
if [ "${GITHUB_REF}" = "refs/heads/main" ]; then
git push origin HEAD:main || echo "No changes to push"
git tag "$new_tag"
git push origin "$new_tag"
git tag "$${{ steps.tag.outputs.new_tag }}"
git push origin "$${{ steps.tag.outputs.new_tag }}"
else
git push origin HEAD:"${GITHUB_REF_NAME}" || echo "No changes to push"
fi
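
Usage note: the new soft_run input lets the README and per-skin docs regenerate even when no skin directories changed, and target_skins limits a rebuild to specific folders. A minimal sketch of a manual dispatch with these inputs, assuming Gitea's GitHub-compatible workflow-dispatch endpoint; OWNER, REPO, gitea.example.com, GITEA_TOKEN and the file name generate-skins.yml are placeholders, not taken from this diff:

# Hypothetical manual dispatch of this workflow with the new inputs.
curl -X POST \
  -H "Authorization: token $GITEA_TOKEN" \
  -H "Content-Type: application/json" \
  "https://gitea.example.com/api/v1/repos/OWNER/REPO/actions/workflows/generate-skins.yml/dispatches" \
  -d '{"ref":"main","inputs":{"soft_run":"true","target_skins":"Skin1,Skin2"}}'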