# GitHub Actions workflow: Transifex sync (copied from run "Transifex #5484")
---
name: Transifex

# Runs on PRs that touch translation sources, and after a successful
# "Build Bisq 2" run (workflow_run) to push changes to Transifex.
on:
  pull_request:
    types: [ opened, synchronize, reopened ]
    paths:
      - '.tx/config'
      - 'i18n/src/main/resources/**.properties'
  workflow_run:
    # Quoted: a multi-word plain scalar inside a flow sequence is fragile.
    workflows: [ "Build Bisq 2" ]
    types: [ completed ]
jobs:
  verify:
    name: Verify Transifex configuration
    # Run for PR events, or for workflow_run events only when the triggering build succeeded.
    if: github.event_name == 'pull_request' || (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v6
        with:
          # PR events check out the PR head; workflow_run events check out the triggering commit.
          ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.event.workflow_run.head_sha }}
      - name: Verify that .tx/config is in sync with the i18n folder
        run: |
          set -euo pipefail
          # Extract source files from .tx/config, trimming whitespace and de-duplicating
          config_files=$(grep -E '^[[:space:]]*source_file' .tx/config \
            | sed 's/.*= *//' \
            | sed 's|i18n/src/main/resources/||' \
            | sort -u)
          # Get actual source files from i18n directory (excluding locale-specific files)
          i18n_files=$(find i18n/src/main/resources -maxdepth 1 -name "*.properties" \
            | grep -v -E '(_[a-z]{2}(-[A-Z][a-z]+)?(([_-]([A-Z]{2}|[0-9]{3})))?|_pcm)\.properties$' \
            | sed 's|i18n/src/main/resources/||' \
            | sort -u)
          if [ "$config_files" != "$i18n_files" ]; then
            echo "::error::.tx/config is out of sync with i18n/src/main/resources/"
            echo "Please run the following command and commit the changes:"
            echo "./gradlew apps:desktop:i18n:updateTxConfig"
            diff -u <(echo "$config_files") <(echo "$i18n_files")
            exit 1
          fi
          echo ".tx/config is in sync with i18n/src/main/resources/"
calculate_and_push_sources:
name: Calculate pushes and push source files
if: github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success'
needs: verify
runs-on: ubuntu-latest
outputs:
t_matrix: ${{ steps.calculate-pushes.outputs.t_matrix || '{"include":[]}' }}
has_translation_changes: ${{ steps.calculate-pushes.outputs.has_translation_changes }}
steps:
- name: Checkout the repository
uses: actions/checkout@v6
with:
ref: ${{ github.event.workflow_run.head_sha }}
fetch-depth: 0
- name: Check if the commit is in the main branch
id: check_commit
run: |
git fetch origin main
if git merge-base --is-ancestor ${{ github.event.workflow_run.head_sha }} origin/main; then
echo "commit_in_main=true" >> $GITHUB_OUTPUT
else
echo "commit_in_main=false" >> $GITHUB_OUTPUT
fi
- name: "Check if TX_TOKEN secret exists"
if: steps.check_commit.outputs.commit_in_main == 'true'
env:
transifex_secret: ${{ secrets.TX_TOKEN }}
run: |
if [ -z "$transifex_secret" ]; then
echo "The secret \"TX_TOKEN\" has not been set; please go to \"settings > secrets and variables\" to create it"
exit 1
fi
- name: Check if batch script is available
if: steps.check_commit.outputs.commit_in_main == 'true'
id: check_batch_script
run: |
if [ -f scripts/generate_transifex_batches.py ]; then
echo "batch_script_available=true" >> $GITHUB_OUTPUT
echo "::notice::Batch generation script found - will use dynamic batching"
else
echo "batch_script_available=false" >> $GITHUB_OUTPUT
echo "::warning::Batch generation script not found in this commit - skipping translation batching"
echo "::warning::This is expected for commits before 2025-11-16"
fi
- name: Calculate push commands
id: calculate-pushes
if: steps.check_commit.outputs.commit_in_main == 'true'
run: |
set -euo pipefail
echo "Finding changed files..."
# We compare the merge commit with its first parent to get the list of files.
# Note: This is reliable for standard PR merges but may not handle octopus merges.
CHANGED_FILES=$(git diff --name-only ${{ github.event.workflow_run.head_sha }}~1 ${{ github.event.workflow_run.head_sha }})
echo "Files changed in the merge:"
echo "$CHANGED_FILES"
# --- Handle SOURCE file pushes (-s) ---
s_args=""
if echo "${CHANGED_FILES}" | grep -q -x ".tx/config"; then
echo "::notice:: .tx/config changed. Pushing all source files."
s_args="push -s"
else
s_resources="" # This will be the comma-separated list
CHANGED_SOURCE_FILES=$(echo "${CHANGED_FILES}" | grep -E '\.properties$' | grep -v -E '(_[a-z]{2}(-[A-Z][a-z]+)?(([_-]([A-Z]{2}|[0-9]{3})))?|_pcm)\.properties$' -- || true)
if [ -n "$CHANGED_SOURCE_FILES" ]; then
for file in $CHANGED_SOURCE_FILES; do
resource_slug_line=$(awk -v file="$file" '
$0 ~ /^\[o:.*:p:.*:r:.*\]$/ {resource=$0}
$0 ~ ("source_file[[:space:]]*=[[:space:]]*" file "$") {print resource}
' .tx/config)
if [[ -n "$resource_slug_line" ]] && [[ "$resource_slug_line" =~ ^\[o:.*:p:(.*):r:(.*)\]$ ]]; then
project_slug="${BASH_REMATCH[1]}"
resource_slug="${BASH_REMATCH[2]}"
full_resource_id="${project_slug}.${resource_slug}"
if [[ -z "$s_resources" ]]; then
s_resources="$full_resource_id"
else
s_resources+=",$full_resource_id"
fi
else
echo "::warning::Could not find resource slug for changed source file: $file"
fi
done
fi
# If we found any resources, construct the final args string
if [[ -n "$s_resources" ]]; then
s_args="push -s -r $s_resources"
fi
fi
echo "s_args=$s_args" >> $GITHUB_OUTPUT
# --- Handle TRANSLATION file pushes (-t) with BATCHING ---
# Collect all changed locales and their resources
declare -A lang_resources
CHANGED_TRANSLATION_FILES=$(echo "${CHANGED_FILES}" | grep -E 'i18n/src/main/resources/.*\.properties$' | grep -E '(_[a-z]{2}(-[A-Z][a-z]+)?(([_-]([A-Z]{2}|[0-9]{3})))?|_pcm)\.properties$' -- || true)
if [ -n "$CHANGED_TRANSLATION_FILES" ]; then
for file in $CHANGED_TRANSLATION_FILES; do
lang=$(echo "$file" | sed -E 's/.*_([a-z]{2}(-[A-Z][a-z]+)?(([_-]([A-Z]{2}|[0-9]{3})))?|pcm)\.properties$/\1/')
resource_slug_line=$(awk -v file="$file" -v lang="$lang" '
BEGIN{FS="="}
$0 ~ /^\[o:.*:p:.*:r:.*\]$/ {resource=$0}
$1 ~ /^[[:space:]]*file_filter/ {
filter_path=$2;
gsub(/^[[:space:]]+|[[:space:]]+$/, "", filter_path);
gsub(/<lang>/, lang, filter_path);
if (filter_path == file) {
print resource;
exit;
}
}
' .tx/config)
if [[ -n "$resource_slug_line" ]] && [[ "$resource_slug_line" =~ ^\[o:.*:p:(.*):r:(.*)\]$ ]]; then
project_slug="${BASH_REMATCH[1]}"
resource_slug="${BASH_REMATCH[2]}"
full_resource_id="${project_slug}.${resource_slug}"
if [[ ! -v lang_resources[$lang] ]]; then
lang_resources[$lang]="$full_resource_id"
else
lang_resources[$lang]+=",$full_resource_id"
fi
fi
done
# Build resources JSON for Python script
# Convert bash associative array to JSON format
resources_json="{"
first_item=true
for locale in "${!lang_resources[@]}"; do
if ! $first_item; then
resources_json+=","
fi
# Escape quotes in resource values
resources="${lang_resources[$locale]}"
resources_json+="\"$locale\":\"$resources\""
first_item=false
done
resources_json+="}"
# Collect changed locales for batch generation
changed_locales_list=""
for locale in "${!lang_resources[@]}"; do
changed_locales_list="${changed_locales_list:+$changed_locales_list,}$locale"
done
# Generate batched matrix configuration if we have locales
if [ -n "$changed_locales_list" ]; then
echo "has_translation_changes=true" >> $GITHUB_OUTPUT
echo "::notice::Changed locales: $changed_locales_list"
echo "::notice::Resources mapping: $resources_json"
# Check if batch script is available before using it
if [ "${{ steps.check_batch_script.outputs.batch_script_available }}" == "true" ]; then
# Call Python script with resources mapping for complete JSON generation
# This eliminates all bash JSON manipulation and deduplicates resources properly
batch_matrix=$(python3 scripts/generate_transifex_batches.py \
--locales "$changed_locales_list" \
--batch-size 4 \
--max-parallel 2 \
--json \
--resources-json "$resources_json")
echo "::notice::Generated batch configuration: $batch_matrix"
printf 't_matrix=%s\n' "$batch_matrix" >> "$GITHUB_OUTPUT"
else
# Skip batching for commits without the script (backward compatibility)
echo "::warning::Skipping translation batch push - batch script not available in this commit"
echo "::notice::Translation changes detected but batching is unavailable"
echo 't_matrix={"include":[]}' >> "$GITHUB_OUTPUT"
fi
else
echo "has_translation_changes=false" >> $GITHUB_OUTPUT
echo 't_matrix={"include":[]}' >> "$GITHUB_OUTPUT"
fi
else
echo "has_translation_changes=false" >> $GITHUB_OUTPUT
echo 't_matrix={"include":[]}' >> "$GITHUB_OUTPUT"
fi
- name: Push source files to Transifex
if: steps.calculate-pushes.outputs.s_args != ''
uses: transifex/cli-action@v2
with:
token: ${{ secrets.TX_TOKEN }}
args: ${{ steps.calculate-pushes.outputs.s_args }}
push_translations:
name: Push translations (${{ matrix.name }})
if: needs.calculate_and_push_sources.outputs.t_matrix != '{"include":[]}'
needs: calculate_and_push_sources
runs-on: ubuntu-latest
# PHASE 1: Dynamic batching with controlled parallelism
# Batches are generated dynamically based on changed locales only
# Uses scripts/generate_transifex_batches.py for optimal batch configuration
strategy:
matrix: ${{ fromJson(needs.calculate_and_push_sources.outputs.t_matrix) }}
max-parallel: 2 # Controlled parallelism to avoid Transifex API rate limits
fail-fast: false # Don't cancel all jobs if one fails
steps:
- name: Checkout the repository
uses: actions/checkout@v6
with:
ref: ${{ github.event.workflow_run.head_sha }}
# Install Transifex CLI for direct tx commands in retry/verification steps
- name: Install Transifex CLI
run: |
curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash
# Add to PATH for subsequent steps (takes effect in next step, not current)
echo "$PWD" >> $GITHUB_PATH
# Verify installation using full path
echo "::group::Transifex CLI Installation"
echo "Installed Transifex CLI version:"
./tx --version
if [ -f ./tx ]; then echo "CLI installed at: $(pwd)/tx"; else echo "CLI not found in current directory"; fi
echo "::endgroup::"
env:
TX_TOKEN: ${{ secrets.TX_TOKEN }}
# Validate matrix resources field
- name: Validate batch configuration
run: |
if [ -z "${{ matrix.resources }}" ]; then
echo "::error::No resources specified for batch ${{ matrix.id }} (locales: ${{ matrix.locales }})"
echo "::error::This indicates a bug in the batch generation script"
exit 1
fi
echo "::notice::Batch ${{ matrix.id }} validated: ${{ matrix.locales }} → ${{ matrix.resources }}"
# PHASE 1: Stagger batch starts to prevent API overload
- name: Stagger batch start
run: |
# Add delay based on batch ID to prevent simultaneous starts
DELAY=$(((${{ matrix.id }} - 1) * 10))
echo "::notice::Delaying batch ${{ matrix.id }} by ${DELAY}s for rate limiting"
sleep $DELAY
# PHASE 1: Retry logic with exponential backoff
- name: Push ${{ matrix.name }} translations with retry
uses: nick-fields/retry@v4
id: push_with_retry
env:
TX_TOKEN: ${{ secrets.TX_TOKEN }}
with:
timeout_minutes: 30 # Increased from 10 to 30 due to CLI performance issues (transifex/cli#212)
max_attempts: 3
retry_wait_seconds: 60 # Increased from 15 to allow API recovery time
# exponential_backoff: true # Removed - not supported by retry action v3
retry_on: error
command: |
set -euo pipefail
echo "::group::Uploading translations for batch: ${{ matrix.locales }}"
# Split batch locales and upload with inter-locale delay
IFS=',' read -ra LOCALES <<< "${{ matrix.locales }}"
UPLOADED_COUNT=0
for locale in "${LOCALES[@]}"; do
echo "::group::Uploading locale: $locale"
echo "Batch: ${{ matrix.name }} | Locale: $locale | Resource: ${{ matrix.resources }}"
echo "Start time: $(date -u +"%Y-%m-%d %H:%M:%S UTC")"
# Rate limiting: 5s delay between locales within batch
if [ $UPLOADED_COUNT -gt 0 ]; then
echo "Rate limiting: sleeping 5s between locales"
sleep 5
fi
# DEBUG: Show diagnostics before tx push
echo "::group::DEBUG Diagnostics"
echo "TX CLI version:"
./tx --version || tx --version
# Extract first resource from comma-separated list for diagnostic check
# Example: "bisq-2.walletproperties,bisq-2.accountproperties" -> "bisq-2.walletproperties"
FIRST_RESOURCE=$(echo "${{ matrix.resources }}" | cut -d',' -f1)
# Derive base name: "bisq-2.walletproperties" -> "wallet"
RESOURCE_BASE=$(echo "$FIRST_RESOURCE" | sed 's/^bisq-2\.//' | sed 's/properties$//')
echo "Resources from matrix: ${{ matrix.resources }}"
echo "Using first resource for diagnostics: ${FIRST_RESOURCE}"
echo "Derived resource base: ${RESOURCE_BASE}"
echo "Translation file check:"
if ls -lh "i18n/src/main/resources/${RESOURCE_BASE}_${locale}.properties" 2>/dev/null; then
echo "✓ Translation file exists"
else
echo "⚠ Translation file not found (may be expected for some locales)"
fi
echo "TX config check:"
# Extract resource slug from "bisq-2.applicationproperties" -> "applicationproperties"
RESOURCE_SLUG=$(echo "$FIRST_RESOURCE" | sed 's/^bisq-2\.//')
if grep -q "r:${RESOURCE_SLUG}]" .tx/config; then
echo "✓ Resource '${RESOURCE_SLUG}' found in .tx/config"
else
echo "⚠ Resource '${RESOURCE_SLUG}' not found in .tx/config"
fi
echo "::endgroup::"
# Real-time output instead of buffering (to see where it hangs)
START_TIME=$(date +%s)
EXIT_CODE=0
echo "Executing: tx push -t -l \"$locale\" -r \"${{ matrix.resources }}\""
echo "::notice::Watching TX CLI output in real-time (buffering disabled for debugging)"
# Run tx push with real-time output and capture to temp file
TX_LOG="/tmp/tx_${locale}_$$.log"
./tx push -t -l "$locale" -r "${{ matrix.resources }}" 2>&1 | tee "$TX_LOG"
EXIT_CODE=${PIPESTATUS[0]}
OUTPUT=$(cat "$TX_LOG" 2>/dev/null || echo "No output captured")
rm -f "$TX_LOG" # Clean up temp file
END_TIME=$(date +%s)
DURATION=$((END_TIME - START_TIME))
echo "End time: $(date -u +"%Y-%m-%d %H:%M:%S UTC")"
echo "Duration: ${DURATION}s"
echo "Exit code: $EXIT_CODE"
if [ ${EXIT_CODE:-0} -ne 0 ]; then
echo "::error::Upload failed for $locale after ${DURATION}s"
echo "Full output:"
echo "$OUTPUT"
# Distinguish permanent vs transient errors
if echo "$OUTPUT" | grep -qi "unauthorized\|forbidden\|not found\|invalid"; then
echo "::error::Permanent error detected - no retry"
echo "::endgroup::"
exit 1 # Don't retry permanent errors
elif echo "$OUTPUT" | grep -qi "timeout\|rate limit\|too many requests\|connection\|network"; then
echo "::warning::Transient error detected - will retry"
echo "::endgroup::"
exit 2 # Retry transient errors
else
echo "::warning::Unknown error type - will retry once"
echo "::endgroup::"
exit 2
fi
fi
if ! echo "$OUTPUT" | grep -q " - Uploading file"; then
echo "::error::tx push exited 0 but uploaded no files for locale '$locale'."
echo "::error::Likely locale mismatch (e.g., pt-BR vs pt_BR) or missing local files."
echo "Full output:"
echo "$OUTPUT"
echo "::endgroup::"
exit 1
fi
# Success - show abbreviated output
echo "✅ Upload successful (${DURATION}s)"
echo "TX CLI output:"
echo "$OUTPUT" | tail -5
echo "::endgroup::"
UPLOADED_COUNT=$((UPLOADED_COUNT + 1))
done
echo "::endgroup::"
echo "::notice::Batch ${{ matrix.name }} complete: ${UPLOADED_COUNT} locales uploaded"
# Upload confirmation step
# Note: tx push with exit code 0 already confirms Transifex accepted the files.
# No additional verification is needed - the upload step's success is sufficient.
- name: Confirm upload succeeded
id: verify
if: steps.push_with_retry.outcome == 'success'
run: |
echo "✅ Upload confirmed successful for batch: ${{ matrix.locales }}"
echo "::notice::Transifex accepted the translation files (tx push exit code 0)"
# Report metrics for monitoring
- name: Report upload metrics
if: always()
run: |
echo "## 🌍 Batch ${{ matrix.name }} Upload Report" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "- **Batch ID**: ${{ matrix.id }}" >> $GITHUB_STEP_SUMMARY
echo "- **Locales**: ${{ matrix.locales }}" >> $GITHUB_STEP_SUMMARY
echo "- **Status**: ${{ steps.push_with_retry.outcome }}" >> $GITHUB_STEP_SUMMARY
echo "- **Verification**: ${{ steps.verify.outcome || 'skipped' }}" >> $GITHUB_STEP_SUMMARY
# Summary job to aggregate results
summary:
name: Upload Summary Report
needs: [calculate_and_push_sources, push_translations]
if: always() && needs.calculate_and_push_sources.outputs.has_translation_changes == 'true'
runs-on: ubuntu-latest
steps:
- name: Generate summary
run: |
echo "# 🌍 Translation Upload Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Note: In GitHub Actions, we can't easily aggregate matrix job results
# This is a placeholder for future monitoring enhancements
echo "Translation upload workflow completed." >> $GITHUB_STEP_SUMMARY
echo "Check individual batch job logs for detailed status." >> $GITHUB_STEP_SUMMARY