# AI GUI translation workflow (GitHub web-view page chrome removed from this paste).
name: AI GUI translation

# Trigger on pushes to master that touch Python sources (tests excluded),
# or manually via the Actions UI.
on:
  push:
    branches:
      - master
    paths:
      - '**/*.py'
      - '!tests/**'
  workflow_dispatch:

# Prevent race conditions: only one run per workflow+ref; a newer push
# cancels an in-flight run so two runs never push the same branch.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

# Default to read-only; jobs that need write access elevate explicitly.
permissions:
  contents: read
jobs:
  # Extract translatable strings from the Python sources, merge them into the
  # per-language .po files, and prepare the matrix of missing-translation
  # chunks for the AI translation job.
  extract_strings:
    permissions:
      contents: write  # for creating branches and commits
      pull-requests: write  # for creating PRs
    runs-on: ubuntu-latest
    outputs:
      po-files-changed: ${{ steps.check-changes.outputs.po-files-changed }}
      translations-to-process: ${{ steps.prepare-translations.outputs.translations-to-process }}
      translation-matrix: ${{ steps.prepare-translations.outputs.translation-matrix }}
    env:
      PYGETTEXT_DOMAIN: ardupilot_methodic_configurator
      PYGETTEXT_LOCALEDIR: ardupilot_methodic_configurator/locale
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
        with:
          egress-policy: audit

      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

      - name: Set up Python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        with:
          python-version: '3.x'
          cache: 'pip'
          cache-dependency-path: 'pyproject.toml'

      - name: Cache apt packages
        uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
        with:
          path: |
            /var/cache/apt/archives/*.deb
            /var/lib/apt/lists/*
          # NOTE(review): key hard-codes ubuntu2204 while runs-on is
          # ubuntu-latest (which may be a newer release) — confirm the image
          # still matches or derive the key from the runner image version.
          key: ${{ runner.os }}-apt-gettext-ubuntu2204
          restore-keys: |
            ${{ runner.os }}-apt-gettext-
            ${{ runner.os }}-apt-

      - name: Install apt gettext package
        run: |
          sudo apt-get update
          # NOTE(review): the pinned version must exist in the runner's Ubuntu
          # release, otherwise apt-get fails — confirm against the image.
          sudo apt-get install -y gettext=0.21-14ubuntu2

      - name: Install python-gettext requirement
        id: install_deps
        continue-on-error: true
        run: |
          # Pin pip and python-gettext to the versions declared in
          # pyproject.toml; fall back (with a warning) when not found.
          WARNINGS=0
          PIP_VERSION=$(grep -oP 'pip\s*==\s*\K[0-9]+(\.[0-9]+)*' pyproject.toml | head -1 || echo '')
          PYTHON_GETTEXT_VERSION=$(grep -oP 'python-gettext\s*==\s*\K[0-9]+(\.[0-9]+)*' pyproject.toml | head -1 || echo '')
          if [ -z "$PIP_VERSION" ]; then
            echo "::warning::Could not detect pip version in pyproject.toml; falling back to latest."
            PIP_INSTALL="pip"
            WARNINGS=1
          else
            echo "Will install pip version $PIP_VERSION."
            PIP_INSTALL="pip==$PIP_VERSION"
          fi
          if [ -z "$PYTHON_GETTEXT_VERSION" ]; then
            echo "::warning::Could not detect python-gettext version in pyproject.toml; falling back to 5.0."
            PYTHON_GETTEXT_INSTALL="python-gettext==5.0"
            WARNINGS=1
          else
            echo "Will install python-gettext version $PYTHON_GETTEXT_VERSION."
            PYTHON_GETTEXT_INSTALL="python-gettext==$PYTHON_GETTEXT_VERSION"
          fi
          python -m pip install "$PIP_INSTALL" "$PYTHON_GETTEXT_INSTALL"
          echo "warnings=$WARNINGS" >> "$GITHUB_OUTPUT"
          # Exit non-zero so the step is marked failed (continue-on-error keeps
          # the job going) and the warning surfaces in the run summary.
          if [ "$WARNINGS" -eq 1 ]; then
            exit 1
          fi

      - name: Extract strings
        run: |
          python create_pot_file.py

      - name: Stage changes and check for updates
        id: check-changes
        run: |
          git add "$PYGETTEXT_LOCALEDIR/$PYGETTEXT_DOMAIN.pot"
          if [ -n "$(git status --porcelain)" ]; then
            # The diff line count includes the +++/--- headers, hence the >4
            # threshold: header noise plus timestamp-only churn is ignored.
            CHANGED_LINES=$(git diff --staged | grep -E "^[\+\-]" | wc -l)
            if [ "$CHANGED_LINES" -gt 4 ]; then
              python merge_pot_file.py
              # Check if any .po files were modified.
              # NOTE(review): when the shell's ** glob does not expand, the
              # literal pattern falls through to git's own pathspec matching.
              git add $PYGETTEXT_LOCALEDIR/**/$PYGETTEXT_DOMAIN.po
              PO_CHANGES=$(git status --porcelain | grep -E "\.po$" | wc -l)
              if [ "$PO_CHANGES" -gt 0 ]; then
                echo "po-files-changed=true" >> "$GITHUB_OUTPUT"
                echo "✅ PO files have been updated with new strings"
              else
                echo "po-files-changed=false" >> "$GITHUB_OUTPUT"
                echo "No PO file changes detected"
              fi
            else
              echo "po-files-changed=false" >> "$GITHUB_OUTPUT"
              echo "Not enough changes to commit (only $CHANGED_LINES lines changed)"
            fi
          else
            echo "po-files-changed=false" >> "$GITHUB_OUTPUT"
            echo "No changes to commit"
          fi

      - name: Prepare translation matrix
        id: prepare-translations
        if: steps.check-changes.outputs.po-files-changed == 'true'
        run: |
          python extract_missing_translations.py --lang-code all --max-translations 25 --max-characters 4500
          # Check if any missing translation files were created
          if ls missing_translations_*.txt 1> /dev/null 2>&1; then
            echo "translations-to-process=true" >> "$GITHUB_OUTPUT"
            echo "✅ Found missing translation files to process with AI"
            # Build a JSON matrix entry per missing_translations_<lang>[_N].txt file
            matrix_entries="["
            first_entry=true
            for file in missing_translations_*.txt; do
              if [ -f "$file" ]; then
                # Extract language code and optional chunk number from filename
                base_name=$(basename "$file" .txt)
                if [[ "$base_name" =~ missing_translations_([a-zA-Z_]+)(_[0-9]+)?$ ]]; then
                  lang_code="${BASH_REMATCH[1]}"
                  file_suffix="${BASH_REMATCH[2]:-}"
                  # Human-readable language name gives the AI better context
                  case $lang_code in
                    "pt") language="Portuguese (Portugal)";;
                    "de") language="German";;
                    "it") language="Italian";;
                    "ja") language="Japanese";;
                    "zh_CN") language="Chinese (Simplified)";;
                    *) language="$lang_code";;
                  esac
                  if [ "$first_entry" = true ]; then
                    first_entry=false
                  else
                    matrix_entries+=","
                  fi
                  matrix_entries+="{\"lang_code\":\"$lang_code\",\"language\":\"$language\",\"file\":\"$file\",\"suffix\":\"$file_suffix\"}"
                fi
              fi
            done
            matrix_entries+="]"
            echo "translation-matrix=$matrix_entries" >> "$GITHUB_OUTPUT"
            echo "Matrix configuration: $matrix_entries"
          else
            echo "translations-to-process=false" >> "$GITHUB_OUTPUT"
            echo "translation-matrix=[]" >> "$GITHUB_OUTPUT"
            echo "No missing translations found"
          fi

      - name: Upload translation files as artifacts
        if: steps.prepare-translations.outputs.translations-to-process == 'true'
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: translation-files
          path: |
            missing_translations_*.txt
          retention-days: 1

  # Matrix job to process translations in parallel for all languages and numbered files
  # This approach scales to handle large translation batches by:
  # 1. Splitting translations into chunks of 25 strings / 4500 characters per file
  #    (see the extract_missing_translations.py invocation above)
  # 2. Processing each chunk in parallel using GitHub Actions matrix strategy
  # 3. Using consistent terminology guidelines across all chunks for the same language
  # 4. Properly escaping YAML content to avoid parsing issues with colons in translation strings
ai_translate:
needs: extract_strings
if: needs.extract_strings.outputs.translations-to-process == 'true'
permissions:
actions: read # needed for downloading artifacts
contents: read # required by checkout
models: read # for AI inference
runs-on: ubuntu-latest
strategy:
matrix:
include: ${{ fromJson(needs.extract_strings.outputs.translation-matrix) }}
fail-fast: false # Continue processing other languages even if one fails
max-parallel: 1 # Limit concurrent AI requests to avoid rate limiting
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Download translation files
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
name: translation-files
- name: Validate translation file exists
run: |
translation_file="${{ matrix.file }}"
echo "🔍 Validating translation file: $translation_file"
if [ ! -f "$translation_file" ]; then
echo "❌ Error: Translation file '$translation_file' not found"
exit 1
fi
file_size=$(wc -c < "$translation_file")
echo "📊 Translation file size: $file_size bytes"
echo "✅ Translation file validation completed successfully"
- name: Prepare translation content
id: translation_content
run: |
# Read file and indent each line by 2 spaces (except the first)
awk 'NR==1 {print} NR>1 {print " " $0}' "${{ matrix.file }}" > indented_content.txt
echo "content<<EOF" >> $GITHUB_OUTPUT
cat indented_content.txt >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
- name: Run AI translation
id: ai_translate
uses: actions/ai-inference@e09e65981758de8b2fdab13c2bfb7c7d5493b0b6 # v2.0.7
with:
system-prompt-file: .github/prompts/ai-translation-system.prompt.yml
prompt-file: .github/prompts/ai-translation-user.prompt.yml
input: |
language: ${{ matrix.language }}
lang_code: ${{ matrix.lang_code }}
translation_file: ${{ matrix.file }}
translation_content: |
${{ steps.translation_content.outputs.content }}
model: openai/gpt-4.1
max-tokens: 8000
token: ${{ secrets.AMC_COPILOT_TOKEN_CLASSIC }}
continue-on-error: true
- name: Check AI translation success
id: check_translation
run: |
# Check if the AI translation step succeeded
if [ "${{ steps.ai_translate.outcome }}" != "success" ]; then
echo "❌ AI translation step failed for ${{ matrix.file }}"
echo "Step outcome: ${{ steps.ai_translate.outcome }}"
echo "Step conclusion: ${{ steps.ai_translate.conclusion }}"
echo "Language: ${{ matrix.language }} (${{ matrix.lang_code }})"
echo "File: ${{ matrix.file }}"
echo "Suffix: ${{ matrix.suffix }}"
# Check if prompt file exists for debugging
prompt_file="translate_${{ matrix.lang_code }}${{ matrix.suffix }}.prompt.yml"
if [ -f "$prompt_file" ]; then
echo "✅ Prompt file exists and is $(wc -c < "$prompt_file") bytes"
else
echo "❌ Prompt file missing: $prompt_file"
fi
echo "translation_successful=false" >> $GITHUB_OUTPUT
exit 0 # Don't fail the workflow, just mark as unsuccessful
fi
# Debug: Show available outputs
echo "🔍 Available AI action outputs:"
echo "Response file output: '${{ steps.ai_translate.outputs.response-file }}'"
echo "Response output exists: ${{ steps.ai_translate.outputs.response != '' }}"
# Check if we have any usable output
if [ -n "${{ steps.ai_translate.outputs.response-file }}" ] && [ -f "${{ steps.ai_translate.outputs.response-file }}" ]; then
echo "✅ Response file found: ${{ steps.ai_translate.outputs.response-file }}"
echo "translation_successful=true" >> $GITHUB_OUTPUT
echo "output_method=file" >> $GITHUB_OUTPUT
elif [ -n "${{ steps.ai_translate.outputs.response }}" ]; then
response_length=$(echo "${{ steps.ai_translate.outputs.response }}" | wc -c)
echo "✅ Response content found (length: ${response_length})"
echo "translation_successful=true" >> $GITHUB_OUTPUT
echo "output_method=content" >> $GITHUB_OUTPUT
else
echo "❌ No usable AI response found for ${{ matrix.file }}"
echo "Available outputs:"
echo " - response-file: '${{ steps.ai_translate.outputs.response-file }}'"
echo " - response: '${{ steps.ai_translate.outputs.response }}'"
echo "translation_successful=false" >> $GITHUB_OUTPUT
fi
- name: Save translation result
if: steps.check_translation.outputs.translation_successful == 'true'
run: |
# Save the AI response back to the original translation file
if [ "${{ steps.check_translation.outputs.output_method }}" = "file" ]; then
echo "📄 Using response file: ${{ steps.ai_translate.outputs.response-file }}"
cp "${{ steps.ai_translate.outputs.response-file }}" "${{ matrix.file }}.raw"
elif [ "${{ steps.check_translation.outputs.output_method }}" = "content" ]; then
echo "📝 Using response content"
echo "${{ steps.ai_translate.outputs.response }}" > "${{ matrix.file }}.raw"
else
echo "❌ Unexpected output method: ${{ steps.check_translation.outputs.output_method }}"
exit 1
fi
# Clean up AI output: keep only lines starting with number followed by colon
echo "🧹 Cleaning AI output..."
grep -E '^[0-9]+:' "${{ matrix.file }}.raw" > "${{ matrix.file }}" || {
echo "❌ Failed to extract valid translations from AI output"
echo "Raw output preview:"
head -20 "${{ matrix.file }}.raw"
exit 1
}
# Validate the cleaned file
if [ -f "${{ matrix.file }}" ] && [ -s "${{ matrix.file }}" ]; then
echo "✅ AI translation saved and cleaned successfully for ${{ matrix.language }}"
echo "📊 Raw file size: $(wc -c < "${{ matrix.file }}.raw") bytes"
echo "📊 Cleaned file size: $(wc -c < "${{ matrix.file }}") bytes"
echo "📊 Valid translation lines: $(wc -l < "${{ matrix.file }}") lines"
else
echo "❌ Translation file is empty or missing after cleanup: ${{ matrix.file }}"
exit 1
fi
- name: Handle translation failure
if: steps.check_translation.outputs.translation_successful != 'true'
run: |
echo "⚠️ AI translation failed for ${{ matrix.language }} (${{ matrix.file }})"
echo "Language: ${{ matrix.language }} (code: ${{ matrix.lang_code }})"
echo "File chunk: ${{ matrix.file }}${{ matrix.suffix }}"
echo "Creating placeholder file to avoid breaking the workflow"
touch "${{ matrix.file }}"
echo "# Translation failed for ${{ matrix.language }} (chunk${{ matrix.suffix }})" > "${{ matrix.file }}"
echo "# File: ${{ matrix.file }}" >> "${{ matrix.file }}"
echo "# Please translate manually or retry the workflow" >> "${{ matrix.file }}"
echo "# This was part of a chunked translation batch for scalability" >> "${{ matrix.file }}"
- name: Upload translated file
if: always() # Upload even if translation failed, for debugging
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
name: translated-${{ matrix.lang_code }}${{ matrix.suffix }}
path: ${{ matrix.file }}
retention-days: 1
- name: Report translation status
if: always()
run: |
if [ "${{ steps.check_translation.outputs.translation_successful }}" = "true" ]; then
echo "✅ Successfully translated ${{ matrix.file }} for ${{ matrix.language }}"
echo "📊 Chunk: ${{ matrix.suffix || 'single file' }}"
else
echo "❌ Failed to translate ${{ matrix.file }} for ${{ matrix.language }}"
echo "📊 Chunk: ${{ matrix.suffix || 'single file' }}"
echo "::warning::AI translation failed for ${{ matrix.language }}${{ matrix.suffix }}. Manual translation may be required."
fi
# Job to collect all translations and create the final PR
finalize_translations:
needs: [extract_strings, ai_translate]
if: needs.extract_strings.outputs.po-files-changed == 'true'
permissions:
actions: read # needed for downloading translated artifacts
contents: write # for creating branches and commits
pull-requests: write # for creating PRs
runs-on: ubuntu-latest
env:
PYGETTEXT_DOMAIN: ardupilot_methodic_configurator
PYGETTEXT_LOCALEDIR: ardupilot_methodic_configurator/locale
steps:
- name: Harden the runner (Audit all outbound calls)
uses: step-security/harden-runner@58077d3c7e43986b6b15fba718e8ea69e387dfcc # v2.15.1
with:
egress-policy: audit
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- name: Set up Python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
with:
python-version: '3.x'
cache: 'pip'
cache-dependency-path: 'pyproject.toml'
- name: Cache apt packages
uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
with:
path: |
/var/cache/apt/archives/*.deb
/var/lib/apt/lists/*
key: ${{ runner.os }}-apt-gettext-ubuntu2204
restore-keys: |
${{ runner.os }}-apt-gettext-
${{ runner.os }}-apt-
- name: Install apt gettext package
run: |
sudo apt-get update
sudo apt-get install -y gettext=0.21-14ubuntu2
- name: Install python-gettext requirement
continue-on-error: true
run: |
export PIP_VERSION=$(grep -oP 'pip\s*==\s*\K[0-9]+(\.[0-9]+)*' pyproject.toml | head -1 || echo '')
export PYTHON_GETTEXT_VERSION=$(grep -oP 'python-gettext\s*==\s*\K[0-9]+(\.[0-9]+)*' pyproject.toml | head -1 || echo '')
if [ -z "$PIP_VERSION" ]; then
echo "::warning::Could not detect pip version in pyproject.toml; falling back to latest."
PIP_INSTALL="pip"
else
echo "Will install pip version $PIP_VERSION."
PIP_INSTALL="pip==$PIP_VERSION"
fi
if [ -z "$PYTHON_GETTEXT_VERSION" ]; then
echo "::warning::Could not detect python-gettext version in pyproject.toml; falling back to 5.0."
PYTHON_GETTEXT_INSTALL="python-gettext==5.0"
else
echo "Will install python-gettext version $PYTHON_GETTEXT_VERSION."
PYTHON_GETTEXT_INSTALL="python-gettext==$PYTHON_GETTEXT_VERSION"
fi
python -m pip install "$PIP_INSTALL" "$PYTHON_GETTEXT_INSTALL"
- name: Extract and merge strings into .po files
run: |
# Re-run the extraction and merge steps to ensure .po files have the new msgid entries
# This is necessary because the extract_strings job's changes are not persisted
python create_pot_file.py
python merge_pot_file.py
echo "✅ POT extracted and merged into .po files"
- name: Configure git for commits
run: |
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
- name: Commit .pot and .po string merges
run: |
git add $PYGETTEXT_LOCALEDIR/$PYGETTEXT_DOMAIN.pot
git add $PYGETTEXT_LOCALEDIR/**/$PYGETTEXT_DOMAIN.po
if git diff --staged --quiet; then
echo "ℹ️ No .pot or .po changes to commit"
else
git commit -m "chore(translations): merge new un-translated string(s) to existing .po files"
echo "✅ Committed .pot and .po file changes"
fi
- name: Download all translated files
if: needs.extract_strings.outputs.translations-to-process == 'true'
uses: actions/download-artifact@70fc10c6e5e1ce46ad2ea6f2b72d43f7d47b13c3 # v8.0.0
with:
pattern: translated-*
merge-multiple: true
- name: Insert AI translations into .po files
if: needs.extract_strings.outputs.translations-to-process == 'true'
shell: bash # Don't use -e flag to prevent premature exit
run: |
set -x # Enable command tracing for debugging
# Check if we have any translated files
if ls missing_translations_*.txt 1> /dev/null 2>&1; then
echo "📥 Processing AI translations..."
# Check for failed translations (files with failure markers)
failed_translations=0
successful_translations=0
for file in missing_translations_*.txt; do
if [ -f "$file" ]; then
if grep -q "# Translation failed" "$file" 2>/dev/null; then
echo "⚠️ Found failed translation: $file"
failed_translations=$((failed_translations + 1))
# Remove failed translation files so they don't get processed
rm "$file"
else
echo "✅ Found successful translation: $file"
successful_translations=$((successful_translations + 1))
fi
fi
done
echo "📊 Translation summary:"
echo " - Successful: $successful_translations"
echo " - Failed: $failed_translations"
if [ $successful_translations -gt 0 ]; then
echo "🔄 Processing successful translations with insert_missing_translations.py"
# Show files that will be processed
echo "Files to process:"
ls -lh missing_translations_*.txt
# Validate file format before processing
echo "Validating translation file format..."
for file in missing_translations_*.txt; do
echo "Checking $file:"
if grep -qE '^[0-9]+:' "$file"; then
echo "✅ File format is valid"
else
echo "❌ ERROR: File $file does not contain valid translation lines (format: number:text)"
echo "File contents:"
cat "$file"
exit 1
fi
# Show file preview
echo "First 5 lines of $file:"
head -5 "$file"
echo "---"
done
# Run with full error output captured
echo "Running insert_missing_translations.py..."
if python insert_missing_translations.py 2>&1; then
echo "✅ AI translations inserted into .po files"
else
exit_code=$?
echo "❌ insert_missing_translations.py failed with exit code $exit_code"
exit $exit_code
fi
else
echo "⚠️ No successful translations to process"
fi
if [ $failed_translations -gt 0 ]; then
echo "::warning::$failed_translations translation(s) failed and will need manual translation"
fi
else
echo "ℹ️ No AI translations to process"
fi
- name: Compile .mo files
run: |
if ! python create_mo_files.py; then
echo "❌ Failed to compile .mo files"
exit 1
fi
echo "✅ .mo files compiled successfully"
- name: Stage AI translations and compiled .mo files
run: |
git add $PYGETTEXT_LOCALEDIR/**/$PYGETTEXT_DOMAIN.po
git add $PYGETTEXT_LOCALEDIR/**/$PYGETTEXT_DOMAIN.mo
- name: Prepare PR summary
id: pr_summary
run: |
# Count successful and failed translations from job artifacts metadata
total_files=0
successful_files=0
failed_files=0
# Count translation files that were processed
if ls missing_translations_*.txt 1> /dev/null 2>&1; then
total_files=$(ls missing_translations_*.txt 2>/dev/null | wc -l)
successful_files=$total_files # Since failed ones were removed earlier
fi
# Calculate failed files based on matrix jobs (this is approximate)
# In a real scenario, you'd want to pass this info through job outputs
echo "translation_summary<<EOF" >> $GITHUB_OUTPUT
echo "## 📊 Translation Processing Summary" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
echo "- **Total translation files processed**: $total_files" >> $GITHUB_OUTPUT
echo "- **Successfully translated**: $successful_files" >> $GITHUB_OUTPUT
echo "- **Failed translations**: $failed_files" >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
if [ $failed_files -gt 0 ]; then
echo "⚠️ **Note**: Some translations failed and will need manual review." >> $GITHUB_OUTPUT
echo "" >> $GITHUB_OUTPUT
fi
echo "EOF" >> $GITHUB_OUTPUT
- name: Clean up temporary translation files
if: always()
run: |
# Remove temporary translation files to ensure they're not committed
rm -f missing_translations_*.txt
rm -f missing_translations_*.txt.raw
echo "🧹 Cleaned up temporary translation files"
- name: Create Pull Request
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
with:
labels: i18n, automated-pr
token: ${{ secrets.GITHUB_TOKEN }}
branch: merge-i18n-po-strings
title: "Merge new un-translated string(s) to existing .po files with AI translations"
commit-message: "chore(translations): apply AI translations and compile .mo files"
body: |
Update .pot file with new un-translated string(s) from the source code
Merge .pot file strings into existing .po files
🤖 **AI-Powered Translation Applied with Enhanced Matrix Processing**:
- Automatically extracted missing translations using `extract_missing_translations.py`
- Applied AI-powered translations using GitHub Models (GPT-4o) for multiple languages
- Supports processing unlimited translations per language with automatic chunking
- Inserted translated strings into .po files using `insert_missing_translations.py`
- Compiled binary .mo files for immediate use
${{ steps.pr_summary.outputs.translation_summary }}
**Languages processed**: Portuguese (pt), German (de), Italian (it), Japanese (ja), Chinese Simplified (zh_CN)
**Enhanced Matrix Processing & Scaling**:
- ✅ **Automatic chunking** when >50 strings per language (configurable)
- ✅ **Robust error handling** for failed AI translation requests with detailed debugging
- ✅ **File validation** before and after AI processing
- ✅ **Consistent terminology** guidelines applied across all chunks for each language
- Robust error handling for failed AI translation requests
**Translation Guidelines Applied**:
- Technical aviation/drone context preservation
- Formal register for technical documentation
- Language-specific conventions (e.g., European Portuguese, formal German)
- Consistent terminology maintenance across chunks
- Placeholder preservation ({variable_name} patterns)
**Quality Assurance**:
- Validation of AI action outputs before processing
- Graceful handling of AI service failures
- File size and content validation after translation
- Comprehensive error reporting and debugging information
Please review the AI-generated translations for accuracy and cultural appropriateness before merging.
delete-branch: true