diff --git a/.github/workflows/check_for_crowdin_updates.yml b/.github/workflows/check_for_crowdin_updates.yml
index 25a8877..8603ff4 100644
--- a/.github/workflows/check_for_crowdin_updates.yml
+++ b/.github/workflows/check_for_crowdin_updates.yml
@@ -1,12 +1,31 @@
name: Check for Crowdin Updates
+# Not sure why yet, but uploading artefacts after creating the pull requests
+# seems to include only part of what should be included.
+# As a dirty fix, we upload the artefacts first, and then make the pull request
+
on:
schedule:
- cron: '0 0 * * 1' # Every Monday at 12:00 AM UTC, which is 10:00 AM Melbourne time (AEST)
workflow_dispatch:
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+env:
+ PR_TITLE: "[Automated] Update translations from Crowdin"
+ PR_DESCRIPTION: |
+ [Automated]
+ This PR includes the latest translations from Crowdin
+
+ Session uses the community-driven translation platform Crowdin for localization, anyone can contribute at https://getsession.org/translate
+ PR_TARGET_BRANCH: feature/update-crowdin-translations
+
jobs:
- fetch_convert_and_diff_translations:
+
+ fetch_translations:
+ name: Download translations from crowdin
runs-on: ubuntu-latest
steps:
- name: Checkout Repo Content
@@ -14,41 +33,13 @@ jobs:
with:
path: 'scripts'
# don't provide a branch (ref) so it uses the default for that event
- - name: Checkout Android
- uses: actions/checkout@v4
- with:
- repository: 'oxen-io/session-android'
- path: 'android'
- submodules: recursive
- ref: 'release/1.20.0'
- - name: Checkout Desktop
- uses: actions/checkout@v4
- with:
- repository: 'oxen-io/session-desktop'
- path: 'desktop'
- ref: 'standardised_strings_qa_2'
- - name: Checkout iOS
- uses: actions/checkout@v4
- with:
- repository: 'oxen-io/session-ios'
- path: 'ios'
- ref: 'dev'
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: 3.8
cache: 'pip' # caching pip dependencies
- - name: Setup Java
- uses: actions/setup-java@v4
- with:
- distribution: 'temurin'
- java-version: 17
- cache: gradle
-
- - name: Setup Gradle
- uses: gradle/actions/setup-gradle@v4
-
- name: Install Dependencies
+ shell: bash
run: |
pip install -r ${{ github.workspace }}/scripts/crowdin/requirements.txt
- name: Download Translations
@@ -62,77 +53,86 @@ jobs:
--glossary_id 407522 \
--concept_id 36 \
--skip-untranslated-strings
-
- - name: Upload xliff artefacts
+ - name: Upload download artefacts
uses: actions/upload-artifact@v4
with:
- name: session-xliff-artefact
- path: "{{ github.workspace }}/raw_translations/*"
+ name: session-download
+ path: |
+ ${{ github.workspace }}/raw_translations/*.xliff
+ ${{ github.workspace }}/raw_translations/_non_translatable_strings.json
+ ${{ github.workspace }}/raw_translations/_project_info.json
overwrite: true
if-no-files-found: warn
retention-days: 7
- - name: Prepare Android Strings
- run: |
- python "${{ github.workspace }}/scripts/crowdin/generate_android_strings.py" \
- "${{ github.workspace }}/raw_translations" \
- "${{ github.workspace }}/android/libsession/src/main/res" \
- "${{ github.workspace }}/android/libsession/src/main/java/org/session/libsession/utilities/NonTranslatableStringConstants.kt"
-
-
+ build_ios:
+ name: Build iOS strings
+ runs-on: ubuntu-latest
+ needs: [fetch_translations]
+ steps:
+ - name: Checkout Repo Content
+ uses: actions/checkout@v4
+ with:
+ path: 'scripts'
+ # don't provide a branch (ref) so it uses the default for that event
+ - name: Checkout iOS
+ uses: ./scripts/actions/checkout_ios
- - name: Print Android Strings
- run: |
- ls -l "${{ github.workspace }}/android/libsession/src/main/res/"
+ - name: Setup shared
+ uses: ./scripts/actions/setup_shared
- - name: Prepare Desktop Strings
- run: |
- rm -rf ${{ github.workspace }}/desktop/_locales/*
- python "${{ github.workspace }}/scripts/crowdin/generate_desktop_strings.py" \
- "${{ github.workspace }}/raw_translations" \
- "${{ github.workspace }}/desktop/_locales" \
- "${{ github.workspace }}/desktop/ts/localization/constants.ts"
- name: Prepare iOS Strings
run: |
python "${{ github.workspace }}/scripts/crowdin/generate_ios_strings.py" \
"${{ github.workspace }}/raw_translations" \
"${{ github.workspace }}/ios/Session/Meta" \
"${{ github.workspace }}/ios/SessionUtilitiesKit/General/Constants.swift"
-
- - name: Upload Android artefacts
+ - name: Upload iOS artefacts
uses: actions/upload-artifact@v4
with:
- name: session-android-artefact
+ name: session-ios
path: |
- ${{ github.workspace }}/android/libsession/src/main/res/values*/strings.xml
- ${{ github.workspace }}/android/libsession/src/main/java/org/session/libsession/utilities/NonTranslatableStringConstants.kt
+ ${{ github.workspace }}/ios/Session/Meta/Localizable.xcstrings
+ ${{ github.workspace }}/ios/SessionUtilitiesKit/General/Constants.swift
overwrite: true
if-no-files-found: warn
retention-days: 7
+
+
+
+ build_desktop:
+ name: Build Desktop strings
+ needs: [fetch_translations]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Repo Content
+ uses: actions/checkout@v4
+ with:
+ path: 'scripts'
+ # don't provide a branch (ref) so it uses the default for that event
+ - name: Setup shared
+ uses: ./scripts/actions/setup_shared
+
+ - name: Checkout Desktop
+ uses: ./scripts/actions/checkout_desktop
+
+
+ - name: Prepare Desktop Strings
+ run: |
+ python "${{ github.workspace }}/scripts/crowdin/generate_desktop_strings.py" \
+ "${{ github.workspace }}/raw_translations" \
+ "${{ github.workspace }}/desktop/_locales" \
+ "${{ github.workspace }}/desktop/ts/localization/constants.ts"
- name: Upload Desktop artifacts
uses: actions/upload-artifact@v4
with:
- name: session-desktop-artifact
+ name: session-desktop
path: |
${{ github.workspace }}/desktop/_locales
${{ github.workspace }}/desktop/ts/localization/constants.ts
overwrite: true
if-no-files-found: warn
retention-days: 7
- - name: Upload iOS artefacts
- uses: actions/upload-artifact@v4
- with:
- name: session-ios-artifact
- path: |
- ${{ github.workspace }}/ios/Session/Meta/Localizable.xcstrings
- ${{ github.workspace }}/ios/SessionUtilitiesKit/General/Constants.swift
- overwrite: true
- if-no-files-found: warn
- retention-days: 7
-
- - name: Validate strings for Android
- run: ${{ github.workspace }}/android/gradlew :libsession:packageDebugResources
-
# It's easier to find what went wrong with some strings if we can get the files from the artefact upload step above.
# The job will still be marked as failed and no Pull Requests will be made.
- name: Prepare QA strings
@@ -143,7 +143,7 @@ jobs:
- name: Upload QA artefacts
uses: actions/upload-artifact@v4
with:
- name: session-qa-artifact
+ name: session-qa
path: |
${{ github.workspace }}/desktop/ts/localization/locales.ts
${{ github.workspace }}/desktop/ts/localization/constants.ts
@@ -151,52 +151,154 @@ jobs:
if-no-files-found: warn
retention-days: 7
+ build_android:
+ name: Build Android strings
+ runs-on: ubuntu-latest
+ needs: [fetch_translations]
+
+ steps:
+ - name: Checkout Repo Content
+ uses: actions/checkout@v4
+ with:
+ path: 'scripts'
+ # don't provide a branch (ref) so it uses the default for that event
+ - name: Checkout Android
+ uses: ./scripts/actions/checkout_android
+
+ - name: Setup shared
+ uses: ./scripts/actions/setup_shared
+
+ - name: Setup Java
+ uses: actions/setup-java@v4
+ with:
+ distribution: 'temurin'
+ java-version: 17
+ cache: gradle
+
+ - name: Setup Gradle
+ uses: gradle/actions/setup-gradle@v4
+
+ - name: Prepare Android Strings
+ run: |
+ rm -rf ${{ github.workspace }}/android/libsession/src/main/res/values*/strings.xml
+ python "${{ github.workspace }}/scripts/crowdin/generate_android_strings.py" \
+ "${{ github.workspace }}/raw_translations" \
+ "${{ github.workspace }}/android/libsession/src/main/res" \
+ "${{ github.workspace }}/android/libsession/src/main/java/org/session/libsession/utilities/NonTranslatableStringConstants.kt"
+ - name: Upload Android artefacts
+ uses: actions/upload-artifact@v4
+ with:
+ name: session-android
+ path: |
+ ${{ github.workspace }}/android/libsession/src/main/res/values*/strings.xml
+ ${{ github.workspace }}/android/libsession/src/main/java/org/session/libsession/utilities/NonTranslatableStringConstants.kt
+ overwrite: true
+ if-no-files-found: warn
+ retention-days: 7
+
+ - name: Validate strings for Android
+ run: cd ${{ github.workspace }}/android && ${{ github.workspace }}/android/gradlew app:mergePlayDebugResources
+
+ jobs_sync:
+ name: Waiting for build jobs
+ needs: [build_android, build_ios, build_desktop]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Nothing to do
+ shell: bash
+ run: echo "Nothing to do here"
+
+ make_android_pr:
+ name: Make Android PR
+ needs: [jobs_sync]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Repo Content
+ uses: actions/checkout@v4
+ with:
+ path: 'scripts'
+ # don't provide a branch (ref) so it uses the default for that event
+ - name: Checkout Android
+ uses: ./scripts/actions/checkout_android
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: session-android
+ # this has to be the first shared parent on the upload artefact task for Android
+ path: "${{ github.workspace }}/android/libsession/src/main"
- # Not sure why yet, but uploading artefacts after creating the pull requests
- # seems to only include a part of what should be in.
- # As a dirty fix we upload the artefacts first, and then make the pull request
- name: Create Android Pull Request
uses: peter-evans/create-pull-request@v6
with:
path: 'android'
token: ${{ secrets.CROWDIN_PR_TOKEN }}
- title: "[Automated] Update translations from Crowdin"
- body: |
- [Automated]
- This PR includes the latest translations from Crowdin
-
- Session uses the community-driven translation platform Crowdin for localization, anyone can contribute at https://getsession.org/translate
- branch: feature/update-crowdin-translations
- commit-message: "[Automated] Update translations from Crowdin"
+ title: ${{ env.PR_TITLE }}
+ body: ${{ env.PR_DESCRIPTION }}
+ branch: ${{ env.PR_TARGET_BRANCH }}
+ commit-message: ${{ env.PR_TITLE }}
delete-branch: true
+
+ make_desktop_pr:
+ needs: [jobs_sync]
+ name: Make Desktop PR
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Repo Content
+ uses: actions/checkout@v4
+ with:
+ path: 'scripts'
+ # don't provide a branch (ref) so it uses the default for that event
+ - name: Checkout Desktop
+ uses: ./scripts/actions/checkout_desktop
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: session-desktop
+ # this has to be the first shared parent on the upload artefact task for Desktop
+ path: "${{ github.workspace }}/desktop"
+
- name: Create Desktop Pull Request
uses: peter-evans/create-pull-request@v6
with:
path: 'desktop'
token: ${{ secrets.CROWDIN_PR_TOKEN }}
- title: "[Automated] Update translations from Crowdin"
- body: |
- [Automated]
- This PR includes the latest translations from Crowdin
-
- Session uses the community-driven translation platform Crowdin for localization, anyone can contribute at https://getsession.org/translate
- branch: feature/update-crowdin-translations
- commit-message: "[Automated] Update translations from Crowdin"
+ title: ${{ env.PR_TITLE }}
+ body: ${{ env.PR_DESCRIPTION }}
+ branch: ${{ env.PR_TARGET_BRANCH }}
+ commit-message: ${{ env.PR_TITLE }}
delete-branch: true
+
+
+ make_ios_pr:
+ needs: [jobs_sync]
+ name: Make iOS PR
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout Repo Content
+ uses: actions/checkout@v4
+ with:
+ path: 'scripts'
+ # don't provide a branch (ref) so it uses the default for that event
+ - name: Checkout iOS
+ uses: ./scripts/actions/checkout_ios
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: session-ios
+ # this has to be the first shared parent on the upload artefact task for iOS
+ path: "${{ github.workspace }}/ios"
+
- name: Create iOS Pull Request
uses: peter-evans/create-pull-request@v6
with:
path: 'ios'
token: ${{ secrets.CROWDIN_PR_TOKEN }}
- title: "[Automated] Update translations from Crowdin"
- body: |
- [Automated]
- This PR includes the latest translations from Crowdin
-
- Session uses the community-driven translation platform Crowdin for localization, anyone can contribute at https://getsession.org/translate
- branch: feature/update-crowdin-translations
- commit-message: "[Automated] Update translations from Crowdin"
+ title: ${{ env.PR_TITLE }}
+ body: ${{ env.PR_DESCRIPTION }}
+ branch: ${{ env.PR_TARGET_BRANCH }}
+ commit-message: ${{ env.PR_TITLE }}
delete-branch: true
+
diff --git a/.pylintrc b/.pylintrc
new file mode 100644
index 0000000..b03f098
--- /dev/null
+++ b/.pylintrc
@@ -0,0 +1,16 @@
+[MASTER]
+# Use multiple processes to speed up Pylint.
+jobs=0
+
+
+[FORMAT]
+max-line-length=180
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,TODO
+
+[MESSAGES CONTROL]
+disable=broad-except,missing-function-docstring
diff --git a/actions/checkout_android/action.yml b/actions/checkout_android/action.yml
new file mode 100644
index 0000000..1aa4dce
--- /dev/null
+++ b/actions/checkout_android/action.yml
@@ -0,0 +1,16 @@
+name: 'Checkout Android'
+description: "Checkout session-android and remove the existing translated strings"
+runs:
+ using: 'composite'
+ steps:
+ - name: Checkout Android
+ uses: actions/checkout@v4
+ with:
+ repository: 'oxen-io/session-android'
+ path: 'android'
+ submodules: recursive
+ ref: 'release/1.20.0'
+ - name: Remove existing strings
+ shell: bash
+ run: |
+ rm -rf ${{ github.workspace }}/android/libsession/src/main/res/values*/strings.xml
diff --git a/actions/checkout_desktop/action.yml b/actions/checkout_desktop/action.yml
new file mode 100644
index 0000000..4b15fd2
--- /dev/null
+++ b/actions/checkout_desktop/action.yml
@@ -0,0 +1,15 @@
+name: 'Checkout Desktop'
+description: "Checkout session-desktop and remove the existing locales"
+runs:
+ using: 'composite'
+ steps:
+ - name: Checkout Desktop
+ uses: actions/checkout@v4
+ with:
+ repository: 'oxen-io/session-desktop'
+ path: 'desktop'
+ ref: 'standardised_strings_qa_2'
+ - name: Remove existing strings
+ shell: bash
+ run: |
+ rm -rf ${{ github.workspace }}/desktop/_locales/*
\ No newline at end of file
diff --git a/actions/checkout_ios/action.yml b/actions/checkout_ios/action.yml
new file mode 100644
index 0000000..b498b48
--- /dev/null
+++ b/actions/checkout_ios/action.yml
@@ -0,0 +1,11 @@
+name: 'Checkout iOS'
+description: "Checkout session-ios"
+runs:
+ using: 'composite'
+ steps:
+ - name: Checkout iOS
+ uses: actions/checkout@v4
+ with:
+ repository: 'oxen-io/session-ios'
+ path: 'ios'
+ ref: 'dev'
\ No newline at end of file
diff --git a/actions/setup_shared/action.yml b/actions/setup_shared/action.yml
new file mode 100644
index 0000000..f8ce360
--- /dev/null
+++ b/actions/setup_shared/action.yml
@@ -0,0 +1,23 @@
+name: 'Setup for all'
+description: "Setup shared for all jobs"
+runs:
+ using: 'composite'
+ steps:
+ - name: Setup Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: 3.8
+ cache: 'pip' # caching pip dependencies
+ - name: Install Dependencies
+ shell: bash
+ run: |
+ pip install -r ${{ github.workspace }}/scripts/crowdin/requirements.txt
+
+ - uses: actions/download-artifact@v4
+ with:
+ name: session-download
+ path: "${{ github.workspace }}/raw_translations"
+
+ - name: Display structure of downloaded files
+ shell: bash
+ run: ls ${{ github.workspace }}/raw_translations
\ No newline at end of file
diff --git a/crowdin/download_translations_from_crowdin.py b/crowdin/download_translations_from_crowdin.py
index 01b4285..bd1553a 100644
--- a/crowdin/download_translations_from_crowdin.py
+++ b/crowdin/download_translations_from_crowdin.py
@@ -1,10 +1,9 @@
import os
-import requests
import json
-import time
import sys
-import re
import argparse
+import requests
+
from colorama import Fore, Style, init
# Initialize colorama
@@ -32,49 +31,40 @@
FORCE_ALLOW_UNAPPROVED = args.force_allow_unapproved
VERBOSE = args.verbose
-# Function to check for errors in API responses
+REQUEST_TIMEOUT_S = 5
+
def check_error(response):
+ """
+ Function to check for errors in API responses
+ """
if response.status_code != 200:
print(f"\033[2K{Fore.RED}❌ Error: {response.json().get('error', {}).get('message', 'Unknown error')} (Code: {response.status_code}){Style.RESET_ALL}")
if VERBOSE:
print(f"{Fore.BLUE}Response: {json.dumps(response.json(), indent=2)}{Style.RESET_ALL}")
sys.exit(1)
-# Function to download a file from Crowdin
def download_file(url, output_path):
- response = requests.get(url, stream=True)
+ """
+ Function to download a file from Crowdin
+ """
+ response = requests.get(url, stream=True, timeout=REQUEST_TIMEOUT_S)
response.raise_for_status()
with open(output_path, 'wb') as f:
for chunk in response.iter_content(chunk_size=8192):
f.write(chunk)
- sanitize_downloaded_file(output_path)
-
-
-# Sanitize crowdin translations and common user mistakes
-def sanitize_downloaded_file(file_path):
- with open(file_path, 'r', encoding='utf-8') as file:
- xml_content = file.read()
-
- correct = '<br/>'
- # the only correct br tag is
.
- # This replaces <.{0,2}br.{0,2}>
- # as we sometimes have a \ or a / or both misplaces
- updated_content = re.sub("<.{0,2}br.{0,2}>",correct,xml_content)
-
-
- # Write the updated content back to the file
- with open(file_path, 'w', encoding='utf-8') as file:
- file.write(updated_content)
-
-# Main function to handle the logic
def main():
+ """
+ Main Function
+ Fetch crowdin project info, and iterate over each locale to save the corresponding .xliff locally.
+ """
# Retrieve the list of languages
print(f"{Fore.WHITE}⏳ Retrieving project details...{Style.RESET_ALL}", end='\r')
project_response = requests.get(f"{CROWDIN_API_BASE_URL}/projects/{CROWDIN_PROJECT_ID}",
- headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}"})
+ headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}"},
+ timeout=REQUEST_TIMEOUT_S)
check_error(project_response)
project_details = project_response.json()['data']
source_language_id = project_details['sourceLanguageId']
@@ -90,7 +80,7 @@ def main():
if not os.path.exists(DOWNLOAD_DIRECTORY):
os.makedirs(DOWNLOAD_DIRECTORY)
- project_info_file = os.path.join(DOWNLOAD_DIRECTORY, f"_project_info.json")
+ project_info_file = os.path.join(DOWNLOAD_DIRECTORY, "_project_info.json")
with open(project_info_file, 'w', encoding='utf-8') as file:
json.dump(project_response.json(), file, indent=2)
@@ -105,7 +95,7 @@ def main():
}
source_export_response = requests.post(f"{CROWDIN_API_BASE_URL}/projects/{CROWDIN_PROJECT_ID}/translations/exports",
headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}", "Content-Type": "application/json"},
- data=json.dumps(source_export_payload))
+ data=json.dumps(source_export_payload), timeout=REQUEST_TIMEOUT_S)
check_error(source_export_response)
if VERBOSE:
@@ -118,7 +108,7 @@ def main():
try:
download_file(source_download_url, source_download_path)
except requests.exceptions.HTTPError as e:
- print(f"\033[2K{Fore.RED}❌ {prefix} Failed to download translations for {source_lang_locale} (Error: {e}){Style.RESET_ALL}")
+ print(f"\033[2K{Fore.RED}❌ Failed to download translations for {source_lang_locale} (Error: {e}){Style.RESET_ALL}")
if VERBOSE:
print(f"{Fore.BLUE}Response: {e.response.text}{Style.RESET_ALL}")
sys.exit(1)
@@ -145,7 +135,7 @@ def main():
}
export_response = requests.post(f"{CROWDIN_API_BASE_URL}/projects/{CROWDIN_PROJECT_ID}/translations/exports",
headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}", "Content-Type": "application/json"},
- data=json.dumps(export_payload))
+ data=json.dumps(export_payload), timeout=REQUEST_TIMEOUT_S)
check_error(export_response)
if VERBOSE:
@@ -170,13 +160,14 @@ def main():
if CROWDIN_GLOSSARY_ID is not None and CROWDIN_CONCEPT_ID is not None:
print(f"{Fore.WHITE}⏳ Retrieving non-translatable strings...{Style.RESET_ALL}", end='\r')
static_string_response = requests.get(f"{CROWDIN_API_BASE_URL}/glossaries/{CROWDIN_GLOSSARY_ID}/terms?conceptId={CROWDIN_CONCEPT_ID}&limit=500",
- headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}"})
+ headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}"},
+ timeout=REQUEST_TIMEOUT_S)
check_error(static_string_response)
if VERBOSE:
print(f"{Fore.BLUE}Response: {json.dumps(static_string_response.json(), indent=2)}{Style.RESET_ALL}")
- non_translatable_strings_file = os.path.join(DOWNLOAD_DIRECTORY, f"_non_translatable_strings.json")
+ non_translatable_strings_file = os.path.join(DOWNLOAD_DIRECTORY, "_non_translatable_strings.json")
with open(non_translatable_strings_file, 'w', encoding='utf-8') as file:
json.dump(static_string_response.json(), file, indent=2)
diff --git a/crowdin/generate_android_strings.py b/crowdin/generate_android_strings.py
index ee8d532..88fcc46 100644
--- a/crowdin/generate_android_strings.py
+++ b/crowdin/generate_android_strings.py
@@ -5,11 +5,27 @@
import argparse
import re
from pathlib import Path
-from colorama import Fore, Style, init
+from colorama import Fore, Style
# Variables that should be treated as numeric (using %d)
NUMERIC_VARIABLES = ['count', 'found_count', 'total_count']
+# Customizable mapping for output folder hierarchy
+# Add entries here to customize the output path for specific locales
+# Format: 'input_locale': 'output_path'
+LOCALE_PATH_MAPPING = {
+ 'es-419': 'b+es+419',
+ 'kmr-TR': 'kmr',
+ 'hy-AM': 'b+hy',
+ 'pt-BR': 'b+pt+BR',
+ 'pt-PT': 'b+pt+PT',
+ 'zh-CN': 'b+zh+CN',
+ 'zh-TW': 'b+zh+TW',
+ 'sr-CS': 'b+sr+CS',
+ 'sr-SP': 'b+sr+SP'
+ # Add more mappings as needed
+}
+
# Parse command-line arguments
parser = argparse.ArgumentParser(description='Convert a XLIFF translation files to Android XML.')
parser.add_argument('raw_translations_directory', help='Directory which contains the raw translation files')
@@ -26,7 +42,7 @@ def parse_xliff(file_path):
root = tree.getroot()
namespace = {'ns': 'urn:oasis:names:tc:xliff:document:1.2'}
translations = {}
-
+
# Handle plural groups
for group in root.findall('.//ns:group[@restype="x-gettext-plurals"]', namespaces=namespace):
plural_forms = {}
@@ -42,7 +58,7 @@ def parse_xliff(file_path):
plural_forms[form] = target.text
if resname and plural_forms:
translations[resname] = plural_forms
-
+
# Handle non-plural translations
for trans_unit in root.findall('.//ns:trans-unit', namespaces=namespace):
resname = trans_unit.get('resname')
@@ -50,14 +66,14 @@ def parse_xliff(file_path):
target = trans_unit.find('ns:target', namespaces=namespace)
if target is not None and target.text:
translations[resname] = target.text
-
+
return translations
def convert_placeholders(text):
def repl(match):
var_name = match.group(1)
index = len(set(re.findall(r'\{([^}]+)\}', text[:match.start()]))) + 1
-
+
if var_name in NUMERIC_VARIABLES:
return f"%{index}$d"
else:
@@ -66,6 +82,8 @@ def repl(match):
return re.sub(r'\{([^}]+)\}', repl, text)
def clean_string(text):
+ # Note: any changes done for all platforms needs most likely to be done on crowdin side.
+ # So we don't want to replace -> with → for instance, we want the crowdin strings to not have those at all.
# We can use standard XML escaped characters for most things (since XLIFF is an XML format) but
# want the following cases escaped in a particular way
text = text.replace("'", r"\'")
@@ -74,10 +92,6 @@ def clean_string(text):
text = text.replace("<b>", "")
text = text.replace("</b>", "")
text = text.replace("</br>", "\\n")
- text = text.replace("->", "→") # Use the special unicode for arrows
- text = text.replace("->", "→") # Use the special unicode for arrows
- text = text.replace("<-", "←") # Use the special unicode for arrows
- text = text.replace("<-", "←") # Use the special unicode for arrows
text = text.replace("
", "\\n")
text = text.replace("&", "&") # Assume any remaining ampersands are desired
return text.strip() # Strip whitespace
@@ -105,35 +119,30 @@ def generate_android_xml(translations, app_name):
return result
-def convert_xliff_to_android_xml(input_file, output_dir, source_locale, locale, app_name):
+def convert_xliff_to_android_xml(input_file, output_dir, source_locale, locale, locale_two_letter_code, app_name):
if not os.path.exists(input_file):
raise FileNotFoundError(f"Could not find '{input_file}' in raw translations directory")
# Parse the XLIFF and convert to XML (only include the 'app_name' entry in the source language)
- is_source_language = (locale == source_locale)
+ is_source_language = locale == source_locale
translations = parse_xliff(input_file)
output_data = generate_android_xml(translations, app_name if is_source_language else None)
# Generate output files
- language_code = locale.split('-')[0]
- region_code = locale.split('-')[1] if '-' in locale else None
+ output_locale = LOCALE_PATH_MAPPING.get(locale, LOCALE_PATH_MAPPING.get(locale_two_letter_code, locale_two_letter_code))
+
if is_source_language:
language_output_dir = os.path.join(output_dir, 'values')
else:
- language_output_dir = os.path.join(output_dir, f'values-{language_code}')
+ language_output_dir = os.path.join(output_dir, f'values-{output_locale}')
os.makedirs(language_output_dir, exist_ok=True)
language_output_file = os.path.join(language_output_dir, 'strings.xml')
with open(language_output_file, 'w', encoding='utf-8') as file:
file.write(output_data)
- if region_code:
- region_output_dir = os.path.join(output_dir, f'values-{language_code}-r{region_code}')
- os.makedirs(region_output_dir, exist_ok=True)
- region_output_file = os.path.join(region_output_dir, 'strings.xml')
- with open(region_output_file, 'w', encoding='utf-8') as file:
- file.write(output_data)
+
def convert_non_translatable_strings_to_kotlin(input_file, output_path):
if not os.path.exists(input_file):
@@ -141,7 +150,7 @@ def convert_non_translatable_strings_to_kotlin(input_file, output_path):
# Process the non-translatable string input
non_translatable_strings_data = {}
- with open(input_file, 'r') as file:
+ with open(input_file, 'r', encoding="utf-8") as file:
non_translatable_strings_data = json.load(file)
entries = non_translatable_strings_data['data']
@@ -177,9 +186,9 @@ def convert_all_files(input_directory):
raise FileNotFoundError(f"Could not find '{project_info_file}' in raw translations directory")
project_details = {}
- with open(project_info_file, 'r') as file:
+ with open(project_info_file, 'r', encoding="utf-8") as file:
project_details = json.load(file)
-
+
# Extract the language info and sort the target languages alphabetically by locale
source_language = project_details['data']['sourceLanguage']
target_languages = project_details['data']['targetLanguages']
@@ -198,9 +207,10 @@ def convert_all_files(input_directory):
source_locale = source_language['locale']
for language in [source_language] + target_languages:
lang_locale = language['locale']
+ lang_two_letter_code = language['twoLettersCode']
print(f"\033[2K{Fore.WHITE}⏳ Converting translations for {lang_locale} to target format...{Style.RESET_ALL}", end='\r')
input_file = os.path.join(input_directory, f"{lang_locale}.xliff")
- convert_xliff_to_android_xml(input_file, TRANSLATIONS_OUTPUT_DIRECTORY, source_locale, lang_locale, app_name)
+ convert_xliff_to_android_xml(input_file, TRANSLATIONS_OUTPUT_DIRECTORY, source_locale, lang_locale, lang_two_letter_code, app_name)
print(f"\033[2K{Fore.GREEN}✅ All conversions complete{Style.RESET_ALL}")
if __name__ == "__main__":
diff --git a/crowdin/generate_desktop_strings.py b/crowdin/generate_desktop_strings.py
index 9f5f3d1..729dcc1 100644
--- a/crowdin/generate_desktop_strings.py
+++ b/crowdin/generate_desktop_strings.py
@@ -15,12 +15,14 @@
'kmr-TR': 'kmr',
# Note: we don't want to replace - with _ anymore.
# We still need those mappings, otherwise they fallback to their 2 letter codes
- 'es-419': 'es-419',
'hy-AM': 'hy-AM',
+ 'es-419': 'es-419',
'pt-BR': 'pt-BR',
'pt-PT': 'pt-PT',
'zh-CN': 'zh-CN',
- 'zh-TW': 'zh-TW'
+ 'zh-TW': 'zh-TW',
+ 'sr-CS': 'sr-CS',
+ 'sr-SP': 'sr-SP'
# Add more mappings as needed
}
@@ -40,7 +42,7 @@ def parse_xliff(file_path):
root = tree.getroot()
namespace = {'ns': 'urn:oasis:names:tc:xliff:document:1.2'}
translations = {}
-
+
# Handle plural groups
for group in root.findall('.//ns:group[@restype="x-gettext-plurals"]', namespaces=namespace):
plural_forms = {}
@@ -56,7 +58,7 @@ def parse_xliff(file_path):
plural_forms[form] = target.text
if resname and plural_forms:
translations[resname] = plural_forms
-
+
# Handle non-plural translations
for trans_unit in root.findall('.//ns:trans-unit', namespaces=namespace):
resname = trans_unit.get('resname')
@@ -64,14 +66,12 @@ def parse_xliff(file_path):
target = trans_unit.find('ns:target', namespaces=namespace)
if target is not None and target.text:
translations[resname] = target.text
-
+
return translations
def clean_string(text):
- text = text.replace("->", "→") # Use the special unicode for arrows
- text = text.replace("->", "→") # Use the special unicode for arrows
- text = text.replace("<-", "←") # Use the special unicode for arrows
- text = text.replace("<-", "←") # Use the special unicode for arrows
+ # Note: any changes done for all platforms needs most likely to be done on crowdin side.
+ # So we don't want to replace -> with → for instance, we want the crowdin strings to not have those at all.
text = html.unescape(text) # Unescape any HTML escaping
return text.strip() # Strip whitespace
@@ -83,7 +83,7 @@ def generate_icu_pattern(target):
# Replace {count} with #
value = clean_string(value.replace('{count}', '#'))
pattern_parts.append(f"{form} [{value}]")
-
+
return "{{count, plural, {0}}}".format(" ".join(pattern_parts))
else: # It's a regular string
return clean_string(target)
@@ -118,7 +118,7 @@ def convert_non_translatable_strings_to_type_script(input_file, output_path, exp
# Process the non-translatable string input
non_translatable_strings_data = {}
- with open(input_file, 'r') as file:
+ with open(input_file, 'r', encoding="utf-8") as file:
non_translatable_strings_data = json.load(file)
entries = non_translatable_strings_data['data']
@@ -155,9 +155,9 @@ def convert_all_files(input_directory):
raise FileNotFoundError(f"Could not find '{project_info_file}' in raw translations directory")
project_details = {}
- with open(project_info_file, 'r') as file:
+ with open(project_info_file, 'r', encoding="utf-8") as file:
project_details = json.load(file)
-
+
# Extract the language info and sort the target languages alphabetically by locale
source_language = project_details['data']['sourceLanguage']
target_languages = project_details['data']['targetLanguages']
diff --git a/crowdin/generate_ios_strings.py b/crowdin/generate_ios_strings.py
index 879aa79..1e81c30 100644
--- a/crowdin/generate_ios_strings.py
+++ b/crowdin/generate_ios_strings.py
@@ -24,7 +24,7 @@ def parse_xliff(file_path):
root = tree.getroot()
namespace = {'ns': 'urn:oasis:names:tc:xliff:document:1.2'}
translations = {}
-
+
file_elem = root.find('ns:file', namespaces=namespace)
if file_elem is None:
raise ValueError(f"Invalid XLIFF structure in file: {file_path}")
@@ -32,15 +32,15 @@ def parse_xliff(file_path):
target_language = file_elem.get('target-language')
if target_language is None:
raise ValueError(f"Missing target-language in file: {file_path}")
-
+
for trans_unit in root.findall('.//ns:trans-unit', namespaces=namespace):
resname = trans_unit.get('resname') or trans_unit.get('id')
if resname is None:
continue # Skip entries without a resname or id
-
+
target = trans_unit.find('ns:target', namespaces=namespace)
source = trans_unit.find('ns:source', namespaces=namespace)
-
+
if target is not None and target.text:
translations[resname] = target.text
elif source is not None and source.text:
@@ -68,10 +68,8 @@ def parse_xliff(file_path):
return translations, target_language
def clean_string(text):
- text = text.replace("->", "→") # Use the special unicode for arrows
- text = text.replace("->", "→") # Use the special unicode for arrows
- text = text.replace("<-", "←") # Use the special unicode for arrows
- text = text.replace("<-", "←") # Use the special unicode for arrows
+ # Note: any changes done for all platforms needs most likely to be done on crowdin side.
+ # So we don't want to replace -> with → for instance, we want the crowdin strings to not have those at all.
text = html.unescape(text) # Unescape any HTML escaping
return text.strip() # Strip whitespace
@@ -114,7 +112,7 @@ def convert_xliff_to_string_catalog(input_dir, output_dir, source_language, targ
if isinstance(translation, dict): # It's a plural group
converted_translations = convert_placeholders_for_plurals(resname, translation)
-
+
# Check if any of the translations contain '{count}'
contains_count = any('{count}' in value for value in translation.values())
@@ -175,7 +173,7 @@ def convert_non_translatable_strings_to_swift(input_file, output_path):
# Process the non-translatable string input
non_translatable_strings_data = {}
- with open(input_file, 'r') as file:
+ with open(input_file, 'r', encoding="utf-8") as file:
non_translatable_strings_data = json.load(file)
entries = non_translatable_strings_data['data']
@@ -206,9 +204,9 @@ def convert_all_files(input_directory):
raise FileNotFoundError(f"Could not find '{project_info_file}' in raw translations directory")
project_details = {}
- with open(project_info_file, 'r') as file:
+ with open(project_info_file, 'r', encoding="utf-8") as file:
project_details = json.load(file)
-
+
# Extract the language info and sort the target languages alphabetically by locale
source_language = project_details['data']['sourceLanguage']
target_languages = project_details['data']['targetLanguages']