Skip to content

Commit

Permalink
chore: test breaking workflow into separate jobs
Browse files Browse the repository at this point in the history
  • Loading branch information
Bilb committed Sep 20, 2024
1 parent 3a7326e commit 974ed6a
Show file tree
Hide file tree
Showing 10 changed files with 370 additions and 180 deletions.
306 changes: 208 additions & 98 deletions .github/workflows/check_for_crowdin_updates.yml

Large diffs are not rendered by default.

16 changes: 16 additions & 0 deletions .pylintrc
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
[MASTER]
# Use multiple processes to speed up Pylint.
# jobs=0 auto-detects the number of available CPU cores.
jobs=0


[FORMAT]
# Maximum allowed characters on a single line.
max-line-length=180


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,TODO

[MESSAGES CONTROL]
# Checks deliberately suppressed repository-wide.
disable=broad-except,missing-function-docstring
16 changes: 16 additions & 0 deletions actions/checkout_android/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Composite action: check out the session-android repository (with submodules)
# and delete its bundled string resources so freshly generated translations
# can be dropped in without stale files surviving.
name: 'Checkout Android'
description: "Check out session-android and remove its existing string resources"
runs:
  using: 'composite'
  steps:
    - name: Checkout Android
      uses: actions/checkout@v4
      with:
        repository: 'oxen-io/session-android'
        path: 'android'
        submodules: recursive
        ref: 'release/1.20.0'
    - name: Remove existing strings
      shell: bash
      run: |
        rm -rf ${{ github.workspace }}/android/libsession/src/main/res/values*/strings.xml
15 changes: 15 additions & 0 deletions actions/checkout_desktop/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Composite action: check out the session-desktop repository and delete its
# bundled locale files so freshly generated translations can replace them.
name: 'Checkout Desktop'
description: "Check out session-desktop and remove its existing locale files"
runs:
  using: 'composite'
  steps:
    - name: Checkout Desktop
      uses: actions/checkout@v4
      with:
        repository: 'oxen-io/session-desktop'
        path: 'desktop'
        ref: 'standardised_strings_qa_2'
    - name: Remove existing strings
      shell: bash
      run: |
        rm -rf ${{ github.workspace }}/desktop/_locales/*
11 changes: 11 additions & 0 deletions actions/checkout_ios/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Composite action: check out the session-ios repository.
# NOTE(review): unlike the Android/Desktop variants there is no
# "remove existing strings" step here — confirm that is intentional.
name: 'Checkout iOS'
description: "Check out session-ios"
runs:
  using: 'composite'
  steps:
    - name: Checkout iOS
      uses: actions/checkout@v4
      with:
        repository: 'oxen-io/session-ios'
        path: 'ios'
        ref: 'dev'
23 changes: 23 additions & 0 deletions actions/setup_shared/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Composite action: setup shared by all jobs — install the Python toolchain
# and pip dependencies, then fetch the raw-translations artifact uploaded by
# an earlier job in the workflow.
name: 'Setup for all'
description: "Setup shared for all jobs"
runs:
  using: 'composite'
  steps:
    - name: Setup Python
      uses: actions/setup-python@v5
      with:
        # Quoted so YAML keeps the exact version string; an unquoted future
        # bump to 3.10 would be parsed as the float 3.1.
        python-version: '3.8'
        cache: 'pip' # caching pip dependencies
    - name: Install Dependencies
      shell: bash
      run: |
        pip install -r ${{ github.workspace }}/scripts/crowdin/requirements.txt
    # Pull the "session-download" artifact (the downloaded .xliff files)
    # into raw_translations for the conversion scripts to consume.
    - uses: actions/download-artifact@v4
      with:
        name: session-download
        path: "${{ github.workspace }}/raw_translations"

    - name: Display structure of downloaded files
      shell: bash
      run: ls ${{ github.workspace }}/raw_translations
57 changes: 24 additions & 33 deletions crowdin/download_translations_from_crowdin.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import os
import requests
import json
import time
import sys
import re
import argparse
import requests

from colorama import Fore, Style, init

# Initialize colorama
Expand Down Expand Up @@ -32,49 +31,40 @@
FORCE_ALLOW_UNAPPROVED = args.force_allow_unapproved
VERBOSE = args.verbose

# Function to check for errors in API responses
REQUEST_TIMEOUT_S = 5

def check_error(response):
    """Abort the script if a Crowdin API response is not HTTP 200.

    Prints the API-provided error message in red (falling back to
    'Unknown error'), dumps the full JSON payload when VERBOSE is set,
    and terminates the process with exit code 1. Returns None on success.
    """
    if response.status_code == 200:
        return

    payload = response.json()
    message = payload.get('error', {}).get('message', 'Unknown error')
    # \033[2K clears the current terminal line (progress text printed with end='\r').
    print(f"\033[2K{Fore.RED}❌ Error: {message} (Code: {response.status_code}){Style.RESET_ALL}")
    if VERBOSE:
        print(f"{Fore.BLUE}Response: {json.dumps(payload, indent=2)}{Style.RESET_ALL}")
    sys.exit(1)

# Function to download a file from Crowdin
def download_file(url, output_path):
    """Download a file from Crowdin to ``output_path`` and sanitize it.

    Streams the response body to disk in 8 KiB chunks so large translation
    files are never held fully in memory, then normalises known bad patterns
    (malformed <br/> tags) in place via sanitize_downloaded_file().

    Raises:
        requests.exceptions.HTTPError: if the server returns a 4xx/5xx status.
        requests.exceptions.Timeout: if the request exceeds REQUEST_TIMEOUT_S.
    """
    # Single timed request; a stray duplicate call without a timeout
    # (diff leftover) was removed — it could hang CI indefinitely.
    response = requests.get(url, stream=True, timeout=REQUEST_TIMEOUT_S)
    response.raise_for_status()

    with open(output_path, 'wb') as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)

    sanitize_downloaded_file(output_path)


# Sanitize crowdin translations and common user mistakes
def sanitize_downloaded_file(file_path):
    """Normalise malformed escaped <br> tags in a downloaded file, in place.

    The only correct line-break tag is '<br/>', but Crowdin translations
    sometimes arrive with a '\\' or '/' (or both) misplaced around 'br'.
    Any escaped tag shaped like &lt;..br..&gt; (up to two stray characters
    on either side of 'br') is rewritten to the canonical form.
    """
    with open(file_path, 'r', encoding='utf-8') as src:
        original_content = src.read()

    correct = '<br/>'
    # Replaces the escaped pattern &lt;.{0,2}br.{0,2}&gt; wholesale, covering
    # the common misplacements of '\\' and '/' seen in user translations.
    sanitized = re.sub("&lt;.{0,2}br.{0,2}&gt;", correct, original_content)

    with open(file_path, 'w', encoding='utf-8') as dst:
        dst.write(sanitized)


# Main function to handle the logic
def main():
"""
Main Function
Fetch crowdin project info, and iterate over each locale to save the corresponding .xliff locally.
"""
# Retrieve the list of languages
print(f"{Fore.WHITE}⏳ Retrieving project details...{Style.RESET_ALL}", end='\r')
project_response = requests.get(f"{CROWDIN_API_BASE_URL}/projects/{CROWDIN_PROJECT_ID}",
headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}"})
headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}"},
timeout=REQUEST_TIMEOUT_S)
check_error(project_response)
project_details = project_response.json()['data']
source_language_id = project_details['sourceLanguageId']
Expand All @@ -90,7 +80,7 @@ def main():
if not os.path.exists(DOWNLOAD_DIRECTORY):
os.makedirs(DOWNLOAD_DIRECTORY)

project_info_file = os.path.join(DOWNLOAD_DIRECTORY, f"_project_info.json")
project_info_file = os.path.join(DOWNLOAD_DIRECTORY, "_project_info.json")
with open(project_info_file, 'w', encoding='utf-8') as file:
json.dump(project_response.json(), file, indent=2)

Expand All @@ -105,7 +95,7 @@ def main():
}
source_export_response = requests.post(f"{CROWDIN_API_BASE_URL}/projects/{CROWDIN_PROJECT_ID}/translations/exports",
headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}", "Content-Type": "application/json"},
data=json.dumps(source_export_payload))
data=json.dumps(source_export_payload), timeout=REQUEST_TIMEOUT_S)
check_error(source_export_response)

if VERBOSE:
Expand All @@ -118,7 +108,7 @@ def main():
try:
download_file(source_download_url, source_download_path)
except requests.exceptions.HTTPError as e:
print(f"\033[2K{Fore.RED}{prefix} Failed to download translations for {source_lang_locale} (Error: {e}){Style.RESET_ALL}")
print(f"\033[2K{Fore.RED}❌ Failed to download translations for {source_lang_locale} (Error: {e}){Style.RESET_ALL}")
if VERBOSE:
print(f"{Fore.BLUE}Response: {e.response.text}{Style.RESET_ALL}")
sys.exit(1)
Expand All @@ -145,7 +135,7 @@ def main():
}
export_response = requests.post(f"{CROWDIN_API_BASE_URL}/projects/{CROWDIN_PROJECT_ID}/translations/exports",
headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}", "Content-Type": "application/json"},
data=json.dumps(export_payload))
data=json.dumps(export_payload), timeout=REQUEST_TIMEOUT_S)
check_error(export_response)

if VERBOSE:
Expand All @@ -170,13 +160,14 @@ def main():
if CROWDIN_GLOSSARY_ID is not None and CROWDIN_CONCEPT_ID is not None:
print(f"{Fore.WHITE}⏳ Retrieving non-translatable strings...{Style.RESET_ALL}", end='\r')
static_string_response = requests.get(f"{CROWDIN_API_BASE_URL}/glossaries/{CROWDIN_GLOSSARY_ID}/terms?conceptId={CROWDIN_CONCEPT_ID}&limit=500",
headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}"})
headers={"Authorization": f"Bearer {CROWDIN_API_TOKEN}"},
timeout=REQUEST_TIMEOUT_S)
check_error(static_string_response)

if VERBOSE:
print(f"{Fore.BLUE}Response: {json.dumps(static_string_response.json(), indent=2)}{Style.RESET_ALL}")

non_translatable_strings_file = os.path.join(DOWNLOAD_DIRECTORY, f"_non_translatable_strings.json")
non_translatable_strings_file = os.path.join(DOWNLOAD_DIRECTORY, "_non_translatable_strings.json")
with open(non_translatable_strings_file, 'w', encoding='utf-8') as file:
json.dump(static_string_response.json(), file, indent=2)

Expand Down
58 changes: 34 additions & 24 deletions crowdin/generate_android_strings.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,27 @@
import argparse
import re
from pathlib import Path
from colorama import Fore, Style, init
from colorama import Fore, Style

# Variables that should be treated as numeric (using %d)
NUMERIC_VARIABLES = ['count', 'found_count', 'total_count']

# Customizable mapping for output folder hierarchy
# Add entries here to customize the output path for specific locales
# Format: 'input_locale': 'output_path'
# NOTE(review): values look like Android BCP-47 resource qualifiers
# ("b+lang+REGION" folder names) — confirm against the generated
# values-<qualifier> directories before extending this table.
LOCALE_PATH_MAPPING = {
    'es-419': 'b+es+419',
    'kmr-TR': 'kmr',
    'hy-AM': 'b+hy',
    'pt-BR': 'b+pt+BR',
    'pt-PT': 'b+pt+PT',
    'zh-CN': 'b+zh+CN',
    'zh-TW': 'b+zh+TW',
    'sr-CS': 'b+sr+CS',
    'sr-SP': 'b+sr+SP'
    # Add more mappings as needed
}

# Parse command-line arguments
parser = argparse.ArgumentParser(description='Convert a XLIFF translation files to Android XML.')
parser.add_argument('raw_translations_directory', help='Directory which contains the raw translation files')
Expand All @@ -26,7 +42,7 @@ def parse_xliff(file_path):
root = tree.getroot()
namespace = {'ns': 'urn:oasis:names:tc:xliff:document:1.2'}
translations = {}

# Handle plural groups
for group in root.findall('.//ns:group[@restype="x-gettext-plurals"]', namespaces=namespace):
plural_forms = {}
Expand All @@ -42,22 +58,22 @@ def parse_xliff(file_path):
plural_forms[form] = target.text
if resname and plural_forms:
translations[resname] = plural_forms

# Handle non-plural translations
for trans_unit in root.findall('.//ns:trans-unit', namespaces=namespace):
resname = trans_unit.get('resname')
if resname not in translations: # This is not part of a plural group
target = trans_unit.find('ns:target', namespaces=namespace)
if target is not None and target.text:
translations[resname] = target.text

return translations

def convert_placeholders(text):
def repl(match):
var_name = match.group(1)
index = len(set(re.findall(r'\{([^}]+)\}', text[:match.start()]))) + 1

if var_name in NUMERIC_VARIABLES:
return f"%{index}$d"
else:
Expand All @@ -66,6 +82,8 @@ def repl(match):
return re.sub(r'\{([^}]+)\}', repl, text)

def clean_string(text):
# Note: any changes done for all platforms needs most likely to be done on crowdin side.
# So we don't want to replace -&gt; with → for instance, we want the crowdin strings to not have those at all.
# We can use standard XML escaped characters for most things (since XLIFF is an XML format) but
# want the following cases escaped in a particular way
text = text.replace("'", r"\'")
Expand All @@ -74,10 +92,6 @@ def clean_string(text):
text = text.replace("&lt;b&gt;", "<b>")
text = text.replace("&lt;/b&gt;", "</b>")
text = text.replace("&lt;/br&gt;", "\\n")
text = text.replace("-&gt;", "→") # Use the special unicode for arrows
text = text.replace("->", "→") # Use the special unicode for arrows
text = text.replace("&lt;-", "←") # Use the special unicode for arrows
text = text.replace("<-", "←") # Use the special unicode for arrows
text = text.replace("<br/>", "\\n")
text = text.replace("&", "&amp;") # Assume any remaining ampersands are desired
return text.strip() # Strip whitespace
Expand Down Expand Up @@ -105,43 +119,38 @@ def generate_android_xml(translations, app_name):

return result

def convert_xliff_to_android_xml(input_file, output_dir, source_locale, locale, app_name):
def convert_xliff_to_android_xml(input_file, output_dir, source_locale, locale, locale_two_letter_code, app_name):
if not os.path.exists(input_file):
raise FileNotFoundError(f"Could not find '{input_file}' in raw translations directory")

# Parse the XLIFF and convert to XML (only include the 'app_name' entry in the source language)
is_source_language = (locale == source_locale)
is_source_language = locale == source_locale
translations = parse_xliff(input_file)
output_data = generate_android_xml(translations, app_name if is_source_language else None)

# Generate output files
language_code = locale.split('-')[0]
region_code = locale.split('-')[1] if '-' in locale else None
output_locale = LOCALE_PATH_MAPPING.get(locale, LOCALE_PATH_MAPPING.get(locale_two_letter_code, locale_two_letter_code))


if is_source_language:
language_output_dir = os.path.join(output_dir, 'values')
else:
language_output_dir = os.path.join(output_dir, f'values-{language_code}')
language_output_dir = os.path.join(output_dir, f'values-{output_locale}')

os.makedirs(language_output_dir, exist_ok=True)
language_output_file = os.path.join(language_output_dir, 'strings.xml')
with open(language_output_file, 'w', encoding='utf-8') as file:
file.write(output_data)

if region_code:
region_output_dir = os.path.join(output_dir, f'values-{language_code}-r{region_code}')
os.makedirs(region_output_dir, exist_ok=True)
region_output_file = os.path.join(region_output_dir, 'strings.xml')
with open(region_output_file, 'w', encoding='utf-8') as file:
file.write(output_data)


def convert_non_translatable_strings_to_kotlin(input_file, output_path):
if not os.path.exists(input_file):
raise FileNotFoundError(f"Could not find '{input_file}' in raw translations directory")

# Process the non-translatable string input
non_translatable_strings_data = {}
with open(input_file, 'r') as file:
with open(input_file, 'r', encoding="utf-8") as file:
non_translatable_strings_data = json.load(file)

entries = non_translatable_strings_data['data']
Expand Down Expand Up @@ -177,9 +186,9 @@ def convert_all_files(input_directory):
raise FileNotFoundError(f"Could not find '{project_info_file}' in raw translations directory")

project_details = {}
with open(project_info_file, 'r') as file:
with open(project_info_file, 'r', encoding="utf-8") as file:
project_details = json.load(file)

# Extract the language info and sort the target languages alphabetically by locale
source_language = project_details['data']['sourceLanguage']
target_languages = project_details['data']['targetLanguages']
Expand All @@ -198,9 +207,10 @@ def convert_all_files(input_directory):
source_locale = source_language['locale']
for language in [source_language] + target_languages:
lang_locale = language['locale']
lang_two_letter_code = language['twoLettersCode']
print(f"\033[2K{Fore.WHITE}⏳ Converting translations for {lang_locale} to target format...{Style.RESET_ALL}", end='\r')
input_file = os.path.join(input_directory, f"{lang_locale}.xliff")
convert_xliff_to_android_xml(input_file, TRANSLATIONS_OUTPUT_DIRECTORY, source_locale, lang_locale, app_name)
convert_xliff_to_android_xml(input_file, TRANSLATIONS_OUTPUT_DIRECTORY, source_locale, lang_locale, lang_two_letter_code, app_name)
print(f"\033[2K{Fore.GREEN}✅ All conversions complete{Style.RESET_ALL}")

if __name__ == "__main__":
Expand Down
Loading

0 comments on commit 974ed6a

Please sign in to comment.