Diffing Algorithm Part 2 #18

Draft · wants to merge 23 commits into base: main

23 commits
8464823
added translation pull endpoint
andrew-fenton Nov 28, 2024
525f605
implemented CLI pull command
andrew-fenton Nov 29, 2024
cfa6b6c
fixed bug
andrew-fenton Nov 29, 2024
1619b42
fixed typo
andrew-fenton Nov 29, 2024
07af7fe
added test for pull endpoint
andrew-fenton Nov 30, 2024
fe81184
fixed test for pulling multiple translations
andrew-fenton Nov 30, 2024
4676c7e
added explicit post parameter for clarity
andrew-fenton Nov 30, 2024
e5d5ee5
added pip side test for pulling translations
andrew-fenton Dec 4, 2024
cad52e5
Merge branch 'main' into diffing-algorithm-part-2
andrew-fenton Dec 4, 2024
b849cc8
Implemented the push command
andrewahn-ubc Dec 4, 2024
7a4fa73
Merge branch 'diffing-algorithm-part-2' of https://github.com/ubclaun…
andrewahn-ubc Dec 4, 2024
ecc871e
fixed circular imports error
andrew-fenton Dec 5, 2024
a76b50b
added test for push
andrew-fenton Dec 5, 2024
0ac92c5
added setup command
andrew-fenton Dec 5, 2024
3e2a771
fixed circular imports
andrew-fenton Dec 6, 2024
a7d4958
fix imports
andrew-fenton Dec 6, 2024
08c5b31
minor fixes and backup languages
andrew-fenton Dec 6, 2024
9824d03
minor fixes
andrew-fenton Dec 6, 2024
a4f2dd8
disabled dirsync logging
andrew-fenton Dec 6, 2024
86ea339
fixed bug with read_json and added comments for push/pull tests
andrew-fenton Dec 15, 2024
c7767b7
fixed pip installation bug
andrew-fenton Dec 15, 2024
c0dbe1c
renamed setup.cfg
andrew-fenton Dec 15, 2024
fa34675
added rest_framework to installed apps
andrew-fenton Jan 12, 2025
1 change: 1 addition & 0 deletions core/core/settings.py
@@ -37,6 +37,7 @@
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'i18nilize',
]

Binary file modified core/db.sqlite3
Binary file not shown.
41 changes: 40 additions & 1 deletion core/i18nilize/tests.py
@@ -1010,4 +1010,43 @@ def test_bulk_translations(self):
# validate get requests
response = self.client.get(reverse('translation'), query_params=query_params_get[i], headers=headers)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
self.assertEqual(response.data['error'], 'Translation not found for given language and word!')

class PullTranslations(APITestCase):

def setUp(self):
token = Token.objects.create()
self.TEST_TOKEN = str(token.value)

def test_pulling_multiple_assigned_translations(self):
headers = {
'Token': self.TEST_TOKEN
}
translations_data = {
'translations': [
{
'language': 'spanish',
'hello': 'hola',
'bye': 'chau',
},
{
'language': 'french',
'hello': 'bonjour',
}
]
}
expected_response = {
'spanish': {
'hello': 'hola',
'bye': 'chau',
},
'french': {
'hello': 'bonjour',
}
}

self.client.post(reverse('process-translations'), data=translations_data, headers=headers, format='json')

response = self.client.get(reverse('pull-translations'), headers=headers, format='json')
response_data = response.json()
self.assertEqual(response_data, expected_response)
8 changes: 5 additions & 3 deletions core/i18nilize/urls.py
@@ -1,10 +1,12 @@
from django.urls import path
from . import views
from .views import TokenView, TranslationView
from .views import TokenView, TranslationView, ProcessTranslationsView, PullTranslations, TestTokenView

urlpatterns = [
path('token/', TokenView.as_view(), name='create-token'),
path('token/<str:value>/', TokenView.as_view(), name='read-token'),
path('test/', TestTokenView.as_view(), name='test-token'),
path('translation', TranslationView.as_view(), name='translation'),
path('translations', views.ProcessTranslationsView.as_view(), name='process-translations')
path('translations', ProcessTranslationsView.as_view(), name='process-translations'),
path('translations/pull/', PullTranslations.as_view(), name='pull-translations'),
path('translations/push/', TranslationView.as_view(), name='push-translations'),
]
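
For context, a minimal sketch (not part of this diff) of how a client could call the new pull endpoint, assuming a local server at the API_BASE_URL defined in globals.py and an already-created token (the token value below is hypothetical):

import requests

API_BASE_URL = "http://localhost:8000/api/"  # matches globals.API_BASE_URL
token = "c84234c3-b507-4ed0-a6eb-8b10116cdef1"  # hypothetical token value

# Pull every translation tied to the token, grouped by language.
response = requests.get(f"{API_BASE_URL}translations/pull/", headers={"Token": token})
print(response.json())  # e.g. {"spanish": {"hello": "hola", "bye": "chau"}}
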
45 changes: 44 additions & 1 deletion core/i18nilize/views.py
@@ -43,6 +43,21 @@ def get(self, request, value=None):
except Token.DoesNotExist:
return Response({'error': 'Token not found.'}, status=status.HTTP_404_NOT_FOUND)

class TestTokenView(APIView):
"""
Endpoint to delete all translations tied to a token for testing.
"""
@require_valid_token
def delete(self, request):
token = request.token
try:
translations = Translation.objects.filter(token=token)
for t in translations:
t.delete()
except Exception as e:
print(e)
return Response({'error': 'Could not delete all translations for given token.'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
return Response({'message': 'Deleted all translations tied to given token.'}, status=status.HTTP_200_OK)

class ProcessTranslationsView(APIView):
"""
@@ -304,4 +319,32 @@ def delete(self, request):

# Throw a bad request if the translation doesn't exist
except Translation.DoesNotExist:
return Response({"error": "translation doesn't exist!"}, status=status.HTTP_404_NOT_FOUND)
return Response({"error": "translation doesn't exist!"}, status=status.HTTP_404_NOT_FOUND)

class PullTranslations(APIView):
"""
Pulls all translations for a given token.
"""
@require_valid_token
def get(self, request):
token = request.token

try:
translations = Translation.objects.filter(token=token)

# Consolidate all translations into a single dictionary that follows
# the format of the local translation files so those files can be overwritten easily.
response_data = {}
for translation in translations:
language = translation.language.lower()
original_word = translation.original_word
translated_word = translation.translated_word

if language not in response_data:
response_data[language] = {}
response_data[language][original_word] = translated_word
except Exception as e:
print(e)
return Response({"error": "could not fetch translations"}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

return Response(response_data, status=status.HTTP_200_OK)
39 changes: 37 additions & 2 deletions i18nilize/pyproject.toml
@@ -1,3 +1,38 @@
[build-system]
requires = ['setuptools>=42']
build-backend = 'setuptools.build_meta'
requires = ["setuptools >= 42", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "localization"
version = "1.0.0"
authors = [
{ name = "UBC Launchpad", email = "[email protected]" }
]
description = "A localization package for microservices"
readme = "readme.md"
license = { file = "LICENSE.txt" }
keywords = ["localization", "microservices"]
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent"
]
dependencies = [
"requests>=2.25.1",
"geocoder>=1.38.1",
"geopy>=2.2.0",
"Babel>=2.9.1",
"dirsync>=2.2.5",
]

[project.scripts]
i18nilize = "src.internationalize.command_line:cli"

[tool.setuptools]
packages = ["src"]

[tool.setuptools.package-dir]
src = "src"

# [tool.setuptools.packages.find]
# where = ["src"]
File renamed without changes.
2 changes: 1 addition & 1 deletion i18nilize/src/internationalize/api_helpers.py
@@ -1,7 +1,7 @@
# api_helpers.py

import requests
from . import globals
import sys

def create_token():
25 changes: 23 additions & 2 deletions i18nilize/src/internationalize/command_line.py
@@ -1,7 +1,10 @@
#from src.internationalize.helpers import add_language
import json
import argparse
from i18nilize.src.internationalize.helpers import add_language, add_update_translated_word, delete_translation
from src.internationalize.helpers import add_language, add_update_translated_word, delete_translation
from src.internationalize.sync_processor import pull_translations, push_translations
from src.internationalize.diffing_processor import DiffingProcessor
from src.internationalize import globals

def cli():
# initialize the parser
@@ -30,6 +33,15 @@ def cli():
delete_parser.add_argument('original_word')
delete_parser.add_argument('translated_word')

# sub parser for pull
pull_parser = subparsers.add_parser('pull')

# sub parser for push
push_parser = subparsers.add_parser('push')

# sub parser for setup
setup_parser = subparsers.add_parser('setup')

# subparsers are used because different commands take a different number of inputs

args = parser.parse_args()
@@ -41,7 +53,16 @@ def cli():
add_update_translated_word(args.language, args.original_word, args.translated_word)
elif args.command == 'delete':
delete_translation(args.language, args.original_word, args.translated_word)
elif args.command == 'pull':
pull_translations()
elif args.command == 'push':
push_translations()
elif args.command == 'setup':
# Quick fix for now
dp = DiffingProcessor(globals.LANGUAGES_DIR)
dp.setup()
else:
print("Invalid command")

cli()
if __name__ == "__main__":
cli()
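
For context, the new subcommands map directly onto the imported helpers; a rough programmatic equivalent (a sketch, not part of this diff, using the modules imported above) would be:

from src.internationalize.sync_processor import pull_translations, push_translations
from src.internationalize.diffing_processor import DiffingProcessor
from src.internationalize import globals

pull_translations()                              # i18nilize pull
push_translations()                              # i18nilize push
DiffingProcessor(globals.LANGUAGES_DIR).setup()  # i18nilize setup
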
3 changes: 3 additions & 0 deletions i18nilize/src/internationalize/default_languages/chinese.json
@@ -0,0 +1,3 @@
{
"thank you": "\u8c22\u8c22"
}
4 changes: 4 additions & 0 deletions i18nilize/src/internationalize/default_languages/french.json
@@ -0,0 +1,4 @@
{
"thanks": "merci",
"hello": "bonjour"
}
3 changes: 3 additions & 0 deletions i18nilize/src/internationalize/default_languages/german.json
@@ -0,0 +1,3 @@
{
"thank you": "danke"
}
3 changes: 3 additions & 0 deletions i18nilize/src/internationalize/default_languages/korean.json
@@ -0,0 +1,3 @@
{
"welcome": "\ud658\uc601\ud569\ub2c8\ub2e4"
}
4 changes: 4 additions & 0 deletions i18nilize/src/internationalize/default_languages/spanish.json
@@ -0,0 +1,4 @@
{
"hello": "hola",
"thanks": "gracias"
}
12 changes: 9 additions & 3 deletions i18nilize/src/internationalize/diffing_processor.py
@@ -1,6 +1,7 @@
import os
import hashlib
import json
import logging
from dirsync import sync
from src.internationalize.helpers import compute_hash, compute_hashes, read_json_file

@@ -16,6 +17,7 @@
"""
class DiffingProcessor():
def __init__(self, curr_translations_dir):
logging.getLogger('dirsync').disabled = True
self.diff_state_root_dir = "diff_state"
self.diff_state_files_dir = os.path.join(self.diff_state_root_dir, "translations")
self.metadata_file_dir = os.path.join(self.diff_state_root_dir, "metadata.json")
@@ -26,8 +28,10 @@ def __init__(self, curr_translations_dir):
"""
def setup(self):
try:
os.mkdir(self.diff_state_root_dir)
os.mkdir(self.diff_state_files_dir)
if not os.path.exists(self.diff_state_root_dir):
os.mkdir(self.diff_state_root_dir)
if not os.path.exists(self.diff_state_files_dir):
os.mkdir(self.diff_state_files_dir)
with open(self.metadata_file_dir, "w") as outfile:
json.dump({}, outfile)

@@ -48,7 +52,9 @@ def setup(self):
"""
Updates translation files with new changes and updates hashes in metadata.
"""
def update_to_current_state(self, hash_dict):
def update_to_current_state(self, hash_dict=None):
if hash_dict is None:
hash_dict = compute_hashes(self.curr_translation_files_dir)
self.update_metadata(hash_dict)
self.sync_translations()

6 changes: 5 additions & 1 deletion i18nilize/src/internationalize/globals.py
@@ -1,13 +1,17 @@
# globals.py

# Test Token: "c84234c3-b507-4ed0-a6eb-8b10116cdef1"

class GlobalToken:
def __init__(self):
self.value = "dummy"
self.value = "dummy"

API_BASE_URL = "http://localhost:8000/api/"

TOKEN_ENDPOINT = f"{API_BASE_URL}token/"
TRANSLATIONS_ENDPOINT = f"{API_BASE_URL}translations/"
PULL_TRANSLATIONS_ENDPOINT = f"{TRANSLATIONS_ENDPOINT}pull/"
PUSH_TRANSLATIONS_ENDPOINT = f"{TRANSLATIONS_ENDPOINT}push/"

LANGUAGES_DIR = 'src/internationalize/languages'

2 changes: 1 addition & 1 deletion i18nilize/src/internationalize/helpers.py
@@ -9,7 +9,7 @@
def get_json(file_path):
try:
# open file and parse
with open(file_path, 'r') as file:
with open(file_path, 'r', encoding='utf8') as file:
data = json.load(file)
except FileNotFoundError:
print("File not found")
67 changes: 67 additions & 0 deletions i18nilize/src/internationalize/sync_processor.py
@@ -0,0 +1,67 @@
import requests, os, json
from . import globals
from src.internationalize.diffing_processor import DiffingProcessor

"""
Pulls all translations assigned to the microservice's token
and overwrites the local language files to sync translations.
"""
def pull_translations(write_directory=globals.LANGUAGES_DIR):
token = globals.token.value
diff_processor = DiffingProcessor(write_directory)

try:
all_translations = requests.get(globals.PULL_TRANSLATIONS_ENDPOINT, headers={'Token': token})
except Exception as e:
print("Error: Could not fetch translations from database.", e)

# Overwrite all translation files
all_translations_dict = all_translations.json()
for language, translations in all_translations_dict.items():
file_name = f"{language}.json"
curr_file_path = os.path.join(write_directory, file_name)
with open(curr_file_path, "w+") as file:
json.dump(translations, file, indent=4)

diff_processor.update_to_current_state()
print(f"Pulled all translations from the database.")

"""
Pushes all local translation changes (created, modified, and deleted words) to the API.
"""
def push_translations(translations_dir=globals.LANGUAGES_DIR):
token = globals.token.value
diff_processor = DiffingProcessor(translations_dir)
changed_translations = diff_processor.get_changed_translations()

for language in changed_translations:
created = changed_translations[language]["created"]
modified = changed_translations[language]["modified"]
deleted = changed_translations[language]["deleted"]

# Post a new entry for each new translation
for original_word in created:
try:
response = requests.post(globals.PUSH_TRANSLATIONS_ENDPOINT, headers={'Token': token},
params={'language': language, original_word: created[original_word]})
except Exception as e:
print("Error: Could not create translation.", e)

# Patch the appropriate entry for each modified translation
for original_word in modified:
try:
response = requests.patch(globals.PUSH_TRANSLATIONS_ENDPOINT, headers={'Token': token},
params={'language': language, original_word: modified[original_word]})
except Exception as e:
print("Error: Could not patch translation.", e)

# Delete the appropriate entry for each deleted translation
for original_word in deleted:
try:
response = requests.delete(globals.PUSH_TRANSLATIONS_ENDPOINT, headers={'Token': token},
params={'language': language, original_word: deleted[original_word]})
except Exception as e:
print("Error: Could not delete translation.", e)

diff_processor.update_to_current_state()
print(f"Pushed all translations from the database.")