"""
The contents of this file are taken from
https://github.com/niwinz/django-jinja/blob/master/django_jinja/management/commands/makemessages.py

Jinja2's i18n functionality is not exactly the same as Django's.
In particular, the tag names and their syntax are different:

1. The Django ``trans`` tag is replaced by a ``_()`` global.
2. The Django ``blocktrans`` tag is called ``trans``.
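
For example, where a Django template writes
``{% blocktrans %}Hello {{ name }}{% endblocktrans %}``, the equivalent
Jinja2 template is ``{% trans %}Hello {{ name }}{% endtrans %}``.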

(1) isn't an issue, since the whole ``makemessages`` process is based on
converting the template tags to ``_()`` calls. However, (2) means that
those Jinja2 ``trans`` tags will not be picked up by Django's
``makemessages`` command.

There aren't any nice solutions here. While Jinja2's i18n extension does
come with extraction capabilities built in, the code behind ``makemessages``
unfortunately isn't extensible, so we can:

* Duplicate the command + code behind it.
* Offer a separate command for Jinja2 extraction.
* Try to get Django to offer hooks into makemessages().
* Monkey-patch.

We are currently doing that last thing. It turns out we are lucky for
once: it's simply a matter of extending a few regular expressions.
Credit for the approach goes to:
https://stackoverflow.com/questions/2090717
"""

import glob
import itertools
import json
import os
import re
from argparse import ArgumentParser
from typing import Any, Dict, Iterable, List, Mapping

from django.core.management.commands import makemessages
from django.template.base import BLOCK_TAG_END, BLOCK_TAG_START
from django.utils.translation import template

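# Jinja2's whitespace-trimming block markers ({%- trans -%} and friends)
# are not understood by Django's templatize(); these patterns find the
# stray whitespace around trans/endtrans/pluralize tags so that it can be
# stripped before the source is handed to Django.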
strip_whitespace_right = re.compile("(%s-?\\s*(trans|pluralize).*?-%s)\\s+" % (
    BLOCK_TAG_START, BLOCK_TAG_END), re.U)
strip_whitespace_left = re.compile("\\s+(%s-\\s*(endtrans|pluralize).*?-?%s)" % (
    BLOCK_TAG_START, BLOCK_TAG_END), re.U)

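# Regexes for pulling translatable strings out of the frontend:
# {{#tr}}...{{/tr}} blocks and {{t "..."}} helpers in Handlebars
# templates, =(t "...") subexpressions, and i18n.t(...) calls in
# JavaScript/TypeScript.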
regexes = [r'{{#tr .*?}}([\s\S]*?){{/tr}}',  # '.' doesn't match '\n' by default
           r'{{\s*t "(.*?)"\W*}}',
           r"{{\s*t '(.*?)'\W*}}",
           r'=\(t "(.*?)"\)(?=[^{]*}})',
           r"=\(t '(.*?)'\)(?=[^{]*}})",
           r"i18n\.t\('([^']*?)'\)",
           r"i18n\.t\('(.*?)',\s*.*?[^,]\)",
           r'i18n\.t\("([^"]*?)"\)',
           r'i18n\.t\("(.*?)",\s*.*?[^,]\)',
           ]
tags = [('err_', "error"),
        ]

frontend_compiled_regexes = [re.compile(regex) for regex in regexes]
multiline_js_comment = re.compile(r"/\*.*?\*/", re.DOTALL)
singleline_js_comment = re.compile("//.*?\n")

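# Apply the whitespace-stripping patterns above to a template source, so
# that the patched templatize() below sees tags in the form Django expects.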
def strip_whitespaces(src: str) -> str:
    src = strip_whitespace_left.sub('\\1', src)
    src = strip_whitespace_right.sub('\\1', src)
    return src


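# The command runs two extraction passes: the stock Django/xgettext pass
# (with the Jinja2-aware patches below), then a frontend pass that scans
# Handlebars/JS/TS sources and writes per-locale JSON catalogs.  The extra
# xgettext --keyword options teach xgettext to extract the first argument
# of each helper listed in `tags` (e.g. err_("...")), attaching the tag as
# an automatic comment.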
class Command(makemessages.Command):

    xgettext_options = makemessages.Command.xgettext_options
    for func, tag in tags:
        xgettext_options += [f'--keyword={func}:1,"{tag}"']

    def add_arguments(self, parser: ArgumentParser) -> None:
        super().add_arguments(parser)
        parser.add_argument('--frontend-source', type=str,
                            default='static/templates',
                            help='Name of the Handlebars template directory')
        parser.add_argument('--frontend-output', type=str,
                            default='locale',
                            help='Name of the frontend messages output directory')
        parser.add_argument('--frontend-namespace', type=str,
                            default='translations.json',
                            help='Namespace of the frontend locale file')

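    # Run the stock Django extraction first, then the frontend pass.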
    def handle(self, *args: Any, **options: Any) -> None:
        self.handle_django_locales(*args, **options)
        self.handle_frontend_locales(**options)

    def handle_frontend_locales(self, *,
                                frontend_source: str,
                                frontend_output: str,
                                frontend_namespace: str,
                                locale: List[str],
                                exclude: List[str],
                                all: bool,
                                **options: Any) -> None:
        self.frontend_source = frontend_source
        self.frontend_output = frontend_output
        self.frontend_namespace = frontend_namespace
        self.frontend_locale = locale
        self.frontend_exclude = exclude
        self.frontend_all = all

        translation_strings = self.get_translation_strings()
        self.write_translation_strings(translation_strings)

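    # Pass 1: Django/Jinja2 templates.  Swap Jinja2-aware regexes into
    # django.utils.translation.template for the duration of the run, and
    # restore the originals afterwards so other commands see stock behavior.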
    def handle_django_locales(self, *args: Any, **options: Any) -> None:
        old_endblock_re = template.endblock_re
        old_block_re = template.block_re
        old_plural_re = template.plural_re
        old_constant_re = template.constant_re

        old_templatize = template.templatize
        # Extend the regular expressions that are used to detect
        # translation blocks with an "OR jinja-syntax" clause.
        template.endblock_re = re.compile(
            template.endblock_re.pattern + '|' + r"""^-?\s*endtrans\s*-?$""")
        template.block_re = re.compile(
            template.block_re.pattern + '|' + r"""^-?\s*trans(?:\s+(?!'|")(?=.*?=.*?)|\s*-?$)""")
        template.plural_re = re.compile(
            template.plural_re.pattern + '|' + r"""^-?\s*pluralize(?:\s+.+|-?$)""")
        template.constant_re = re.compile(r"""_\(((?:".*?")|(?:'.*?')).*\)""")

        def my_templatize(src: str, *args: Any, **kwargs: Any) -> str:
            new_src = strip_whitespaces(src)
            return old_templatize(new_src, *args, **kwargs)

        template.templatize = my_templatize

        try:
            ignore_patterns = options.get('ignore_patterns', [])
            ignore_patterns.append('docs/*')
            ignore_patterns.append('var/*')
            options['ignore_patterns'] = ignore_patterns
            super().handle(*args, **options)
        finally:
            template.endblock_re = old_endblock_re
            template.block_re = old_block_re
            template.plural_re = old_plural_re
            template.templatize = old_templatize
            template.constant_re = old_constant_re

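    # Pass 2 helpers: frontend extraction.  Matches are collapsed onto a
    # single line so that multi-line Handlebars blocks yield stable keys.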
    def extract_strings(self, data: str) -> List[str]:
        translation_strings: List[str] = []
        for regex in frontend_compiled_regexes:
            for match in regex.findall(data):
                match = match.strip()
                match = ' '.join(line.strip() for line in match.splitlines())
                match = match.replace('\n', '\\n')
                translation_strings.append(match)

        return translation_strings

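    # Note: this comment stripping is regex-based and naive; a "//" inside
    # a string literal (e.g. in a URL) would also be treated as a comment.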
    def ignore_javascript_comments(self, data: str) -> str:
        # Remove multi-line (/* ... */) comments.
        data = multiline_js_comment.sub('', data)
        # Remove single-line (//) comments.
        data = singleline_js_comment.sub('', data)
        return data

    def get_translation_strings(self) -> List[str]:
        translation_strings: List[str] = []
        dirname = self.get_template_dir()

        for dirpath, dirnames, filenames in os.walk(dirname):
            for filename in [f for f in filenames if f.endswith(".hbs")]:
                if filename.startswith('.'):
                    continue
                with open(os.path.join(dirpath, filename)) as reader:
                    data = reader.read()
                    translation_strings.extend(self.extract_strings(data))

        for dirpath, dirnames, filenames in itertools.chain(os.walk("static/js"),
                                                            os.walk("static/shared/js")):
            for filename in [f for f in filenames if f.endswith(".js") or f.endswith(".ts")]:
                if filename.startswith('.'):
                    continue
                with open(os.path.join(dirpath, filename)) as reader:
                    data = reader.read()
                    data = self.ignore_javascript_comments(data)
                    translation_strings.extend(self.extract_strings(data))

        return list(set(translation_strings))

    def get_template_dir(self) -> str:
        return self.frontend_source

    def get_namespace(self) -> str:
        return self.frontend_namespace

    def get_locales(self) -> Iterable[str]:
        locale = self.frontend_locale
        exclude = self.frontend_exclude
        process_all = self.frontend_all

        paths = glob.glob('%s/*' % (self.default_locale_path,))
        all_locales = [os.path.basename(path) for path in paths if os.path.isdir(path)]

        # Account for excluded locales.
        if process_all:
            return all_locales
        else:
            locales = locale or all_locales
            return set(locales) - set(exclude)

    def get_base_path(self) -> str:
        return self.frontend_output

    def get_output_paths(self) -> Iterable[str]:
        base_path = self.get_base_path()
        locales = self.get_locales()
        for path in [os.path.join(base_path, locale) for locale in locales]:
            if not os.path.exists(path):
                os.makedirs(path)

            yield os.path.join(path, self.get_namespace())

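    # Merge newly extracted strings into an existing catalog.  Keys ending
    # in "_plural" follow the i18next-style plural convention of the
    # frontend JSON format; an existing plural entry is carried over
    # whenever its singular key is still live.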
    def get_new_strings(self, old_strings: Mapping[str, str],
                        translation_strings: List[str], locale: str) -> Dict[str, str]:
        """
        Missing strings are removed, new strings are added, and already
        translated strings are not touched.
        """
        new_strings: Dict[str, str] = {}
        for k in translation_strings:
            k = k.replace('\\n', '\n')
            if locale == 'en':
                # For the English language, the translation is equal to the key.
                new_strings[k] = old_strings.get(k, k)
            else:
                new_strings[k] = old_strings.get(k, "")

        plurals = {k: v for k, v in old_strings.items() if k.endswith('_plural')}
        for plural_key, value in plurals.items():
            components = plural_key.split('_')
            singular_key = '_'.join(components[:-1])
            if singular_key in new_strings:
                new_strings[plural_key] = value

        return new_strings

    def write_translation_strings(self, translation_strings: List[str]) -> None:
        for locale, output_path in zip(self.get_locales(), self.get_output_paths()):
            self.stdout.write(f"[frontend] processing locale {locale}")
            try:
                with open(output_path) as reader:
                    old_strings = json.load(reader)
            except (OSError, ValueError):
                old_strings = {}

            new_strings = self.get_new_strings(old_strings,
                                               translation_strings,
                                               locale)
            with open(output_path, 'w') as writer:
                json.dump(new_strings, writer, indent=2, sort_keys=True)