Delete two unused i18n scripts

These date back to before Blockly Games was separated from Blockly.
Neil Fraser
2019-04-25 16:42:53 -07:00
committed by Neil Fraser
parent de6bf6e6ab
commit c438188ab1
4 changed files with 2 additions and 419 deletions


@@ -37,7 +37,7 @@ def string_is_ascii(s):
 def load_constants(filename):
   """Read in constants file, which must be output in every language."""
-  constant_defs = read_json_file(filename);
+  constant_defs = read_json_file(filename)
   constants_text = '\n'
   for key in constant_defs:
     value = constant_defs[key]


@@ -17,7 +17,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-"""Extracts messages from .js files into .json files for translation.
+"""Extracts messages from messages.js file into .json files for translation.
 Specifically, lines with the following formats are extracted:


@@ -1,185 +0,0 @@
#!/usr/bin/python

# Converts .json files into .js files for use within Blockly apps.
#
# Copyright 2013 Google Inc.
# https://developers.google.com/blockly/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import codecs      # for codecs.open(..., 'utf-8')
import glob
import json        # for json.load()
import os          # for os.path()
import subprocess  # for subprocess.check_call()
from common import InputError
from common import read_json_file

# Store parsed command-line arguments in global variable.
args = None


def _create_xlf(target_lang):
  """Creates a <target_lang>.xlf file for Soy.

  Args:
      target_lang: The ISO 639 language code for the target language.
          This is used in the name of the file and in the metadata.

  Returns:
      A pointer to a file to which the metadata has been written.

  Raises:
      IOError: An error occurred while opening or writing the file.
  """
  filename = os.path.join(os.curdir, args.output_dir, target_lang + '.xlf')
  out_file = codecs.open(filename, 'w', 'utf-8')
  out_file.write("""<?xml version="1.0" encoding="UTF-8"?>
<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2">
  <file original="SoyMsgBundle"
        datatype="x-soy-msg-bundle"
        xml:space="preserve"
        source-language="{0}"
        target-language="{1}">
    <body>""".format(args.source_lang, target_lang))
  return out_file


def _close_xlf(xlf_file):
  """Closes a <target_lang>.xlf file created with create_xlf().

  This includes writing the terminating XML.

  Args:
      xlf_file: A pointer to a file created by _create_xlf().

  Raises:
      IOError: An error occurred while writing to or closing the file.
  """
  xlf_file.write("""
    </body>
  </file>
</xliff>
""")
  xlf_file.close()


def _process_file(path_to_json, target_lang, key_dict):
  """Creates an .xlf file corresponding to the specified .json input file.

  The name of the input file must be target_lang followed by '.json'.
  The name of the output file will be target_lang followed by '.js'.

  Args:
      path_to_json: Path to the directory of xx.json files.
      target_lang: A IETF language code (RFC 4646), such as 'es' or 'pt-br'.
      key_dict: Dictionary mapping Blockly keys (e.g., Maze.turnLeft) to
          Closure keys (hash numbers).

  Raises:
      IOError: An I/O error occurred with an input or output file.
      InputError: Input JSON could not be parsed.
      KeyError: Key found in input file but not in key file.
  """
  keyfile = os.path.join(path_to_json, target_lang + '.json')
  j = read_json_file(keyfile)
  out_file = _create_xlf(target_lang)
  for key in j:
    if key != '@metadata':
      try:
        identifier = key_dict[key]
      except KeyError as e:
        print('Key "%s" is in %s but not in %s' %
              (key, keyfile, args.key_file))
        raise e
      target = j.get(key)
      out_file.write(u"""
      <trans-unit id="{0}" datatype="html">
        <target>{1}</target>
      </trans-unit>""".format(identifier, target))
  _close_xlf(out_file)


def main():
  """Parses arguments and iterates over files."""
  # Set up argument parser.
  parser = argparse.ArgumentParser(description='Convert JSON files to JS.')
  parser.add_argument('--source_lang', default='en',
                      help='ISO 639-1 source language code')
  parser.add_argument('--output_dir', default='generated',
                      help='relative directory for output files')
  parser.add_argument('--key_file', default='json' + os.path.sep + 'keys.json',
                      help='relative path to input keys file')
  parser.add_argument('--template', default='template.soy')
  parser.add_argument('--path_to_jar',
                      default='..' + os.path.sep + 'apps' + os.path.sep
                      + '_soy',
                      help='relative path from working directory to '
                      'SoyToJsSrcCompiler.jar')
  parser.add_argument('files', nargs='+', help='input files')

  # Initialize global variables.
  global args
  args = parser.parse_args()

  # Make sure output_dir ends with slash.
  if (not args.output_dir.endswith(os.path.sep)):
    args.output_dir += os.path.sep

  # Read in keys.json, mapping descriptions (e.g., Maze.turnLeft) to
  # Closure keys (long hash numbers).
  key_file = open(args.key_file)
  key_dict = json.load(key_file)
  key_file.close()

  # Process each input file.
  print('Creating .xlf files...')
  processed_langs = []
  if len(args.files) == 1:
    # Windows does not expand globs automatically.
    args.files = glob.glob(args.files[0])
  for arg_file in args.files:
    (path_to_json, filename) = os.path.split(arg_file)
    if not filename.endswith('.json'):
      raise InputError(filename, 'filenames must end with ".json"')
    target_lang = filename[:filename.index('.')]
    if not target_lang in ('qqq', 'keys'):
      processed_langs.append(target_lang)
      _process_file(path_to_json, target_lang, key_dict)

  # Output command line for Closure compiler.
  if processed_langs:
    print('Creating .js files...')
    processed_lang_list = ','.join(processed_langs)
    subprocess.check_call([
        'java',
        '-jar', os.path.join(args.path_to_jar, 'SoyToJsSrcCompiler.jar'),
        '--locales', processed_lang_list,
        '--messageFilePathFormat', args.output_dir + '{LOCALE}.xlf',
        '--outputPathFormat', args.output_dir + '{LOCALE}.js',
        '--srcs', args.template])
    if len(processed_langs) == 1:
      print('Created ' + processed_lang_list + '.js in ' + args.output_dir)
    else:
      print('Created {' + processed_lang_list + '}.js in ' + args.output_dir)
    for lang in processed_langs:
      os.remove(args.output_dir + lang + '.xlf')
    print('Removed .xlf files.')


if __name__ == '__main__':
  main()


@@ -1,232 +0,0 @@
#!/usr/bin/python

# Converts .xlf files into .json files for use at http://translatewiki.net.
#
# Copyright 2013 Google Inc.
# https://developers.google.com/blockly/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import os
import re
import subprocess
import sys
from xml.dom import minidom
from common import InputError
from common import write_files

# Global variables
args = None  # Parsed command-line arguments.


def _parse_trans_unit(trans_unit):
  """Converts a trans-unit XML node into a more convenient dictionary format.

  Args:
      trans_unit: An XML representation of a .xlf translation unit.

  Returns:
      A dictionary with useful information about the translation unit.
      The returned dictionary is guaranteed to have an entry for 'key' and
      may have entries for 'source', 'target', 'description', and 'meaning'
      if present in the argument.

  Raises:
      InputError: A required field was not present.
  """
  def get_value(tag_name):
    elts = trans_unit.getElementsByTagName(tag_name)
    if not elts:
      return None
    elif len(elts) == 1:
      return ''.join([child.toxml() for child in elts[0].childNodes])
    else:
      raise InputError('', 'Unable to extract ' + tag_name)

  result = {}
  key = trans_unit.getAttribute('id')
  if not key:
    raise InputError('', 'id attribute not found')
  result['key'] = key

  # Get source and target, if present.
  try:
    result['source'] = get_value('source')
    result['target'] = get_value('target')
  except InputError as e:
    raise InputError(key, e.msg)

  # Get notes, using the from value as key and the data as value.
  notes = trans_unit.getElementsByTagName('note')
  for note in notes:
    from_value = note.getAttribute('from')
    if from_value and len(note.childNodes) == 1:
      result[from_value] = note.childNodes[0].data
    else:
      raise InputError(key, 'Unable to extract ' + from_value)

  return result


def _process_file(filename):
  """Builds list of translation units from input file.

  Each translation unit in the input file includes:
  - an id (opaquely generated by Soy)
  - the Blockly name for the message
  - the text in the source language (generally English)
  - a description for the translator

  The Soy and Blockly ids are joined with a hyphen and serve as the
  keys in both output files.  The value is the corresponding text (in the
  <lang>.json file) or the description (in the qqq.json file).

  Args:
      filename: The name of an .xlf file produced by Closure.

  Raises:
      IOError: An I/O error occurred with an input or output file.
      InputError: The input file could not be parsed or lacked required
          fields.

  Returns:
      A list of dictionaries produced by parse_trans_unit().
  """
  try:
    results = []  # list of dictionaries (return value)
    names = []  # list of names of encountered keys (local variable)
    try:
      parsed_xml = minidom.parse(filename)
    except IOError:
      # Don't get caught by below handler
      raise
    except Exception as e:
      print()
      raise InputError(filename, str(e))

    # Make sure needed fields are present and non-empty.
    for trans_unit in parsed_xml.getElementsByTagName('trans-unit'):
      unit = _parse_trans_unit(trans_unit)
      for key in ['description', 'meaning', 'source']:
        if not key in unit or not unit[key]:
          raise InputError(filename + ':' + unit['key'],
                           key + ' not found')
      if unit['description'].lower() == 'ibid':
        if unit['meaning'] not in names:
          # If the term has not already been described, the use of 'ibid'
          # is an error.
          raise InputError(
              filename,
              'First encountered definition of: ' + unit['meaning']
              + ' has definition: ' + unit['description']
              + '. This error can occur if the definition was not'
              + ' provided on the first appearance of the message'
              + ' or if the source (English-language) messages differ.')
        else:
          # If term has already been described, 'ibid' was used correctly,
          # and we output nothing.
          pass
      else:
        if unit['meaning'] in names:
          raise InputError(filename,
                           'Second definition of: ' + unit['meaning'])
        names.append(unit['meaning'])
        results.append(unit)

    return results
  except IOError as e:
    print('Error with file {0}: {1}'.format(filename, e.strerror))
    sys.exit(1)


def sort_units(units, templates):
  """Sorts the translation units by their definition order in the template.

  Args:
      units: A list of dictionaries produced by parse_trans_unit()
          that have a non-empty value for the key 'meaning'.
      templates: A string containing the Soy templates in which each of
          the units' meanings is defined.

  Returns:
      A new list of translation units, sorted by the order in which
      their meaning is defined in the templates.

  Raises:
      InputError: If a meaning definition cannot be found in the
          templates.
  """
  def key_function(unit):
    match = re.search(
        '\\smeaning\\s*=\\s*"{0}"\\s'.format(unit['meaning']),
        templates)
    if match:
      return match.start()
    else:
      raise InputError(args.templates,
                       'msg definition for meaning not found: ' +
                       unit['meaning'])
  return sorted(units, key=key_function)


def main():
  """Parses arguments and processes the specified file.

  Raises:
      IOError: An I/O error occurred with an input or output file.
      InputError: Input files lacked required fields.
  """
  # Set up argument parser.
  parser = argparse.ArgumentParser(description='Create translation files.')
  parser.add_argument(
      '--author',
      default='Ellen Spertus <ellen.spertus@gmail.com>',
      help='name and email address of contact for translators')
  parser.add_argument('--lang', default='en',
                      help='ISO 639-1 source language code')
  parser.add_argument('--output_dir', default='json',
                      help='relative directory for output files')
  parser.add_argument('--xlf', help='file containing xlf definitions')
  parser.add_argument('--templates', default=['template.soy'], nargs='+',
                      help='relative path to Soy templates, comma or space '
                      'separated (used for ordering messages)')
  global args
  args = parser.parse_args()

  # Make sure output_dir ends with slash.
  if (not args.output_dir.endswith(os.path.sep)):
    args.output_dir += os.path.sep

  # Process the input file, and sort the entries.
  units = _process_file(args.xlf)
  files = []
  for arg in args.templates:
    for filename in arg.split(','):
      filename = filename.strip();
      if filename:
        with open(filename) as myfile:
          files.append(' '.join(line.strip() for line in myfile))
  sorted_units = sort_units(units, ' '.join(files))

  # Write the output files.
  write_files(args.author, args.lang, args.output_dir, sorted_units, True)

  # Delete the input .xlf file.
  os.remove(args.xlf)
  print('Removed ' + args.xlf)


if __name__ == '__main__':
  main()