mirror of https://github.com/google/blockly.git
synced 2026-01-07 00:50:27 +01:00
Merge pull request #2123 from gomercin/python3_upgrade
Make build.py compatible with Python 2 and 3
build.py (42 changed lines)
@@ -53,9 +53,6 @@
 # msg/js/<LANG>.js for every language <LANG> defined in msg/js/<LANG>.json.

 import sys
-if sys.version_info[0] != 2:
-  raise Exception("Blockly build only compatible with Python 2.x.\n"
-                  "You are using: " + sys.version)

 for arg in sys.argv[1:len(sys.argv)]:
   if (arg != 'core' and
@@ -65,8 +62,15 @@ for arg in sys.argv[1:len(sys.argv)]:
     raise Exception("Invalid argument: \"" + arg + "\". Usage: build.py "
         "<0 or more of accessible, core, generators, langfiles>")

-import errno, glob, httplib, json, os, re, subprocess, threading, urllib
+import errno, glob, json, os, re, subprocess, threading, codecs
+
+if sys.version_info[0] == 2:
+  import httplib
+  from urllib import urlencode
+else:
+  import http.client as httplib
+  from urllib.parse import urlencode
+  from importlib import reload

 def import_path(fullpath):
   """Import a file with full path specification.
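The replacement import block binds the renamed Python 3 modules under their Python 2 names, so the call sites further down need no version checks. A minimal standalone sketch of the same pattern (the request body below is invented for illustration):

import sys

# Bind the renamed modules under their Python 2 names so the rest of the
# script can use one spelling on either interpreter.
if sys.version_info[0] == 2:
  import httplib                       # Python 2 name
  from urllib import urlencode
else:
  import http.client as httplib        # renamed to http.client in Python 3
  from urllib.parse import urlencode   # urlencode moved to urllib.parse

body = urlencode({"js_code": "var x = 1;"})
conn = httplib.HTTPSConnection("closure-compiler.appspot.com")
print(body)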
@@ -234,8 +238,8 @@ class Gen_compressed(threading.Thread):
       # Filter out the Closure files (the compiler will add them).
       if filename.startswith(os.pardir + os.sep): # '../'
         continue
-      f = open(filename)
-      params.append(("js_code", "".join(f.readlines())))
+      f = codecs.open(filename, encoding="utf-8")
+      params.append(("js_code", "".join(f.readlines()).encode("utf-8")))
       f.close()

     self.do_compile(params, target_filename, filenames, "")
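In each of these read loops, the default-encoding open() becomes codecs.open(..., encoding="utf-8"), which returns decoded text on both interpreters; the text is then re-encoded to UTF-8 bytes before being appended as a form parameter. A self-contained sketch of that round trip, using an invented temp file in place of a real Blockly source:

import codecs, os, tempfile

# Stand-in for a Blockly source file; the name and content are invented.
path = os.path.join(tempfile.gettempdir(), "example_block.js")
out = codecs.open(path, "w", encoding="utf-8")
out.write(u'Blockly.Msg.HELLO = "caf\u00e9";\n')
out.close()

# codecs.open returns decoded text on Python 2 and Python 3 alike ...
f = codecs.open(path, encoding="utf-8")
js_code = "".join(f.readlines())
f.close()

# ... and re-encoding yields UTF-8 bytes that can go into the POST body.
params = [("js_code", js_code.encode("utf-8"))]
print(len(params[0][1]))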
@@ -263,8 +267,8 @@ class Gen_compressed(threading.Thread):
       # Filter out the Closure files (the compiler will add them).
       if filename.startswith(os.pardir + os.sep): # '../'
         continue
-      f = open(filename)
-      params.append(("js_code", "".join(f.readlines())))
+      f = codecs.open(filename, encoding="utf-8")
+      params.append(("js_code", "".join(f.readlines()).encode("utf-8")))
       f.close()

     self.do_compile(params, target_filename, filenames, "")
@@ -288,8 +292,8 @@ class Gen_compressed(threading.Thread):
     filenames = glob.glob(os.path.join("blocks", "*.js"))
     filenames.sort() # Deterministic build.
     for filename in filenames:
-      f = open(filename)
-      params.append(("js_code", "".join(f.readlines())))
+      f = codecs.open(filename, encoding="utf-8")
+      params.append(("js_code", "".join(f.readlines()).encode("utf-8")))
       f.close()

     # Remove Blockly.Blocks to be compatible with Blockly.
@@ -317,8 +321,8 @@ class Gen_compressed(threading.Thread):
     filenames.sort() # Deterministic build.
     filenames.insert(0, os.path.join("generators", language + ".js"))
     for filename in filenames:
-      f = open(filename)
-      params.append(("js_code", "".join(f.readlines())))
+      f = codecs.open(filename, encoding="utf-8")
+      params.append(("js_code", "".join(f.readlines()).encode("utf-8")))
       f.close()
     filenames.insert(0, "[goog.provide]")

@@ -330,7 +334,7 @@ class Gen_compressed(threading.Thread):
     # Send the request to Google.
     headers = {"Content-type": "application/x-www-form-urlencoded"}
     conn = httplib.HTTPSConnection("closure-compiler.appspot.com")
-    conn.request("POST", "/compile", urllib.urlencode(params), headers)
+    conn.request("POST", "/compile", urlencode(params), headers)
     response = conn.getresponse()
     json_str = response.read()
     conn.close()
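Because the import shim aliases the Python 3 modules to their Python 2 names, the compile request itself only has to drop the urllib. prefix. A hedged sketch of the same request sequence, pointed at httpbin.org purely as a placeholder endpoint (not part of build.py), with the response decoded from bytes before json.loads for older Python 3 versions:

import json, sys

if sys.version_info[0] == 2:
  import httplib
  from urllib import urlencode
else:
  import http.client as httplib
  from urllib.parse import urlencode

params = [("js_code", "var x = 1;"), ("output_format", "json")]
headers = {"Content-type": "application/x-www-form-urlencoded"}

conn = httplib.HTTPSConnection("httpbin.org")       # placeholder host
conn.request("POST", "/post", urlencode(params), headers)
response = conn.getresponse()
json_str = response.read()                           # bytes on Python 3
conn.close()

# Decoding before json.loads keeps this working on Python 3 < 3.6 as well.
json_data = json.loads(json_str.decode("utf-8"))
print(sorted(json_data.keys()))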
@@ -349,12 +353,12 @@ class Gen_compressed(threading.Thread):
       n = int(name[6:]) - 1
       return filenames[n]

-    if json_data.has_key("serverErrors"):
+    if "serverErrors" in json_data:
       errors = json_data["serverErrors"]
       for error in errors:
         print("SERVER ERROR: %s" % target_filename)
         print(error["error"])
-    elif json_data.has_key("errors"):
+    elif "errors" in json_data:
       errors = json_data["errors"]
       for error in errors:
         print("FATAL ERROR")
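dict.has_key() was removed in Python 3, whereas the in operator means the same thing on both interpreters, so the membership tests are simply rewritten rather than guarded. A small illustration with an invented response dictionary:

# Invented stand-in for the compiler's JSON response.
json_data = {"compiledCode": "var x=1;", "statistics": {}}

# Python 2 only: json_data.has_key("errors")  -> AttributeError on Python 3
# Portable on both:
if "serverErrors" in json_data:
  print("server errors")
elif "errors" in json_data:
  print("errors")
elif "compiledCode" not in json_data:
  print("no compiled code returned")
else:
  print("ok")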
@@ -366,7 +370,7 @@ class Gen_compressed(threading.Thread):
           print((" " * error["charno"]) + "^")
       sys.exit(1)
     else:
-      if json_data.has_key("warnings"):
+      if "warnings" in json_data:
         warnings = json_data["warnings"]
         for warning in warnings:
           print("WARNING")
@@ -378,7 +382,7 @@ class Gen_compressed(threading.Thread):
           print((" " * warning["charno"]) + "^")
         print()

-      if not json_data.has_key("compiledCode"):
+      if not "compiledCode" in json_data:
        print("FATAL ERROR: Compiler did not return compiledCode.")
        sys.exit(1)

@@ -530,10 +534,10 @@ developers.google.com/blockly/guides/modify/web/closure""")

   core_search_paths = calcdeps.ExpandDirectories(
       ["core", os.path.join(os.path.pardir, "closure-library")])
-  core_search_paths.sort() # Deterministic build.
+  core_search_paths = sorted(core_search_paths) # Deterministic build.
   full_search_paths = calcdeps.ExpandDirectories(
       ["accessible", "core", os.path.join(os.path.pardir, "closure-library")])
-  full_search_paths.sort() # Deterministic build.
+  full_search_paths = sorted(full_search_paths) # Deterministic build.

   if (len(sys.argv) == 1):
     args = ['core', 'accessible', 'generators', 'defaultlangfiles']
@@ -59,7 +59,7 @@ def read_json_file(filename):
     if '@metadata' in defs:
       del defs['@metadata']
     return defs
-  except ValueError, e:
+  except ValueError as e:
     print('Error reading ' + filename)
     raise InputError(filename, str(e))

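except ValueError, e: is a syntax error on Python 3; except ValueError as e: is accepted from Python 2.6 onward, so it is the portable spelling. A minimal sketch in the shape of read_json_file (the wrapper exception here is generic, not the project's InputError):

import json

def parse_defs(text):
  try:
    defs = json.loads(text)
    if '@metadata' in defs:
      del defs['@metadata']
    return defs
  except ValueError as e:   # valid on Python 2.6+ and Python 3
    raise Exception('Error reading definitions: ' + str(e))

print(parse_defs('{"@metadata": {}, "HELLO": "hello"}'))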
@@ -85,7 +85,7 @@ def _create_qqq_file(output_dir):
   """
   qqq_file_name = os.path.join(os.curdir, output_dir, 'qqq.json')
   qqq_file = codecs.open(qqq_file_name, 'w', 'utf-8')
-  print 'Created file: ' + qqq_file_name
+  print('Created file: ' + qqq_file_name)
   qqq_file.write('{\n')
   return qqq_file

@@ -126,7 +126,7 @@ def _create_lang_file(author, lang, output_dir):
   """
   lang_file_name = os.path.join(os.curdir, output_dir, lang + '.json')
   lang_file = codecs.open(lang_file_name, 'w', 'utf-8')
-  print 'Created file: ' + lang_file_name
+  print('Created file: ' + lang_file_name)
   # string.format doesn't like printing braces, so break up our writes.
   lang_file.write('{\n\t"@metadata": {')
   lang_file.write("""
@@ -166,7 +166,7 @@ def _create_key_file(output_dir):
   key_file_name = os.path.join(os.curdir, output_dir, 'keys.json')
   key_file = open(key_file_name, 'w')
   key_file.write('{\n')
-  print 'Created file: ' + key_file_name
+  print('Created file: ' + key_file_name)
   return key_file

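The print statement is gone in Python 3; a single parenthesised argument, as used in these hunks, behaves identically on both interpreters (on Python 2 the parentheses just group the expression). For multi-argument or keyword forms, from __future__ import print_function gives Python 2 the real function. A brief sketch with a placeholder file name:

from __future__ import print_function  # no-op on Python 3, enables print() on Python 2

key_file_name = 'keys.json'  # placeholder, echoing _create_key_file

# Single-argument form: identical behaviour with or without the __future__ import.
print('Created file: ' + key_file_name)

# Multi-argument / keyword forms need the __future__ import on Python 2.
print('Created file:', key_file_name, sep=' ')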
@@ -30,9 +30,9 @@ _NEWLINE_PATTERN = re.compile('[\n\r]')

 def string_is_ascii(s):
   try:
-    s.decode('ascii')
-    return True
-  except UnicodeEncodeError:
+    # This approach is better for compatibility
+    return all(ord(c) < 128 for c in s)
+  except TypeError:
     return False

 def load_constants(filename):
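str has no decode() method on Python 3, so the old probe-and-catch version breaks with an AttributeError; checking each code point with ord() works for text on both interpreters. The replacement, restated with a few example calls (the TypeError branch covers non-string input such as None):

def string_is_ascii(s):
  try:
    # True when every character is a 7-bit code point; works for str on
    # Python 3 and for str/unicode on Python 2.
    return all(ord(c) < 128 for c in s)
  except TypeError:
    return False

print(string_is_ascii('hello'))      # True
print(string_is_ascii(u'caf\xe9'))   # False
print(string_is_ascii(None))         # False (TypeError branch)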
@@ -82,14 +82,15 @@ def main():
       print('ERROR: definition of {0} in {1} contained a newline character.'.
             format(key, args.source_lang_file))
       sys.exit(1)
-  sorted_keys = source_defs.keys()
-  sorted_keys.sort()
+  sorted_keys = sorted(source_defs.keys())

   # Read in synonyms file, which must be output in every language.
   synonym_defs = read_json_file(os.path.join(
       os.curdir, args.source_synonym_file))
+
+  # synonym_defs is also being sorted to ensure the same order is kept
   synonym_text = '\n'.join([u'Blockly.Msg["{0}"] = Blockly.Msg["{1}"];'
-      .format(key, synonym_defs[key]) for key in synonym_defs])
+      .format(key, synonym_defs[key]) for key in sorted(synonym_defs)])

   # Read in constants file, which must be output in every language.
   constants_text = load_constants(os.path.join(os.curdir, args.source_constants_file))
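On Python 3, dict.keys() returns a view object with no .sort() method, and relying on dict iteration order is not portable, so both the key list and the synonym output are built with sorted(). A short illustration with invented message and synonym tables:

# Invented stand-ins for the JSON message and synonym files.
source_defs = {'HELLO': 'hello', 'ADD': 'add', 'CONTROLS_IF': 'if'}
synonym_defs = {'HELLO_ALT': 'HELLO'}

# Python 2 only: keys = source_defs.keys(); keys.sort()   (.keys() was a list)
# Portable and deterministic:
sorted_keys = sorted(source_defs.keys())
for key in sorted_keys:
  print(key + ' = ' + source_defs[key])

synonym_text = '\n'.join([u'Blockly.Msg["{0}"] = Blockly.Msg["{1}"];'
    .format(key, synonym_defs[key]) for key in sorted(synonym_defs)])
print(synonym_text)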