讓 scratch-blocks/build.py
直接用 java
運行 google-closure-compiler/compiler.jar
, 一個依賴下的 closure 編譯器。優(yōu)點是離線且不太會出錯,缺點是需要 java, python2.7, 和一個 wsl 環(huán)境。
修改后在 wsl 環(huán)境的 scratch-blocks
文件夾運行 npm run prepublish
就不會報錯了。
在 scratch-gui
文件夾下運行 npm start
也不會報錯了。VSCode 有對 WSL Bash 的集成。
import sys
# This build script is Python 2 only: it relies on Python 2 builtins
# (reload, reduce, dict.has_key, print statements) and the removed
# httplib module, so fail fast on any other interpreter.
if sys.version_info[0] != 2:
  raise Exception("Blockly build only compatible with Python 2.x.\n"
                  "You are using: " + sys.version)

import errno, glob, httplib, json, os, re, subprocess, threading, urllib
# Locations of the Closure Library when vendored next to this checkout
# (the historical layout: ../closure-library).
CLOSURE_DIR = os.path.pardir
CLOSURE_ROOT = os.path.pardir
CLOSURE_LIBRARY = "closure-library"
# Locations of the Closure Library and Compiler when installed via npm;
# these are the values actually selected in the __main__ block below.
CLOSURE_DIR_NPM = "node_modules"
CLOSURE_ROOT_NPM = os.path.join("node_modules")
CLOSURE_LIBRARY_NPM = "google-closure-library"
CLOSURE_COMPILER_NPM = "google-closure-compiler"
def import_path(fullpath):
  """Import a module given its full file-system path.

  Args:
    fullpath: Path to the .py file to import (e.g. ".../bin/calcdeps.py").

  Returns:
    The freshly (re)loaded module object.
  """
  path, filename = os.path.split(fullpath)
  filename, ext = os.path.splitext(filename)
  sys.path.append(path)
  try:
    module = __import__(filename)
    # reload() is a builtin on Python 2; fall back to importlib so the
    # helper still works if the interpreter guard is ever relaxed.
    try:
      reload_fn = reload
    except NameError:
      from importlib import reload as reload_fn
    # Force re-execution in case a stale module of the same name is cached.
    reload_fn(module)
    return module
  finally:
    # Always undo the sys.path change. The original unconditionally did
    # `del sys.path[-1]`, which leaked the entry when __import__ raised
    # (and could delete someone else's entry if the import touched sys.path).
    sys.path.remove(path)
def read(filename):
  """Return the entire contents of a text file as one string.

  Args:
    filename: Path of the file to read.

  Returns:
    The file's content as a single string.
  """
  # The with-statement guarantees the handle is closed even if the read
  # raises; the original leaked the handle on error.  f.read() is also
  # the idiomatic form of "".join(f.readlines()).
  with open(filename) as f:
    return f.read()
# Header written at the top of every generated JavaScript file.  The first
# line must be a plain `//` comment: the corrupted "http://" prefix parsed
# in JS as a `http:` label followed by a comment, which turned
# 'use strict' into a non-directive statement and silently disabled strict
# mode in every generated file.
HEADER = ("// Do not edit this file; automatically generated by build.py.\n"
          "'use strict';\n")
class Gen_uncompressed(threading.Thread):
  """Generate a JavaScript file that loads Blockly's raw files.
  Runs in a separate thread.
  """
  def __init__(self, search_paths, vertical, closure_env):
    # search_paths: directories scanned for goog.provide/goog.require deps.
    # vertical: True -> vertical-grammar output file, False -> horizontal.
    # closure_env: values substituted into the JS template by format_js().
    threading.Thread.__init__(self)
    self.search_paths = search_paths
    self.vertical = vertical
    self.closure_env = closure_env

  def run(self):
    """Write blockly_uncompressed_<grammar>.js, a bootstrap loader."""
    if self.vertical:
      target_filename = 'blockly_uncompressed_vertical.js'
    else:
      target_filename = 'blockly_uncompressed_horizontal.js'
    f = open(target_filename, 'w')
    f.write(HEADER)
    # Bootstrap that locates Blockly's directory in both Node.js and the
    # browser; {closure_library}/{closure_dir} are filled in by format_js().
    f.write(self.format_js("""
var isNodeJS = !!(typeof module !== 'undefined' && module.exports &&
typeof window === 'undefined');
if (isNodeJS) {
var window = {};
require('{closure_library}');
}
window.BLOCKLY_DIR = (function() {
if (!isNodeJS) {
// Find name of current directory.
var scripts = document.getElementsByTagName('script');
var re = new RegExp('(.+)[\/]blockly_uncompressed(_vertical|_horizontal|)\.js$');
for (var i = 0, script; script = scripts[i]; i++) {
var match = re.exec(script.src);
if (match) {
return match[1];
}
}
alert('Could not detect Blockly\\'s directory name.');
}
return '';
})();
window.BLOCKLY_BOOT = function() {
var dir = '';
if (isNodeJS) {
require('{closure_library}');
dir = 'blockly';
} else {
// Execute after Closure has loaded.
if (!window.goog) {
alert('Error: Closure not found. Read this:\\n' +
'developers.google.com/blockly/guides/modify/web/closure');
}
if (window.BLOCKLY_DIR.search(/node_modules/)) {
dir = '..';
} else {
dir = window.BLOCKLY_DIR.match(/[^\\/]+$/)[0];
}
}
"""))
    # Emit Closure-style deps lines for every file found on search_paths.
    add_dependency = []
    base_path = calcdeps.FindClosureBasePath(self.search_paths)
    for dep in calcdeps.BuildDependenciesFromFiles(self.search_paths):
      add_dependency.append(calcdeps.GetDepsLine(dep, base_path))
    add_dependency.sort()  # Deterministic build.
    add_dependency = '\n'.join(add_dependency)
    # Find the checkout's directory name (the path segment before
    # /core/blockly.js) and replace it with the runtime `dir` variable so
    # the loader works wherever the checkout is named/located.
    m = re.search('[\\/]([^\\/]+)[\\/]core[\\/]blockly.js', add_dependency)
    add_dependency = re.sub('([\\/])' + re.escape(m.group(1)) +
        '([\\/]core[\\/])', '\\1" + dir + "\\2', add_dependency)
    f.write(add_dependency + '\n')
    # goog.require() every Blockly namespace, skipping files that live
    # inside the Closure Library itself.
    provides = []
    for dep in calcdeps.BuildDependenciesFromFiles(self.search_paths):
      if not dep.filename.startswith(self.closure_env["closure_root"] + os.sep):
        provides.extend(dep.provides)
    provides.sort()  # Deterministic build.
    f.write('\n')
    f.write('// Load Blockly.\n')
    for provide in provides:
      f.write("goog.require('%s');\n" % provide)
    # Closing boilerplate: boot immediately under Node, or after loading
    # a fresh Closure Library in the browser.
    f.write(self.format_js("""
delete this.BLOCKLY_DIR;
delete this.BLOCKLY_BOOT;
};
if (isNodeJS) {
window.BLOCKLY_BOOT();
module.exports = Blockly;
} else {
// Delete any existing Closure (e.g. Soy's nogoog_shim).
document.write('<script>var goog = undefined;</script>');
// Load fresh Closure Library.
document.write('<script src="' + window.BLOCKLY_DIR +
'/{closure_dir}/{closure_library}/closure/goog/base.js"></script>');
document.write('<script>window.BLOCKLY_BOOT();</script>');
}
"""))
    f.close()
    print("SUCCESS: " + target_filename)

  def format_js(self, code):
    """Format the JS template, substituting closure_env placeholders.

    The template mixes literal JS braces with {key} placeholders, so a
    plain str.format() would fail; every brace that is not part of a
    whitelisted {key} is doubled (escaped) before formatting.
    """
    key_whitelist = self.closure_env.keys()
    keys_pipe_separated = reduce(lambda accum, key: accum + "|" + key, key_whitelist)
    # A '{' is escaped unless immediately followed by a whitelisted key.
    begin_brace = re.compile(r"\{(?!%s)" % (keys_pipe_separated,))
    end_brace = re.compile(r"\}")
    def end_replacement(match):
      # Escape a '}' unless it closes a whitelisted {key} placeholder.
      try:
        maybe_key = match.string[match.string[:match.start()].rindex("{") + 1:match.start()]
      except ValueError:
        return "}}"
      if maybe_key and maybe_key in key_whitelist:
        return "}"
      else:
        return "}}"
    return begin_brace.sub("{{", end_brace.sub(end_replacement, code)).format(**self.closure_env)
class Gen_compressed(threading.Thread):
  """Generate compressed core and block JavaScript files.

  Runs the local Closure Compiler jar (shipped with the
  google-closure-compiler npm package) through java, then strips stubs
  and duplicate licenses before writing the output.
  Runs in a separate thread.
  """
  def __init__(self, search_paths_vertical, search_paths_horizontal, closure_env):
    threading.Thread.__init__(self)
    self.search_paths_vertical = search_paths_vertical
    self.search_paths_horizontal = search_paths_horizontal
    self.closure_env = closure_env

  def run(self):
    # Compile core for both grammars, then each family of block definitions.
    self.gen_core(True)
    self.gen_core(False)
    self.gen_blocks("horizontal")
    self.gen_blocks("vertical")
    self.gen_blocks("common")

  def gen_core(self, vertical):
    """Compress Blockly core for one grammar (vertical or horizontal)."""
    if vertical:
      target_filename = 'blockly_compressed_vertical.js'
      search_paths = self.search_paths_vertical
    else:
      target_filename = 'blockly_compressed_horizontal.js'
      search_paths = self.search_paths_horizontal
    params = []
    # core/blockly.js plus its transitive Closure dependencies.
    filenames = calcdeps.CalculateDependencies(search_paths,
        [os.path.join("core", "blockly.js")])
    filenames.sort()  # Deterministic build.
    for filename in filenames:
      params.append(("js_file", filename))
    self.do_compile(params, target_filename, filenames, "")

  def gen_blocks(self, block_type):
    """Compress one block family: "horizontal", "vertical", or "common"."""
    if block_type == "horizontal":
      target_filename = "blocks_compressed_horizontal.js"
      filenames = glob.glob(os.path.join("blocks_horizontal", "*.js"))
    elif block_type == "vertical":
      target_filename = "blocks_compressed_vertical.js"
      filenames = glob.glob(os.path.join("blocks_vertical", "*.js"))
    elif block_type == "common":
      target_filename = "blocks_compressed.js"
      filenames = glob.glob(os.path.join("blocks_common", "*.js"))
    filenames.sort()  # Deterministic build.
    params = []
    # gen_blocks.js declares a stub Blockly object so the block files can
    # compile stand-alone; the stub is removed from the output below.
    params.append(("js_file", os.path.join("build", "gen_blocks.js")))
    filenames.append(os.path.join("core", "colours.js"))
    filenames.append(os.path.join("core", "constants.js"))
    for filename in filenames:
      params.append(("js_file", filename))
    # Exact text of the stub to strip from the compiled result.
    remove = "var Blockly={Blocks:{}};"
    self.do_compile(params, target_filename, filenames, remove)

  def do_compile(self, params, target_filename, filenames, remove):
    """Compile, then write output and stats only when error-free."""
    json_data = self.do_compile_jar(params, target_filename)
    if self.report_errors(target_filename, filenames, json_data):
      self.write_output(target_filename, remove, json_data)
      self.report_stats(target_filename, json_data)

  def do_compile_jar(self, params, target_filename):
    """Invoke compiler.jar; return a dict shaped like the old web-API reply."""
    dash_params = ["--compilation_level SIMPLE", "--language_in ECMASCRIPT_2017", "--language_out ECMASCRIPT5", "--define='goog.DEBUG=false'", "--rewrite_polyfills=false"]
    for (arg, value) in params:
      if arg == "js_file":
        dash_params.append("--js='" + value + "'")
    args = []
    for group in [["java -jar ./node_modules/google-closure-compiler/compiler.jar"], dash_params]:
      args.extend(group)
    if sys.platform == "darwin":
      # NOTE(review): a single command string without shell=True is treated
      # as the executable name by Popen, so this branch looks like it would
      # fail on macOS -- confirm on a darwin host.
      proc = subprocess.Popen(" ".join(args), stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    else:
      proc = subprocess.Popen(" ".join(args), stdin=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)
    (stdout, stderr) = proc.communicate()
    # NOTE(review): stderr is always None here because stderr=subprocess.PIPE
    # was not passed to Popen; compiler diagnostics go straight to the console.
    print stdout
    print stderr
    # Sum the input file sizes for the compression statistics.
    filesizes = [os.path.getsize(value) for (arg, value) in params if arg == "js_file"]
    return dict(
        compiledCode=stdout,
        statistics=dict(
            originalSize=reduce(lambda v, size: v + size, filesizes, 0),
            compressedSize=len(stdout),
        )
    )

  def report_errors(self, target_filename, filenames, json_data):
    """Print errors/warnings; return True when the output may be written."""
    def file_lookup(name):
      # The legacy web API named inputs Input_1, Input_2, ...; map that
      # back to the local filename for readable diagnostics.
      if not name.startswith("Input_"):
        return "???"
      n = int(name[6:]) - 1
      return filenames[n]
    if json_data.has_key("serverErrors"):
      errors = json_data["serverErrors"]
      for error in errors:
        print("SERVER ERROR: %s" % target_filename)
        print(error["error"])
    elif json_data.has_key("errors"):
      errors = json_data["errors"]
      for error in errors:
        print("FATAL ERROR")
        print(error["error"])
        if error["file"]:
          print("%s at line %d:" % (
              file_lookup(error["file"]), error["lineno"]))
          print(error["line"])
          # Point a caret at the offending column.
          print((" " * error["charno"]) + "^")
      sys.exit(1)
    else:
      if json_data.has_key("warnings"):
        warnings = json_data["warnings"]
        for warning in warnings:
          print("WARNING")
          print(warning["warning"])
          if warning["file"]:
            print("%s at line %d:" % (
                file_lookup(warning["file"]), warning["lineno"]))
            print(warning["line"])
            print((" " * warning["charno"]) + "^")
        print()
      return True
    return False

  def write_output(self, target_filename, remove, json_data):
    """Strip the stub and duplicate licenses, then write the target file."""
    if not json_data.has_key("compiledCode"):
      print("FATAL ERROR: Compiler did not return compiledCode.")
      sys.exit(1)
    code = HEADER + "\n" + json_data["compiledCode"]
    code = code.replace(remove, "")
    # Matches the Apache license banner that Closure inputs carry, so the
    # compiled bundle is not littered with one copy per input file.
    LICENSE = re.compile("""/\\*
[\w ]+
Copyright \\d+ Google Inc.
https://developers.google.com/blockly/
Licensed under the Apache License, Version 2.0 \(the "License"\);
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
\\*/""")
    code = re.sub(LICENSE, "", code)
    stats = json_data["statistics"]
    original_b = stats["originalSize"]
    compressed_b = stats["compressedSize"]
    # Only write when the compiler produced real output.
    if original_b > 0 and compressed_b > 0:
      f = open(target_filename, "w")
      f.write(code)
      f.close()

  def report_stats(self, target_filename, json_data):
    """Print the size reduction achieved for target_filename."""
    stats = json_data["statistics"]
    original_b = stats["originalSize"]
    compressed_b = stats["compressedSize"]
    if original_b > 0 and compressed_b > 0:
      # + 0.5 rounds to the nearest KB / percent under integer truncation.
      original_kb = int(original_b / 1024 + 0.5)
      compressed_kb = int(compressed_b / 1024 + 0.5)
      ratio = int(float(compressed_b) / float(original_b) * 100 + 0.5)
      print("SUCCESS: " + target_filename)
      print("Size changed from %d KB to %d KB (%d%%)." % (
          original_kb, compressed_kb, ratio))
    else:
      print("UNKNOWN ERROR")
class Gen_langfiles(threading.Thread):
  """Generate JavaScript file for each natural language supported.
  Runs in a separate thread.
  """
  def __init__(self):
    threading.Thread.__init__(self)

  def _rebuild(self, srcs, dests):
    """Return True if any src is newer than the oldest dest.

    Missing destinations also trigger a rebuild; a missing source aborts
    the build.
    """
    try:
      return (max(os.path.getmtime(src) for src in srcs) >
              min(os.path.getmtime(dest) for dest in dests))
    except OSError as e:
      if e.errno == errno.ENOENT:
        # If a source file is missing we cannot proceed at all.
        if e.filename in srcs:
          print("Source file missing: " + e.filename)
          sys.exit(1)
        else:
          # A destination file is missing: rebuild it.
          return True
      else:
        # BUG FIX: the original concatenated str + OSError, which raised
        # TypeError here and masked the real problem.
        print("Error checking file creation times: " + str(e))
        # Timestamps could not be compared; rebuild to be safe.
        return True

  def run(self):
    # The msg/json/{en,qqq,synonyms}.json files depend on msg/messages.js.
    if self._rebuild([os.path.join("msg", "messages.js")],
                     [os.path.join("msg", "json", f) for f in
                      ["en.json", "qqq.json", "synonyms.json"]]):
      try:
        subprocess.check_call([
            "python",
            os.path.join("i18n", "js_to_json.py"),
            "--input_file", "msg/messages.js",
            "--output_dir", "msg/json/",
            "--quiet"])
      except (subprocess.CalledProcessError, OSError) as e:
        # BUG FIX: print("msg: ", e) printed a tuple under the Python 2
        # print statement; build one string instead.
        print("Error running i18n/js_to_json.py: " + str(e))
        sys.exit(1)
    # Generate msg/js/<LANG>.js for every translation json.
    try:
      cmd = [
          "python",
          os.path.join("i18n", "create_messages.py"),
          "--source_lang_file", os.path.join("msg", "json", "en.json"),
          "--source_synonym_file", os.path.join("msg", "json", "synonyms.json"),
          "--source_constants_file", os.path.join("msg", "json", "constants.json"),
          "--key_file", os.path.join("msg", "json", "keys.json"),
          "--output_dir", os.path.join("msg", "js"),
          "--quiet"]
      json_files = glob.glob(os.path.join("msg", "json", "*.json"))
      # Skip the metadata files; they are inputs, not translations.
      # (Renamed local from `file`, which shadowed the builtin.)
      json_files = [json_file for json_file in json_files if not
          (json_file.endswith(("keys.json", "synonyms.json", "qqq.json", "constants.json")))]
      cmd.extend(json_files)
      subprocess.check_call(cmd)
    except (subprocess.CalledProcessError, OSError) as e:
      print("Error running i18n/create_messages.py: " + str(e))
      sys.exit(1)
    # Report which language files were created: msg/json/X.json -> msg/js/X.js
    # (replace() intentionally rewrites both the directory and the extension).
    for f in json_files:
      f = f.replace("json", "js")
      if os.path.isfile(f):
        print("SUCCESS: " + f)
      else:
        print("FAILED to create " + f)
def exclude_vertical(item):
  """Filter predicate: keep every path except the vertical SVG renderer."""
  is_vertical_renderer = item.endswith("block_render_svg_vertical.js")
  return not is_vertical_renderer
def exclude_horizontal(item):
  """Filter predicate: keep every path except the horizontal SVG renderer."""
  if item.endswith("block_render_svg_horizontal.js"):
    return False
  return True
if __name__ == "__main__":
  # Always use the npm-installed Closure Library/Compiler, so the build
  # works offline without a vendored ../closure-library checkout.
  closure_dir = CLOSURE_DIR_NPM
  closure_root = CLOSURE_ROOT_NPM
  closure_library = CLOSURE_LIBRARY_NPM
  closure_compiler = CLOSURE_COMPILER_NPM
  # Load the calcdeps helper shipped inside the Closure Library package.
  calcdeps = import_path(os.path.join(
      closure_root, closure_library, "closure", "bin", "calcdeps.py"))
  print("Using local compiler: google-closure-compiler.jar ...\n")
  search_paths = calcdeps.ExpandDirectories(
      ["core", os.path.join(closure_root, closure_library)])
  # Each grammar excludes the other grammar's SVG renderer.  Note: relies
  # on Python 2's filter() returning a list (reused twice below).
  search_paths_horizontal = filter(exclude_vertical, search_paths)
  search_paths_vertical = filter(exclude_horizontal, search_paths)
  # Values substituted into the generated bootstrap JS templates.
  closure_env = {
    "closure_dir": closure_dir,
    "closure_root": closure_root,
    "closure_library": closure_library,
    "closure_compiler": closure_compiler,
  }
  # Run all tasks in parallel threads.
  # Uncompressed is limited by processor speed.
  # Compressed is limited by network and server speed.
  # Vertical:
  Gen_uncompressed(search_paths_vertical, True, closure_env).start()
  # Horizontal:
  Gen_uncompressed(search_paths_horizontal, False, closure_env).start()
  # Compressed forms of vertical and horizontal.
  Gen_compressed(search_paths_vertical, search_paths_horizontal, closure_env).start()
  # This is run locally in a separate thread.
  # Gen_langfiles().start()