Update build rules to work with ES6 modules.

This change makes the build rules compatible with ES6 modules and
incompatible with HTML imports.

The build is broken by this change and gerrit-ci fails. The build
will be repaired once the conversion to ES6 modules is complete.

Change-Id: I5da619db6cda06670fffcaefd97a9d3bbe5feb0c
Dmitrii Filippov
2020-03-11 13:33:06 +01:00
parent 445e9249e6
commit c2aac21b17
8 changed files with 50 additions and 342 deletions
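
For context, the conversion replaces Polymer-style HTML imports with standard ES module imports. A minimal sketch of the difference, using a hypothetical gr-foo element that is not part of this change:

// Before: dependencies were declared in gr-foo.html via HTML imports:
//   <link rel="import" href="../../shared/gr-button/gr-button.html">
//   <script src="gr-foo.js"></script>
// After: the same dependencies become ES module imports in gr-foo.js:
import '../../shared/gr-button/gr-button.js';
import {PolymerElement, html} from '@polymer/polymer/polymer-element.js';

class GrFoo extends PolymerElement {
  static get is() { return 'gr-foo'; }
  static get template() { return html`<gr-button>Click</gr-button>`; }
}
customElements.define(GrFoo.is, GrFoo);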

View File

@@ -52,7 +52,9 @@
"error",
80,
2,
{"ignoreComments": true}
{"ignoreComments": true,
"ignorePattern":"^import .*;$"
}
],
"new-cap": ["error", { "capIsNewExceptions": ["Polymer", "LegacyElementMixin", "GestureEventListeners", "LegacyDataMixin"] }],
"no-console": "off",
@@ -139,6 +141,12 @@
"rules": {
"jsdoc/require-file-overview": "off"
}
},
{
"files": ["*_html.js", "gr-icons.js", "*-theme.js", "*-styles.js"],
"rules": {
"max-len": "off"
}
}
],
"plugins": [

View File

@@ -1,5 +1,4 @@
load(":rules.bzl", "polygerrit_bundle")
load("//tools/node_tools/polygerrit_app_preprocessor:index.bzl", "update_links")
package(default_visibility = ["//visibility:public"])
@@ -7,7 +6,6 @@ polygerrit_bundle(
name = "polygerrit_ui",
srcs = glob(
[
"**/*.html",
"**/*.js",
],
exclude = [
@@ -15,6 +13,7 @@ polygerrit_bundle(
"node_modules_licenses/**",
"test/**",
"**/*_test.html",
"**/*_test.js",
],
),
outs = ["polygerrit_ui.zip"],
@@ -49,27 +48,18 @@ filegroup(
"**/*_test.html",
"test/**",
"samples/**",
"**/*_test.js",
],
),
)
# update_links is a temporary action. Later, links/references will be updated in the repository,
# so this rule will be removed.
update_links(
name = "test-srcs-updated-links",
srcs = [
"test/common-test-setup.html",
"test/index.html",
":pg_code",
],
redirects = "redirects.json",
)
# Workaround for https://github.com/bazelbuild/bazel/issues/1305
filegroup(
name = "test-srcs-updated-links-fg",
name = "test-srcs-fg",
srcs = [
":test-srcs-updated-links",
"test/common-test-setup.js",
"test/index.html",
":pg_code",
"@ui_dev_npm//:node_modules",
"@ui_npm//:node_modules",
],
@@ -83,7 +73,7 @@ sh_test(
"$(location @ui_dev_npm//web-component-tester/bin:wct)",
],
data = [
":test-srcs-updated-links-fg",
":test-srcs-fg",
"@ui_dev_npm//web-component-tester/bin:wct",
],
# Should not run sandboxed.
@@ -109,21 +99,11 @@ sh_test(
],
)
# update_links is a temporary action. Later, links/references will be updated in the repository,
# so this rule will be removed.
update_links(
name = "polylint-updated-links",
srcs = [
":pg_code_without_test",
],
redirects = "redirects.json",
)
# Workaround for https://github.com/bazelbuild/bazel/issues/1305
filegroup(
name = "polylint-updated-links-fg",
name = "polylint-fg",
srcs = [
":polylint-updated-links",
":pg_code_without_test",
"@ui_npm//:node_modules",
],
)
@@ -138,7 +118,7 @@ sh_test(
],
data = [
"polymer.json",
":polylint-updated-links-fg",
":polylint-fg",
"@tools_npm//polymer-cli/bin:polymer",
],
# Should not run sandboxed.

View File

@@ -5,7 +5,7 @@ set -ex
DIR=$(pwd)
ln -s $RUNFILES_DIR/ui_npm/node_modules $TEST_TMPDIR/node_modules
cp $2 $TEST_TMPDIR/polymer.json
cp -R -L polygerrit-ui/app/polylint-updated-links/polygerrit-ui/app/* $TEST_TMPDIR
cp -R -L polygerrit-ui/app/* $TEST_TMPDIR
#Can't use --root with polymer.json - see https://github.com/Polymer/tools/issues/2616
#Change current directory to the root folder

View File

@@ -81,7 +81,7 @@ export default {
context: 'window',
plugins: [resolve({
customResolveOptions: {
moduleDirectory: 'node_modules'
moduleDirectory: 'external/ui_npm/node_modules'
}
}), importLocalFontMetaUrlResolver()],
};
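
Pointing moduleDirectory at external/ui_npm/node_modules presumably lets rollup-plugin-node-resolve find bare module specifiers when the bundle is built inside the Bazel execroot, where the ui_npm packages live under external/ui_npm rather than in a local node_modules directory. A hedged sketch of the effect (paths are illustrative):

// A bare specifier in the sources...
import {PolymerElement} from '@polymer/polymer/polymer-element.js';
// ...is resolved by the plugin against the configured module directory, i.e. roughly:
// external/ui_npm/node_modules/@polymer/polymer/polymer-element.js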

View File

@@ -1,5 +1,4 @@
load("//tools/bzl:genrule2.bzl", "genrule2")
load("//tools/node_tools/polygerrit_app_preprocessor:index.bzl", "prepare_for_bundling", "update_links")
load("//tools/node_tools/legacy:index.bzl", "polymer_bundler_tool")
load("@npm_bazel_rollup//:index.bzl", "rollup_bundle")
@@ -16,84 +15,18 @@ def polygerrit_bundle(name, srcs, outs, entry_point, redirects):
app_name = entry_point.split(".html")[0].split("/").pop() # eg: gr-app
# Update links in all .html files according to the rules in the redirects.json file. All other
# files remain unchanged. After the update, all references to bower_components have been
# replaced with correct references to node_modules.
# The output of this rule is a directory that mirrors the directory layout of the srcs files.
update_links(
name = app_name + "-updated-links",
srcs = srcs,
redirects = redirects,
)
# Note: prepare_for_bundling and polymer_bundler_tool will be removed after the switch to
# ES6 modules.
# Polymer 3 uses ES modules; gerrit still uses HTML imports and polymer-bridges. Under these
# conditions, the polymer-bundler/crisper and polymer-cli tools either crash without an error or
# complain about non-existent syntax errors in the .js code. And even when they do work with some
# config, the output is not correct. At the same time, polymer-bundler/crisper work well if the
# input files are HTML and js without javascript modules.
#
# Polygerrit's code follows simple rules, so it is quite easy to preprocess the code in a way
# that it can be consumed by the polymer-bundler/crisper tools. The rules do the following:
# 1) prepare_for_bundling - updates srcs by moving all scripts out of HTML files.
# For each HTML file it creates a file.html_gen.js file in the same directory and puts all
# scripts there in the same order as the script tags appear in the HTML file.
# - Inline javascript is copied as is;
# - <script src = "path/to/file.js" > is added to the .js file as an
# import 'path/to/file.js'
# statement. Such an import statement runs all side effects in file.js (i.e. it runs all global
# code).
# - <link rel="import" href = "path/to/file.html"> is added to the .js file as
# import 'path/to/file.html_gen.js' - i.e. instead of the html, the .js script imports another
# generated js file ('path/to/file.html_gen.js').
# Because the output JS keeps the order of imports, all global variables are initialized in the
# correct order (this is important for gerrit; it is impossible to use AMD modules here).
# Then all scripts are removed from the HTML file.
# The output of this rule is a directory with updated HTML and JS files; all other files are
# copied to the output directory without changes.
# 2) rollup_bundle - combines all .js files from the previous step into one bundle.
# 3) polymer_bundler_tool -
# a) runs the polymer-bundler tool on the HTML files (i.e. on the output of the first step).
# Because these files don't contain scripts anymore, it just combines all HTML/CSS files into
# one file (by following HTML imports).
# b) runs crisper to add a script tag at the end of the generated HTML.
#
# The output of the rule is two files: an HTML bundle and a JS bundle; the HTML file loads the
# JS file with a <script src="..."> tag.
prepare_for_bundling(
name = app_name + "-prebundling-srcs",
srcs = [
app_name + "-updated-links",
],
additional_node_modules_to_preprocess = [
"@ui_npm//polymer-bridges",
],
entry_point = entry_point,
node_modules = [
native.filegroup(
name = app_name + "-full-src",
srcs = srcs + [
"@ui_npm//:node_modules",
],
root_path = "polygerrit-ui/app/" + app_name + "-updated-links/polygerrit-ui/app",
)
native.filegroup(
name = app_name + "-prebundling-srcs-js",
srcs = [app_name + "-prebundling-srcs"],
output_group = "js",
)
native.filegroup(
name = app_name + "-prebundling-srcs-html",
srcs = [app_name + "-prebundling-srcs"],
output_group = "html",
)
rollup_bundle(
name = app_name + "-bundle-js",
srcs = [app_name + "-prebundling-srcs-js"],
srcs = [app_name + "-full-src"],
config_file = ":rollup.config.js",
entry_point = app_name + "-prebundling-srcs/entry.js",
entry_point = "elements/" + app_name + ".js",
rollup_bin = "//tools/node_tools:rollup-bin",
sourcemap = "hidden",
deps = [
@@ -101,18 +34,11 @@ def polygerrit_bundle(name, srcs, outs, entry_point, redirects):
],
)
polymer_bundler_tool(
name = app_name + "-bundle-html",
srcs = [app_name + "-prebundling-srcs-html"],
entry_point = app_name + "-prebundling-srcs/entry.html",
script_src_value = app_name + ".js",
)
native.filegroup(
name = name + "_app_sources",
srcs = [
app_name + "-bundle-js.js",
app_name + "-bundle-html.html",
entry_point,
],
)
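
To make the removed comment above concrete: for a hypothetical gr-example.html containing a <link rel="import"> and a <script src>, the old prepare_for_bundling step would have emitted roughly the following gr-example.html_gen.js (a sketch, not actual generated output):

// Generated from gr-example.html; import order mirrors the order of the tags in the HTML.
import './gr-widget.html_gen.js'; // from: <link rel="import" href="gr-widget.html">
import './gr-example.js';         // from: <script src="gr-example.js"></script>
window.EXAMPLE_FLAG = true;       // inline <script> content copied as is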

View File

@@ -16,7 +16,7 @@ cp -R -L ./external/ui_dev_npm/node_modules/* $t/node_modules
# it always receives the file from ui_npm. It can break WCT itself but luckily it works.
cp -R -L ./external/ui_npm/node_modules/* $t/node_modules
cp -R -L ./polygerrit-ui/app/test-srcs-updated-links/polygerrit-ui/app/* $t/
cp -R -L ./polygerrit-ui/app/* $t/
export PATH="$(dirname $NPM):$PATH"

View File

@@ -136,43 +136,6 @@ func addDevHeaders(writer http.ResponseWriter) {
}
func getFinalPath(redirects []redirects, originalPath string) string {
testComponentsPrefix := "/components/"
if strings.HasPrefix(originalPath, testComponentsPrefix) {
return "/../node_modules/" + originalPath[len(testComponentsPrefix):]
}
for _, redirect := range redirects {
fromDir := redirect.From
if !strings.HasSuffix(fromDir, "/") {
fromDir = fromDir + "/"
}
if strings.HasPrefix(originalPath, fromDir) {
targetDir := ""
if redirect.To.NpmModule != "" {
targetDir = "node_modules/" + redirect.To.NpmModule
} else {
targetDir = redirect.To.Dir
}
if !strings.HasSuffix(targetDir, "/") {
targetDir = targetDir + "/"
}
if !strings.HasPrefix(targetDir, "/") {
targetDir = "/" + targetDir
}
filename := originalPath[len(fromDir):]
if redirect.To.Files != nil {
newfilename, found := redirect.To.Files[filename]
if found {
filename = newfilename
}
}
return targetDir + filename
}
}
return originalPath
}
func handleSrcRequest(redirects []redirects, dirListingMux *http.ServeMux, writer http.ResponseWriter, originalRequest *http.Request) {
parsedUrl, err := url.Parse(originalRequest.RequestURI)
if err != nil {
@@ -188,23 +151,32 @@ func handleSrcRequest(redirects []redirects, dirListingMux *http.ServeMux, write
return
}
requestPath := getFinalPath(redirects, parsedUrl.Path)
requestPath := parsedUrl.Path
if !strings.HasPrefix(requestPath, "/") {
requestPath = "/" + requestPath
}
isJsFile := strings.HasSuffix(requestPath, ".js") || strings.HasSuffix(requestPath, ".mjs")
data, err := readFile(parsedUrl.Path, requestPath)
if err != nil {
writer.WriteHeader(404)
return
data, err = readFile(parsedUrl.Path + ".js", requestPath + ".js")
if err != nil {
writer.WriteHeader(404)
return
}
isJsFile = true
}
if strings.HasSuffix(requestPath, ".js") {
r := regexp.MustCompile("(?m)^(import.*)'([^/.].*)';$")
data = r.ReplaceAll(data, []byte("$1 '/node_modules/$2'"))
if isJsFile {
moduleImportRegexp := regexp.MustCompile("(?m)^(import.*)'([^/.].*)';$")
data = moduleImportRegexp.ReplaceAll(data, []byte("$1 '/node_modules/$2';"))
writer.Header().Set("Content-Type", "application/javascript")
} else if strings.HasSuffix(requestPath, ".css") {
writer.Header().Set("Content-Type", "text/css")
} else if strings.HasSuffix(requestPath, "_test.html") {
moduleImportRegexp := regexp.MustCompile("(?m)^(import.*)'([^/.].*)';$")
data = moduleImportRegexp.ReplaceAll(data, []byte("$1 '/node_modules/$2';"))
writer.Header().Set("Content-Type", "text/html")
} else if strings.HasSuffix(requestPath, ".html") {
writer.Header().Set("Content-Type", "text/html")
}
@@ -214,9 +186,15 @@ func handleSrcRequest(redirects []redirects, dirListingMux *http.ServeMux, write
}
func readFile(originalPath string, redirectedPath string) ([]byte, error) {
pathsToTry := []string{"app" + redirectedPath}
pathsToTry := []string{"app/modulizer_out" + redirectedPath, "app" + redirectedPath}
bowerComponentsSuffix := "/bower_components/"
nodeModulesPrefix := "/node_modules/"
testComponentsPrefix := "/components/"
if strings.HasPrefix(originalPath, testComponentsPrefix) {
pathsToTry = append(pathsToTry, "node_modules/wct-browser-legacy/node_modules/"+originalPath[len(testComponentsPrefix):])
pathsToTry = append(pathsToTry, "node_modules/"+originalPath[len(testComponentsPrefix):])
}
if strings.HasPrefix(originalPath, bowerComponentsSuffix) {
pathsToTry = append(pathsToTry, "node_modules/wct-browser-legacy/node_modules/"+originalPath[len(bowerComponentsSuffix):])

View File

@@ -1,184 +0,0 @@
"""This file contains rules to preprocess files before bundling"""
def _update_links_impl(ctx):
"""Wrapper for the links-update command-line tool"""
dir_name = ctx.label.name
output_files = []
input_js_files = []
output_js_files = []
js_files_args = ctx.actions.args()
js_files_args.set_param_file_format("multiline")
js_files_args.use_param_file("%s", use_always = True)
for f in ctx.files.srcs:
output_file = ctx.actions.declare_file(dir_name + "/" + f.path)
output_files.append(output_file)
if f.extension == "html":
input_js_files.append(f)
output_js_files.append(output_file)
js_files_args.add(f)
js_files_args.add(output_file)
else:
ctx.actions.expand_template(
output = output_file,
template = f,
substitutions = {},
)
ctx.actions.run(
executable = ctx.executable._updater,
outputs = output_js_files,
inputs = input_js_files + [ctx.file.redirects],
arguments = [js_files_args, ctx.file.redirects.path],
)
return [DefaultInfo(files = depset(output_files))]
update_links = rule(
implementation = _update_links_impl,
attrs = {
"srcs": attr.label_list(allow_files = True),
"redirects": attr.label(allow_single_file = True, mandatory = True),
"_updater": attr.label(
default = ":links-updater-bin",
executable = True,
cfg = "host",
),
},
)
def _get_node_modules_root(node_modules):
if node_modules == None or len(node_modules) == 0:
return None
node_module_root = node_modules[0].label.workspace_root
for target in node_modules:
if target.label.workspace_root != node_module_root:
fail("Only one node_modules workspace can be used")
return node_module_root + "/"
def _get_relative_path(file, root):
root_len = len(root)
if file.path.startswith(root):
return file.path[root_len - 1:]
else:
fail("The file '%s' is not under the root '%s'." % (file.path, root))
def _copy_file(ctx, src, target_name):
output_file = ctx.actions.declare_file(target_name)
ctx.actions.expand_template(
output = output_file,
template = src,
substitutions = {},
)
return output_file
def _get_generated_files(ctx, files, files_root_path, target_dir):
gen_files_for_html = dict()
gen_files_for_js = dict()
copied_files = []
for f in files:
target_name = target_dir + _get_relative_path(f, files_root_path)
if f.extension == "html":
html_output_file = ctx.actions.declare_file(target_name)
js_output_file = ctx.actions.declare_file(target_name + "_gen.js")
gen_files_for_html.update([[f, {"html": html_output_file, "js": js_output_file}]])
elif f.extension == "js":
js_output_file = ctx.actions.declare_file(target_name)
gen_files_for_js.update([[f, {"js": js_output_file}]])
else:
copied_files.append(_copy_file(ctx, f, target_name))
return (gen_files_for_html, gen_files_for_js, copied_files)
def _prepare_for_bundling_impl(ctx):
dir_name = ctx.label.name
all_output_files = []
node_modules_root = _get_node_modules_root(ctx.attr.node_modules)
html_files_dict = dict()
js_files_dict = dict()
root_path = ctx.bin_dir.path + "/" + ctx.attr.root_path
if not root_path.endswith("/"):
root_path = root_path + "/"
gen_files_for_html, gen_files_for_js, copied_files = _get_generated_files(ctx, ctx.files.srcs, root_path, dir_name)
html_files_dict.update(gen_files_for_html)
js_files_dict.update(gen_files_for_js)
all_output_files.extend(copied_files)
gen_files_for_html, gen_files_for_js, copied_files = _get_generated_files(ctx, ctx.files.additional_node_modules_to_preprocess, node_modules_root, dir_name)
html_files_dict.update(gen_files_for_html)
js_files_dict.update(gen_files_for_js)
all_output_files.extend(copied_files)
for f in ctx.files.node_modules:
target_name = dir_name + _get_relative_path(f, node_modules_root)
if html_files_dict.get(f) == None and js_files_dict.get(f) == None:
all_output_files.append(_copy_file(ctx, f, target_name))
preprocessed_output_files = []
html_files_args = ctx.actions.args()
html_files_args.set_param_file_format("multiline")
html_files_args.use_param_file("%s", use_always = True)
for src_path, output_files in html_files_dict.items():
html_files_args.add(src_path)
html_files_args.add(output_files["html"])
html_files_args.add(output_files["js"])
preprocessed_output_files.append(output_files["html"])
preprocessed_output_files.append(output_files["js"])
js_files_args = ctx.actions.args()
js_files_args.set_param_file_format("multiline")
js_files_args.use_param_file("%s", use_always = True)
for src_path, output_files in js_files_dict.items():
js_files_args.add(src_path)
js_files_args.add(output_files["js"])
preprocessed_output_files.append(output_files["js"])
all_output_files.extend(preprocessed_output_files)
ctx.actions.run(
executable = ctx.executable._preprocessor,
outputs = preprocessed_output_files,
inputs = ctx.files.srcs + ctx.files.additional_node_modules_to_preprocess,
arguments = [root_path, html_files_args, js_files_args],
)
entry_point_html = ctx.attr.entry_point
entry_point_js = ctx.attr.entry_point + "_gen.js"
ctx.actions.write(ctx.outputs.html, "<link rel=\"import\" href=\"./%s\" >" % entry_point_html)
ctx.actions.write(ctx.outputs.js, "import \"./%s\";" % entry_point_js)
return [
DefaultInfo(files = depset([ctx.outputs.html, ctx.outputs.js], transitive = [depset(all_output_files)])),
OutputGroupInfo(
js = depset([ctx.outputs.js] + [f for f in all_output_files if f.extension == "js"]),
html = depset([ctx.outputs.html] + [f for f in all_output_files if f.extension == "html"]),
),
]
prepare_for_bundling = rule(
implementation = _prepare_for_bundling_impl,
attrs = {
"srcs": attr.label_list(allow_files = True),
"node_modules": attr.label_list(allow_files = True),
"_preprocessor": attr.label(
default = ":preprocessor-bin",
executable = True,
cfg = "host",
),
"additional_node_modules_to_preprocess": attr.label_list(allow_files = True),
"root_path": attr.string(),
"entry_point": attr.string(
mandatory = True,
doc = "Path relative to root_path",
),
},
outputs = {
"html": "%{name}/entry.html",
"js": "%{name}/entry.js",
},
)