Add streams to releases again, include minified bundle, drop in… (#2539)
* Minify stream, add stream stuff to releases again * Kill off a lot of tech debt, drop internal utilities from npm 1. Kill `module/`, internalize `bundler/`, privatize `test-utils/` We've been telling people to move elsewhere from these for a while, and it's about time we just pull the plug here and finally remove them. - We officially removed the bundler from the public API in v2.0, and that was the only one of these that was ever publicly documented. Usage should be low enough by now it shouldn't break anyone- I'm not seeing bundler bugs being reported anymore, either. - The `module/` utility was so narrow and caveat-filled that I'm not sure anyone really used it (even us core Mithril devs never really used it), and we only had it documented in the repo folder it lived in. I think only one bug was ever filed, and it's because it somehow ended up completely non-functional without any of us realizing it. - The test utilities were meant to be internal from day 1, but people started using it despite us core developers constantly telling people to look elsewhere and even the docs recommending specific alternatives without mention of our internal mocks. (Now if people would RTFM, that'd be nice...) 2. Add dedicated HTML test files to verify ospec and the promise polyfill, and ensure the promise tests are in pure ES5. These are made specially for those and should be much easier to just run now. 3. Fix the benchmark script to use the real DOM in browsers and to not require as many dependencies to create. Also, tweak them to be much more effective and precise on what's being tested. Previously, it was rendering to the HTML file itself, while now it's rendering to the `body`. This means in browsers, it's triggering layout and everything, benchmarking how well Mithril optimizes for style and layout recalcs, too. It also puts some pressure on the hyperscript parser attribute application, so that can be noticed as well. * Update dependencies
This commit is contained in:
parent
34f4363357
commit
d4551f49f5
41 changed files with 1893 additions and 3795 deletions
183
scripts/_bundler-impl.js
Normal file
183
scripts/_bundler-impl.js
Normal file
|
|
@ -0,0 +1,183 @@
|
|||
"use strict"
|
||||
|
||||
const fs = require("fs")
|
||||
const path = require("path")
|
||||
const execFileSync = require("child_process").execFileSync
|
||||
const util = require("util")
|
||||
|
||||
const readFile = util.promisify(fs.readFile)
|
||||
const access = util.promisify(fs.access)
|
||||
|
||||
// True when `filepath` exists and is accessible, false otherwise.
// Never rejects — access failures are reported as `false`.
async function isFile(filepath) {
	try {
		await access(filepath)
		return true
	} catch (e) {
		return false
	}
}
|
||||
// Backslash-escape every regex metacharacter in `string` so it can be
// embedded verbatim inside a `new RegExp(...)` pattern.
function escapeRegExp(string) {
	const metachars = /[|\\{}()[\]^$+*?.-]/g
	return string.replace(metachars, "\\$&")
}
|
||||
// Escape `$` so `string` can be used literally as a String#replace
// replacement (where `$` sequences are otherwise special).
function escapeReplace(string) {
	const dollar = /\$/g
	return string.replace(dollar, "\\$&")
}
|
||||
|
||||
// Resolve a require() specifier to an absolute file path.
//
// filepath - absolute path of the file containing the require() call
// filename - the raw specifier, e.g. "./render" or "some-package"
//
// Specifiers not starting with "." are treated as npm packages: the
// package.json "main" field is honored when present, falling back to
// "<name>.js" and finally "index.js" inside the package directory.
// Relative specifiers resolve against the importing file's directory,
// with ".js" appended unconditionally.
//
// FIX: the scraped copy had its template-literal interpolations destroyed
// (`$(unknown)` placeholders); the `${filename}` interpolations are restored.
async function resolve(filepath, filename) {
	if (filename[0] !== ".") {
		// resolve as npm dependency
		const packagePath = `./node_modules/${filename}/package.json`
		let json, meta

		try {
			json = await readFile(packagePath, "utf8")
		} catch (e) {
			// No package.json — fall through to the name-based defaults below.
			meta = {}
		}

		if (json) {
			try {
				meta = JSON.parse(json)
			}
			catch (e) {
				throw new Error(`invalid JSON for ${packagePath}: ${json}`)
			}
		}

		const main = `./node_modules/${filename}/${meta.main || `${filename}.js`}`
		return path.resolve(await isFile(main) ? main : `./node_modules/${filename}/index.js`)
	}
	else {
		// resolve as local dependency
		return path.resolve(path.dirname(filepath), filename + ".js")
	}
}
|
||||
|
||||
// Collect every match of the global regex `regexp` against `str` into an
// array of exec results (match arrays with capture groups).
function matchAll(str, regexp) {
	// Reset the stateful /g cursor in case the regex was used before.
	regexp.lastIndex = 0
	const result = []
	for (let exec = regexp.exec(str); exec != null; exec = regexp.exec(str)) {
		result.push(exec)
	}
	return result
}
|
||||
|
||||
let error
|
||||
module.exports = async (input) => {
|
||||
const modules = new Map()
|
||||
const bindings = new Map()
|
||||
const declaration = /^\s*(?:var|let|const|function)[\t ]+([\w_$]+)/gm
|
||||
const include = /(?:((?:var|let|const|,|)[\t ]*)([\w_$\.\[\]"'`]+)(\s*=\s*))?require\(([^\)]+)\)(\s*[`\.\(\[])?/gm
|
||||
let uuid = 0
|
||||
async function process(filepath, data) {
|
||||
for (const [, binding] of matchAll(data, declaration)) bindings.set(binding, 0)
|
||||
|
||||
const tasks = []
|
||||
|
||||
for (const [, def = "", variable = "", eq = "", dep, rest = ""] of matchAll(data, include)) {
|
||||
tasks.push({filename: JSON.parse(dep), def, variable, eq, rest})
|
||||
}
|
||||
|
||||
const imports = await Promise.all(
|
||||
tasks.map((t) => resolve(filepath, t.filename))
|
||||
)
|
||||
|
||||
const results = []
|
||||
for (const [i, task] of tasks.entries()) {
|
||||
const dependency = imports[i]
|
||||
let pre = "", def = task.def
|
||||
if (def[0] === ",") def = "\nvar ", pre = "\n"
|
||||
const localUUID = uuid // global uuid can update from nested `process` call, ensure same id is used on declaration and consumption
|
||||
const existingModule = modules.get(dependency)
|
||||
modules.set(dependency, task.rest ? `_${localUUID}` : task.variable)
|
||||
const code = await process(
|
||||
dependency,
|
||||
pre + (
|
||||
existingModule == null
|
||||
? await exportCode(task.filename, dependency, def, task.variable, task.eq, task.rest, localUUID)
|
||||
: def + task.variable + task.eq + existingModule
|
||||
)
|
||||
)
|
||||
uuid++
|
||||
results.push(code + task.rest)
|
||||
}
|
||||
|
||||
let i = 0
|
||||
return data.replace(include, () => results[i++])
|
||||
}
|
||||
|
||||
async function exportCode(filename, filepath, def, variable, eq, rest, uuid) {
|
||||
let code = await readFile(filepath, "utf-8")
|
||||
// if there's a syntax error, report w/ proper stack trace
|
||||
try {
|
||||
new Function(code)
|
||||
}
|
||||
catch (e) {
|
||||
try {
|
||||
execFileSync("node", ["--check", filepath], {
|
||||
stdio: "pipe",
|
||||
})
|
||||
}
|
||||
catch (e) {
|
||||
if (e.message !== error) {
|
||||
error = e.message
|
||||
console.log(`\x1b[31m${e.message}\x1b[0m`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// disambiguate collisions
|
||||
const targetPromises = []
|
||||
code.replace(include, (match, def, variable, eq, dep) => {
|
||||
targetPromises.push(resolve(filepath, JSON.parse(dep)))
|
||||
})
|
||||
|
||||
const ignoredTargets = await Promise.all(targetPromises)
|
||||
const ignored = new Set()
|
||||
|
||||
for (const target of ignoredTargets) {
|
||||
const binding = modules.get(target)
|
||||
if (binding != null) ignored.add(binding)
|
||||
}
|
||||
|
||||
if (new RegExp(`module\\.exports\\s*=\\s*${variable}\s*$`, "m").test(code)) ignored.add(variable)
|
||||
for (const [binding, count] of bindings) {
|
||||
if (!ignored.has(binding)) {
|
||||
const before = code
|
||||
code = code.replace(
|
||||
new RegExp(`(\\b)${escapeRegExp(binding)}\\b`, "g"),
|
||||
escapeReplace(binding) + count
|
||||
)
|
||||
if (before !== code) bindings.set(binding, count + 1)
|
||||
}
|
||||
}
|
||||
|
||||
// fix strings that got mangled by collision disambiguation
|
||||
const string = /(["'])((?:\\\1|.)*?)(\1)/g
|
||||
const candidates = Array.from(bindings, ([binding, count]) => escapeRegExp(binding) + (count - 1)).join("|")
|
||||
const variables = new RegExp(candidates, "g")
|
||||
code = code.replace(string, (match, open, data, close) => {
|
||||
const fixed = data.replace(variables, (match) => match.replace(/\d+$/, ""))
|
||||
return open + fixed + close
|
||||
})
|
||||
|
||||
//fix props
|
||||
const props = new RegExp(`((?:[^:]\\/\\/.*)?\\.\\s*)(${candidates})|([\\{,]\\s*)(${candidates})(\\s*:)`, "gm")
|
||||
code = code.replace(props, (match, dot, a, pre, b, post) => {
|
||||
// Don't do anything because dot was matched in a comment
|
||||
if (dot && dot.indexOf("//") === 1) return match
|
||||
if (dot) return dot + a.replace(/\d+$/, "")
|
||||
return pre + b.replace(/\d+$/, "") + post
|
||||
})
|
||||
|
||||
return code
|
||||
.replace(/("|')use strict\1;?/gm, "") // remove extraneous "use strict"
|
||||
.replace(/module\.exports\s*=\s*/gm, escapeReplace(rest ? `var _${uuid}` + eq : def + (rest ? "_" : "") + variable + eq)) // export
|
||||
+ (rest ? `\n${def}${variable}${eq}_${uuid}` : "") // if `rest` is truthy, it means the expression is fluent or higher-order (e.g. require(path).foo or require(path)(foo)
|
||||
}
|
||||
|
||||
const code = ";(function() {\n" +
|
||||
(await process(path.resolve(input), await readFile(input, "utf-8")))
|
||||
.replace(/^\s*((?:var|let|const|)[\t ]*)([\w_$\.]+)(\s*=\s*)(\2)(?=[\s]+(\w)|;|$)/gm, "") // remove assignments to self
|
||||
.replace(/;+(\r|\n|$)/g, ";$1") // remove redundant semicolons
|
||||
.replace(/(\r|\n)+/g, "\n").replace(/(\r|\n)$/, "") + // remove multiline breaks
|
||||
"\n}());"
|
||||
|
||||
//try {new Function(code); console.log(`build completed at ${new Date()}`)} catch (e) {}
|
||||
error = null
|
||||
return code
|
||||
}
|
||||
22
scripts/bundler-readme.md
Normal file
22
scripts/bundler-readme.md
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
# bundler.js
|
||||
|
||||
Simplistic CommonJS module bundler
|
||||
|
||||
Version: 0.1
|
||||
License: MIT
|
||||
|
||||
## About
|
||||
|
||||
This bundler attempts to aggressively bundle CommonJS modules by assuming the dependency tree is static, similar to what Rollup does for ES6 modules.
|
||||
|
||||
Most browsers don't support ES6 `import/export` syntax, but we can achieve modularity by using CommonJS module syntax and transpiling it.
|
||||
|
||||
Webpack is conservative and treats CommonJS modules as non-statically-analyzable since `require` and `module.exports` are legally allowed everywhere. Therefore, it must generate extra code to resolve dependencies at runtime (i.e. `__webpack_require()`). Rollup only works with ES6 modules. ES6 modules can be bundled more efficiently because they are statically analyzable, but some use cases are difficult to handle due to ES6's support for cyclic dependencies and hoisting rules. This bundler assumes code is written in CommonJS style but follows a strict set of rules that emulate statically analyzable code and favors the usage of the factory pattern instead of relying on obscure corners of the Javascript language (hoisting rules and binding semantics).
|
||||
|
||||
### Caveats
|
||||
|
||||
- Only supports modules that have the `require` and `module.exports` statement declared at the top-level scope before all other code, i.e. it does not support CommonJS modules that rely on dynamic importing/exporting. This means modules should only export a pure function or export a factory function if there are multiple statements and/or internal module state. The factory function pattern allows easier dependency injection in stateful modules, thus making modules testable.
|
||||
- Changes the semantics of value/binding exporting between unbundled and bundled code, and therefore relying on those semantics is discouraged.
|
||||
- Top level strictness is infectious (i.e. if entry file is in `"use strict"` mode, all modules inherit strict mode, and conversely, if the entry file is not in strict mode, all modules are pulled out of strict mode)
|
||||
- Currently only supports assignments to `module.exports` (i.e. `module.exports.foo = bar` will not work)
|
||||
- It is tiny and dependency-free because it uses regular expressions, and it only supports the narrow range of import/export declaration patterns outlined above.
|
||||
70
scripts/bundler.js
Normal file
70
scripts/bundler.js
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
"use strict"
|
||||
|
||||
const fs = require("fs")
|
||||
const zlib = require("zlib")
|
||||
const chokidar = require("chokidar")
|
||||
const Terser = require("terser")
|
||||
const util = require("util")
|
||||
|
||||
const readFile = util.promisify(fs.readFile)
|
||||
const writeFile = util.promisify(fs.writeFile)
|
||||
const gzip = util.promisify(zlib.gzip)
|
||||
|
||||
const bundle = require("./_bundler-impl")
|
||||
|
||||
// Short-flag aliases mapped to their long option names.
const aliases = {o: "output", m: "minify", w: "watch", s: "save"}
// Parsed CLI options; prototype-free so flag names can't collide with
// Object.prototype properties.
const params = Object.create(null)
// The flag currently awaiting a value, if any.
let command
for (let arg of process.argv.slice(2)) {
	// Quoted arguments arrive JSON-encoded; decode them first.
	if (arg[0] === '"') arg = JSON.parse(arg)
	if (arg[0] !== "-") {
		// A bare value: either the pending flag's value or the input path.
		if (command != null) add(arg)
		else params.input = arg
		continue
	}
	// A new flag: flush any pending flag as a boolean, then strip the dashes.
	if (command != null) add(true)
	command = arg.replace(/\-+/g, "")
}
// The last flag may still be pending with no value.
if (command != null) add(true)
|
||||
|
||||
// Record the pending flag's value under its long name and clear the
// pending state.
function add(value) {
	const key = aliases[command] || command
	params[key] = value
	command = null
}
|
||||
|
||||
// Insert thousands separators into an integer, e.g. 1234567 -> "1,234,567".
function format(n) {
	const digits = n.toString()
	return digits.replace(/(\d)(?=(\d\d\d)+(?!\d))/g, "$1,")
}
|
||||
|
||||
// Bundle params.input into params.output, minifying when --minify is set,
// reporting sizes, and updating the README size badge when --save is given.
async function build() {
	const bundled = await bundle(params.input)

	// Without --minify, write the raw bundle and stop.
	if (!params.minify) {
		await writeFile(params.output, bundled, "utf-8")
		return
	}

	console.log("minifying...")
	const result = Terser.minify(bundled)
	if (result.error) throw new Error(result.error)
	await writeFile(params.output, result.code, "utf-8")

	// Report gzipped and uncompressed sizes before and after minification.
	const rawSize = Buffer.byteLength(bundled, "utf-8")
	const minSize = Buffer.byteLength(result.code, "utf-8")
	const rawGzipSize = (await gzip(bundled)).byteLength
	const minGzipSize = (await gzip(result.code)).byteLength

	console.log(`Original size: ${format(rawGzipSize)} bytes gzipped (${format(rawSize)} bytes uncompressed)`)
	console.log(`Compiled size: ${format(minGzipSize)} bytes gzipped (${format(minSize)} bytes uncompressed)`)

	if (!params.save) return

	// Rewrite the "<!-- size -->" badge in the README with the gzipped KB figure.
	const readme = await readFile("./README.md", "utf8")
	const kb = minGzipSize / 1000
	const badge = "$1" + (kb % 1 ? kb.toFixed(2) : kb) + " KB$3"
	await writeFile("./README.md",
		readme.replace(/(<!-- size -->)(.+?)(<!-- \/size -->)/, badge)
	)
}
|
||||
|
||||
// Kick off an initial build; with --watch, rebuild on any file change.
// The output file itself is ignored so writing it doesn't retrigger a build.
build()
if (params.watch) chokidar.watch(".", {ignored: params.output}).on("all", build)
|
||||
39
scripts/minify-stream.js
Normal file
39
scripts/minify-stream.js
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env node
|
||||
/* eslint-disable no-process-exit */
|
||||
"use strict"
|
||||
|
||||
// This is my temporary hack to simplify deployment until I fix the underlying
|
||||
// problems in these bugs:
|
||||
// - https://github.com/MithrilJS/mithril.js/issues/2417
|
||||
// - https://github.com/MithrilJS/mithril.js/pull/2422
|
||||
|
||||
const {promises: fs} = require("fs")
|
||||
const path = require("path")
|
||||
const zlib = require("zlib")
|
||||
const Terser = require("terser")
|
||||
|
||||
// Insert thousands separators into an integer, e.g. 1234567 -> "1,234,567".
function format(n) {
	const digits = n.toString()
	return digits.replace(/(\d)(?=(\d\d\d)+(?!\d))/g, "$1,")
}
|
||||
|
||||
module.exports = minify
// Minify stream/stream.js into stream/stream.min.js with Terser and report
// raw vs. gzipped sizes for both the original and the minified output.
async function minify() {
	const input = path.resolve(__dirname, "../stream/stream.js")
	const output = path.resolve(__dirname, "../stream/stream.min.js")

	const source = await fs.readFile(input, "utf-8")
	const result = Terser.minify(source)
	if (result.error) throw new Error(result.error)
	await fs.writeFile(output, result.code, "utf-8")

	// Size report: gzipped figure first, uncompressed in parentheses.
	const rawSize = Buffer.byteLength(source, "utf-8")
	const minSize = Buffer.byteLength(result.code, "utf-8")
	const rawGzipSize = zlib.gzipSync(source).byteLength
	const minGzipSize = zlib.gzipSync(result.code).byteLength

	console.log(`Original size: ${format(rawGzipSize)} bytes gzipped (${format(rawSize)} bytes uncompressed)`)
	console.log(`Compiled size: ${format(minGzipSize)} bytes gzipped (${format(minSize)} bytes uncompressed)`)
}
|
||||
|
||||
/* eslint-disable global-require */
// When run directly (`node scripts/minify-stream.js`) rather than required,
// hand the minify task to the shared command wrapper.
if (require.main === module) {
	require("./_command")({exec: minify})
}
|
||||
294
scripts/tests/test-bundler.js
Normal file
294
scripts/tests/test-bundler.js
Normal file
|
|
@ -0,0 +1,294 @@
|
|||
"use strict"
|
||||
|
||||
const fs = require("fs")
|
||||
const util = require("util")
|
||||
const access = util.promisify(fs.access)
|
||||
const writeFile = util.promisify(fs.writeFile)
|
||||
const unlink = util.promisify(fs.unlink)
|
||||
|
||||
const o = require("../../ospec/ospec")
|
||||
const bundle = require("../_bundler-impl")
|
||||
|
||||
// Integration tests for scripts/_bundler-impl.js. Each test writes a small
// set of fixture files to disk, bundles the entry file, and asserts on the
// exact bundled output string. Fixtures are deleted after each test.
// NOTE(review): the spec callback is declared `async`; the registrations
// inside run synchronously, but confirm the vendored ospec tolerates an
// async spec body.
o.spec("bundler", async () => {
	// Names of the fixture files created by the current test (for cleanup).
	let filesCreated
	// Directory prefix for all fixture files.
	const ns = "./"

	// Create a fixture file, refusing to overwrite an existing file so a
	// badly-named fixture can't clobber a real project file.
	async function write(filepath, data) {
		try {
			await access(ns + filepath)
		} catch (e) {
			// File doesn't exist yet — safe to create.
			return writeFile(ns + filepath, data, "utf8")
		}
		throw new Error(`Don't call \`write('${filepath}')\`. Cannot overwrite file.`)
	}

	// Write all fixtures for one test and remember them for cleanup.
	function setup(files) {
		filesCreated = Object.keys(files)
		return Promise.all(filesCreated.map((f) => write(f, files[f])))
	}

	// Remove every fixture the test created.
	o.afterEach(() => Promise.all(
		filesCreated.map((filepath) => unlink(ns + filepath))
	))

	// --- basic relative imports and declaration kinds ---

	o("relative imports works", async () => {
		await setup({
			"a.js": 'var b = require("./b")',
			"b.js": "module.exports = 1",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar b = 1\n}());")
	})
	o("relative imports works with semicolons", async () => {
		await setup({
			"a.js": 'var b = require("./b");',
			"b.js": "module.exports = 1;",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar b = 1;\n}());")
	})
	o("relative imports works with let", async () => {
		await setup({
			"a.js": 'let b = require("./b")',
			"b.js": "module.exports = 1",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nlet b = 1\n}());")
	})
	o("relative imports works with const", async () => {
		await setup({
			"a.js": 'const b = require("./b")',
			"b.js": "module.exports = 1",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nconst b = 1\n}());")
	})
	o("relative imports works with assignment", async () => {
		await setup({
			"a.js": 'var a = {}\na.b = require("./b")',
			"b.js": "module.exports = 1",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar a = {}\na.b = 1\n}());")
	})
	o("relative imports works with reassignment", async () => {
		await setup({
			"a.js": 'var b = {}\nb = require("./b")',
			"b.js": "module.exports = 1",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar b = {}\nb = 1\n}());")
	})

	// --- "use strict" pragma deduplication ---

	o("relative imports removes extra use strict", async () => {
		await setup({
			"a.js": '"use strict"\nvar b = require("./b")',
			"b.js": '"use strict"\nmodule.exports = 1',
		})

		o(await bundle(ns + "a.js")).equals(';(function() {\n"use strict"\nvar b = 1\n}());')
	})
	o("relative imports removes extra use strict using single quotes", async () => {
		await setup({
			"a.js": "'use strict'\nvar b = require(\"./b\")",
			"b.js": "'use strict'\nmodule.exports = 1",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\n'use strict'\nvar b = 1\n}());")
	})
	o("relative imports removes extra use strict using mixed quotes", async () => {
		await setup({
			"a.js": '"use strict"\nvar b = require("./b")',
			"b.js": "'use strict'\nmodule.exports = 1",
		})

		o(await bundle(ns + "a.js")).equals(';(function() {\n"use strict"\nvar b = 1\n}());')
	})

	// --- special import shapes: globals, bare require, fluent/curried use ---

	o("works w/ window", async () => {
		await setup({
			"a.js": 'window.a = 1\nvar b = require("./b")',
			"b.js": "module.exports = function() {return a}",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nwindow.a = 1\nvar b = function() {return a}\n}());")
	})
	o("works without assignment", async () => {
		await setup({
			"a.js": 'require("./b")',
			"b.js": "1 + 1",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\n1 + 1\n}());")
	})
	o("works if used fluently", async () => {
		await setup({
			"a.js": 'var b = require("./b").toString()',
			"b.js": "module.exports = []",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar _0 = []\nvar b = _0.toString()\n}());")
	})
	o("works if used fluently w/ multiline", async () => {
		await setup({
			"a.js": 'var b = require("./b")\n\t.toString()',
			"b.js": "module.exports = []",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar _0 = []\nvar b = _0\n\t.toString()\n}());")
	})
	o("works if used w/ curry", async () => {
		await setup({
			"a.js": 'var b = require("./b")()',
			"b.js": "module.exports = function() {}",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar _0 = function() {}\nvar b = _0()\n}());")
	})
	o("works if used w/ curry w/ multiline", async () => {
		await setup({
			"a.js": 'var b = require("./b")\n()',
			"b.js": "module.exports = function() {}",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar _0 = function() {}\nvar b = _0\n()\n}());")
	})
	o("works if used fluently in one place and not in another", async () => {
		await setup({
			"a.js": 'var b = require("./b").toString()\nvar c = require("./c")',
			"b.js": "module.exports = []",
			"c.js": 'var b = require("./b")\nmodule.exports = function() {return b}',
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar _0 = []\nvar b = _0.toString()\nvar b0 = _0\nvar c = function() {return b0}\n}());")
	})
	o("works if used in sequence", async () => {
		await setup({
			"a.js": 'var b = require("./b"), c = require("./c")',
			"b.js": "module.exports = 1",
			"c.js": "var x\nmodule.exports = 2",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar b = 1\nvar x\nvar c = 2\n}());")
	})
	o("works if assigned to property", async () => {
		await setup({
			"a.js": 'var x = {}\nx.b = require("./b")\nx.c = require("./c")',
			"b.js": "var bb = 1\nmodule.exports = bb",
			"c.js": "var cc = 2\nmodule.exports = cc",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar x = {}\nvar bb = 1\nx.b = bb\nvar cc = 2\nx.c = cc\n}());")
	})
	o("works if assigned to property using bracket notation", async () => {
		await setup({
			"a.js": 'var x = {}\nx["b"] = require("./b")\nx["c"] = require("./c")',
			"b.js": "var bb = 1\nmodule.exports = bb",
			"c.js": "var cc = 2\nmodule.exports = cc",
		})

		o(await bundle(ns + "a.js")).equals(';(function() {\nvar x = {}\nvar bb = 1\nx["b"] = bb\nvar cc = 2\nx["c"] = cc\n}());')
	})

	// --- name-collision disambiguation and binding reuse ---

	o("works if collision", async () => {
		await setup({
			"a.js": 'var b = require("./b")',
			"b.js": "var b = 1\nmodule.exports = 2",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar b0 = 1\nvar b = 2\n}());")
	})
	o("works if multiple aliases", async () => {
		await setup({
			"a.js": 'var b = require("./b")\n',
			"b.js": 'var b = require("./c")\nb.x = 1\nmodule.exports = b',
			"c.js": "var b = {}\nmodule.exports = b",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar b = {}\nb.x = 1\n}());")
	})
	o("works if multiple collision", async () => {
		await setup({
			"a.js": 'var b = require("./b")\nvar c = require("./c")\nvar d = require("./d")',
			"b.js": "var a = 1\nmodule.exports = a",
			"c.js": "var a = 2\nmodule.exports = a",
			"d.js": "var a = 3\nmodule.exports = a",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar a = 1\nvar b = a\nvar a0 = 2\nvar c = a0\nvar a1 = 3\nvar d = a1\n}());")
	})
	o("works if included multiple times", async () => {
		await setup({
			"a.js": "module.exports = 123",
			"b.js": 'var a = require("./a").toString()\nmodule.exports = a',
			"c.js": 'var a = require("./a").toString()\nvar b = require("./b")',
		})

		o(await bundle(ns + "c.js")).equals(";(function() {\nvar _0 = 123\nvar a = _0.toString()\nvar a0 = _0.toString()\nvar b = a0\n}());")
	})
	o("works if included multiple times reverse", async () => {
		await setup({
			"a.js": "module.exports = 123",
			"b.js": 'var a = require("./a").toString()\nmodule.exports = a',
			"c.js": 'var b = require("./b")\nvar a = require("./a").toString()',
		})

		o(await bundle(ns + "c.js")).equals(";(function() {\nvar _0 = 123\nvar a0 = _0.toString()\nvar b = a0\nvar a = _0.toString()\n}());")
	})
	o("reuses binding if possible", async () => {
		await setup({
			"a.js": 'var b = require("./b")\nvar c = require("./c")',
			"b.js": 'var d = require("./d")\nmodule.exports = function() {return d + 1}',
			"c.js": 'var d = require("./d")\nmodule.exports = function() {return d + 2}',
			"d.js": "module.exports = 1",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar d = 1\nvar b = function() {return d + 1}\nvar c = function() {return d + 2}\n}());")
	})
	o("disambiguates conflicts if imported collides with itself", async () => {
		await setup({
			"a.js": 'var b = require("./b")',
			"b.js": "var b = 1\nmodule.exports = function() {return b}",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar b0 = 1\nvar b = function() {return b0}\n}());")
	})
	o("disambiguates conflicts if imported collides with something else", async () => {
		await setup({
			"a.js": 'var a = 1\nvar b = require("./b")',
			"b.js": "var a = 2\nmodule.exports = function() {return a}",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar a = 1\nvar a0 = 2\nvar b = function() {return a0}\n}());")
	})
	o("disambiguates conflicts if imported collides with function declaration", async () => {
		await setup({
			"a.js": 'function a() {}\nvar b = require("./b")',
			"b.js": "var a = 2\nmodule.exports = function() {return a}",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nfunction a() {}\nvar a0 = 2\nvar b = function() {return a0}\n}());")
	})
	o("disambiguates conflicts if imported collides with another module's private", async () => {
		await setup({
			"a.js": 'var b = require("./b")\nvar c = require("./c")',
			"b.js": "var a = 1\nmodule.exports = function() {return a}",
			"c.js": "var a = 2\nmodule.exports = function() {return a}",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar a = 1\nvar b = function() {return a}\nvar a0 = 2\nvar c = function() {return a0}\n}());")
	})

	// --- renaming must not leak into strings or property names ---

	o("does not mess up strings", async () => {
		await setup({
			"a.js": 'var b = require("./b")',
			"b.js": 'var b = "b b b \\" b"\nmodule.exports = function() {return b}',
		})

		o(await bundle(ns + "a.js")).equals(';(function() {\nvar b0 = "b b b \\\" b"\nvar b = function() {return b0}\n}());')
	})
	o("does not mess up properties", async () => {
		await setup({
			"a.js": 'var b = require("./b")',
			"b.js": "var b = {b: 1}\nmodule.exports = function() {return b.b}",
		})

		o(await bundle(ns + "a.js")).equals(";(function() {\nvar b0 = {b: 1}\nvar b = function() {return b0.b}\n}());")
	})
})
|
||||
|
|
@ -13,6 +13,7 @@ const ghPages = require("gh-pages")
|
|||
const upstream = require("./_upstream")
|
||||
const generate = require("./generate-docs")
|
||||
|
||||
module.exports = update
|
||||
async function update() {
|
||||
await generate()
|
||||
const commit = execFileSync("git", ["rev-parse", "--verify", "HEAD"], {
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue