diff --git a/.eslintignore b/.eslintignore
index 72bec5e8..a2d34c31 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -1,7 +1,16 @@
-/.vscode
/coverage
-/docs/lib
-/examples
+/node_modules
+/jsconfig.json
+/npm-debug.log
+/.vscode
+/.DS_Store
+/.eslintcache
+
+# These are artifacts from various scripts
+/dist
+/archive
/mithril.js
/mithril.min.js
-/node_modules
+
+# And the examples are ignored (for now).
+/examples
diff --git a/.gitignore b/.gitignore
index aa4d28f3..b6539ad4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,10 +1,10 @@
-coverage
-node_modules
-jsconfig.json
-npm-debug.log
-.vscode
-.DS_Store
-.eslintcache
+/coverage
+/node_modules
+/jsconfig.json
+/npm-debug.log
+/.vscode
+/.DS_Store
+/.eslintcache
# These are artifacts from various scripts
/dist
diff --git a/package-lock.json b/package-lock.json
index e33245c3..1182e7e4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -744,6 +744,14 @@
"semver": "^5.5.0",
"shebang-command": "^1.2.0",
"which": "^1.2.9"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "5.7.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+ "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+ "dev": true
+ }
}
},
"dashdash": {
@@ -1063,6 +1071,12 @@
"integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==",
"dev": true
},
+ "semver": {
+ "version": "5.7.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+ "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+ "dev": true
+ },
"strip-ansi": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
@@ -3410,6 +3424,14 @@
"resolve": "^1.10.0",
"semver": "2 || 3 || 4 || 5",
"validate-npm-package-license": "^3.0.1"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "5.7.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+ "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
+ "dev": true
+ }
}
},
"normalize-path": {
@@ -4140,9 +4162,9 @@
"dev": true
},
"semver": {
- "version": "5.6.0",
- "resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz",
- "integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==",
+ "version": "6.3.0",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
+ "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"dev": true
},
"semver-compare": {
diff --git a/package.json b/package.json
index 04054154..fd7b4892 100644
--- a/package.json
+++ b/package.json
@@ -10,8 +10,8 @@
"precommit": "lint-staged",
"watch": "run-p watch:**",
"watch:js": "node bundler/cli browser.js -output mithril.js -watch",
- "watch:docs": "node scripts/watch-docs.js",
- "build": "run-p build:**",
+ "watch:docs": "node scripts/generate-docs --watch",
+ "build": "run-p build:browser build:min",
"build:browser": "node bundler/cli browser.js -output mithril.js",
"build:docs": "node scripts/generate-docs",
"build:min": "node bundler/cli browser.js -output mithril.min.js -minify -save",
@@ -49,6 +49,7 @@
"request": "^2.88.0",
"request-promise-native": "^1.0.7",
"rimraf": "^2.6.3",
+ "semver": "^6.3.0",
"terser": "^3.16.1"
},
"bin": {
diff --git a/scripts/.eslintrc.js b/scripts/.eslintrc.js
index 424fa8e4..f31bec11 100644
--- a/scripts/.eslintrc.js
+++ b/scripts/.eslintrc.js
@@ -6,4 +6,7 @@ module.exports = {
"parserOptions": {
"ecmaVersion": 2019,
},
+ "rules": {
+ "no-process-env": "off",
+ },
};
diff --git a/scripts/_command.js b/scripts/_command.js
index 3949c7b5..d3bbc085 100644
--- a/scripts/_command.js
+++ b/scripts/_command.js
@@ -1,8 +1,7 @@
"use strict"
-process.on("unhandledRejection", (e) => { throw e })
-
-function reportExec(e) {
+process.on("unhandledRejection", function (e) {
+	process.exitCode = 1; if (!e.stdout || !e.stderr) throw e
if (!e.stdout || !e.stderr) return false
console.error(e.stack)
@@ -14,23 +13,23 @@ function reportExec(e) {
}
return true
-}
+})
-exports.exec = (mod, init) => {
- if (require.main === mod) {
- // Skip the first tick.
- Promise.resolve().then(init).catch((e) => {
+module.exports = ({exec, watch}) => {
+ const index = process.argv.indexOf("--watch")
+ if (index >= 0) {
+ process.argv.splice(index, 1)
+
+ if (watch == null) {
+ console.error("Watching this script is not supported!")
// eslint-disable-next-line no-process-exit
- if (reportExec(e)) process.exit(1)
- else throw e
+ process.exit(1)
+ }
+
+ watch()
+ } else {
+ Promise.resolve(exec()).then((code) => {
+ if (code != null) process.exitCode = code
})
}
}
-
-exports.run = async (init) => {
- try {
- await init()
- } catch (e) {
- if (!reportExec(e)) console.error(e)
- }
-}
diff --git a/scripts/_upstream.js b/scripts/_upstream.js
index 0dc2020f..8c180f9e 100644
--- a/scripts/_upstream.js
+++ b/scripts/_upstream.js
@@ -20,18 +20,16 @@ function find(type) {
const line = remoteInfo.find((line) => regexp.test(line))
- if (line == null) {
- console.error(
- "An upstream must be configured with both fetch and push!"
- )
- process.exit(1)
- }
-
- return {
- branch: line.slice(0, line.indexOf("\t")),
+ return line == null ? undefined : {
+ remote: line.slice(0, line.indexOf("\t")),
repo: line.slice(line.lastIndexOf("\t") + 1, -(type.length + 3)),
}
}
exports.fetch = find("fetch")
exports.push = find("push")
+
+if (exports.fetch == null) {
+ console.error("You must have an upstream to pull from!")
+ process.exit(1)
+}
diff --git a/scripts/generate-docs.js b/scripts/generate-docs.js
index ab3fe851..7a366612 100644
--- a/scripts/generate-docs.js
+++ b/scripts/generate-docs.js
@@ -1,18 +1,17 @@
"use strict"
-const {createReadStream, createWriteStream, promises: fs} = require("fs")
+const {promises: fs} = require("fs")
const path = require("path")
const {promisify} = require("util")
-const pipeline = promisify(require("stream").pipeline)
const marked = require("marked")
const rimraf = promisify(require("rimraf"))
const copy = require("recursive-copy")
const {execFileSync} = require("child_process")
const escapeRegExp = require("escape-string-regexp")
const HTMLMinifier = require("html-minifier")
+const upstream = require("./_upstream")
-require("./_command").exec(module, () => generate())
-module.exports = generate
+const r = (file) => path.resolve(__dirname, "..", file)
// Minify our docs.
const htmlMinifierConfig = {
@@ -38,9 +37,12 @@ const htmlMinifierConfig = {
useShortDoctype: true,
}
+module.exports = generate
async function generate() {
- const r = (file) => path.resolve(__dirname, "..", file)
+ return (await makeGenerator()).generate()
+}
+async function makeGenerator() {
await rimraf(r("dist"))
const [guides, methods, layout, pkg] = await Promise.all([
@@ -53,19 +55,39 @@ async function generate() {
const version = JSON.parse(pkg).version
+ // Make sure we have the latest archive.
+ execFileSync("git", [
+ "fetch", "--depth=1",
+ upstream.fetch.remote, "gh-pages",
+ ])
+
// Set up archive directories
- execFileSync("git", ["checkout", "gh-pages", "--", "archive"])
+ execFileSync("git", [
+ "checkout", `${upstream.fetch.remote}/gh-pages`,
+ "--", "archive",
+ ])
await fs.rename(r("archive"), r("dist/archive"))
await fs.mkdir(r(`dist/archive/v${version}`), {recursive: true})
// Tell Git to ignore our changes - it's no longer there.
execFileSync("git", ["add", "archive"])
- function compilePage(file, markdown) {
+ return new Generator({version, guides, methods, layout})
+}
+
+class Generator {
+ constructor(opts) {
+ this._version = opts.version
+ this._guides = opts.guides
+ this._methods = opts.methods
+ this._layout = opts.layout
+ }
+
+ compilePage(file, markdown) {
file = path.basename(file)
const link = new RegExp(
`([ \t]*)(- )(\\[.+?\\]\\(${escapeRegExp(file)}\\))`
)
- const src = link.test(guides) ? guides : methods
+ const src = link.test(this._guides) ? this._guides : this._methods
let body = markdown
// fix pipes in code tags
@@ -100,7 +122,7 @@ async function generate() {
const markedHtml = marked(body)
const title = body.match(/^#([^\n\r]+)/i) || []
- let result = layout
+ let result = this._layout
result = result.replace(
/
Mithril\.js<\/title>/,
@@ -108,7 +130,7 @@ async function generate() {
)
// update version
- result = result.replace(/\[version\]/g, version)
+ result = result.replace(/\[version\]/g, this._version)
// insert parsed HTML
result = result.replace(/\[body\]/, markedHtml)
@@ -137,46 +159,103 @@ async function generate() {
return result
}
- async function generate(file) {
- try {
- const handle = await fs.open(file, "r")
- try {
- const relative = path.relative(r("docs"), file)
- const archive = r(`dist/archive/v${version}/${relative}`)
- await fs.mkdir(path.dirname(archive), {recursive: true})
+ async eachTarget(relative, init) {
+ await Promise.all([
+ init(r(`dist/archive/v${this._version}/${relative}`)),
+ init(r(`dist/${relative}`)),
+ ])
+ }
- if (file.endsWith(".md")) {
- const html = compilePage(file, await handle.readFile("utf-8"))
- const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
- await fs.writeFile(archive.replace(/\.md$/, ".html"), minified)
- } else if (file.endsWith(".html")) {
- const html = await handle.readFile("utf-8")
- const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
- await fs.writeFile(archive, minified)
- } else {
- await pipeline(
- createReadStream(null, {fd: handle.fd}),
- createWriteStream(archive)
- )
- }
- } finally {
- handle.close()
- }
- } catch (e) {
- if (e.code !== "EISDIR") throw e
- const files = await fs.readdir(file)
- const devOnly = /^layout\.html$|^tutorials$|^archive$|^nav-/
- await Promise.all(
- files
- .filter((f) => !devOnly.test(f))
- .map((f) => path.join(file, f))
- .map(generate)
+ async generateSingle(file) {
+ const relative = path.relative(r("docs"), file)
+ const archived = (target, init) =>
+ this.eachTarget(target, async (dest) => {
+ await fs.mkdir(path.dirname(dest), {recursive: true})
+ await init(dest)
+ })
+
+		if (!(/\.(md|html)$/).test(file)) {
+ await archived(relative, (dest) => fs.copyFile(file, dest))
+ console.log(`Copied: ${relative}`)
+ }
+ else {
+ let html = await fs.readFile(file, "utf-8")
+ if (file.endsWith(".md")) html = this.compilePage(file, html)
+ const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
+ await archived(
+ relative.replace(/\.md$/, ".html"),
+ (dest) => fs.writeFile(dest, minified)
)
+ console.log(`Compiled: ${relative}`)
}
}
- await generate(r("docs"))
- await copy(r(`dist/archive/v${version}`), r("dist"))
- // Just ensure it exists.
- await (await fs.open(r("dist/.nojekyll"), "a")).close()
+ async generateRec(file) {
+ let files
+ try {
+ files = await fs.readdir(file)
+ }
+ catch (e) {
+ if (e.code !== "ENOTDIR") throw e
+ return this.generateSingle(file)
+ }
+
+ const devOnly = /^layout\.html$|^archive$|^nav-/
+ // Don't care about the return value here.
+ await Promise.all(
+ files
+ .filter((f) => !devOnly.test(f))
+ .map((f) => this.generateRec(path.join(file, f)))
+ )
+ }
+
+ async generate() {
+ await this.generateRec(r("docs"))
+ await copy(r(`dist/archive/v${this._version}`), r("dist"))
+ // Just ensure it exists.
+ await (await fs.open(r("dist/.nojekyll"), "a")).close()
+ }
+}
+
+/* eslint-disable global-require */
+if (require.main === module) {
+ require("./_command")({
+ exec: generate,
+ async watch() {
+ let timeout, genPromise
+ function updateGenerator() {
+				// Debounce: cancel any pending regeneration before scheduling anew.
+				clearTimeout(timeout)
+				genPromise = new Promise((resolve) => {
+					timeout = setTimeout(function() {
+						timeout = null
+						resolve(makeGenerator().then((g) => g.generate().then(() => g)))
+ }, 100)
+ })
+ }
+
+ async function updateFile(file) {
+				if ((/^layout\.html$|^archive$|^nav-/).test(path.basename(file))) {
+					return updateGenerator()
+				}
+				await (await genPromise).generateSingle(file)
+ }
+
+ async function removeFile(file) {
+				await (await genPromise).eachTarget(path.relative(r("docs"), file), (dest) => fs.unlink(dest))
+ }
+
+ require("chokidar").watch(r("docs"), {
+ ignored: ["archive/**", /(^|\\|\/)\../],
+ // This depends on `layout`/etc. existing first.
+ ignoreInitial: true,
+ awaitWriteFinish: true,
+ })
+ .on("ready", updateGenerator)
+ .on("add", updateFile)
+ .on("change", updateFile)
+ .on("unlink", removeFile)
+ .on("unlinkDir", removeFile)
+ },
+ })
}
diff --git a/scripts/lint-docs.js b/scripts/lint-docs.js
index 692f5080..d622564d 100644
--- a/scripts/lint-docs.js
+++ b/scripts/lint-docs.js
@@ -1,7 +1,7 @@
#!/usr/bin/env node
"use strict"
-const {promises} = require("fs")
+const {promises: fs} = require("fs")
const path = require("path")
const {Glob} = require("glob")
const marked = require("marked")
@@ -10,8 +10,6 @@ const babelParser = require("@babel/parser")
// Peer dependency on `request`
const request = require("request-promise-native")
-require("./_command").exec(module, () => lint())
-
// lint rules
class LintRenderer extends marked.Renderer {
constructor(file) {
@@ -61,7 +59,8 @@ class LintRenderer extends marked.Renderer {
}, (e) => {
if (e.statusCode === 404) {
this._emit(`broken external link: ${href}`)
- } else {
+ }
+ else {
if (
e.error.code === "ERR_TLS_CERT_ALTNAME_INVALID" &&
href.startsWith("https://")
@@ -75,11 +74,12 @@ class LintRenderer extends marked.Renderer {
httpError(e)
}
}))
- } else {
+ }
+ else {
const exec = (/^([^#?]*\.md)(?:$|\?|#)/).exec(href)
if (exec != null) {
const resolved = path.resolve(this._dir, exec[1])
- this._awaiting.push(promises.access(resolved).catch(() => {
+ this._awaiting.push(fs.access(resolved).catch(() => {
this._emit(`broken internal link: ${href}`)
}))
}
@@ -100,7 +100,8 @@ class LintRenderer extends marked.Renderer {
allowUndeclaredExports: true,
plugins: ["dynamicImport"],
})
- } catch (e) {
+ }
+ catch (e) {
this._error = e
}
}
@@ -127,7 +128,8 @@ class LintRenderer extends marked.Renderer {
"Code block possibly missing `json` language tag",
this._block(),
)
- } catch {
+ }
+ catch (_) {
// ignore
}
}
@@ -157,7 +159,16 @@ class LintRenderer extends marked.Renderer {
}
}
-function lint() {
+exports.lintOne = lintOne
+async function lintOne(file) {
+ const contents = await fs.readFile(file, "utf-8")
+ const renderer = new LintRenderer(file)
+ marked(contents, {renderer})
+ return Promise.all(renderer._awaiting)
+}
+
+exports.lintAll = lintAll
+function lintAll() {
return new Promise((resolve, reject) => {
const glob = new Glob(path.resolve(__dirname, "../docs/**/*.md"), {
ignore: [
@@ -170,15 +181,29 @@ function lint() {
const awaiting = []
glob.on("match", (file) => {
- awaiting.push(promises.readFile(file, "utf-8").then((contents) => {
- const renderer = new LintRenderer(file)
- marked(contents, {renderer})
- return Promise.all(renderer._awaiting)
- }))
+ awaiting.push(lintOne(file))
})
glob.on("error", reject)
glob.on("end", () => resolve(Promise.all(awaiting)))
})
}
-module.exports = lint
+
+/* eslint-disable global-require */
+if (require.main === module) {
+ require("./_command")({
+ exec: lintAll,
+ watch() {
+ require("chokidar")
+ .watch(path.resolve(__dirname, "../docs/**/*.md"), {
+					ignored: [
+ "**/change-log.md",
+ "**/migration-*.md",
+ "**/node_modules/**",
+ ],
+ })
+ .on("add", lintOne)
+ .on("change", lintOne)
+ },
+ })
+}
diff --git a/scripts/release.js b/scripts/release.js
index d4ad3a38..a60cd80f 100644
--- a/scripts/release.js
+++ b/scripts/release.js
@@ -1,5 +1,4 @@
#!/usr/bin/env node
-/* eslint-disable no-process-exit */
"use strict"
// This is my temporary hack to simplify deployment until I fix the underlying
@@ -11,73 +10,57 @@
// helpful to create a release on Travis vs locally, aside from a couple extra
// potential 2FA prompts by npm during login and publish.
-if (require.main !== module) {
- throw new Error("This is a script, not a module!")
-}
-
const path = require("path")
-const fs = require("fs")
+const {promises: fsp} = require("fs")
+const readline = require("readline")
const {execFileSync} = require("child_process")
-const rimraf = require("rimraf")
+const {promisify} = require("util")
+const rimraf = promisify(require("rimraf"))
+const semver = require("semver")
+const upstream = require("./_upstream")
+const updateDocs = require("./update-docs")
+
+// Fake it until it works with this.
+upstream.fetch.remote = "origin"
function showHelp() {
console.error(`
-node scripts/release increment [ --preid id ]
+node scripts/release increment [ --preid id ] [ --publish ]
-Invoke as \`scripts/release.sh\` to invoke the release sequence, specifying the
-version increment via \`increment\` (required). Here's how they all work:
+Invoke as 'scripts/release.sh' to invoke the release sequence, specifying the
+version increment via 'increment' (required). Pass '--publish' to push the
+change and publish it, instead of just logging the commands used to push the
+release.
-- \`major\` increments from 1.0.0 or 2.0.0-beta.0 to 2.0.0
-- \`minor\` increments from 1.0.0 to 1.1.0
-- \`patch\` increments from 1.0.0 to 1.0.1
-- \`premajor\` increments from 1.0.0 to 2.0.0-beta.0
-- \`preminor\` increments from 1.0.0 to 1.1.0-beta.0
-- \`prepatch\` increments from 1.0.0 to 1.0.1-beta.0
-- \`prerelease\` increments from 2.0.0-beta.0 to 2.0.0-beta.1
+Here's how each increment type works:
-\`--preid beta\` specifies the \`beta\` part above (default). It's required for
-all \`pre*\` increment types except \`prerelease\`.
+- 'major' increments from 1.0.0 or 2.0.0-beta.0 to 2.0.0
+- 'minor' increments from 1.0.0 to 1.1.0
+- 'patch' increments from 1.0.0 to 1.0.1
+- 'premajor' increments from 1.0.0 to 2.0.0-beta.0
+- 'preminor' increments from 1.0.0 to 1.1.0-beta.0
+- 'prepatch' increments from 1.0.0 to 1.0.1-beta.0
+- 'prerelease' increments from 2.0.0-beta.0 to 2.0.0-beta.1
-See the docs for \`npm version\` for
-details on the \`increment\` parameter.
-`.trim())
- process.exit(0)
-}
+'--preid beta' specifies the 'beta' part above (default). It's required for all
+'pre*' increment types except 'prerelease'.
-function bail(...args) {
- console.error(...args)
- process.exit(1)
+See the docs for 'npm version' for details
+on the 'increment' parameter.
+`)
}
const rootDir = path.dirname(__dirname)
const p = (...args) => path.resolve(rootDir, ...args)
-function readVersion() {
- return JSON.parse(fs.readFileSync(p("../package.json"), "utf-8")).version
+function fail(...args) {
+ console.error(...args)
+ return 1
}
-const parsed = require("minimist")(process.argv.slice(2), {
- boolean: ["help"],
- alias: {help: ["h", "?"]},
- string: ["preid"],
- "--": true,
-})
-parsed._ = parsed._.concat(parsed["--"])
-
-if (parsed.help || !parsed._.length) showHelp()
-const publishType = parsed._[0]
-const publishPreid = parsed.preid
-const publishArgs = publishType.startsWith("pre") ? ["--tag", "next"] : []
-let releaseArgs = []
-
-if (publishType.startsWith("pre") && publishType !== "prerelease") {
- if (publishPreid == null) {
- bail("`pre*` increments other than `prerelease` require `--preid`")
- }
- releaseArgs = [`--preid=${publishPreid}`]
-}
-
-function exec(cmd, args, opts) {
+function execCommand(cmd, args, opts) {
+ console.error()
+ console.error(["executing:", cmd, ...args].join(" "))
return execFileSync(cmd, args, {
windowsHide: true,
stdio: "inherit",
@@ -86,83 +69,212 @@ function exec(cmd, args, opts) {
})
}
+function git(...cmd) { return execCommand("git", cmd) }
+function npm(...cmd) { return execCommand("npm", cmd) }
+function npmConfig(key) { return execCommand("npm", ["config", "get", key], {stdio: ["inherit", "pipe", "inherit"]}).trim() }
+
function getChanges() {
- const result = exec("git", ["status", "-z"], {
+ return execCommand("git", ["status", "-z"], {
stdio: ["inherit", "pipe", "inherit"],
})
- return result.split(/\0/g).filter(Boolean)
+ .split(/\0/g)
+ .filter((l) => (/\S/).test(l))
}
-if (getChanges().length) {
- bail("Error: Tree must not be dirty to start!")
-}
-
-const upstream = require("./_upstream")
-
-exec("git", ["checkout", "next"])
-exec("git", ["pull", "--rebase", upstream.fetch.branch, "next"])
-
-// Because I'm too lazy to make everything async.
-exec("read", ["-rsp", `
-Update "Upcoming" in \`docs/change-log.md\`. If moving a prerelease to stable,
-also replace all references to \`mithril@next\` to \`mithril\`, including in
-Flems snippets. Press enter once ready to continue.
-`.trim()], {shell: true})
-
-// Verify the changelog was updated
-let changelogUpdated = false
-let treeDirty = false
-
-for (const line of getChanges()) {
- switch (line) {
- case " M CHANGELOG.md":
- case "M CHANGELOG.md":
- case "MM CHANGELOG.md":
- changelogUpdated = true
- break
-
- default:
- treeDirty = true
+async function release({increment, preid, publish}) {
+ if (!(/^prerelease$|^(pre)?(major|minor|patch)$/).test(increment)) {
+ return fail(`Invalid increment: ${increment}`)
}
+
+ if ((/^pre(major|minor|patch)/).test(increment) && preid == null) {
+ return fail(`'${increment}' must include a '--preid'`)
+ }
+
+ if (getChanges().length) {
+ return fail("Tree must be clean to start!")
+ }
+
+ if (upstream.push == null) {
+ return fail("You must have an upstream to push to!")
+ }
+
+ const rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout,
+ })
+
+ // Update local `master` and `next`.
+ git("fetch", upstream.fetch.remote, "master", "next")
+
+ // Make sure we're on the current `next` and merge any docs fixes and
+ // similar that have landed in upstream `master`.
+ git("checkout", "next")
+ git("pull", "--rebase", upstream.fetch.remote, "next")
+ git(
+ "pull", "--allow-unrelated-histories",
+ upstream.fetch.remote, "master"
+ )
+
+ // Note: we're doing our own semver incrementing.
+ const packageJson = JSON.parse(
+ await fsp.readFile(p("package.json"), "utf-8")
+ )
+ const version = semver.inc(packageJson.version, increment, preid)
+
+ console.error(`
+Copy the parts listed in "Upcoming" to a new section "### v${version}" in
+docs/change-log.md and clear that section out.
+`)
+
+ for (;;) {
+ await new Promise((resolve) => rl.question(
+ "Press once ready to continue or Ctrl+C to abort.",
+ // Ignore any input.
+ () => resolve(),
+ ))
+
+ // Verify the changelog was updated, and give a chance to retry if it's
+ // prematurely continued.
+ const changes = getChanges()
+ const isChangelog = /^[ M][ M] docs\/change-log\.md$/
+ const errors = []
+
+ console.log("changes", changes)
+
+ if (!changes.some((l) => isChangelog.test(l))) {
+ errors.push("Changelog must be updated!")
+ }
+
+ if (changes.some((l) => !isChangelog.test(l))) {
+ errors.push("Tree must not be otherwise dirty!")
+ }
+
+ if (!errors.length) break
+ console.error(errors.join("\n"))
+ }
+
+ await rimraf(p("node_modules"))
+ npm("install-test")
+ npm("run", "build")
+ console.log("*** Build done ***")
+
+ // Update the package file.
+ packageJson.version = version
+	await fsp.writeFile(p("package.json"),
+		JSON.stringify(packageJson, null, 2) + "\n", "utf-8"
+	)
+ // Commit and tag the new release, with the appropriate CLI flag if the
+ // commit needs signed.
+ git("add", ".")
+ git(
+ "commit",
+ ...npmConfig("sign-git-tag") === "true" ? ["--gpg-sign"] : [],
+ "--message", `v${version}`,
+ )
+ git("tag", `v${version}`)
+
+ // Update `master` to reflect the current state of `next`.
+ git("checkout", "master")
+ git("reset", "--hard", "next")
+ git("checkout", "next")
+
+ if (publish) {
+ // TODO: switch this to just do the push, and use the following Travis
+ // config. This also conveniently keeps private stuff out of the build
+ // scripts and just in build config, avoiding the grief that led to this
+ // file's existence.
+ //
+ // ```yml
+ // # See https://docs.travis-ci.com/user/deployment/npm/ for details on
+ // # `api_key:` for the npm provider.
+ // # See https://docs.travis-ci.com/user/deployment/pages/ for details
+ // # on `github_token:` for the pages provider.
+ // after_success: >
+ // [ "$TRAVIS_BRANCH" == "master" ] && node scripts/generate-docs
+ //
+ // deploy:
+ // - provider: npm
+ // skip_cleanup: true
+ // email: 'contact@isiahmeadows.com'
+ // api_key:
+ // secure: 'output of `travis encrypt NPM_AUTH_TOKEN`'
+ // on:
+ // tags: true
+ // condition: "$TRAVIS_TAG != *-*"
+ // - provider: npm
+ // skip_cleanup: true
+ // tag: next
+ // email: 'contact@isiahmeadows.com'
+ // api_key:
+ // secure: 'output of `travis encrypt NPM_AUTH_TOKEN`'
+ // on:
+ // tags: true
+ // condition: "$TRAVIS_TAG == *-*"
+ // - provider: pages
+ // skip_cleanup: true
+ // github_token:
+ // secure: 'output of `travis encrypt GITHUB_AUTH_TOKEN`'
+ // local_dir: dist
+ // fqdn: mithril.js.org
+ // committer_from_gh: true
+ // on:
+ // tags: false
+ // branch: master
+ // ```
+ npm("login")
+ if (increment.startsWith("pre")) {
+ npm("publish", "--tag", "next")
+ } else {
+ npm("publish")
+ }
+ npm("logout")
+
+ // Only push after successful publish
+ git(
+ "push", "--atomic", "origin",
+ "+next:master", "next:next", `next:refs/tags/v${version}`,
+ )
+ git(
+ "push", "--atomic", upstream.push.remote,
+ "+next:master", "next:next", `next:refs/tags/v${version}`,
+ )
+ await updateDocs()
+ } else {
+ const remote = upstream.push.remote
+ console.error(`
+npm login
+npm publish${increment.startsWith("pre") ? " --tag next" : ""}
+npm logout
+git push --atomic origin +next:master next:next next:refs/tags/v${version}
+git push --atomic ${remote} +next:master next:next next:refs/tags/v${version}
+npm run release:docs
+`)
+ }
+
+ console.error(`
+Don't forget to update the latest release! You can find it here:
+https://github.com/MithrilJS/mithril.js/releases/tag/v${version}
+`)
+
+ return 0
}
-if (!changelogUpdated || treeDirty) {
- if (!changelogUpdated) console.error("Error: Changelog must be updated!")
- if (!treeDirty) console.error("Error: Tree must not be otherwise dirty!")
- process.exit(1)
+/* eslint-disable global-require */
+if (require.main === module) {
+ require("./_command")({async exec() {
+ const parsed = require("minimist")(process.argv.slice(2), {
+ boolean: ["help", "publish"],
+ alias: {help: ["h", "?"]},
+ string: ["preid"],
+ })
+
+ if (parsed.help || !parsed._.length) showHelp()
+ else {
+ await release({
+ increment: parsed._[0],
+ preid: parsed.preid,
+ publish: parsed.publish,
+ })
+ }
+ }})
}
-
-exec("git", ["add", "."])
-exec("git", ["commit", "-m", "Preparing for release"])
-
-exec("git", ["checkout", "master"])
-exec("git", ["pull", "--rebase", upstream.fetch.branch, "master"])
-// There may be merge conflicts with `index.js` and/or the bundle - just ignore
-// them. Whatever they have is canon, as is the case with everything else.
-exec("git", ["merge", "next", "--strategy-option=theirs"])
-rimraf.sync(p("node_modules"))
-exec("npm", ["install-test"])
-
-exec("npm", ["version", "-m", "v%s", publishType, ...releaseArgs])
-
-exec("git", ["checkout", "next"])
-exec("git", ["checkout", "master", "--", "mithril.js", "mithril.min.js"])
-// That's already been updated in `master`.
-exec("git", ["commit", "-m", `Generated bundles for ${readVersion()} [skip ci]`])
-
-exec("git", ["checkout", "master"])
-
-console.log("publish args: ", publishArgs)
-console.log("push all: ", upstream.push.branch)
-// exec("npm", ["login"])
-// exec("npm", ["publish", ...publishArgs])
-// exec("npm", ["logout"])
-//
-// // Only push after successful publish
-// exec("git", ["push", "--follow-tags", "origin", "master:master", "next:next"])
-// exec("git", ["push", "--follow-tags", upstream.push.branch, "master:master", "next:next"])
-
-exec("git", ["checkout", "next"])
-
-console.log("update docs")
-// require("./update-docs")()
diff --git a/scripts/update-docs.js b/scripts/update-docs.js
index d21fe053..a410d104 100644
--- a/scripts/update-docs.js
+++ b/scripts/update-docs.js
@@ -13,9 +13,6 @@ const ghPages = require("gh-pages")
const upstream = require("./_upstream")
const generate = require("./generate-docs")
-require("./_command")(module, () => update())
-module.exports = update
-
async function update() {
await generate()
const commit = execFileSync("git", ["rev-parse", "--verify", "HEAD"], {
@@ -30,10 +27,17 @@ async function update() {
// force it to go over SSH so the saved keys are used.
// https://github.com/tschaub/gh-pages/issues/160
repo: upstream.push.repo,
- remote: upstream.push.branch,
+ remote: upstream.push.remote,
src: ["**/*", ".nojekyll"],
message: `Generated docs for commit ${commit} [skip ci]`,
// May want to enable this if an API token resolves the issue.
// silent: !!process.env.TRAVIS_CI,
})
+
+ console.log("Published!")
+}
+
+/* eslint-disable global-require */
+if (require.main === module) {
+ require("./_command")({exec: update})
}
diff --git a/scripts/watch-docs.js b/scripts/watch-docs.js
deleted file mode 100644
index b6e25c05..00000000
--- a/scripts/watch-docs.js
+++ /dev/null
@@ -1,10 +0,0 @@
-"use strict"
-
-const path = require("path")
-const chokidar = require("chokidar")
-const generate = require("./generate-docs")
-const command = require("./_command")
-
-chokidar.watch(path.resolve(__dirname, "../docs")).on("all", () => {
- command.run(() => generate())
-})