Refactor the scripts to work as advertised

- Correct docs generation to always fetch its dependency
- Don't try to close a handle that's already been closed by other methods
- Allow the release script to actually be testable.
This commit is contained in:
Isiah Meadows 2019-08-17 15:22:35 -04:00
parent 30ad45caa1
commit 22e6d37a26
12 changed files with 483 additions and 241 deletions

View file

@@ -1,7 +1,16 @@
/.vscode
/coverage
/docs/lib
/examples
/node_modules
/jsconfig.json
/npm-debug.log
/.vscode
/.DS_Store
/.eslintcache
# These are artifacts from various scripts
/dist
/archive
/mithril.js
/mithril.min.js
/node_modules
# And the examples are ignored (for now).
/examples

14
.gitignore vendored
View file

@@ -1,10 +1,10 @@
coverage
node_modules
jsconfig.json
npm-debug.log
.vscode
.DS_Store
.eslintcache
/coverage
/node_modules
/jsconfig.json
/npm-debug.log
/.vscode
/.DS_Store
/.eslintcache
# These are artifacts from various scripts
/dist

28
package-lock.json generated
View file

@@ -744,6 +744,14 @@
"semver": "^5.5.0",
"shebang-command": "^1.2.0",
"which": "^1.2.9"
},
"dependencies": {
"semver": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
"dev": true
}
}
},
"dashdash": {
@@ -1063,6 +1071,12 @@
"integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==",
"dev": true
},
"semver": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
"dev": true
},
"strip-ansi": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
@@ -3410,6 +3424,14 @@
"resolve": "^1.10.0",
"semver": "2 || 3 || 4 || 5",
"validate-npm-package-license": "^3.0.1"
},
"dependencies": {
"semver": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
"integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==",
"dev": true
}
}
},
"normalize-path": {
@@ -4140,9 +4162,9 @@
"dev": true
},
"semver": {
"version": "5.6.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-5.6.0.tgz",
"integrity": "sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==",
"version": "6.3.0",
"resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz",
"integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==",
"dev": true
},
"semver-compare": {

View file

@@ -10,8 +10,8 @@
"precommit": "lint-staged",
"watch": "run-p watch:**",
"watch:js": "node bundler/cli browser.js -output mithril.js -watch",
"watch:docs": "node scripts/watch-docs.js",
"build": "run-p build:**",
"watch:docs": "node scripts/generate-docs --watch",
"build": "run-p build:browser build:min",
"build:browser": "node bundler/cli browser.js -output mithril.js",
"build:docs": "node scripts/generate-docs",
"build:min": "node bundler/cli browser.js -output mithril.min.js -minify -save",
@@ -49,6 +49,7 @@
"request": "^2.88.0",
"request-promise-native": "^1.0.7",
"rimraf": "^2.6.3",
"semver": "^6.3.0",
"terser": "^3.16.1"
},
"bin": {

View file

@@ -6,4 +6,7 @@ module.exports = {
"parserOptions": {
"ecmaVersion": 2019,
},
"rules": {
"no-process-env": "off",
},
};

View file

@@ -1,8 +1,7 @@
"use strict"
process.on("unhandledRejection", (e) => { throw e })
function reportExec(e) {
process.on("unhandledRejection", function (e) {
process.exitCode = 1
if (!e.stdout || !e.stderr) return false
console.error(e.stack)
@ -14,23 +13,23 @@ function reportExec(e) {
}
return true
})
module.exports = ({exec, watch}) => {
const index = process.argv.indexOf("--watch")
if (index >= 0) {
process.argv.splice(index, 1)
if (watch == null) {
console.error("Watching this script is not supported!")
// eslint-disable-next-line no-process-exit
process.exit(1)
}
exports.exec = (mod, init) => {
if (require.main === mod) {
// Skip the first tick.
Promise.resolve().then(init).catch((e) => {
// eslint-disable-next-line no-process-exit
if (reportExec(e)) process.exit(1)
else throw e
watch()
} else {
Promise.resolve(exec()).then((code) => {
if (code != null) process.exitCode = code
})
}
}
exports.run = async (init) => {
try {
await init()
} catch (e) {
if (!reportExec(e)) console.error(e)
}
}

View file

@@ -20,18 +20,16 @@ function find(type)
const line = remoteInfo.find((line) => regexp.test(line))
if (line == null) {
console.error(
"An upstream must be configured with both fetch and push!"
)
process.exit(1)
}
return {
branch: line.slice(0, line.indexOf("\t")),
return line == null ? undefined : {
remote: line.slice(0, line.indexOf("\t")),
repo: line.slice(line.lastIndexOf("\t") + 1, -(type.length + 3)),
}
}
exports.fetch = find("fetch")
exports.push = find("push")
if (exports.fetch == null) {
console.error("You must have an upstream to pull from!")
process.exit(1)
}

View file

@@ -1,18 +1,17 @@
"use strict"
const {createReadStream, createWriteStream, promises: fs} = require("fs")
const {promises: fs} = require("fs")
const path = require("path")
const {promisify} = require("util")
const pipeline = promisify(require("stream").pipeline)
const marked = require("marked")
const rimraf = promisify(require("rimraf"))
const copy = require("recursive-copy")
const {execFileSync} = require("child_process")
const escapeRegExp = require("escape-string-regexp")
const HTMLMinifier = require("html-minifier")
const upstream = require("./_upstream")
require("./_command").exec(module, () => generate())
module.exports = generate
const r = (file) => path.resolve(__dirname, "..", file)
// Minify our docs.
const htmlMinifierConfig = {
@@ -38,9 +37,12 @@ const htmlMinifierConfig = {
useShortDoctype: true,
}
module.exports = generate
async function generate() {
const r = (file) => path.resolve(__dirname, "..", file)
return (await makeGenerator()).generate()
}
async function makeGenerator() {
await rimraf(r("dist"))
const [guides, methods, layout, pkg] = await Promise.all([
@@ -53,19 +55,39 @@ async function generate() {
const version = JSON.parse(pkg).version
// Make sure we have the latest archive.
execFileSync("git", [
"fetch", "--depth=1",
upstream.fetch.remote, "gh-pages",
])
// Set up archive directories
execFileSync("git", ["checkout", "gh-pages", "--", "archive"])
execFileSync("git", [
"checkout", `${upstream.fetch.remote}/gh-pages`,
"--", "archive",
])
await fs.rename(r("archive"), r("dist/archive"))
await fs.mkdir(r(`dist/archive/v${version}`), {recursive: true})
// Tell Git to ignore our changes - it's no longer there.
execFileSync("git", ["add", "archive"])
function compilePage(file, markdown) {
return new Generator({version, guides, methods, layout})
}
class Generator {
constructor(opts) {
this._version = opts.version
this._guides = opts.guides
this._methods = opts.methods
this._layout = opts.layout
}
compilePage(file, markdown) {
file = path.basename(file)
const link = new RegExp(
`([ \t]*)(- )(\\[.+?\\]\\(${escapeRegExp(file)}\\))`
)
const src = link.test(guides) ? guides : methods
const src = link.test(this._guides) ? this._guides : this._methods
let body = markdown
// fix pipes in code tags
@@ -100,7 +122,7 @@ async function generate() {
const markedHtml = marked(body)
const title = body.match(/^#([^\n\r]+)/i) || []
let result = layout
let result = this._layout
result = result.replace(
/<title>Mithril\.js<\/title>/,
@@ -108,7 +130,7 @@
)
// update version
result = result.replace(/\[version\]/g, version)
result = result.replace(/\[version\]/g, this._version)
// insert parsed HTML
result = result.replace(/\[body\]/, markedHtml)
@@ -137,46 +159,103 @@ async function generate() {
return result
}
async function generate(file) {
try {
const handle = await fs.open(file, "r")
try {
const relative = path.relative(r("docs"), file)
const archive = r(`dist/archive/v${version}/${relative}`)
await fs.mkdir(path.dirname(archive), {recursive: true})
async eachTarget(relative, init) {
await Promise.all([
init(r(`dist/archive/v${this._version}/${relative}`)),
init(r(`dist/${relative}`)),
])
}
if (file.endsWith(".md")) {
const html = compilePage(file, await handle.readFile("utf-8"))
async generateSingle(file) {
const relative = path.relative(r("docs"), file)
const archived = (target, init) =>
this.eachTarget(target, async (dest) => {
await fs.mkdir(path.dirname(dest), {recursive: true})
await init(dest)
})
if ((/\.(md|html)$/).test(file)) {
await archived(relative, (dest) => fs.copyFile(file, dest))
console.log(`Copied: ${relative}`)
}
else {
let html = await fs.readFile(file, "utf-8")
if (file.endsWith(".md")) html = this.compilePage(file, html)
const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
await fs.writeFile(archive.replace(/\.md$/, ".html"), minified)
} else if (file.endsWith(".html")) {
const html = await handle.readFile("utf-8")
const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
await fs.writeFile(archive, minified)
} else {
await pipeline(
createReadStream(null, {fd: handle.fd}),
createWriteStream(archive)
await archived(
relative.replace(/\.md$/, ".html"),
(dest) => fs.writeFile(dest, minified)
)
console.log(`Compiled: ${relative}`)
}
} finally {
handle.close()
}
} catch (e) {
if (e.code !== "EISDIR") throw e
const files = await fs.readdir(file)
const devOnly = /^layout\.html$|^tutorials$|^archive$|^nav-/
async generateRec(file) {
let files
try {
files = await fs.readdir(file)
}
catch (e) {
if (e.code !== "ENOTDIR") throw e
return this.generateSingle(file)
}
const devOnly = /^layout\.html$|^archive$|^nav-/
// Don't care about the return value here.
await Promise.all(
files
.filter((f) => !devOnly.test(f))
.map((f) => path.join(file, f))
.map(generate)
.map((f) => this.generateRec(path.join(file, f)))
)
}
}
await generate(r("docs"))
await copy(r(`dist/archive/v${version}`), r("dist"))
async generate() {
await this.generateRec(r("docs"))
await copy(r(`dist/archive/v${this._version}`), r("dist"))
// Just ensure it exists.
await (await fs.open(r("dist/.nojekyll"), "a")).close()
}
}
/* eslint-disable global-require */
if (require.main === module) {
require("./_command")({
exec: generate,
async watch() {
let timeout, genPromise
function updateGenerator() {
if (timeout == null) return
clearTimeout(timeout)
genPromise = new Promise((resolve) => {
timeout = setTimeout(function() {
timeout = null
resolve(makeGenerator().then((g) => g.generate()))
}, 100)
})
}
async function updateFile(file) {
if ((/^layout\.html$|^archive$|^nav-/).test(file)) {
updateGenerator()
}
(await genPromise).generateSingle(file)
}
async function removeFile(file) {
(await genPromise).eachTarget(file, (dest) => fs.unlink(dest))
}
require("chokidar").watch(r("docs"), {
ignored: ["archive/**", /(^|\\|\/)\../],
// This depends on `layout`/etc. existing first.
ignoreInitial: true,
awaitWriteFinish: true,
})
.on("ready", updateGenerator)
.on("add", updateFile)
.on("change", updateFile)
.on("unlink", removeFile)
.on("unlinkDir", removeFile)
},
})
}

View file

@@ -1,7 +1,7 @@
#!/usr/bin/env node
"use strict"
const {promises} = require("fs")
const {promises: fs} = require("fs")
const path = require("path")
const {Glob} = require("glob")
const marked = require("marked")
@@ -10,8 +10,6 @@ const babelParser = require("@babel/parser")
// Peer dependency on `request`
const request = require("request-promise-native")
require("./_command").exec(module, () => lint())
// lint rules
class LintRenderer extends marked.Renderer {
constructor(file) {
@@ -61,7 +59,8 @@ class LintRenderer extends marked.Renderer {
}, (e) => {
if (e.statusCode === 404) {
this._emit(`broken external link: ${href}`)
} else {
}
else {
if (
e.error.code === "ERR_TLS_CERT_ALTNAME_INVALID" &&
href.startsWith("https://")
@@ -75,11 +74,12 @@ class LintRenderer extends marked.Renderer {
httpError(e)
}
}))
} else {
}
else {
const exec = (/^([^#?]*\.md)(?:$|\?|#)/).exec(href)
if (exec != null) {
const resolved = path.resolve(this._dir, exec[1])
this._awaiting.push(promises.access(resolved).catch(() => {
this._awaiting.push(fs.access(resolved).catch(() => {
this._emit(`broken internal link: ${href}`)
}))
}
@@ -100,7 +100,8 @@ class LintRenderer extends marked.Renderer {
allowUndeclaredExports: true,
plugins: ["dynamicImport"],
})
} catch (e) {
}
catch (e) {
this._error = e
}
}
@@ -127,7 +128,8 @@ class LintRenderer extends marked.Renderer {
"Code block possibly missing `json` language tag",
this._block(),
)
} catch {
}
catch (_) {
// ignore
}
}
@@ -157,7 +159,16 @@
}
}
function lint() {
exports.lintOne = lintOne
async function lintOne(file) {
const contents = await fs.readFile(file, "utf-8")
const renderer = new LintRenderer(file)
marked(contents, {renderer})
return Promise.all(renderer._awaiting)
}
exports.lintAll = lintAll
function lintAll() {
return new Promise((resolve, reject) => {
const glob = new Glob(path.resolve(__dirname, "../docs/**/*.md"), {
ignore: [
@@ -170,15 +181,29 @@ function lint() {
const awaiting = []
glob.on("match", (file) => {
awaiting.push(promises.readFile(file, "utf-8").then((contents) => {
const renderer = new LintRenderer(file)
marked(contents, {renderer})
return Promise.all(renderer._awaiting)
}))
awaiting.push(lintOne(file))
})
glob.on("error", reject)
glob.on("end", () => resolve(Promise.all(awaiting)))
})
}
module.exports = lint
/* eslint-disable global-require */
if (require.main === module) {
require("./_command")({
exec: lintAll,
watch() {
require("chokidar")
.watch(path.resolve(__dirname, "../docs/**/*.md"), {
ignore: [
"**/change-log.md",
"**/migration-*.md",
"**/node_modules/**",
],
})
.on("add", lintOne)
.on("change", lintOne)
},
})
}

View file

@@ -1,5 +1,4 @@
#!/usr/bin/env node
/* eslint-disable no-process-exit */
"use strict"
// This is my temporary hack to simplify deployment until I fix the underlying
@@ -11,73 +10,57 @@
// helpful to create a release on Travis vs locally, aside from a couple extra
// potential 2FA prompts by npm during login and publish.
if (require.main !== module) {
throw new Error("This is a script, not a module!")
}
const path = require("path")
const fs = require("fs")
const {promises: fsp} = require("fs")
const readline = require("readline")
const {execFileSync} = require("child_process")
const rimraf = require("rimraf")
const {promisify} = require("util")
const rimraf = promisify(require("rimraf"))
const semver = require("semver")
const upstream = require("./_upstream")
const updateDocs = require("./update-docs")
// Fake it until it works with this.
upstream.fetch.remote = "origin"
function showHelp() {
console.error(`
node scripts/release increment [ --preid id ]
node scripts/release increment [ --preid id ] [ --publish ]
Invoke as \`scripts/release.sh\` to invoke the release sequence, specifying the
version increment via \`increment\` (required). Here's how they all work:
Invoke as 'scripts/release.sh' to invoke the release sequence, specifying the
version increment via 'increment' (required). Pass '--publish' to push the
change and publish it, instead of just logging the commands used to push the
release.
- \`major\` increments from 1.0.0 or 2.0.0-beta.0 to 2.0.0
- \`minor\` increments from 1.0.0 to 1.1.0
- \`patch\` increments from 1.0.0 to 1.0.1
- \`premajor\` increments from 1.0.0 to 2.0.0-beta.0
- \`preminor\` increments from 1.0.0 to 1.1.0-beta.0
- \`prepatch\` increments from 1.0.0 to 1.0.1-beta.0
- \`prerelease\` increments from 2.0.0-beta.0 to 2.0.0-beta.1
Here's how each increment type works:
\`--preid beta\` specifies the \`beta\` part above (default). It's required for
all \`pre*\` increment types except \`prerelease\`.
- 'major' increments from 1.0.0 or 2.0.0-beta.0 to 2.0.0
- 'minor' increments from 1.0.0 to 1.1.0
- 'patch' increments from 1.0.0 to 1.0.1
- 'premajor' increments from 1.0.0 to 2.0.0-beta.0
- 'preminor' increments from 1.0.0 to 1.1.0-beta.0
- 'prepatch' increments from 1.0.0 to 1.0.1-beta.0
- 'prerelease' increments from 2.0.0-beta.0 to 2.0.0-beta.1
See the docs for \`npm version\` <https://docs.npmjs.com/cli/version> for
details on the \`increment\` parameter.
`.trim())
process.exit(0)
}
'--preid beta' specifies the 'beta' part above (default). It's required for all
'pre*' increment types except 'prerelease'.
function bail(...args) {
console.error(...args)
process.exit(1)
See the docs for 'npm version' <https://docs.npmjs.com/cli/version> for details
on the 'increment' parameter.
`)
}
const rootDir = path.dirname(__dirname)
const p = (...args) => path.resolve(rootDir, ...args)
function readVersion() {
return JSON.parse(fs.readFileSync(p("../package.json"), "utf-8")).version
function fail(...args) {
console.error(...args)
return 1
}
const parsed = require("minimist")(process.argv.slice(2), {
boolean: ["help"],
alias: {help: ["h", "?"]},
string: ["preid"],
"--": true,
})
parsed._ = parsed._.concat(parsed["--"])
if (parsed.help || !parsed._.length) showHelp()
const publishType = parsed._[0]
const publishPreid = parsed.preid
const publishArgs = publishType.startsWith("pre") ? ["--tag", "next"] : []
let releaseArgs = []
if (publishType.startsWith("pre") && publishType !== "prerelease") {
if (publishPreid == null) {
bail("`pre*` increments other than `prerelease` require `--preid`")
}
releaseArgs = [`--preid=${publishPreid}`]
}
function exec(cmd, args, opts) {
function execCommand(cmd, args, opts) {
console.error()
console.error(["executing:", cmd, ...args].join(" "))
return execFileSync(cmd, args, {
windowsHide: true,
stdio: "inherit",
@@ -86,83 +69,212 @@ function exec(cmd, args, opts) {
})
}
function git(...cmd) { return execCommand("git", cmd) }
function npm(...cmd) { return execCommand("npm", cmd) }
function npmConfig(key) { return npm("config", "get", key).trim() }
function getChanges() {
const result = exec("git", ["status", "-z"], {
return execCommand("git", ["status", "-z"], {
stdio: ["inherit", "pipe", "inherit"],
})
return result.split(/\0/g).filter(Boolean)
.split(/\0/g)
.filter((l) => (/\S/).test(l))
}
async function release({increment, preid, publish}) {
if (!(/^prerelease$|^(pre)?(major|minor|patch)$/).test(increment)) {
return fail(`Invalid increment: ${increment}`)
}
if ((/^pre(major|minor|patch)/).test(increment) && preid == null) {
return fail(`'${increment}' must include a '--preid'`)
}
if (getChanges().length) {
bail("Error: Tree must not be dirty to start!")
return fail("Tree must be clean to start!")
}
const upstream = require("./_upstream")
exec("git", ["checkout", "next"])
exec("git", ["pull", "--rebase", upstream.fetch.branch, "next"])
// Because I'm too lazy to make everything async.
exec("read", ["-rsp", `
Update "Upcoming" in \`docs/change-log.md\`. If moving a prerelease to stable,
also replace all references to \`mithril@next\` to \`mithril\`, including in
Flems snippets. Press enter once ready to continue.
`.trim()], {shell: true})
// Verify the changelog was updated
let changelogUpdated = false
let treeDirty = false
for (const line of getChanges()) {
switch (line) {
case " M CHANGELOG.md":
case "M CHANGELOG.md":
case "MM CHANGELOG.md":
changelogUpdated = true
break
default:
treeDirty = true
}
if (upstream.push == null) {
return fail("You must have an upstream to push to!")
}
if (!changelogUpdated || treeDirty) {
if (!changelogUpdated) console.error("Error: Changelog must be updated!")
if (!treeDirty) console.error("Error: Tree must not be otherwise dirty!")
process.exit(1)
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
})
// Update local `master` and `next`.
git("fetch", upstream.fetch.remote, "master", "next")
// Make sure we're on the current `next` and merge any docs fixes and
// similar that have landed in upstream `master`.
git("checkout", "next")
git("pull", "--rebase", upstream.fetch.remote, "next")
git(
"pull", "--allow-unrelated-histories",
upstream.fetch.remote, "master"
)
// Note: we're doing our own semver incrementing.
const packageJson = JSON.parse(
await fsp.readFile(p("package.json"), "utf-8")
)
const version = semver.inc(packageJson.version, increment, preid)
console.error(`
Copy the parts listed in "Upcoming" to a new section "### v${version}" in
docs/change-log.md and clear that section out.
`)
for (;;) {
await new Promise((resolve) => rl.question(
"Press <Enter> once ready to continue or Ctrl+C to abort.",
// Ignore any input.
() => resolve(),
))
// Verify the changelog was updated, and give a chance to retry if it's
// prematurely continued.
const changes = getChanges()
const isChangelog = /^[ M][ M] docs\/change-log\.md$/
const errors = []
console.log("changes", changes)
if (!changes.some((l) => isChangelog.test(l))) {
errors.push("Changelog must be updated!")
}
exec("git", ["add", "."])
exec("git", ["commit", "-m", "Preparing for release"])
if (changes.some((l) => !isChangelog.test(l))) {
errors.push("Tree must not be otherwise dirty!")
}
exec("git", ["checkout", "master"])
exec("git", ["pull", "--rebase", upstream.fetch.branch, "master"])
// There may be merge conflicts with `index.js` and/or the bundle - just ignore
// them. Whatever they have is canon, as is the case with everything else.
exec("git", ["merge", "next", "--strategy-option=theirs"])
rimraf.sync(p("node_modules"))
exec("npm", ["install-test"])
if (!errors.length) break
console.error(errors.join("\n"))
}
exec("npm", ["version", "-m", "v%s", publishType, ...releaseArgs])
await rimraf(p("node_modules"))
npm("install-test")
npm("run", "build")
console.log("*** Build done ***")
exec("git", ["checkout", "next"])
exec("git", ["checkout", "master", "--", "mithril.js", "mithril.min.js"])
// That's already been updated in `master`.
exec("git", ["commit", "-m", `Generated bundles for ${readVersion()} [skip ci]`])
// Update the package file.
packageJson.version = version
await fsp.writeFile(p("package.json"), "utf-8",
JSON.stringify(packageJson, null, 2)
)
// Commit and tag the new release, with the appropriate CLI flag if the
// commit needs signed.
git("add", ".")
git(
"commit",
...npmConfig("sign-git-tag") === "true" ? ["--gpg-sign"] : [],
"--message", `v${version}`,
)
git("tag", `v${version}`)
exec("git", ["checkout", "master"])
// Update `master` to reflect the current state of `next`.
git("checkout", "master")
git("reset", "--hard", "next")
git("checkout", "next")
console.log("publish args: ", publishArgs)
console.log("push all: ", upstream.push.branch)
// exec("npm", ["login"])
// exec("npm", ["publish", ...publishArgs])
// exec("npm", ["logout"])
if (publish) {
// TODO: switch this to just do the push, and use the following Travis
// config. This also conveniently keeps private stuff out of the build
// scripts and just in build config, avoiding the grief that led to this
// file's existence.
//
// // Only push after successful publish
// exec("git", ["push", "--follow-tags", "origin", "master:master", "next:next"])
// exec("git", ["push", "--follow-tags", upstream.push.branch, "master:master", "next:next"])
// ```yml
// # See https://docs.travis-ci.com/user/deployment/npm/ for details on
// # `api_key:` for the npm provider.
// # See https://docs.travis-ci.com/user/deployment/pages/ for details
// # on `github_token:` for the pages provider.
// after_success: >
// [ "$TRAVIS_BRANCH" == "master" ] && node scripts/generate-docs
//
// deploy:
// - provider: npm
// skip_cleanup: true
// email: 'contact@isiahmeadows.com'
// api_key:
// secure: 'output of `travis encrypt NPM_AUTH_TOKEN`'
// on:
// tags: true
// condition: "$TRAVIS_TAG != *-*"
// - provider: npm
// skip_cleanup: true
// tag: next
// email: 'contact@isiahmeadows.com'
// api_key:
// secure: 'output of `travis encrypt NPM_AUTH_TOKEN`'
// on:
// tags: true
// condition: "$TRAVIS_TAG == *-*"
// - provider: pages
// skip_cleanup: true
// github_token:
// secure: 'output of `travis encrypt GITHUB_AUTH_TOKEN`'
// local_dir: dist
// fqdn: mithril.js.org
// committer_from_gh: true
// on:
// tags: false
// branch: master
// ```
npm("login")
if (increment.startsWith("pre")) {
npm("publish", "--tag", "next")
} else {
npm("publish")
}
npm("logout")
exec("git", ["checkout", "next"])
// Only push after successful publish
git(
"push", "--atomic", "origin",
"+next:master", "next:next", `next:refs/tags/v${version}`,
)
git(
"push", "--atomic", upstream.push.remote,
"+next:master", "next:next", `next:refs/tags/v${version}`,
)
await updateDocs()
} else {
const remote = upstream.push.remote
console.error(`
npm login
npm publish${increment.startsWith("pre") ? " --tag next" : ""}
npm logout
git push --atomic origin +next:master next:next next:refs/tags/v${version}
git push --atomic ${remote} +next:master next:next next:refs/tags/v${version}
npm run release:docs
`)
}
console.log("update docs")
// require("./update-docs")()
console.error(`
Don't forget to update the latest release! You can find it here:
https://github.com/MithrilJS/mithril.js/releases/tag/v${version}
`)
return 0
}
/* eslint-disable global-require */
if (require.main === module) {
require("./_command")({async exec() {
const parsed = require("minimist")(process.argv.slice(2), {
boolean: ["help", "publish"],
alias: {help: ["h", "?"]},
string: ["preid"],
})
if (parsed.help || !parsed._.length) showHelp()
else {
await release({
increment: parsed._[0],
preid: parsed.preid,
publish: parsed.publish,
})
}
}})
}

View file

@@ -13,9 +13,6 @@ const ghPages = require("gh-pages")
const upstream = require("./_upstream")
const generate = require("./generate-docs")
require("./_command")(module, () => update())
module.exports = update
async function update() {
await generate()
const commit = execFileSync("git", ["rev-parse", "--verify", "HEAD"], {
@@ -30,10 +27,17 @@ async function update() {
// force it to go over SSH so the saved keys are used.
// https://github.com/tschaub/gh-pages/issues/160
repo: upstream.push.repo,
remote: upstream.push.branch,
remote: upstream.push.remote,
src: ["**/*", ".nojekyll"],
message: `Generated docs for commit ${commit} [skip ci]`,
// May want to enable this if an API token resolves the issue.
// silent: !!process.env.TRAVIS_CI,
})
console.log("Published!")
}
/* eslint-disable global-require */
if (require.main === module) {
require("./_command")({exec: update})
}

View file

@@ -1,10 +0,0 @@
"use strict"
const path = require("path")
const chokidar = require("chokidar")
const generate = require("./generate-docs")
const command = require("./_command")
chokidar.watch(path.resolve(__dirname, "../docs")).on("all", () => {
command.run(() => generate())
})