Refactor scripts (#2465)

* Refactor all kinds of scripts

* Update docs to ensure linter passes
This commit is contained in:
Isiah Meadows 2019-07-27 15:12:49 -04:00 committed by GitHub
parent 62172cbe08
commit 48e7fd1711
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
23 changed files with 1695 additions and 340 deletions

6
scripts/.eslintrc.js Normal file
View file

@@ -0,0 +1,6 @@
// ESLint config for the scripts directory: inherit the repo root config,
// but allow ES2019 syntax (these scripts only run under Node, not in
// browsers, so newer syntax such as optional catch bindings is fine).
module.exports = {
	"extends": "../.eslintrc.js",
	"parserOptions": {
		"ecmaVersion": 2019,
	},
};

36
scripts/_command.js Normal file
View file

@@ -0,0 +1,36 @@
"use strict"
process.on("unhandledRejection", (e) => { throw e })
// Report an error that came from a child process, printing its stack plus
// any captured stdout/stderr. Returns `false` (without printing anything)
// when the error doesn't carry both output streams, so callers can fall
// back to their own handling.
function reportExec(e) {
	if (!e.stdout || !e.stderr) return false
	console.error(e.stack)
	// Print whichever captured streams are non-empty, stdout first.
	for (const captured of [e.stdout, e.stderr]) {
		if (captured.length) console.error(captured.toString("utf-8"))
	}
	return true
}
exports.exec = (mod, init) => {
if (require.main === mod) {
// Skip the first tick.
Promise.resolve().then(init).catch((e) => {
// eslint-disable-next-line no-process-exit
if (reportExec(e)) process.exit(1)
else throw e
})
}
}
exports.run = async (init) => {
try {
await init()
} catch (e) {
if (!reportExec(e)) console.error(e)
}
}

37
scripts/_upstream.js Normal file
View file

@@ -0,0 +1,37 @@
/* eslint-disable no-process-exit */
"use strict"
const {execFileSync} = require("child_process")
// Output of `git remote -v`, split into lines. Note: this runs at module
// load time, so merely requiring this file shells out to git.
const remoteInfo = execFileSync("git", ["remote", "-v"], {
	windowsHide: true,
	stdio: ["inherit", "pipe", "inherit"],
	encoding: "utf-8",
}).trim().split(/\r\n?|\n/g)
// Find the remote entry pointing at the upstream MithrilJS/mithril.js repo
// for the given `type` ("fetch" or "push"); exits the process with code 1
// when no such remote is configured.
function find(type) {
	// Accepts http(s)/git/ssh URL forms as well as the scp-like
	// `git@github.com:` form.
	// NOTE(review): this requires a literal `.git` suffix on the remote
	// URL - remotes added without it won't match. Confirm that's intended.
	const regexp = new RegExp(
		"\t(?:" +
			"(?:(?:git+)?https?|git|ssh)://(?:[^@\\s]+@)?github\\.com/|" +
			"git@github\\.com:" +
		")" +
		`MithrilJS/mithril\\.js\\.git \\(${type}\\)$`
	)
	const line = remoteInfo.find((line) => regexp.test(line))
	if (line == null) {
		console.error(
			"An upstream must be configured with both fetch and push!"
		)
		process.exit(1)
	}
	return {
		// NOTE(review): despite the name, this is the remote *name* (the
		// text before the tab, e.g. "upstream"), not a branch - callers
		// pass it where git expects a remote. Consider renaming.
		branch: line.slice(0, line.indexOf("\t")),
		// The remote URL, minus the trailing " (fetch)"/" (push)" marker
		// (`type.length + 3` covers the space, parens, and type).
		repo: line.slice(line.lastIndexOf("\t") + 1, -(type.length + 3)),
	}
}
exports.fetch = find("fetch")
exports.push = find("push")

176
scripts/generate-docs.js Normal file
View file

@@ -0,0 +1,176 @@
"use strict"
const {createReadStream, createWriteStream, promises: fs} = require("fs")
const path = require("path")
const {promisify} = require("util")
const pipeline = promisify(require("stream").pipeline)
const marked = require("marked")
const rimraf = promisify(require("rimraf"))
const copy = require("recursive-copy")
const {execFileSync} = require("child_process")
const escapeRegExp = require("escape-string-regexp")
const HTMLMinifier = require("html-minifier")
require("./_command").exec(module, () => generate())
module.exports = generate
// Minify our docs.
const htmlMinifierConfig = {
collapseBooleanAttributes: true,
collapseWhitespace: true,
conservativeCollapse: true,
continueOnParseError: true,
minifyCss: {
compatibility: "ie9",
},
minifyJs: true,
minifyUrls: true,
preserveLineBreaks: true,
removeAttributeQuotes: true,
removeCdatasectionsFromCdata: true,
removeComments: true,
removeCommentsFromCdata: true,
removeEmptyAttributes: true,
removeOptionalTags: true,
removeRedundantAttributes: true,
removeScriptTypeAttributes: true,
removeStyleLinkTypeAttributes: true,
useShortDoctype: true,
}
// Build the docs site into dist/: compile every markdown page to HTML,
// archive the result under dist/archive/v<version>, then copy that archive
// to the site root so the newest version doubles as the live site.
async function generate() {
	// Resolve a path relative to the repo root.
	const r = (file) => path.resolve(__dirname, "..", file)
	await rimraf(r("dist"))
	const [guides, methods, layout, pkg] = await Promise.all([
		fs.readFile(r("docs/nav-guides.md"), "utf-8"),
		fs.readFile(r("docs/nav-methods.md"), "utf-8"),
		fs.readFile(r("docs/layout.html"), "utf-8"),
		fs.readFile(r("package.json"), "utf-8"),
		fs.mkdir(r("dist"), {recursive: true}),
	])
	const version = JSON.parse(pkg).version
	// Set up archive directories: pull the previously published archives
	// out of the gh-pages branch into the working tree, then move them.
	execFileSync("git", ["checkout", "gh-pages", "--", "archive"])
	await fs.rename(r("archive"), r("dist/archive"))
	await fs.mkdir(r(`dist/archive/v${version}`), {recursive: true})
	// Render one markdown page into a complete HTML document using
	// layout.html as the shell.
	function compilePage(file, markdown) {
		file = path.basename(file)
		// This page's entry in the nav markdown, captured as
		// (indent)(bullet)(link) so it can be bolded and expanded below.
		const link = new RegExp(
			`([ \t]*)(- )(\\[.+?\\]\\(${escapeRegExp(file)}\\))`
		)
		// Pick whichever nav menu actually links to this page.
		const src = link.test(guides) ? guides : methods
		let body = markdown
		// fix pipes in code tags
		body = body.replace(/`((?:\S| -> |, )+)(\|)(\S+)`/gim,
			(match, a, b, c) =>
				`<code>${(a + b + c).replace(/\|/g, "&#124;")}</code>`
		)
		// inject menu: bold this page's nav entry and, when the page has
		// its own top-of-page bullet list, nest it under that entry.
		body = body.replace(
			/(^# .+?(?:\r?\n){2,}?)(?:(-(?:.|\r|\n)+?)((?:\r?\n){2,})|)/m,
			(match, title, nav) => {
				if (!nav) {
					return title + src.replace(link, "$1$2**$3**") + "\n\n"
				}
				return title + src.replace(link, (match, space, li, link) =>
					`${space}${li}**${link}**\n${
						nav.replace(/(^|\n)/g, `$1\t${space}`)
					}`
				) + "\n\n"
			}
		)
		// fix links: internal .md links become .html; external ones keep
		// their extension.
		body = body.replace(/(\]\([^\)]+)(\.md)/gim, (match, path, extension) =>
			path + ((/http/).test(path) ? extension : ".html")
		)
		// Fix type signatures containing Array<...>
		body = body.replace(/(\W)Array<([^/<]+?)>/gim, "$1Array&lt;$2&gt;")
		const markedHtml = marked(body)
		// First `# heading` becomes the <title>.
		const title = body.match(/^#([^\n\r]+)/i) || []
		let result = layout
		result = result.replace(
			/<title>Mithril\.js<\/title>/,
			`<title>${title[1]} - Mithril.js</title>`
		)
		// update version
		result = result.replace(/\[version\]/g, version)
		// insert parsed HTML
		result = result.replace(/\[body\]/, markedHtml)
		// fix anchors: strip markup from heading ids, de-duplicate repeated
		// ids with a numeric suffix, and make each heading a self-link.
		const anchorIds = new Map()
		result = result.replace(
			/<h([1-6]) id="([^"]+)">(.+?)<\/h\1>/gim,
			(match, n, id, text) => {
				let anchor = text.toLowerCase()
					.replace(/<(\/?)code>/g, "")
					.replace(/<a.*?>.+?<\/a>/g, "")
					.replace(/\.|\[|\]|&quot;|\/|\(|\)/g, "")
					.replace(/\s/g, "-");
				const anchorId = anchorIds.get(anchor)
				anchorIds.set(anchor, anchorId != null ? anchorId + 1 : 0)
				if (anchorId != null) anchor += anchorId
				return `<h${n} id="${anchor}">` +
					`<a href="#${anchor}">${text}</a>` +
					`</h${n}>`
			}
		)
		return result
	}
	// Recursively process `file`: markdown is compiled + minified, HTML is
	// minified, anything else is byte-copied; directories recurse. (This
	// deliberately shadows the outer `generate` - recursion stays internal.)
	async function generate(file) {
		try {
			// NOTE(review): this handle is never closed, so every file
			// processed leaks a descriptor for the lifetime of the build.
			const handle = await fs.open(file, "r")
			const relative = path.relative(r("docs"), file)
			const archive = r(`dist/archive/v${version}/${relative}`)
			await fs.mkdir(path.dirname(archive), {recursive: true})
			if (file.endsWith(".md")) {
				const html = compilePage(file, await handle.readFile("utf-8"))
				const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
				await fs.writeFile(archive.replace(/\.md$/, ".html"), minified)
			} else if (file.endsWith(".html")) {
				const html = await handle.readFile("utf-8")
				const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
				await fs.writeFile(archive, minified)
			} else {
				await pipeline(
					createReadStream(null, {fd: handle.fd}),
					createWriteStream(archive)
				)
			}
		} catch (e) {
			// Directories are detected via the EISDIR failure above -
			// recurse into them instead of treating it as an error.
			// NOTE(review): platform-dependent; confirm EISDIR is raised
			// at open/read time for directories on all supported OSes.
			if (e.code !== "EISDIR") throw e
			const files = await fs.readdir(file)
			// Build-time-only files that must not be published.
			const devOnly = /^layout\.html$|^tutorials$|^archive$|^nav-/
			await Promise.all(
				files
					.filter((f) => !devOnly.test(f))
					.map((f) => path.join(file, f))
					.map(generate)
			)
		}
	}
	await generate(r("docs"))
	// The latest version's archive doubles as the live site root.
	await copy(r(`dist/archive/v${version}`), r("dist"))
	// Just ensure it exists.
	await (await fs.open(r("dist/.nojekyll"), "a")).close()
}

184
scripts/lint-docs.js Normal file
View file

@@ -0,0 +1,184 @@
#!/usr/bin/env node
"use strict"
const {promises} = require("fs")
const path = require("path")
const {Glob} = require("glob")
const marked = require("marked")
// Accept just about anything
const babelParser = require("@babel/parser")
// Peer dependency on `request`
const request = require("request-promise-native")
// Run `lint` when this file is invoked directly as a script.
require("./_command").exec(module, () => lint())
// lint rules
// A marked renderer that lints instead of rendering: as marked walks a docs
// file, this checks external/internal links and fenced code blocks, logging
// problems to the console. Fatal problems flip `process.exitCode` to 1.
class LintRenderer extends marked.Renderer {
	constructor(file) {
		super()
		this._file = file
		this._dir = path.dirname(file)
		this._context = undefined
		// State for the code block currently being checked.
		this._code = undefined
		this._lang = undefined
		this._error = undefined
		// Pending async checks (HTTP HEAD probes, fs access). The caller
		// must await all of these after parsing finishes.
		this._awaiting = []
	}
	// Log a problem without failing the run (used for flaky network
	// conditions we can't do anything about).
	_emitTolerate(...data) {
		let str = data.join("\n")
		if (str.endsWith("\n")) str = str.slice(0, -1)
		console.log(`${this._file} - ${str}\n${"-".repeat(60)}`)
	}
	// Log a problem and mark the lint run as failed.
	_emit(...data) {
		this._emitTolerate(...data)
		process.exitCode = 1
	}
	// Re-render the current code block as fenced markdown for error output.
	_block() {
		return `\`\`\`${this._lang || ""}\n${this._code}\n\`\`\``
	}
	link(href) {
		// Don't fail if something byzantine shows up - it's the freaking
		// internet. Just log it and move on.
		const httpError = (e) =>
			this._emitTolerate(`http error for ${href}`, e.message)
		// Prefer https: > http: where possible, but allow http: when https: is
		// inaccessible.
		if ((/^https?:\/\//).test(href)) {
			const url = href.replace(/#.*$/, "")
			this._awaiting.push(request.head(url).then(() => {
				const isHTTPS = href.startsWith("https:")
				if (!isHTTPS) {
					// `url` starts with "http://", so slice off just the
					// "http:" scheme (5 chars) and keep the "//". (The old
					// `url.slice(7)` also dropped the slashes, producing an
					// invalid URL, so this upgrade hint never fired.)
					return request.head(`https:${url.slice(5)}`).then(
						() => this._emit("change http: to https:"),
						() => { /* ignore inner errors */ }
					)
				}
			}, (e) => {
				if (e.statusCode === 404) {
					this._emit(`broken external link: ${href}`)
				} else {
					if (
						e.error.code === "ERR_TLS_CERT_ALTNAME_INVALID" &&
						href.startsWith("https://")
					) {
						// "https://" is 8 chars; slicing 6 keeps the "//".
						return request.head(`http:${url.slice(6)}`).then(
							() => this._emit(`change ${href} to use http:`),
							// ignore inner errors
							() => httpError(e)
						)
					}
					httpError(e)
				}
			}))
		} else {
			// Relative links: only `.md` targets are checked for existence.
			const exec = (/^([^#?]*\.md)(?:$|\?|#)/).exec(href)
			if (exec != null) {
				const resolved = path.resolve(this._dir, exec[1])
				this._awaiting.push(promises.access(resolved).catch(() => {
					this._emit(`broken internal link: ${href}`)
				}))
			}
		}
	}
	code(code, lang) {
		this._code = code
		this._lang = lang
		// Reset the previous block's parse result - without this, a syntax
		// error from an earlier block leaked into later blocks' checks.
		this._error = undefined
		if (!lang || lang === "js" || lang === "javascript") {
			try {
				// Could be within any production.
				babelParser.parse(code, {
					sourceType: "unambiguous",
					allowReturnOutsideFunction: true,
					allowAwaitOutsideFunction: true,
					allowSuperOutsideMethod: true,
					allowUndeclaredExports: true,
					plugins: ["dynamicImport"],
				})
			} catch (e) {
				this._error = e
			}
		}
		this._ensureCodeIsHighlightable()
		this._ensureCodeHasConsistentTag()
		this._ensureCodeIsSyntaticallyValid()
		this._ensureCommentStyle()
	}
	_ensureCodeIsHighlightable() {
		// We only care about what's not tagged here.
		if (!this._lang) {
			// TODO: ensure all code blocks have tags, and check this in CI.
			if (this._error == null) {
				this._emit(
					"Code block possibly missing `javascript` language tag",
					this._block(),
				)
			}
			try {
				JSON.parse(this._code)
				this._emit(
					"Code block possibly missing `json` language tag",
					this._block(),
				)
			} catch {
				// ignore
			}
		}
	}
	_ensureCodeHasConsistentTag() {
		// `javascript` is the canonical tag in these docs, not `js`.
		if (this._lang === "js") {
			this._emit("JS code block has wrong language tag", this._block())
		}
	}
	_ensureCodeIsSyntaticallyValid() {
		// Fixed: this previously read `this.lang` (no underscore), which is
		// always undefined, so the check never ran at all.
		if (!this._lang || !(/^js$|^javascript$/).test(this._lang)) return
		if (this._error != null) {
			this._emit(
				"JS code block has invalid syntax", this._error.message,
				this._block()
			)
		}
	}
	_ensureCommentStyle() {
		// Fixed: same `this.lang` typo as above - the check was dead code.
		if (!this._lang || !(/^js$|^javascript$/).test(this._lang)) return
		if ((/(^|\s)\/\/[\S]/).test(this._code)) {
			this._emit("Comment is missing a preceding space", this._block())
		}
	}
}
// Lint every markdown file under docs/ (excluding the changelog, migration
// guides, and anything in node_modules). Resolves once every file has been
// parsed and all of its async link checks have settled.
function lint() {
	return new Promise((resolve, reject) => {
		const glob = new Glob(path.resolve(__dirname, "../docs/**/*.md"), {
			ignore: [
				"**/change-log.md",
				"**/migration-*.md",
				"**/node_modules/**",
			],
			nodir: true,
		})
		const awaiting = []
		glob.on("match", (file) => {
			awaiting.push(promises.readFile(file, "utf-8").then((contents) => {
				// Parsing with the lint renderer performs the checks as a
				// side effect; the rendered output itself is discarded.
				const renderer = new LintRenderer(file)
				marked(contents, {renderer})
				return Promise.all(renderer._awaiting)
			}))
		})
		glob.on("error", reject)
		glob.on("end", () => resolve(Promise.all(awaiting)))
	})
}
module.exports = lint

153
scripts/release.js Normal file
View file

@@ -0,0 +1,153 @@
#!/usr/bin/env node
/* eslint-disable no-process-exit */
"use strict"
// This is my temporary hack to simplify deployment until I fix the underlying
// problems in these bugs:
// - https://github.com/MithrilJS/mithril.js/issues/2417
// - https://github.com/MithrilJS/mithril.js/pull/2422
//
// Depending on the complexity, it might become permanent. It really isn't that
// helpful to create a release on Travis vs locally, aside from a couple extra
// potential 2FA prompts by npm during login and publish.
// This file is an executable script - refuse to be require()d as a module.
if (require.main !== module) {
	throw new Error("This is a script, not a module!")
}
const path = require("path")
const fs = require("fs")
const {execFileSync} = require("child_process")
const rimraf = require("rimraf")
// Print usage information and exit successfully. Reached via `--help`/`-h`
// or when no increment argument is supplied.
function showHelp() {
	const usage = `
scripts/release.sh increment [ --preid id ]
Invoke as \`scripts/release.sh\` to invoke the release sequence, specifying the
version increment via \`increment\` (required). Here's how they all work:
- \`major\` increments from 1.0.0 or 2.0.0-beta.0 to 2.0.0
- \`minor\` increments from 1.0.0 to 1.1.0
- \`patch\` increments from 1.0.0 to 1.0.1
- \`premajor\` increments from 1.0.0 to 2.0.0-beta.0
- \`preminor\` increments from 1.0.0 to 1.1.0-beta.0
- \`prepatch\` increments from 1.0.0 to 1.0.1-beta.0
- \`prerelease\` increments from 2.0.0-beta.0 to 2.0.0-beta.1
\`--preid beta\` specifies the \`beta\` part above (default). It's required for
all \`pre*\` increment types except \`prerelease\`.
See the docs for \`npm version\` <https://docs.npmjs.com/cli/version> for
details on the \`increment\` parameter.
`.trim()
	console.error(usage)
	process.exit(0)
}
// Print an error message and abort the release with a failing exit code.
function bail(...args) {
	console.error(...args)
	process.exit(1)
}
// The repo root (this file lives in scripts/).
const rootDir = path.dirname(__dirname)
// Resolve a path relative to the repo root.
const p = (...args) => path.resolve(rootDir, ...args)
// Read the current version from the repo's package.json. Note `rootDir` is
// already the repo root, so no `../` prefix - the previous
// `p("../package.json")` looked one directory *above* the repo and could
// never find the manifest.
function readVersion() {
	return JSON.parse(fs.readFileSync(p("package.json"), "utf-8")).version
}
const parsed = require("minimist")(process.argv.slice(2), {
boolean: ["help"],
alias: {help: ["h", "?"]},
string: ["preid"],
"--": true,
})
if (parsed.help || !parsed["--"].length) showHelp()
const publishType = parsed["--"][0]
const publishPreid = parsed.preid
const publishArgs = publishType.startsWith("pre") ? ["--tag", "next"] : []
let releaseArgs = []
if (publishType.startsWith("pre") && publishType !== "prerelease") {
if (publishPreid == null) {
bail("`pre*` increments other than `prerelease` require `--preid`")
}
releaseArgs = [`--preid=${publishPreid}`]
}
// Run `cmd` synchronously with inherited stdio, throwing on non-zero exit.
// `opts` may override any execFileSync option (e.g. piping stdout in order
// to capture it - with the default `"inherit"`, execFileSync returns null).
function exec(cmd, args, opts) {
	// Fixed: this previously passed an undefined `name` instead of `cmd`,
	// which threw a ReferenceError on the first call.
	return execFileSync(cmd, args, {
		windowsHide: true,
		stdio: "inherit",
		encoding: "utf-8",
		...opts,
	})
}
const upstream = require("./_upstream")
exec("git", ["checkout", "next"])
exec("git", ["pull", "--rebase", upstream.fetch.branch, "next"])
// Because I'm too lazy to make everything async.
exec("read", ["-rsp", `
Update "Upcoming" in \`docs/change-log.md\`. If moving a prerelease to stable,
also replace all references to \`mithril@next\` to \`mithril\`, including in
Flems snippets. Press enter once ready to continue.
`.trim()], {shell: true})
// Verify the changelog was updated
let changelogUpdated = false
let treeDirty = false
for (const line of exec("git", ["status", "-z"]).split(/\0/g)) {
switch (line) {
case " M CHANGELOG.md":
case "M CHANGELOG.md":
case "MM CHANGELOG.md":
changelogUpdated = true
break
default:
treeDirty = true
}
}
if (!changelogUpdated || treeDirty) {
if (!changelogUpdated) console.error("Error: Changelog must be updated!")
if (!treeDirty) console.error("Error: Tree must not be otherwise dirty!")
process.exit(1)
}
exec("git", ["add", "."])
exec("git", ["commit", "-m", "Preparing for release"])
exec("git", ["checkout", "master"])
exec("git", ["pull", "--rebase", upstream.fetch.branch, "master"])
// There may be merge conflicts with `index.js` and/or the bundle - just ignore
// them. Whatever they have is canon, as is the case with everything else.
exec("git", ["merge", "next", "-s", "theirs"])
rimraf.sync(p("node_modules"))
exec("npm", ["install-test"])
exec("npm", ["version", "-m", "v%s", publishType, ...releaseArgs])
exec("git", ["push", "--follow-tags", "origin", "master"])
exec("git", ["push", "--follow-tags", upstream.push.branch, "master"])
exec("git", ["checkout", "next"])
exec("git", ["checkout", "master", "--", "mithril.js", "mithril.min.js"])
// That's already been updated in `master`.
exec("git", ["commit", "-m", `Generated bundles for ${readVersion()} [skip ci]`])
exec("git", ["push"])
exec("git", ["push", upstream.push.branch, "next"])
exec("git", ["checkout", "master"])
exec("npm", ["login"])
exec("npm", ["publish", ...publishArgs])
exec("npm", ["logout"])
require("./update-docs")()

39
scripts/update-docs.js Normal file
View file

@@ -0,0 +1,39 @@
#!/usr/bin/env node
/* eslint-disable no-process-exit */
"use strict"
// This is my temporary hack to simplify deployment until I fix the underlying
// problems in these bugs:
// - https://github.com/MithrilJS/mithril.js/issues/2417
// - https://github.com/MithrilJS/mithril.js/pull/2422
const path = require("path")
const {execFileSync} = require("child_process")
const ghPages = require("gh-pages")
const upstream = require("./_upstream")
const generate = require("./generate-docs")
require("./_command")(module, () => update())
module.exports = update
async function update() {
await generate()
const commit = execFileSync("git", ["rev-parse", "--verify", "HEAD"], {
windowsHide: true,
stdio: "inherit",
encoding: "utf-8",
})
await ghPages.publish(path.resolve(__dirname, "../dist"), {
// Note: once this is running on Travis again, run
// `git remote add upstream git@github.com:MithrilJS/mithril.js.git` to
// force it to go over SSH so the saved keys are used.
// https://github.com/tschaub/gh-pages/issues/160
repo: upstream.push.repo,
remote: upstream.push.branch,
src: ["**/*", ".nojekyll"],
message: `Generated docs for commit ${commit} [skip ci]`,
// May want to enable this if an API token resolves the issue.
// silent: !!process.env.TRAVIS_CI,
})
}

10
scripts/watch-docs.js Normal file
View file

@@ -0,0 +1,10 @@
"use strict"
const path = require("path")
const chokidar = require("chokidar")
const generate = require("./generate-docs")
const command = require("./_command")
chokidar.watch(path.resolve(__dirname, "../docs")).on("all", () => {
command.run(() => generate())
})