Refactor scripts (#2465)

* Refactor all kinds of scripts

* Update docs to ensure linter passes
This commit is contained in:
Isiah Meadows 2019-07-27 15:12:49 -04:00 committed by GitHub
parent 62172cbe08
commit 48e7fd1711
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
23 changed files with 1695 additions and 340 deletions

View file

@ -3,8 +3,13 @@ root = true
[*]
charset = utf-8
[*.js]
[*.{js,json,yml,html,md}]
indent_style = tab
tab_width = 4
trim_trailing_whitespace = true
insert_final_newline = true
end_of_line = lf
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2

View file

@ -1,7 +1,7 @@
.vscode/
coverage/
docs/lib/
examples/
/.vscode
/coverage
/docs/lib
/examples
/mithril.js
/mithril.min.js
node_modules/
/node_modules

3
.gitignore vendored
View file

@ -6,5 +6,6 @@ npm-debug.log
.DS_Store
.eslintcache
# These are artifacts from Travis' deploy scripts
# These are artifacts from various scripts
/dist
/archive

View file

@ -16,12 +16,6 @@ install:
# This is to prevent lint-staged/prettier from running on the bundles
- npm rm husky
# Build bundles (so they're always up to date)
before_script:
- npm run build-browser
# Pass -save so it'll update the readme as well
- npm run build-min -- -save
# Run tests, lint, and then check for perf regressions
script:
- npm test
@ -90,12 +84,13 @@ deploy:
tags: true
repo: MithrilJS/mithril.js
- provider: npm
skip_cleanup: true
email: me@isiahmeadows.com
api_key:
secure: "uPLbeJTalA/b38srb1VuWnD3eOgeKTkXf8VVUasUXIqc2xub4DSkFm1IVKSVd/rzP7EeO7+gRUs2UteNKlpZJl226IS5mFPSVtC7ViW46WSpYT0wlMsc7hrubMBGTx3/XYpPwtmMlTIGs5ICT7YkGAuju/6i79LDAB+gbnEY8Bc="
on:
tags: true
repo: MithrilJS/mithril.js
condition: "$TRAVIS_TAG != *-*"
# Skip until I can figure out what's going on with docs + version deployment
# - provider: npm
# skip_cleanup: true
# email: me@isiahmeadows.com
# api_key:
# secure: "uPLbeJTalA/b38srb1VuWnD3eOgeKTkXf8VVUasUXIqc2xub4DSkFm1IVKSVd/rzP7EeO7+gRUs2UteNKlpZJl226IS5mFPSVtC7ViW46WSpYT0wlMsc7hrubMBGTx3/XYpPwtmMlTIGs5ICT7YkGAuju/6i79LDAB+gbnEY8Bc="
# on:
# tags: true
# repo: MithrilJS/mithril.js
# condition: "$TRAVIS_TAG != *-*"

View file

@ -466,7 +466,7 @@ var Modal = {
If you do it like above, you could run into issues when using it:
```js
```javascript
var MyModal = {
view: function() {
return m(Modal, {
@ -483,7 +483,7 @@ var MyModal = {
Instead, you should forward *single* attributes into vnodes:
```js
```javascript
// PREFER
var Modal = {
// ...

View file

@ -1,71 +0,0 @@
"use strict"
var fs = require("fs")
var path = require("path")
var marked = require("marked")
// Page template and package version, baked into every generated page.
var layout = fs.readFileSync("./docs/layout.html", "utf-8")
var version = JSON.parse(fs.readFileSync("./package.json", "utf-8")).version
// Ensure the output directories exist; EEXIST (already there) is fine.
try {fs.mkdirSync("./dist")} catch (e) {/* ignore */}
try {fs.mkdirSync("./dist/archive")} catch (e) {/* ignore */}
try {fs.mkdirSync("./dist/archive/v" + version)} catch (e) {/* ignore */}
// Sidebar navigation sources; one of the two links each generated page.
var guides = fs.readFileSync("docs/nav-guides.md", "utf-8")
var methods = fs.readFileSync("docs/nav-methods.md", "utf-8")
generate("docs")
// Recursively walk `pathname`: compile every Markdown doc to HTML via
// marked and copy all other assets, writing each output twice - once to
// ./dist/ and once to the versioned ./dist/archive/v<version>/ tree.
function generate(pathname) {
if (fs.lstatSync(pathname).isDirectory()) {
// Directory: recurse into every entry.
fs.readdirSync(pathname).forEach(function(filename) {
generate(pathname + "/" + filename)
})
}
else if (!pathname.match(/tutorials|archive|nav-/)) {
if (pathname.match(/\.md$/)) {
var outputFilename = pathname.replace(/\.md$/, ".html")
var markdown = fs.readFileSync(pathname, "utf-8")
// Tracks heading anchors seen so far so duplicates get numeric suffixes.
var anchors = {}
var fixed = markdown
.replace(/`((?:\S| -> |, )+)(\|)(\S+)`/gim, function(match, a, b, c) { // fix pipes in code tags
return "<code>" + (a + b + c).replace(/\|/g, "&#124;") + "</code>"
})
.replace(/(^# .+?(?:\r?\n){2,}?)(?:(-(?:.|\r|\n)+?)((?:\r?\n){2,})|)/m, function(match, title, nav) { // inject menu
// Pick whichever nav file (guides or methods) links this page,
// bold the current page's entry, and nest its local nav under it.
var file = path.basename(pathname)
var link = new RegExp("([ \t]*)(- )(\\[.+?\\]\\(" + file + "\\))")
var replace = function(match, space, li, link) {
return space + li + "**" + link + "**" + (nav ? "\n" + nav.replace(/(^|\n)/g, "$1\t" + space) : "")
}
var modified = guides.match(link) ? guides.replace(link, replace) : methods.replace(link, replace)
return title + modified + "\n\n"
})
.replace(/(\]\([^\)]+)(\.md)/gim, function(match, path, extension) {
return path + (path.match(/http/) ? extension : ".html")
}) // fix links
var markedHtml = marked(fixed)
.replace(/(\W)Array<([^/<]+?)>/gim, "$1Array&lt;$2&gt;") // Fix type signatures containing Array<...>
// First line's `# Title` becomes the <title> of the page.
var title = fixed.match(/^#([^\n\r]+)/i) || []
var html = layout
.replace(/<title>Mithril\.js<\/title>/, "<title>" + title[1] + " - Mithril.js</title>")
.replace(/\[version\]/g, version) // update version
.replace(/\[body\]/, markedHtml)
.replace(/<h(.) id="([^"]+?)">(.+?)<\/h.>/gim, function(match, n, id, text) { // fix anchors
var anchor = text.toLowerCase().replace(/<(\/?)code>/g, "").replace(/<a.*?>.+?<\/a>/g, "").replace(/\.|\[|\]|&quot;|\/|\(|\)/g, "").replace(/\s/g, "-");
// Duplicate anchors get "1", "2", ... appended (first occurrence is bare).
if(anchor in anchors) {
anchor += ++anchors[anchor]
} else {
anchors[anchor] = 0;
}
return `<h${n} id="${anchor}"><a href="#${anchor}">${text}</a></h${n}>`;
})
fs.writeFileSync("./dist/archive/v" + version + "/" + outputFilename.replace(/^docs\//, ""), html, "utf-8")
fs.writeFileSync("./dist/" + outputFilename.replace(/^docs\//, ""), html, "utf-8")
}
else if (!pathname.match(/lint|generate/)) {
// Non-Markdown asset: copy verbatim, binary-safe for images/icons.
var encoding = (/\.(ico|png)$/i).test(path.extname(pathname)) ? "binary" : "utf-8";
fs.writeFileSync("./dist/archive/v" + version + "/" + pathname.replace(/^docs\//, ""), fs.readFileSync(pathname, encoding), encoding)
fs.writeFileSync("./dist/" + pathname.replace(/^docs\//, ""), fs.readFileSync(pathname, encoding), encoding)
}
}
}

View file

@ -215,7 +215,7 @@ m("a-scene", [
])
```
And yes, this translates to both attributes and properties, and it works just like they would in the DOM. Using [Brick's `brick-deck`](https://brick.mozilla.io/docs/brick-deck) as an example, they have a `selected-index` attribute with a corresponding `selectedIndex` getter/setter property.
And yes, this translates to both attributes and properties, and it works just like they would in the DOM. Using [Brick's `brick-deck`](http://brick.mozilla.io/docs/brick-deck) as an example, they have a `selected-index` attribute with a corresponding `selectedIndex` getter/setter property.
```javascript
m("brick-deck[selected-index=0]", [/* ... */]) // lowercase

View file

@ -51,7 +51,7 @@ $ npm install webpack webpack-cli --save-dev
```
3. Add a "start" entry to the scripts section in `package.json`.
```js
```javascript
{
// ...
"scripts": {
@ -61,7 +61,7 @@ $ npm install webpack webpack-cli --save-dev
```
4. Create `src/index.js` file.
```js
```javascript
import m from "mithril";
m.render(document.body, "hello world");
```
@ -113,7 +113,7 @@ m.render(document.body, "hello world")
Modularization is the practice of separating the code into files. Doing so makes it easier to find code, understand what code relies on what code, and test.
CommonJS is a de-facto standard for modularizing JavaScript code, and it's used by Node.js, as well as tools like [Browserify](https://browserify.org/) and [Webpack](https://webpack.js.org/). It's a robust, battle-tested precursor to ES6 modules. Although the syntax for ES6 modules is specified in Ecmascript 6, the actual module loading mechanism is not. If you wish to use ES6 modules despite the non-standardized status of module loading, you can use tools like [Rollup](https://rollupjs.org/) or [Babel](https://babeljs.io/).
CommonJS is a de-facto standard for modularizing JavaScript code, and it's used by Node.js, as well as tools like [Browserify](http://browserify.org/) and [Webpack](https://webpack.js.org/). It's a robust, battle-tested precursor to ES6 modules. Although the syntax for ES6 modules is specified in Ecmascript 6, the actual module loading mechanism is not. If you wish to use ES6 modules despite the non-standardized status of module loading, you can use tools like [Rollup](https://rollupjs.org/) or [Babel](https://babeljs.io/).
Most browsers today do not natively support modularization systems (CommonJS or ES6), so modularized code must be bundled into a single JavaScript file before running in a client-side application.
@ -125,7 +125,7 @@ npm install webpack webpack-cli --save-dev
Open the `package.json` that you created earlier, and add an entry to the `scripts` section:
```
```json
{
"name": "my-project",
"scripts": {
@ -199,7 +199,7 @@ If you open bin/app.js, you'll notice that the Webpack bundle is not minified, s
You can use hooks in your production environment to run the production build script automatically. Here's an example for [Heroku](https://www.heroku.com/):
```
```json
{
"name": "my-project",
"scripts": {

View file

@ -193,7 +193,7 @@ JSX and hyperscript are two different syntaxes you can use for specifying vnodes
You can see the tradeoffs come into play in more complex trees. For instance, consider this hyperscript tree, adapted from a real-world project by [@isiahmeadows](https://github.com/isiahmeadows/) with some alterations for clarity and readability:
```js
```javascript
function SummaryView() {
let tag, posts

View file

@ -1,186 +0,0 @@
#!/usr/bin/env node
"use strict"
var fs = require("fs")
var path = require("path")
var http = require("http")
var url = require("url")
//lint rules
// Apply every lint rule, in order, to one Markdown file's contents.
function lint(file, data) {
	var rules = [
		ensureCodeIsHighlightable,
		ensureCodeIsSyntaticallyValid,
		ensureCodeIsRunnable,
		ensureCommentStyle,
		ensureLinkIsValid,
	]
	rules.forEach(function(rule) {rule(file, data)})
}
// Heuristic: any fenced block not tagged `javascript` that nonetheless
// parses as JavaScript probably needs the tag for syntax highlighting.
function ensureCodeIsHighlightable(file, data) {
var codeBlocks = data.match(/```(.|\n|\r)*?```/gim) || []
codeBlocks.forEach(function(block) {
// Strip the surrounding triple backticks.
block = block.slice(3, -3)
if (block.indexOf("javascript") !== 0) {
// `new Function` throws only on a syntax error, so a successful parse
// means this is (probably) an untagged JS block.
try {if (new Function(block)) console.log(file + " - javascript block missing language tag after triple backtick\n\n" + block + "\n\n---\n\n")}
catch (e) {/*not a js block, ignore*/}
}
})
}
// Compile each ```javascript block with `new Function` to surface syntax
// errors without actually executing the example code.
function ensureCodeIsSyntaticallyValid(file, data) {
var codeBlocks = data.match(/```javascript(.|\n|\r)*?```/gim) || []
codeBlocks.forEach(function(block) {
// 13 === "```javascript".length; -3 strips the closing fence.
block = block.slice(13, -3)
try {new Function(block)}
catch (e) {console.log(file + " - javascript block has wrong syntax\n\n" + e.message + "\n\n" + block + "\n\n---\n\n")}
})
}
// Concatenate every ```javascript block in the file and execute the result
// against stubbed globals (console, fetch, module, require) plus the mocked
// browser environment from initMocks, reporting any runtime error.
function ensureCodeIsRunnable(file, data) {
var codeBlocks = data.match(/```javascript(.|\n|\r)*?```/gim) || []
var code = codeBlocks.map(function(block) {return block.slice(13, -3)}).join(";")
//stubs
var silentConsole = {log: function() {}}
var fetch = function() {
return Promise.resolve({
json: function() {}
})
}
try {
initMocks()
var module = {exports: {}}
// The fake `require` below resolves only the modules the docs examples
// are known to import; anything else returns undefined.
new Function("console,fetch,module,require", code).call(this, silentConsole, fetch, module, function(dep) {
if (dep.indexOf("./mycomponent") === 0) return {view: function() {}}
if (dep.indexOf("mithril/ospec/ospec") === 0) return global.o
if (dep.indexOf("mithril/stream") === 0) return global.stream
if (dep === "mithril") return global.m
if (dep === "../model/User") return {
list: [],
current: {},
loadList: function() {
return Promise.resolve({data: []})
},
load: function() {
return Promise.resolve({firstName: "", lastName: ""})
},
save: function() {
return Promise.resolve()
},
}
if (dep === "./view/UserList") return {view: function() {}}
if (dep === "./view/UserForm") return {view: function() {}}
if (dep === "./view/Layout") return {view: function() {}}
})
}
catch (e) {console.log(file + " - javascript code cannot run\n\n" + e.stack + "\n\n" + code + "\n\n---\n\n")}
}
// Enforce a space after `//` in every ```javascript block's comments.
function ensureCommentStyle(file, data) {
	var blocks = data.match(/```javascript(.|\n|\r)*?```/gim) || []
	for (var i = 0; i < blocks.length; i++) {
		// Strip the ```javascript ... ``` fences before checking.
		var code = blocks[i].slice(13, -3)
		if (code.match(/(^|\s)\/\/[\S]/)) {
			console.log(file + " - comment missing space\n\n" + code + "\n\n---\n\n")
		}
	}
}
// Check every Markdown link `[...](target)` in `data`: external http(s)
// links get a HEAD request; relative `.md` links are checked on disk
// against the docs/ directory.
function ensureLinkIsValid(file, data) {
	var links = data.match(/\]\(([^\)]+?)\)/gim) || []
	links.forEach(function(match) {
		var link = match.slice(2, -1)
		var path = (link.match(/[\w-#]+\.md/) || [])[0]
		if (link.match(/http/)) {
			var u = url.parse(link)
			// Fixed: `.end()` is required to actually send the request -
			// without it, the HEAD was never dispatched and broken external
			// links were never reported. NOTE(review): port 80 means https
			// URLs are effectively probed over plain http - confirm intent.
			http.request({method: "HEAD", host: u.host, path: u.pathname, port: 80})
				.on("error", function() {
					console.log(file + " - broken external link: " + link)
				})
				.end()
		}
		else if (path && !fs.existsSync("docs/" + path)) console.log(file + " - broken link: " + link)
	})
}
// Install the mocked browser environment (window/document/m/o/stream/alert)
// as globals, plus the fake HTTP routes that the docs' request examples hit.
function initMocks() {
/* eslint-disable global-require */
global.window = require("../test-utils/browserMock")()
global.document = window.document
global.m = require("../index")
global.o = require("../ospec/ospec")
global.stream = require("../stream")
global.alert = function() {}
/* eslint-enable global-require */
//routes consumed by request.md
global.window.$defineRoutes({
"GET /api/v1/users": function() {
return {status: 200, responseText: JSON.stringify([{name: ""}])}
},
"GET /api/v1/users/search": function() {
return {status: 200, responseText: JSON.stringify([{id: 1, name: ""}])}
},
"GET /api/v1/users/1/projects": function() {
return {status: 200, responseText: JSON.stringify([{id: 1, name: ""}])}
},
"GET /api/v1/todos": function() {
return {status: 200, responseText: JSON.stringify([])}
},
"PUT /api/v1/users/1": function(request) {
// Echo JSONP-style when a callback param is present.
return {status: 200, responseText: request.query.callback ? request.query.callback + "([])" : "[]"}
},
"POST /api/v1/upload": function() {
return {status: 200, responseText: JSON.stringify([])}
},
"GET /files/icon.svg": function() {
return {status: 200, responseText: "<svg></svg>"}
},
"GET /files/data.csv": function() {
return {status: 200, responseText: "a,b,c"}
},
"GET /api/v1/users/123": function() {
return {status: 200, responseText: JSON.stringify({id: 123})}
},
"GET /api/v1/users/foo:bar": function() {
return {status: 200, responseText: JSON.stringify({id: 123})}
},
"GET /files/image.svg": function() {
return {status: 200, responseText: "<svg></svg>"}
},
})
}
//runner
// Walk `pathname` recursively, invoking `callback(path, stat[, children])`
// for each entry. Resolves once the whole subtree has been visited, and
// rejects on the first filesystem error.
function traverseDirectory(pathname, callback) {
	// Normalize Windows separators so regex checks on paths behave.
	pathname = pathname.replace(/\\/g, "/")
	return new Promise(function(resolve, reject) {
		fs.lstat(pathname, function(err, stat) {
			// Fixed: `return` matters here - previously execution fell
			// through and `stat.isDirectory()` threw on undefined,
			// crashing the process instead of rejecting.
			if (err) return reject(err)
			if (stat.isDirectory()) {
				fs.readdir(pathname, function(err, pathnames) {
					if (err) return reject(err)
					var promises = []
					for (var i = 0; i < pathnames.length; i++) {
						pathnames[i] = path.join(pathname, pathnames[i])
						promises.push(traverseDirectory(pathnames[i], callback))
					}
					callback(pathname, stat, pathnames)
					resolve(Promise.all(promises))
				})
			}
			else {
				callback(pathname, stat)
				resolve(pathname)
			}
		})
	})
}
//run
// Lint every Markdown doc under ./docs except changelogs, migration
// guides, and anything under node_modules.
traverseDirectory("./docs", function(pathname) {
if (pathname.indexOf(".md") > -1 && !pathname.match(/change-log|migration-|node_modules/)) {
fs.readFile(pathname, "utf8", function(err, data) {
if (err) console.log(err)
else lint(pathname, data)
})
}
})
// NOTE(review): the traversal can resolve while readFile callbacks are
// still pending, so this exit may cut linting short - verify.
.then(process.exit)

View file

@ -349,7 +349,7 @@ m("div", m(Component, "value", function(key) { return "child" }))
In v0.2.x, the children of DOM nodes were represented literally with no normalization aside from using the children directly if only a single array child is present. It returned a structure more like this, with the strings represented literally.
```js
```javascript
m("div", "value", ["nested"])
// Becomes:
@ -365,7 +365,7 @@ m("div", "value", ["nested"])
In v2.x, children of DOM vnodes are normalized to objects of a single consistent structure.
```js
```javascript
m("div", "value", ["nested"])
// Becomes roughly:
@ -383,7 +383,7 @@ m("div", "value", ["nested"])
If only a single text child is present on a DOM vnode, it instead sets `text` to that value.
```js
```javascript
m("div", "value")
// Becomes roughly:

View file

@ -177,7 +177,7 @@ m(m.route.Link, {
}, "link name")
```
This supports full accessibility for both `a` and `button`, via a `disabled` attribute. This ensures [no `href` attribute or `onclick` handler is set](https://css-tricks.com/how-to-disable-links/) and that an `"aria-disabled": "true"` attribute *is* set. If you are passing an `onclick` handler already, that's dropped. (You can work around this by adding it directly in a [lifecycle hook](lifecycle.md).) The `disabled` attribute is itself proxied to the element or component, so you can disable routed `<button>`s and the like.
This supports full accessibility for both `a` and `button`, via a `disabled` attribute. This ensures [no `href` attribute or `onclick` handler is set](https://css-tricks.com/how-to-disable-links/) and that an `"aria-disabled": "true"` attribute *is* set. If you are passing an `onclick` handler already, that's dropped. (You can work around this by adding it directly in a [lifecycle hook](lifecycle-methods.md).) The `disabled` attribute is itself proxied to the element or component, so you can disable routed `<button>`s and the like.
```javascript
// This does the right thing and the accessible thing for you.
@ -241,7 +241,7 @@ As a rule of thumb, RouteResolvers should be in the same file as the `m.route` c
When using components, you could think of them as special sugar for this route resolver, assuming your component is `Home`:
```js
```javascript
var routeResolver = {
onmatch: function() { return Home },
render: function(vnode) { return [vnode] },
@ -803,16 +803,13 @@ m.route(document.body, "/", {
However, realistically, in order for that to work on a production scale, it would be necessary to bundle all of the dependencies for the `Home.js` module into the file that is ultimately served by the server.
Fortunately, there are a number of tools that facilitate the task of bundling modules for lazy loading. Here's an example using [webpack's code splitting system](https://webpack.github.io/docs/code-splitting.html):
Fortunately, there are a number of tools that facilitate the task of bundling modules for lazy loading. Here's an example using [native dynamic `import(...)`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import), supported by many bundlers:
```javascript
m.route(document.body, "/", {
"/": {
onmatch: function() {
// using Webpack async code splitting
return new Promise(function(resolve) {
require(['./Home.js'], resolve)
})
return import('./Home.js')
},
},
})

View file

@ -150,7 +150,7 @@ module.exports = MyComponent
You could easily create a few unit tests for that.
```js
```javascript
var mq = require("mithril-query")
var MyComponent = require("./MyComponent")

1017
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -7,36 +7,48 @@
"unpkg": "mithril.min.js",
"repository": "MithrilJS/mithril.js",
"scripts": {
"dev": "node bundler/cli browser.js -output mithril.js -watch",
"build": "npm run build-browser & npm run build-min",
"build-browser": "node bundler/cli browser.js -output mithril.js",
"build-min": "node bundler/cli browser.js -output mithril.min.js -minify -save",
"precommit": "lint-staged",
"lintdocs": "node docs/lint",
"gendocs": "node docs/generate",
"lint": "eslint . || true",
"lint:fix": "eslint . --fix",
"watch": "run-p watch:**",
"watch:js": "node bundler/cli browser.js -output mithril.js -watch",
"watch:docs": "node scripts/watch-docs.js",
"build": "run-p build:**",
"build:browser": "node bundler/cli browser.js -output mithril.js",
"build:docs": "node scripts/generate-docs",
"build:min": "node bundler/cli browser.js -output mithril.min.js -minify -save",
"lint": "run-s -cn lint:**",
"lint:js": "eslint . --cache",
"lint:docs": "node scripts/lint-docs",
"perf": "node performance/test-perf.js",
"pretest": "npm run lint",
"test": "node ospec/bin/ospec",
"posttest": "npm run lint",
"cover": "istanbul cover --print both ospec/bin/ospec",
"release": "npm version -m 'v%s'",
"preversion": "npm run test",
"version": "npm run build && git add index.js mithril.js mithril.min.js README.md",
"postversion": "git push --follow-tags"
"release": "node scripts/release",
"release:docs": "node scripts/update-docs",
"version": "npm run build && git add index.js mithril.js mithril.min.js README.md"
},
"devDependencies": {
"@alrra/travis-scripts": "^3.0.1",
"@babel/parser": "^7.5.5",
"benchmark": "^2.1.4",
"chokidar": "^2.0.4",
"dedent": "^0.7.0",
"escape-string-regexp": "^2.0.0",
"eslint": "^5.13.0",
"gh-pages": "^2.0.1",
"glob": "^7.1.4",
"html-minifier": "^4.0.0",
"istanbul": "^0.4.5",
"lint-staged": "^8.1.3",
"locater": "^1.3.0",
"marked": "^0.6.2",
"marked": "^0.7.0",
"minimist": "^1.2.0",
"npm-run-all": "^4.1.5",
"pinpoint": "^1.1.0",
"recursive-copy": "^2.0.10",
"request": "^2.88.0",
"request-promise-native": "^1.0.7",
"rimraf": "^2.6.3",
"terser": "^3.16.1"
},
"bin": {

6
scripts/.eslintrc.js Normal file
View file

@ -0,0 +1,6 @@
// ESLint config for the scripts/ directory: inherit the root rules, but
// allow ES2019 syntax (async/await, spread, optional catch binding).
module.exports = {
"extends": "../.eslintrc.js",
"parserOptions": {
"ecmaVersion": 2019,
},
};

36
scripts/_command.js Normal file
View file

@ -0,0 +1,36 @@
"use strict"
// Fail fast on any unhandled promise rejection instead of a silent warning.
process.on("unhandledRejection", (e) => { throw e })
// Print a subprocess failure: the stack plus any captured stdout/stderr.
// Returns false (without printing) when `e` is not a child_process error
// carrying stdio buffers, so callers can fall back to generic handling.
function reportExec(e) {
	const hasCapturedStdio = Boolean(e.stdout) && Boolean(e.stderr)
	if (!hasCapturedStdio) return false
	console.error(e.stack)
	for (const stream of [e.stdout, e.stderr]) {
		if (stream && stream.length) console.error(stream.toString("utf-8"))
	}
	return true
}
// Run `init` only when `mod` is the process entry module (i.e. the script
// was invoked directly, not require()d). Subprocess failures print their
// captured stdio and exit 1; all other errors are rethrown.
exports.exec = (mod, init) => {
if (require.main === mod) {
// Skip the first tick.
Promise.resolve().then(init).catch((e) => {
// eslint-disable-next-line no-process-exit
if (reportExec(e)) process.exit(1)
else throw e
})
}
}
exports.run = async (init) => {
try {
await init()
} catch (e) {
if (!reportExec(e)) console.error(e)
}
}

37
scripts/_upstream.js Normal file
View file

@ -0,0 +1,37 @@
/* eslint-disable no-process-exit */
"use strict"
const {execFileSync} = require("child_process")
// `git remote -v` output, one line per remote+direction:
// "<name>\t<url> (fetch|push)"
const remoteInfo = execFileSync("git", ["remote", "-v"], {
windowsHide: true,
stdio: ["inherit", "pipe", "inherit"],
encoding: "utf-8",
}).trim().split(/\r\n?|\n/g)
// Find the remote pointing at MithrilJS/mithril.js for the given direction
// ("fetch" or "push"), matching https/git/ssh/scp-like URL syntaxes.
// Exits the process when no such remote is configured.
function find(type) {
const regexp = new RegExp(
"\t(?:" +
"(?:(?:git+)?https?|git|ssh)://(?:[^@\\s]+@)?github\\.com/|" +
"git@github\\.com:" +
")" +
`MithrilJS/mithril\\.js\\.git \\(${type}\\)$`
)
const line = remoteInfo.find((line) => regexp.test(line))
if (line == null) {
console.error(
"An upstream must be configured with both fetch and push!"
)
process.exit(1)
}
// NOTE(review): `branch` actually holds the remote *name* (the text
// before the first tab, e.g. "origin"), not a branch; callers pass it
// where git expects a remote. Consider renaming.
return {
branch: line.slice(0, line.indexOf("\t")),
// `-(type.length + 3)` strips the trailing " (fetch)" / " (push)".
repo: line.slice(line.lastIndexOf("\t") + 1, -(type.length + 3)),
}
}
exports.fetch = find("fetch")
exports.push = find("push")

176
scripts/generate-docs.js Normal file
View file

@ -0,0 +1,176 @@
"use strict"
const {createReadStream, createWriteStream, promises: fs} = require("fs")
const path = require("path")
const {promisify} = require("util")
const pipeline = promisify(require("stream").pipeline)
const marked = require("marked")
const rimraf = promisify(require("rimraf"))
const copy = require("recursive-copy")
const {execFileSync} = require("child_process")
const escapeRegExp = require("escape-string-regexp")
const HTMLMinifier = require("html-minifier")
require("./_command").exec(module, () => generate())
module.exports = generate
// Minify our docs.
// Fixed: html-minifier option names are case-sensitive and its documented
// options use uppercase acronyms (`minifyCSS`, `minifyJS`, `minifyURLs`,
// `...CDATA`) - the previous lowercase spellings were silently ignored.
const htmlMinifierConfig = {
	collapseBooleanAttributes: true,
	collapseWhitespace: true,
	conservativeCollapse: true,
	continueOnParseError: true,
	// Passed through to clean-css.
	minifyCSS: {
		compatibility: "ie9",
	},
	minifyJS: true,
	minifyURLs: true,
	preserveLineBreaks: true,
	removeAttributeQuotes: true,
	// NOTE(review): the CDATA options were removed in html-minifier v4 -
	// confirm against the installed version (^4.0.0) and delete if so.
	removeCDATASectionsFromCDATA: true,
	removeComments: true,
	removeCommentsFromCDATA: true,
	removeEmptyAttributes: true,
	removeOptionalTags: true,
	removeRedundantAttributes: true,
	removeScriptTypeAttributes: true,
	removeStyleLinkTypeAttributes: true,
	useShortDoctype: true,
}
// Build the documentation site into ./dist: wipes previous output, compiles
// every Markdown doc to minified HTML, copies other assets, and merges the
// gh-pages archive so older versions remain available.
async function generate() {
const r = (file) => path.resolve(__dirname, "..", file)
await rimraf(r("dist"))
const [guides, methods, layout, pkg] = await Promise.all([
fs.readFile(r("docs/nav-guides.md"), "utf-8"),
fs.readFile(r("docs/nav-methods.md"), "utf-8"),
fs.readFile(r("docs/layout.html"), "utf-8"),
fs.readFile(r("package.json"), "utf-8"),
fs.mkdir(r("dist"), {recursive: true}),
])
const version = JSON.parse(pkg).version
// Set up archive directories
execFileSync("git", ["checkout", "gh-pages", "--", "archive"])
await fs.rename(r("archive"), r("dist/archive"))
await fs.mkdir(r(`dist/archive/v${version}`), {recursive: true})
// Compile one Markdown page to a full HTML document: fix pipes in inline
// code, inject the nav menu, rewrite .md links, escape Array<...>
// signatures, then render the result into the layout template.
function compilePage(file, markdown) {
file = path.basename(file)
const link = new RegExp(
`([ \t]*)(- )(\\[.+?\\]\\(${escapeRegExp(file)}\\))`
)
const src = link.test(guides) ? guides : methods
let body = markdown
// fix pipes in code tags
body = body.replace(/`((?:\S| -> |, )+)(\|)(\S+)`/gim,
(match, a, b, c) =>
`<code>${(a + b + c).replace(/\|/g, "&#124;")}</code>`
)
// inject menu
body = body.replace(
/(^# .+?(?:\r?\n){2,}?)(?:(-(?:.|\r|\n)+?)((?:\r?\n){2,})|)/m,
(match, title, nav) => {
if (!nav) {
return title + src.replace(link, "$1$2**$3**") + "\n\n"
}
return title + src.replace(link, (match, space, li, link) =>
`${space}${li}**${link}**\n${
nav.replace(/(^|\n)/g, `$1\t${space}`)
}`
) + "\n\n"
}
)
// fix links
body = body.replace(/(\]\([^\)]+)(\.md)/gim, (match, path, extension) =>
path + ((/http/).test(path) ? extension : ".html")
)
// Fix type signatures containing Array<...>
body = body.replace(/(\W)Array<([^/<]+?)>/gim, "$1Array&lt;$2&gt;")
const markedHtml = marked(body)
const title = body.match(/^#([^\n\r]+)/i) || []
let result = layout
result = result.replace(
/<title>Mithril\.js<\/title>/,
`<title>${title[1]} - Mithril.js</title>`
)
// update version
result = result.replace(/\[version\]/g, version)
// insert parsed HTML
result = result.replace(/\[body\]/, markedHtml)
// fix anchors
// Duplicate heading anchors get numeric suffixes (first dupe gets "0"+1).
const anchorIds = new Map()
result = result.replace(
/<h([1-6]) id="([^"]+)">(.+?)<\/h\1>/gim,
(match, n, id, text) => {
let anchor = text.toLowerCase()
.replace(/<(\/?)code>/g, "")
.replace(/<a.*?>.+?<\/a>/g, "")
.replace(/\.|\[|\]|&quot;|\/|\(|\)/g, "")
.replace(/\s/g, "-");
const anchorId = anchorIds.get(anchor)
anchorIds.set(anchor, anchorId != null ? anchorId + 1 : 0)
if (anchorId != null) anchor += anchorId
return `<h${n} id="${anchor}">` +
`<a href="#${anchor}">${text}</a>` +
`</h${n}>`
}
)
return result
}
// Recursively process one file or directory into the archive tree
// (intentionally shadows the outer `generate`).
// NOTE(review): the opened FileHandle is never closed on the .md/.html
// paths - looks like an fd leak; confirm and add `handle.close()`.
async function generate(file) {
try {
const handle = await fs.open(file, "r")
const relative = path.relative(r("docs"), file)
const archive = r(`dist/archive/v${version}/${relative}`)
await fs.mkdir(path.dirname(archive), {recursive: true})
if (file.endsWith(".md")) {
const html = compilePage(file, await handle.readFile("utf-8"))
const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
await fs.writeFile(archive.replace(/\.md$/, ".html"), minified)
} else if (file.endsWith(".html")) {
const html = await handle.readFile("utf-8")
const minified = HTMLMinifier.minify(html, htmlMinifierConfig)
await fs.writeFile(archive, minified)
} else {
await pipeline(
createReadStream(null, {fd: handle.fd}),
createWriteStream(archive)
)
}
} catch (e) {
// `fs.open` on a directory fails with EISDIR - recurse instead.
if (e.code !== "EISDIR") throw e
const files = await fs.readdir(file)
const devOnly = /^layout\.html$|^tutorials$|^archive$|^nav-/
await Promise.all(
files
.filter((f) => !devOnly.test(f))
.map((f) => path.join(file, f))
.map(generate)
)
}
}
await generate(r("docs"))
// The current version's pages also live at the site root.
await copy(r(`dist/archive/v${version}`), r("dist"))
// Just ensure it exists.
await (await fs.open(r("dist/.nojekyll"), "a")).close()
}

184
scripts/lint-docs.js Normal file
View file

@ -0,0 +1,184 @@
#!/usr/bin/env node
"use strict"
const {promises} = require("fs")
const path = require("path")
const {Glob} = require("glob")
const marked = require("marked")
// Accept just about anything
const babelParser = require("@babel/parser")
// Peer dependency on `request`
const request = require("request-promise-native")
require("./_command").exec(module, () => lint())
// lint rules
// A marked renderer subclassed to lint rather than render: each link and
// fenced code block of one Markdown file is checked, problems are printed,
// and hard failures set a nonzero exit code via `_emit`.
class LintRenderer extends marked.Renderer {
	constructor(file) {
		super()
		this._file = file
		this._dir = path.dirname(file)
		this._context = undefined
		// Per-code-block state, set by `code()` for the rule helpers below.
		this._code = undefined
		this._lang = undefined
		this._error = undefined
		// Pending async link checks; the caller awaits all of these.
		this._awaiting = []
	}

	// Report a problem without failing the lint run (used when the network
	// itself may be at fault).
	_emitTolerate(...data) {
		let str = data.join("\n")
		if (str.endsWith("\n")) str = str.slice(0, -1)
		console.log(`${this._file} - ${str}\n${"-".repeat(60)}`)
	}

	// Report a problem and mark the whole lint run as failed.
	_emit(...data) {
		this._emitTolerate(...data)
		process.exitCode = 1
	}

	// Reconstruct the current fenced block for error messages.
	_block() {
		return `\`\`\`${this._lang || ""}\n${this._code}\n\`\`\``
	}

	link(href) {
		// Don't fail if something byzantine shows up - it's the freaking
		// internet. Just log it and move on.
		const httpError = (e) =>
			this._emitTolerate(`http error for ${href}`, e.message)
		// Prefer https: > http: where possible, but allow http: when https: is
		// inaccessible.
		if ((/^https?:\/\//).test(href)) {
			const url = href.replace(/#.*$/, "")
			this._awaiting.push(request.head(url).then(() => {
				const isHTTPS = href.startsWith("https:")
				if (!isHTTPS) {
					return request.head(`https:${url.slice(7)}`).then(
						() => this._emit("change http: to https:"),
						() => { /* ignore inner errors */ }
					)
				}
			}, (e) => {
				if (e.statusCode === 404) {
					this._emit(`broken external link: ${href}`)
				} else {
					if (
						e.error.code === "ERR_TLS_CERT_ALTNAME_INVALID" &&
						href.startsWith("https://")
					) {
						return request.head(`http:${url.slice(6)}`).then(
							() => this._emit(`change ${href} to use http:`),
							// ignore inner errors
							() => httpError(e)
						)
					}
					httpError(e)
				}
			}))
		} else {
			// Relative links: only `.md` targets are verified, on disk.
			const exec = (/^([^#?]*\.md)(?:$|\?|#)/).exec(href)
			if (exec != null) {
				const resolved = path.resolve(this._dir, exec[1])
				this._awaiting.push(promises.access(resolved).catch(() => {
					this._emit(`broken internal link: ${href}`)
				}))
			}
		}
	}

	code(code, lang) {
		this._code = code
		this._lang = lang
		// Fixed: reset the parse error per block - otherwise a syntax error
		// from an earlier block would be falsely re-reported for this one.
		this._error = undefined
		if (!lang || lang === "js" || lang === "javascript") {
			try {
				// Could be within any production.
				babelParser.parse(code, {
					sourceType: "unambiguous",
					allowReturnOutsideFunction: true,
					allowAwaitOutsideFunction: true,
					allowSuperOutsideMethod: true,
					allowUndeclaredExports: true,
					plugins: ["dynamicImport"],
				})
			} catch (e) {
				this._error = e
			}
		}
		this._ensureCodeIsHighlightable()
		this._ensureCodeHasConsistentTag()
		this._ensureCodeIsSyntaticallyValid()
		this._ensureCommentStyle()
	}

	_ensureCodeIsHighlightable() {
		// We only care about what's not tagged here.
		if (!this._lang) {
			// TODO: ensure all code blocks have tags, and check this in CI.
			if (this._error == null) {
				this._emit(
					"Code block possibly missing `javascript` language tag",
					this._block(),
				)
			}
			try {
				JSON.parse(this._code)
				this._emit(
					"Code block possibly missing `json` language tag",
					this._block(),
				)
			} catch {
				// ignore
			}
		}
	}

	_ensureCodeHasConsistentTag() {
		if (this._lang === "js") {
			this._emit("JS code block has wrong language tag", this._block())
		}
	}

	_ensureCodeIsSyntaticallyValid() {
		// Fixed: this guard previously read `this.lang`, which is always
		// undefined (the field is `_lang`), so the rule never fired.
		if (!this._lang || !(/^js$|^javascript$/).test(this._lang)) return
		if (this._error != null) {
			this._emit(
				"JS code block has invalid syntax", this._error.message,
				this._block()
			)
		}
	}

	_ensureCommentStyle() {
		// Fixed: same `this.lang` -> `this._lang` bug as above.
		if (!this._lang || !(/^js$|^javascript$/).test(this._lang)) return
		if ((/(^|\s)\/\/[\S]/).test(this._code)) {
			this._emit("Comment is missing a preceding space", this._block())
		}
	}
}
// Lint every Markdown doc under docs/, skipping changelogs, migration
// guides, and node_modules. Resolves once every file and every queued
// link check has settled.
function lint() {
return new Promise((resolve, reject) => {
const glob = new Glob(path.resolve(__dirname, "../docs/**/*.md"), {
ignore: [
"**/change-log.md",
"**/migration-*.md",
"**/node_modules/**",
],
nodir: true,
})
const awaiting = []
glob.on("match", (file) => {
// Parse with the linting renderer purely for its side effects.
awaiting.push(promises.readFile(file, "utf-8").then((contents) => {
const renderer = new LintRenderer(file)
marked(contents, {renderer})
return Promise.all(renderer._awaiting)
}))
})
glob.on("error", reject)
glob.on("end", () => resolve(Promise.all(awaiting)))
})
}
module.exports = lint

153
scripts/release.js Normal file
View file

@ -0,0 +1,153 @@
#!/usr/bin/env node
/* eslint-disable no-process-exit */
"use strict"
// This is my temporary hack to simplify deployment until I fix the underlying
// problems in these bugs:
// - https://github.com/MithrilJS/mithril.js/issues/2417
// - https://github.com/MithrilJS/mithril.js/pull/2422
//
// Depending on the complexity, it might become permanent. It really isn't that
// helpful to create a release on Travis vs locally, aside from a couple extra
// potential 2FA prompts by npm during login and publish.
if (require.main !== module) {
throw new Error("This is a script, not a module!")
}
const path = require("path")
const fs = require("fs")
const {execFileSync} = require("child_process")
const rimraf = require("rimraf")
// Print usage to stderr and exit successfully (used for `--help` and for
// missing arguments).
// Fix: the usage text referred to `scripts/release.sh`; this file is
// `scripts/release.js`.
function showHelp() {
	console.error(`
scripts/release.js increment [ --preid id ]

Invoke as \`scripts/release.js\` to invoke the release sequence, specifying the
version increment via \`increment\` (required). Here's how they all work:

- \`major\` increments from 1.0.0 or 2.0.0-beta.0 to 2.0.0
- \`minor\` increments from 1.0.0 to 1.1.0
- \`patch\` increments from 1.0.0 to 1.0.1
- \`premajor\` increments from 1.0.0 to 2.0.0-beta.0
- \`preminor\` increments from 1.0.0 to 1.1.0-beta.0
- \`prepatch\` increments from 1.0.0 to 1.0.1-beta.0
- \`prerelease\` increments from 2.0.0-beta.0 to 2.0.0-beta.1

\`--preid beta\` specifies the \`beta\` part above (default). It's required for
all \`pre*\` increment types except \`prerelease\`.

See the docs for \`npm version\` <https://docs.npmjs.com/cli/version> for
details on the \`increment\` parameter.
`.trim())
	process.exit(0)
}
// Print an error to stderr and abort the script with a failing exit code.
function bail(...messageParts) {
	console.error(...messageParts)
	process.exit(1)
}
// `rootDir` is the repository root (this script lives in `scripts/`), and `p`
// resolves paths relative to it.
const rootDir = path.dirname(__dirname)
const p = (...args) => path.resolve(rootDir, ...args)

// Read the current version from the root `package.json`.
// Fix: this previously read `p("../package.json")`, which resolves one level
// *above* the repo root since `rootDir` is already the root (compare
// `p("node_modules")` used elsewhere in this script).
function readVersion() {
	return JSON.parse(fs.readFileSync(p("package.json"), "utf-8")).version
}
// Parse CLI arguments. `"--": true` collects everything after a literal `--`
// separator into `parsed["--"]`.
const parsed = require("minimist")(process.argv.slice(2), {
	boolean: ["help"],
	alias: {help: ["h", "?"]},
	string: ["preid"],
	"--": true,
})
// NOTE(review): the increment is read from `parsed["--"]`, i.e. the user must
// pass it after a `--` separator; plain positionals land in `parsed._` and
// would trigger the help screen instead. Confirm this matches intended usage.
if (parsed.help || !parsed["--"].length) showHelp()
// The version increment (`major`, `minor`, `prerelease`, ...) and optional
// prerelease identifier (e.g. `beta`).
const publishType = parsed["--"][0]
const publishPreid = parsed.preid
// Prereleases are published under the `next` dist-tag so `npm install mithril`
// keeps resolving to the latest stable version.
const publishArgs = publishType.startsWith("pre") ? ["--tag", "next"] : []
let releaseArgs = []
if (publishType.startsWith("pre") && publishType !== "prerelease") {
	// `npm version premajor/preminor/prepatch` needs to know the prerelease
	// identifier; `prerelease` just bumps the existing one.
	if (publishPreid == null) {
		bail("`pre*` increments other than `prerelease` require `--preid`")
	}
	releaseArgs = [`--preid=${publishPreid}`]
}
// Run `cmd` synchronously with the given args, inheriting stdio by default.
// Pass `opts` to override (e.g. `{stdio: "pipe"}` to capture stdout).
// Fix: this previously invoked `execFileSync(name, ...)`, but the parameter
// is `cmd` — `name` is undefined, so every call threw a ReferenceError.
function exec(cmd, args, opts) {
	return execFileSync(cmd, args, {
		windowsHide: true,
		stdio: "inherit",
		encoding: "utf-8",
		...opts,
	})
}
const upstream = require("./_upstream")

// NOTE(review): `upstream.fetch.branch` / `upstream.push.branch` are used
// where git expects a *remote* name throughout this script — confirm
// `_upstream`'s field naming before relying on it.
exec("git", ["checkout", "next"])
exec("git", ["pull", "--rebase", upstream.fetch.branch, "next"])

// Because I'm too lazy to make everything async.
// NOTE(review): `read -rsp` is a Bash-ism; this assumes the shell spawned by
// `shell: true` understands it — confirm on systems where /bin/sh is not Bash.
exec("read", ["-rsp", `
Update "Upcoming" in \`docs/change-log.md\`. If moving a prerelease to stable,
also replace all references to \`mithril@next\` to \`mithril\`, including in
Flems snippets. Press enter once ready to continue.
`.trim()], {shell: true})

// Verify the changelog was updated and nothing else was touched.
// Fixes vs. the previous version:
// - `exec` defaults to `stdio: "inherit"`, under which `execFileSync` returns
//   null — the status output was never captured and `.split` threw. Capture
//   it explicitly with `stdio: "pipe"`.
// - Splitting on NUL leaves a trailing empty string that always flipped
//   `treeDirty`; skip empty entries.
// - The changelog lives at `docs/change-log.md` (see the prompt above), not
//   `CHANGELOG.md`, so the old check could never pass.
let changelogUpdated = false
let treeDirty = false
const status = exec("git", ["status", "-z"], {stdio: "pipe"})
for (const entry of status.split(/\0/g)) {
	if (entry === "") continue
	switch (entry) {
		case " M docs/change-log.md":
		case "M  docs/change-log.md":
		case "MM docs/change-log.md":
			changelogUpdated = true
			break
		default:
			treeDirty = true
	}
}

if (!changelogUpdated || treeDirty) {
	if (!changelogUpdated) console.error("Error: Changelog must be updated!")
	// Fix: this previously tested `!treeDirty`, so the message only printed
	// when the tree was clean.
	if (treeDirty) console.error("Error: Tree must not be otherwise dirty!")
	process.exit(1)
}

exec("git", ["add", "."])
exec("git", ["commit", "-m", "Preparing for release"])
exec("git", ["checkout", "master"])
exec("git", ["pull", "--rebase", upstream.fetch.branch, "master"])

// There may be merge conflicts with `index.js` and/or the bundle - just ignore
// them. Whatever they have is canon, as is the case with everything else.
// Fix: git has no `theirs` merge *strategy* (`-s theirs` aborts with "could
// not find merge strategy"); use the `theirs` strategy *option* of the
// default strategy instead.
exec("git", ["merge", "next", "-X", "theirs"])

// Reinstall from scratch and run the tests before cutting the version.
rimraf.sync(p("node_modules"))
exec("npm", ["install-test"])
exec("npm", ["version", "-m", "v%s", publishType, ...releaseArgs])
exec("git", ["push", "--follow-tags", "origin", "master"])
exec("git", ["push", "--follow-tags", upstream.push.branch, "master"])

// Sync the generated bundles back into `next`.
exec("git", ["checkout", "next"])
exec("git", ["checkout", "master", "--", "mithril.js", "mithril.min.js"])
// That's already been updated in `master`.
exec("git", ["commit", "-m", `Generated bundles for ${readVersion()} [skip ci]`])
exec("git", ["push"])
exec("git", ["push", upstream.push.branch, "next"])
exec("git", ["checkout", "master"])

// Publish interactively (npm may prompt for 2FA), then refresh the docs site.
exec("npm", ["login"])
exec("npm", ["publish", ...publishArgs])
exec("npm", ["logout"])
require("./update-docs")()

39
scripts/update-docs.js Normal file
View file

@ -0,0 +1,39 @@
#!/usr/bin/env node
/* eslint-disable no-process-exit */
"use strict"
// This is my temporary hack to simplify deployment until I fix the underlying
// problems in these bugs:
// - https://github.com/MithrilJS/mithril.js/issues/2417
// - https://github.com/MithrilJS/mithril.js/pull/2422
const path = require("path")
const {execFileSync} = require("child_process")
const ghPages = require("gh-pages")
const upstream = require("./_upstream")
const generate = require("./generate-docs")
// Run the update when executed directly (`node scripts/update-docs.js`);
// when required as a module, just export it.
require("./_command")(module, () => update())
module.exports = update
// Regenerate the docs and publish `dist/` to the gh-pages branch, tagging the
// pages commit with the source commit hash.
async function update() {
	await generate()
	// Capture the current commit hash for the publish message.
	// Fix: `stdio: "inherit"` sent stdout to the terminal, making
	// `execFileSync` return null, so the message read "commit null". Capture
	// stdout instead, and trim the trailing newline git emits.
	const commit = execFileSync("git", ["rev-parse", "--verify", "HEAD"], {
		windowsHide: true,
		encoding: "utf-8",
	}).trim()
	await ghPages.publish(path.resolve(__dirname, "../dist"), {
		// Note: once this is running on Travis again, run
		// `git remote add upstream git@github.com:MithrilJS/mithril.js.git` to
		// force it to go over SSH so the saved keys are used.
		// https://github.com/tschaub/gh-pages/issues/160
		repo: upstream.push.repo,
		// NOTE(review): gh-pages' `remote` option expects a remote name, but
		// this passes `upstream.push.branch` — confirm `_upstream` stores the
		// remote name in that field.
		remote: upstream.push.branch,
		src: ["**/*", ".nojekyll"],
		message: `Generated docs for commit ${commit} [skip ci]`,
		// May want to enable this if an API token resolves the issue.
		// silent: !!process.env.TRAVIS_CI,
	})
}

10
scripts/watch-docs.js Normal file
View file

@ -0,0 +1,10 @@
"use strict"

const path = require("path")
const chokidar = require("chokidar")
const generate = require("./generate-docs")
const command = require("./_command")

// Regenerate the docs whenever anything under `docs/` changes.
const docsDir = path.resolve(__dirname, "../docs")
const watcher = chokidar.watch(docsDir)

watcher.on("all", () => {
	command.run(() => generate())
})