Merge pull request #22 from zambezi/refactor-async
Big refactor to use async/await for control flow
mstade authored Jul 7, 2016
2 parents 0057b96 + 02ff4f0 commit d09bea3
Showing 11 changed files with 231 additions and 173 deletions.
8 changes: 8 additions & 0 deletions .babelrc
@@ -0,0 +1,8 @@
{
"presets": ["es2015"],
"plugins":
[ "transform-async-functions"
, "transform-regenerator"
, "transform-es2015-modules-commonjs"
]
}
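The new .babelrc moves the Babel configuration out of the npm build script (compare the package.json changes below) and adds the two plugins needed to compile async functions down to generator-based code that runs on Node 4. A minimal sketch of the kind of source this config is meant to support — illustrative only, not a file from this commit; the readJson example and its promise wrapper are assumptions:

// Async/await in source is compiled by transform-async-functions and
// transform-regenerator; babel-polyfill (already a dependency) supplies the
// regenerator runtime when the compiled code runs on Node 4.
import { readFile } from 'fs'

export default async function readJson(path) {
  const text = await new Promise((resolve, reject) =>
    readFile(path, 'utf8', (error, data) => error ? reject(error) : resolve(data))
  )
  return JSON.parse(text)
}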
43 changes: 24 additions & 19 deletions package.json
@@ -4,45 +4,50 @@
"bin": {
"ez-build": "bin/ez-build.js"
},
"version": "0.2.4",
"version": "0.3.0-refactor-async.1",
"description": "The Zambezi build process",
"devDependencies": {
"babel-cli": "^6.5.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.5.0"
"babel-plugin-transform-async-functions": "^6.8.0",
"babel-plugin-transform-es2015-modules-commonjs": "^6.5.0",
"babel-plugin-transform-regenerator": "^6.9.0",
"depcheck": "0.6.3"
},
"dependencies": {
"ansicolors": "0.3.2",
"async": "1.5.2",
"babel-core": "^6.5.0",
"babel-core": "6.5.0",
"babel-plugin-transform-es2015-modules-amd": "^6.5.0",
"babel-polyfill": "6.5.0",
"babel-polyfill": "^6.5.0",
"babel-preset-es2015": "^6.5.0",
"bunyan": "1.6.0",
"chokidar": "1.4.2",
"commander": "^2.9.0",
"glob": "^6.0.4",
"mkdirp": "0.5.1",
"output-file-sync": "1.1.1",
"pkginfo": "^0.3.1",
"postcss": "5.0.16",
"postcss-cssnext": "2.4.0",
"postcss-import": "8.0.2",
"postcss-url": "5.1.1",
"read-package-json": "^2.0.3",
"requirejs": "^2.1.22",
"semver": "^5.1.0",
"commander": "2.9.0",
"funkis": "0.2.0",
"glob": "6.0.4",
"output-file": "1.1.1",
"pkginfo": "0.3.1",
"postcss": "^5.0.16",
"postcss-cssnext": "^2.4.0",
"postcss-import": "^8.0.2",
"postcss-url": "^5.1.1",
"read-package-json": "2.0.3",
"source-map-support": "0.4.0",
"strip-ansi": "3.0.0",
"thenify": "3.2.0",
"window-size": "0.2.0"
},
"scripts": {
"dev": "npm run build -- --watch",
"build": "babel src -d lib --presets es2015 --plugins transform-es2015-modules-commonjs --source-maps",
"prepublish": "npm run build"
"build": "babel src -d lib --source-maps",
"depcheck": "depcheck . --ignores-dirs=lib",
"prepublish": "npm run depcheck && npm run build"
},
"license": "MIT",
"repository": {
"type": "git",
"url": "[email protected]:zambezi/ez-build.git"
},
"engines": {
"node": ">=4"
}
}
13 changes: 4 additions & 9 deletions src/builder/copy-files.js
@@ -1,14 +1,9 @@
import { readFile } from 'fs'
import { slurp } from '../util/file'
import { extname } from 'path'

export default function configure(pkg, opts) {
return (name, input, done) => {
readFile(input, (error, data) => {
if (error) {
done(error)
} else {
done(null, { files: { [`${name}${extname(input)}`]: data } })
}
})
return async function process(name, input) {
let data = await slurp(input)
return { files: { [`${name}${extname(input)}`]: data } }
}
}
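The callback-style readFile is replaced with slurp from a new ../util/file module, which is not visible in this excerpt of the diff. Presumably it is a small promise wrapper around fs.readFile, roughly along the lines of this sketch (the signature is an assumption):

// Hypothetical shape of slurp in src/util/file.js: resolve with the file
// contents (a Buffer when no encoding is given), reject on error.
import { readFile } from 'fs'

export function slurp(path, encoding) {
  return new Promise((resolve, reject) =>
    readFile(path, encoding, (error, data) => error ? reject(error) : resolve(data))
  )
}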
30 changes: 13 additions & 17 deletions src/builder/css.js
@@ -1,30 +1,26 @@
import postcss from 'postcss'
import cssimport from 'postcss-import'
import cssnext from 'postcss-cssnext'
import { readFile } from 'fs'
import { slurp } from '../util/file'
import { relative } from 'path'

export default function configure(pkg, opts) {
const cc = postcss([cssimport, cssnext])
, map = opts.debug? { inline: false } : false

return (name, file, done) => {
readFile(file, (error, data) => {
const to = `${opts.lib}/${relative(opts.src, file)}`
cc.process(data, { from: file, to, map })
.then(result => {
let output =
{ messages: result.messages
, files: { [`${name}.css`]: result.css }
}
return async function process(name, file) {
let data = await slurp(file)
const to = `${opts.lib}/${relative(opts.src, file)}`
let result = await cc.process(data, { from: file, to, map })
let output =
{ messages: result.messages
, files: { [`${name}.css`]: result.css }
}

if (opts.debug) {
output.files[`${name}.css.map`] = JSON.stringify(result.map)
}
if (opts.debug) {
output.files[`${name}.css.map`] = JSON.stringify(result.map)
}

done(null, output)
})
.catch(done)
})
return output
}
}
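Worth noting: cc.process(...) can be awaited directly because postcss returns a LazyResult, which is thenable — the old code already relied on this by calling .then on it. A usage sketch of the configured builder follows; the option values and file paths are hypothetical, for illustration only:

import { default as cssc } from './builder/css'

async function example(pkg) {
  const opts = { src: 'src', lib: 'lib', debug: true } // hypothetical option values
  const buildCss = cssc(pkg, opts)
  const { files, messages } = await buildCss('widget', 'src/widget.css')
  // files['widget.css'] holds the compiled CSS; with debug enabled,
  // files['widget.css.map'] holds the external source map as JSON.
  return { files, messages }
}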
28 changes: 12 additions & 16 deletions src/builder/javascript.js
@@ -1,12 +1,13 @@
import { transformFile } from 'babel-core'
import { default as deferred } from 'thenify'
import { debug } from '../util/stdio'
import { default as es2015 } from 'babel-preset-es2015'
import { default as amd } from 'babel-plugin-transform-es2015-modules-amd'

export default function configure(pkg, opts) {
return (name, file, done) => {
transformFile(file
, { moduleIds: true
return async function process(name, file) {
let result = await deferred(transformFile)(file,
{ moduleIds: true
, moduleRoot: `${pkg.name}/${opts.lib}`
, sourceRoot: opts.src
, presets: [es2015]
@@ -16,20 +17,15 @@ export default function configure(pkg, opts) {
, sourceFileName: file
, sourceMapTarget: file
}
, (error, result) => {
if (error) {
done(error)
} else {
let output = { files: { [`${name}.js`]: result.code } }
)

if (opts.debug) {
output.files[`${name}.js`] += `\n//# sourceMappingURL=${name}.js.map`
output.files[`${name}.js.map`] = JSON.stringify(result.map)
}
let output = { files: { [`${name}.js`]: result.code } }

done(null, output)
}
}
)
if (opts.debug) {
output.files[`${name}.js`] += `\n//# sourceMappingURL=${name}.js.map`
output.files[`${name}.js.map`] = JSON.stringify(result.map)
}

return output
}
}
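Here the promisification is done with thenify: deferred(transformFile) wraps Babel's callback-style transformFile in a function that returns a promise, so its result can be awaited. It is roughly equivalent to this hand-rolled wrapper (a sketch, not code from the commit):

// Equivalent promise wrapper around babel-core's callback-style transformFile.
import { transformFile } from 'babel-core'

function transformFileAsync(file, options) {
  return new Promise((resolve, reject) =>
    transformFile(file, options, (error, result) =>
      error ? reject(error) : resolve(result)
    )
  )
}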
154 changes: 76 additions & 78 deletions src/main.js
@@ -1,30 +1,28 @@
import program from 'commander'
import readPkg from 'read-package-json'
import { sync as find } from 'glob'
import { find as resolvePkg } from 'pkginfo'
import { dirname, relative, basename as base, resolve } from 'path'
import { default as put } from 'output-file-sync'
import { readFileSync as slurp } from 'fs'
import { read as readPkg, find as resolvePkg } from './util/pkg'
import { find, slurp, put } from './util/file'
import stdio from './util/stdio'
import { parallel, apply } from 'async'
import { default as jsc } from './builder/javascript'
import { default as cssc } from './builder/css'
import { default as copyFiles } from './builder/copy-files'
import { default as createPipeline } from './pipeline'
import { red, yellow } from 'ansicolors'
import { watch } from 'chokidar'
import rebaseProdCss from './rebase-prod-css'
import { timed } from './util/performance'
import './util/cli'

const keys = Object.keys
const all = Promise.all.bind(Promise)

const pkgFile = resolvePkg(module, process.cwd())
, pkgRoot = dirname(pkgFile)
main()

readPkg(pkgFile, (err, pkg) => {
if (err) {
console.error(err.message)
process.exit(1)
}
async function main() {
const pkgFile = resolvePkg(module, process.cwd())
, pkgRoot = dirname(pkgFile)

var pkg = await readPkg(pkgFile)

pkg.root = pkgRoot
pkg.resolve = (path) => relative(process.cwd(), resolve(pkgRoot, path))
@@ -64,9 +62,9 @@ readPkg(pkgFile, (err, pkg) => {
const console = stdio({ debug: !!process.env.DEBUG, format: opts.log })

const pipeline =
{ js: createPipeline(pkg, opts, jsc(pkg, opts), logPipe('js'))
, css: createPipeline(pkg, opts, cssc(pkg, opts), logPipe('css'))
, 'copy-files': createPipeline(pkg, opts, copyFiles(pkg, opts), logPipe('copy files'))
{ js: createPipeline(pkg, opts, jsc(pkg, opts))
, css: createPipeline(pkg, opts, cssc(pkg, opts))
, 'copy-files': createPipeline(pkg, opts, copyFiles(pkg, opts))
}

opts.include = conclude(keys(pipeline), defaults.include, opts.include)
@@ -84,54 +82,71 @@

console.debug('Options:')
keys(defaults).forEach(name => console.debug(`- ${name}: ${JSON.stringify(opts[name])}`))

const build = await timed(all(keys(pipeline).map(async type => {
let results = pipeline[type](await collect(opts.include[type], opts.exclude[type]))

for (let result of results) {
try {
let { input, messages, files } = await result

parallel(
keys(pipeline).map(type => apply(pipeline[type], collect(opts.include[type], opts.exclude[type])))
, (error, results) => {
if (opts.interactive) {
console.info('Starting interactive mode...')
keys(pipeline).forEach(type => {
console.debug(`Watching ${type} pipeline:`)
console.debug(`- included: ${opts.include[type]}`)
console.debug(`- excluded: ${opts.exclude[type]}`)
interactive(opts.include[type], opts.exclude[type])
.on('add', file => pipeline[type](file))
.on('change', file => pipeline[type](file))
})
} else if (opts.optimize) {
console.debug('Writing optimised-modules.json')
put(pkg.resolve('optimised-modules.json'),
JSON.stringify(
new Set(find(`${opts.lib}/**/*`, { nodir: true }).map(file => {
const name = file.replace(/^([^\.]+).*$/, '$1').replace(/\\/g, '/')
return `${pkg.name}/${name}`
}))
, null, 2)
)

console.debug(`Writing ${pkg.name}-min.css`)
put(pkg.resolve(`${pkg.name}-min.css`),
find(`${opts.lib}/**/*.css`)
.map(file => rebaseProdCss(pkg, opts, file))
.join('\n')
)

console.debug(`Writing ${pkg.name}-min.js`)
put(pkg.resolve(`${pkg.name}-min.js`),
find(`${opts.lib}/**/*.js`)
.map(file => slurp(file, 'utf8'))
.join('\n')
)
if (messages) {
[...messages].forEach(message => {
console.warn(yellow(`\n${type}${input}: ${message}`))
})
}

console.log(`${type}${input} -> ${files}`)
} catch (error) {
console.error(`\n${type}${red(error.message)}\n${error.codeFrame || error.stack}\n`)
}
}
)
})))

console.debug(`Build took ${build.duration.ms} ms`)

if (opts.interactive) {
console.info('Starting interactive mode...')
keys(pipeline).forEach(type => {
console.debug(`Watching ${type} pipeline:`)
console.debug(`- included: ${opts.include[type]}`)
console.debug(`- excluded: ${opts.exclude[type]}`)
interactive(opts.include[type], opts.exclude[type])
.on('add', file => pipeline[type](file))
.on('change', file => pipeline[type](file))
})
} else if (opts.optimize) {
console.debug('Writing optimised-modules.json')
await put(pkg.resolve('optimised-modules.json'), JSON.stringify(
new Set((await find(`${opts.lib}/**/*`, { nodir: true })).map(file => {
const name = file.replace(/^([^\.]+).*$/, '$1').replace(/\\/g, '/')
return `${pkg.name}/${name}`
}))
, null, 2
))

console.debug(`Writing ${pkg.name}-min.css`)
await put(pkg.resolve(`${pkg.name}-min.css`),
(await find(`${opts.lib}/**/*.css`))
.map(file => rebaseProdCss(pkg, opts, file))
.join('\n')
)

console.debug(`Writing ${pkg.name}-min.js`)
await put(pkg.resolve(`${pkg.name}-min.js`),
(await find(`${opts.lib}/**/*.js`))
.map(file => slurp(file, 'utf8'))
.join('\n')
)
}

function collect(include, exclude) {
let collection = include.reduce((files, pattern) => {
return files.concat(
find(`${opts.src}/${pattern}`, { nodir: true, ignore: exclude, cwd: pkg.root })
)
}, [])
async function collect(include, exclude) {
let collection = []

for (let pattern of include) {
let files = await find(`${opts.src}/${pattern}`, { nodir: true, ignore: exclude, cwd: pkg.root })
collection = [...collection, ...files]
}

return collection
}
Expand All @@ -143,24 +158,7 @@ readPkg(pkgFile, (err, pkg) => {

return watcher
}

function logPipe(pipeline) {
return {
onBuild({ input, messages, files }) {
if (messages) {
[].concat(messages).forEach(message => {
console.warn({ pipeline, message }, yellow(`\n${pipeline}${input}: ${message}`))
})
}
console.log({ pipeline, input, files }, `${pipeline}${input} -> ${files}`)
},

onError({ input, error }) {
console.error({ pipeline, error }, `\n${pipeline}${red(error.message)}\n${error.codeFrame}\n`)
}
}
}
})
}

function setOptimization(level) {
return Math.max(level | 0, 0)
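The build is now wrapped in timed(...) from ./util/performance, another module not visible in this excerpt. Judging by the build.duration.ms usage above, it presumably awaits the given promise and reports the elapsed time, roughly like this sketch (the name, shape, and use of Date.now are assumptions):

// Hypothetical shape of timed in src/util/performance.js: await the work and
// return its result together with the elapsed wall-clock time in milliseconds.
export async function timed(work) {
  const start = Date.now()
  const result = await work
  return { result, duration: { ms: Date.now() - start } }
}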