diff --git a/__tests__/bin/mastarm.js b/__tests__/bin/mastarm.js
index 1f8351e..2c6e7b9 100644
--- a/__tests__/bin/mastarm.js
+++ b/__tests__/bin/mastarm.js
@@ -40,7 +40,7 @@ describe('mastarm cli', () => {
     exec(`node ${mastarm} build ${mockDir}/index.js:${buildDir}/index.js ${mockDir}/index.css:${buildDir}/index.css`,
       (err, stdout, stderr) => {
         expect(err).toBeNull()
-        expect(stdout).toContain('updated css file')
+        expect(stdout).toContain('done building')
         expect(stderr).toBe('')
         expect(fs.existsSync(`${buildDir}/index.js`)).toBeTruthy()
         expect(fs.existsSync(`${buildDir}/index.css`)).toBeTruthy()
diff --git a/__tests__/lib/push-to-s3.js b/__tests__/lib/push-to-s3.js
index 58a00ec..4d7bdb9 100644
--- a/__tests__/lib/push-to-s3.js
+++ b/__tests__/lib/push-to-s3.js
@@ -1,25 +1,34 @@
 /* global describe, it */
 
-const BUILD_DIR = '__tests__/test-utils/tmp'
-const MOCK_DIR = '__tests__/test-utils/mocks'
+const fs = require('fs')
+const path = require('path')
+
+const BUILD_DIR = path.join(process.cwd(), '__tests__/test-utils/tmp')
+const MOCK_DIR = path.join(process.cwd(), '__tests__/test-utils/mocks')
+const files = [
+  [`${MOCK_DIR}/index.js`, `${BUILD_DIR}/index.js`],
+  [`${MOCK_DIR}/index.css`, `${BUILD_DIR}/index.css`]
+]
 
 describe('lib > push to s3', () => {
-  const configPush = require('../../lib/push-to-s3')
+  const build = require('../../lib/build')
+  const createPushToS3 = require('../../lib/push-to-s3')
   const loadConfig = require('../../lib/load-config')
-  const createLogger = require('../../lib/logger')
 
  it('should compile JavaScript and CSS and send to s3 via aws-sdk', () => {
    const config = loadConfig(process.cwd(), 'configurations/default', 'development')
-    const push = configPush({
+    const push = createPushToS3({
      env: 'development',
      config,
-      log: createLogger(),
      minify: false,
      s3bucket: 'test-bucket'
    })
-    return Promise.all([
-      push([`${MOCK_DIR}/index.js`, `${BUILD_DIR}/index.js`]),
-      push([`${MOCK_DIR}/index.css`, `${BUILD_DIR}/index.css`])
-    ])
+    return build({
+      config,
+      env: 'development',
+      files
+    }).then(() =>
+      Promise.all(files.map((f) =>
+        push({body: fs.readFileSync(f[0]), outfile: f[0]}))))
  })
 })
diff --git a/bin/mastarm b/bin/mastarm
index 456c8d9..c7d298a 100755
--- a/bin/mastarm
+++ b/bin/mastarm
@@ -1,9 +1,11 @@
 #!/usr/bin/env node
 
 const commander = require('commander')
+const fs = require('fs')
 const path = require('path')
 
 const loadConfig = require('../lib/load-config')
+const logger = require('../lib/logger')
 const util = require('../lib/util')
 
 commander
@@ -42,11 +44,11 @@ commander
     const build = require('../lib/build')
     build(opts)
       .then((results) => {
-        console.log('done building...')
+        logger.log('done building...')
         if (!watch) process.exit(0)
       })
       .catch((err) => {
-        console.error(err)
+        logger.error(err)
         if (!watch) process.exit(1)
       })
   }
@@ -75,52 +77,69 @@ commander
   .action(function (entries, options) {
     const commit = require('this-commit')()
     const username = require('username')
-    const createLogger = require('../lib/logger')
+
+    const build = require('../lib/build')
     const pkg = require('../lib/pkg')
-    const pushToS3 = require('../lib/push-to-s3')
+    const createPushToS3 = require('../lib/push-to-s3')
 
     const url = pkg.repository.url.replace('.git', '')
-    const tag = `<${url}/commit/${commit}|${pkg.name}@${commit.slice(0, 6)}>:`
-    const user = username.sync()
+    const tag = `<${url}/commit/${commit}|${pkg.name}@${commit.slice(0, 6)}>`
 
     const config = loadConfig(process.cwd(), commander.config, commander.env)
     const get = util.makeGetFn([options, commander, config.settings])
-    const env = get('env') || 'development'
-    const s3bucket = get('s3bucket')
+    if (config.env.SLACK_WEBHOOK && config.env.SLACK_WEBHOOK.length > 0) {
+      logger.logToSlack({
+        channel: config.env.SLACK_CHANNEL || '#devops',
+        webhook: config.env.SLACK_WEBHOOK
+      })
+    }
 
     const files = util.parseEntries([...entries, ...(get('entries') || [])])
     util.assertEntriesExist(files)
+    const sourceFiles = files.map((f) => f[0])
+    const outfiles = [
+      ...files.map((f) => f[1]),
+      ...files.map((f) => `${f[1]}.map`)
+    ]
 
-    const log = createLogger({channel: config.env.SLACK_CHANNEL || '#devops', webhook: config.env.SLACK_WEBHOOK})
+    const env = get('env') || 'development'
     const minify = get('minify')
-    const cloudfront = get('cloudfront')
-    const push = pushToS3({
-      cloudfront,
+    const buildOpts = {
       config,
      env,
-      log,
-      minify,
-      s3bucket,
-      tag
+      files,
+      minify
+    }
+    const cloudfront = get('cloudfront')
+    const s3bucket = get('s3bucket')
+
+    const pushToS3 = createPushToS3({
+      cloudfront,
+      s3bucket
    })
 
-    log(
-`:construction: *${tag} deploy started by <@${user}>*
+    logger.log(
+`:construction: *deploying: ${tag} by <@${username.sync()}>*
 :cloud: *cloudfront:* ${cloudfront}
 :hash: *commit:* ${commit}
 :seedling: *env:* ${env}
 :compression: *minify:* ${minify}
-:package: *s3bucket:* ${s3bucket}`
-    ).then(() => {
-      Promise
-        .all(files.map(push))
+:package: *s3bucket:* ${s3bucket}
+:hammer_and_wrench: *building:* ${sourceFiles.join(', ')}`
+    ).then(() =>
+      build(buildOpts)
        .then(() =>
-          log(`:rocket: ${tag} deploy finished!! :tada: :confetti_ball: :tada:`)
+          logger.log(`:rocket: *uploading:* ${sourceFiles.length * 2} file(s)`))
+        .then(() =>
+          Promise.all(outfiles.map((outfile) =>
+            readFile(outfile).then((body) =>
+              pushToS3({body, outfile})))))
+        .then(() =>
+          logger.log(`:tada: :confetti_ball: :tada: *deploy ${tag} complete* :tada: :confetti_ball: :tada:`)
            .then(() => process.exit(0)))
        .catch((err) =>
-          log(`:rotating_light: *error deploying ${tag} ${err.message}*`)
-            .then(() => process.exit(1)))
-    })
+          logger.log(`:rotating_light: *${tag} error deploying ${tag} ${err.message || err}*`)
+            .then(() => process.exit(1))))
  })
 
 commander
@@ -158,14 +177,14 @@
     const errors = lintMessages(paths.length > 0 ? paths : ['lib'], config.messages)
 
     if (errors.length > 0) {
-      console.log(`${errors.length} missing messages`)
+      logger.error(`${errors.length} missing messages`)
       for (const [message, file, line] of errors) {
-        console.log(`${file} line ${line}: ${message} is not defined`)
+        logger.error(`${file} line ${line}: ${message} is not defined`)
      }
 
      process.exit(1)
    } else {
-      console.log('No missing messages found! 💃')
+      logger.log('No missing messages found! 💃')
    }
  })
 
@@ -212,3 +231,9 @@
   })
 
 commander.parse(process.argv)
+
+const readFile = (f) =>
+  new Promise((resolve, reject) =>
+    fs.readFile(f, (err, data) => err
+      ? reject(err)
+      : resolve(data)))
diff --git a/lib/browserify.js b/lib/browserify.js
index 742f500..0b96241 100644
--- a/lib/browserify.js
+++ b/lib/browserify.js
@@ -4,13 +4,15 @@ const uglifyify = require('uglifyify')
 
 const transform = require('./js-transform')
 
-module.exports = function ({
+module.exports = browserifyIt
+
+function browserifyIt ({
   config,
   entry,
   env,
   minify
 }) {
-  const pipeline = browserify(entry, {
+  return browserify(entry, {
     basedir: process.cwd(),
     cache: {},
     debug: true,
@@ -22,10 +24,10 @@ module.exports = function ({
     ],
     transform: transform({config, env})
   })
+}
 
-  if (minify) {
-    pipeline.transform(uglifyify, {global: true})
-  }
-
+module.exports.minify = function (opts) {
+  const pipeline = browserifyIt(opts)
+  pipeline.transform(uglifyify, {global: true})
   return pipeline
 }
diff --git a/lib/budo.js b/lib/budo.js
index 1814718..ed08067 100644
--- a/lib/budo.js
+++ b/lib/budo.js
@@ -1,8 +1,9 @@
 const budo = require('budo')
 const path = require('path')
 
-const transformJs = require('./js-transform')
 const transformCss = require('./css-transform')
+const logger = require('./logger')
+const transformJs = require('./js-transform')
 
 module.exports = function ({
   config,
@@ -55,6 +56,6 @@ module.exports = function ({
   budo
     .cli(budoFiles, budoOpts)
     .on('error', function (err) {
-      console.error(err.stack)
+      logger.error(err.stack)
     })
 }
diff --git a/lib/build.js b/lib/build.js
index ed4f82f..b677c01 100644
--- a/lib/build.js
+++ b/lib/build.js
@@ -1,10 +1,7 @@
-const fs = require('fs')
-const exorcist = require('exorcist')
-const mkdirp = require('mkdirp')
 const path = require('path')
 
-const browserify = require('./browserify')
 const buildCss = require('./css-transform')
+const buildJs = require('./js-build')
 
 /**
  * Takes a configuration object, array of file entries [entry, output], and other options.
@@ -24,36 +21,3 @@ module.exports = function ({
       ? buildCss({config, entry, outfile, watch})
       : buildJs({config, entry, env, minify, outfile, watch})))
 }
-
-/**
- *
- * @return Promise
- */
-
-function buildJs ({config, entry, env, minify, outfile, watch}) {
-  const pipeline = browserify({config, entry, env, minify})
-  const bundle = () => {
-    return new Promise((resolve, reject) => {
-      const stream = pipeline.bundle((err, buf) => {
-        if (err) reject(err)
-        else resolve(buf)
-      })
-
-      if (outfile) {
-        mkdirp.sync(path.dirname(outfile))
-        stream
-          .pipe(exorcist(`${outfile}.map`))
-          .pipe(fs.createWriteStream(outfile))
-      }
-    })
-  }
-
-  if (watch) {
-    pipeline.plugin(require('watchify'), {poll: true})
-    pipeline.plugin(require('errorify'))
-    pipeline.on('update', bundle)
-    pipeline.on('log', console.log)
-  }
-
-  return bundle()
-}
diff --git a/lib/css-transform.js b/lib/css-transform.js
index c814524..ef1e2c9 100644
--- a/lib/css-transform.js
+++ b/lib/css-transform.js
@@ -9,6 +9,8 @@ const postcssImport = require('postcss-import')
 const postcssReporter = require('postcss-reporter')
 const postcssSafeParser = require('postcss-safe-parser')
 
+const logger = require('./logger')
+
 module.exports = function ({
   config,
   entry,
@@ -45,9 +47,11 @@ module.exports = function ({
       mkdirp.sync(path.dirname(outfile))
       fs.writeFileSync(outfile, results.css)
       if (results.map) {
-        fs.writeFile(`${outfile}.map`, results.map, handleErr)
+        fs.writeFileSync(`${outfile}.map`, results.map)
+      }
+      if (watch) {
+        logger.log(`updated ${outfile}`)
       }
-      console.log(`updated css file: ${outfile}`)
     }
     return results
   })
@@ -93,9 +97,3 @@ function getUrl (value) {
   const url = match[3]
   return url
 }
-
-function handleErr (err) {
-  if (err) {
-    console.error(err.stack)
-  }
-}
diff --git a/lib/flyle.js b/lib/flyle.js
index ac9291c..307b5c1 100644
--- a/lib/flyle.js
+++ b/lib/flyle.js
@@ -6,6 +6,8 @@ const mkdirp = require('mkdirp')
 const path = require('path')
 const parse = require('url').parse
 
+const logger = require('./logger')
+
 const DEFAULT_CACHE_DIRECTORY = `${process.env.HOME}/.flyle`
 const DEFAULT_PNG = path.resolve(__dirname, '../mastarm.png')
 
@@ -43,7 +45,7 @@ module.exports = function (req, res) {
 }
 
 function logAndSend ({err, res}) {
-  console.error('flyle >> sending default image: ', err.message)
+  logger.error('flyle >> sending default image: ', err.message)
   sendImg({
     path: DEFAULT_PNG,
     res
diff --git a/lib/js-build.js b/lib/js-build.js
new file mode 100644
index 0000000..da5c1fd
--- /dev/null
+++ b/lib/js-build.js
@@ -0,0 +1,42 @@
+const fs = require('fs')
+const exorcist = require('exorcist')
+const mkdirp = require('mkdirp')
+const path = require('path')
+
+const browserify = require('./browserify')
+const logger = require('./logger')
+
+/**
+ *
+ * @return Promise
+ */
+
+module.exports = function buildJs ({config, entry, env, minify, outfile, watch}) {
+  const pipeline = minify
+    ? browserify.minify({config, entry, env})
+    : browserify({config, entry, env})
+  const bundle = () => new Promise((resolve, reject) => {
+    if (outfile) {
+      mkdirp.sync(path.dirname(outfile))
+      pipeline.bundle()
+        .pipe(exorcist(`${outfile}.map`))
+        .pipe(fs.createWriteStream(outfile))
+        .on('error', reject)
+        .on('finish', resolve)
+    } else {
+      pipeline.bundle((err, buf) => {
+        if (err) reject(err)
+        else resolve(buf)
+      })
+    }
+  })
+
+  if (watch) {
+    pipeline.plugin(require('watchify'), {poll: true})
+    pipeline.plugin(require('errorify'))
+    pipeline.on('update', bundle)
+    pipeline.on('log', logger.log)
+  }
+
+  return bundle()
+}
diff --git a/lib/lint-messages.js b/lib/lint-messages.js
index b13acbb..dcdf8a8 100644
--- a/lib/lint-messages.js
+++ b/lib/lint-messages.js
@@ -3,6 +3,8 @@
 const { readFileSync, statSync, existsSync } = require('fs')
 const glob = require('glob')
 
+const logger = require('./logger')
+
 // the IMPORT expression can be quite slow, so use a fast expression to pre-test if this is even a candidate
 const IS_IMPORT = /import.*from '[^ ]+\/messages'/
 // This will split up the import statement into the default and named imports. The named imports,
@@ -97,8 +99,8 @@ module.exports = {
   lint: function (files, messages) {
     const missingFiles = files.filter(file => !existsSync(file))
     if (missingFiles.length > 0) {
-      console.log('some files were not found:')
-      missingFiles.forEach(f => console.log(` - ${f}`))
+      logger.log('some files were not found:')
+      missingFiles.forEach(f => logger.log(` - ${f}`))
       process.exit(1)
     }
 
diff --git a/lib/logger.js b/lib/logger.js
index ee4086a..b86a8d2 100644
--- a/lib/logger.js
+++ b/lib/logger.js
@@ -1,19 +1,50 @@
+const fetch = require('isomorphic-fetch')
 const nodeEmoji = require('node-emoji')
-const slack = require('./notify-slack')
 
-module.exports = function ({channel, webhook} = {}) {
-  if (webhook) {
-    return function (text) {
-      emojifyLog(text)
-      return slack({channel, text, webhook})
-    }
+const logToConsole = (text) =>
+  Promise.resolve(console.log(emojify(text)))
+const logToErrorConsole = (text) =>
+  Promise.resolve(console.error(emojify(text)))
+
+module.exports.log = logToConsole
+module.exports.error = logToErrorConsole
+
+module.exports.logToSlack = ({channel, webhook}) => {
+  module.exports.log = (text) => {
+    logToConsole(text)
+    return notifySlack({channel, text, webhook})
   }
-  return function (text) {
-    return Promise.resolve(emojifyLog(text))
+
+  module.exports.error = (text) => {
+    logToErrorConsole(text)
+    return notifySlack({channel, text, webhook})
   }
 }
 
-function emojifyLog (text) {
+function emojify (text) {
   const strippedLinks = text.replace(/<[^|>]+\|([^>]+)>/g, '$1')
-  console.log(nodeEmoji.emojify(strippedLinks))
+  return nodeEmoji.emojify(strippedLinks)
+}
+
+function notifySlack ({
+  channel,
+  text,
+  webhook
+}) {
+  return fetch(webhook, {
+    method: 'POST',
+    headers: {
+      'Content-Type': 'application/json'
+    },
+    body: JSON.stringify({
+      channel,
+      text
+    })
+  })
+    .then((response) => response.text())
+    .catch((err) => {
+      logToErrorConsole('Error posting to Slack webhook')
+      logToErrorConsole(err)
+      return err
+    })
 }
diff --git a/lib/notify-slack.js b/lib/notify-slack.js
deleted file mode 100644
index 2a88147..0000000
--- a/lib/notify-slack.js
+++ /dev/null
@@ -1,32 +0,0 @@
-const Slack = require('slack-node')
-const slack = new Slack()
-
-module.exports = function notify ({
-  channel,
-  text,
-  webhook
-}) {
-  return new Promise((resolve, reject) => {
-    try {
-      slack.setWebhook(webhook)
-      slack.webhook({channel, text}, (err, response) => {
-        if (err) {
-          onError(err)
-        } else if (response.statusCode >= 400) {
-          onError(new Error(`${response.statusCode} ${response.response}`))
-        } else {
-          resolve(response)
-        }
-      })
-    } catch (err) {
-      onError(err)
-    }
-
-    function onError (err) {
-      console.log(text)
-      console.error('Error posting to Slack webhook')
-      console.error(err)
-      resolve(err) // Always resolve to avoid logging to cause failure
-    }
-  })
-}
diff --git a/lib/push-to-s3.js b/lib/push-to-s3.js
index 4689fe2..8610345 100644
--- a/lib/push-to-s3.js
+++ b/lib/push-to-s3.js
@@ -1,96 +1,63 @@
 const AWS = require('aws-sdk')
 const mime = require('mime')
-const path = require('path')
 const uuid = require('uuid')
 
-const browserify = require('./browserify')
-const transformCss = require('./css-transform')
+const logger = require('./logger')
 
-module.exports = function ({
-  cloudfront,
-  config,
-  env,
-  log,
-  minify,
-  s3bucket,
-  tag
-}) {
-  const upload = createUpload({bucket: s3bucket, cloudfront, log, tag})
-  return ([entry, outfile]) =>
-    log(`:hammer_and_wrench: ${tag} building ${outfile}`)
-      .then(() => path.extname(entry) === '.js'
-        ? new Promise((resolve, reject) =>
-          browserify({config, entry, env, minify})
-            .bundle((err, buffer) => err
-              ? reject(err)
-              : resolve(upload({body: buffer, outfile}))
-            )
-          )
-        : transformCss({config, entry, env, minify})
-          .then((results) =>
-            upload({body: results.css, outfile}))
-      )
-}
-
-const createUpload = ({bucket, cloudfront, log, tag}) =>
+module.exports = ({s3bucket, cloudfront}) =>
   ({body, outfile}) =>
-    upload({body, bucket, cloudfront, log, outfile, tag})
+    upload({body, s3bucket, cloudfront, outfile})
 
 function upload ({
   body,
-  bucket,
+  s3bucket,
   cloudfront,
-  log,
-  outfile,
-  tag
+  outfile
 }) {
-  const bucketUrl = `https://s3.amazonaws.com/${bucket}`
+  const bucketUrl = `https://s3.amazonaws.com/${s3bucket}`
   return new Promise((resolve, reject) => {
     const s3object = new AWS.S3({
       params: {
         ACL: 'public-read',
         Body: body,
-        Bucket: bucket,
+        Bucket: s3bucket,
         ContentType: mime.lookup(outfile),
         Key: outfile
       }
     })
 
     const bytes = bytesToSize(body.byteLength || body.length)
-    const bucketLink = `<${bucketUrl}/${outfile}|${bucket}/${outfile}>`
-    log(`:airplane_departure: ${tag} uploading to ${bucketLink} (${bytes})`)
+    const bucketLink = `<${bucketUrl}/${outfile}|${s3bucket}/${outfile}>`
     s3object
       .upload()
       .send(function (err) {
-        if (err) return reject(new Error(`s3 upload to ${bucket} rejected with ${err.code} ${err.message}`))
-        log(`:ok_hand: ${tag} finished uploading to ${bucketLink}`).then(() => {
-          if (cloudfront) {
-            const cf = new AWS.CloudFront()
-            log(`:earth_asia: ${tag} creating cloudfront invalidation at ${outfile}`).then(() => {
-              cf.createInvalidation({
-                DistributionId: cloudfront,
-                InvalidationBatch: {
-                  CallerReference: uuid.v4(),
-                  Paths: {
-                    Quantity: 1,
-                    Items: [
-                      '/' + outfile
-                    ]
-                  }
+        if (err) return reject(new Error(`s3 upload to ${bucketLink} rejected with ${err.code} ${err.message}`))
+        if (cloudfront) {
+          const cf = new AWS.CloudFront()
+          logger.log(`:lightning: *cloudfront:* invalidating path ${outfile}`).then(() => {
+            cf.createInvalidation({
+              DistributionId: cloudfront,
+              InvalidationBatch: {
+                CallerReference: uuid.v4(),
+                Paths: {
+                  Quantity: 1,
+                  Items: [
+                    '/' + outfile
+                  ]
                }
-              }, function (err) {
-                if (err) return reject(new Error(`cf invalidation rejected with ${err.message}`))
-                done()
-              })
+              }
+            }, function (err) {
+              if (err) return reject(new Error(`cf invalidation rejected with ${err.message}`))
+              done()
            })
-          } else {
-            done()
-          }
-        })
+          })
+        } else {
+          done()
+        }
      })
 
    function done () {
-      log(`:checkered_flag: ${tag} finished with ${outfile}`).then(resolve)
+      logger.log(`:checkered_flag: *uploaded:* ${bucketLink} (${bytes})`).then(resolve)
    }
  })
 }
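Usage sketch (not part of this changeset): after the refactor, building and uploading are separate steps that a caller composes, roughly the way the updated __tests__/lib/push-to-s3.js and the bin/mastarm deploy command do. The require paths, bucket name, entry/output paths, and env value below are illustrative assumptions only.

// Hypothetical consumer script run from the mastarm repository root.
const fs = require('fs')

const build = require('./lib/build')
const createPushToS3 = require('./lib/push-to-s3')
const loadConfig = require('./lib/load-config')
const logger = require('./lib/logger')

// Load project config, then build the assumed entry to the assumed outfile.
const config = loadConfig(process.cwd(), 'configurations/default', 'production')
const files = [['lib/index.js', 'dist/index.js']]

// createPushToS3 returns a ({body, outfile}) => Promise uploader; cloudfront is optional.
const push = createPushToS3({s3bucket: 'example-bucket'})

build({config, env: 'production', files, minify: true})
  .then(() => push({body: fs.readFileSync('dist/index.js'), outfile: 'dist/index.js'}))
  .then(() => logger.log(':tada: build and upload complete'))
  .catch((err) => logger.error(err.message || err))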