diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..3b300a1 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,16 @@ +# top-most EditorConfig file +root = true + +# Unix-style newlines with a newline ending every file +[*] +end_of_line = lf +insert_final_newline = true + +# I like my 2-space indents, grr +[*] +indent_style = space +indent_size = 2 + +# It's the universal encoding of love +[*] +charset = utf-8 diff --git a/.gitignore b/.gitignore index 3c3629e..f7f6a08 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,27 @@ +# Logs +logs +*.log + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# node-waf configuration +.lock-wscript + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directory +# https://docs.npmjs.com/misc/faq#should-i-check-my-node-modules-folder-into-git node_modules diff --git a/.npmignore b/.npmignore new file mode 100644 index 0000000..06bf8ca --- /dev/null +++ b/.npmignore @@ -0,0 +1,4 @@ +* +!lib/*.js +!index.js +!LICENSE diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..4ad9ca5 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,14 @@ +language: node_js +node_js: + - '0.10' + - '0.12' + - 'iojs' + +install: + - time npm install + +script: + - npm run test:coverage + +after_success: + - cat ./coverage/lcov.info | npm run coveralls diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..01f48bf --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,83 @@ +# master + +# 1.2.0 + +* Replace with @caitp's cauliflower-filter implementation and @stefanpenner's tests + +# 1.1.0 + +* Add `name` and `annotation` options + +# 1.0.0 + +* Bump without change + +# 0.2.0 + +* Derive from new broccoli-plugin base class. Notably, this means that + subclasses always must call `Filter.call(this, inputTree)` in their + constructors, instead of settings `this.inputTree = inputTree`. + +# 0.1.14 + +* Improve performance by symlinking when possible + +# 0.1.13 + +* Improve error message when `processString` isn't overridden in subclass + +# 0.1.12 + +* Throw on undefined `inputTree` + +# 0.1.11 + +* Update dependencies + +# 0.1.10 + +* Do not override this.inputEncoding/this.outputEncoding if not provided + +# 0.1.9 + +* Fix inputEncoding/outputEncoding defaults + +# 0.1.8 + +* Add `inputEncoding` and `outputEncoding` options + +# 0.1.7 + +* Update dependency to deal with symlinks correctly + +# 0.1.6 + +* Copy instead of hardlinking + +# 0.1.5 + +* Use new broccoli-writer base class + +# 0.1.4 + +* Use broccoli-kitchen-sink-helpers instead of larger broccoli dependency + +# 0.1.3 + +* Remove stray `console.log` O_O + +# 0.1.2 + +* Augment error objects for better error reporting + +# 0.1.1 + +* Update `broccoli` dependency + +# 0.1.0 + +* Pass relativePath argument to `processFile` + +# 0.0.1 + +* Initial release diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..27b8e98 --- /dev/null +++ b/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Jo Liss +Copyright (c) 2015 Caitlin Potter & Contributors. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000..0ef19f3 --- /dev/null +++ b/README.md @@ -0,0 +1,166 @@ +# broccoli-filter + +[![Build Status](https://travis-ci.org/broccolijs/broccoli-filter.svg?branch=master)](https://travis-ci.org/broccolijs/broccoli-filter) +[![Build status](https://ci.appveyor.com/api/projects/status/hc68s0vbn9di4ehi/branch/master?svg=true)](https://ci.appveyor.com/project/joliss/broccoli-filter/branch/master) + +Helper base class for Broccoli plugins that map input files into output files +one-to-one. + +## API + +```js +class Filter { + /** + * Abstract base-class for filtering purposes. + * + * Enforces that it is invoked on an instance of a class which prototypically + * inherits from Filter, and which is not itself Filter. + */ + constructor(inputNode: BroccoliNode, options: FilterOptions): Filter; + + /** + * Abstract method `processString`: must be implemented on subclasses of + * Filter. + * + * The return value is written as the contents of the output file + */ + abstract processString(contents: string, relativePath: string): string; + + /** + * Virtual method `getDestFilePath`: determine whether the source file should + * be processed, and optionally rename the output file when processing occurs. + * + * Return `null` to pass the file through without processing. Return + * `relativePath` to process the file with `processString`. Return a + * different path to process the file with `processString` and rename it. + * + * By default, if the options passed into the `Filter` constructor contain a + * property `extensions`, and `targetExtension` is supplied, the first matching + * extension in the list is replaced with the `targetExtension` option's value. + */ + virtual getDestFilePath(relativePath: string): string; +} +``` + +### Options + +* `extensions`: An array of file extensions to process, e.g. `['md', 'markdown']`. +* `targetExtension`: The file extension of the corresponding output files, e.g. + `'html'`. +* `inputEncoding`: The character encoding used for reading input files to be + processed (default: `'utf8'`). For binary files, pass `null` to receive a + `Buffer` object in `processString`. +* `outputEncoding`: The character encoding used for writing output files after + processing (default: `'utf8'`). For binary files, pass `null` and return a + `Buffer` object from `processString`. 
+* `name`, `annotation`: Same as
+  [broccoli-plugin](https://github.com/broccolijs/broccoli-plugin#new-plugininputnodes-options);
+  see there.
+
+All options except `name` and `annotation` can also be set on the prototype
+instead of being passed into the constructor.
+
+### Example Usage
+
+```js
+var Filter = require('broccoli-filter');
+
+Awk.prototype = Object.create(Filter.prototype);
+Awk.prototype.constructor = Awk;
+function Awk(inputNode, search, replace, options) {
+  options = options || {};
+  Filter.call(this, inputNode, {
+    annotation: options.annotation
+  });
+  this.search = search;
+  this.replace = replace;
+}
+
+Awk.prototype.extensions = ['txt'];
+Awk.prototype.targetExtension = 'txt';
+
+Awk.prototype.processString = function(content, relativePath) {
+  return content.replace(this.search, this.replace);
+};
+```
+
+In `Brocfile.js`, use your new `Awk` plugin like so:
+
+```js
+var node = new Awk('docs', 'ES6', 'ECMAScript 2015');
+
+module.exports = node;
+```
+
+## Persistent Cache
+
+__Note: This feature is experimental and is only available on Unix-based systems.__
+
+Adding the `persist` flag allows a subclass to persist state across restarts. This exists to mitigate the upfront cost of some more expensive transforms on warm boot. __It does not aim to improve incremental build performance; if it does, that indicates something is wrong with the filter or input filter in question.__
+
+### How does it work?
+
+It does so by establishing a two-layer file cache. The first layer is the entire bucket.
+The second, `cacheKeyProcessString`, is a per-file cache key.
+
+Together, these two layers should provide the right balance of speed and sensibility.
+
+The bucket-level cache key must be stable but must also never become stale. If the key is not
+stable, state between restarts is lost and performance suffers. On the flip side,
+if the cache key becomes stale, changes may not be correctly reflected.
+
+It is configured by subclassing and refining the `cacheKey` method. A good key here is
+likely the name of the plugin, its version, and the actual versions of its dependencies.
+
+```js
+Subclass.prototype.cacheKey = function() {
+  return md5(Filter.prototype.cacheKey.call(this) + inputOptionsChecksum + dependencyVersionChecksum);
+};
+```
+
+The second key represents the contents of the file. Typically the base class's functionality
+is sufficient, as it merely generates a checksum of the file contents. If for some reason this
+is not sufficient, it can be reconfigured via subclassing.
+
+```js
+Subclass.prototype.cacheKeyProcessString = function(string, relativePath) {
+  return superAwesomeDigest(string);
+};
+```
+
+It is recommended that persistent rebuilds be opt-in for the consumer, as the feature does not currently work on all systems.
+
+```js
+var myTree = new SomePlugin('lib', { persist: true });
+```
+
+## FAQ
+
+### Upgrading from 0.1.x to 1.x
+
+You must now call the base class constructor. For example:
+
+```js
+// broccoli-filter 0.1.x:
+function MyPlugin(inputTree) {
+  this.inputTree = inputTree;
+}
+
+// broccoli-filter 1.x:
+function MyPlugin(inputNode) {
+  Filter.call(this, inputNode);
+}
+```
+
+Note that "node" is simply new terminology for "tree".
+
+### Source Maps
+
+**Can this help with compilers that are almost 1:1, like a minifier that takes
+a `.js` and `.js.map` file and outputs a `.js` and `.js.map` file?**
+
+Not at the moment. I don't know yet how to implement this and still have the
+API look beautiful.
We also have to make sure that caching works correctly, as +we have to invalidate if either the `.js` or the `.js.map` file changes. My +plan is to write a source-map-aware uglifier plugin to understand this use +case better, and then extract common code back into this `Filter` base class. diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000..d733e9f --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,35 @@ +# http://www.appveyor.com/docs/appveyor-yml + +# Fix line endings in Windows. (runs before repo cloning) +init: + - git config --global core.autocrlf true + +# Test against these versions of Node.js. +environment: + matrix: + - nodejs_version: "0.12" + +# Install scripts. (runs after repo cloning) +install: + # Get the latest stable version of Node 0.STABLE.latest + - ps: Install-Product node $env:nodejs_version + # Typical npm stuff. + - md C:\nc + - npm install -g npm@latest + # Workaround https://github.com/npm/npm/wiki/Troubleshooting#upgrading-on-windows + - set PATH=%APPDATA%\npm;%PATH% + - npm config set cache C:\nc + - npm version + - npm install + +# Post-install test scripts. +test_script: + # Output useful info for debugging. + - npm version + - cmd: npm test + +# Don't actually build. +build: off + +# Set build version format here instead of in the admin panel. +version: "{build}" diff --git a/index.js b/index.js index d977746..9272032 100644 --- a/index.js +++ b/index.js @@ -1,91 +1,91 @@ -var Filter = require('broccoli-filter'); -var Cache = require('async-disk-cache'); -var md5Hex = require('md5-hex'); +'use strict'; + var fs = require('fs'); -var hashForDep = require('hash-for-dep'); +var path = require('path'); +var mkdirp = require('mkdirp'); +var Promise = require('rsvp').Promise; +var Plugin = require('broccoli-plugin'); +var helpers = require('broccoli-kitchen-sink-helpers'); +var walkSync = require('walk-sync'); +var mapSeries = require('promise-map-series'); +var symlinkOrCopySync = require('symlink-or-copy').sync; +var copyDereferenceSync = require('copy-dereference').sync; +var Cache = require('./lib/cache'); +var debugGenerator = require('debug'); +var keyForFile = require('./lib/key-for-file'); +var PersistentCache = require('async-disk-cache'); +var md5Hex = require('md5-hex'); +var Processor = require('./lib/processor'); +var defaultProccessor = require('./lib/strategies/default'); -module.exports = PersistentFilter; +module.exports = Filter; -/* - * @public - * - * `broccoli-persistent-filter` is broccoli-filter but it is able to persit - * state across restarts. This exists to mitigate the upfront cost of some more - * expensive transforms on warm boot. - * - * Why isn't this the default behaviour? - * - * Deriving the correct cache key for a - * given filter can be tricky. In addition, this should be seen as a last - * resort, if a given filter is too slow often times it should be improved - * rather then opting for caching. - * - * What does this do? - * - * * This does not aim to improve incremental build performance, if it does, it - * should indicate something is wrong with the filter or input filter in - * question. - * - * * This does not improve cold boot times. - * - * How does it work? - * - * It does so but establishing a 2 layer file cache. - * The first layer, is the entire bucket. The second, `cacheKeyProcessString` - * is a per file cache key. - * - * Together, these two layers should provide the right balance of speed and - * sensibility. - * - * The bucket level cacheKey must be stable but also never become stale. 
If the - * key is not stable, state between restarts will be lost and performance will - * suffer. On the flip-side, if the cacheKey becomes stale changes may not be - * correctly reflected. - * - * It is configured by subclassing and refining `cacheKey` method. A good key - * here, is likely the name of the plugin, its version and the actual versions - * of its dependencies - * - * ```js - * Subclass.prototype.cacheKey = function() { - * return md5(Filter.prototype.call(this) + inputOptionsChecksum + dependencyVersionChecksum); - * } - * ``` - * - * The second key, represents the contents of the file. Typically the - * base-class's functionality is sufficient, as it merely generates a checksum - * of the file contents. If for some reason this is not sufficient, it can be - * re-configured via subclassing. - * - * ```js - * Subbclass.prototype.cacheKeyProcessString = function(string, relativePath) { - * return superAwesomeDigest(string); - * } - * ``` - * - * @class PersistentFilter - * @param {Tree} inputTree - * @param {Object} options - * - * */ -function PersistentFilter(inputTree, options) { - Filter.call(this, inputTree, options); - this.cache = new Cache(this.cacheKey(), { - compression: 'deflate' - }); + +Filter.prototype = Object.create(Plugin.prototype); +Filter.prototype.constructor = Filter; +function Filter(inputTree, options) { + if (!this || !(this instanceof Filter) || + Object.getPrototypeOf(this) === Filter.prototype) { + throw new TypeError('Filter is an abstract class and must be sub-classed'); + } + + var name = 'cauliflower-filter:' + (this.constructor.name); + if (this.description) { + name += ' > [' + this.description + ']'; + } + + this._debug = debugGenerator(name); + + Plugin.call(this, [inputTree]); + + this.processor = new Processor(options); + this.processor.setStrategy(defaultProccessor); + + /* Destructuring assignment in node 0.12.2 would be really handy for this! 
*/ + if (options) { + if (options.extensions != null) + this.extensions = options.extensions; + if (options.targetExtension != null) + this.targetExtension = options.targetExtension; + if (options.inputEncoding != null) + this.inputEncoding = options.inputEncoding; + if (options.outputEncoding != null) + this.outputEncoding = options.outputEncoding; + if (options.persist) { + this.processor.setStrategy(require('./lib/strategies/persistent')); + } + } + + this.processor.init(this); + + this._cache = new Cache(); + this._canProcessCache = Object.create(null); + this._destFilePathCache = Object.create(null); } -PersistentFilter.prototype = Object.create(Filter.prototype); +Filter.prototype.build = function build() { + var self = this; + var srcDir = this.inputPaths[0]; + var destDir = this.outputPath; + var paths = walkSync(srcDir); -/* - * @private - * - * - * @method cachKey - * @return {String} this filters top-level cache key - */ -PersistentFilter.prototype.cacheKey = function() { - return hashForDep(this.baseDir()); + this._cache.deleteExcept(paths).forEach(function(key) { + fs.unlinkSync(this.cachePath + '/' + key); + }, this); + + return mapSeries(paths, function rebuildEntry(relativePath) { + var destPath = destDir + '/' + relativePath; + if (relativePath.slice(-1) === '/') { + mkdirp.sync(destPath); + } else { + if (self.canProcessFile(relativePath)) { + return self.processAndCacheFile(srcDir, destDir, relativePath); + } else { + var srcPath = srcDir + '/' + relativePath; + symlinkOrCopySync(srcPath, destPath); + } + } + }); }; /* @public @@ -93,44 +93,147 @@ PersistentFilter.prototype.cacheKey = function() { * @method baseDir * @returns {String} absolute path to the root of the filter... */ -PersistentFilter.prototype.baseDir = function() { +Filter.prototype.baseDir = function() { throw Error('Filter must implement prototype.baseDir'); }; -/* +/** * @public * - * @method cacheKeyProcessString - * @return {String} this filters top-level cache key + * optionally override this to build a more rhobust cache key + * @param {String} string The contents of a file that is being processed + * @return {String} A cache key */ -PersistentFilter.prototype.cacheKeyProcessString = function(string, relativePath) { +Filter.prototype.cacheKeyProcessString = function(string /*, relativePath*/) { return md5Hex(string); }; +Filter.prototype.canProcessFile = + function canProcessFile(relativePath) { + return !!this.getDestFilePath(relativePath); +}; + +Filter.prototype.getDestFilePath = function getDestFilePath(relativePath) { + if (this.extensions == null) return relativePath; -/* - * @private - * - * @method processFile - * @param {String} srcDir - * @param {String} destDir - * @param {String} relativePath - * @return {Promise} - */ -PersistentFilter.prototype.processFile = function(srcDir, destDir, relativePath) { - var filter = this; - var inputEncoding = (this.inputEncoding === undefined) ? 'utf8' : this.inputEncoding; - var outputEncoding = (this.outputEncoding === undefined) ? 'utf8' : this.outputEncoding; - var string = fs.readFileSync(srcDir + '/' + relativePath, { encoding: inputEncoding }); - var cache = this.cache; - var key = this.cacheKeyProcessString(string, relativePath); - - return cache.get(key).then(function(entry) { - return entry.isCached ? 
entry.value : filter.processString(string, relativePath); - }).then(function(outputString) { - var outputPath = filter.getDestFilePath(relativePath); - fs.writeFileSync(destDir + '/' + outputPath, outputString, { encoding: outputEncoding }); - - return cache.set(key, outputString); + for (var i = 0, ii = this.extensions.length; i < ii; ++i) { + var ext = this.extensions[i]; + if (relativePath.slice(-ext.length - 1) === '.' + ext) { + if (this.targetExtension != null) { + relativePath = + relativePath.slice(0, -ext.length) + this.targetExtension; + } + return relativePath; + } + } + return null; +}; + +Filter.prototype.processAndCacheFile = + function processAndCacheFile(srcDir, destDir, relativePath) { + var self = this; + var cacheEntry = this._cache.get(relativePath); + + if (cacheEntry) { + var hashResult = hash(srcDir, cacheEntry.inputFile); + + if (cacheEntry.hash.hash === hashResult.hash) { + this._debug('cache hit: %s', relativePath); + + return symlinkOrCopyFromCache(cacheEntry, destDir, relativePath); + } else { + this._debug('cache miss: %s \n - previous: %o \n - next: %o ', relativePath, cacheEntry.hash.key, hashResult.key); + } + + } else { + this._debug('cache prime: %s', relativePath); + } + + return Promise.resolve(). + then(function asyncProcessFile() { + return self.processFile(srcDir, destDir, relativePath); + }). + then(copyToCache, + // TODO(@caitp): error wrapper is for API compat, but is not particularly + // useful. + // istanbul ignore next + function asyncProcessFileErrorWrapper(e) { + if (typeof e !== 'object') e = new Error('' + e); + e.file = relativePath; + e.treeDir = srcDir; + throw e; + }); + + function copyToCache() { + var entry = { + hash: hash(srcDir, relativePath), + inputFile: relativePath, + outputFile: destDir + '/' + self.getDestFilePath(relativePath), + cacheFile: self.cachePath + '/' + relativePath + }; + + if (fs.existsSync(entry.cacheFile)) { + fs.unlinkSync(entry.cacheFile); + } else { + mkdirp.sync(path.dirname(entry.cacheFile)); + } + + copyDereferenceSync(entry.outputFile, entry.cacheFile); + + return self._cache.set(relativePath, entry); + } +}; + +Filter.prototype.processFile = + function processFile(srcDir, destDir, relativePath) { + var self = this; + var inputEncoding = this.inputEncoding; + var outputEncoding = this.outputEncoding; + if (inputEncoding === void 0) inputEncoding = 'utf8'; + if (outputEncoding === void 0) outputEncoding = 'utf8'; + var contents = fs.readFileSync( + srcDir + '/' + relativePath, { encoding: inputEncoding }); + + return this.processor.processString(this, contents, relativePath).then(function asyncOutputFilteredFile(result) { + var outputString = result.string; + var outputPath = self.getDestFilePath(relativePath); + if (outputPath == null) { + throw new Error('canProcessFile("' + relativePath + '") is true, but getDestFilePath("' + relativePath + '") is null'); + } + outputPath = destDir + '/' + outputPath; + mkdirp.sync(path.dirname(outputPath)); + fs.writeFileSync(outputPath, outputString, { + encoding: outputEncoding + }); + + return self.processor.done(self, result); }); }; + +Filter.prototype.processString = + function unimplementedProcessString(contents, relativePath) { + throw new Error( + 'When subclassing cauliflower-filter you must implement the ' + + '`processString()` method.'); +}; + +function hash(src, filePath) { + var path = src + '/' + filePath; + var key = keyForFile(path); + + return { + key: key, + hash: helpers.hashStrings([ + path, + key.size, + key.mode, + key.mtime + ]) + }; +} + 
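+// Cache hit helper: instead of re-processing the file, link (or copy) the result produced
+// on an earlier build from this plugin's cache directory into the destination tree.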
+function symlinkOrCopyFromCache(entry, dest, relativePath) { + mkdirp.sync(path.dirname(entry.outputFile)); + + symlinkOrCopySync(entry.cacheFile, dest + '/' + relativePath); +} diff --git a/lib/cache.js b/lib/cache.js new file mode 100644 index 0000000..18f021e --- /dev/null +++ b/lib/cache.js @@ -0,0 +1,34 @@ +module.exports = Cache; + +function Cache() { + this.store = Object.create(null); +} + +Cache.prototype.get = function(key) { + return this.store[key]; +}; + +Cache.prototype.set = function(key, value) { + return this.store[key] = value; +}; + +Cache.prototype.delete = function(key) { + delete this.store[key]; +}; + +Cache.prototype.keys = function() { + return Object.keys(this.store); +}; + +Cache.prototype.keysWithout = function(without) { + return this.keys().filter(function(key) { + return without.indexOf(key) === -1; + }); +}; + +Cache.prototype.deleteExcept = function(without) { + return this.keysWithout(without).map(function(key) { + this.delete(key); + return key; + }, this); +}; diff --git a/lib/key-for-file.js b/lib/key-for-file.js new file mode 100644 index 0000000..4b66217 --- /dev/null +++ b/lib/key-for-file.js @@ -0,0 +1,17 @@ +'use strict'; + +var fs = require('fs'); + +module.exports = function keyForFile(fullPath) { + var stats = fs.statSync(fullPath); + + if (stats.isDirectory()) { + throw new Error('cannot diff directory'); + } + + return { + mode: stats.mode, + mtime: stats.mtime.getTime(), + size: stats.size + }; +}; diff --git a/lib/processor.js b/lib/processor.js new file mode 100644 index 0000000..7a12792 --- /dev/null +++ b/lib/processor.js @@ -0,0 +1,27 @@ +function Processor(options) { + options = options || {}; + this.processor = {}; + this.persistent = options.persist; +} + +Processor.prototype.setStrategy = function(stringProcessor) { + if (this.persistent && /^win/.test(process.platform)) { + console.log('Unfortunately persistent cache is currently not available on windows based systems. 
Please see https://github.com/stefanpenner/hash-for-dep/issues/8.'); + return; + } + this.processor = stringProcessor; +}; + +Processor.prototype.init = function(ctx) { + this.processor.init(ctx); +}; + +Processor.prototype.processString = function(ctx, contents, relativePath) { + return this.processor.processString(ctx, contents, relativePath); +}; + +Processor.prototype.done = function(ctx, result) { + return this.processor.done(ctx, result); +}; + +module.exports = Processor; diff --git a/lib/strategies/default.js b/lib/strategies/default.js new file mode 100644 index 0000000..9c26a45 --- /dev/null +++ b/lib/strategies/default.js @@ -0,0 +1,13 @@ +var Promise = require('rsvp').Promise; + +module.exports = { + init: function() {}, + + processString: function(ctx, contents, relativePath) { + return Promise.resolve({ string: ctx.processString(contents, relativePath) }); + }, + + done: function(ctx) { + return Promise.resolve(ctx.outputPath); + } +}; diff --git a/lib/strategies/persistent.js b/lib/strategies/persistent.js new file mode 100644 index 0000000..8b2f84f --- /dev/null +++ b/lib/strategies/persistent.js @@ -0,0 +1,49 @@ +'use strict'; + +var md5Hex = require('md5-hex'); +var PersistentCache = require('async-disk-cache'); +var hashForDep = require('hash-for-dep'); + +module.exports = { + + _peristentCache: {}, + + init: function(ctx) { + if (!ctx.constructor.cacheKey) { + ctx.constructor.cacheKey = this.cacheKey(ctx); + } + + this._peristentCache = new PersistentCache(ctx.constructor.cacheKey, { + compression: 'deflate' + }); + }, + + cacheKey: function(ctx) { + return hashForDep(ctx.baseDir()); + }, + + processString: function(ctx, contents, relativePath) { + var key = ctx.cacheKeyProcessString(contents, relativePath); + return this._peristentCache.get(key).then(function(entry) { + var result; + + if (entry.isCached) { + result = { + string: entry.value, + key: key + }; + } else { + result = { + string: ctx.processString(contents, relativePath), + key: key + }; + } + + return result; + }); + }, + + done: function(ctx, result) { + return this._peristentCache.set(result.key, result.string); + } +}; diff --git a/package.json b/package.json index 0a4ff33..6d3371f 100644 --- a/package.json +++ b/package.json @@ -2,28 +2,55 @@ "name": "broccoli-persistent-filter", "version": "0.0.2", "description": "broccoli filter but with a persistent cache", + "author": "Stefan Penner ", "main": "index.js", "scripts": { - "test": "npm test" + "test": "mocha", + "test:debug": "mocha debug", + "test:debug:brk": "mocha --debug-brk", + "test:watch": "mocha --watch", + "test:coverage": "istanbul cover --config=test/istanbul.yml _mocha" }, + "license": "MIT", "repository": { "type": "git", "url": "git+https://github.com/stefanpenner/broccoli-persistent-filter.git" }, - "keywords": [ - "broccoli", - "broccoli-plugin" - ], - "author": "Stefan Penner ", - "license": "ISC", "bugs": { "url": "https://github.com/stefanpenner/broccoli-persistent-filter/issues" }, "homepage": "https://github.com/stefanpenner/broccoli-persistent-filter#readme", + "keywords": [ + "broccoli", + "broccoli-plugin", + "broccoli-helper", + "filter", + "cache" + ], "dependencies": { - "async-disk-cache": "1.0.0", - "broccoli-filter": "^0.1.12", - "hash-for-dep": "^1.0.0", - "md5-hex": "^1.0.2" + "async-disk-cache": "^1.0.0", + "broccoli-kitchen-sink-helpers": "^0.2.7", + "broccoli-plugin": "^1.0.0", + "copy-dereference": "^1.0.0", + "debug": "^2.2.0", + "hash-for-dep": "0.0.3", + "md5-hex": "^1.0.2", + "mkdirp": "^0.5.1", + 
"promise-map-series": "^0.2.1", + "rsvp": "^3.0.18", + "symlink-or-copy": "^1.0.1", + "walk-sync": "^0.1.3" + }, + "devDependencies": { + "broccoli": "^0.16.3", + "broccoli-test-helpers": "^0.0.8", + "chai": "^3.0.0", + "chai-as-promised": "^5.1.0", + "coveralls": "^2.11.4", + "istanbul": "^0.3.17", + "minimatch": "^2.0.8", + "mocha": "^2.2.5", + "rimraf": "^2.4.2", + "sinon": "^1.15.3" } } diff --git a/test/cache-tests.js b/test/cache-tests.js new file mode 100644 index 0000000..3cf32b5 --- /dev/null +++ b/test/cache-tests.js @@ -0,0 +1,90 @@ +'use strict'; + +var chai = require('chai'); +var expect = chai.expect; +var Cache = require('../lib/cache'); + +describe('Cache', function() { + var cache; + + beforeEach(function(){ + cache = new Cache(); + }); + + it('has basic cache functionaltiy', function() { + expect(cache.get('foo')).to.be.undefined; + expect(cache.set('foo', 1)).to.equal(1); + expect(cache.get('foo')).to.equal(1); + expect(cache.get('bar', 2)).to.be.undefined; + expect(cache.set('bar', 2)).to.equal(2); + expect(cache.get('foo')).to.equal(1); + expect(cache.get('bar')).to.equal(2); + expect(cache.delete('bar')).to.be.undefined; + expect(cache.get('foo')).to.equal(1); + expect(cache.get('bar')).to.be.undefined; + }); + + it('without', function() { + cache.set('foo', 1); + cache.set('bar', 2); + cache.set('baz', 2); + + expect(cache.keysWithout(['foo'])).to.eql([ + 'bar', + 'baz' + ]); + + cache.delete('foo'); + + expect(cache.keysWithout(['foo'])).to.eql([ + 'bar', + 'baz' + ]); + + cache.delete('bar'); + + expect(cache.keysWithout(['foo'])).to.eql([ + 'baz' + ]); + }); + + it('deleteExcept', function() { + cache.set('foo', 1); + cache.set('bar', 2); + cache.set('baz', 2); + + expect(cache.deleteExcept(['foo'])).to.eql([ + 'bar', + 'baz' + ]); + + expect(cache.keys()).to.eql([ + 'foo' + ]); + + cache.set('foo', 1); + cache.set('bar', 2); + cache.set('baz', 2); + + expect(cache.deleteExcept(['apple'])).to.eql([ + 'foo', + 'bar', + 'baz' + ]); + + expect(cache.keys()).to.eql([ + + ]); + + cache.set('foo', 1); + cache.set('bar', 2); + cache.set('baz', 2); + + expect(cache.deleteExcept(['foo', 'bar', 'baz'])).to.eql([]); + expect(cache.keys()).to.eql([ + 'foo', + 'bar', + 'baz' + ]); + }); +}); diff --git a/test/fixtures/dir/a/README.md b/test/fixtures/dir/a/README.md new file mode 100644 index 0000000..a7d962a --- /dev/null +++ b/test/fixtures/dir/a/README.md @@ -0,0 +1 @@ +Nicest cats in need of homes \ No newline at end of file diff --git a/test/fixtures/dir/a/bar/bar.js b/test/fixtures/dir/a/bar/bar.js new file mode 100644 index 0000000..a1b9c0c --- /dev/null +++ b/test/fixtures/dir/a/bar/bar.js @@ -0,0 +1 @@ +Dogs... who needs dogs? \ No newline at end of file diff --git a/test/fixtures/dir/a/foo.js b/test/fixtures/dir/a/foo.js new file mode 100644 index 0000000..9eb4e7a --- /dev/null +++ b/test/fixtures/dir/a/foo.js @@ -0,0 +1 @@ +Nicest dogs in need of homes \ No newline at end of file diff --git a/test/fixtures/file.js b/test/fixtures/file.js new file mode 100644 index 0000000..85fdc35 --- /dev/null +++ b/test/fixtures/file.js @@ -0,0 +1 @@ +a file. diff --git a/test/istanbul.yml b/test/istanbul.yml new file mode 100644 index 0000000..3e95759 --- /dev/null +++ b/test/istanbul.yml @@ -0,0 +1,10 @@ +instrumentation: + root: . 
+ extensions: + - .js + excludes: + - test/test.js +reporting: + print: 'none' + reports: + - lcov diff --git a/test/key-for-file-tests.js b/test/key-for-file-tests.js new file mode 100644 index 0000000..b73933d --- /dev/null +++ b/test/key-for-file-tests.js @@ -0,0 +1,34 @@ +'use strict'; + +var chai = require('chai'); +var expect = chai.expect; +var keyForFile = require('../lib/key-for-file'); + +describe('keyForFile', function () { + describe('when given a path to a directory', function () { + it('throws an error', function () { + expect(function () { + keyForFile('./test/fixtures/dir'); + }).to.throw(/cannot diff directory/i); + }); + }); + + describe('when given an invalid path', function () { + it('throws an error', function () { + expect(function () { + keyForFile('./unlikely/to/be/a/real/path'); + }).to.throw(); + }); + }); + + describe('when given a path to a file', function () { + it('returns the cache key parts in an object literal', function () { + var key = keyForFile('./test/fixtures/file.js'); + expect(Object.keys(key)).to.deep.equal(['mode', 'mtime', 'size']); + expect(key.mode).to.be.a('number'); + expect(key.mtime).to.be.a('number'); + // windows line endings add a byte + expect(key.size).to.be.within(8,9); + }); + }); +}); diff --git a/test/mocha.opts b/test/mocha.opts new file mode 100644 index 0000000..f38b72e --- /dev/null +++ b/test/mocha.opts @@ -0,0 +1,2 @@ +--reporter dot +--ui bdd diff --git a/test/test.js b/test/test.js new file mode 100644 index 0000000..ac8df74 --- /dev/null +++ b/test/test.js @@ -0,0 +1,374 @@ +'use strict'; + +var chai = require('chai'); +var expect = chai.expect; +var chaiAsPromised = require('chai-as-promised'); +chai.use(chaiAsPromised); +var sinon = require('sinon'); +var broccoliTestHelpers = require('broccoli-test-helpers'); +var makeTestHelper = broccoliTestHelpers.makeTestHelper; +var cleanupBuilders = broccoliTestHelpers.cleanupBuilders; + +var inherits = require('util').inherits; +var fs = require('fs'); +var mkdirp = require('mkdirp'); +var path = require('path'); +var Builder = require('broccoli').Builder; +var Filter = require('../index.js'); +var minimatch = require('minimatch'); +var rimraf = require('rimraf').sync; +var walkSync = require('walk-sync'); +var copy = require('copy-dereference').sync; + +var fixturePath = path.join(process.cwd(), 'test', 'fixtures'); + +function ReplaceFilter(inputTree, options) { + if (!this) return new ReplaceFilter(inputTree, options); + options = options || {}; + Filter.call(this, inputTree, options); + this._glob = options.glob; + this._search = options.search; + this._replacement = options.replace; +} + +inherits(ReplaceFilter, Filter); + +ReplaceFilter.prototype.getDestFilePath = function(relativePath) { + if (this._glob === void 0) { + return Filter.prototype.getDestFilePath.call(this, relativePath); + } + return minimatch(relativePath, this._glob) ? 
relativePath : null; +}; + +ReplaceFilter.prototype.processString = function(contents, relativePath) { + var result = contents.replace(this._search, this._replacement); + return result; +}; + +ReplaceFilter.prototype.baseDir = function() { + return '../'; +}; + +function IncompleteFilter(inputTree, options) { + if (!this) return new IncompleteFilter(inputTree, options); + Filter.call(this, inputTree, options); +} + +inherits(IncompleteFilter, Filter); + +describe('Filter', function() { + function makeBuilder(plugin, dir, prepSubject) { + return makeTestHelper({ + subject: plugin, + fixturePath: dir, + prepSubject: prepSubject + }); + } + + afterEach(function() { + return cleanupBuilders(); + }); + + function read(relativePath, encoding) { + encoding = encoding === void 0 ? 'utf8' : encoding; + return fs.readFileSync(relativePath, encoding); + } + + function write(relativePath, contents, encoding) { + encoding = encoding === void 0 ? 'utf8' : encoding; + mkdirp.sync(path.dirname(relativePath)); + fs.writeFileSync(relativePath, contents, { + encoding: encoding + }); + } + + function remove(relativePath) { + fs.unlinkSync(relativePath); + } + + it('should throw if called as a function', function() { + expect(function() { + return Filter(); + }).to.throw(TypeError, /abstract class and must be sub-classed/); + }); + + + it('should throw if called on object which does not a child class of Filter', + function() { + expect(function() { + return Filter.call({}); + }).to.throw(TypeError, /abstract class and must be sub-classed/); + + expect(function() { + return Filter.call([]); + }).to.throw(TypeError, /abstract class and must be sub-classed/); + + expect(function() { + return Filter.call(global); + }).to.throw(TypeError, /abstract class and must be sub-classed/); + }); + + + it('should throw if base Filter class is new-ed', function() { + expect(function() { + return new Filter(); + }).to.throw(TypeError, /abstract class and must be sub-classed/); + }); + + + it('should throw if `processString` is not implemented', function() { + expect(function() { + new IncompleteFilter('.').processString('foo', 'fake_path'); + }).to.throw(Error, /must implement/); + }); + + + it('should process files with extensions included in `extensions` list by ' + + 'default', function() { + function MyFilter(inputTree, options) { + if (!this) return new MyFilter(inputTree, options); + Filter.call(this, inputTree, options); + } + inherits(MyFilter, Filter); + var filter = MyFilter('.', { extensions: ['c', 'cc', 'js']}); + expect(filter.canProcessFile('foo.c')).to.equal(true); + expect(filter.canProcessFile('test.js')).to.equal(true); + expect(filter.canProcessFile('blob.cc')).to.equal(true); + expect(filter.canProcessFile('twerp.rs')).to.equal(false); + }); + + it('should replace matched extension with targetExtension by default', + function() { + function MyFilter(inputTree, options) { + if (!this) return new MyFilter(inputTree, options); + Filter.call(this, inputTree, options); + } + inherits(MyFilter, Filter); + var filter = MyFilter('.', { + extensions: ['c', 'cc', 'js'], + targetExtension: 'zebra' + }); + expect(filter.getDestFilePath('foo.c')).to.equal('foo.zebra'); + expect(filter.getDestFilePath('test.js')).to.equal('test.zebra'); + expect(filter.getDestFilePath('blob.cc')).to.equal('blob.zebra'); + expect(filter.getDestFilePath('twerp.rs')).to.equal(null); + }); + + + it('should processString only when canProcessFile returns true', + function() { + var builder = makeBuilder(ReplaceFilter, fixturePath, 
function(awk) { + sinon.spy(awk, 'processString'); + return awk; + }); + + return builder('dir', { + glob: '**/*.md', + search: 'dogs', + replace: 'cats' + }).then(function(results) { + var awk = results.subject; + expect(read(results.directory + '/a/README.md')). + to.equal('Nicest cats in need of homes'); + expect(read(results.directory + '/a/foo.js')). + to.equal('Nicest dogs in need of homes'); + expect(awk.processString.callCount).to.equal(1); + }); + }); + + it('should complain if canProcessFile is true but getDestFilePath is null', + function() { + var builder = makeBuilder(ReplaceFilter, fixturePath, function(awk) { + awk.canProcessFile = function() { + // We cannot return `true` here unless `getDestFilePath` also returns + // a path + return true; + }; + return awk; + }); + + return expect(builder('dir', { + glob: '**/*.md', + search: 'dogs', + replace: 'cats' + })).to.eventually.be.rejectedWith(Error, /getDestFilePath.* is null/); + }); + + it('should purge cache', function() { + + var builder = makeBuilder(ReplaceFilter, fixturePath, function(awk) { + return awk; + }); + var fileForRemoval = path.join(fixturePath, 'dir', 'a', 'README.md'); + + return builder('dir', { + glob: '**/*.md', + search: 'dogs', + replace: 'cats' + }).then(function(results) { + expect(existsSync(fileForRemoval)).to.be.true; + rimraf(fileForRemoval); + + expect(existsSync(fileForRemoval)).to.be.false; + expect(existsSync(results.directory + '/a/README.md')).to.be.true; + + return results.builder(); + }).then(function(results) { + expect(existsSync(results.directory + '/a/README.md'), 'OUTPUT: a/foo.js should NO LONGER be present').to.be.false; + + expect(existsSync(fileForRemoval)).to.be.false; + return results; + }).finally(function() { + fs.writeFileSync(fileForRemoval, 'Nicest cats in need of homes'); + }).then(function(results) { + expect(existsSync(fileForRemoval)).to.be.true; + + return results.builder(); + }).then(function(results) { + expect(existsSync(results.directory + '/a/foo.js'), 'OUTPUT: a/foo.js should be once again present').to.be.true; + }); + }); + + it('replaces stale entries', function() { + var fileForChange = path.join(fixturePath, 'dir', 'a', 'README.md'); + + var builder = makeBuilder(ReplaceFilter, fixturePath, function(awk) { + return awk; + }); + + return builder('dir', { + glob: '**/*.md', + search: 'dogs', + replace: 'cats' + }).then(function(results) { + var awk = results.subject; + + expect(existsSync(fileForChange)).to.be.true; + + fs.writeFileSync(fileForChange, 'such changes'); + + expect(existsSync(fileForChange)).to.be.true; + + return results.builder(); + }).then(function(results) { + expect(existsSync(fileForChange)).to.be.true; + + fs.writeFileSync(fileForChange, 'such changes'); + + expect(existsSync(fileForChange)).to.be.true; + }).then(function() { + fs.writeFileSync(fileForChange, 'Nicest cats in need of homes'); + }); + }); + + + function existsSync(path) { + // node is apparently deprecating this function.. 
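+    // fs.lstatSync throws if the path does not exist; unlike fs.existsSync it does not
+    // follow symlinks, so a dangling symlink still counts as present.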
+ try { + fs.lstatSync(path); + return true; + } catch(e) { + return false; + } + } + + it('should not overwrite core options if they are not present', function() { + function F(inputTree, options) { Filter.call(this, inputTree, options); } + inherits(F, Filter); + F.prototype.extensions = ['js', 'rs']; + F.prototype.targetExtension = 'glob'; + F.prototype.inputEncoding = 'latin1'; + F.prototype.outputEncoding = 'shift-jis'; + expect(new F('.').extensions).to.eql(['js', 'rs']); + expect(new F('.').targetExtension).to.equal('glob'); + expect(new F('.').inputEncoding).to.equal('latin1'); + expect(new F('.').outputEncoding).to.equal('shift-jis'); + + expect(new F('.', { extensions: ['x'] }).extensions). + to.eql(['x']); + expect(new F('.', { targetExtension: 'c' }).targetExtension). + to.equal('c'); + expect(new F('.', { inputEncoding: 'utf8'} ).inputEncoding). + to.equal('utf8'); + expect(new F('.', { outputEncoding: 'utf8' }).outputEncoding). + to.equal('utf8'); + }); + + if (!/^win/.test(process.platform)) { + describe('persistent cache', function() { + var f; + function F(inputTree, options) { Filter.call(this, inputTree, options); } + inherits(F, Filter); + F.prototype.baseDir = function() { + return '../'; + }; + + beforeEach(function() { + f = new F(fixturePath, { persist: true }); + }); + + it('cache is initialized', function() { + expect(f.processor.processor._peristentCache).to.be.ok; + }); + + it('default `baseDir` implementation throws an Unimplemented Exception', function() { + function F(inputTree, options) { Filter.call(this, inputTree, options); } + inherits(F, Filter); + expect(function() { + new F(fixturePath, { persist: true }); + }).to.throw(/Filter must implement prototype.baseDir/); + }); + + it('`cacheKeyProcessString` return correct first level file cache', function() { + expect(f.cacheKeyProcessString('foo-bar-baz', 'relative-path')).to.eql('4c43793687f9a7170a9149ad391cbf70'); + }); + + it('filter properly reads file tree', function() { + var builder = makeBuilder(ReplaceFilter, fixturePath, function(awk) { + return awk; + }); + + return builder('dir', { + persist: true, + glob: '**/*.md', + search: 'dogs', + replace: 'cats' + }).then(function(results) { + expect(results.files).to.deep.eql([ + 'a/', + 'a/README.md', + 'a/bar/', + 'a/bar/bar.js', + 'a/foo.js' + ]); + }); + }); + }); + } + + describe('processFile', function() { + beforeEach(function() { + sinon.spy(fs, 'mkdirSync'); + }); + + afterEach(function() { + fs.mkdirSync.restore(); + }); + + it('should not effect the current cwd', function() { + var builder = makeBuilder(ReplaceFilter, fixturePath, function(awk) { + sinon.spy(awk, 'canProcessFile'); + return awk; + }); + + return builder('dir', { + glob: '**/*.js', + search: 'dogs', + replace: 'cats' + }).then(function(results) { + expect(fs.mkdirSync.calledWith(path.join(process.cwd(), 'a'), 493)).to.eql(false); + expect(fs.mkdirSync.calledWith(path.join(process.cwd(), 'a', 'bar'), 493)).to.eql(false); + }); + }); + }); +});