diff --git a/.editorconfig b/.editorconfig index 0f099897b..98a4353fa 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,4 +1,12 @@ -# editorconfig.org +# ╔═╗╔╦╗╦╔╦╗╔═╗╦═╗┌─┐┌─┐┌┐┌┌─┐┬┌─┐ +# ║╣ ║║║ ║ ║ ║╠╦╝│ │ ││││├┤ ││ ┬ +# o╚═╝═╩╝╩ ╩ ╚═╝╩╚═└─┘└─┘┘└┘└ ┴└─┘ +# +# This file (`.editorconfig`) exists to help maintain consistent formatting +# throughout this package, the Sails framework, and the Node-Machine project. +# +# To review what each of these options mean, see: +# http://editorconfig.org/ root = true [*] diff --git a/.eslintrc b/.eslintrc index aaadf08ac..9734814f5 100644 --- a/.eslintrc +++ b/.eslintrc @@ -1,15 +1,70 @@ { - "parser": "espree", - "extends": "eslint:recommended", + // ╔═╗╔═╗╦ ╦╔╗╔╔╦╗┬─┐┌─┐ + // ║╣ ╚═╗║ ║║║║ ║ ├┬┘│ + // o╚═╝╚═╝╩═╝╩╝╚╝ ╩ ┴└─└─┘ + // A set of basic conventions (similar to .jshintrc) for use within any + // arbitrary JavaScript / Node.js package -- inside or outside Sails.js. + // For the master copy of this file, see the `.eslintrc` template file in + // the `sails-generate` package (https://www.npmjs.com/package/sails-generate.) + // Designed for ESLint v4. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // For more information about any of the rules below, check out the relevant + // reference page on eslint.org. For example, to get details on "no-sequences", + // you would visit `http://eslint.org/docs/rules/no-sequences`. If you're unsure + // or could use some advice, come by https://sailsjs.com/support. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + "env": { - "node": true, - "mocha": true + "node": true }, + + "parserOptions": { + "ecmaVersion": 5 + // ^^This can be changed to `8` if this package doesn't need to support <= Node v6. 
+ }, + + "globals": { + "Promise": true + // ^^Available since Node v4 + }, + "rules": { - "eqeqeq": [1, "smart"], - "no-multiple-empty-lines": [1, {"max": 2}], - "semi": [1, "always"], - "space-before-function-paren": [1, "never"], - "spaced-comment": [1, "always", {"exceptions": ["/"]}] + "callback-return": ["error", ["done", "proceed", "next", "onwards", "callback", "cb"]], + "camelcase": ["warn", {"properties": "always"}], + "comma-style": ["warn", "last"], + "curly": ["error"], + "eqeqeq": ["error", "always"], + "eol-last": ["warn"], + "handle-callback-err": ["error"], + "indent": ["warn", 2, { + "SwitchCase": 1, + "MemberExpression": "off", + "FunctionDeclaration": {"body":1, "parameters": "off"}, + "FunctionExpression": {"body":1, "parameters": "off"}, + "CallExpression": {"arguments":"off"}, + "ArrayExpression": 1, + "ObjectExpression": 1, + "ignoredNodes": ["ConditionalExpression"] + }], + "linebreak-style": ["error", "unix"], + "no-dupe-keys": ["error"], + "no-duplicate-case": ["error"], + "no-extra-semi": ["warn"], + "no-labels": ["error"], + "no-mixed-spaces-and-tabs": ["error", "smart-tabs"], + "no-redeclare": ["warn"], + "no-return-assign": ["error", "always"], + "no-sequences": ["error"], + "no-trailing-spaces": ["warn"], + "no-undef": ["error"], + "no-unexpected-multiline": ["warn"], + "no-unused-vars": ["warn", {"caughtErrors":"all", "caughtErrorsIgnorePattern": "^unused($|[A-Z].*$)", "argsIgnorePattern": "^unused($|[A-Z].*$)", "varsIgnorePattern": "^unused($|[A-Z].*$)" }], + "no-use-before-define": ["error", {"functions":false}], + "one-var": ["warn", "never"], + "quotes": ["warn", "single", {"avoidEscape":false, "allowTemplateLiterals":true}], + "semi": ["error", "always"], + "semi-spacing": ["warn", {"before":false, "after":true}], + "semi-style": ["warn", "last"] } + } diff --git a/.github/ISSUE_TEMPLATE b/.github/ISSUE_TEMPLATE deleted file mode 100644 index c5fe4b879..000000000 --- a/.github/ISSUE_TEMPLATE +++ /dev/null @@ -1,44 +0,0 @@ - - 
-**Waterline version**: -**Node version**: -**NPM version**: -**Operating system**: - - -
diff --git a/.gitignore b/.gitignore index 80ddc0652..806b1b073 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,48 @@ +# ┌─┐┬┌┬┐╦╔═╗╔╗╔╔═╗╦═╗╔═╗ +# │ ┬│ │ ║║ ╦║║║║ ║╠╦╝║╣ +# o└─┘┴ ┴ ╩╚═╝╝╚╝╚═╝╩╚═╚═╝ +# +# This file (`.gitignore`) exists to signify to `git` that certain files +# and/or directories should be ignored for the purposes of version control. +# +# This is primarily useful for excluding temporary files of all sorts; stuff +# generated by IDEs, build scripts, automated tests, package managers, or even +# end-users (e.g. file uploads). `.gitignore` files like this also do a nice job +# at keeping sensitive credentials and personal data out of version control systems. +# + +############################ +# sails / node.js / npm +############################ node_modules -*.swp -*.swo -.dist -coverage/ +.tmp npm-debug.log +package-lock.json +.waterline +.node_history + +############################ +# editor & OS files +############################ +*.swo +*.swp +*.swn +*.swm +*.seed +*.log +*.out +*.pid +lib-cov +.DS_STORE +*# +*\# +.\#* +*~ +.idea +.netbeans +nbproject + +############################ +# misc +############################ +dump.rdb diff --git a/.jshintrc b/.jshintrc index 059786a40..5099273dd 100644 --- a/.jshintrc +++ b/.jshintrc @@ -27,6 +27,10 @@ // EVERYTHING ELSE: ////////////////////////////////////////////////////////////////////// + // Allow the use of ES6 features. + // (re ES7, see https://github.com/jshint/jshint/issues/2297) + "esversion": 6, + // Allow the use of `eval` and `new Function()` // (we sometimes actually need to use these things) "evil": true, @@ -84,6 +88,14 @@ // read, albeit a bit less exciting) "laxcomma": false, + // Do NOT allow avant garde use of commas in conditional statements. + // (this prevents accidentally writing code like: + // ``` + // if (!_.contains(['+ci', '-ci', '∆ci', '+ce', '-ce', '∆ce']), change.verb) {...} + // ``` + // See the problem in that code? Neither did we-- that's the problem!) 
+ "nocomma": true, + // Strictly enforce the consistent use of single quotes. // (this is a convention that was established primarily to make it easier // to grep [or FIND+REPLACE in Sublime] particular string literals in diff --git a/.npmignore b/.npmignore index 2f50a614b..7f802e752 100644 --- a/.npmignore +++ b/.npmignore @@ -1,19 +1,34 @@ -*# +.git +./.gitignore +./.jshintrc +./.editorconfig +./.travis.yml +./appveyor.yml +./example +./examples +./test +./tests +./.github + node_modules -ssl +npm-debug.log +.node_history +*.swo +*.swp +*.swn +*.swm +*.seed +*.log +*.out +*.pid +lib-cov .DS_STORE +*# +*\# +.\#* *~ .idea +.netbeans nbproject -test -CONTRIBUTING.md -.git -.gitignore .tmp -*.swo -*.swp -*.swn -*.swm -.jshintrc -.editorconfig -example +dump.rdb diff --git a/.travis.yml b/.travis.yml index cf104e4e6..3c9ff410c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,11 +1,26 @@ +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# ╔╦╗╦═╗╔═╗╦ ╦╦╔═╗ ┬ ┬┌┬┐┬ # +# ║ ╠╦╝╠═╣╚╗╔╝║╚═╗ └┬┘││││ # +# o ╩ ╩╚═╩ ╩ ╚╝ ╩╚═╝o ┴ ┴ ┴┴─┘ # +# # +# This file configures Travis CI. # +# (i.e. how we run the tests... 
mainly) # +# # +# https://docs.travis-ci.com/user/customizing-the-build # +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # + language: node_js + node_js: - - "0.10" - - "0.12" - - "4" -after_script: - - npm run coverage && cat ./coverage/lcov.info | ./node_modules/.bin/codeclimate -addons: - code_climate: - repo_token: 351483555263cf9bcd2416c58b0e0ae6ca1b32438aa51bbab2c833560fb67cc0 -sudo: false + - "10" + - "12" + - "14" + - "16" + +branches: + only: + - master + +notifications: + email: + - ci@sailsjs.com diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index 83bc9cb47..6fc0a14bc 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -1,5 +1,795 @@ # How Waterline Works + +## High-Level Diagram + > This is a very rough/early pass at an architectural doc, and it only covers a subset of the major components inside of Waterline, but I wanted to include a link to it here in case it was helpful for anyone. > > [How Waterline Works (diagram)](https://docs.google.com/a/balderdashdesign.com/drawings/d/1u7xb5jDY5i2oeVRP2-iOGGVsFbosqTMWh9wfmY3BTfw/edit?usp=sharing) + +#### Computational complexity of various kinds of association mutations + +[Link, tweeze, & splice performance for associations in Waterline ≥0.13 (by # of native queries)](https://twitter.com/mikermcneil/status/792179005348655104) + + +## Overview: Talking to the database + +There are two different approaches for talking to the database using Waterline. 
+ +### Waterline queries + +The first, and simplest, is by building and executing a **Waterline query** -- most commonly by calling a model method to get a chainable deferred object: + +```js +User.find() +.where({ + occupation: 'doctor' +}) +.omit('occupation') +.limit(30) +.skip(90) +.sort('name asc') +.exec(function (err, userRecords){ + +}); +``` + +### Statements + +The second, lower-level approach to talking to your database with Waterline is to build and execute a **statement** -- most commonly by calling a datastore method: + +```js +sails.datastore('mysql').sendStatement({ + select: ['*'], + from: 'inventory', + where: { + type: 'snack' + } +}).exec(function (err, result) { + +}); +``` + +> Statements expect you to use column names, not attribute names. + + + + +## Querying (implementation) + +When you run a query in Waterline, the data structure goes through 5 different stages. + +### Stage 1 query + +> _aka "Query instance" / "deferred object"_ + +Stage 1 queries are Query instances; i.e. the deferred object you get from calling a model method. + +For example: +``` +var q = User.findOne({ + omit: 'occupation', + where: { + occupation: 'doctor' + }, + select: ['name', 'age', 'createdAt'], + skip: 90, + sort: 'name asc' +}).populate('friends', { + where: { + occupation: 'doctor', + age: { '>': 40, '<': 50 } + }, + sort: 'yearsInIndustry desc' +}); +``` + + +### Stage 2 query + +> _aka "logical protostatement"_ + +Under the covers, when you call `.exec()`, Waterline expands the stage 1 query into a dictionary (i.e. plain JavaScript object). 
+ +This is what's known as a "Stage 2 query": + +```js +{ + method: 'findOne', // << the name of the method + using: 'user', // << the identity of the model + + // The criteria dictionary + // (because this is "find"/"findOne", "update", "destroy", "count", "sum", or "avg") + criteria: { + + // The expanded "select" clause + // (note that the only reason this is not `['*']` is because we specified an explicit `select` or `omit` + // It will ALWAYS include the primary key.) + // For no projections, this is `select: ['*']`. And `select` is NEVER allowed to be `[]`.) + select: [ + 'id', + 'name', + 'age', + 'createdAt' + ], + + // The expanded "omit" clause + // (always empty array, unless we provided an `omit`. If `omit` is anything other than [], then `select` must be `['*']` -- and vice versa) + omit: ['occupation'], + + // The expanded "where" clause + where: { + occupation: 'doctor' + }, + + // The "limit" clause (if there is one, otherwise defaults to `Number.MAX_SAFE_INTEGER`) + limit: 9007199254740991, + + // The "skip" clause (if there is one, otherwise defaults to 0) + skip: 90, + + // The expanded "sort" clause + // (an empty array indicates that the adapter's default sort should be used) + sort: [ + { name: 'ASC' } + ] + }, + + // The `populates` clause. + // (if nothing was populated, this would be an empty dictionary.) 
+ populates: { + + // The keys inside of `populates` are either: + // • `true` - if this is a singular ("model") association + // • a subcriteria - if this is a plural ("collection") association: a fully-normalized, stage 2 Waterline criteria + // • `false` - special case, only for when this is a plural ("collection") association: when the provided subcriteria would actually be a no-op that will always end up as `[]` + + friends: { + select: [ '*' ], + omit: [], + where: { + and: [ + { occupation: 'doctor' }, + { + and: [ + { age: { '>': 40 } }, + { age: { '<': 50 } } + ] + } + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // > Why don't we coalesce the "and"s above? It's kind of ugly. + // + // Performance trumps prettiness here-- S2Qs are for computers, not humans. + // S1Qs should be pretty, but for S2Qs, the priorities are different. Instead, it's more important + // that they (1) are easy to write parsing code for and (2) don't introduce any meaningful overhead + // when they are built (remember: we're building these on a per-query basis). + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + ] + }, + limit: (Number.MAX_SAFE_INTEGER||9007199254740991), + skip: 0, + sort: [ + { yearsInIndustry: 'DESC' } + ] + } + + } + +} +``` + +##### Side note about populating + +``` +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // > Side note about what to expect under the relevant key in record(s) when you populate vs. don't populate: + // > • When populating a singular ("model") attribute, you'll always get either a dictionary (a child record) or `null` (if no child record matches the fk; e.g. 
if the fk was old, or if it was `null`) + // > • When populating a plural ("collection") attribute, you'll always get an array of dictionaries (a collection, consisting of child records). Of course, it might be empty. + // > • When NOT populating a singular ("model") attribute, you'll get whatever is stored in the database (there is no guarantee it will be correct-- if you fiddle with your database directly at the physical layer, you could mess it up). Note that we ALWAYS guarantee that the key will be present though, so long as it's not being explicitly excluded by `omit` or `select`. i.e. even if the database says it's not there, the key will exist as `null`. + // > • When NOT populating a plural ("collection") attribute, you'll never get the key. It won't exist on the resulting parent record(s). + // > • If populating a plural ("collection") attribute, and child records w/ duplicate ids exist in the collection (e.g. because of a corrupted physical database), any duplicate child records are stripped out. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +``` + +Also, some more formal terminology: + ++ Ideally, one uses the word "association" when one wants to refer to _both sides_ of the association *at the same time*. It's still possible to understand what it means more generally or when referring to a particular attribute, but it's one of those things that's helpful to be able to get a bit more formal about sometimes. ++ When one needs to be specific, one refers to the attribute defs themselves as "singular attributes" (or more rarely: "model attribute") and "plural attribute" (aka "collection attribute"). ++ one uses "singular" and "plural" to refer to a _particular side_ of the association. So really, in that parlance, an "association" is never wholly singular or plural-- it's just that the attributes on either side are. 
Similarly, you can't always look at a plural or singular attribute and decide whether it's part of a 2-way or 1-way association (you don't always have enough information) ++ A 1-way (or "exclusive") association is either a vialess collection attribute, or a singular attribute that is not pointed at by a via on the other side ++ A 2-way (or "shared") association is any collection attribute with `via`, or a singular attribute that _is_ pointed at by a via on the other side ++ A 2-way association that is laid out in such a way that it needs a junction model to fully represent it is called a many-to-many association ++ When referring to a record which might be populated, one calls it a "parent record" (or rarely: "primary record") ++ Finally, when referring to a populated key/value pair within a parent record, one refers to it as one of the following: + + for singular, when not populated: a "foreign key" + + for singular, when populated: a "child record" (aka "foreign record") + + for plural, when populated: a "collection" (aka "foreign collection") + + +### Stage 3 query + +> _aka "physical protostatement"_ + + +Next, Waterline performs a couple of additional transformations: + ++ replaces `method: 'findOne'` with `method: 'find'` (and updates `limit` accordingly) ++ replaces model attribute names with physical database attribute/column names ++ replaces the model identity with the table name ++ removes `populates` (or potentially replaces it with `joins`) + + this varies-- keep in mind that sometimes _multiple physical protostatements will be built up and sent to different adapters_-- or even the same one. + + if `joins` is added, then this would replace `method: 'findOne'` or `method: 'find'` with `method: 'join'`. 
+ +```js +{ + method: 'find', //<< note that "findOne" was replaced with "find" + using: 'users', //<< the table name, it can be different than the model name, as it can be set in the model definition + criteria: { + select: [ + 'id', + 'full_name', // << in this case full_name is the native database attribute/column name + 'age', + 'created_at' + ], + where: { + and: [ + { occupation_key: 'doctor' } + ] + }, + limit: 2, //<< note that this was set to `2` automatically, because of being originally a "findOne" + skip: 90, + sort: [ + { full_name: 'ASC' } + ] + } +} +``` + +This physical protostatement is what gets sent to the database adapter. + + + +> Note that, in some cases, **multiple different physical protostatements** will be built up, and sent to the same or different adapters. + +For example, if Waterline decides that it is a good idea (based on the variety of logical query +this is, which datastores it spans, and the support implemented in adapters), then it will transform +the method to `join`, and provide additional info: + +```js +{ + method: 'join', //<< note that "findOne" was replaced with "join" + using: 'users', //<< the table name + criteria: { + select: [ + 'id', + 'full_name', + 'age', + 'created_at' + ], + where: { + and: [ + { occupation_key: 'doctor' } + ] + }, + limit: 1,//<< note that this was STILL set to `1` automatically + skip: 90, + sort: [ + { full_name: 'ASC' } + ], + + // If `method` is `join`, then join instructions will be included in the criteria: + joins: [ + // The `joins` array can have 1 or 2 dictionaries inside of it for __each__ populated + // attribute in the query. If the query requires the use of a join table then + // the array will have two items for that population. 
+ { + // The identity of the parent model + parentCollectionIdentity: 'users', + // The model tableName of the parent (unless specified all keys are using tableNames) + parent: 'user_table_name', + // An alias to use for the join + parentAlias: 'user_table_name__pets', + // For singular associations, the populated attribute will have a schema (since it represents + // a real column). For plural associations, we'll use the primary key column of the parent table. + parentKey: 'id', + // The identity of the child model (in this case the join table) + childCollectionIdentity: 'pets_owners__users_pets', + // The tableName of the child model + child: 'pets_owners__users_pets', + // An alias to use for the join. It's made up of the parent reference + '__' + the attribute to populate + childAlias: 'pets_owners__users_pets__pets', + // The key on the child model that represents the foreign key value + childKey: 'user_pets', + // The original model alias used + alias: 'pets', + // Determines if the parent key is needed on the record. Will be true for + // singular associations otherwise false. + removeParentKey: false, + // Similar to removeParentKey + model: false, + // Flag determining if multiple records will be returned + collection: true + }, + // In this case the "pets" population requires the use of a join table so + // two joins are needed to get the correct data. This dictionary represents + // the connection between the join table and the child table. + { + // Parent in this case will be the join table + parentCollectionIdentity: 'pets_owners__users_pets', + parent: 'pets_owners__users_pets', + parentAlias: 'pets_owners__users_pets__pets', + parentKey: 'pet_owners', + // Child will be the table that holds the actual record being populated + childCollectionIdentity: 'pets', + child: 'pets', + childAlias: 'pets__pets', + childKey: 'id', + alias: 'pets', + // Flag to show that a join table was used so when joining the records + // take that into account. 
+ junctionTable: true, + removeParentKey: false, + model: false, + collection: true, + // Criteria to use for the child table. + criteria: { + where: {}, + limit: 9007199254740991, + skip: 0, + sort: [{ + id: 'ASC' + }], + select: ['createdAt', 'updatedAt', 'id', 'name'] + } + } + ] + }, +} +``` + + +### Stage 4 query + +> _aka "statement"_ + +**In future releases of Waterline and its core adapters, the concept of a Stage 4 query will likely be removed for performance reasons.** + +In the database adapter, the physical protostatement is converted into an actual _statement_: + +```js +{ + from: 'users', + select: [ + 'id', + 'full_name', + 'age', + 'created_at' + ], + where: { + and: [ + { occupation_key: 'doctor' } + ] + }, + limit: 1, + skip: 90, + sort: [ + { full_name: 'ASC' } + ] +} +``` + +This is the same kind of statement that you can send directly to the lower-level driver. Statements are _much_ closer to native queries (e.g. SQL query or MongoDB native queries). They are still more or less database-agnostic, but less regimented, and completely independent from the database schema. + + +> Not _every_ adapter necessarily uses statements (S4Qs) and native queries (S5Qs). This will likely change in the future though. +> If you're implementing a new adapter for Waterline, take a peek at the latest versions of sails-postgresql or sails-mysql for inspiration. If you need help, [hit us up](https://flagship.sailsjs.com/contact). + + +### Stage 5 query + +> _aka "native query"_ + +In the database driver, the statement is compiled into a native query: + +```js +SELECT id, full_name, age, created_at FROM users WHERE occupation_key="doctor" LIMIT 1 SKIP 90 SORT full_name ASC; +``` + + + +## Example `where` clause iterator + +See https://gist.github.com/mikermcneil/8252ce4b7f15d9e2901003a3a7a800cf for an example of an iterator for a stage 2 query's `where` clause. 
+ + + + + +## Query pipeline (example) + +Here's a quick example that demonstrates how this all fits together. + +It operates under these assumptions: + +1. A person have exactly one mom (also a Person) +2. A person can have many "cats" (Cat), and they can have many "humanFriends" (Person) +3. A person can have many "dogs" (Dog), but every dog has one "owner" (Person) + + + +Given the following stage 1 query: + +```js +// A stage 1 query +var q = Person.find({ + select: ['name', 'age'] +}) +.populate('mom') +.populate('dogs') +.populate('cats', { + where: { name: { startsWith: 'Fluffy' } }, + limit: 50, + sort: 'age DESC', + omit: ['age'] +}); +``` + +It would be forged into the following stage 2 query: + +```js +// A stage 2 query +{ + + method: 'find', + + using: 'person', + + meta: {}, + + criteria: { + where: {}, + limit: 9007199254740991, + skip: 0, + sort: [ { id: 'ASC' } ], //<< implicitly added + select: ['id', 'name', 'age', 'mom'], + //^^ note that it automatically filled in the pk attr, + // as well as the fk attrs for any model associations + // being populated. (if omit was specified instead, + // then it would have been checked to be sure that neither + // the pk attr nor any necessary fk attrs were being explicitly + // omitted. If any were, Waterline would refuse to run the query.) + }, + + populates: { + mom: true, + dogs: { + where: {}, + limit: 9007199254740991, + skip: 0, + sort: [ { id: 'ASC' } ], //<< implicitly added + select: ['*'] + }, + cats: { + where: { + and: [ + { name: { startsWith: 'Fluffy' } } + ] + }, + limit: 50, + skip: 0, + sort: [ { age: 'DESC' } ], + omit: ['age'] + } + } + +} +``` + + +Then, it would then be forged into one or more stage 3 queries, depending on the datastores/adapters at work. 
For example: + +```js +// A stage 3 query +{ + method: 'find', + using: 'the_person_table', + meta: {}, + criteria: { + where: {}, + limit: 9007199254740991, + skip: 0, + sort: [ { id_colname: 'ASC' } ], + select: ['id_colname', 'name_col_____name', 'age_whatever', 'mom_fk_col_name'] + // If this had been `['*']`, then the `select` clause would have simply been omitted. + }, + // Note that `joins` might sometimes be included here. + // But since this example is xD/A, the `joins` key would not exist. +} +``` + + +```js +// Another stage 3 query (for "cats") +{ + method: 'find', + using: 'the_cat_table', + meta: {}, + criteria: { + where: { + and: [ + { id_colname: { in: [ 39, 844, 2, 3590, 381, 3942, 93, 3959, 1, 492, 449, 224 ] } }, + //^^ injected b/c this is implementing part of an xD/A populate + { name_colname: { startsWith: 'Fluffy' } } + ] + }, + limit: 50, + skip: 0, + sort: [ { age_col_name: 'DESC' } ], + select: ['id_colname', 'name_colname__', '_temperament_colname'], + // Note that even though this was an `omit`, it was expanded. + } +} +``` + + +```js +// Yet another stage 3 query (for "mom") +{ + method: 'find', + using: 'the_person_table', + meta: {}, + criteria: { + where: { + and: [ + { id_colname: { in: [ 2323, 3291, 38, 1399481 ] } } + //^^ injected b/c this is implementing part of an xD/A populate + ] + }, + limit: 9007199254740991, + skip: 0, + sort: [ { id_colname: 'ASC' } ], + select: ['id_colname', 'name_col_____name', 'age_whatever', 'mom_fk_col_name'] + // ^This is always fully expanded, because you can't currently specify a subcriteria for a model association. + } +} +``` + + +_etc._ + + + + +## Validating/normalizing a criteria's `where` clause + +#### If key is `and` or `or`... +Then this is a predicate operator that should have an array on the RHS. + +#### For any other key... + +The key itself must be a valid attr name or column name (depending on if this is a stage 2 or stage 3 query). 
+ +The meaning of the RHS depends on its type: + +=> string, number, boolean, or null + => indicates an equality constraint + +=> array + => indicates shortcut notation for "IN" + => (should be normalized into `{in: ['...']}` automatically -- never allowed if expecting it to already be normalized) + +=> dictionary + => indicates a subattribute modifier + => The type expectation for the dictionary itself varies. + => (but note that `{'!':[...]}` should be normalized into `{nin: ['...']}` automatically -- never allowed if expecting it to already be normalized) + +=> misc + => never allowed + + + + +Examples: +------------------------------------------------------------------------------------- + +{ occupation: 'doctor' }, +{ occupation: 23523 }, +{ occupation: null }, +{ occupation: true }, +{ occupation: false }, +{ occupation: false }, + +{ occupation: { not: 'doctor' } }, +{ occupation: { not: 23523 } }, +{ occupation: { not: null } }, +{ occupation: { not: true } }, +{ occupation: { not: false } }, + +{ occupation: { in: ['doctor', 'nurse'] } }, +{ occupation: { in: [true, false, 283523, null] } }, + +{ occupation: { nin: ['doctor', 'nurse'] } }, +{ occupation: { nin: [true, false, 283523, null] } }, + +{ occupation: { contains: 'asdf' } }, +{ occupation: { like: 'asdf' } }, +{ occupation: { startsWith: 'asdf' } }, +{ occupation: { endsWith: 'asdf' } }, + + + + + + + + + +## Glossary + +Quick reference for what various things inside of any given query are called. (Some of these terms are formal and specific, and shouldn't come up in everyday use for most people contributing to Waterline. Still, it's important to have names for things when discussing the finer details.) + +> These notes are for the stage 2 and stage 3 queries-- but they are mostly applicable to stage 1 queries and stage 4 queries as well. Just note that stage 1 queries tend to be much more tolerant in general, whereas stage 4 queries are more strict. 
Also realize that the details of what is supported in criteria varies slightly between stages. +> +> + For more specific (albeit slightly older and potentially out of date) docs on criteria in stage 4 queries, see https://github.com/treelinehq/waterline-query-docs/blob/99a51109a8cfe5b705f40b987d4d933852a4af4c/docs/criteria.md +> + For more specific (albeit slightly older and potentially out of date) docs on criteria in stage 1 queries, see https://github.com/balderdashy/waterline-criteria/blob/26f2d0e25ff88e5e1d49e55116988322339aad10/lib/validators/validate-sort-clause.js and https://github.com/balderdashy/waterline-criteria/blob/26f2d0e25ff88e5e1d49e55116988322339aad10/lib/validators/validate-where-clause.js + + +| Word/Phrase | Meaning | +|:-----------------------|:------------------------------------------------------------------------------| +| query key | A top-level key in the query itself; e.g. `criteria`, `populates`, `newRecords`, etc. There are a specific set of permitted query keys (attempting to use any extra keys will cause errors! But note that instead of attaching ad hoc query keys, you can use `meta` for custom stuff.) +| `using` | The `using` query key is a vocative that indicates which model is being "spoken to" by the query. +| clause | A top-level key in the `criteria`. There are a specific set of permitted clauses in criterias. Which clauses are allowed depends on what stage of query this is (for example, stage 3 queries don't permit the use of `omit`, but stage 2 queries _do_) +| `sort` clause | When fully-normalized, this is an array of >=1 dictionaries called comparator directives. +| comparator directive | An item within the array of a fully normalized `sort` clause. Should always be a dictionary with exactly one key (known as the _comparator target_), which is usually the name of an attribute (or column name, if this is a stage 3 query). The RHS value for the key in a comparator directive must always be either 'ASC' or 'DESC'. 
+| `where` clause | The `where` clause of a fully normalized criteria always has one key at the top level: either (1) a predicate ("and"/"or") whose RHS is an array consisting of zero or more conjuncts or disjuncts, or (2) a single constraint (see below) +| conjunct | A dictionary within an `and` array. When fully normalized, always consists of exactly one key-- an attribute name (or column name), whose RHS is either (A) a nested predicate operator or (B) a filter. +| disjunct | A dictionary within an `or` array whose contents work exactly like those of a conjunct (see above). +| scruple | Another, more general name for a dictionary which could be a conjunct, disjunct, or the very top level of the `where` clause. A scruple could contain either a _constraint_ or a _predicate_. (This terminology is particularly useful when talking about a stage 1 query, since not everything will have been normalized yet.) +| predicate | A _predicate scruple_ (usually simply called a _predicate_) is a lone key/value pair whose LHS is a _predicate operator_ (either "and" or "or") and whose RHS is a _predicate set_. +| predicate operator | The LHS of a predicate scruple ("and" or "or") is called a _predicate operator_. (Sometimes also informally known as a _predicate key_.) +| predicate operands | The RHS of a predicate scruple is an array of _predicate operands_. Its items are scruples called either "conjuncts" or "disjuncts", depending on whether the predicate operator is an "and" or an "or", respectively. +| constraint | A _constraint scruple_ (usually simply called a _constraint_) is a key/value pair that represents how values for a piece of data will be qualified. Once normalized, the RHS of a constraint is always either a primitive (making it an _equivalency constraint_) or a dictionary consisting of exactly one key/value pair called a "modifier" aka "sub-attribute modifier" (making the constraint a _complex constraint_). In certain special cases, (in stage 1 queries only!) 
multiple different modifiers can be combined together within a complex constraint (e.g. combining `>` and `<` to indicate a range of values). In stage 2 queries, these have already been normalized out (using `and`). +| constraint target | The LHS of a constraint is called the _constraint target_. Usually, this is the name of a particular attribute in the target model (or column in the target table, if this is stage 3). +| constraint modifier | A _complex constraint modifier_ (or simply a _modifier_) is a key/value pair within a complex constraint, where the key is one of a special list of legal operators such as `nin`, `in`, `contains`, `!`, `>=`, etc. A modifier impacts how values for a particular attribute name (or column name) will be qualified. The data type for a particular modifier depends on the modifier. For example, a modifier for key `in` or `nin` must be an array, but a modifier for key `contains` must be either a string or number. + + +```javascript +// Example: Look up records whose name contains "Ricky", as well as being prefixed or suffixed +// with some sort of formal-sounding title. +where: { + and: [ + { name: {contains: 'Ricky'} }, + { + or: [ + { name: {endsWith: 'Esq.'} }, + { name: {endsWith: 'Jr.'} }, + { name: {endsWith: 'Sr.'} }, + { name: {endsWith: 'II'} }, + { name: {endsWith: 'III'} }, + { name: {endsWith: 'IV'} }, + { name: {startsWith: 'Dr.'} }, + { name: {startsWith: 'Miss'} }, + { name: {startsWith: 'Ms.'} }, + { name: {startsWith: 'Mrs.'} }, + { name: {startsWith: 'Mr.'} }, + { name: {startsWith: 'Rvd.'} } + ] + } + ] +} +``` + + + +### Example of iterating over a `where` clause from the criteria of a stage 2 query + +See https://gist.github.com/mikermcneil/8252ce4b7f15d9e2901003a3a7a800cf.
+ + + +## Associations + +### Broad classifications of associations: + ++ singular (association which declares `model`) ++ plural (association which declares `collection`) + +*There is also a distinction between one-way and two-way associations:* + +"Two-way" just means that there's another "side" to the association-- i.e. that, if you change the association, the expected results when you populate the other side of the association change-- _automatically_ (and in some cases, they actually change at the physical layer when you make the original change). "One-way" means that there is no other side. If you change a one-way association, no other associations are affected. + +There are three different kinds of two-way associations, and two different kinds of one-way associations. Here they are: + +### The various kinds of two-way associations: + ++ plural, two-way, *exclusive* (plural association whose `via` is pointing at a singular association on the other side) ++ singular, two-way (singular association who is pointed at on the other side by a plural association w/ `via`) ++ plural, two-way, *shared* (plural association whose `via` is pointing at a plural association on the other side with a matching `via`) + +### The various kinds of one-way associations: + ++ singular, one-way (singular association who is NOT pointed at by any `via`) ++ plural, one-way (plural association without a `via` of its own, and which is NOT pointed at by `via` on the other side) + + + + + +## Adapters & auto-migrations + +Auto-migrations are now handled outside of Waterline core. + +Notes for adapter maintainers who implement `define` et al: + + + +##### Reserved column types + +When interpreting `autoMigrations.columnType`, there are a few special reserved column types to be aware of, that should always be handled: + + `_numberkey` _(e.g. you might understand this as "INTEGER")_ + + `_stringkey` _(e.g. you might understand this as "VARCHAR(255)")_ + + `_numbertimestamp` _(e.g.
you might understand this as "BIGINTEGER" -- this is for JS timestamps (epoch ms))_ + + `_stringtimestamp` _(e.g. you might understand this as "VARCHAR(14)")_ + + `_string` _(e.g. you might understand this as "TEXT")_ + + `_number` _(e.g. you might understand this as "DOUBLE")_ + + `_boolean` _(e.g. you might understand this as "TINYINT")_ + + `_json` _(e.g. you might understand this as "TEXT" in MySQL, or "JSON" in PostgreSQL)_ + + `_ref` _(non-JSON-structured data that may or may not be serializable in adapter-specific ways; e.g. you might understand this as "TEXT".)_ + +These (^^) are the different core Waterline logical data types, but prefixed by underscore (e.g. `_string`) AS WELL AS two special reserved column types (`_numberkey` and `_stringkey`). These two additional column types are used for primary key and foreign key (singular association) values. Note that foreign key values could also be null. + +##### Unrecognized column types + +If `autoMigrations.columnType` for a given attribute is unrecognized for your database, then fail with an error. + + + + +## Special cases / FAQ + +##### _What is an "exclusive" association?_ + +It just means a plural association with the special restriction that no two records can have the same associated child records in it. + +> This is vs. a "shared" association, which is what we call any plural association that is non-exclusive, as per this definition. + +##### _What about *through* associations?_ + +A *through* association is a subgenre of plural, two-way, shared associations, where you actually can set up the junction model as one of the models in your app-level code. + + +##### _What about *reflexive* associations?_ + +A **reflexive** association is just any association where the associated model is the same as the parent model. + + +##### _What about if you have a plural association with `via` pointed at another plural association, but there is no via on the other side?_ + +That's an error (i.e. 
in waterline-schema). + + + + +## Required vs allowNull vs. defaultsTo vs. autoCreatedAt vs. autoUpdatedAt + +Though relatively simple from the perspective of userland, this gets a bit complicated internally in Waterline. + +For details, see https://docs.google.com/spreadsheets/d/1whV739iW6O9SxRZLCIe2lpvuAUqm-ie7j7tn_Pjir3s/edit#gid=1814738146 + + + + +## Errors + +| Error `name` | Meaning | +|:------------------------|:---------------------------------------------------------------| +| UsageError | Bad usage, caught by Waterline core | +| AdapterError | Something went wrong in the adapter (e.g. uniqueness constraint violation) | +| PropagationError | A conflict was detected while making additional, internal calls to other model methods within Waterline core (e.g. `replaceCollection()` could not update a required null foreign key, or a conflict was encountered while performing "cascade" polyfill for a `.destroy()`) | +| _anything else_ | Something unexpected happened | + + + + + + diff --git a/CHANGELOG.md b/CHANGELOG.md index 232c7d0f2..34d0054e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,40 +1,101 @@ # Waterline Changelog -### 0.11.3 +### Edge + +##### General +* [BUGFIX] Fix .archive() and .archiveOne() when using custom column names (#1616) +* [BREAKING] Waterline attribute names must now be [ECMAScript 5.1-compatible variable names](https://github.com/mikermcneil/machinepack-javascript/blob/3786c05388cf49220a6d3b6dbbc1d80312d247ec/machines/validate-varname.js#L41). + + Custom column names can still be configured to anything, as long as it is supported by the underlying database. +* [BREAKING] Breaking changes to criteria usage: + + For performance, criteria passed in to Waterline's model methods will now be mutated in-place in most situations (whereas in Sails/Waterline v0.12, this was not necessarily the case.) + + Aggregation clauses (`sum`, `average`, `min`, `max`, and `groupBy`) are no longer supported in criteria. Instead, see new model methods. 
+ + `limit: 0` **no longer does the same thing as `limit: undefined`**. Instead of matching ∞ results, it now matches 0 results. + + `skip: -20` **no longer does the same thing as `skip: undefined`**. Instead of skipping zero results, it now refuses to run with an error. + + Limit must be < Number.MAX_SAFE_INTEGER (...with one exception: for compatibility/convenience, `Infinity` is tolerated and normalized to `Number.MAX_SAFE_INTEGER` automatically.) + + Skip must be < Number.MAX_SAFE_INTEGER + + Criteria dictionaries with a mixed `where` clause are no longer supported. + + e.g. instead of `{ username: 'santaclaus', limit: 4, select: ['beardLength', 'lat', 'long']}`, + + use `{ where: { username: 'santaclaus' }, limit: 4, select: ['beardLength', 'lat', 'long'] }`. + + Note that you can still do `{ username: 'santaclaus' }` as shorthand for `{ where: { username: 'santaclaus' } }` -- it's just that you can't mix other top-level criteria clauses (like `limit`) alongside constraints (e.g. `username`). + + And as for anywhere you're building criteria using Waterline's chainable deferred object, then don't worry about this-- it's taken care of for you. +* [DEPRECATE] Deprecated criteria usage: + + Avoid specifying a limit of < 0. It is still ignored, and acts like `limit: undefined`, but it now logs a deprecation warning to the console. +* [BREAKING] With the major exception of `.populate()`, repeated use of any other one chainable query method like `.sort()`, `.where()`, `.set()`, `.meta()`, etc is no longer supported. For example, you should not do: `User.find().where({username: 'santaclaus'}).where({location: 'north pole'})` +* [BREAKING] Coercion of result records + + Resulting records from calling model methods are no longer special instances-- they are just dictionaries (plain JavaScript objects) + + There are now warning messages for some common problematic results from the adapter. 
This is designed to make it easier to catch schema migration issues, as well as to identify adapter bugs. + + +##### Automigrations +* [BREAKING] Automigrations now live outside of Waterline core (in waterline-util) + + Remove `index` for automigrations + + In core SQL adapters, `.create()` and `.createEach()` no longer deals with updating the current autoincrement sequence (the "next value to use") when a record with a greater value is explicitly created + +##### Data types +* The data types in Waterline have changed to more closely reflect their purpose: validation and coercion of JavaScript values. This drastically reduced the number of types to just 5: string, number, boolean, json, and ref. +* To allow for flexibility in automigrations, attributes may also specify a new key, `columnType`. If specified, the `columnType` is sent to the appropriate adapter during automigration (in sails-hook-orm). This allows Sails/Waterline models to indicate how the values for individual attributes should be stored _at rest_ vs. how they are validated/coerced when your code calls `.create()` or `.update()`. +* All documented previously-supported types are checked for and adjusted if possible (in sails-hook-orm), but if you are using a custom type, you may need to choose an appropriate `type` and `columnType`. +* `defaultsTo` can no longer be specified as a function. In practice, this can lead to unintended consequences, and its implementation was adding considerable weight and complexity to Waterline (without a whole lot of tangible benefit). +* Optional attributes with no value specified are no longer necessarily stored as `null`. If they are set to `type: 'json'` or `type: 'ref'`, and there is no `defaultsTo`, then `null` is stored. But, if an attribute declares itself as `type: 'string'`, then when a record is created without specifying a value for that attribute, it is stored as `''` (empty string). 
Similarly, `type: 'number'` is stored as `0`, and `type: 'boolean'` as `false`. To represent an attribute which might be `null` or a string, use `type: 'json'` (combining it with the new `isString` validation rule, if you like). + +##### Model methods ++ Revamped [.stream()](http://sailsjs.com/documentation/reference/waterline-orm/models/stream) + + Simplify interface and remove reliance on emitters in favor of [adapter-agnostic batch processing](https://gitter.im/balderdashy/sails?at=58655edd9d4cc4fc53553d51). + + Add support for `.populate()` + + Now supports batch-at-a-time or record-at-a-time iteration. + + +##### `required` & `allowNull` + +* [BREAKING] Standardizing the definition of `required` + + If an attribute specifies itself as `required`, it means that a value for the attribute must be _defined_ when using Waterline to do a `.create()`. + + For example, if `foo` is a required attribute, then passing in `foo: undefined` or omitting `foo` on a `.create()` would fail the required check. + + In addition, trying to .create() OR .update() the value as either `''` (empty string) or `null` would fail the required check. ++ If an attribute specifies itself as `type: 'string'`, then if a value for that attr is explicitly provided as `null` in a `.create()` or `.update()`, it will **no longer be allowed through**-- regardless of the attribute's `required` status. ++ Other types (json and ref) allow `null` out of the box. To support a string attribute which might be `null`, you'll want to set the attribute to `type: 'json'`. If you want to prevent numbers, booleans, arrays, and dictionaries, then you'll also want to add the `isString: true` validation rule. ++ For more information and a reference of edge cases, see https://docs.google.com/spreadsheets/d/1whV739iW6O9SxRZLCIe2lpvuAUqm-ie7j7tn_Pjir3s/edit#gid=1927470769 + + +### 0.12.2 + +* [BUGFIX] Fix issues with compatibility in alter auto-migrations. 
This was causing corrupted data depending on the permutation of adapter version and Waterline version. This should be fixed in the SQL adapters that support the new select query modifier. -* [BUG] Fix auto-updating attributes to take into account custom column names. See [#1360](https://github.com/balderdashy/waterline/pull/1360) for more details. Thanks to [@jenjenut233](https://github.com/jenjenut233) for the patch! Also fixes https://github.com/balderdashy/sails/issues/3821. +* [ENHANCEMENT] Updated dependencies to remove warning messages when installing. -### 0.11.2 +### 0.12.1 -* [BUG] Fix #1326 +* [BUGFIX] Fixes an issue when searching by `id` in schemaless mode. See [#1326](https://github.com/balderdashy/waterline/issues/1326) for more details. -* [BUG] Fix issues with compatibility in `alter` auto-migrations. This was causing corrupted data depending on the permutation of adapter version and Waterline version. This should be fixed in the SQL adapters that support the new `select` query modifier. +### 0.12.0 -* [ENHANCEMENT] Updated dependencies to remove warning messages when installing. +* [ENHANCEMENT] Allows attribute definitions to contain a `meta` property that will be passed down to the adapter. This allows arbitrary information about an attribute to be passed down to interactions on the physical storage engine. Going forward any adapter specific migration information should be sent via the `meta` property. See [#1306](https://github.com/balderdashy/waterline/pull/1306) for more information. -### 0.12.1 +* [ENHANCEMENT] Allows for the use of `.select()` to build out projections in both top level queries and association queries. See [#1310](https://github.com/balderdashy/waterline/pull/1310) for more details and examples. -* [BUG] Fixes an issue when searching by `id` in schemaless mode. See [#1326](https://github.com/balderdashy/waterline/issues/1326) for more details. 
+* [ENHANCEMENT] Allow for the ability to pass in extra data to an adapter function using the `.meta()` option. This could be used for a variety of things inside custom adapters such as passing connections around for transactions or passing config values for muti-tenant functionality. For more details see [#1325](https://github.com/balderdashy/waterline/pull/1325). -### 0.12.0 +### 0.11.6 -* [Enhancement] Allows attribute definitions to contain a `meta` property that will be passed down to the adapter. This allows arbitrary information about an attribute to be passed down to interactions on the physical storage engine. Going forward any adapter specific migration information should be sent via the `meta` property. See [#1306](https://github.com/balderdashy/waterline/pull/1306) for more information. +* [BUGFIX] Remove max engines SVR re #1406. Also normalize 'bugs' URL, and chang… … [d89d2a6](https://github.com/balderdashy/waterline/commit/d89d2a6) +* [INTERNAL] Add latest Node versions, and add 0.11.x branch to CI whitelist. [ca0814e](https://github.com/balderdashy/waterline/commit/ca0814e) +* [INTERNAL] Add appveyor.yml for running tests on Windows. [c88cfa7](https://github.com/balderdashy/waterline/commit/c88cfa7) -* [Enhancement] Allows for the use of `.select()` to build out projections in both top level queries and association queries. See [#1310](https://github.com/balderdashy/waterline/pull/1310) for more details and examples. +### 0.11.5 -* [Enhancement] Allow for the ability to pass in extra data to an adapter function using the `.meta()` option. This could be used for a variety of things inside custom adapters such as passing connections around for transactions or passing config values for muti-tenant functionality. For more details see [#1325](https://github.com/balderdashy/waterline/pull/1325). +* [BUGFIX] Fix join table mapping for 2-way collection assocations (i.e. 
"many to many"), specifically in the case when a `through` model is being used, and custom column names are configured. Originally identified in [this StackOverflow question](http://stackoverflow.com/questions/37774857/sailsjs-through-association-how-to-create-association) (Thanks [@ultrasaurus](https://github.com/ultrasaurus)!) [8b46f0f](https://github.com/balderdashy/waterline/commit/8b46f0f), [1f4ff37](https://github.com/balderdashy/waterline/commit/1f4ff37) +* [BUGFIX] Make `.add()` idempotent in 2-way collection associations -- i.e. don't error out if the join record already exists. Fixes [#3784](https://github.com/balderdashy/sails/issues/3784 (Thanks [@linxiaowu66](https://github.com/linxiaowu66)!) [a14d16a](https://github.com/balderdashy/waterline/commit/a14d16a),[5b0ea8b](https://github.com/balderdashy/waterline/commit/5b0ea8b) -### 0.11.3 +### 0.11.4 -* [BUG] Fix auto-updating attributes to take into account custom column names. See [#1360](https://github.com/balderdashy/waterline/pull/1360) for more details. Thanks to [@jenjenut233](https://github.com/jenjenut233) for the patch! +* [BUGFIX] Fix auto-updating attributes to take into account custom column names. See [#1360](https://github.com/balderdashy/waterline/pull/1360) for more details. Thanks to [@jenjenut233](https://github.com/jenjenut233) for the patch! Also fixes https://github.com/balderdashy/sails/issues/3821. ### 0.11.2 -* [BUG] Fixes an issue when searching by `id` in schemaless mode. See [#1326](https://github.com/balderdashy/waterline/issues/1326) for more details. +* [BUGFIX] Fixes an issue when searching by `id` in schemaless mode. See [#1326](https://github.com/balderdashy/waterline/issues/1326) for more details. ### 0.11.1 -* [Enhancement] Handles fatal errors in validations better and returns clearer error messages for them. Who knew crashing the process would be bad? 
Thanks [@mikermcneil](https://github.com/mikermcneil) +* [ENHANCEMENT] Handles fatal errors in validations better and returns clearer error messages for them. Who knew crashing the process would be bad? Thanks [@mikermcneil](https://github.com/mikermcneil) ### 0.11.0 @@ -42,7 +103,7 @@ * [ENHANCEMENT] Errors coming from `.save()` now return actual Error objects that have been extended from `WLError`. -* [BUG] Fixes issue with dynamic finders not understanding custom `columnName` attributes. See [#1298](https://github.com/balderdashy/waterline/pull/1298) for more details. Thanks [@HaKr](https://github.com/HaKr) for the detailed test case. +* [BUGFIX] Fixes issue with dynamic finders not understanding custom `columnName` attributes. See [#1298](https://github.com/balderdashy/waterline/pull/1298) for more details. Thanks [@HaKr](https://github.com/HaKr) for the detailed test case. * [ENHANCEMENT] Auto timestamps column names are now overridable. See[#946](https://github.com/balderdashy/waterline/pull/946) for more details. Thanks [@Esya](https://github.com/Esya) for the patch. @@ -50,16 +111,16 @@ * [ENHANCEMENT] Ensures that createdAt and updatedAt are always the exact same on `create`. See [#1201](https://github.com/balderdashy/waterline/pull/1201) for more details. Thanks [@ziacik](https://github.com/ziacik) for the patch. -* [BUG] Fixed issue with booleans not being cast correctly for validations. See [#1225](https://github.com/balderdashy/waterline/pull/1225) for more details. Thanks [@edupsousa](https://github.com/edupsousa) for the patch. +* [BUGFIX] Fixed issue with booleans not being cast correctly for validations. See [#1225](https://github.com/balderdashy/waterline/pull/1225) for more details. Thanks [@edupsousa](https://github.com/edupsousa) for the patch. -* [BUG] Fixed bug where dates as primary keys would fail serialization. See [#1269](https://github.com/balderdashy/waterline/pull/1269) for more details. 
Thanks [@elennaro](https://github.com/elennaro) for the patch. +* [BUGFIX] Fixed bug where dates as primary keys would fail serialization. See [#1269](https://github.com/balderdashy/waterline/pull/1269) for more details. Thanks [@elennaro](https://github.com/elennaro) for the patch. -* [BUG] Update support and patch some bugs in Many-To-Many through associations. See [#1134](https://github.com/balderdashy/waterline/pull/1134) for more details. Thanks [@atiertant](https://github.com/atiertant) for the patch. +* [BUGFIX] Update support and patch some bugs in Many-To-Many through associations. See [#1134](https://github.com/balderdashy/waterline/pull/1134) for more details. Thanks [@atiertant](https://github.com/atiertant) for the patch. ### 0.10.30 -* [BUG] Fix issue with maximum callstack when using dates as foreign keys. See [#1265](https://github.com/balderdashy/waterline/issues/1265) for more details. Thanks [@elennaro](https://github.com/elennaro) for the patch. +* [BUGFIX] Fix issue with maximum callstack when using dates as foreign keys. See [#1265](https://github.com/balderdashy/waterline/issues/1265) for more details. Thanks [@elennaro](https://github.com/elennaro) for the patch. ### 0.10.29 @@ -67,7 +128,7 @@ ### 0.10.28 -* [BUG] Fix issue with `through` table joins. See [#1134](https://github.com/balderdashy/waterline/pull/1134) for more details. Thanks [@atiertant](https://github.com/atiertant) for the patch! +* [BUGFIX] Fix issue with `through` table joins. See [#1134](https://github.com/balderdashy/waterline/pull/1134) for more details. Thanks [@atiertant](https://github.com/atiertant) for the patch! * [ENHANCEMENT] Bump version of [Waterline-Schema](https://github.com/balderdashy/waterline-schema) to the latest. @@ -75,8 +136,18 @@ ### 0.10.27 -* [BUG] Fix issue with invalid `in` criteria removing more data than it should. See [#1076](https://github.com/balderdashy/waterline/pull/1076) for more details. 
Thanks [@slester](https://github.com/slester) for the patch! +* [BUGFIX] Fix issue with invalid `in` criteria removing more data than it should. See [#1076](https://github.com/balderdashy/waterline/pull/1076) for more details. Thanks [@slester](https://github.com/slester) for the patch! ### 0.10.26 -* [BUG] Fix issue with `defaultsTo` not setting values for undefined values. +* [BUGFIX] Fix issue with `defaultsTo` not setting values for undefined values. + +### 0.10.25 and earlier? + +See https://github.com/balderdashy/waterline/commits/f5efc0349fe9594a962357287bb6c25acdda9a76. + +> #### Earlier still? +> +> For the first year or so, Waterline lived in the main Sails repo. See https://github.com/balderdashy/sails/commits/master?after=q8Jnoggc%2F%2B7O7021adjRanuRhssrNDM3NA%3D%3D and back. + + diff --git a/LICENSE.md b/LICENSE.md index e777ca300..76ba06957 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,7 +1,7 @@ The MIT License (MIT) -- -Copyright © 2012-2016 Balderdash Design Co. +Copyright © 2012-present Mike McNeil & The Sails Company Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: diff --git a/Makefile b/Makefile deleted file mode 100644 index 67847ff20..000000000 --- a/Makefile +++ /dev/null @@ -1,24 +0,0 @@ -ROOT=$(shell pwd) - -test: test-unit test-integration - -test-unit: - @echo "\nRunning unit tests..." - @NODE_ENV=test mocha test/integration test/structure test/support test/unit --recursive - -test-integration: - @echo "\nRunning integration tests..." 
- rm -rf node_modules/waterline-adapter-tests/node_modules/waterline; - ln -s "$(ROOT)" node_modules/waterline-adapter-tests/node_modules/waterline; - @NODE_ENV=test node test/adapter/runner.js - -coverage: - @echo "\n\nRunning coverage report..." - rm -rf coverage - @NODE_ENV=test ./node_modules/istanbul/lib/cli.js cover --report none --dir coverage/core ./node_modules/.bin/_mocha \ - test/integration test/structure test/support test/unit -- --recursive - ./node_modules/istanbul/lib/cli.js cover --report none --dir coverage/adapter test/adapter/runner.js - ./node_modules/istanbul/lib/cli.js report - - -.PHONY: coverage diff --git a/README.md b/README.md index f313330ce..80b37caa7 100644 --- a/README.md +++ b/README.md @@ -1,97 +1,71 @@ -# [Waterline logo](https://github.com/balderdashy/waterline) +# [Waterline logo](http://waterlinejs.org) -[![Build Status](https://travis-ci.org/balderdashy/waterline.svg?branch=master)](https://travis-ci.org/balderdashy/waterline) -[![NPM version](https://badge.fury.io/js/waterline.svg)](http://badge.fury.io/js/waterline) -[![Dependency Status](https://gemnasium.com/balderdashy/waterline.svg)](https://gemnasium.com/balderdashy/waterline) -[![Test Coverage](https://codeclimate.com/github/balderdashy/waterline/badges/coverage.svg)](https://codeclimate.com/github/balderdashy/waterline) -[![StackOverflow](https://img.shields.io/badge/stackoverflow-waterline-blue.svg)]( http://stackoverflow.com/questions/tagged/waterline) +[![Master Branch Build Status](https://travis-ci.org/balderdashy/waterline.svg?branch=master)](https://travis-ci.org/balderdashy/waterline) +[![Master Branch Build Status (Windows)](https://ci.appveyor.com/api/projects/status/tdu70ax32iymvyq3?svg=true)](https://ci.appveyor.com/project/mikermcneil/waterline) +[![StackOverflow (waterline)](https://img.shields.io/badge/stackoverflow-waterline-blue.svg)]( http://stackoverflow.com/questions/tagged/waterline) +[![StackOverflow 
(sails)](https://img.shields.io/badge/stackoverflow-sails.js-blue.svg)]( http://stackoverflow.com/questions/tagged/sails.js) -Waterline is a brand new kind of storage and retrieval engine. +Waterline is a next-generation storage and retrieval engine, and the default ORM used in the [Sails framework](https://sailsjs.com). -It provides a uniform API for accessing stuff from different kinds of databases, protocols, and 3rd party APIs. That means you write the same code to get and store things like users, whether they live in Redis, MySQL, MongoDB, or Postgres. +It provides a uniform API for accessing stuff from different kinds of [databases and protocols](https://sailsjs.com/documentation/concepts/extending-sails/adapters/available-adapters). That means you write the same code to get and store things like users, whether they live in MySQL, MongoDB, neDB, or Postgres. Waterline strives to inherit the best parts of ORMs like ActiveRecord, Hibernate, and Mongoose, but with a fresh perspective and emphasis on modularity, testability, and consistency across adapters. -For detailed documentation, go to [Waterline Documentation](https://github.com/balderdashy/waterline-docs) repository. +## No more callbacks -## Installation +Starting with v0.13, Waterline takes full advantage of ECMAScript & Node 8's `await` keyword. -Install from NPM. +**In other words, [no more callbacks](https://gist.github.com/mikermcneil/c1028d000cc0cc8bce995a2a82b29245).** -```bash -$ npm install waterline +```js +var newOrg = await Organization.create({ + slug: 'foo' +}) +.fetch(); ``` -## Overview - -Waterline uses the concept of an Adapter to translate a predefined set of methods into a query that can be understood by your data store. Adapters allow you to use various datastores such as MySQL, PostgreSQL, MongoDB, Redis, etc. and have a clear API for working with your model data. +> Looking for the version of Waterline used in Sails v0.12? 
See the [0.11.x branch](https://github.com/balderdashy/waterline/tree/0.11.x) of this repo. If you're upgrading to v0.13 from a previous release of Waterline _standalone_, take a look at the [upgrading guide](http://sailsjs.com/documentation/upgrading/to-v-1-0). -It also allows an adapter to define it's own methods that don't necessarily fit into the CRUD methods defined by default in Waterline. If an adapter defines a custom method, Waterline will simply pass the function arguments down to the adapter. - -You may also supply an array of adapters and Waterline will map out the methods so they are both mixed in. It works similar to Underscore's [Extend](http://underscorejs.org/#extend) method where the last item in the array will override any methods in adapters before it. This allows you to mixin both the traditional CRUD adapters such as MySQL with specialized adapters such as Twilio and have both types of methods available. - -#### Community Adapters +## Installation +Install from NPM. 
- - [PostgreSQL](https://github.com/balderdashy/sails-postgresql) - *0.9+ compatible* - - [MySQL](https://github.com/balderdashy/sails-mysql) - *0.9+ compatible* - - [MongoDB](https://github.com/balderdashy/sails-mongo) - *0.9+ compatible* - - [Memory](https://github.com/balderdashy/sails-memory) - *0.9+ compatible* - - [Disk](https://github.com/balderdashy/sails-disk) - *0.9+ compatible* - - [Microsoft SQL Server](https://github.com/cnect/sails-sqlserver) - - [Redis](https://github.com/balderdashy/sails-redis) - - [Riak](https://github.com/balderdashy/sails-riak) - - [IRC](https://github.com/balderdashy/sails-irc) - - [Twitter](https://github.com/balderdashy/sails-twitter) - - [JSDom](https://github.com/mikermcneil/sails-jsdom) - - [Neo4j](https://github.com/natgeo/sails-neo4j) - - [OrientDB](https://github.com/appscot/sails-orientdb) - - [ArangoDB](https://github.com/rosmo/sails-arangodb) - - [Apache Cassandra](https://github.com/dtoubelis/sails-cassandra) - - [GraphQL](https://github.com/wistityhq/waterline-graphql) - - [Solr](https://github.com/sajov/sails-solr) - - [Apache Derby](https://github.com/dash-/node-sails-derby) +```bash + $ npm install waterline +``` +## Overview +Waterline uses the concept of an adapter to translate a predefined set of methods into a query that can be understood by your data store. Adapters allow you to use various datastores such as MySQL, PostgreSQL, MongoDB, Redis, etc. and have a clear API for working with your model data. -## Support -Need help or have a question? -- [StackOverflow](http://stackoverflow.com/questions/tagged/waterline) -- [Gitter Chat Room](https://gitter.im/balderdashy/sails) +Waterline supports [a wide variety of adapters](http://sailsjs.com/documentation/concepts/extending-sails/adapters/available-adapters), both core and community maintained. 
+## Usage -## Issue Submission -Please read the [issue submission guidelines](https://github.com/balderdashy/sails/blob/master/CONTRIBUTING.md#opening-issues) before opening a new issue. +The up-to-date documentation for Waterline is maintained on the [Sails framework website](http://sailsjs.com). +You can find detailed API reference docs under [Reference > Waterline ORM](http://sailsjs.com/documentation/reference/waterline-orm). For conceptual info (including Waterline standalone usage), and answers to common questions, see [Concepts > Models & ORM](https://sailsjs.com/documentation/concepts/models-and-orm). -Waterline and Sails are composed of a [number of different sub-projects](https://github.com/balderdashy/sails/blob/master/MODULES.md), many of which have their own dedicated repository. If you suspect an issue in one of these sub-modules, you can find its repo on the [organization](https://github.com/balderdashy) page, or in [MODULES.md](https://github.com/balderdashy/sails/blob/master/MODULES.md). Click [here](https://github.com/balderdashy/waterline/search?q=&type=Issues) to search/post issues in this repository. +#### Help +Check out the recommended [community support options](http://sailsjs.com/support) for tutorials and other resources. If you have a specific question, or just need to clarify [how something works](https://docs.google.com/drawings/d/1u7xb5jDY5i2oeVRP2-iOGGVsFbosqTMWh9wfmY3BTfw/edit), ask [for help](https://gitter.im/balderdashy/sails) or reach out to the [core team](http://sailsjs.com/about) [directly](http://sailsjs.com/flagship). -## Feature Requests -If you have an idea for a new feature, please feel free to submit it as a pull request to the backlog section of the [ROADMAP.md](https://github.com/balderdashy/waterline/blob/master/ROADMAP.md) file in this repository. 
+You can keep up to date with security patches, the Waterline release schedule, new database adapters, and events in your area by following us ([@sailsjs](https://twitter.com/sailsjs)) on Twitter. +## Bugs   [![NPM version](https://badge.fury.io/js/waterline.svg)](http://npmjs.com/package/waterline) +To report a bug, [click here](http://sailsjs.com/bugs). ## Contribute -Please carefully read our [contribution guide](https://github.com/balderdashy/sails/blob/master/CONTRIBUTING.md) before submitting a pull request with code changes. - - -## Tests +Please observe the guidelines and conventions laid out in our [contribution guide](http://sailsjs.com/documentation/contributing) when opening issues or submitting pull requests. +#### Tests All tests are written with [mocha](https://mochajs.org/) and should be run with [npm](https://www.npmjs.com/): ``` bash $ npm test ``` -## Coverage - -To generate the code coverage report, run: - -``` bash - $ npm run coverage -``` -And have a look at `coverage/lcov-report/index.html`. ## License +[MIT](http://sailsjs.com/license). Copyright © 2012-present Mike McNeil & The Sails Company -[MIT License](http://sails.mit-license.org/) Copyright © 2012-2016 Balderdash Design Co. - +[Waterline](http://waterlinejs.org), like the rest of the [Sails framework](https://sailsjs.com), is free and open-source under the [MIT License](https://sailsjs.com/license). -![image_squidhome@2x.png](http://sailsjs.org/images/bkgd_squiddy.png) +![image_squidhome@2x.png](http://sailsjs.com/images/bkgd_squiddy.png) diff --git a/ROADMAP.md b/ROADMAP.md index 100f598f8..cacc6424f 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -8,7 +8,7 @@ This file contains the development roadmap for the upcoming release of Waterline ## v0.13 -This section includes the main features, enhancements, and other improvements tentatively planned or already implemented for the v0.11 release of Waterline. 
Note that this is by no means a comprehensive changelog or release plan and may exclude important additions, bug fixes, and documentation tasks; it is just a reference point. Please also realize that the following notes may be slightly out of date-- until the release is finalized, API changes, deprecation announcements, additions, etc. are all tentative. +This section includes the main features, enhancements, and other improvements tentatively planned or already implemented for the v0.13 release of Waterline. Note that this is by no means a comprehensive changelog or release plan and may exclude important additions, bug fixes, and documentation tasks; it is just a reference point. Please also realize that the following notes may be slightly out of date-- until the release is finalized, API changes, deprecation announcements, additions, etc. are all tentative. + Pull out auto-migrations into https://github.com/balderdashy/sails-hook-orm + Remove the 2nd argument to the .exec() callback from `.update()`. @@ -78,3 +78,30 @@ Feature | Summary Do not mess with identity case | Identities of models should not be lowercased per default, better be left as defined. See [issue](https://github.com/balderdashy/waterline/issues/745) for more details. Support JSONB in PostgreSQL | Add support for JSONB querying in the Postgres adapter. This requires modifing/extending the criteria language. See [issue](https://github.com/balderdashy/sails-postgresql/issues/212) for more details. Deep populate | [#1052](https://github.com/balderdashy/waterline/pull/1052) | Recursively populate child associations. + + + + +  +  + + +## Experimental features + +Below, you'll find a handful of experimental features. If you're interested in them, please try them out and provide [feedback](http://twitter.com/sailsjs)! It helps the core team and other open-source contributors from the community prioritize our efforts, and it lets us know what works and what doesn't. 
(As always, we welcome your [contributions](http://sailsjs.com/contribute)!) + +> Please be aware that these are in the early stages and should not be relied upon +> as production features of Waterline. They could change at any time-- even on a patch +release! **You have been warned!** + +#### Experimental lifecycle and accessor methods + +```js +var Waterline = require('waterline'); +``` + ++ `Waterline.start(opts, function(err, orm) { /*...*/ })` ++ `Waterline.stop(orm, function(err) { /*...*/ })` ++ `Waterline.getModel(modelIdentity, orm)` + +> For detailed usage, see the source code (bottom of `lib/waterline.js` in this repo.) diff --git a/accessible/allowed-validations.js b/accessible/allowed-validations.js new file mode 100644 index 000000000..058fdd109 --- /dev/null +++ b/accessible/allowed-validations.js @@ -0,0 +1 @@ +module.exports = require('anchor/accessible/rules'); diff --git a/accessible/valid-attribute-properties.js b/accessible/valid-attribute-properties.js new file mode 100644 index 000000000..5c38dbdab --- /dev/null +++ b/accessible/valid-attribute-properties.js @@ -0,0 +1 @@ +module.exports = require('waterline-schema/accessible/valid-attribute-properties'); diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 000000000..81fa58bfd --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,44 @@ +# # # # # # # # # # # # # # # # # # # # # # # # # # +# ╔═╗╔═╗╔═╗╦ ╦╔═╗╦ ╦╔═╗╦═╗ ┬ ┬┌┬┐┬ # +# ╠═╣╠═╝╠═╝╚╗╔╝║╣ ╚╦╝║ ║╠╦╝ └┬┘││││ # +# ╩ ╩╩ ╩ ╚╝ ╚═╝ ╩ ╚═╝╩╚═o ┴ ┴ ┴┴─┘ # +# # +# This file configures Appveyor CI. # +# (i.e. how we run the tests on Windows) # +# # +# https://www.appveyor.com/docs/lang/nodejs-iojs/ # +# # # # # # # # # # # # # # # # # # # # # # # # # # + + +# Test against these versions of Node.js. +environment: + matrix: + - nodejs_version: "6" + - nodejs_version: "8" + - nodejs_version: "10" + +# Install scripts. 
(runs after repo cloning) +install: + # Get the latest stable version of Node.js + # (Not sure what this is for, it's just in Appveyor's example.) + - ps: Install-Product node $env:nodejs_version + # Install declared dependencies + - npm install + + +# Post-install test scripts. +test_script: + # Output Node and NPM version info. + # (Presumably just in case Appveyor decides to try any funny business? + # But seriously, always good to audit this kind of stuff for debugging.) + - node --version + - npm --version + # Run the actual tests. + - npm run custom-tests + + +# Don't actually build. +# (Not sure what this is for, it's just in Appveyor's example. +# I'm not sure what we're not building... but I'm OK with not +# building it. I guess.) +build: off diff --git a/example/express/express-example.js b/example/express/express-example.js index 92561606d..6aa1f1af5 100644 --- a/example/express/express-example.js +++ b/example/express/express-example.js @@ -1,159 +1,261 @@ /** - * A simple example of how to use Waterline v0.10 with Express + * Module dependencies */ -var express = require('express'), - _ = require('lodash'), - app = express(), - Waterline = require('waterline'), - bodyParser = require('body-parser'), - methodOverride = require('method-override'); +var express = require('express'); +var bodyParser = require('body-parser'); +var DiskAdapter = require('sails-disk'); +var MySQLAdapter = require('sails-mysql'); +// - - - - - - - - - - - - - - - - - - - - - - - - - - - +var Waterline = require('../../'); +// ^^ or if running this example outside of this repo, +// require the following instead: +// ``` +// var Waterline = require('waterline'); +// ``` +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - -// Instantiate a new instance of the ORM -var orm = new Waterline(); +/** + * A simple example of how to use Waterline v0.13 with Express 4. 
+ * + * Before running this example, be sure and do: + * ``` + * npm install express body-parser waterline sails-disk + * ``` + */ ////////////////////////////////////////////////////////////////// -// WATERLINE CONFIG +// WATERLINE SETUP ////////////////////////////////////////////////////////////////// -// Require any waterline compatible adapters here -var diskAdapter = require('sails-disk'), - mysqlAdapter = require('sails-mysql'); - - -// Build A Config Object -var config = { +// Instantiate a new instance of the ORM +Waterline.start({ - // Setup Adapters - // Creates named adapters that have been required adapters: { - 'default': diskAdapter, - disk: diskAdapter, - mysql: mysqlAdapter + 'sails-disk': DiskAdapter, + 'sails-mysql': MySQLAdapter, + // ...other Waterline-compatible adapters (e.g. 'sails-mysql') might go here }, - // Build Connections Config - // Setup connections using the named adapter configs - connections: { - myLocalDisk: { - adapter: 'disk' + datastores: { + default: { + adapter: 'sails-disk', + // ...any misc. special config might go here }, + customerDb: { + adapter: 'sails-mysql', + url: 'localhost/foobar', + // ...any misc. 
special config might go here + }, + // ...any other datastores go here + }, - myLocalMySql: { - adapter: 'mysql', - host: 'localhost', - database: 'foobar' + models: { + user: { + attributes: { + emailAddress: { type: 'string', required: true }, + firstName: { type: 'string' }, + lastName: { type: 'string' }, + numChickens: { type: 'number' }, + pets: { collection: 'pet' } + } + }, + pet: { + datastore: 'customerDb', + attributes: { + name: { type: 'string', required: true }, + breed: { + type: 'string', + validations: { + isIn: ['chihuahua', 'great dane', 'collie', 'unknown'] + }, + defaultsTo: 'unknown' + } + } } + // ...any other model defs go here }, - defaults: { - migrate: 'alter' + defaultModelSettings: { + primaryKey: 'id', + datastore: 'default', + attributes: { + id: { type: 'number', autoMigrations: { autoIncrement: true } }, + }, + // ...any other orm-wide default settings for all models go here } -}; - - -////////////////////////////////////////////////////////////////// -// WATERLINE MODELS -////////////////////////////////////////////////////////////////// - -var User = Waterline.Collection.extend({ - - identity: 'user', - connection: 'myLocalDisk', - - attributes: { - first_name: 'string', - last_name: 'string' +}, function(err, orm){ + if(err) { + console.error('Could not start up the ORM:\n',err); + return process.exit(1); } -}); -var Pet = Waterline.Collection.extend({ - identity: 'pet', - connection: 'myLocalMySql', + // ORM is now running! - attributes: { - name: 'string', - breed: 'string' - } -}); -// Load the Models into the ORM -orm.loadCollection(User); -orm.loadCollection(Pet); + ////////////////////////////////////////////////////////////////// + // EXPRESS SETUP + ////////////////////////////////////////////////////////////////// + // Setup simple Express application. 
+ var app = express(); + app.use(bodyParser.urlencoded({ extended: false })); + app.use(bodyParser.json()); -////////////////////////////////////////////////////////////////// -// EXPRESS SETUP -////////////////////////////////////////////////////////////////// + // Bind Express Routes (CRUD routes for /users) + // Find all users + app.get('/users', function(req, res) { + Waterline.getModel('user', orm) + .find().exec(function(err, records) { + if(err) { + switch (err.name) { + case 'UsageError': + return res.sendStatus(400); + default: + console.error('Unexpected error occurred:',err.stack); + return res.sendStatus(500); + } + }//-• -// Setup Express Application -app.use(bodyParser.urlencoded({ extended: false })); -app.use(bodyParser.json()); -app.use(methodOverride()); + return res.json(records); + }); + }); -// Build Express Routes (CRUD routes for /users) -app.get('/users', function(req, res) { - app.models.user.find().exec(function(err, models) { - if(err) return res.json({ err: err }, 500); - res.json(models); + // Find one user + app.get('/users/:id', function(req, res) { + Waterline.getModel('user', orm) + .findOne({ id: req.params.id }, function(err, record) { + if(err && err.name === 'UsageError') { + return res.sendStatus(400); + } + else if (err && err.name === 'AdapterError' && err.code === 'E_UNIQUE') { + return res.status(401).json(err); + } + else if (err) { + console.error('Unexpected error occurred:',err.stack); + return res.sendStatus(500); + } + else { + return res.json(record); + } + }); }); -}); -app.post('/users', function(req, res) { - app.models.user.create(req.body, function(err, model) { - if(err) return res.json({ err: err }, 500); - res.json(model); - }); -}); -app.get('/users/:id', function(req, res) { - app.models.user.findOne({ id: req.params.id }, function(err, model) { - if(err) return res.json({ err: err }, 500); - res.json(model); - }); -}); -app.delete('/users/:id', function(req, res) { - app.models.user.destroy({ id: 
req.params.id }, function(err) { - if(err) return res.json({ err: err }, 500); - res.json({ status: 'ok' }); + // Create a user + // (This one uses promises, just for fun.) + app.post('/users', function(req, res) { + Waterline.getModel('user', orm) + .create(req.body) + .meta({fetch:true}) + .catch({name:'UsageError'}, function (err) { + console.log('Refusing to perform impossible/confusing query. Details:',err); + return res.sendStatus(400); + }) + .catch({name:'AdapterError', code:'E_UNIQUE'}, function (err) { + console.log('Refusing to create duplicate user. Details:',err); + return res.status(401).json(err); + }) + .catch(function (err) { + console.error('Unexpected error occurred:',err.stack); + return res.sendStatus(500); + }) + .then(function (newRecord){ + return res.status(201).json(newRecord); + }); }); -}); -app.put('/users/:id', function(req, res) { - // Don't pass ID to update - delete req.body.id; - - app.models.user.update({ id: req.params.id }, req.body, function(err, model) { - if(err) return res.json({ err: err }, 500); - res.json(model); + // Destroy a user (if it exists) + app.delete('/users/:id', function(req, res) { + Waterline.getModel('user', orm) + .destroy({ id: req.params.id }, function(err) { + if(err && err.name === 'UsageError') { + return res.sendStatus(400); + } + else if (err) { + console.error('Unexpected error occurred:',err.stack); + return res.sendStatus(500); + } + else { + return res.sendStatus(200); + } + }); }); -}); + // Update a user + app.put('/users/:id', function(req, res) { + + // Don't pass ID to update + // > (We don't want to try to change the primary key this way, at least not + // > for this example. It's totally possible to do that, of course... just + // > kind of weird.) + var valuesToSet = req.body; + delete valuesToSet.id; + + // In this example, we'll send back a JSON representation of the newly-updated + // user record, just for kicks. 
+ Waterline.getModel('user', orm) + .update({ id: req.params.id }) + .set(valuesToSet) + .meta({fetch:true}) + .exec(function(err, updatedUsers) { + if(err && err.name === 'UsageError') { + return res.sendStatus(400); + } + else if (err && err.name === 'AdapterError' && err.code === 'E_UNIQUE') { + return res.status(401).json(err); + } + else if (err) { + console.error('Unexpected error occurred:',err.stack); + return res.sendStatus(500); + } + else if (updatedUsers.length < 1) { + return res.sendStatus(404); + } + else { + return res.status(200).json(updatedUsers[0]); + } + }); + }); -////////////////////////////////////////////////////////////////// -// START WATERLINE -////////////////////////////////////////////////////////////////// -// Start Waterline passing adapters in -orm.initialize(config, function(err, models) { - if(err) throw err; + // Lift Express server and start listening to requests + app.listen(3000, function (err){ + if (err) { + console.error('Failed to lift express server:', err); + console.error('(Attempting to shut down ORM...)'); + Waterline.stop(orm, function(err){ + if (err) { + console.error('Unexpected failure when attempting to shut down ORM! Details:', err); + return process.exit(1); + } + + console.error('ORM was shut down successfully.'); + return process.exit(1); + });//_∏_ + return; + }//-• + + console.log('Express server is running and ORM is started!'); + console.log('To see saved users, visit http://localhost:3000/users'); + console.log('Press CTRL+C to terminate process.'); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // NOTE: Sails takes care of all this kind of stuff automatically, but if you're using + // vanilla express, it would be a good idea to bind SIGINT/SIGTERM listeners here and have + // them shut down the ORM if fired. 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - app.models = models.collections; - app.connections = models.connections; + }); - // Start Server - app.listen(3000); - - console.log("To see saved users, visit http://localhost:3000/users"); }); diff --git a/example/raw/bootstrap.js b/example/raw/bootstrap.js deleted file mode 100644 index a1768417f..000000000 --- a/example/raw/bootstrap.js +++ /dev/null @@ -1,71 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash') - , Waterline = require('../../lib/waterline'); - - -/** - * Set up Waterline with the specified - * models, connections, and adapters. - - @param options - :: {Dictionary} adapters - :: {Dictionary} connections - :: {Dictionary} collections - - @param {Function} cb - () {Error} err - () ontology - :: {Dictionary} collections - :: {Dictionary} connections - - @return {Waterline} - (You probably wont want to use this. Instead, write code in the callback and use the `ontology` that comes back.) - */ - -module.exports = function bootstrap( options, cb ) { - - var adapters = options.adapters || {}; - var connections = options.connections || {}; - var collections = options.collections || {}; - - - - _(adapters).each(function (def, identity) { - // Make sure our adapter defs have `identity` properties - def.identity = def.identity || identity; - }); - - - var extendedCollections = []; - _(collections).each(function (def, identity) { - - // Make sure our collection defs have `identity` properties - def.identity = def.identity || identity; - - // Fold object of collection definitions into an array - // of extended Waterline collections. - extendedCollections.push(Waterline.Collection.extend(def)); - }); - - - // Instantiate Waterline and load the already-extended - // Waterline collections. 
- var waterline = new Waterline(); - extendedCollections.forEach(function (collection) { - waterline.loadCollection(collection); - }); - - - // Initialize Waterline - // (and tell it about our adapters) - waterline.initialize({ - adapters: adapters, - connections: connections - }, cb); - - return waterline; -}; - diff --git a/example/raw/raw-example.js b/example/raw/raw-example.js index 4f4bbcaa3..6216373e0 100644 --- a/example/raw/raw-example.js +++ b/example/raw/raw-example.js @@ -1,39 +1,208 @@ +#!/usr/bin/env node + /** * Module dependencies */ -var setupWaterline = require('./bootstrap'); - - +var util = require('util'); +var _ = require('@sailshq/lodash'); +var SailsDiskAdapter = require('sails-disk'); +var Waterline = require('../../'); /** - * Do stuff. + * `raw-example.js` + * + * This is an example demonstrating how to use Waterline + * from a vanilla Node.js script. + * + * + * To run this example, do: + * ``` + * node example/raw/raw-example + * ``` */ -setupWaterline({ - adapters: { - 'sails-disk': require('sails-disk') - }, - collections: { - user: { - connection: 'tmp', - attributes: {} - } - }, - connections: { - tmp: { - adapter: 'sails-disk' + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// +// NOTE: The `machine-as-script` package, like Sails, takes care of all this kind of +// stuff automatically, including bootstrapping the ORM in the context of a Sails app. +// (For deets, see https://npmjs.com/package/machine-as-script) +// +// But since we're doing this vanilla-style, we'll kick things off by calling a self-invoking +// function here. This just lets us avoid repeating ourselves and gives us a level of control +// over logging. See the two callbacks below in order to better understand how it works. 
+// +// > To read more general tips about managing flow and exposing customizable logic via +// > self-invoking functions in Node.js apps/scripts, check out: +// > https://www.npmjs.com/package/parley#flow-control +// +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +(function (handleLog, done){ + + // Set up Waterline. + Waterline.start({ + + + adapters: { + + 'sails-disk': SailsDiskAdapter, + // ...other Waterline-compatible adapters (e.g. 'sails-mysql') might go here + + }, + + + datastores: { + + default: { + adapter: 'sails-disk' + } + + }, + + + models: { + + user: { + datastore: 'default', + + attributes: { + id: { type: 'number', autoMigrations: { autoIncrement: true } }, + numChickens: { type: 'number' }, + pets: { collection: 'pet' } + }, + primaryKey: 'id', + schema: true + }, + + pet: { + datastore: 'default', + + attributes: { + id: { type: 'number', autoMigrations: { autoIncrement: true } }, + name: { type: 'string' } + }, + primaryKey: 'id', + schema: true + } + } - } -}, function waterlineReady (err, ontology) { - if (err) throw err; - // Our collections (i.e. models): - ontology.collections; - // Our connections (i.e. databases): - ontology.connections; + }, function whenWaterlineIsReady (err, orm) { + if (err) { + return done(new Error('Could not start up Waterline ORM: '+err.stack)); + }//--• + + + // Now kick off another self-invoking function. + // (Once again, this is just to avoid repeating ourselves.) 
+ (function (proceed){ + + handleLog(); + handleLog(); + handleLog('--'); + handleLog('Waterline ORM is started and ready.'); + + // Get access to models: + var Pet = Waterline.getModel('pet', orm); + var User = Waterline.getModel('user', orm); + + handleLog(); + handleLog('(this is where you could write come code)'); + // ...for example, like this: + + handleLog( + '\n'+ + '\n'+ + '==========================================================================\n'+ + '• EXAMPLE: Calling some model methods: •\n'+ + '==========================================================================\n' + ); + + + var PET_NAMES = ['Carrie', 'Samantha', 'Charlotte', 'Miranda', 'Mr. Big']; + Pet.createEach([ + { name: _.random(PET_NAMES) }, + { name: _.random(PET_NAMES) } + ]) + .meta({fetch: true}) + .exec(function (err, pets) { + if (err) { return proceed(new Error('Failed to create new pets: '+err.stack)); } + + User.create({ + numChickens: pets.length, + pets: _.pluck(pets, 'id') + }) + .exec(function (err) { + if (err) { return proceed(new Error('Failed to create new user: '+err.stack)); } + + User.stream() + .populate('pets') + .eachRecord(function eachRecord(user, next){ + handleLog('Streamed record:',util.inspect(user,{depth: null})); + return next(); + }) + .exec(function afterwards(err) { + if (err) { return proceed(new Error('Unexpected error occurred while streaming users:',err.stack)); } -}); + return proceed(); + });// + + });// + });// + + })(function (err){ + if (err) { + Waterline.stop(orm, function(secondaryErr) { + if (secondaryErr) { + handleLog(); + handleLog('An error occurred, and then, when trying to shut down the ORM gracefully, THAT failed too!'); + handleLog('More on the original error in just a while.'); + handleLog('But first, here\'s the secondary error that was encountered while trying to shut down the ORM:\n', secondaryErr); + handleLog('... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... 
'); + return done(err); + }//-• + + return done(err); + + });//_∏_ + return; + }//-• + + // IWMIH, everything went well. + handleLog(); + handleLog('Done. (Stopping ORM...)'); + handleLog('... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... '); + Waterline.stop(orm, function(secondaryErr) { + if (secondaryErr) { + return done(new Error('Everything else went fine, but then when attempting to shut down the ORM gracefully, something went wrong! Details:'+secondaryErr.stack)); + } + return done(); + }); + + });// + + });// + +})( + function handleLog(){ console.log.apply(console, Array.prototype.slice.call(arguments)); }, + function whenFinishedAndORMHasBeenStopped(err){ + if (err) { + console.log(); + console.log(err.stack); + console.log(); + console.log(' ✘ Something went wrong.'); + console.log(' (see stack trace above)'); + console.log(); + return process.exit(1); + }//-• + + console.log(); + console.log(' ✔ OK.'); + console.log(); + return process.exit(0); + } +);// diff --git a/lib/waterline.js b/lib/waterline.js index 24d11d680..4b704c32b 100644 --- a/lib/waterline.js +++ b/lib/waterline.js @@ -1,282 +1,1025 @@ -var _ = require('lodash'); +// ██╗ ██╗ █████╗ ████████╗███████╗██████╗ ██╗ ██╗███╗ ██╗███████╗ +// ██║ ██║██╔══██╗╚══██╔══╝██╔════╝██╔══██╗██║ ██║████╗ ██║██╔════╝ +// ██║ █╗ ██║███████║ ██║ █████╗ ██████╔╝██║ ██║██╔██╗ ██║█████╗ +// ██║███╗██║██╔══██║ ██║ ██╔══╝ ██╔══██╗██║ ██║██║╚██╗██║██╔══╝ +// ╚███╔███╔╝██║ ██║ ██║ ███████╗██║ ██║███████╗██║██║ ╚████║███████╗ +// ╚══╝╚══╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝╚══════╝╚═╝╚═╝ ╚═══╝╚══════╝ +// + +var assert = require('assert'); +var util = require('util'); +var _ = require('@sailshq/lodash'); var async = require('async'); +// var EA = require('encrypted-attr'); « this is required below for node compat. 
+var flaverr = require('flaverr'); var Schema = require('waterline-schema'); -var Connections = require('./waterline/connections'); -var CollectionLoader = require('./waterline/collection/loader'); -var COLLECTION_DEFAULTS = require('./waterline/collection/defaults'); -var hasOwnProperty = require('./waterline/utils/helpers').object.hasOwnProperty; +var buildDatastoreMap = require('./waterline/utils/system/datastore-builder'); +var buildLiveWLModel = require('./waterline/utils/system/collection-builder'); +var BaseMetaModel = require('./waterline/MetaModel'); +var getModel = require('./waterline/utils/ontology/get-model'); +var validateDatastoreConnectivity = require('./waterline/utils/system/validate-datastore-connectivity'); + + + /** - * Waterline + * ORM (Waterline) + * + * Construct a Waterline ORM instance. + * + * @constructs {Waterline} */ +function Waterline() { -var Waterline = module.exports = function() { + // Start by setting up an array of model definitions. + // (This will hold the raw model definitions that were passed in, + // plus any implicitly introduced models-- but that part comes later) + // + // > `wmd` stands for "weird intermediate model def thing". + // - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: make this whole wmd thing less weird. + // - - - - - - - - - - - - - - - - - - - - - - - - + var wmds = []; - if (!(this instanceof Waterline)) { - return new Waterline(); - } + // Hold a map of the instantaited and active datastores and models. + var modelMap = {}; + var datastoreMap = {}; - // Keep track of all the collections internally so we can build associations - // between them when needed. - this._collections = []; + // This "context" dictionary will be passed into the BaseMetaModel constructor + // later every time we instantiate a new BaseMetaModel instance (e.g. `User` + // or `Pet` or generically, sometimes called "WLModel" -- sorry about the + // capital letters!!) 
+ // + var context = { + collections: modelMap, + datastores: datastoreMap + }; + // ^^FUTURE: Level this out (This is currently just a stop gap to prevent + // re-writing all the "collection query" stuff.) - // Keep track of all the active connections used by collections - this._connections = {}; - return this; -}; + // Now build an ORM instance. + var orm = {}; -/* - *********************************************************** - * Modules that can be extended - ***********************************************************/ -// Collection to be extended in your application -Waterline.Collection = require('./waterline/collection'); + // ┌─┐─┐ ┬┌─┐┌─┐┌─┐┌─┐ ┌─┐┬─┐┌┬┐ ╦═╗╔═╗╔═╗╦╔═╗╔╦╗╔═╗╦═╗╔╦╗╔═╗╔╦╗╔═╗╦ + // ├┤ ┌┴┬┘├─┘│ │└─┐├┤ │ │├┬┘│││ ╠╦╝║╣ ║ ╦║╚═╗ ║ ║╣ ╠╦╝║║║║ ║ ║║║╣ ║ + // └─┘┴ └─┴ └─┘└─┘└─┘ └─┘┴└─┴ ┴o╩╚═╚═╝╚═╝╩╚═╝ ╩ ╚═╝╩╚═╩ ╩╚═╝═╩╝╚═╝╩═╝ + /** + * .registerModel() + * + * Register a "weird intermediate model definition thing". (see above) + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * FUTURE: Deprecate support for this method in favor of simplified `Waterline.start()` + * (see bottom of this file). In WL 1.0, remove this method altogether. + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Dictionary} wmd + */ + orm.registerModel = function registerModel(wmd) { + wmds.push(wmd); + }; -// Model Instance, returned as query results -Waterline.Model = require('./waterline/model'); + // Alias for backwards compatibility: + orm.loadCollection = function heyThatsDeprecated(){ + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Change this alias method so that it throws an error in WL 0.14. + // (And in WL 1.0, just remove it altogether.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + console.warn('\n'+ + 'Warning: As of Waterline 0.13, `loadCollection()` is now `registerModel()`. 
Please call that instead.\n'+ + 'I get what you mean, so I temporarily renamed it for you this time, but here is a stack trace\n'+ + 'so you know where this is coming from in the code, and can change it to prevent future warnings:\n'+ + '```\n'+ + (new Error()).stack+'\n'+ + '```\n' + ); + orm.registerModel.apply(orm, Array.prototype.slice.call(arguments)); + }; -/* - *********************************************************** - * Prototype Methods - ***********************************************************/ -/** - * loadCollection - * - * Loads a new Collection. It should be an extended Waterline.Collection - * that contains your attributes, instance methods and class methods. - * - * @param {Object} collection - * @return {Object} internal models dictionary - * @api public - */ + // ┌─┐─┐ ┬┌─┐┌─┐┌─┐┌─┐ ┌─┐┬─┐┌┬┐ ╦╔╗╔╦╔╦╗╦╔═╗╦ ╦╔═╗╔═╗ + // ├┤ ┌┴┬┘├─┘│ │└─┐├┤ │ │├┬┘│││ ║║║║║ ║ ║╠═╣║ ║╔═╝║╣ + // └─┘┴ └─┴ └─┘└─┘└─┘ └─┘┴└─┴ ┴o╩╝╚╝╩ ╩ ╩╩ ╩╩═╝╩╚═╝╚═╝ -Waterline.prototype.loadCollection = function(collection) { + /** + * .initialize() + * + * Start the ORM and set up active datastores. + * + * @param {Dictionary} options + * @param {Function} done + */ + orm.initialize = function initialize(options, done) { - // Cache collection - this._collections.push(collection); + try { - return this._collections; -}; -/** - * initialize - * - * Creates an initialized version of each Collection and auto-migrates depending on - * the Collection configuration. 
- * - * @param {Object} config object containing adapters - * @param {Function} callback - * @return {Array} instantiated collections - * @api public - */ + // First, verify traditional settings, check compat.: + // ============================================================================================= -Waterline.prototype.initialize = function(options, cb) { - var self = this; + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: In WL 0.14, deprecate support for this method in favor of the simplified + // `Waterline.start()` (see bottom of this file). In WL 1.0, remove it altogether. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Ensure a config object is passed in containing adapters - if (!options) throw new Error('Usage Error: function(options, callback)'); - if (!options.adapters) throw new Error('Options object must contain an adapters object'); - if (!options.connections) throw new Error('Options object must contain a connections object'); - // Allow collections to be passed in to the initialize method - if (options.collections) { - for (var collection in options.collections) { - this.loadCollection(options.collections[collection]); - } + // Ensure the ORM hasn't already been initialized. + // (This prevents all sorts of issues, because model definitions are modified in-place.) + if (_.keys(modelMap).length > 0) { + throw new Error('A Waterline ORM instance cannot be initialized more than once. 
To reset the ORM, create a new instance of it by running `new Waterline()`.'); + } - // Remove collections from the options after they have been loaded - delete options.collections; - } + // Backwards-compatibility for `connections`: + if (!_.isUndefined(options.connections)){ - // Cache a reference to instantiated collections - this.collections = {}; + // Sanity check + assert(_.isUndefined(options.datastores), 'Attempted to provide backwards-compatibility for `connections`, but `datastores` was ALSO defined! This should never happen.'); - // Build up all the connections used by the collections - this.connections = new Connections(options.adapters, options.connections); + options.datastores = options.connections; + console.warn('\n'+ + 'Warning: `connections` is no longer supported. Please use `datastores` instead.\n'+ + 'I get what you mean, so I temporarily renamed it for you this time, but here is a stack trace\n'+ + 'so you know where this is coming from in the code, and can change it to prevent future warnings:\n'+ + '```\n'+ + (new Error()).stack+'\n'+ + '```\n' + ); + delete options.connections; + }//>- - // Grab config defaults or set them to empty - var defaults = _.merge({}, COLLECTION_DEFAULTS, options.defaults); + // Usage assertions + if (_.isUndefined(options) || !_.keys(options).length) { + throw new Error('Usage Error: .initialize(options, callback)'); + } - // Build a schema map - this.schema = new Schema(this._collections, this.connections, defaults); + if (_.isUndefined(options.adapters) || !_.isPlainObject(options.adapters)) { + throw new Error('Options must contain an `adapters` dictionary'); + } - // Load a Collection into memory - function loadCollection(item, next) { - var loader = new CollectionLoader(item, self.connections, defaults); - var collection = loader.initialize(self); + if (_.isUndefined(options.datastores) || !_.isPlainObject(options.datastores)) { + throw new Error('Options must contain a `datastores` dictionary'); + } - // 
Store the instantiated collection so it can be used - // internally to create other records - self.collections[collection.identity.toLowerCase()] = collection; - next(); - } + // - - - - - - - - - - - - - - - - - - - - - + // FUTURE: anchor ruleset checks + // - - - - - - - - - - - - - - - - - - - - - - async.auto({ - // Load all the collections into memory - loadCollections: function(next) { - async.each(self._collections, loadCollection, function(err) { - if (err) return next(err); + // Next, validate ORM settings related to at-rest encryption, if it is in use. + // ============================================================================================= + var areAnyModelsUsingAtRestEncryption; + _.each(wmds, function(wmd){ + _.each(wmd.prototype.attributes, function(attrDef){ + if (attrDef.encrypt !== undefined) { + areAnyModelsUsingAtRestEncryption = true; + } + });//∞ + });//∞ - // Migrate Junction Tables - var junctionTables = []; + // Only allow using at-rest encryption for compatible Node versions + var EA; + if (areAnyModelsUsingAtRestEncryption) { + var RX_NODE_MAJOR_DOT_MINOR = /^v([^.]+\.?[^.]+)\./; + var parsedNodeMajorAndMinorVersion = process.version.match(RX_NODE_MAJOR_DOT_MINOR) && (+(process.version.match(RX_NODE_MAJOR_DOT_MINOR)[1])); + var MIN_NODE_VERSION = 6; + var isNativeCryptoFullyCapable = parsedNodeMajorAndMinorVersion >= MIN_NODE_VERSION; + if (!isNativeCryptoFullyCapable) { + throw new Error('Current installed node version\'s native `crypto` module is not fully capable of the necessary functionality for encrypting/decrypting data at rest with Waterline. To use this feature, please upgrade to Node v' + MIN_NODE_VERSION + ' or above, flush your node_modules, run npm install, and then try again. 
Otherwise, if you cannot upgrade Node.js, please remove the `encrypt` property from your models\' attributes.'); + } + EA = require('encrypted-attr'); + }//fi - Object.keys(self.schema).forEach(function(table) { - if (!self.schema[table].junctionTable) return; - junctionTables.push(Waterline.Collection.extend(self.schema[table])); - }); + _.each(wmds, function(wmd){ - async.each(junctionTables, loadCollection, function(err) { - if (err) return next(err); - next(null, self.collections); - }); - }); - }, + var modelDef = wmd.prototype; - // Build up Collection Schemas - buildCollectionSchemas: ['loadCollections', function(next, results) { - var collections = self.collections; - var schemas = {}; + // Verify that `encrypt` attr prop is valid, if in use. + var isThisModelUsingAtRestEncryption; + try { + _.each(modelDef.attributes, function(attrDef, attrName){ + if (attrDef.encrypt !== undefined) { + if (!_.isBoolean(attrDef.encrypt)){ + throw flaverr({ + code: 'E_INVALID_ENCRYPT', + attrName: attrName, + message: 'If set, `encrypt` must be either `true` or `false`.' + }); + }//• - Object.keys(collections).forEach(function(key) { - var collection = collections[key]; + if (attrDef.encrypt === true){ - // Remove hasMany association keys - var schema = _.clone(collection._schema.schema); + isThisModelUsingAtRestEncryption = true; - Object.keys(schema).forEach(function(key) { - if (hasOwnProperty(schema[key], 'type')) return; - delete schema[key]; - }); + if (attrDef.type === 'ref') { + throw flaverr({ + code: 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION', + attrName: attrName, + whyNotCompatible: 'with `type: \'ref\'` attributes.' + }); + }//• + + if (attrDef.autoCreatedAt || attrDef.autoUpdatedAt) { + throw flaverr({ + code: 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION', + attrName: attrName, + whyNotCompatible: 'with `'+(attrDef.autoCreatedAt?'autoCreatedAt':'autoUpdatedAt')+'` attributes.' 
+ }); + }//• + + if (attrDef.model || attrDef.collection) { + throw flaverr({ + code: 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION', + attrName: attrName, + whyNotCompatible: 'with associations.' + }); + }//• + + if (attrDef.defaultsTo !== undefined) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Consider adding support for this. Will require some refactoring + // in order to do it right (i.e. otherwise we'll just be copying and pasting + // the encryption logic.) We'll want to pull it out from normalize-value-to-set + // into a new utility, then call that from the appropriate spot in + // normalize-new-record in order to encrypt the initial default value. + // + // (See also the other note in normalize-new-record re defaultsTo + cloneDeep.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + throw flaverr({ + code: 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION', + attrName: attrName, + whyNotCompatible: 'with an attribute that also specifies a `defaultsTo`. '+ + 'Please remove the `defaultsTo` from this attribute definition.' + }); + }//• + + }//fi + + }//fi + });//∞ + } catch (err) { + switch (err.code) { + case 'E_INVALID_ENCRYPT': + throw flaverr({ + message: + 'Invalid usage of `encrypt` in the definition for `'+modelDef.identity+'` model\'s '+ + '`'+err.attrName+'` attribute. '+err.message + }, err); + case 'E_ATTR_NOT_COMPATIBLE_WITH_AT_REST_ENCRYPTION': + throw flaverr({ + message: + 'Invalid usage of `encrypt` in the definition for `'+modelDef.identity+'` model\'s '+ + '`'+err.attrName+'` attribute. At-rest encryption (`encrypt: true`) cannot be used '+ + err.whyNotCompatible + }, err); + default: throw err; + } + } + + + // Verify `dataEncryptionKeys`. + // (Remember, if there is a secondary key system in use, these DEKs should have + // already been "unwrapped" before they were passed in to Waterline as model settings.) 
+ if (modelDef.dataEncryptionKeys !== undefined) { + + if (!_.isObject(modelDef.dataEncryptionKeys) || _.isArray(modelDef.dataEncryptionKeys) || _.isFunction(modelDef.dataEncryptionKeys)) { + throw flaverr({ + message: 'In the definition for the `'+modelDef.identity+'` model, the `dataEncryptionKeys` model setting '+ + 'is invalid. If specified, `dataEncryptionKeys` must be a dictionary (plain JavaScript object).' + }); + }//• + + // Check all DEKs for validity. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // (FUTURE: maybe extend EA to support a `validateKeys()` method instead of this-- + // or at least to have error code) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + try { + _.each(modelDef.dataEncryptionKeys, function(dek, dekId){ + + if (!dek || !_.isString(dek)) { + throw flaverr({ + code: 'E_INVALID_DATA_ENCRYPTION_KEYS', + dekId: dekId, + message: 'Must be a cryptographically random, 32 byte string.' + }); + }//• + + if (!dekId.match(/^[a-z\$]([a-z0-9])*$/i)){ + throw flaverr({ + code: 'E_INVALID_DATA_ENCRYPTION_KEYS', + dekId: dekId, + message: 'Please make sure the ids of all of your data encryption keys begin with a letter and do not contain any special characters.' 
+ }); + }//• + + if (areAnyModelsUsingAtRestEncryption) { + try { + EA(undefined, { keys: modelDef.dataEncryptionKeys, keyId: dekId }).encryptAttribute(undefined, 'test-value-purely-for-validation'); + } catch (err) { + throw flaverr({ + code: 'E_INVALID_DATA_ENCRYPTION_KEYS', + dekId: dekId + }, err); + } + } + + });//∞ + } catch (err) { + switch (err.code) { + case 'E_INVALID_DATA_ENCRYPTION_KEYS': + throw flaverr({ + message: 'In the definition for the `'+modelDef.identity+'` model, one of the data encryption keys (`dataEncryptionKeys.'+err.dekId+'`) is invalid.\n'+ + 'Details:\n'+ + ' '+err.message + }, err); + default: + throw err; + } + } + + }//fi + + + // If any attrs have `encrypt: true`, verify that there is both a valid + // `dataEncryptionKeys` dictionary and a valid `dataEncryptionKeys.default` DEK set. + if (isThisModelUsingAtRestEncryption) { + + if (!modelDef.dataEncryptionKeys || !modelDef.dataEncryptionKeys.default) { + throw flaverr({ + message: + 'DEKs should be 32 bytes long, and cryptographically random. A random, default DEK is included '+ + 'in new Sails apps, so one easy way to generate a new DEK is to generate a new Sails app. '+ + 'Alternatively, you could run:\n'+ + ' require(\'crypto\').randomBytes(32).toString(\'base64\')\n'+ + '\n'+ + 'Remember: once in production, you should manage your DEKs like you would any other sensitive credential. '+ + 'For example, one common best practice is to configure them using environment variables.\n'+ + 'In a Sails app:\n'+ + ' sails_models__dataEncryptionKeys__default=vpB2EhXaTi+wYKUE0ojI5cVQX/VRGP++Fa0bBW/NFSs=\n'+ + '\n'+ + ' [?] 
If you\'re unsure or want advice, head over to https://sailsjs.com/support' + }); + }//• + }//fi + + + });//∞ + + + // Next, set up support for the default archive, and validate related settings: + // ============================================================================================= + + var DEFAULT_ARCHIVE_MODEL_IDENTITY = 'archive'; + + // Notes for use in docs: + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // • To choose which datastore the Archive model will live in: + // + // …in top-level orm settings: + // archiveModelIdentity: 'myarchive', + // + // …in 'MyArchive' model: + // datastore: 'foo' + // + // + // • To choose the `tableName` and `columnName`s for your Archive model: + // …in top-level orm settings: + // archiveModelIdentity: 'archive', + // + // …in 'archive' model: + // tableName: 'foo', + // attributes: { + // originalRecord: { type: 'json', columnName: 'barbaz' }, + // fromModel: { type: 'string', columnName: 'bingbong' } + // } + // + // + // • To disable support for the `.archive()` model method: + // + // …in top-level orm settings: + // archiveModelIdentity: false + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + var archiversInfoByArchiveIdentity = {}; + + _.each(wmds, function(wmd){ + + var modelDef = wmd.prototype; + // console.log('· checking `'+util.inspect(wmd,{depth:null})+'`…'); + // console.log('· checking `'+modelDef.identity+'`…'); + + // Check the `archiveModelIdentity` model setting. + if (modelDef.archiveModelIdentity === undefined) { + if (modelDef.archiveModelIdentity !== modelDef.identity) { + // console.log('setting default archiveModelIdentity for model `'+modelDef.identity+'`…'); + modelDef.archiveModelIdentity = DEFAULT_ARCHIVE_MODEL_IDENTITY; + } + else { + // A model can't be its own archive model! 
+ modelDef.archiveModelIdentity = false; + } + }//fi + + if (modelDef.archiveModelIdentity === false) { + // This will cause the .archive() method for this model to error out and explain + // that the feature was explicitly disabled. + } + else if (modelDef.archiveModelIdentity === modelDef.identity) { + return done(new Error('Invalid `archiveModelIdentity` setting. A model cannot be its own archive! But model `'+modelDef.identity+'` has `archiveModelIdentity: \''+modelDef.archiveModelIdentity+'\'`.')); + } + else if (!modelDef.archiveModelIdentity || !_.isString(modelDef.archiveModelIdentity)){ + return done(new Error('Invalid `archiveModelIdentity` setting. If set, expecting either `false` (to disable .archive() altogether) or the identity of a registered model (e.g. "archive"), but instead got: '+util.inspect(options.defaults.archiveModelIdentity,{depth:null}))); + }//fi + + // Keep track of the model identities of all archive models, as well as info about the models using them. + if (modelDef.archiveModelIdentity !== false) { + if (!_.contains(Object.keys(archiversInfoByArchiveIdentity), modelDef.archiveModelIdentity)) { + // Save an initial info dictionary: + archiversInfoByArchiveIdentity[modelDef.archiveModelIdentity] = { + archivers: [] + }; + }//fi + + archiversInfoByArchiveIdentity[modelDef.archiveModelIdentity].archivers.push(modelDef); + + }//fi + + + });//∞ + + + // If any models are using the default archive, then register the default archive model + // if it isn't already registered. 
+ if (_.contains(Object.keys(archiversInfoByArchiveIdentity), DEFAULT_ARCHIVE_MODEL_IDENTITY)) { + + + // Inject the built-in Archive model into the ORM's ontology: + // • id (pk-- string or number, depending on where the Archive model is being stored) + // • createdAt (timestamp-- this is effectively ≈ "archivedAt") + // • originalRecord (json-- the original record, completely unpopulated) + // • originalRecordId (pk-- string or number, the pk of the original record) + // • fromModel (string-- the original model identity) + // + // > Note there's no updatedAt! + + var existingDefaultArchiveWmd = _.find(wmds, function(wmd){ return wmd.prototype.identity === DEFAULT_ARCHIVE_MODEL_IDENTITY; }); + if (!existingDefaultArchiveWmd) { + + var defaultArchiversInfo = archiversInfoByArchiveIdentity[DEFAULT_ARCHIVE_MODEL_IDENTITY]; + + // Arbitrarily pick the first archiver. + // (we'll use this to derive a datastore and pk style so that they both match) + var arbitraryArchiver = defaultArchiversInfo.archivers[0]; + // console.log('arbitraryArchiver', arbitraryArchiver); + + var newWmd = Waterline.Model.extend({ + identity: DEFAULT_ARCHIVE_MODEL_IDENTITY, + // > Note that we inject a "globalId" for potential use in higher-level frameworks (e.g. Sails) + // > that might want to globalize this model. This way, it'd show up as "Archive" instead of "archive". + // > Remember: Waterline is NOT responsible for any globalization itself, this is just advisory. 
+ globalId: _.capitalize(DEFAULT_ARCHIVE_MODEL_IDENTITY), + primaryKey: 'id', + datastore: arbitraryArchiver.datastore, + attributes: { + id: arbitraryArchiver.attributes[arbitraryArchiver.primaryKey], + createdAt: { type: 'number', autoCreatedAt: true, autoMigrations: { columnType: '_numbertimestamp' } }, + fromModel: { type: 'string', required: true, autoMigrations: { columnType: '_string' } }, + originalRecord: { type: 'json', required: true, autoMigrations: { columnType: '_json' } }, + + // Use `type:'json'` for this: + // (since it might contain pks for records from different datastores) + originalRecordId: { type: 'json', autoMigrations: { columnType: '_json' } }, + } + }); + wmds.push(newWmd); + + }//fi + + }//fi + + + // Now make sure all archive models actually exist, and that they're valid. + _.each(archiversInfoByArchiveIdentity, function(archiversInfo, archiveIdentity) { + var archiveWmd = _.find(wmds, function(wmd){ return wmd.prototype.identity === archiveIdentity; }); + if (!archiveWmd) { + throw new Error('Invalid `archiveModelIdentity` setting. A model declares `archiveModelIdentity: \''+archiveIdentity+'\'`, but there\'s no other model actually registered with that identity to use as an archive!'); + } + + // Validate that this archive model can be used for the purpose of Waterline's .archive() + // > (note that the error messages here should be considerate of the case where someone is + // > upgrading their app from an older version of Sails/Waterline and might happen to have + // > a model named "Archive".) 
+ var EXPECTED_ATTR_NAMES = ['id', 'createdAt', 'fromModel', 'originalRecord', 'originalRecordId']; + var actualAttrNames = _.keys(archiveWmd.prototype.attributes); + var namesOfMissingAttrs = _.difference(EXPECTED_ATTR_NAMES, actualAttrNames); + + try { + + if (namesOfMissingAttrs.length > 0) { + throw flaverr({ + code: 'E_INVALID_ARCHIVE_MODEL', + because: 'it is missing '+ namesOfMissingAttrs.length+' mandatory attribute'+(namesOfMissingAttrs.length===1?'':'s')+': '+namesOfMissingAttrs+'.' + }); + }//• + + if (archiveWmd.prototype.primaryKey !== 'id') { + throw flaverr({ + code: 'E_INVALID_ARCHIVE_MODEL', + because: 'it is using an attribute other than `id` as its logical primary key attribute.' + }); + }//• + + if (_.any(EXPECTED_ATTR_NAMES, { encrypt: true })) { + throw flaverr({ + code: 'E_INVALID_ARCHIVE_MODEL', + because: 'it is using at-rest encryption on one of its mandatory attributes, when it shouldn\'t be.' + }); + }//• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: do more checks (there's a lot of things we should probably check-- e.g. the `type` of each + // mandatory attribute, that no crazy defaultsTo is provided, that the auto-timestamp is correct, etc.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + } catch (err) { + switch (err.code) { + case 'E_INVALID_ARCHIVE_MODEL': + throw new Error( + 'The `'+archiveIdentity+'` model cannot be used as a custom archive, because '+err.because+'\n'+ + 'Please adjust this custom archive model accordingly, or otherwise switch to a different '+ + 'model as your custom archive. (For reference, this `'+archiveIdentity+'` model this is currently '+ + 'configured as the custom archive model for '+archiversInfo.archivers.length+' other '+ + 'model'+(archiversInfo.archivers.length===1?'':'s')+': '+_.pluck(archiversInfo.archivers, 'identity')+'.' 
+ ); + default: + throw err; + } + } + + });//∞ + + + + + + + // Build up a dictionary of datastores (used by our models?) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // TODO: verify the last part of that statement ^^ (not seeing how this is related to "used by our models") + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // ================================================================= + + try { + datastoreMap = buildDatastoreMap(options.adapters, options.datastores); + } catch (err) { throw err; } + + + // Now check out the models and build a schema map (using wl-schema) + // ================================================================= + var internalSchema; + try { + internalSchema = new Schema(wmds, options.defaults); + } catch (err) { throw err; } + + + // Check the internal "schema map" for any junction models that were + // implicitly introduced above and handle them. + _.each(_.keys(internalSchema), function(table) { + if (internalSchema[table].junctionTable) { + // Whenever one is found, flag it as `_private: true` and generate + // a custom constructor for it (based on a clone of the `BaseMetaModel` + // constructor), then push it on to our set of wmds. + internalSchema[table]._private = true; + wmds.push(BaseMetaModel.extend(internalSchema[table])); + }//fi + });//∞ + + + // Now build live models + // ================================================================= + + // Hydrate each model definition (in-place), and also set up a + // reference to it in the model map. + _.each(wmds, function (wmd) { + + // Set the attributes and schema values using the normalized versions from + // Waterline-Schema where everything has already been processed. + var schemaVersion = internalSchema[wmd.prototype.identity]; + + // Set normalized values from the schema version on the model definition. 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: no need to use a prototype here, so let's avoid it to minimize future boggling + // (or if we determine it significantly improves the performance of ORM initialization, then + // let's keep it, but document that here and leave a link to the benchmark as a comment) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + wmd.prototype.identity = schemaVersion.identity; + wmd.prototype.tableName = schemaVersion.tableName; + wmd.prototype.datastore = schemaVersion.datastore; + wmd.prototype.primaryKey = schemaVersion.primaryKey; + wmd.prototype.meta = schemaVersion.meta; + wmd.prototype.attributes = schemaVersion.attributes; + wmd.prototype.schema = schemaVersion.schema; + wmd.prototype.hasSchema = schemaVersion.hasSchema; + + // Mixin junctionTable or throughTable if available + if (_.has(schemaVersion, 'junctionTable')) { + wmd.prototype.junctionTable = schemaVersion.junctionTable; + } - // Grab JunctionTable flag - var meta = collection.meta || {}; - meta.junctionTable = hasOwnProperty(collection.waterline.schema[collection.identity], 'junctionTable') ? - collection.waterline.schema[collection.identity].junctionTable : false; + if (_.has(schemaVersion, 'throughTable')) { + wmd.prototype.throughTable = schemaVersion.throughTable; + } + + var WLModel = buildLiveWLModel(wmd, datastoreMap, context); + + // Store the live Waterline model so it can be used + // internally to create other records + modelMap[WLModel.identity] = WLModel; - schemas[collection.identity] = collection; - schemas[collection.identity].definition = schema; - schemas[collection.identity].attributes = collection._attributes; - schemas[collection.identity].meta = meta; }); - next(null, schemas); - }], + } catch (err) { return done(err); } + + + // Finally, register datastores. 
+ // ================================================================= + + // Simultaneously register each datastore with the correct adapter. + // (This is async because the `registerDatastore` method in adapters + // is async. But since they're not interdependent, we run them all in parallel.) + async.each(_.keys(datastoreMap), function(datastoreName, next) { + + var datastore = datastoreMap[datastoreName]; + + if (_.isFunction(datastore.adapter.registerConnection)) { + return next(new Error('The adapter for datastore `' + datastoreName + '` is invalid: the `registerConnection` method must be renamed to `registerDatastore`.')); + } - // Register the Connections with an adapter - registerConnections: ['buildCollectionSchemas', function(next, results) { - async.each(Object.keys(self.connections), function(item, nextItem) { - var connection = self.connections[item]; - var config = {}; + try { + // Note: at this point, the datastore should always have a usable adapter + // set as its `adapter` property. + + // Check if the datastore's adapter has a `registerDatastore` method + if (!_.has(datastore.adapter, 'registerDatastore')) { + // FUTURE: get rid of this `setImmediate` (or if it's serving a purpose, document what that is) + setImmediate(function() { next(); });//_∏_ + return; + }//-• + + // Add the datastore name as the `identity` property in its config. + datastore.config.identity = datastoreName; + + // Get the identities of all the models which use this datastore, and then build up + // a simple mapping that can be passed down to the adapter. var usedSchemas = {}; + var modelIdentities = _.uniq(datastore.collections); + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // TODO: figure out if we still need this `uniq` or not. If so, document why. + // If not, remove it. (hopefully the latter) + // + // e.g. 
+ // ``` + // assert(modelIdentities.length === datastore.collections.length); + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + _.each(modelIdentities, function(modelIdentity) { + var WLModel = modelMap[modelIdentity]; - // Check if the connection's adapter has a register connection method - if (!hasOwnProperty(connection._adapter, 'registerConnection')) return nextItem(); + // Track info about this model by table name (for use in the adapter) + var tableName; + if (_.has(Object.getPrototypeOf(WLModel), 'tableName')) { + tableName = Object.getPrototypeOf(WLModel).tableName; + } + else { + tableName = modelIdentity; + } + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Suck the `getPrototypeOf()` poison out of this stuff. Mike is too dumb for this. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Copy all values over to a tempory object minus the adapter definition - Object.keys(connection.config).forEach(function(key) { - config[key] = connection.config[key]; - }); + assert(WLModel.tableName === tableName, 'Expecting `WLModel.tableName === tableName`. (Please open an issue: http://sailsjs.com/bugs)'); + assert(WLModel.identity === modelIdentity, 'Expecting `WLModel.identity === modelIdentity`. (Please open an issue: http://sailsjs.com/bugs)'); + assert(WLModel.primaryKey && _.isString(WLModel.primaryKey), 'How flabbergasting! Expecting truthy string in `WLModel.primaryKey`, but got something else. (If you\'re seeing this, there\'s probably a bug in Waterline. Please open an issue: http://sailsjs.com/bugs)'); + assert(WLModel.schema && _.isObject(WLModel.schema), 'Expecting truthy string in `WLModel.schema`, but got something else. 
(Please open an issue: http://sailsjs.com/bugs)'); - // Set an identity on the connection - config.identity = item; + usedSchemas[tableName] = { + primaryKey: WLModel.primaryKey, + definition: WLModel.schema, + tableName: tableName, + identity: modelIdentity + }; + });// - // Grab the schemas used on this connection - connection._collections.forEach(function(coll) { - var identity = coll; - if (hasOwnProperty(self.collections[coll].__proto__, 'tableName')) { - identity = self.collections[coll].__proto__.tableName; + // Call the `registerDatastore` adapter method. + datastore.adapter.registerDatastore(datastore.config, usedSchemas, function(err) { + if (err) { + return next(err); } - var schema = results.buildCollectionSchemas[coll]; - usedSchemas[identity] = schema; + return validateDatastoreConnectivity(datastore, next); }); - // Call the registerConnection method - connection._adapter.registerConnection(config, usedSchemas, function(err) { - if (err) return nextItem(err); - nextItem(); - }); - }, next); - }] - - }, function(err) { - if (err) return cb(err); - self.bootstrap(function(err) { - if (err) return cb(err); - cb(null, { collections: self.collections, connections: self.connections }); - }); - }); + } catch (err) { return next(err); } + + }, function(err) { + if (err) { return done(err); } + + // Build up and return the ontology. + return done(undefined, { + collections: modelMap, + datastores: datastoreMap + }); + + });// + + };// + + + // ┌─┐─┐ ┬┌─┐┌─┐┌─┐┌─┐ ┌─┐┬─┐┌┬┐╔╦╗╔═╗╔═╗╦═╗╔╦╗╔═╗╦ ╦╔╗╔ + // ├┤ ┌┴┬┘├─┘│ │└─┐├┤ │ │├┬┘│││ ║ ║╣ ╠═╣╠╦╝ ║║║ ║║║║║║║ + // └─┘┴ └─┴ └─┘└─┘└─┘ └─┘┴└─┴ ┴o╩ ╚═╝╩ ╩╩╚══╩╝╚═╝╚╩╝╝╚╝ + orm.teardown = function teardown(done) { + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: In WL 0.14, deprecate support for this method in favor of the simplified + // `Waterline.start()` (see bottom of this file). In WL 1.0, remove it altogether. 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + async.each(_.keys(datastoreMap), function(datastoreName, next) { + var datastore = datastoreMap[datastoreName]; + + + // Check if the adapter has a teardown method implemented. + // If not, then just skip this datastore. + if (!_.has(datastore.adapter, 'teardown')) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: get rid of this `setImmediate` (or if it's serving a purpose, document what that is) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + setImmediate(function() { next(); });//_∏_ + return; + }//-• + + // But otherwise, call its teardown method. + try { + datastore.adapter.teardown(datastoreName, next); + } catch (err) { return next(err); } + + }, done); + + }; + + // ╦═╗╔═╗╔╦╗╦ ╦╦═╗╔╗╔ ┌┐┌┌─┐┬ ┬ ┌─┐┬─┐┌┬┐ ┬┌┐┌┌─┐┌┬┐┌─┐┌┐┌┌─┐┌─┐ + // ╠╦╝║╣ ║ ║ ║╠╦╝║║║ │││├┤ │││ │ │├┬┘│││ ││││└─┐ │ ├─┤││││ ├┤ + // ╩╚═╚═╝ ╩ ╚═╝╩╚═╝╚╝ ┘└┘└─┘└┴┘ └─┘┴└─┴ ┴ ┴┘└┘└─┘ ┴ ┴ ┴┘└┘└─┘└─┘ + return orm; + +} + +// Export the Waterline ORM constructor. +module.exports = Waterline; + + + + + + + +// ╔═╗═╗ ╦╔╦╗╔═╗╔╗╔╔═╗╦╔═╗╔╗╔╔═╗ +// ║╣ ╔╩╦╝ ║ ║╣ ║║║╚═╗║║ ║║║║╚═╗ +// ╚═╝╩ ╚═ ╩ ╚═╝╝╚╝╚═╝╩╚═╝╝╚╝╚═╝ + +// Expose the generic, stateless BaseMetaModel constructor for direct access from +// vanilla Waterline applications (available as `Waterline.Model`) +// +// > Note that this is technically a "MetaModel", because it will be "newed up" +// > into a Waterline model instance (WLModel) like `User`, `Pet`, etc. +// > But since, from a userland perspective, there is no real distinction, we +// > still expose this as `Model` for the sake of simplicity. +module.exports.Model = BaseMetaModel; + +// Expose `Collection` as an alias for `Model`, but only for backwards compatibility. 
+module.exports.Collection = BaseMetaModel; +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// ^^FUTURE: In WL 1.0, remove this alias. +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + -}; /** - * Teardown + * Waterline.start() + * + * Build and initialize a new Waterline ORM instance using the specified + * userland ontology, including model definitions, datastore configurations, + * and adapters. * - * Calls the teardown method on each connection if available. + * --EXPERIMENTAL-- + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * FUTURE: Have this return a Deferred using parley (so it supports `await`) + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Dictionary} options + * @property {Dictionary} models + * @property {Dictionary} datastores + * @property {Dictionary} adapters + * @property {Dictionary?} defaultModelSettings + * + * @param {Function} done + * @param {Error?} err + * @param {Ref} orm */ +module.exports.start = function (options, done){ -Waterline.prototype.teardown = function teardown(cb) { - var self = this; + // Verify usage & apply defaults: + if (!_.isFunction(done)) { + throw new Error('Please provide a valid callback function as the 2nd argument to `Waterline.start()`. (Instead, got: `'+done+'`)'); + } - async.each(Object.keys(this.connections), function(item, next) { - var connection = self.connections[item]; + try { - // Check if the adapter has a teardown method implemented - if (!hasOwnProperty(connection._adapter, 'teardown')) return next(); + if (!_.isObject(options) || _.isArray(options) || _.isFunction(options)) { + throw new Error('Please provide a valid dictionary (plain JS object) as the 1st argument to `Waterline.start()`. 
(Instead, got: `'+options+'`)'); + } + + if (!_.isObject(options.adapters) || _.isArray(options.adapters) || _.isFunction(options.adapters)) { + throw new Error('`adapters` must be provided as a valid dictionary (plain JS object) of adapter definitions, keyed by adapter identity. (Instead, got: `'+options.adapters+'`)'); + } + if (!_.isObject(options.datastores) || _.isArray(options.datastores) || _.isFunction(options.datastores)) { + throw new Error('`datastores` must be provided as a valid dictionary (plain JS object) of datastore configurations, keyed by datastore name. (Instead, got: `'+options.datastores+'`)'); + } + if (!_.isObject(options.models) || _.isArray(options.models) || _.isFunction(options.models)) { + throw new Error('`models` must be provided as a valid dictionary (plain JS object) of model definitions, keyed by model identity. (Instead, got: `'+options.models+'`)'); + } + + if (_.isUndefined(options.defaultModelSettings)) { + options.defaultModelSettings = {}; + } else if (!_.isObject(options.defaultModelSettings) || _.isArray(options.defaultModelSettings) || _.isFunction(options.defaultModelSettings)) { + throw new Error('If specified, `defaultModelSettings` must be a dictionary (plain JavaScript object). (Instead, got: `'+options.defaultModelSettings+'`)'); + } + + var VALID_OPTIONS = ['adapters', 'datastores', 'models', 'defaultModelSettings']; + var unrecognizedOptions = _.difference(_.keys(options), VALID_OPTIONS); + if (unrecognizedOptions.length > 0) { + throw new Error('Unrecognized option(s):\n '+unrecognizedOptions+'\n\nValid options are:\n '+VALID_OPTIONS+'\n'); + } + + + // Check adapter identities. + _.each(options.adapters, function (adapter, key){ + + if (_.isUndefined(adapter.identity)) { + adapter.identity = key; + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // Note: We removed the following purely for convenience. 
+ // If this comes up again, we should consider bringing it back instead of the more + // friendly behavior above. But in the mean time, erring on the side of less typing + // in userland by gracefully adjusting the provided adapter def. + // ``` + // throw new Error('All adapters should declare an `identity`. But the adapter passed in under `'+key+'` has no identity! (Keep in mind that this adapter could get require()-d from somewhere else.)'); + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + } + else if (adapter.identity !== key) { + throw new Error('The `identity` explicitly defined on an adapter should exactly match the key under which it is passed in to `Waterline.start()`. But the adapter passed in for key `'+key+'` has an identity that does not match: `'+adapter.identity+'`'); + } + + });// + + + // Now go ahead: start building & initializing the ORM. + var orm = new Waterline(); + + // Register models (checking model identities along the way). + // + // > In addition: Unfortunately, passing in `defaults` in `initialize()` + // > below doesn't _ACTUALLY_ apply the specified model settings as + // > defaults right now -- it only does so for implicit junction models. + // > So we have to do that ourselves for the rest of the models out here + // > first in this iteratee. Also note that we handle `attributes` as a + // > special case. + _.each(options.models, function (userlandModelDef, key){ + + if (_.isUndefined(userlandModelDef.identity)) { + userlandModelDef.identity = key; + } + else if (userlandModelDef.identity !== key) { + throw new Error('If `identity` is explicitly defined on a model definition, it should exactly match the key under which it is passed in to `Waterline.start()`. 
But the model definition passed in for key `'+key+'` has an identity that does not match: `'+userlandModelDef.identity+'`'); + } + + _.defaults(userlandModelDef, _.omit(options.defaultModelSettings, 'attributes')); + if (options.defaultModelSettings.attributes) { + userlandModelDef.attributes = userlandModelDef.attributes || {}; + _.defaults(userlandModelDef.attributes, options.defaultModelSettings.attributes); + } + + orm.registerModel(Waterline.Model.extend(userlandModelDef)); + + });// + + + // Fire 'er up + orm.initialize({ + adapters: options.adapters, + datastores: options.datastores, + defaults: options.defaultModelSettings + }, function (err, _classicOntology) { + if (err) { return done(err); } + + // Attach two private properties for compatibility's sake. + // (These are necessary for utilities that accept `orm` to work.) + // > But note that we do this as non-enumerable properties + // > to make it less tempting to rely on them in userland code. + // > (Instead, use `getModel()`!) + Object.defineProperty(orm, 'collections', { + value: _classicOntology.collections + }); + Object.defineProperty(orm, 'datastores', { + value: _classicOntology.datastores + }); + + return done(undefined, orm); + }); + + } catch (err) { return done(err); } + +};// + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// To test quickly: +// ``` +// require('./').start({adapters: { 'sails-foo': { identity: 'sails-foo' } }, datastores: { default: { adapter: 'sails-foo' } }, models: { user: { attributes: {id: {type: 'number'}}, primaryKey: 'id', datastore: 'default'} }}, function(err, _orm){ if(err){throw err;} console.log(_orm); /* and expose as `orm`: */ orm = _orm; }); +// ``` +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - connection._adapter.teardown(item, next); - }, cb); -}; /** - * Bootstrap + * Waterline.stop() + * + * Tear down the specified Waterline ORM instance. 
+ * + * --EXPERIMENTAL-- + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * FUTURE: Have this return a Deferred using parley (so it supports `await`) + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Ref} orm * - * Auto-migrate all collections + * @param {Function} done + * @param {Error?} err */ +module.exports.stop = function (orm, done){ -Waterline.prototype.bootstrap = function bootstrap(cb) { - var self = this; + // Verify usage & apply defaults: + if (!_.isFunction(done)) { + throw new Error('Please provide a valid callback function as the 2nd argument to `Waterline.stop()`. (Instead, got: `'+done+'`)'); + } - // - // TODO: - // Come back to this -- see https://github.com/balderdashy/waterline/issues/259 - // (the stuff in this file works fine-- the work would be structural changes elsewhere) - // + try { + + if (!_.isObject(orm)) { + throw new Error('Please provide a Waterline ORM instance (obtained from `Waterline.start()`) as the first argument to `Waterline.stop()`. (Instead, got: `'+orm+'`)'); + } + + orm.teardown(function (err){ + if (err) { return done(err); } + return done(); + });//_∏_ - // // Use the schema to get a list of junction tables idents - // // and then determine which are "logical" collections - // // (i.e. 
everything EXCEPT junction tables) - // var junctionTableIdents = _(this.schema).filter({junctionTable: true}).pluck('identity').value(); - // var logicalCollections = _(this.collections).omit(junctionTableIdents).value(); - - // // Flatten logical collections obj into an array for convenience - // var toBeSynced = _.reduce(logicalCollections, function(logicals,coll,ident) { - // logicals.push(coll); - // return logicals; - // }, []); - - // // console.log(junctionTableIdents); - // // console.log(Object.keys(logicalCollections)); - // // console.log('\n', - // // 'Migrating collections ::', - // // _(toBeSynced).pluck('identity').value() - // // ); - - // For now: - var toBeSynced = _.reduce(this.collections, function(resources, collection, ident) { - resources.push(collection); - return resources; - }, []); - - // Run auto-migration strategies on each collection - // async.each(toBeSynced, function(collection, next) { - async.eachSeries(toBeSynced, function(collection, next) { - // async.eachLimit(toBeSynced, 9, function(collection, next) { - collection.sync(next); - }, cb); + } catch (err) { return done(err); } + +}; + + + +/** + * Waterline.getModel() + * + * Look up one of an ORM's models by identity. + * (If no matching model is found, this throws an error.) + * + * --EXPERIMENTAL-- + * + * ------------------------------------------------------------------------------------------ + * @param {String} modelIdentity + * The identity of the model this is referring to (e.g. "pet" or "user") + * + * @param {Ref} orm + * The ORM instance to look for the model in. + * ------------------------------------------------------------------------------------------ + * @returns {Ref} [the Waterline model] + * ------------------------------------------------------------------------------------------ + * @throws {Error} If no such model exists. + * E_MODEL_NOT_REGISTERED + * + * @throws {Error} If anything else goes wrong. 
+ * ------------------------------------------------------------------------------------------ + */ +module.exports.getModel = function (modelIdentity, orm){ + return getModel(modelIdentity, orm); }; diff --git a/lib/waterline/MetaModel.js b/lib/waterline/MetaModel.js new file mode 100644 index 000000000..460040cfa --- /dev/null +++ b/lib/waterline/MetaModel.js @@ -0,0 +1,265 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var LifecycleCallbackBuilder = require('./utils/system/lifecycle-callback-builder'); +var TransformerBuilder = require('./utils/system/transformer-builder'); +var hasSchemaCheck = require('./utils/system/has-schema-check'); + + +/** + * MetaModel + * + * Construct a new MetaModel instance (e.g. `User` or `WLModel`) with methods for + * interacting with a set of structured database records. + * + * > This is really just the same idea as constructing a "Model instance"-- we just + * > use the term "MetaModel" for utmost clarity -- since at various points in the + * > past, individual records were referred to as "model instances" rather than "records". + * > + * > In other words, this file contains the entry point for all ORM methods + * > (e.g. User.find()). So like, `User` is a MetaModel instance. You might + * > call it a "model" or a "model model" -- the important thing is just to + * > understand that we're talking about the same thing in either case. + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * Usage: + * ``` + * var WLModel = new MetaModel(orm, { adapter: require('sails-disk') }); + * // (sorry about the capital "W" in the instance!) + * ``` + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Dictionary} orm + * + * @param {Dictionary} adapterWrapper + * @property {Dictionary} adapter + * The adapter definition. 
+ * ************************************************************ + * FUTURE: probably just remove this second argument. Instead of + * passing it in, it seems like we should just look up the + * appropriate adapter at the top of this constructor function + * (or even just attach `._adapter` in userland- after instantiating + * the new MetaModel instance-- e.g. "WLModel"). The only code that + * directly runs `new MetaModel()` or `new SomeCustomizedMetaModel()` + * is inside of Waterline core anyway.) + * ************************************************************ + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @constructs {MetaModel} + * The base MetaModel from whence other MetaModels are customized. + * Remember: running `new MetaModel()` yields an instance like `User`, + * which is itself generically called a WLModel. + * + * > This is kind of confusing, mainly because capitalization. And + * > it feels silly to nitpick about something so confusing. But at + * > least this way we know what everything's called, and it's consistent. + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +var MetaModel = module.exports = function MetaModel (orm, adapterWrapper) { + + // Attach a private reference to the adapter definition indicated by + // this model's configured `datastore`. + this._adapter = adapterWrapper.adapter; + + // Attach a private reference to the ORM. + this._orm = orm; + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // > Note that we also alias it as `this.waterline`. 
+ this.waterline = orm; + // ^^^ + // FUTURE: remove this alias in Waterline v1.0 + // (b/c it implies that `this.waterline` might be the stateless export from + // the Waterline package itself, rather than what it actually is: a configured + // ORM instance) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // Initialize the `attributes` of our new MetaModel instance (e.g. `User.attributes`) + // to an empty dictionary, unless they're already set. + if (_.isUndefined(this.attributes)) { + this.attributes = {}; + } + else { + if (!_.isObject(this.attributes)) { + throw new Error('Consistency violation: When instantiating this new instance of MetaModel, it became clear (within the constructor) that `this.attributes` was already set, and not a dictionary: '+util.inspect(this.attributes, {depth: 5})+''); + } + else { + // FUTURE: Consider not allowing this, because it's weird. + } + } + + + // Build a dictionary of all lifecycle callbacks applicable to this model, and + // attach it as a private property (`_callbacks`). + this._callbacks = LifecycleCallbackBuilder(this); + //^^FUTURE: bust this utility apart to make it stateless like the others + // + //^^FUTURE: Also, document what's going on here as far as timing-- i.e. answering questions + //like "when are model settings from the original model definition applied?" and + //"How are they set?". + + // Set the `hasSchema` flag for this model. + // > This is based on a handful of factors, including the original model definition, + // > ORM-wide default model settings, and (if defined) an implicit default from the + // > adapter itself. + this.hasSchema = hasSchemaCheck(this); + // ^^FUTURE: change utility's name to either the imperative mood (e.g. `getSchemafulness()`) + // or interrogative mood (`isSchemaful()`) for consistency w/ the other utilities + // (and to avoid confusion, because the name of the flag makes it kind of crazy in this case.) 
+ + // Build a TransformerBuilder instance and attach it as a private property (`_transformer`). + this._transformer = new TransformerBuilder(this.schema); + // ^^FUTURE: bust this utility apart to make it stateless like the others + + return this; + // ^^FUTURE: remove this `return` (it shouldn't be necessary) +}; + + + +// ███╗ ███╗███████╗████████╗██╗ ██╗ ██████╗ ██████╗ ███████╗ +// ████╗ ████║██╔════╝╚══██╔══╝██║ ██║██╔═══██╗██╔══██╗██╔════╝ +// ██╔████╔██║█████╗ ██║ ███████║██║ ██║██║ ██║███████╗ +// ██║╚██╔╝██║██╔══╝ ██║ ██╔══██║██║ ██║██║ ██║╚════██║ +// ██║ ╚═╝ ██║███████╗ ██║ ██║ ██║╚██████╔╝██████╔╝███████║ +// ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝ +// +// MODEL METHODS +// +// Now extend the MetaModel constructor's `prototype` with each built-in model method. +// > This allows for the use of `Foo.find()`, etc., and it's equivalent to attaching +// > each method individually (e.g. `MetaModel.prototype.find = ()->{}`), just with +// > slightly better performance characteristics. +_.extend( + MetaModel.prototype, + { + // DQL + find: require('./methods/find'), + findOne: require('./methods/find-one'), + findOrCreate: require('./methods/find-or-create'), + stream: require('./methods/stream'), + count: require('./methods/count'), + sum: require('./methods/sum'), + avg: require('./methods/avg'), + + // DML + create: require('./methods/create'), + createEach: require('./methods/create-each'), + update: require('./methods/update'), + updateOne: require('./methods/update-one'), + destroy: require('./methods/destroy'), + destroyOne: require('./methods/destroy-one'), + archive: require('./methods/archive'), + archiveOne: require('./methods/archive-one'), + addToCollection: require('./methods/add-to-collection'), + removeFromCollection: require('./methods/remove-from-collection'), + replaceCollection: require('./methods/replace-collection'), + + // Misc. 
+ validate: require('./methods/validate'), + } +); + + + + +// SPECIAL STATIC METAMODEL METHODS +// +// Now add properties to the MetaModel constructor itself. +// (i.e. static properties) + +/** + * MetaModel.extend() + * + * Build & return a new constructor based on the existing constructor in the + * current runtime context (`this`) -- which happens to be our base model + * constructor (MetaModel). This also attaches the specified properties to + * the new constructor's prototype. + * + * + * > Originally taken from `.extend()` in Backbone source: + * > http://backbonejs.org/docs/backbone.html#section-189 + * > + * > Although this is called `extend()`, note that it does not actually modify + * > the original MetaModel constructor. Instead, it first builds a shallow + * > clone of the original constructor and then extends THAT. + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Dictionary?} protoProps + * Optional extra set of properties to attach to the new ctor's prototype. + * (& possibly a brand of breakfast cereal) + * + * @param {Dictionary?} staticProps + * NO LONGER SUPPORTED: An optional, extra set of properties to attach + * directly to the new ctor. + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @returns {Function} [The new constructor -- e.g. `SomeCustomizedMetaModel`] + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @this {Function} [The original constructor -- BaseMetaModel] + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ +MetaModel.extend = function (protoProps, staticProps) { + var thisConstructor = this; + + // Sanity checks: + + // If a prototypal properties were provided, and one of them is under the `constructor` key, + // then freak out. This is no longer supported, and shouldn't still be in use anywhere. 
+ if (protoProps && _.has(protoProps, 'constructor')) { + throw new Error('Consistency violation: The first argument (`protoProps`) provided to Waterline.Model.extend() should never have a `constructor` property. (This kind of usage is no longer supported.)'); + } + + // If any additional custom static properties were specified, then freak out. + // This is no longer supported, and shouldn't still be in use anywhere. + if (!_.isUndefined(staticProps)) { + throw new Error('Consistency violation: Unrecognized extra argument provided to Waterline.Model.extend() (`staticProps` is no longer supported.)'); + } + + //--• + // Now proceed with the classical, Backbone-flavor extending. + + var newConstructor = function() { return thisConstructor.apply(this, arguments); }; + + // Shallow-copy all of the static properties (top-level props of original constructor) + // over to the new constructor. + _.extend(newConstructor, thisConstructor, staticProps); + + // Create an ad hoc "Surrogate" -- a short-lived, bionic kind of a constructor + // that serves as an intermediary... or maybe more of an organ donor? Surrogate + // is probably still best. Anyway it's some dark stuff, that's for sure. Because + // what happens next is that we give it a reference to our original ctor's prototype + // and constructor, then "new up" an instance for us-- but only so that we can cut out + // that newborn instance's `prototype` and put it where the prototype for our new ctor + // is supposed to go. + // + // > Why? Well for one thing, this is important so that our new constructor appears + // > to "inherit" from our original constructor. But likely a more prescient motive + // > is so that our new ctor is a proper clone. That is, it's no longer entangled with + // > the original constructor. + // > (More or less anyway. 
If there are any deeply nested things, like an `attributes` + // > dictionary -- those could still contain deep, entangled references to stuff from the + // > original ctor's prototype. + var Surrogate = function() { this.constructor = newConstructor; }; + Surrogate.prototype = thisConstructor.prototype; + newConstructor.prototype = new Surrogate(); + + // If extra `protoProps` were provided, merge them onto our new ctor's prototype. + // (now that it's a legitimately separate thing that we can safely modify) + if (protoProps) { + _.extend(newConstructor.prototype, protoProps); + } + + // Set a proprietary `__super__` key to keep track of the original ctor's prototype. + // (see http://stackoverflow.com/questions/8596861/super-in-backbone#comment17856929_8614228) + newConstructor.__super__ = thisConstructor.prototype; + + // Return our new ctor. + return newConstructor; + +}; + diff --git a/lib/waterline/VERSION.js b/lib/waterline/VERSION.js deleted file mode 100644 index 9de0ebeb7..000000000 --- a/lib/waterline/VERSION.js +++ /dev/null @@ -1,2 +0,0 @@ -// Store the API Version being used -module.exports = 1; diff --git a/lib/waterline/adapter/aggregateQueries.js b/lib/waterline/adapter/aggregateQueries.js deleted file mode 100644 index 2bc94194e..000000000 --- a/lib/waterline/adapter/aggregateQueries.js +++ /dev/null @@ -1,132 +0,0 @@ -/** - * Aggregate Queries Adapter Normalization - */ - -var _ = require('lodash'); -var async = require('async'); -var normalize = require('../utils/normalize'); -var hasOwnProperty = require('../utils/helpers').object.hasOwnProperty; - -module.exports = { - - // If an optimized createEach exists, use it, otherwise use an asynchronous loop with create() - createEach: function(valuesList, cb, metaContainer) { - var self = this; - var connName, - adapter; - - // Normalize Arguments - cb = normalize.callback(cb); - - // Build Default Error Message - var err = 'No createEach() or create() method defined in adapter!'; - - // Custom user 
adapter behavior - if (hasOwnProperty(this.dictionary, 'createEach')) { - connName = this.dictionary.createEach; - adapter = this.connections[connName]._adapter; - - if (hasOwnProperty(adapter, 'createEach')) { - return adapter.createEach(connName, this.collection, valuesList, cb, metaContainer); - } - } - - // Default behavior - // WARNING: Not transactional! (unless your data adapter is) - var results = []; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'create')) return cb(new Error(err)); - - connName = this.dictionary.create; - adapter = this.connections[connName]._adapter; - - if (!hasOwnProperty(adapter, 'create')) return cb(new Error(err)); - - async.eachSeries(valuesList, function(values, cb) { - adapter.create(connName, self.collection, values, function(err, row) { - if (err) return cb(err); - results.push(row); - cb(); - }, metaContainer); - }, function(err) { - if (err) return cb(err); - cb(null, results); - }); - }, - - // If an optimized findOrCreateEach exists, use it, otherwise use an asynchronous loop with create() - findOrCreateEach: function(attributesToCheck, valuesList, cb, metaContainer) { - var self = this; - var connName; - var adapter; - - // Normalize Arguments - cb = normalize.callback(cb); - - var isObjectArray = false; - - if (_.isObject(attributesToCheck[0])) { - if (attributesToCheck.length > 1 && - attributesToCheck.length !== valuesList.length) { - return cb(new Error('findOrCreateEach: The two passed arrays have to be of the same length.')); - } - isObjectArray = true; - } - - // Clone sensitive data - attributesToCheck = _.clone(attributesToCheck); - valuesList = _.clone(valuesList); - - // Custom user adapter behavior - if (hasOwnProperty(this.dictionary, 'findOrCreateEach')) { - connName = this.dictionary.findOrCreateEach; - adapter = this.connections[connName]._adapter; - - if (hasOwnProperty(adapter, 'findOrCreateEach')) { - return adapter.findOrCreateEach(connName, this.collection, 
valuesList, cb, metaContainer); - } - } - - // Build a list of models - var models = []; - var i = 0; - - async.eachSeries(valuesList, function(values, cb) { - if (!_.isObject(values)) return cb(new Error('findOrCreateEach: Unexpected value in valuesList.')); - // Check that each of the criteria keys match: - // build a criteria query - var criteria = {}; - - if (isObjectArray) { - if (_.isObject(attributesToCheck[i])) { - Object.keys(attributesToCheck[i]).forEach(function(attrName) { - criteria[attrName] = values[attrName]; - }); - if (attributesToCheck.length > 1) { - i++; - } - } else { - return cb(new Error('findOrCreateEach: Element ' + i + ' in attributesToCheck is not an object.')); - } - } else { - attributesToCheck.forEach(function(attrName) { - criteria[attrName] = values[attrName]; - }); - } - - return self.findOrCreate.call(self, criteria, values, function(err, model) { - if (err) return cb(err); - - // Add model to list - if (model) models.push(model); - - cb(null, model); - }, metaContainer); - }, function(err) { - if (err) return cb(err); - cb(null, models); - }); - } - -}; diff --git a/lib/waterline/adapter/compoundQueries.js b/lib/waterline/adapter/compoundQueries.js deleted file mode 100644 index 38b5847c0..000000000 --- a/lib/waterline/adapter/compoundQueries.js +++ /dev/null @@ -1,46 +0,0 @@ -/** - * Compound Queries Adapter Normalization - */ - -var _ = require('lodash'); -var normalize = require('../utils/normalize'); -var hasOwnProperty = require('../utils/helpers').object.hasOwnProperty; - -module.exports = { - - findOrCreate: function(criteria, values, cb, metaContainer) { - var self = this; - var connName, - adapter; - - // If no values were specified, use criteria - if (!values) values = criteria.where ? 
criteria.where : criteria; - - // Normalize Arguments - criteria = normalize.criteria(criteria); - cb = normalize.callback(cb); - - // Build Default Error Message - var err = 'No find() or create() method defined in adapter!'; - - // Custom user adapter behavior - if (hasOwnProperty(this.dictionary, 'findOrCreate')) { - connName = this.dictionary.findOrCreate; - adapter = this.connections[connName]._adapter; - - if (hasOwnProperty(adapter, 'findOrCreate')) { - return adapter.findOrCreate(connName, this.collection, values, cb, metaContainer); - } - } - - // Default behavior - // WARNING: Not transactional! (unless your data adapter is) - this.findOne(criteria, function(err, result) { - if (err) return cb(err); - if (result) return cb(null, result[0]); - - self.create(values, cb, metaContainer); - }, metaContainer); - } - -}; diff --git a/lib/waterline/adapter/ddl/README.md b/lib/waterline/adapter/ddl/README.md deleted file mode 100644 index a43e6e85a..000000000 --- a/lib/waterline/adapter/ddl/README.md +++ /dev/null @@ -1,35 +0,0 @@ -# DDL - -DDL stands for data definition language. It refers to functionality which defines/modifies data structures. For our purposes in Waterline, it refers to methods which read from or modify the schema. - -This submodule implements default behavior for adapter methods like `define`, `describe`, `alter`, and `drop`. - - - - -### Altering a schema - -Some considerations must be taken into account when modifying the schema of a structured database. - -For now, automigrations are for development only. That's because the first thing Waterline will try to do is load all records from the data source in memory. If we can't do that, we give up. This is not the most efficient or useful thing to do, but it is a safety measure to help prevent data from being corrupted. It'll work fine in development, but as soon as you go to production, you'll want to take into consideration the normal precautions around migrating user data. 
- - - \ No newline at end of file diff --git a/lib/waterline/adapter/ddl/alter/index.js b/lib/waterline/adapter/ddl/alter/index.js deleted file mode 100644 index 5ea6cbea5..000000000 --- a/lib/waterline/adapter/ddl/alter/index.js +++ /dev/null @@ -1,168 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var async = require('async'); -var normalize = require('../../../utils/normalize'); -var hasOwnProperty = require('../../../utils/helpers').object.hasOwnProperty; - - -//////////////////////////////////////////////////////////////////////// -//////////////////////////////////////////////////////////////////////// -//////////////////////////////////////////////////////////////////////// - -/** - * NOTICE: - * - * This module is not currently being used. - * Instead, a development-only solution is implemented in `ddl.js.` - * Auto-migrations for production, that carefully backup data, - * would be a great addition in the future, but must be carefully - * evaluated, and probably should not be part of this core Waterline - * module. - */ - -//////////////////////////////////////////////////////////////////////// -//////////////////////////////////////////////////////////////////////// -//////////////////////////////////////////////////////////////////////// - - -/** - * alter - * - * Default definition of `alter` functionality in an adapter. - * Compare physical (original) attributes with specified new schema, - * and change the physical layer accordingly. 
- */ - -module.exports = function(cb) { - - // The collection we're working with - var collectionID = this.collection; - - // Normalize Arguments - cb = normalize.callback(cb); - - // Remove hasMany association keys before sending down to adapter - var schema = _.clone(this.query._schema.schema) || {}; - Object.keys(schema).forEach(function(key) { - if (schema[key].type) return; - delete schema[key]; - }); - - - // Check if the adapter defines an alter method, if so - // go ahead and use that, passing down the new schema. - if (hasOwnProperty(this.dictionary, 'alter')) { - - var connName = this.dictionary.alter; - var adapter = this.connections[connName]._adapter; - - if (hasOwnProperty(adapter, 'alter')) { - return adapter.alter(connName, collectionID, schema, cb); - } - } - - - // Check if an addAttribute and removeAttribute adapter method are defined - if (!hasOwnProperty(this.dictionary, 'addAttribute') || !hasOwnProperty(this.dictionary, 'removeAttribute')) { - return cb(); - // return cb(new Error('Both addAttribute() and removeAttribute() methods are required to use alter()')); - } - - // Find the relevant connections to run this on - var AdderConnection = this.dictionary.addAttribute; - var RemoverConnection = this.dictionary.removeAttribute; - - // Find the relevant adapters to run this with - var AdderAdapter = this.connections[AdderConnection]._adapter; - var RemoverAdapter = this.connections[RemoverConnection]._adapter; - - if (!hasOwnProperty(AdderAdapter, 'addAttribute')) return cb(new Error('Adapter is missing an addAttribute() method')); - if (!hasOwnProperty(RemoverAdapter, 'removeAttribute')) return cb(new Error('Adapter is missing a removeAttribute() method')); - - - this.describe(function afterDescribe(err, originalAttributes) { - if (err) return cb(err); - - // Iterate through each attribute in the new definition - // Used for keeping track of previously undefined attributes - // when updating the data stored at the physical layer. 
- var newAttributes = _.reduce(schema, function checkAttribute(newAttributes, attribute, attrName) { - if (!originalAttributes[attrName]) { - newAttributes[attrName] = attribute; - } - return newAttributes; - }, {}); - - - // Iterate through physical columns in the database - // Used for keeping track of no-longer-existent attributes. - // These must be removed from the physical (original) database. - var deprecatedAttributes = _.reduce(originalAttributes, function(deprecatedAttributes, attribute, attrName) { - if (!schema[attrName]) { - deprecatedAttributes[attrName] = attribute; - } - return deprecatedAttributes; - }, {}); - - - // Iterate through physical columns in the database - // Used for keeping track of attributes which are now different - // than their physical layer equivalents. - var diff = _.reduce(originalAttributes, function(diff, attribute, attrName) { - - // Bail out if the attribute is no longer in the app-level schema - if (!schema[attrName]) { return diff; } - - // var hasChanged = _.diff(schema[attrName], originalAttributes[attrName]); - var hasChanged = false; - - - // - // TODO: - // implement this! (note: it's not particularly easy) - // - // Probably not something that should be done in core. - // - - console.log('\n\n************* ' + collectionID + '.' 
+ attrName + ' ****************'); - console.log('new: ', schema[attrName]); - console.log('orig: ', originalAttributes[attrName]); - if (hasChanged) { - diff[attrName] = schema[attrName]; - } - return diff; - }, {}); - - - async.auto({ - newAttributes: function(done_newAttributes) { - async.eachSeries(_.keys(newAttributes), function(attrName, nextAttr_) { - var attrDef = newAttributes[attrName]; - AdderAdapter.addAttribute(AdderConnection, collectionID, attrName, attrDef, nextAttr_); - }, done_newAttributes); - }, - deprecatedAttributes: function(done_deprecatedAttributes) { - async.eachSeries(_.keys(deprecatedAttributes), function(attrName, nextAttr_) { - RemoverAdapter.removeAttribute(RemoverConnection, collectionID, attrName, nextAttr_); - }, done_deprecatedAttributes); - }, - modifiedAttributes: function(done_modifiedAttributes) { - done_modifiedAttributes(); - } - }, cb); - - - // - // Should we update the data belonging to this attribute to reflect the new properties? - // Realistically, this will mainly be about constraints, and primarily uniquness. 
- // It'd be good if waterline could enforce all constraints at this time, - // but there's a trade-off with destroying people's data - // TODO: Figure this out - // - - }); - -}; diff --git a/lib/waterline/adapter/ddl/index.js b/lib/waterline/adapter/ddl/index.js deleted file mode 100644 index 00172c333..000000000 --- a/lib/waterline/adapter/ddl/index.js +++ /dev/null @@ -1,131 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var normalize = require('../../utils/normalize'); -var getRelations = require('../../utils/getRelations'); -var hasOwnProperty = require('../../utils/helpers').object.hasOwnProperty; - - -/** - * DDL Adapter Normalization - */ - -module.exports = { - - define: function(cb) { - var self = this; - - // Normalize Arguments - cb = normalize.callback(cb); - - // Build Default Error Message - var errMsg = 'No define() method defined in adapter!'; - - // Grab attributes from definition - var schema = _.clone(this.query._schema.schema) || {}; - - // Find any junctionTables that reference this collection - var relations = getRelations({ - schema: self.query.waterline.schema, - parentCollection: self.collection - }); - - // - // TODO: if junction tables don't exist, define them - // console.log(relations); - // - - // Verify that collection doesn't already exist - // and then define it and trigger callback - this.describe(function(err, existingAttributes) { - if (err) return cb(err); - if (existingAttributes) return cb(new Error('Trying to define a collection (' + self.collection + ') which already exists.')); - - // Remove hasMany association keys before sending down to adapter - Object.keys(schema).forEach(function(key) { - if (schema[key].type) return; - delete schema[key]; - }); - - // Find the connection to run this on - if (!hasOwnProperty(self.dictionary, 'define')) return cb(); - - var connName = self.dictionary.define; - var adapter = self.connections[connName]._adapter; - - if (!hasOwnProperty(adapter, 'define')) 
return cb(new Error(errMsg)); - adapter.define(connName, self.collection, schema, cb); - }); - }, - - describe: function(cb) { - - // Normalize Arguments - cb = normalize.callback(cb); - - // Build Default Error Message - var err = 'No describe() method defined in adapter!'; - - // Find the connection to run this on - // NOTE: if `describe` doesn't exist, an error is not being returned. - if (!hasOwnProperty(this.dictionary, 'describe')) return cb(); - - var connName = this.dictionary.describe; - var adapter = this.connections[connName]._adapter; - - if (!hasOwnProperty(adapter, 'describe')) return cb(new Error(err)); - adapter.describe(connName, this.collection, cb); - }, - - drop: function(relations, cb) { - // Allow relations to be optional - if (typeof relations === 'function') { - cb = relations; - relations = []; - } - - relations = []; - - // - // TODO: - // Use a more normalized strategy to get relations so we can omit the extra argument above. - // e.g. getRelations({ schema: self.query.waterline.schema, parentCollection: self.collection }); - // - - // Normalize Arguments - cb = normalize.callback(cb); - - // Build Default Error Message - var err = 'No drop() method defined in adapter!'; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'drop')) return cb(new Error(err)); - - var connName = this.dictionary.drop; - var adapter = this.connections[connName]._adapter; - - if (!hasOwnProperty(adapter, 'drop')) return cb(new Error(err)); - adapter.drop(connName, this.collection, relations, cb); - }, - - alter: function(cb) { - - // Normalize arguments - cb = normalize.callback(cb); - - // Build Default Error Message - var err = 'No alter() method defined in adapter!'; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'alter')) return cb(new Error(err)); - - var connName = this.dictionary.alter; - var adapter = this.connections[connName]._adapter; - - if (!hasOwnProperty(adapter, 'alter')) return 
cb(new Error(err)); - adapter.alter(connName, this.collection, cb); - } - -}; diff --git a/lib/waterline/adapter/dql.js b/lib/waterline/adapter/dql.js deleted file mode 100644 index 579d47482..000000000 --- a/lib/waterline/adapter/dql.js +++ /dev/null @@ -1,274 +0,0 @@ -/** - * Module Dependencies - */ - -var normalize = require('../utils/normalize'); -var schema = require('../utils/schema'); -var hasOwnProperty = require('../utils/helpers').object.hasOwnProperty; -var _ = require('lodash'); - - -/** - * DQL Adapter Normalization - */ -module.exports = { - - hasJoin: function() { - return hasOwnProperty(this.dictionary, 'join'); - }, - - - /** - * join() - * - * If `join` is defined in the adapter, Waterline will use it to optimize - * the `.populate()` implementation when joining collections within the same - * database connection. - * - * @param {[type]} criteria - * @param {Function} cb - */ - join: function(criteria, cb, metaContainer) { - - // Normalize Arguments - criteria = normalize.criteria(criteria); - cb = normalize.callback(cb); - - // Build Default Error Message - var err = 'No join() method defined in adapter!'; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'join')) return cb(new Error(err)); - - var connName = this.dictionary.join; - var adapter = this.connections[connName]._adapter; - - if (!hasOwnProperty(adapter, 'join')) return cb(new Error(err)); - - // Parse Join Criteria and set references to any collection tableName properties. - // This is done here so that everywhere else in the codebase can use the collection identity. - criteria = schema.serializeJoins(criteria, this.query.waterline.schema); - - adapter.join(connName, this.collection, criteria, cb, metaContainer); - }, - - - /** - * create() - * - * Create one or more models. 
- * - * @param {[type]} values [description] - * @param {Function} cb [description] - * @return {[type]} [description] - */ - create: function(values, cb, metaContainer) { - - var globalId = this.query.globalId; - - // Normalize Arguments - cb = normalize.callback(cb); - - if (Array.isArray(values)) { - return this.createEach.call(this, values, cb, metaContainer); - } - - // Build Default Error Message - var err = 'No create() method defined in adapter!'; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'create')) return cb(new Error(err)); - - var connName = this.dictionary.create; - var adapter = this.connections[connName]._adapter; - - if (!hasOwnProperty(adapter, 'create')) return cb(new Error(err)); - adapter.create(connName, this.collection, values, normalize.callback(function afterwards(err, createdRecord) { - if (err) { - if (typeof err === 'object') err.model = globalId; - return cb(err); - } - else return cb(null, createdRecord); - }), metaContainer); - }, - - - /** - * find() - * - * Find a set of models. - * - * @param {[type]} criteria [description] - * @param {Function} cb [description] - * @return {[type]} [description] - */ - find: function(criteria, cb, metaContainer) { - // Normalize Arguments - criteria = normalize.criteria(criteria); - cb = normalize.callback(cb); - - // Build Default Error Message - var err = 'No find() method defined in adapter!'; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'find')) return cb(new Error(err)); - - var connName = this.dictionary.find; - var adapter = this.connections[connName]._adapter; - - if (!adapter.find) return cb(new Error(err)); - adapter.find(connName, this.collection, criteria, cb, metaContainer); - }, - - - /** - * findOne() - * - * Find exactly one model. 
- * - * @param {[type]} criteria [description] - * @param {Function} cb [description] - * @return {[type]} [description] - */ - findOne: function(criteria, cb, metaContainer) { - - // make shallow copy of criteria so original does not get modified - criteria = _.clone(criteria); - - // Normalize Arguments - cb = normalize.callback(cb); - - // Build Default Error Message - var err = '.findOne() requires a criteria. If you want the first record try .find().limit(1)'; - - // If no criteria is specified or where is empty return an error - if (!criteria || criteria.where === null) return cb(new Error(err)); - - // Detects if there is a `findOne` in the adapter. Use it if it exists. - if (hasOwnProperty(this.dictionary, 'findOne')) { - var connName = this.dictionary.findOne; - var adapter = this.connections[connName]._adapter; - - if (adapter.findOne) { - // Normalize Arguments - criteria = normalize.criteria(criteria); - return adapter.findOne(connName, this.collection, criteria, cb, metaContainer); - } - } - - // Fallback to use `find()` to simulate a `findOne()` - // Enforce limit to 1 - criteria.limit = 1; - - this.find(criteria, function(err, models) { - if (!models) return cb(err); - if (models.length < 1) return cb(err); - - cb(null, models); - }, metaContainer); - }, - - /** - * [count description] - * @param {[type]} criteria [description] - * @param {Function} cb [description] - * @return {[type]} [description] - */ - count: function(criteria, cb, metaContainer) { - var connName; - - // Normalize Arguments - cb = normalize.callback(cb); - criteria = normalize.criteria(criteria); - - // Build Default Error Message - var err = '.count() requires the adapter define either a count method or a find method'; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'count')) { - - // If a count method isn't defined make sure a find method is - if (!hasOwnProperty(this.dictionary, 'find')) return cb(new Error(err)); - - // Use the find method - 
connName = this.dictionary.find; - } - - if (!connName) connName = this.dictionary.count; - var adapter = this.connections[connName]._adapter; - - if (hasOwnProperty(adapter, 'count')) return adapter.count(connName, this.collection, criteria, cb, metaContainer); - - this.find(criteria, function(err, models) { - if (err) return cb(err); - var count = models && models.length || 0; - cb(err, count); - }, metaContainer); - }, - - - /** - * [update description] - * @param {[type]} criteria [description] - * @param {[type]} values [description] - * @param {Function} cb [description] - * @return {[type]} [description] - */ - update: function(criteria, values, cb, metaContainer) { - var globalId = this.query.globalId; - - - // Normalize Arguments - cb = normalize.callback(cb); - criteria = normalize.criteria(criteria); - - if (criteria === false) { - return cb(null, []); - } else if (!criteria) { - return cb(new Error('No criteria or id specified!')); - } - - // Build Default Error Message - var err = 'No update() method defined in adapter!'; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'update')) return cb(new Error(err)); - - var connName = this.dictionary.update; - var adapter = this.connections[connName]._adapter; - - adapter.update(connName, this.collection, criteria, values, normalize.callback(function afterwards(err, updatedRecords) { - if (err) { - if (typeof err === 'object') err.model = globalId; - return cb(err); - } - return cb(null, updatedRecords); - }), metaContainer); - }, - - - /** - * [destroy description] - * @param {[type]} criteria [description] - * @param {Function} cb [description] - * @return {[type]} [description] - */ - destroy: function(criteria, cb, metaContainer) { - - // Normalize Arguments - cb = normalize.callback(cb); - criteria = normalize.criteria(criteria); - - // Build Default Error Message - var err = 'No destroy() method defined in adapter!'; - - // Find the connection to run this on - if 
(!hasOwnProperty(this.dictionary, 'destroy')) return cb(new Error(err)); - - var connName = this.dictionary.destroy; - var adapter = this.connections[connName]._adapter; - - adapter.destroy(connName, this.collection, criteria, cb, metaContainer); - } - -}; diff --git a/lib/waterline/adapter/errors.js b/lib/waterline/adapter/errors.js deleted file mode 100644 index 50ec675c8..000000000 --- a/lib/waterline/adapter/errors.js +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Module dependencies - */ -var _ = require('lodash'); - - -/** - * Adapter Error Definitions - * @type {Object} - */ -module.exports = { - - invalid: defineError({ - message: 'Adapter rejected invalid input.' - }), - - error: defineError({ - message: 'Adapter encountered an unexpected error.' - }) - -}; - - -/** - * @param {Object} options [message, etc.] - */ -function defineError(options) { - _.defaults(options, { - data: {} - }); - - return options; -} diff --git a/lib/waterline/adapter/index.js b/lib/waterline/adapter/index.js deleted file mode 100644 index 484ddcd22..000000000 --- a/lib/waterline/adapter/index.js +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Base Adapter Definition - */ - -var _ = require('lodash'); - -var Adapter = module.exports = function(options) { - - // Ensure the connections are set - this.connections = options.connections || {}; - - // Ensure the dictionary is built - this.dictionary = options.dictionary || {}; - - // Set a Query instance to get access to top - // level query functions - this.query = options.query || {}; - - // Set Collection Name - this.collection = options.collection || ''; - - // Set Model Identity - this.identity = options.identity || ''; - - return this; -}; - -_.extend( - Adapter.prototype, - require('./dql'), - require('./ddl'), - require('./compoundQueries'), - require('./aggregateQueries'), - require('./setupTeardown'), - require('./sync'), - require('./stream') -); diff --git a/lib/waterline/adapter/setupTeardown.js b/lib/waterline/adapter/setupTeardown.js 
deleted file mode 100644 index 3e4d8aa67..000000000 --- a/lib/waterline/adapter/setupTeardown.js +++ /dev/null @@ -1,19 +0,0 @@ -/** - * Setup and Teardown Adapter Normalization - */ - -module.exports = { - - // Teardown is fired once-per-adapter - // Should tear down any open connections, etc. for each collection - // (i.e. tear down any remaining connections to the underlying data model) - // (i.e. flush data to disk before the adapter shuts down) - teardown: function(cb) { - if (this.adapter.teardown) { - return this.adapter.teardown.apply(this, arguments); - }; - - cb(); - } - -}; diff --git a/lib/waterline/adapter/stream.js b/lib/waterline/adapter/stream.js deleted file mode 100644 index b6ccef169..000000000 --- a/lib/waterline/adapter/stream.js +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Module Dependencies - */ - -var normalize = require('../utils/normalize'); -var hasOwnProperty = require('../utils/helpers').object.hasOwnProperty; - -/** - * Stream Normalization - */ - -module.exports = { - - // stream.write() is used to send data - // Must call stream.end() to complete stream - stream: function(criteria, stream, metaContainer) { - - // Normalize Arguments - criteria = normalize.criteria(criteria); - - // Build Default Error Message - var err = 'No stream() method defined in adapter!'; - - // Find the connection to run this on - if (!hasOwnProperty(this.dictionary, 'stream')) return stream.end(new Error(err)); - - var connName = this.dictionary.stream; - var adapter = this.connections[connName]._adapter; - - if (!hasOwnProperty(adapter, 'stream')) return stream.end(new Error(err)); - adapter.stream(connName, this.collection, criteria, stream, metaContainer); - } - -}; diff --git a/lib/waterline/adapter/sync/index.js b/lib/waterline/adapter/sync/index.js deleted file mode 100644 index b104a730d..000000000 --- a/lib/waterline/adapter/sync/index.js +++ /dev/null @@ -1,7 +0,0 @@ -// TODO: probably can eliminate this file -module.exports = { - migrateDrop: 
require('./strategies/drop.js'), - migrateAlter: require('./strategies/alter.js'), - migrateCreate: require('./strategies/create.js'), - migrateSafe: require('./strategies/safe.js') -}; diff --git a/lib/waterline/adapter/sync/strategies/alter.js b/lib/waterline/adapter/sync/strategies/alter.js deleted file mode 100644 index 94aea9161..000000000 --- a/lib/waterline/adapter/sync/strategies/alter.js +++ /dev/null @@ -1,271 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var async = require('async'); -var getRelations = require('../../../utils/getRelations'); - - -/** - * Try and synchronize the underlying physical-layer schema - * to work with our app's collections. (i.e. models) - * - * @param {Function} cb - */ -module.exports = function(cb) { - var self = this; - - // Refuse to run this migration strategy in production. - if (process.env.NODE_ENV === 'production') { - return cb(new Error('`migrate: "alter"` strategy is not supported in production, please change to `migrate: "safe"`.')); - } - - // Find any junctionTables that reference this collection - var relations = getRelations({ - schema: self.query.waterline.schema, - parentCollection: self.collection - }); - - var backupData; - - // Check that collection exists-- - self.describe(function afterDescribe(err, attrs) { - - if (err) return cb(err); - - // if it doesn't go ahead and add it and get out - if (!attrs) return self.define(cb); - - var collectionName = _.find(self.query.waterline.schema, {tableName: self.collection}).identity; - - // Create a mapping of column names -> attribute names - var columnNamesMap = _.reduce(self.query.waterline.schema[collectionName].attributes, function(memo, val, key) { - // If the attribute has a custom column name, use it as the key for the mapping - if (val.columnName) { - memo[val.columnName] = key; - // Otherwise just use the attribute name - } else { - memo[key] = key; - } - return memo; - }, {}); - - // Transform column names into attribute 
names using the columnNamesMap, - // removing attributes that no longer exist (they will be dropped) - attrs = _.compact(_.keys(attrs).map(function(key) { - return columnNamesMap[key]; - })); - - // - // TODO: - // Take a look and see if anything important has changed. - // If it has (at all), we still have to follow the naive strategy below, - // but it will at least save time in the general case. - // (because it really sucks to have to wait for all of this to happen - // every time you initialize Waterline.) - // - - - // - // OK so we have to fix up the schema and migrate the data... - // - // ... we'll let Waterline do it for us. - // - // Load all data from this collection into memory. - // If this doesn't work, crash to avoid corrupting any data. - // (see `waterline/lib/adapter/ddl/README.md` for more info about this) - // - // Make sure we only select the existing keys for the schema. - // The default "find all" will select each attribute in the schema, which - // now includes attributes that haven't been added to the table yet, so - // on SQL databases the query will fail with "unknown field" error. - // - var hasSchema = self.query.hasSchema; - - // If we have a schema, make sure we only select the existing keys for the schema. - // The default "find all" will select each attribute in the schema, which - // now includes attributes that haven't been added to the table yet, so - // on SQL databases the query will fail with "unknown field" error. - // - // If we don't have a schema then we need to select all the values to make - // sure we don't lose data in the process. - var queryCriteria; - - if (hasSchema) { - queryCriteria = {select: attrs}; - } else { - queryCriteria = {}; - } - - self.query.find(queryCriteria, function(err, existingData) { - - if (err) { - // - // TODO: - // If this was a memory error, log a more useful error - // explaining what happened. - // - return cb(err); - } - - // - // From this point forward, we must be very careful. 
- // - backupData = _.cloneDeep(existingData, function dealWithBuffers(val) { - if (val instanceof Buffer) { - return val.slice(); - } - }); - - - // Check to see if there is anything obviously troublesome - // that will cause the drop and redefinition of our schemaful - // collections to fail. - // (i.e. violation of uniqueness constraints) - var attrs = self.query.waterline.collections[self.identity]._attributes; - var pk = self.query.waterline.collections[self.identity].primaryKey; - var attrsAsArray = _.reduce(_.cloneDeep(attrs), function(memo, attrDef, attrName) { - attrDef.name = attrName; - memo.push(attrDef); - return memo; - }, []); - var uniqueAttrs = _.where(attrsAsArray, {unique: true}); - async.each(uniqueAttrs, function(uniqueAttr, each_cb) { - var uniqueData = _.uniq(_.pluck(existingData, uniqueAttr.name)); - - // Remove any unique values who have their values set to undefined or null - var cleansedExistingData = _.filter(existingData, function(val) { - return [undefined, null].indexOf(val[uniqueAttr.name]) < 0; - }); - - // Remove any undefined or null values from the unique data - var cleansedUniqueData = _.filter(uniqueData, function(val) { - return [undefined, null].indexOf(val) < 0; - }); - - if (cleansedUniqueData.length < cleansedExistingData.length) { - // Some existing data violates a new uniqueness constraint - var prompt = require('prompt'); - prompt.start(); - console.log( - 'One or more existing records in your database violate ' + - 'a new uniqueness constraint\n' + - 'on `' + uniqueAttr.name + '` ' + - 'in your `' + self.identity + '` model.'); - console.log(); - console.log('Should we automatically remove duplicates?'); - console.log(); - console.log('** WARNING: DO NOT TYPE "y" IF YOU ARE WORKING WITH PRODUCTION DATA **'); - // var laptimer = setInterval(function beepbeepbeepbeep(){ - // process.stdout.write('\u0007'); - // }, 1500); - prompt.get(['y/n'], function(err, results) { - // clearInterval(laptimer); - if (err) return 
each_cb(err); - var wasConfirmedByUser = _.isString(results['y/n']) && results['y/n'].match(/y/); - if (wasConfirmedByUser) { - - // Wipe out duplicate records in `backupData` and continue - // to perform the automigration - var diff = _.difference(existingData, _.uniq(existingData, false, uniqueAttr.name)); - - var destroyCriteria = {}; - destroyCriteria[pk] = _.pluck(diff, pk); - // console.log(diff, '\n', destroyCriteria); - backupData = _.remove(backupData, function(datum) { - return !_.contains(destroyCriteria[pk], datum[pk]); - }); - return each_cb(); - // console.log(backupData); - // throw new Error(); - // self.query.waterline.collections[self.collection].destroy(destroyCriteria).exec(each_cb); - } else return each_cb(new Error('Auto-migration aborted. Please migrate your data manually and then try this again.')); - }); - } else return each_cb(); - }, function afterAsyncEach(err) { - if (err) return cb(err); - - // Now we'll drop the collection. - self.drop(relations, function(err) { - if (err) return uhoh(err, backupData, cb); - - // Now we'll redefine the collection. - self.define(function(err) { - if (err) return uhoh(err, backupData, cb); - - // Now we'll create the `backupData` again, - // being careful not to run any lifecycle callbacks - // and disable automatic updating of `createdAt` and - // `updatedAt` attributes: - // - // ((((TODO: actually be careful about said things)))) - // - self.query.createEach(backupData, function(err) { - if (err) return uhoh(err, backupData, cb); - - // Done. - return cb(); - }); - - }); // - }); // - }); // - }); - - - // - // The old way-- (doesn't always work, and is way more - // complex than we should spend time on for now) - // - // || || || || || || - // \/ \/ \/ \/ \/ \/ - // - // Otherwise, if it *DOES* exist, we'll try and guess what changes need to be made - // self.alter(function(err) { - // if (err) return cb(err); - // cb(); - // }); - - }); -}; - - -/** - * uh oh. 
- * - * If we can't persist the data again, we'll log an error message, then - * stream the data to stdout as JSON to make sure that it gets persisted - * SOMEWHERE at least. - * - * (this is another reason this automigration strategy cannot be used in - * production currently..) - * - * @param {[type]} err [description] - * @param {[type]} backupData [description] - * @param {Function} cb [description] - * @return {[type]} [description] - */ - -function uhoh(err, backupData, cb) { - - console.error('Waterline encountered a fatal error when trying to perform the `alter` auto-migration strategy.'); - console.error('In a couple of seconds, the data (cached in memory) will be logged to stdout.'); - console.error('(a failsafe put in place to preserve development data)'); - console.error(); - console.error('In the mean time, here\'s the error:'); - console.error(); - console.error(err); - console.error(); - console.error(); - - setTimeout(function() { - console.error('================================'); - console.error('Data backup:'); - console.error('================================'); - console.error(''); - console.log(backupData); - return cb(err); - }, 1200); - -} diff --git a/lib/waterline/adapter/sync/strategies/create.js b/lib/waterline/adapter/sync/strategies/create.js deleted file mode 100644 index b9b011b29..000000000 --- a/lib/waterline/adapter/sync/strategies/create.js +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var async = require('async'); -var hasOwnProperty = require('../../../utils/helpers').object.hasOwnProperty; - - -/** - * Try and synchronize the underlying physical-layer schema - * in safely manner by only adding new collections and new attributes - * to work with our app's collections. (i.e. 
models) - * - * @param {Function} cb - */ -module.exports = function(cb) { - var self = this; - - - // Check that collection exists - self.describe(function afterDescribe(err, attrs) { - - if (err) return cb(err); - - // if it doesn't go ahead and add it and get out - if (!attrs) return self.define(cb); - - // Check if an addAttribute adapter method is defined - if (!hasOwnProperty(self.dictionary, 'addAttribute')) { - return cb(); - } - - // Find the relevant connections to run this on - var connName = self.dictionary.addAttribute; - var adapter = self.connections[connName]._adapter; - - // Check if adapter has addAttribute method - if (!hasOwnProperty(adapter, 'addAttribute')) { - return cb(); - } - - // The collection we're working with - var collectionID = self.collection; - - // Remove hasMany association keys before sending down to adapter - var schema = _.clone(self.query._schema.schema) || {}; - Object.keys(schema).forEach(function(key) { - if (schema[key].type) return; - delete schema[key]; - }); - - // Iterate through each attribute in the new definition - // Used for keeping track of previously undefined attributes - // when updating the data stored at the physical layer. 
- var newAttributes = _.reduce(schema, function checkAttribute(newAttributes, attribute, attrName) { - if (!attrs[attrName]) { - newAttributes[attrName] = attribute; - } - return newAttributes; - }, {}); - - // Add new attributes - async.eachSeries(_.keys(newAttributes), function(attrName, next) { - var attrDef = newAttributes[attrName]; - adapter.addAttribute(connName, collectionID, attrName, attrDef, next); - }, cb); - - }); -}; diff --git a/lib/waterline/adapter/sync/strategies/drop.js b/lib/waterline/adapter/sync/strategies/drop.js deleted file mode 100644 index 7681bef59..000000000 --- a/lib/waterline/adapter/sync/strategies/drop.js +++ /dev/null @@ -1,38 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var getRelations = require('../../../utils/getRelations'); - - -/** - * Drop and recreate collection - * - * @param {Function} cb - */ - -module.exports = function drop(cb) { - var self = this; - - // Refuse to run this migration strategy in production. - if (process.env.NODE_ENV === 'production') { - return cb(new Error('`migrate: "drop"` strategy is not supported in production, please change to `migrate: "safe"`.')); - } - - // Find any junctionTables that reference this collection - // var relations = getRelations({ - // schema: self.query.waterline.schema, - // parentCollection: self.collection - // }); - - // Pass along relations to the drop method - // console.log('Dropping ' + self.collection); - this.drop(function afterDrop(err, data) { - if (err) return cb(err); - - self.define(function() { - cb.apply(null, Array.prototype.slice.call(arguments)); - }); - }); -}; diff --git a/lib/waterline/adapter/sync/strategies/safe.js b/lib/waterline/adapter/sync/strategies/safe.js deleted file mode 100644 index d5052000d..000000000 --- a/lib/waterline/adapter/sync/strategies/safe.js +++ /dev/null @@ -1,15 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); - - -/** - * Do absolutely nothing to the schema of the underlying 
datastore. - * - * @param {Function} cb - */ -module.exports = function(cb) { - cb(); -}; diff --git a/lib/waterline/collection/defaults.js b/lib/waterline/collection/defaults.js deleted file mode 100644 index dc6c2c09b..000000000 --- a/lib/waterline/collection/defaults.js +++ /dev/null @@ -1,10 +0,0 @@ - -/** - * Default Collection properties - * @type {Object} - */ -module.exports = { - - migrate: 'alter' - -}; diff --git a/lib/waterline/collection/index.js b/lib/waterline/collection/index.js deleted file mode 100644 index 4a6bb09fe..000000000 --- a/lib/waterline/collection/index.js +++ /dev/null @@ -1,56 +0,0 @@ -/** - * Dependencies - */ - -var _ = require('lodash'); -var extend = require('../utils/extend'); -var inherits = require('util').inherits; - -// Various Pieces -var Core = require('../core'); -var Query = require('../query'); - -/** - * Collection - * - * A prototype for managing a collection of database - * records. - * - * This file is the prototype for collections defined using Waterline. - * It contains the entry point for all ORM methods (e.g. User.find()) - * - * Methods in this file defer to the adapter for their true implementation: - * the implementation here just validates and normalizes the parameters. 
- * - * @param {Object} waterline, reference to parent - * @param {Object} options - * @param {Function} callback - */ - -var Collection = module.exports = function(waterline, connections, cb) { - - var self = this; - - // Set the named connections - this.connections = connections || {}; - - // Cache reference to the parent - this.waterline = waterline; - - // Default Attributes - this.attributes = this.attributes || {}; - - // Instantiate Base Collection - Core.call(this); - - // Instantiate Query Language - Query.call(this); - - return this; -}; - -inherits(Collection, Core); -inherits(Collection, Query); - -// Make Extendable -Collection.extend = extend; diff --git a/lib/waterline/collection/loader.js b/lib/waterline/collection/loader.js deleted file mode 100644 index 65c74c3d8..000000000 --- a/lib/waterline/collection/loader.js +++ /dev/null @@ -1,108 +0,0 @@ -/** - * Module Dependencies - */ - -var hasOwnProperty = require('../utils/helpers').object.hasOwnProperty; - -/** - * Collection Loader - * - * @param {Object} connections - * @param {Object} collection - * @api public - */ - -var CollectionLoader = module.exports = function(collection, connections, defaults) { - - this.defaults = defaults; - - // Normalize and validate the collection - this.collection = this._validate(collection, connections); - - // Find the named connections used in the collection - this.namedConnections = this._getConnections(collection, connections); - - return this; -}; - -/** - * Initalize the collection - * - * @param {Object} context - * @param {Function} callback - * @api public - */ - -CollectionLoader.prototype.initialize = function initialize(context) { - return new this.collection(context, this.namedConnections); -}; - -/** - * Validate Collection structure. 
- * - * @param {Object} collection - * @param {Object} connections - * @api private - */ - -CollectionLoader.prototype._validate = function _validate(collection, connections) { - - // Throw Error if no Tablename/Identity is set - if (!hasOwnProperty(collection.prototype, 'tableName') && !hasOwnProperty(collection.prototype, 'identity')) { - throw new Error('A tableName or identity property must be set.'); - } - - // Ensure identity is lowercased - collection.prototype.identity = collection.prototype.identity.toLowerCase(); - - // Set the defaults - collection.prototype.defaults = this.defaults; - - // Find the connections used by this collection - // If none is specified check if a default connection exist - if (!hasOwnProperty(collection.prototype, 'connection')) { - - // Check if a default connection was specified - if (!hasOwnProperty(connections, 'default')) { - throw new Error('No adapter was specified for collection: ' + collection.prototype.identity); - } - - // Set the connection as the default - collection.prototype.connection = 'default'; - } - - return collection; -}; - -/** - * Get the named connections - * - * @param {Object} collection - * @param {Object} connections - * @api private - */ - -CollectionLoader.prototype._getConnections = function _getConnections(collection, connections) { - - // Hold the used connections - var usedConnections = {}; - - // Normalize connection to array - if (!Array.isArray(collection.prototype.connection)) { - collection.prototype.connection = [collection.prototype.connection]; - } - - // Set the connections used for the adapter - collection.prototype.connection.forEach(function(conn) { - - // Ensure the named connection exist - if (!hasOwnProperty(connections, conn)) { - var msg = 'The connection ' + conn + ' specified in ' + collection.prototype.identity + ' does not exist!'; - throw new Error(msg); - } - - usedConnections[conn] = connections[conn]; - }); - - return usedConnections; -}; diff --git 
a/lib/waterline/connections/index.js b/lib/waterline/connections/index.js deleted file mode 100644 index e68b53e07..000000000 --- a/lib/waterline/connections/index.js +++ /dev/null @@ -1,76 +0,0 @@ -/** - * Module Dependencies - */ -var _ = require('lodash'); -var util = require('util'); -var hasOwnProperty = require('../utils/helpers').object.hasOwnProperty; -var API_VERSION = require('../VERSION'); - -/** - * Connections are active "connections" to a specific adapter for a specific configuration. - * This allows you to have collections share named connections. - * - * @param {Object} adapters - * @param {Object} objects - * @api public - */ - -var Connections = module.exports = function(adapters, options) { - - // Hold the active connections - this._connections = {}; - - // Build the connections - this._build(adapters, options); - - return this._connections; -}; - - -/** - * Builds up a named connections object with a clone of the adapter - * it will use for the connection. - * - * @param {Object} adapters - * @param {Object} options - * @api private - */ -Connections.prototype._build = function _build(adapters, options) { - - var self = this; - - // For each of the configured connections in options, find the required - // adapter by name and build up an object that can be attached to the - // internal connections object. - Object.keys(options).forEach(function(key) { - var config = options[key]; - var msg, - connection; - - // Ensure an adapter module is specified - if (!hasOwnProperty(config, 'adapter')) { - msg = util.format('Connection ("%s") is missing a required property (`adapter`). You should indicate the name of one of your adapters.', key); - throw new Error(msg); - } - - // Ensure the adapter exists in the adapters options - if (!hasOwnProperty(adapters, config.adapter)) { - if (typeof config.adapter !== 'string') { - msg = util.format('Invalid `adapter` property in connection `%s`. 
It should be a string (the name of one of the adapters you passed into `waterline.initialize()`)', key); - } - else msg = util.format('Unknown adapter "%s" for connection `%s`. You should double-check that the connection\'s `adapter` property matches the name of one of your adapters. Or perhaps you forgot to include your "%s" adapter when you called `waterline.initialize()`...', config.adapter, key, config.adapter); - throw new Error(msg); - } - - // Build the connection config - connection = { - config: _.merge({}, adapters[config.adapter].defaults, config, { version: API_VERSION }), - _adapter: _.cloneDeep(adapters[config.adapter]), - _collections: [] - }; - - // Attach the connections to the connection library - self._connections[key] = connection; - }); - -}; diff --git a/lib/waterline/core/dictionary.js b/lib/waterline/core/dictionary.js deleted file mode 100644 index 35feddf7a..000000000 --- a/lib/waterline/core/dictionary.js +++ /dev/null @@ -1,56 +0,0 @@ -var _ = require('lodash'); - -/** - * Handle Building an Adapter/Connection dictionary - * - * @param {Object} connections - * @param {Array} ordered - * @return {Object} - * @api public - * - * Manages a 'dictionary' object of the following structure: - * { - * CONNECTION: { - * METHOD: ADAPTER_NAME - * } - * } - */ -var Dictionary = module.exports = function(connections, ordered) { - this.dictionary = this._build(connections); - return this._smash(ordered); -}; - -/** - * Build Dictionary. This maps adapter methods to the effective connection - * for which the method is pertinent. - * - * @param {Object} connections - * @api private - */ -Dictionary.prototype._build = function _build(connections) { - return _.mapValues(connections, function(connection, connectionName) { - var adapter = connection._adapter || { }; - - return _.mapValues(adapter, function(method) { - return connectionName; - }); - }); -}; - -/** - * Combine Dictionary into a single level object. 
- * - * Appends methods from other adapters onto the left most connection adapter, - * but does not override any existing methods defined in the leftmost adapter. - * - * @param {Array} ordered - * @return {Object} - * @api private - */ -Dictionary.prototype._smash = function _smash(ordered) { - var mergeArguments = _.map((ordered || [ ]).reverse(), function(adapterName) { - return this.dictionary[adapterName]; - }, this); - - return _.merge.apply(null, mergeArguments); -}; diff --git a/lib/waterline/core/index.js b/lib/waterline/core/index.js deleted file mode 100644 index 422fb9e3f..000000000 --- a/lib/waterline/core/index.js +++ /dev/null @@ -1,151 +0,0 @@ -/** - * Dependencies - */ - -var _ = require('lodash'); -var schemaUtils = require('../utils/schema'); -var COLLECTION_DEFAULTS = require('../collection/defaults'); -var Model = require('../model'); -var Cast = require('./typecast'); -var Schema = require('./schema'); -var Dictionary = require('./dictionary'); -var Validator = require('./validations'); -var Transformer = require('./transformations'); -var hasOwnProperty = require('../utils/helpers').object.hasOwnProperty; - -/** - * Core - * - * Setup the basic Core of a collection to extend. - */ - -var Core = module.exports = function(options) { - - options = options || {}; - - // Set Defaults - this.adapter = this.adapter || {}; - this._attributes = this.attributes; - this.connections = this.connections || {}; - - this.defaults = _.merge(COLLECTION_DEFAULTS, this.defaults); - - // Construct our internal objects - this._cast = new Cast(); - this._schema = new Schema(this); - this._validator = new Validator(); - - // Normalize attributes, extract instance methods, and callbacks - // Note: this is ordered for a reason! 
- this._callbacks = schemaUtils.normalizeCallbacks(this); - this._instanceMethods = schemaUtils.instanceMethods(this.attributes); - this._attributes = schemaUtils.normalizeAttributes(this._attributes); - - this.hasSchema = Core._normalizeSchemaFlag.call(this); - - this.migrate = Object.getPrototypeOf(this).hasOwnProperty('migrate') ? - this.migrate : this.defaults.migrate; - - // Initalize the internal values from the Collection - Core._initialize.call(this, options); - - return this; -}; - -/** - * Initialize - * - * Setups internal mappings from an extended collection. - */ - -Core._initialize = function(options) { - var self = this; - - options = options || {}; - - // Extend a base Model with instance methods - this._model = new Model(this, this._instanceMethods); - - // Cache the attributes from the schema builder - var schemaAttributes = this.waterline.schema[this.identity].attributes; - - // Remove auto attributes for validations - var _validations = _.clone(this._attributes); - if (this.autoPK) delete _validations.id; - if (this.autoCreatedAt) delete _validations.createdAt; - if (this.autoUpdatedAt) delete _validations.updatedAt; - - // If adapter exposes any reserved attributes, pass them to the schema - var connIdx = Array.isArray(this.connection) ? 
this.connection[0] : this.connection; - - var adapterInfo = {}; - if (this.connections[connIdx] && this.connections[connIdx]._adapter) { - adapterInfo = this.connections[connIdx]._adapter; - } - - var reservedAttributes = adapterInfo.reservedAttributes || {}; - - // Initialize internal objects from attributes - this._schema.initialize(this._attributes, this.hasSchema, reservedAttributes); - this._cast.initialize(this._schema.schema); - this._validator.initialize(_validations, this.types, this.defaults.validations); - - // Set the collection's primaryKey attribute - Object.keys(schemaAttributes).forEach(function(key) { - if (hasOwnProperty(schemaAttributes[key], 'primaryKey') && schemaAttributes[key].primaryKey) { - self.primaryKey = key; - } - }); - - // Build Data Transformer - this._transformer = new Transformer(schemaAttributes, this.waterline.schema); - - // Transform Schema - this._schema.schema = this._transformer.serialize(this._schema.schema, 'schema'); - - // Build up a dictionary of which methods run on which connection - this.adapterDictionary = new Dictionary(_.cloneDeep(this.connections), this.connection); - - // Add this collection to the connection - Object.keys(this.connections).forEach(function(conn) { - self.connections[conn]._collections = self.connections[conn]._collections || []; - self.connections[conn]._collections.push(self.identity); - }); - - // Remove remnants of user defined attributes - delete this.attributes; -}; - -/** - * Normalize Schema Flag - * - * Normalize schema setting by looking at the model first to see if it is defined, if not look at - * the connection and see if it's defined and if not finally look into the adapter and check if - * there is a default setting. If not found anywhere be safe and set to true. 
- * - * @api private - * @return {Boolean} - */ - -Core._normalizeSchemaFlag = function() { - - // If schema is defined on the collection, return the value - if (hasOwnProperty(Object.getPrototypeOf(this), 'schema')) { - return Object.getPrototypeOf(this).schema; - } - - // Grab the first connection used - if (!this.connection || !Array.isArray(this.connection)) return true; - var connection = this.connections[this.connection[0]]; - - // Check the user defined config - if (hasOwnProperty(connection, 'config') && hasOwnProperty(connection.config, 'schema')) { - return connection.config.schema; - } - - // Check the defaults defined in the adapter - if (!hasOwnProperty(connection, '_adapter')) return true; - if (!hasOwnProperty(connection._adapter, 'schema')) return true; - - return connection._adapter.schema; -}; diff --git a/lib/waterline/core/schema.js b/lib/waterline/core/schema.js deleted file mode 100644 index 6f33d046f..000000000 --- a/lib/waterline/core/schema.js +++ /dev/null @@ -1,211 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var types = require('../utils/types'); -var utils = require('../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; - -/** - * Builds a Schema Object from an attributes - * object in a model. - * - * Loops through an attributes object to build a schema - * containing attribute name as key and a type for casting - * in the database. Also includes a default value if supplied. 
- * - * Example: - * - * attributes: { - * name: 'string', - * phone: { - * type: 'string', - * defaultsTo: '555-555-5555' - * } - * } - * - * Returns: { - * name: { type: 'string' }, - * phone: { type: 'string, defaultsTo: '555-555-5555' } - * } - * - * @param {Object} context - * @return {Object} - */ - -var Schema = module.exports = function(context) { - this.context = context || {}; - this.schema = {}; - - return this; -}; - -/** - * Initialize the internal schema object - * - * @param {Object} attrs - * @param {Object} associations - * @param {Boolean} hasSchema - */ - -Schema.prototype.initialize = function(attrs, hasSchema, reservedAttributes) { - var self = this; - - // Build normal attributes - Object.keys(attrs).forEach(function(key) { - if (hasOwnProperty(attrs[key], 'collection')) return; - self.schema[key] = self.objectAttribute(key, attrs[key]); - }); - - // Build Reserved Attributes - if (Array.isArray(reservedAttributes)) { - reservedAttributes.forEach(function(key) { - self.schema[key] = {}; - }); - } - - // Set hasSchema to determine if values should be cleansed or not - this.hasSchema = typeof hasSchema !== 'undefined' ? hasSchema : true; -}; - -/** - * Handle the building of an Object attribute - * - * Cleans any unnecessary attributes such as validation properties off of - * the internal schema and set's defaults for incorrect values. - * - * @param {Object} value - * @return {Object} - */ - -Schema.prototype.objectAttribute = function(attrName, value) { - var attr = {}; - - for (var key in value) { - switch (key) { - - // Set schema[attribute].type - case 'type': - // Allow validation types in attributes and transform them to strings - attr.type = ~types.indexOf(value[key]) ? 
value[key] : 'string'; - break; - - // Set schema[attribute].defaultsTo - case 'defaultsTo': - attr.defaultsTo = value[key]; - break; - - // Set schema[attribute].primaryKey - case 'primaryKey': - attr.primaryKey = value[key]; - attr.unique = true; - break; - - // Set schema[attribute].foreignKey - case 'foreignKey': - attr.foreignKey = value[key]; - break; - - // Set schema[attribute].references - case 'references': - attr.references = value[key]; - break; - - // Set schema[attribute].on - case 'on': - attr.on = value[key]; - break; - - // Set schema[attribute].via - case 'via': - attr.via = value[key]; - break; - - // Set schema[attribute].autoIncrement - case 'autoIncrement': - attr.autoIncrement = value[key]; - attr.type = 'integer'; - break; - - // Set schema[attribute].unique - case 'unique': - attr.unique = value[key]; - break; - - // Set schema[attribute].index - case 'index': - attr.index = value[key]; - break; - - // Set schema[attribute].enum - case 'enum': - attr.enum = value[key]; - break; - - // Set schema[attribute].size - case 'size': - attr.size = value[key]; - break; - - // Set schema[attribute].notNull - case 'notNull': - attr.notNull = value[key]; - break; - - // Handle Belongs To Attributes - case 'model': - var type; - var attrs = this.context.waterline.schema[value[key].toLowerCase()].attributes; - - for (var attribute in attrs) { - if (hasOwnProperty(attrs[attribute], 'primaryKey') && attrs[attribute].primaryKey) { - type = attrs[attribute].type; - break; - } - } - - attr.type = type.toLowerCase(); - attr.model = value[key].toLowerCase(); - attr.foreignKey = true; - attr.alias = attrName; - break; - } - } - - return attr; -}; - - -/** - * Clean Values - * - * Takes user inputted data and strips out any values not defined in - * the schema. - * - * This is run after all the validations and right before being sent to the - * adapter. 
This allows you to add temporary properties when doing validation - * callbacks and have them stripped before being sent to the database. - * - * @param {Object} values to clean - * @return {Object} clone of values, stripped of any extra properties - */ - -Schema.prototype.cleanValues = function(values) { - - var clone = {}; - - for (var key in values) { - - // The value can pass through if either the collection does have a schema and the key is in the schema, - // or otherwise if the collection is schemaless and the key does not represent an associated collection. - if ((this.hasSchema && hasOwnProperty(this.schema, key)) || - (!this.hasSchema && !(hasOwnProperty(this.context._attributes, key) && hasOwnProperty(this.context._attributes[key], 'collection')))) { - - clone[key] = values[key]; - } - - } - - return clone; -}; diff --git a/lib/waterline/core/transformations.js b/lib/waterline/core/transformations.js deleted file mode 100644 index 08f0196b1..000000000 --- a/lib/waterline/core/transformations.js +++ /dev/null @@ -1,180 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var utils = require('../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; - -/** - * Transformation - * - * Allows for a Waterline Collection to have different - * attributes than what actually exist in an adater's representation. - * - * @param {Object} attributes - * @param {Object} tables - */ - -var Transformation = module.exports = function(attributes, tables) { - - // Hold an internal mapping of keys to transform - this._transformations = {}; - - // Initialize - this.initialize(attributes, tables); - - return this; -}; - -/** - * Initial mapping of transformations. 
- * - * @param {Object} attributes - * @param {Object} tables - */ - -Transformation.prototype.initialize = function(attributes, tables) { - var self = this; - - Object.keys(attributes).forEach(function(attr) { - - // Ignore Functions and Strings - if (['function', 'string'].indexOf(typeof attributes[attr]) > -1) return; - - // If not an object, ignore - if (attributes[attr] !== Object(attributes[attr])) return; - - // Loop through an attribute and check for transformation keys - Object.keys(attributes[attr]).forEach(function(key) { - - // Currently just works with `columnName`, `collection`, `groupKey` - if (key !== 'columnName') return; - - // Error if value is not a string - if (typeof attributes[attr][key] !== 'string') { - throw new Error('columnName transformation must be a string'); - } - - // Set transformation attr to new key - if (key === 'columnName') { - if (attr === attributes[attr][key]) return; - self._transformations[attr] = attributes[attr][key]; - } - - }); - }); -}; - -/** - * Transforms a set of attributes into a representation used - * in an adapter. 
- * - * @param {Object} attributes to transform - * @return {Object} - */ - -Transformation.prototype.serialize = function(attributes, behavior) { - var self = this; - var values = _.clone(attributes); - - behavior = behavior || 'default'; - - function recursiveParse(obj) { - - // Return if no object - if (!obj) return; - - // Handle array of types for findOrCreateEach - if (typeof obj === 'string') { - if (hasOwnProperty(self._transformations, obj)) { - values = self._transformations[obj]; - return; - } - - return; - } - - Object.keys(obj).forEach(function(property) { - - // Just a double check to exit if hasOwnProperty fails - if (!hasOwnProperty(obj, property)) return; - - // Schema must be serialized in first level only - if (behavior === 'schema') { - if (hasOwnProperty(self._transformations, property)) { - obj[self._transformations[property]] = _.clone(obj[property]); - delete obj[property]; - } - return; - } - - // Recursively parse `OR` criteria objects to transform keys - if (Array.isArray(obj[property]) && property === 'or') return recursiveParse(obj[property]); - - // If Nested Object call function again passing the property as obj - if ((toString.call(obj[property]) !== '[object Date]') && (_.isPlainObject(obj[property]))) { - - // check if object key is in the transformations - if (hasOwnProperty(self._transformations, property)) { - obj[self._transformations[property]] = _.clone(obj[property]); - delete obj[property]; - - return recursiveParse(obj[self._transformations[property]]); - } - - return recursiveParse(obj[property]); - } - - // If the property === SELECT check for any transformation keys - if (property === 'select' && _.isArray(obj[property])) { - var arr = _.clone(obj[property]); - _.each(arr, function(prop) { - if(_.has(self._transformations, prop)) { - var idx = _.indexOf(obj[property], prop); - if(idx > -1) { - obj[property][idx] = self._transformations[prop]; - } - } - }); - } - - // Check if property is a transformation key - if 
(hasOwnProperty(self._transformations, property)) { - - obj[self._transformations[property]] = obj[property]; - delete obj[property]; - } - }); - } - - // Recursivly parse attributes to handle nested criteria - recursiveParse(values); - - return values; -}; - -/** - * Transforms a set of attributes received from an adapter - * into a representation used in a collection. - * - * @param {Object} attributes to transform - * @return {Object} - */ - -Transformation.prototype.unserialize = function(attributes) { - var self = this; - var values = _.clone(attributes); - - // Loop through the attributes and change them - Object.keys(this._transformations).forEach(function(key) { - var transformed = self._transformations[key]; - - if (!hasOwnProperty(attributes, transformed)) return; - - values[key] = attributes[transformed]; - if (transformed !== key) delete values[transformed]; - }); - - return values; -}; diff --git a/lib/waterline/core/typecast.js b/lib/waterline/core/typecast.js deleted file mode 100644 index 55b6bf9ea..000000000 --- a/lib/waterline/core/typecast.js +++ /dev/null @@ -1,230 +0,0 @@ -/** - * Module dependencies - */ - -var types = require('../utils/types'); -var utils = require('../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; -var _ = require('lodash'); - -/** - * Cast Types - * - * Will take values and cast they to the correct type based on the - * type defined in the schema. - * - * Especially handy for converting numbers passed as strings to the - * correct integer type. - * - * Should be run before sending values to an adapter. - */ - -var Cast = module.exports = function() { - this._types = {}; - - return this; -}; - -/** - * Builds an internal _types object that contains each - * attribute with it's type. This can later be used to - * transform values into the correct type. 
- * - * @param {Object} attrs - */ - -Cast.prototype.initialize = function(attrs) { - var self = this; - - Object.keys(attrs).forEach(function(key) { - self._types[key] = ~types.indexOf(attrs[key].type) ? attrs[key].type : 'string'; - }); -}; - -/** - * Converts a set of values into the proper types - * based on the Collection's schema. - * - * @param {Object} values - * @return {Object} - * @api public - */ - -Cast.prototype.run = function(values) { - var self = this; - - if (values === undefined || values === null) { - return; - } - - Object.keys(values).forEach(function(key) { - - // Set undefined to null - if (_.isUndefined(values[key])) values[key] = null; - if (!hasOwnProperty(self._types, key) || values[key] === null || !hasOwnProperty(values, key)) { - return; - } - - // If the value is a plain object, don't attempt to cast it - if (_.isPlainObject(values[key])) return; - - // Find the value's type - var type = self._types[key]; - - // Casting Function - switch (type) { - case 'string': - case 'text': - values[key] = self.string(values[key]); - break; - - case 'integer': - values[key] = self.integer(key, values[key]); - break; - - case 'float': - values[key] = self.float(values[key]); - break; - - case 'date': - case 'time': - case 'datetime': - values[key] = self.date(values[key]); - break; - - case 'boolean': - values[key] = self.boolean(values[key]); - break; - - case 'array': - values[key] = self.array(values[key]); - break; - } - }); - - return values; -}; - -/** - * Cast String Values - * - * @param {String} str - * @return {String} - * @api private - */ - -Cast.prototype.string = function string(str) { - return typeof str.toString !== 'undefined' ? 
str.toString() : '' + str; -}; - -/** - * Cast Integer Values - * - * @param {String} key - * @param {Integer} value - * @return {Integer} - * @api private - */ - -Cast.prototype.integer = function integer(key, value) { - var _value; - - // Attempt to see if the value is resembles a MongoID - // if so let's not try and cast it and instead return a string representation of - // it. Needed for sails-mongo. - if (utils.matchMongoId(value)) return value.toString(); - - // Attempt to parseInt - try { - _value = parseInt(value, 10); - } catch(e) { - return value; - } - - return _value; -}; - -/** - * Cast Float Values - * - * @param {Float} value - * @return {Float} - * @api private - */ - -Cast.prototype.float = function float(value) { - var _value; - - try { - _value = parseFloat(value); - } catch(e) { - return value; - } - - return _value; -}; - -/** - * Cast Boolean Values - * - * @param {Boolean} value - * @return {Boolean} - * @api private - */ - -Cast.prototype.boolean = function boolean(value) { - var parsed; - - if (_.isString(value)) { - if (value === 'true') return true; - if (value === 'false') return false; - return value; - } - - // Nicely cast [0, 1] to true and false - try { - parsed = parseInt(value, 10); - } catch(e) { - return false; - } - - if (parsed === 0) return false; - if (parsed === 1) return true; - - return value; -}; - -/** - * Cast Date Values - * - * @param {String|Date} value - * @return {Date} - * @api private - */ - -Cast.prototype.date = function date(value) { - var _value; - if (value.__proto__ == Date.prototype) { - _value = new Date(value.getTime()); - } else if (typeof value.toDate === 'function') { - _value = value.toDate(); - } else { - _value = new Date(Date.parse(value)); - } - - if (_value.toString() === 'Invalid Date') return value; - return _value; -}; - -/** - * Cast Array Values - * - * @param {Array|String} value - * @return {Array} - * @api private - */ - -Cast.prototype.array = function array(value) { - if 
(Array.isArray(value)) return value; - return [value]; -}; diff --git a/lib/waterline/core/validations.js b/lib/waterline/core/validations.js deleted file mode 100644 index 4d83fb310..000000000 --- a/lib/waterline/core/validations.js +++ /dev/null @@ -1,291 +0,0 @@ -/** - * Handles validation on a model - * - * Uses Anchor for validating - * https://github.com/balderdashy/anchor - */ - -var _ = require('lodash'); -var anchor = require('anchor'); -var async = require('async'); -var utils = require('../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; -var WLValidationError = require('../error/WLValidationError'); - - -/** - * Build up validations using the Anchor module. - * - * @param {String} adapter - */ - -var Validator = module.exports = function(adapter) { - this.validations = {}; -}; - -/** - * Builds a Validation Object from a normalized attributes - * object. - * - * Loops through an attributes object to build a validation object - * containing attribute name as key and a series of validations that - * are run on each model. Skips over type and defaultsTo as they are - * schema properties. 
- * - * Example: - * - * attributes: { - * name: { - * type: 'string', - * length: { min: 2, max: 5 } - * } - * email: { - * type: 'string', - * required: true - * } - * } - * - * Returns: { - * name: { length: { min:2, max: 5 }}, - * email: { required: true } - * } - */ - -Validator.prototype.initialize = function(attrs, types, defaults) { - var self = this; - - defaults = defaults || {}; - - // These properties are reserved and may not be used as validations - this.reservedProperties = [ - 'defaultsTo', - 'primaryKey', - 'autoIncrement', - 'unique', - 'index', - 'collection', - 'dominant', - 'through', - 'columnName', - 'foreignKey', - 'references', - 'on', - 'groupKey', - 'model', - 'via', - 'size', - 'example', - 'validationMessage', - 'validations', - 'populateSettings', - 'onKey', - 'protected', - 'meta' - ]; - - - if (defaults.ignoreProperties && Array.isArray(defaults.ignoreProperties)) { - this.reservedProperties = this.reservedProperties.concat(defaults.ignoreProperties); - } - - // Add custom type definitions to anchor - types = types || {}; - anchor.define(types); - - Object.keys(attrs).forEach(function(attr) { - self.validations[attr] = {}; - - Object.keys(attrs[attr]).forEach(function(prop) { - - // Ignore null values - if (attrs[attr][prop] === null) { return; } - - // If property is reserved don't do anything with it - if (self.reservedProperties.indexOf(prop) > -1) { return; } - - // use the Anchor `in` method for enums - if (prop === 'enum') { - self.validations[attr]['in'] = attrs[attr][prop]; - return; - } - - self.validations[attr][prop] = attrs[attr][prop]; - }); - }); -}; - - -/** - * Validator.prototype.validate() - * - * Accepts a dictionary of values and validates them against - * the validation rules expected by this schema (`this.validations`). - * Validation is performed using Anchor. - * - * - * @param {Dictionary} values - * The dictionary of values to validate. 
- * - * @param {Boolean|String|String[]} presentOnly - * only validate present values (if `true`) or validate the - * specified attribute(s). - * - * @param {Function} callback - * @param {Error} err - a fatal error, if relevant. - * @param {Array} invalidAttributes - an array of errors - */ - -Validator.prototype.validate = function(values, presentOnly, cb) { - var self = this; - var errors = {}; - var validations = Object.keys(this.validations); - - // Handle optional second arg AND Use present values only, specified values, or all validations - /* eslint-disable no-fallthrough */ - switch (typeof presentOnly) { - case 'function': - cb = presentOnly; - break; - case 'string': - validations = [presentOnly]; - break; - case 'object': - if (Array.isArray(presentOnly)) { - validations = presentOnly; - break; - } // Fall through to the default if the object is not an array - default: - // Any other truthy value. - if (presentOnly) { - validations = _.intersection(validations, Object.keys(values)); - } - /* eslint-enable no-fallthrough */ - } - - - // Validate all validations in parallel - async.each(validations, function _eachValidation(validation, cb) { - var curValidation = self.validations[validation]; - - // Build Requirements - var requirements; - try { - requirements = anchor(curValidation); - } - catch (e) { - // Handle fatal error: - return cb(e); - } - requirements = _.cloneDeep(requirements); - - // Grab value and set to null if undefined - var value = values[validation]; - if (typeof value == 'undefined') { - value = null; - } - - // If value is not required and empty then don't - // try and validate it - if (!curValidation.required) { - if (value === null || value === '') { - return cb(); - } - } - - // If Boolean and required manually check - if (curValidation.required && curValidation.type === 'boolean' && (typeof value !== 'undefined' && value !== null)) { - if (value.toString() === 'true' || value.toString() === 'false') { - return cb(); - } - } - - // 
If type is integer and the value matches a mongoID let it validate - if (hasOwnProperty(self.validations[validation], 'type') && self.validations[validation].type === 'integer') { - if (utils.matchMongoId(value)) { - return cb(); - } - } - - // Rule values may be specified as sync or async functions. - // Call them and replace the rule value with the function's result - // before running validations. - async.each(Object.keys(requirements.data), function _eachKey(key, next) { - try { - if (typeof requirements.data[key] !== 'function') { - return next(); - } - - // Run synchronous function - if (requirements.data[key].length < 1) { - requirements.data[key] = requirements.data[key].apply(values, []); - return next(); - } - - // Run async function - requirements.data[key].call(values, function(result) { - requirements.data[key] = result; - next(); - }); - } - catch (e) { - return next(e); - } - }, function afterwards(unexpectedErr) { - if (unexpectedErr) { - // Handle fatal error - return cb(unexpectedErr); - } - - // If the value has a dynamic required function and it evaluates to false lets look and see - // if the value supplied is null or undefined. If so then we don't need to check anything. This - // prevents type errors like `undefined` should be a string. - // if required is set to 'false', don't enforce as required rule - if (requirements.data.hasOwnProperty('required') && !requirements.data.required) { - if (_.isNull(value)) { - return cb(); - } - } - - // Now run the validations using Anchor. - var validationError; - try { - validationError = anchor(value).to(requirements.data, values); - } - catch (e) { - // Handle fatal error: - return cb(e); - } - - // If no validation errors, bail. - if (!validationError) { - return cb(); - } - - // Build an array of errors. 
- errors[validation] = []; - - validationError.forEach(function(obj) { - if (obj.property) { - delete obj.property; - } - errors[validation].push({ rule: obj.rule, message: obj.message }); - }); - - return cb(); - }); - - }, function allValidationsChecked(err) { - // Handle fatal error: - if (err) { - return cb(err); - } - - - if (Object.keys(errors).length === 0) { - return cb(); - } - - return cb(undefined, errors); - }); - -}; diff --git a/lib/waterline/error/WLError.js b/lib/waterline/error/WLError.js deleted file mode 100644 index fb9993d7a..000000000 --- a/lib/waterline/error/WLError.js +++ /dev/null @@ -1,143 +0,0 @@ -var util = require('util'); -var _ = require('lodash'); - -/** - * WLError - * - * All errors passed to a query callback in Waterline extend - * from this base error class. - * - * @param {Object} properties - * @constructor {WLError} - */ -function WLError(properties) { - WLError.super_.call(this); - - // Fold defined properties into the new WLError instance. - properties || (properties = { }); - _.extend(this, properties); - - // Generate stack trace - // (or use `originalError` if it is a true error instance) - if (_.isObject(this.originalError) && this.originalError instanceof Error) { - this._e = this.originalError; - } else { - this._e = new Error(); - } - - // Doctor up a modified version of the stack trace called `rawStack`: - this.rawStack = (this._e.stack.replace(/^Error(\r|\n)*(\r|\n)*/, '')); - - // Customize `details`: - // Try to dress up the wrapped "original" error as much as possible. 
- // @type {String} a detailed explanation of this error - if (_.isString(this.originalError)) { - this.details = this.originalError; - - // Run toString() on Errors: - } else if (this.originalError && util.isError(this.originalError)) { - this.details = this.originalError.toString(); - - // But for other objects, use util.inspect() - } else if (this.originalError) { - this.details = util.inspect(this.originalError); - } - - // If `details` is set, prepend it with "Details:" - if (this.details) { - this.details = 'Details: ' + this.details + '\n'; - } -} - -util.inherits(WLError, Error); - -// Default properties -WLError.prototype.status = 500; -WLError.prototype.code = 'E_UNKNOWN'; -WLError.prototype.reason = 'Encountered an unexpected error'; -WLError.prototype.details = ''; - -/** - * Override JSON serialization. - * (i.e. when this error is passed to `res.json()` or `JSON.stringify`) - * - * For example: - * ```json - * { - * status: 500, - * code: 'E_UNKNOWN' - * } - * ``` - * - * @return {Object} - */ -WLError.prototype.toJSON = -WLError.prototype.toPOJO = -function() { - var obj = { - error: this.code, - status: this.status, - summary: this.reason, - raw: this.originalError - }; - - // Only include `raw` if its truthy. 
- if (!obj.raw) delete obj.raw; - - return obj; -}; - -/** - * Override output for `sails.log[.*]` - * - * @return {String} - * - * For example: - * ```sh - * Waterline: ORM encountered an unexpected error: - * { ValidationError: { name: [ [Object], [Object] ] } } - * ``` - */ -WLError.prototype.toLog = function() { - return this.inspect(); -}; - -/** - * Override output for `util.inspect` - * (also when this error is logged using `console.log`) - * - * @return {String} - */ -WLError.prototype.inspect = function() { - return util.format('Error (%s) :: %s\n%s\n\n%s', this.code, this.reason, this.rawStack, this.details); -}; - -/** - * @return {String} - */ -WLError.prototype.toString = function() { - return util.format('[Error (%s) %s]', this.code, this.reason, this.details); -}; - -Object.defineProperties(WLError.prototype, { - stack: { - enumerable: true, - get: function() { - return util.format('Error (%s) :: %s\n%s', this.code, this.reason, this.rawStack); - }, - set: function(value) { - this.rawStack = value; - } - }, - message: { - enumerable: true, - get: function() { - return this.rawMessage || this.toString(); - }, - set: function(value) { - this.rawMessage = value; - } - } -}); - -module.exports = WLError; diff --git a/lib/waterline/error/WLUsageError.js b/lib/waterline/error/WLUsageError.js deleted file mode 100644 index 8ce671fb7..000000000 --- a/lib/waterline/error/WLUsageError.js +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Module dependencies - */ - -var WLError = require('./WLError'); -var util = require('util'); - - -/** - * WLUsageError - * - * @extends WLError - */ -function WLUsageError(properties) { - - // Call superclass - WLUsageError.super_.call(this, properties); -} -util.inherits(WLUsageError, WLError); - - -// Override WLError defaults with WLUsageError properties. 
-WLUsageError.prototype.code = -'E_USAGE'; -WLUsageError.prototype.status = -0; -WLUsageError.prototype.reason = -'Invalid usage'; - - -module.exports = WLUsageError; diff --git a/lib/waterline/error/WLValidationError.js b/lib/waterline/error/WLValidationError.js deleted file mode 100644 index 7b842232a..000000000 --- a/lib/waterline/error/WLValidationError.js +++ /dev/null @@ -1,159 +0,0 @@ -/** - * Module dependencies - */ - -var WLError = require('./WLError'); -var WLUsageError = require('./WLUsageError'); -var util = require('util'); -var _ = require('lodash'); - - -/** - * WLValidationError - * - * @extends WLError - */ -function WLValidationError(properties) { - - // Call superclass - WLValidationError.super_.call(this, properties); - - // Ensure valid usage - if (typeof this.invalidAttributes !== 'object') { - return new WLUsageError({ - reason: 'An `invalidAttributes` object must be passed into the constructor for `WLValidationError`' - }); - } - // if ( typeof this.model !== 'string' ) { - // return new WLUsageError({ - // reason: 'A `model` string (the collection\'s `globalId`) must be passed into the constructor for `WLValidationError`' - // }); - // } - - // Customize the `reason` based on the # of invalid attributes - // (`reason` may not be overridden) - var isSingular = this.length === 1; - this.reason = util.format('%d attribute%s %s invalid', - this.length, - isSingular ? '' : 's', - isSingular ? 'is' : 'are'); - - // Always apply the 'E_VALIDATION' error code, even if it was overridden. - this.code = 'E_VALIDATION'; - - // Status may be overridden. - this.status = properties.status || 400; - - // Model should always be set. 
- // (this should be the globalId of model, or "collection") - this.model = properties.model; - - // Ensure messages exist for each invalidAttribute - this.invalidAttributes = _.mapValues(this.invalidAttributes, function(rules, attrName) { - return _.map(rules, function(rule) { - if (!rule.message) { - rule.message = util.format('A record with that `%s` already exists (`%s`).', attrName, rule.value); - } - return rule; - }); - }); - - // Customize the `details` - this.details = util.format('Invalid attributes sent to %s:\n', this.model) + - _.reduce(this.messages, function(memo, messages, attrName) { - memo += ' • ' + attrName + '\n'; - memo += _.reduce(messages, function(memo, message) { - memo += ' • ' + message + '\n'; - return memo; - }, ''); - return memo; - }, ''); - -} -util.inherits(WLValidationError, WLError); - - -/** - * `rules` - * - * @return {Object[Array[String]]} dictionary of validation rule ids, indexed by attribute - */ -WLValidationError.prototype.__defineGetter__('rules', function() { - return _.mapValues(this.invalidAttributes, function(rules, attrName) { - return _.pluck(rules, 'rule'); - }); -}); - - -/** - * `messages` (aka `errors`) - * - * @return {Object[Array[String]]} dictionary of validation messages, indexed by attribute - */ -WLValidationError.prototype.__defineGetter__('messages', function() { - return _.mapValues(this.invalidAttributes, function(rules, attrName) { - return _.pluck(rules, 'message'); - }); -}); -WLValidationError.prototype.__defineGetter__('errors', function() { - return this.messages; -}); - - -/** - * `attributes` (aka `keys`) - * - * @return {Array[String]} of invalid attribute names - */ -WLValidationError.prototype.__defineGetter__('attributes', function() { - return _.keys(this.invalidAttributes); -}); -WLValidationError.prototype.__defineGetter__('keys', function() { - return this.attributes; -}); - - -/** - * `.length` - * - * @return {Integer} number of invalid attributes - */ 
-WLValidationError.prototype.__defineGetter__('length', function() { - return this.attributes.length; -}); - - -/** - * `.ValidationError` - * (backwards-compatibility) - * - * @return {Object[Array[Object]]} number of invalid attributes - */ -WLValidationError.prototype.__defineGetter__('ValidationError', function() { - // - // TODO: - // Down the road- emit deprecation event here-- - // (will log information about new error handling options) - // - return this.invalidAttributes; -}); - - -/** - * [toJSON description] - * @type {[type]} - */ -WLValidationError.prototype.toJSON = -WLValidationError.prototype.toPOJO = -function() { - return { - error: this.code, - status: this.status, - summary: this.reason, - model: this.model, - invalidAttributes: this.invalidAttributes - }; -}; - - -module.exports = WLValidationError; diff --git a/lib/waterline/error/index.js b/lib/waterline/error/index.js deleted file mode 100644 index 31c74f4e1..000000000 --- a/lib/waterline/error/index.js +++ /dev/null @@ -1,98 +0,0 @@ -/** - * Module dependencies - */ - -var util = require('util'); -var _ = require('lodash'); -var WLError = require('./WLError'); -var WLValidationError = require('./WLValidationError'); - - -/** - * A classifier which normalizes a mystery error into a simple, - * consistent format. This ensures that our instance which is - * "new"-ed up belongs to one of a handful of distinct categories - * and has a predictable method signature and properties. - * - * The returned error instance will always be or extend from - * `WLError` (which extends from `Error`) - * - * NOTE: - * This method should eventually be deprecated in a - * future version of Waterline. It exists to help - * w/ error type negotiation. In general, Waterline - * should use WLError, or errors which extend from it - * to construct error objects of the appropriate type. 
- * In other words, no ** new ** errors should need to - * be wrapped in a call to `errorify` - instead, code - * necessary to handle any new error conditions should - * construct a `WLError` directly and return that. - * - * @param {???} err - * @return {WLError} - */ -module.exports = function errorify(err) { - - // If specified `err` is already a WLError, just return it. - if (typeof err === 'object' && err instanceof WLError) return err; - - return duckType(err); -}; - - -/** - * Determine which type of error we're working with. - * Err... using hacks. - * - * @return {[type]} [description] - */ -function duckType(err) { - - // Validation or constraint violation error (`E_VALIDATION`) - // - // i.e. detected before talking to adapter, like `minLength` - // i.e. constraint violation reported by adapter, like `unique` - if (/* _isValidationError(err) || */ _isConstraintViolation(err)) { - - // Dress `unique` rule violations to be consistent with other - // validation errors. - return new WLValidationError(err); - } - - // Unexpected miscellaneous error (`E_UNKNOWN`) - // - // (i.e. helmet fire. The database crashed or something. Or there's an adapter - // bug. Or a bug in WL core.) - return new WLError({ - originalError: err - }); -} - - -/** - * @param {?} err - * @return {Boolean} whether this is an adapter-level constraint - * violation (e.g. `unique`) - */ -function _isConstraintViolation(err) { - - // If a proper error code is specified, this error can be classified. - if (err && typeof err === 'object' && err.code === 'E_UNIQUE') { - return true; - } - - // Otherwise, there is not enough information to call this a - // constraint violation error and provide proper explanation to - // the architect. - else return false; -} - - -// /** -// * @param {?} err -// * @return {Boolean} whether this is a validation error (e.g. 
minLength exceeded for attribute) -// */ -// function _isValidationError(err) { -// return _.isObject(err) && err.ValidationError; -// } - diff --git a/lib/waterline/methods/add-to-collection.js b/lib/waterline/methods/add-to-collection.js new file mode 100644 index 000000000..eb3ce3e7e --- /dev/null +++ b/lib/waterline/methods/add-to-collection.js @@ -0,0 +1,491 @@ +/** + * Module dependencies + */ + +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('addToCollection'); + + + +/** + * addToCollection() + * + * Add new child records to the specified collection in each of the target record(s). + * + * ``` + * // For users 3 and 4, add pets 99 and 98 to the "pets" collection. + * // > (if either user record already has one of those pets in its "pets", + * // > then we just silently skip over it) + * User.addToCollection([3,4], 'pets', [99,98]).exec(...); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Array?|String?|Number?} targetRecordIds + * + * @param {String?} collectionAttrName + * + * @param {Array?} associatedIds + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Array|String|Number} targetRecordIds + * The primary key value(s) (i.e. ids) for the parent record(s). + * Must be a number or string; e.g. '507f191e810c19729de860ea' or 49 + * Or an array of numbers or strings; e.g. ['507f191e810c19729de860ea', '14832ace0c179de897'] or [49, 32, 37] + * If an empty array (`[]`) is specified, then this is a no-op. + * + * @qkey {String} collectionAttrName + * The name of the collection association (e.g. "pets") + * + * @qkey {Array} associatedIds + * The primary key values (i.e. ids) for the child records to add. + * Must be an array of numbers or strings; e.g. ['334724948aca33ea0f13', '913303583e0af031358bac931'] or [18, 19] + * If an empty array (`[]`) is specified, then this is a no-op. + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function addToCollection(/* targetRecordIds, collectionAttrName, associatedIds?, explicitCbMaybe?, meta? */) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(addToCollection); + + // Build query w/ initial, universal keys. 
+ var query = { + method: 'addToCollection', + using: modelIdentity + }; + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback) + // + // > Note that we define `args` so that we can insulate access + // > to the arguments provided to this function. + var args = arguments; + (function _handleVariadicUsage(){ + + // The metadata container, if one was provided. + var _meta; + + + // Handle first two arguments: + // (both of which always have exactly one meaning) + // + // • addToCollection(targetRecordIds, collectionAttrName, ...) + query.targetRecordIds = args[0]; + query.collectionAttrName = args[1]; + + + // Handle double meaning of third argument, & then handle the rest: + // + // • addToCollection(____, ____, associatedIds, explicitCbMaybe, _meta) + var is3rdArgArray = !_.isUndefined(args[2]); + if (is3rdArgArray) { + query.associatedIds = args[2]; + explicitCbMaybe = args[3]; + _meta = args[4]; + } + // • addToCollection(____, ____, explicitCbMaybe, _meta) + else { + explicitCbMaybe = args[2]; + _meta = args[3]; + } + + // Fold in `_meta`, if relevant. 
+ if (!_.isUndefined(_meta)) { + query.meta = _meta; + } // >- + + })(); + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_TARGET_RECORD_IDS': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The target record ids (i.e. first argument) passed to `.addToCollection()` '+ + 'should be the ID (or IDs) of target records whose collection will be modified.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_COLLECTION_ATTR_NAME': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The collection attr name (i.e. second argument) to `.addToCollection()` should '+ + 'be the name of a collection association from this model.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_ASSOCIATED_IDS': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The associated ids (i.e. using `.members()`, or the third argument) passed to `.addToCollection()` should be '+ + 'the ID (or IDs) of associated records to add.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_NOOP': + return done(); + // ^ tolerate no-ops -- i.e. 
empty array of target record ids or empty array of associated ids (members) + + case 'E_INVALID_META': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: e.message + }, omen) + ); + // ^ when the standard usage error message is good enough as-is, without any further customization + + default: + return done(e); + // ^ when an internal, miscellaneous, or unexpected error occurs + + } + } // >-• + + + // ┌┐┌┌─┐┬ ┬ ╔═╗╔═╗╔╦╗╦ ╦╔═╗╦ ╦ ╦ ╦ ┌┬┐┌─┐┬ ┬┌─ ┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌┬┐┌┐ ┌─┐ + // ││││ ││││ ╠═╣║ ║ ║ ║╠═╣║ ║ ╚╦╝ │ ├─┤│ ├┴┐ │ │ │ │ ├─┤├┤ ││├┴┐└─┐ + // ┘└┘└─┘└┴┘ ╩ ╩╚═╝ ╩ ╚═╝╩ ╩╩═╝╩═╝╩ ┴ ┴ ┴┴─┘┴ ┴ ┴ └─┘ ┴ ┴ ┴└─┘ ─┴┘└─┘└─┘ + (function (proceed){ + + // Get the model being used as the parent + var WLModel = orm.collections[query.using]; + assert.equal(query.using.toLowerCase(), query.using, '`query.using` (identity) should have already been normalized before getting here! But it was not: '+query.using); + + // Look up the association by name in the schema definition. + var schemaDef = WLModel.schema[query.collectionAttrName]; + + // Look up the associated collection using the schema def which should have + // join tables normalized + var WLChild = orm.collections[schemaDef.collection]; + assert.equal(schemaDef.collection.toLowerCase(), schemaDef.collection, '`schemaDef.collection` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.collection); + assert.equal(schemaDef.referenceIdentity.toLowerCase(), schemaDef.referenceIdentity, '`schemaDef.referenceIdentity` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.referenceIdentity); + assert.equal(Object.getPrototypeOf(WLChild).identity.toLowerCase(), Object.getPrototypeOf(WLChild).identity, '`Object.getPrototypeOf(WLChild).identity` (identity) should have already been normalized before getting here! 
But it was not: '+Object.getPrototypeOf(WLChild).identity); + + + // Flag to determine if the WLChild is a manyToMany relation + var manyToMany = false; + + // Check if the schema references something other than the WLChild + if (schemaDef.referenceIdentity !== Object.getPrototypeOf(WLChild).identity) { + manyToMany = true; + WLChild = orm.collections[schemaDef.referenceIdentity]; + } + + // Check if the child is a join table + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + manyToMany = true; + } + + // Check if the child is a through table + if (_.has(Object.getPrototypeOf(WLChild), 'throughTable') && _.keys(WLChild.throughTable).length) { + manyToMany = true; + } + + // Ensure the query skips lifecycle callbacks + // Build a modified shallow clone of the originally-provided `meta` + var modifiedMeta = _.extend({}, query.meta || {}, { skipAllLifecycleCallbacks: true }); + + + // ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗ ████████╗ ██████╗ ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗ + // ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝ ╚══██╔══╝██╔═══██╗ ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝ + // ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝ ██║ ██║ ██║ ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝ + // ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝ ██║ ██║ ██║ ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝ + // ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║ ██║ ╚██████╔╝ ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║ + // ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // If the collection uses a join table, build a query that inserts the records + // into the table. + if (manyToMany) { + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌─┐┌┐┌┌─┐┌─┐ ┌┬┐┌─┐┌─┐┌─┐┬┌┐┌┌─┐ + // ╠╩╗║ ║║║ ║║ ├┬┘├┤ ├┤ ├┤ ├┬┘├┤ ││││ ├┤ │││├─┤├─┘├─┘│││││ ┬ + // ╚═╝╚═╝╩╩═╝═╩╝ ┴└─└─┘└ └─┘┴└─└─┘┘└┘└─┘└─┘ ┴ ┴┴ ┴┴ ┴ ┴┘└┘└─┘ + // + // Maps out the parent and child attribute names to use for the query. 
+ var parentReference; + var childReference; + + // Find the parent reference + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + // Assumes the generated junction table will only ever have two foreign key + // values. Should be safe for now and any changes would need to be made in + // Waterline-Schema where a map could be formed anyway. + _.each(WLChild.schema, function(wlsAttrDef, key) { + if (!_.has(wlsAttrDef, 'references')) { + return; + } + + // If this is the piece of the join table, set the parent reference. + if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName === schemaDef.on) { + parentReference = key; + } + }); + } + //‡ + // If it's a through table, grab the parent and child reference from the + // through table mapping that was generated by Waterline-Schema. + else if (_.has(Object.getPrototypeOf(WLChild), 'throughTable')) { + childReference = WLChild.throughTable[WLModel.identity + '.' + query.collectionAttrName]; + _.each(WLChild.throughTable, function(rhs, key) { + if (key !== WLModel.identity + '.' + query.collectionAttrName) { + parentReference = rhs; + } + }); + } + + // Find the child reference in a junction table + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + // Assumes the generated junction table will only ever have two foreign key + // values. Should be safe for now and any changes would need to be made in + // Waterline-Schema where a map could be formed anyway. + _.each(WLChild.schema, function(wlsAttrDef, key) { + if (!_.has(wlsAttrDef, 'references')) { + return; + } + + // If this is the other piece of the join table, set the child reference. 
+ if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName !== schemaDef.on) { + childReference = key; + } + }); + } + + // FUTURE: If anonymous junction model's primary key attribute is explicitly + // required, then this isn't going to work, because we're specifying + // a value for the primary key for the new junction records we're creating. + // We could, in waterline-schema (or possibly in sails-hook-orm or maybe + // even in Waterline core?), automatically un-require-ify the primary key + // attribute for anonymous junction models. + // > See https://github.com/balderdashy/sails/issues/4591 for background. + // + // But for now we just do this: + if (WLChild.junctionTable || WLChild.throughTable) { + if (WLChild.schema.id) { + if (WLChild.schema.id.required) { + throw new Error( + 'Cannot add to the collection for this many-to-many association because the anonymous '+ + 'junction model\'s "id" (primary key) is required. This might mean that the default id '+ + 'in this app\'s `config/models.js` file makes all primary keys required. For more info, '+ + 'see https://github.com/balderdashy/sails/issues/4591. If you are unsure, check out '+ + 'https://sailsjs.com/support for help.' + ); + } + } else { + // FUTURE: Maybe be smarter about this instead of just checking for `id` + // For now, we just ignore it and let the error happen. + } + }//fi + + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴ + + // Build an array to hold all the records being inserted + var joinRecords = []; + + // For each target record, build an insert query for the associated records. 
+ _.each(query.targetRecordIds, function(targetId) { + _.each(query.associatedIds, function(associatedId) { + var record = {}; + record[parentReference] = targetId; + record[childReference] = associatedId; + joinRecords.push(record); + }); + }); + + + // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴ + WLChild.createEach(joinRecords, proceed, modifiedMeta); + + return; + }//-• + + + // ██████╗ ███████╗██╗ ██████╗ ███╗ ██╗ ██████╗ ███████╗ ████████╗ ██████╗ + // ██╔══██╗██╔════╝██║ ██╔═══██╗████╗ ██║██╔════╝ ██╔════╝ ╚══██╔══╝██╔═══██╗ + // ██████╔╝█████╗ ██║ ██║ ██║██╔██╗ ██║██║ ███╗███████╗ ██║ ██║ ██║ + // ██╔══██╗██╔══╝ ██║ ██║ ██║██║╚██╗██║██║ ██║╚════██║ ██║ ██║ ██║ + // ██████╔╝███████╗███████╗╚██████╔╝██║ ╚████║╚██████╔╝███████║ ██║ ╚██████╔╝ + // ╚═════╝ ╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝ ╚══════╝ ╚═╝ ╚═════╝ + // + // Otherwise the child records need to be updated to reflect the new foreign + // key value. Because in this case the targetRecordIds **should** only be a + // single value, just an update here should do the trick. + + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴ + + + // Build up a search criteria + var criteria = { + where: {} + }; + + criteria.where[WLChild.primaryKey] = query.associatedIds; + + // Build up the values to update + var valuesToUpdate = {}; + valuesToUpdate[schemaDef.via] = _.first(query.targetRecordIds); + + + // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴ + WLChild.update(criteria, valuesToUpdate, proceed, modifiedMeta); + + })(function (err) { + if (err) { return done(err); } + + // IWMIH, everything worked! + // > Note that we do not send back a result of any kind-- this it to reduce the likelihood + // > writing userland code that relies undocumented/experimental output. 
+ return done(); + + });// + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/archive-one.js b/lib/waterline/methods/archive-one.js new file mode 100644 index 000000000..0942ce9ec --- /dev/null +++ b/lib/waterline/methods/archive-one.js @@ -0,0 +1,196 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('archiveOne'); + + +/** + * archiveOne() + * + * Archive (s.k.a. "soft-delete") a record that matches the specified criteria, + * saving it as a new records in the built-in Archive model, then destroying + * the original. (Returns the original, now-destroyed record.) + * + * @experimental + * + * TODO: document further + */ + +module.exports = function archiveOne(criteria, explicitCbMaybe, metaContainer){ + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Potentially build an omen for use below. + var omenMaybe = flaverr.omen(archiveOne); + + // Build initial query. 
+ var query = { + method: 'archiveOne', + using: modelIdentity, + criteria: criteria, + meta: metaContainer + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // N/A + // (there are no out-of-order, optional arguments) + + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not specified, then build a new Deferred and bail now. + // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... 
+ + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + // This ensures a normalized format. + + try { + forgeStageTwoQuery(query, orm); + } catch (err) { + switch (err.code) { + case 'E_INVALID_CRITERIA': + return done( + flaverr({ + name: 'UsageError', + code: err.code, + details: err.details, + message: + 'Invalid criteria.\n'+ + 'Details:\n'+ + ' '+err.details+'\n' + }, omenMaybe) + ); + + case 'E_NOOP': + // Determine the appropriate no-op result. + // If `fetch` meta key is set, use `[]`-- otherwise use `undefined`. + var noopResult = undefined; + return done(undefined, noopResult); + + default: + return done(err); + } + } + + // Do a .count() to ensure that there are ≤1 matching records. + // FUTURE: Make this transactional, if supported by the underlying adapter. + var modifiedCriteriaForCount = _.omit(query.criteria, ['select', 'omit', 'limit', 'skip', 'sort']); + WLModel.count(modifiedCriteriaForCount, function _afterCounting(err, total) { + if (err) { + return done(err); + } + + // If more than one matching record was found, then consider this an error. 
+ if (total > 1) { + return done(flaverr({ + message: + 'Preventing `.'+query.method+'()`: found too many ('+total+') matching records.\n'+ + '\n'+ + 'Criteria used:\n'+ + '···\n'+ + util.inspect(modifiedCriteriaForCount,{depth:5})+'\n'+ + '···' + }, omenMaybe)); + }//-• + + // Build a modified shallow clone of the originally-provided `meta` from + // userland, but that also has `fetch: true`. + var modifiedMetaForArchive = _.extend({}, query.meta || {}, { + fetch: true, + }); + + var modifiedCriteriaForArchive = _.omit(query.criteria, ['select', 'omit', 'limit', 'skip', 'sort']); + WLModel.archive(modifiedCriteriaForArchive, function _afterArchiving(err, affectedRecords) { + if (err) { + return done(err); + } + + // Note that we always get `affectedRecords` here because "fetch" is enabled. + return done(undefined, affectedRecords[0]); + + }, modifiedMetaForArchive);//_∏_ + }, query.meta);//_∏_ + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/archive.js b/lib/waterline/methods/archive.js new file mode 100644 index 000000000..1daf27f69 --- /dev/null +++ b/lib/waterline/methods/archive.js @@ -0,0 +1,313 @@ +/** + * Module Dependencies + */ + +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var getModel = require('../utils/ontology/get-model'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('archive'); + + + +/** + * archive() + * + * Archive (s.k.a. 
"soft-delete") records that match the specified criteria, + * saving them as new records in the built-in Archive model, then destroying + * the originals. + * + * ``` + * // Archive all bank accounts with more than $32,000 in them. + * BankAccount.archive().where({ + * balance: { '>': 32000 } + * }).exec(function(err) { + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Dictionary?} criteria + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. + * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Dictionary?} criteria + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function archive(/* criteria, explicitCbMaybe, metaContainer */) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(archive); + + // Build initial query. 
+ var query = { + method: 'archive', + using: modelIdentity, + criteria: undefined, + meta: undefined + }; + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // FUTURE: when time allows, update this to match the "VARIADICS" format + // used in the other model methods. + + // The explicit callback, if one was provided. + var explicitCbMaybe; + + // Handle double meaning of first argument: + // + // • archive(criteria, ...) + if (!_.isFunction(arguments[0])) { + query.criteria = arguments[0]; + explicitCbMaybe = arguments[1]; + query.meta = arguments[2]; + } + // • archive(explicitCbMaybe, ...) + else { + explicitCbMaybe = arguments[0]; + query.meta = arguments[1]; + } + + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not 
specified, then build a new Deferred and bail now. + // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... + + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + // + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + // This ensures a normalized format. + try { + forgeStageTwoQuery(query, orm); + } catch (err) { + switch (err.code) { + case 'E_INVALID_CRITERIA': + return done( + flaverr({ + name: 'UsageError', + code: err.code, + details: err.details, + message: + 'Invalid criteria.\n'+ + 'Details:\n'+ + ' '+err.details+'\n' + }, omen) + ); + + case 'E_NOOP': + // Determine the appropriate no-op result. + // If `fetch` meta key is set, use `[]`-- otherwise use `undefined`. + var noopResult = undefined; + if (query.meta && query.meta.fetch) { + noopResult = []; + }//>- + return done(undefined, noopResult); + + default: + return done(err); + } + }//fi + + // Bail now if archiving has been disabled. + if (!WLModel.archiveModelIdentity) { + return done(flaverr({ + name: 'UsageError', + message: 'Since the `archiveModelIdentity` setting was explicitly disabled, .archive() cannot be used.' + }, omen)); + }//• + + // Look up the Archive model. 
+ var Archive = WLModel.archiveModelIdentity; + try { + Archive = getModel(WLModel.archiveModelIdentity, orm); + } catch (err) { return done(err); }//fi + + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: pass through the `omen` in the metadata. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Maybe refactor this into more-generic `.move()` and/or + // `.copy()` methods for migrating data between models/datastores. + // Then just leverage those methods here in `.archive()`. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌─┐┬┌┐┌┌┬┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ ├┤ ││││ ││ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ └ ┴┘└┘─┴┘ └─┘└└─┘└─┘┴└─ ┴ + // Note that we pass in `meta` here, as well as in the other queries + // below. (This ensures we're on the same db connection, provided one + // was explicitly passed in!) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // WARNING: + // + // Before proceeding with calling an additional model method that relies + // on criteria other than the primary .destroy(), we'll want to back up a + // copy of our s2q's criteria (`query.criteria`). + // + // This is important because, in an effort to improve performance, + // Waterline methods destructively mutate criteria when forging queries + // for use in the adapter(s). Since we'll be reusing criteria, we need + // to insulate ourselves from those destructive changes in case there are + // custom column names involved. (e.g. Mongo's `_id``) + // + // > While the criteria might contain big crazy stuff for comparing with + // > type:ref attributes, a deep clone is the best option we have. + // + // FUTURE: in s2q forge logic, for "archive" method, reject with an error + // if deep refs (non-JSON-serializable data) are discovered in criteria. 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + var s2qCriteriaForFind = _.cloneDeep(query.criteria); + WLModel.find(s2qCriteriaForFind, function _afterFinding(err, foundRecords) { + if (err) { return done(err); } + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: as an optimization, fetch records batch-at-a-time + // using .stream() instead of just doing a naïve `.find()`. + // (This would allow you to potentially archive millions of records + // at a time without overflowing RAM.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + var archives = []; + _.each(foundRecords, function(record){ + archives.push({ + originalRecord: record, + originalRecordId: record[WLModel.primaryKey], + fromModel: WLModel.identity, + }); + });//∞ + + // ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐┌─┐┌─┐┌─┐┬ ┬ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ │ ├┬┘├┤ ├─┤ │ ├┤ ├┤ ├─┤│ ├─┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘└─┘┴ ┴└─┘┴ ┴ └─┘└└─┘└─┘┴└─ ┴ + Archive.createEach(archives, function _afterCreatingEach(err) { + if (err) { return done(err); } + + // Remove the `limit`, `skip`, `sort`, `select`, and `omit` clauses so + // that our `destroy` query is valid. + // (This is because they were automatically attached above in the forging.) 
+ delete query.criteria.limit; + delete query.criteria.skip; + delete query.criteria.sort; + delete query.criteria.select; + delete query.criteria.omit; + + // ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ ││├┤ └─┐ │ ├┬┘│ │└┬┘ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ └─┘└└─┘└─┘┴└─ ┴ + WLModel.destroy(query.criteria, function _afterDestroying(err) { + if (err) { return done(err); } + + if (query.meta&&query.meta.fetch){ + return done(undefined, foundRecords); + } + else { + return done(); + } + + }, query.meta);// + }, query.meta);// + }, query.meta);// + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + + );// + +}; diff --git a/lib/waterline/methods/avg.js b/lib/waterline/methods/avg.js new file mode 100644 index 000000000..e4029a40f --- /dev/null +++ b/lib/waterline/methods/avg.js @@ -0,0 +1,310 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var forgeAdapterError = require('../utils/query/forge-adapter-error'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var forgeStageThreeQuery = require('../utils/query/forge-stage-three-query'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('avg'); + + + +/** + * avg() + * + * Get the arithmetic mean of the specified attribute across all matching records. + * + * ``` + * // The average balance of bank accounts owned by people between + * // the ages of 35 and 45. 
+ * BankAccount.avg('balance').where({ + * ownerAge: { '>=': 35, '<=': 45 } + * }).exec(function (err, averageBalance){ + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {String?} numericAttrName + * + * @param {Dictionary?} criteria + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. + * + * @param {Dictionary} moreQueryKeys + * For internal use. + * (A dictionary of query keys.) + * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {String} numericAttrName + * The name of a numeric attribute. + * (Must be declared as `type: 'number'`.) + * + * @qkey {Dictionary?} criteria + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function avg( /* numericAttrName?, criteria?, explicitCbMaybe?, meta?, moreQueryKeys? */ ) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(avg); + + + // Build query w/ initial, universal keys. 
+ var query = { + method: 'avg', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + // + // > Note that we define `args` to minimize the chance of this "variadics" code + // > introducing any unoptimizable performance problems. For details, see: + // > https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#32-leaking-arguments + // > •=> `.length` is just an integer, this doesn't leak the `arguments` object itself + // > •=> `i` is always valid index in the arguments object + var args = new Array(arguments.length); + for (var i = 0; i < args.length; ++i) { + args[i] = arguments[i]; + } + + // • avg(numericAttrName, explicitCbMaybe, ..., ...) + if (args.length >= 2 && _.isFunction(args[1])) { + query.numericAttrName = args[0]; + explicitCbMaybe = args[1]; + query.meta = args[2]; + if (args[3]) { _.extend(query, args[3]); } + } + // • avg(numericAttrName, criteria, ..., ..., ...) + else { + query.numericAttrName = args[0]; + query.criteria = args[1]; + explicitCbMaybe = args[2]; + query.meta = args[3]; + if (args[4]) { _.extend(query, args[4]); } + } + + // Due to the somewhat unusual variadic usage of this method, and because + // parley doesn't enforce this itself for performance reasons, make sure the + // explicit callback argument is a function, if provided. 
+ if (explicitCbMaybe !== undefined && !_.isFunction(explicitCbMaybe)) { + throw flaverr({ + name: 'UsageError', + message: + '`.avg()` received an explicit callback function argument... but it '+ + 'was not a function: '+explicitCbMaybe + }, omen); + }//• + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_NUMERIC_ATTR_NAME': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The numeric attr name (i.e. first argument) to `.avg()` should '+ + 'be the name of an attribute in this model which is defined with `type: \'number\'`.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + // ^ custom override for the standard usage error. Note that we use `.details` to get at + // the underlying, lower-level error message (instead of logging redundant stuff from + // the envelope provided by the default error msg.) + + // If the criteria wouldn't match anything, that'd basically be like dividing by zero, which is impossible. 
+ case 'E_NOOP': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Attempting to compute this average would be like dividing by zero, which is impossible.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_CRITERIA': + case 'E_INVALID_META': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: e.message + }, omen) + ); + // ^ when the standard usage error message is good enough as-is, without any further customization + + default: + return done(e); + // ^ when an internal, miscellaneous, or unexpected error occurs + } + } // >-• + + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + try { + query = forgeStageThreeQuery({ + stageTwoQuery: query, + identity: modelIdentity, + transformer: WLModel._transformer, + originalModels: orm.collections + }); + } catch (e) { return done(e); } + + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Grab the appropriate adapter method and call it. 
+ var adapter = WLModel._adapter; + if (!adapter.avg) { + return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.')); + } + + adapter.avg(WLModel.datastore, query, function _afterTalkingToAdapter(err, arithmeticMean) { + if (err) { + err = forgeAdapterError(err, omen, 'avg', modelIdentity, orm); + return done(err); + }//-• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Log a warning like the ones in `process-all-records` if + // the arithmeticMean sent back by the adapter turns out to be something + // other than a number (for example, the naive behavior of a MySQL adapter + // in circumstances where criteria does not match any records); i.e. + // ``` + // !_.isNumber(arithmeticMean) || arithmeticMean === Infinity || arithmeticMean === -Infinity || _.isNaN(arithmeticMean) + // ```` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + return done(undefined, arithmeticMean); + + });// + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. 
+ _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/count.js b/lib/waterline/methods/count.js new file mode 100644 index 000000000..86d11e5e4 --- /dev/null +++ b/lib/waterline/methods/count.js @@ -0,0 +1,249 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeAdapterError = require('../utils/query/forge-adapter-error'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var forgeStageThreeQuery = require('../utils/query/forge-stage-three-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('count'); + + +/** + * count() + * + * Get the number of matching records matching a criteria. + * + * ``` + * // The number of bank accounts with more than $32,000 in them. + * BankAccount.count().where({ + * balance: { '>': 32000 } + * }).exec(function(err, numBankAccounts) { + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Dictionary?} criteria + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. + * + * @param {Dictionary} moreQueryKeys + * For internal use. + * (A dictionary of query keys.) 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Dictionary?} criteria + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function count( /* criteria?, explicitCbMaybe?, meta?, moreQueryKeys? */ ) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(count); + + // Build query w/ initial, universal keys. + var query = { + method: 'count', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + // + // > Note that we define `args` to minimize the chance of this "variadics" code + // > introducing any unoptimizable performance problems. 
For details, see: + // > https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#32-leaking-arguments + // > •=> `.length` is just an integer, this doesn't leak the `arguments` object itself + // > •=> `i` is always valid index in the arguments object + var args = new Array(arguments.length); + for (var i = 0; i < args.length; ++i) { + args[i] = arguments[i]; + } + + // • count(explicitCbMaybe, ..., ...) + if (args.length >= 1 && _.isFunction(args[0])) { + explicitCbMaybe = args[0]; + query.meta = args[1]; + if (args[2]) { _.extend(query, args[2]); } + } + // • count(criteria, ..., ..., ...) + else { + query.criteria = args[0]; + explicitCbMaybe = args[1]; + query.meta = args[2]; + if (args[3]) { _.extend(query, args[3]); } + } + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) 
+ return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... + // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_CRITERIA': + case 'E_INVALID_META': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: e.message + }, omen) + ); + // ^ when the standard usage error message is good enough as-is, without any further customization + + case 'E_NOOP': + return done(undefined, 0); + + default: + return done(e); + // ^ when an internal, miscellaneous, or unexpected error occurs + } + } // >-• + + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + try { + query = forgeStageThreeQuery({ + stageTwoQuery: query, + identity: modelIdentity, + transformer: WLModel._transformer, + originalModels: orm.collections + }); + } catch (e) { return done(e); } + + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Grab the appropriate adapter method and call it. 
+ var adapter = WLModel._adapter; + if (!adapter.count) { + return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.')); + } + + adapter.count(WLModel.datastore, query, function _afterTalkingToAdapter(err, numRecords) { + if (err) { + err = forgeAdapterError(err, omen, 'count', modelIdentity, orm); + return done(err); + } + + return done(undefined, numRecords); + + });// + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/create-each.js b/lib/waterline/methods/create-each.js new file mode 100644 index 000000000..324fca180 --- /dev/null +++ b/lib/waterline/methods/create-each.js @@ -0,0 +1,470 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var async = require('async'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeAdapterError = require('../utils/query/forge-adapter-error'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var forgeStageThreeQuery = require('../utils/query/forge-stage-three-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var processAllRecords = require('../utils/query/process-all-records'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('createEach'); + + + +/** + * createEach() + * + * Create a set of records in the database. 
+ * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Array?} newRecords + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. + * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function createEach( /* newRecords?, explicitCbMaybe?, meta? */ ) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(createEach); + + // Build query w/ initial, universal keys. + var query = { + method: 'createEach', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + + // The `explicitCbMaybe` callback, if one was provided. 
+ var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback) + // + // > Note that we define `args` so that we can insulate access + // > to the arguments provided to this function. + var args = arguments; + (function _handleVariadicUsage(){ + + // The metadata container, if one was provided. + var _meta; + + + // First argument always means one thing: the array of new records. + // + // • createEach(newRecords, ...) + + // • createEach(..., explicitCbMaybe, _meta) + query.newRecords = args[0]; + explicitCbMaybe = args[1]; + _meta = args[2]; + + // Fold in `_meta`, if relevant. + if (!_.isUndefined(_meta)) { + query.meta = _meta; + } // >- + + })(); + + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not specified, then build a new Deferred and bail now. + // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_NEW_RECORDS': + case 'E_INVALID_META': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + message: e.message, + details: e.details, + }, omen) + ); + // ^ when the standard usage error message is good enough as-is, without any further customization + + case 'E_NOOP': + // Determine the appropriate no-op result. + // If `fetch` meta key is set, use `[]`-- otherwise use `undefined`. + var noopResult = undefined; + if (query.meta && query.meta.fetch) { + noopResult = []; + }//>- + return done(undefined, noopResult); + + default: + return done(e); + // ^ when an internal, miscellaneous, or unexpected error occurs + } + } // >-• + + // console.log('Successfully forged s2q ::', require('util').inspect(query, {depth:null})); + + // ╔╗ ╔═╗╔═╗╔═╗╦═╗╔═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ╠╩╗║╣ ╠╣ ║ ║╠╦╝║╣ │ ├┬┘├┤ ├─┤ │ ├┤ │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ╚═╝╚═╝╚ ╚═╝╩╚═╚═╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + // Determine what to do about running "before" lifecycle callbacks + (function _maybeRunBeforeLC(proceed){ + + // If the `skipAllLifecycleCallbacks` meta key was enabled, then don't run this LC. 
+ if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, query); + }//-• + + // If there is no relevant "before" lifecycle callback, then just proceed. + if (!_.has(WLModel._callbacks, 'beforeCreate')) { + return proceed(undefined, query); + }//-• + + // IWMIH, run the "before" lifecycle callback on each new record. + async.each(query.newRecords, WLModel._callbacks.beforeCreate, function(err) { + if (err) { return proceed(err); } + return proceed(undefined, query); + }); + + })(function _afterPotentiallyRunningBeforeLC(err, query) { + if (err) { + return done(err); + } + + + // ╔═╗╦ ╦╔═╗╔═╗╦╔═ ┌─┐┌─┐┬─┐ ┌─┐┌┐┌┬ ┬ + // ║ ╠═╣║╣ ║ ╠╩╗ ├┤ │ │├┬┘ ├─┤│││└┬┘ + // ╚═╝╩ ╩╚═╝╚═╝╩ ╩ └ └─┘┴└─ ┴ ┴┘└┘ ┴ + // ┌─┐┌─┐┬ ┬ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ ┬─┐┌─┐┌─┐┌─┐┌┬┐┌─┐ + // │ │ ││ │ ├┤ │ │ ││ ││││ ├┬┘├┤ └─┐├┤ │ └─┐ + // └─┘└─┘┴─┘┴─┘└─┘└─┘ ┴ ┴└─┘┘└┘ ┴└─└─┘└─┘└─┘ ┴ └─┘ + // Also removes them from the newRecords before sending to the adapter. + var allCollectionResets = []; + + _.each(query.newRecords, function _eachRecord(record) { + // Hold the individual resets + var reset = {}; + + _.each(WLModel.attributes, function _eachKnownAttrDef(attrDef, attrName) { + + if (attrDef.collection) { + // Only create a reset if the value isn't an empty array. If the value + // is an empty array there isn't any resetting to do. + if (record[attrName].length) { + reset[attrName] = record[attrName]; + } + + // Remove the collection value from the newRecord because the adapter + // doesn't need to do anything during the initial create. + delete record[attrName]; + } + });// + + allCollectionResets.push(reset); + });// + + // Hold a variable for the queries `meta` property that could possibly be + // changed by us later on. + var modifiedMeta; + + // If any collection resets were specified, force `fetch: true` (meta key) + // so that the adapter will send back the records and we can use them below + // in order to call `resetCollection()`. 
+ var anyActualCollectionResets = _.any(allCollectionResets, function (reset){ + return _.keys(reset).length > 0; + }); + if (anyActualCollectionResets) { + // Build a modified shallow clone of the originally-provided `meta` + // that also has `fetch: true`. + modifiedMeta = _.extend({}, query.meta || {}, { fetch: true }); + }//>- + + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // Now, destructively forge this S2Q into a S3Q. + try { + query = forgeStageThreeQuery({ + stageTwoQuery: query, + identity: modelIdentity, + transformer: WLModel._transformer, + originalModels: orm.collections + }); + } catch (e) { return done(e); } + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Grab the appropriate adapter method and call it. + var adapter = WLModel._adapter; + if (!adapter.createEach) { + return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.')); + } + + // Allow the query to possibly use the modified meta + query.meta = modifiedMeta || query.meta; + + // console.log('Successfully forged S3Q ::', require('util').inspect(query, {depth:null})); + adapter.createEach(WLModel.datastore, query, function(err, rawAdapterResult) { + if (err) { + err = forgeAdapterError(err, omen, 'createEach', modelIdentity, orm); + return done(err); + }//-• + + // ╔═╗╔╦╗╔═╗╔═╗ ╔╗╔╔═╗╦ ╦ ┬ ┬┌┐┌┬ ┌─┐┌─┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬ + // ╚═╗ ║ ║ ║╠═╝ ║║║║ ║║║║ │ │││││ ├┤ └─┐└─┐ ╠╣ ║╣ ║ ║ ╠═╣ │││├┤ │ ├─┤ ├┴┐├┤ └┬┘ + // ╚═╝ ╩ ╚═╝╩ ╝╚╝╚═╝╚╩╝ooo └─┘┘└┘┴─┘└─┘└─┘└─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴ + // ┬ ┬┌─┐┌─┐ ┌─┐┌─┐┌┬┐ ┌┬┐┌─┐ ┌┬┐┬─┐┬ ┬┌─┐ + // │││├─┤└─┐ └─┐├┤ │ │ │ │ │ ├┬┘│ │├┤ + // └┴┘┴ ┴└─┘ └─┘└─┘ ┴ ┴ └─┘ ┴ ┴└─└─┘└─┘ + // If `fetch` was not enabled, return. 
+ var fetch = modifiedMeta || (_.has(query.meta, 'fetch') && query.meta.fetch); + if (!fetch) { + + // > Note: This `if` statement is a convenience, for cases where the result from + // > the adapter may have been coerced from `undefined` to `null` automatically. + // > (we want it to be `undefined` still, for consistency) + if (_.isNull(rawAdapterResult)) { + return done(); + }//-• + + if (!_.isUndefined(rawAdapterResult)) { + console.warn('\n'+ + 'Warning: Unexpected behavior in database adapter:\n'+ + 'Since `fetch` is NOT enabled, this adapter (for datastore `'+WLModel.datastore+'`)\n'+ + 'should NOT have sent back anything as the 2nd argument when triggering the callback\n'+ + 'from its `createEach` method. But it did -- which is why this warning is being displayed:\n'+ + 'to help avoid confusion and draw attention to the bug. Specifically, got:\n'+ + util.inspect(rawAdapterResult, {depth:5})+'\n'+ + '(Ignoring it and proceeding anyway...)'+'\n' + ); + }//>- + + return done(); + + }//-• + + + // IWMIH then we know that `fetch: true` meta key was set, and so the + // adapter should have sent back an array. + + // ╔╦╗╦═╗╔═╗╔╗╔╔═╗╔═╗╔═╗╦═╗╔╦╗ ┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐ ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐ + // ║ ╠╦╝╠═╣║║║╚═╗╠╣ ║ ║╠╦╝║║║ ├─┤ ││├─┤├─┘ │ ├┤ ├┬┘ ├┬┘├┤ └─┐│ ││ │ + // ╩ ╩╚═╩ ╩╝╚╝╚═╝╚ ╚═╝╩╚═╩ ╩ ┴ ┴─┴┘┴ ┴┴ ┴ └─┘┴└─ ┴└─└─┘└─┘└─┘┴─┘┴ + // Attempt to convert the records' column names to attribute names. + var transformationErrors = []; + var transformedRecords = []; + _.each(rawAdapterResult, function(record) { + var transformedRecord; + try { + transformedRecord = WLModel._transformer.unserialize(record); + } catch (e) { + transformationErrors.push(e); + } + + transformedRecords.push(transformedRecord); + }); + + if (transformationErrors.length > 0) { + return done(new Error( + 'Encountered '+transformationErrors.length+' error(s) processing the record(s) sent back '+ + 'from the adapter-- specifically, when converting column names back to attribute names. 
'+ + 'Details: '+ + util.inspect(transformationErrors,{depth:5})+'' + )); + }//-• + + // Check the record to verify compliance with the adapter spec, + // as well as any issues related to stale data that might not have been + // been migrated to keep up with the logical schema (`type`, etc. in + // attribute definitions). + try { + processAllRecords(transformedRecords, query.meta, WLModel.identity, orm); + } catch (e) { return done(e); } + + + // ┌─┐┌─┐┬ ┬ ╦═╗╔═╗╔═╗╦ ╔═╗╔═╗╔═╗ ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗╦╔═╗╔╗╔ ┌─┐┌─┐┬─┐ + // │ ├─┤│ │ ╠╦╝║╣ ╠═╝║ ╠═╣║ ║╣ ║ ║ ║║ ║ ║╣ ║ ║ ║║ ║║║║ ├┤ │ │├┬┘ + // └─┘┴ ┴┴─┘┴─┘ ╩╚═╚═╝╩ ╩═╝╩ ╩╚═╝╚═╝ ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ ╩╚═╝╝╚╝ └ └─┘┴└─ + // ┌─┐─┐ ┬┌─┐┬ ┬┌─┐┬┌┬┐┬ ┬ ┬ ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐┌┬┐ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ + // ├┤ ┌┴┬┘├─┘│ ││ │ │ │ └┬┘───└─┐├─┘├┤ │ │├┤ │├┤ ││ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││└─┐ + // └─┘┴ └─┴ ┴─┘┴└─┘┴ ┴ ┴─┘┴ └─┘┴ └─┘└─┘┴└ ┴└─┘─┴┘ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘└─┘ + var argsForEachReplaceOp = []; + _.each(transformedRecords, function (record, idx) { + + // Grab the dictionary of collection resets corresponding to this record. + var reset = allCollectionResets[idx]; + + // If there are no resets, then there's no need to build up a replaceCollection() query. + if (_.keys(reset).length === 0) { + return; + }//-• + + // Otherwise, build an array of arrays, where each sub-array contains + // the first three arguments that need to be passed in to `replaceCollection()`. 
+ var targetIds = [ record[WLModel.primaryKey] ]; + _.each(_.keys(reset), function (collectionAttrName) { + + // (targetId(s), collectionAttrName, associatedPrimaryKeys) + argsForEachReplaceOp.push([ + targetIds, + collectionAttrName, + reset[collectionAttrName] + ]); + + });// + });// + + async.each(argsForEachReplaceOp, function _eachReplaceCollectionOp(argsForReplace, next) { + + // Note that, by using the same `meta`, we use same db connection + // (if one was explicitly passed in, anyway) + WLModel.replaceCollection(argsForReplace[0], argsForReplace[1], argsForReplace[2], function(err) { + if (err) { return next(err); } + return next(); + }, query.meta); + + },// ~∞%° + function _afterReplacingAllCollections(err) { + if (err) { + return done(err); + } + + // ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ╠═╣╠╣ ║ ║╣ ╠╦╝ │ ├┬┘├┤ ├─┤ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ╩ ╩╚ ╩ ╚═╝╩╚═ └─┘┴└─└─┘┴ ┴ ┴ └─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + (function _maybeRunAfterLC(proceed){ + + // If the `skipAllLifecycleCallbacks` meta flag was set, don't run the LC. + if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, transformedRecords); + }//-• + + // If no afterCreate callback defined, just proceed. + if (!_.has(WLModel._callbacks, 'afterCreate')) { + return proceed(undefined, transformedRecords); + }//-• + + async.each(transformedRecords, WLModel._callbacks.afterCreate, function(err) { + if (err) { + return proceed(err); + } + return proceed(undefined, transformedRecords); + }); + + })(function _afterPotentiallyRunningAfterLC(err, transformedRecords) { + if (err) { return done(err); } + + // Return the new record. + return done(undefined, transformedRecords); + + });// + + });// + });// + + }); + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. 
+ _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/create.js b/lib/waterline/methods/create.js new file mode 100644 index 000000000..c6c60742a --- /dev/null +++ b/lib/waterline/methods/create.js @@ -0,0 +1,400 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var async = require('async'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeAdapterError = require('../utils/query/forge-adapter-error'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var forgeStageThreeQuery = require('../utils/query/forge-stage-three-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var processAllRecords = require('../utils/query/process-all-records'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('create'); + + + +/** + * create() + * + * Create a new record using the specified initial values. + * + * ``` + * // Create a new bank account with a half million dollars, + * // and associate it with the logged in user. + * BankAccount.create({ + * balance: 500000, + * owner: req.session.userId + * }) + * .exec(function(err) { + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Dictionary?} newRecord + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Dictionary?} newRecord + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function create(newRecord, explicitCbMaybe, metaContainer) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(create); + + // Build initial query. + var query = { + method: 'create', + using: modelIdentity, + newRecord: newRecord, + meta: metaContainer + }; + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // FUTURE: when time allows, update this to match the "VARIADICS" format + // used in the other model methods. 
+ + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not specified, then build a new Deferred and bail now. + // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... + + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + // + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + // This ensures a normalized format. 
+ try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + case 'E_INVALID_NEW_RECORD': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid new record.\n'+ + 'Details:\n'+ + ' '+e.details+'\n' + }, omen) + ); + + default: + return done(e); + } + } + + + // ╔╗ ╔═╗╔═╗╔═╗╦═╗╔═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ╠╩╗║╣ ╠╣ ║ ║╠╦╝║╣ │ ├┬┘├┤ ├─┤ │ ├┤ │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ╚═╝╚═╝╚ ╚═╝╩╚═╚═╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + // Determine what to do about running "before" lifecycle callbacks + (function _maybeRunBeforeLC(proceed){ + + // If the `skipAllLifecycleCallbacks` meta key was enabled, then don't run this LC. + if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, query); + }//-• + + // If there is no relevant "before" lifecycle callback, then just proceed. + if (!_.has(WLModel._callbacks, 'beforeCreate')) { + return proceed(undefined, query); + }//-• + + // IWMIH, run the "before" lifecycle callback. + WLModel._callbacks.beforeCreate(query.newRecord, function(err){ + if (err) { return proceed(err); } + return proceed(undefined, query); + }); + + })(function _afterPotentiallyRunningBeforeLC(err, query) { + if (err) { + return done(err); + } + + // ╔═╗╦ ╦╔═╗╔═╗╦╔═ ┌─┐┌─┐┬─┐ ┌─┐┌┐┌┬ ┬ + // ║ ╠═╣║╣ ║ ╠╩╗ ├┤ │ │├┬┘ ├─┤│││└┬┘ + // ╚═╝╩ ╩╚═╝╚═╝╩ ╩ └ └─┘┴└─ ┴ ┴┘└┘ ┴ + // ┌─┐┌─┐┬ ┬ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ ┬─┐┌─┐┌─┐┌─┐┌┬┐┌─┐ + // │ │ ││ │ ├┤ │ │ ││ ││││ ├┬┘├┤ └─┐├┤ │ └─┐ + // └─┘└─┘┴─┘┴─┘└─┘└─┘ ┴ ┴└─┘┘└┘ ┴└─└─┘└─┘└─┘ ┴ └─┘ + // Also removes them from the newRecord before sending to the adapter. + var collectionResets = {}; + _.each(WLModel.attributes, function _eachKnownAttrDef(attrDef, attrName) { + if (attrDef.collection) { + // Only track a reset if the value isn't an empty array. 
If the value + // is an empty array there isn't any resetting to do. + if (query.newRecord[attrName].length > 0) { + collectionResets[attrName] = query.newRecord[attrName]; + } + + // Remove the collection value from the newRecord because the adapter + // doesn't need to do anything during the initial create. + delete query.newRecord[attrName]; + } + });// + + // Hold a variable for the queries `meta` property that could possibly be + // changed by us later on. + var modifiedMeta; + + // If any collection resets were specified, force `fetch: true` (meta key) + // so that we can use it below. + if (_.keys(collectionResets).length > 0) { + // Build a modified shallow clone of the originally-provided `meta` + // that also has `fetch: true`. + modifiedMeta = _.extend({}, query.meta || {}, { fetch: true }); + }//>- + + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // Now, destructively forge this S2Q into a S3Q. + try { + query = forgeStageThreeQuery({ + stageTwoQuery: query, + identity: modelIdentity, + transformer: WLModel._transformer, + originalModels: orm.collections + }); + } catch (e) { return done(e); } + + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Grab the appropriate adapter method and call it. + var adapter = WLModel._adapter; + if (!adapter.create) { + return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.')); + } + + // Allow the query to possibly use the modified meta + query.meta = modifiedMeta || query.meta; + + // And call the adapter method. 
+ adapter.create(WLModel.datastore, query, function _afterTalkingToAdapter(err, rawAdapterResult) { + if (err) { + err = forgeAdapterError(err, omen, 'create', modelIdentity, orm); + return done(err); + }//-• + + + // ╔═╗╔╦╗╔═╗╔═╗ ╔╗╔╔═╗╦ ╦ ┬ ┬┌┐┌┬ ┌─┐┌─┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬ + // ╚═╗ ║ ║ ║╠═╝ ║║║║ ║║║║ │ │││││ ├┤ └─┐└─┐ ╠╣ ║╣ ║ ║ ╠═╣ │││├┤ │ ├─┤ ├┴┐├┤ └┬┘ + // ╚═╝ ╩ ╚═╝╩ ╝╚╝╚═╝╚╩╝ooo └─┘┘└┘┴─┘└─┘└─┘└─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴ + // ┬ ┬┌─┐┌─┐ ┌─┐┌─┐┌┬┐ ┌┬┐┌─┐ ┌┬┐┬─┐┬ ┬┌─┐ + // │││├─┤└─┐ └─┐├┤ │ │ │ │ │ ├┬┘│ │├┤ + // └┴┘┴ ┴└─┘ └─┘└─┘ ┴ ┴ └─┘ ┴ ┴└─└─┘└─┘ + // If `fetch` was not enabled, return. + var fetch = modifiedMeta || (_.has(query.meta, 'fetch') && query.meta.fetch); + if (!fetch) { + + // > Note: This `if` statement is a convenience, for cases where the result from + // > the adapter may have been coerced from `undefined` to `null` automatically. + // > (we want it to be `undefined` still, for consistency) + if (_.isNull(rawAdapterResult)) { + return done(); + }//-• + + if (!_.isUndefined(rawAdapterResult)) { + console.warn('\n'+ + 'Warning: Unexpected behavior in database adapter:\n'+ + 'Since `fetch` is NOT enabled, this adapter (for datastore `'+WLModel.datastore+'`)\n'+ + 'should NOT have sent back anything as the 2nd argument when triggering the callback\n'+ + 'from its `create` method. But it did -- which is why this warning is being displayed:\n'+ + 'to help avoid confusion and draw attention to the bug. Specifically, got:\n'+ + util.inspect(rawAdapterResult, {depth:5})+'\n'+ + '(Ignoring it and proceeding anyway...)'+'\n' + ); + }//>- + + return done(); + + }//-• + + + // IWMIH then we know that `fetch: true` meta key was set, and so the + // adapter should have sent back an array. 
+ + // Sanity check: + if (!_.isObject(rawAdapterResult) || _.isArray(rawAdapterResult) || _.isFunction(rawAdapterResult)) { + return done(new Error('Consistency violation: expected `create` adapter method to send back the created record b/c `fetch: true` was enabled. But instead, got: ' + util.inspect(rawAdapterResult, {depth:5})+'')); + } + + // ╔╦╗╦═╗╔═╗╔╗╔╔═╗╔═╗╔═╗╦═╗╔╦╗ ┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐ ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐ + // ║ ╠╦╝╠═╣║║║╚═╗╠╣ ║ ║╠╦╝║║║ ├─┤ ││├─┤├─┘ │ ├┤ ├┬┘ ├┬┘├┤ └─┐│ ││ │ + // ╩ ╩╚═╩ ╩╝╚╝╚═╝╚ ╚═╝╩╚═╩ ╩ ┴ ┴─┴┘┴ ┴┴ ┴ └─┘┴└─ ┴└─└─┘└─┘└─┘┴─┘┴ + // Attempt to convert the record's column names to attribute names. + var transformedRecord; + try { + transformedRecord = WLModel._transformer.unserialize(rawAdapterResult); + } catch (e) { return done(e); } + + // Check the record to verify compliance with the adapter spec, + // as well as any issues related to stale data that might not have been + // been migrated to keep up with the logical schema (`type`, etc. in + // attribute definitions). 
+ try { + processAllRecords([ transformedRecord ], query.meta, modelIdentity, orm); + } catch (e) { return done(e); } + + + // ┌─┐┌─┐┬ ┬ ╦═╗╔═╗╔═╗╦ ╔═╗╔═╗╔═╗ ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗╦╔═╗╔╗╔ ┌─┐┌─┐┬─┐ + // │ ├─┤│ │ ╠╦╝║╣ ╠═╝║ ╠═╣║ ║╣ ║ ║ ║║ ║ ║╣ ║ ║ ║║ ║║║║ ├┤ │ │├┬┘ + // └─┘┴ ┴┴─┘┴─┘ ╩╚═╚═╝╩ ╩═╝╩ ╩╚═╝╚═╝ ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ ╩╚═╝╝╚╝ └ └─┘┴└─ + // ┌─┐─┐ ┬┌─┐┬ ┬┌─┐┬┌┬┐┬ ┬ ┬ ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐┌┬┐ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ + // ├┤ ┌┴┬┘├─┘│ ││ │ │ │ └┬┘───└─┐├─┘├┤ │ │├┤ │├┤ ││ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││└─┐ + // └─┘┴ └─┴ ┴─┘┴└─┘┴ ┴ ┴─┘┴ └─┘┴ └─┘└─┘┴└ ┴└─┘─┴┘ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘└─┘ + var targetId = transformedRecord[WLModel.primaryKey]; + async.each(_.keys(collectionResets), function _eachReplaceCollectionOp(collectionAttrName, next) { + + WLModel.replaceCollection(targetId, collectionAttrName, collectionResets[collectionAttrName], function(err){ + if (err) { return next(err); } + return next(); + }, query.meta); + + },// ~∞%° + function _afterReplacingAllCollections(err) { + if (err) { return done(err); } + + // ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ╠═╣╠╣ ║ ║╣ ╠╦╝ │ ├┬┘├┤ ├─┤ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ╩ ╩╚ ╩ ╚═╝╩╚═ └─┘┴└─└─┘┴ ┴ ┴ └─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + (function _maybeRunAfterLC(proceed){ + + // If the `skipAllLifecycleCallbacks` meta flag was set, don't run the LC. + if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, transformedRecord); + }//-• + + // If no afterCreate callback defined, just proceed. + if (!_.has(WLModel._callbacks, 'afterCreate')) { + return proceed(undefined, transformedRecord); + }//-• + + // Otherwise, run it. + return WLModel._callbacks.afterCreate(transformedRecord, function(err) { + if (err) { + return proceed(err); + } + + return proceed(undefined, transformedRecord); + }); + + })(function _afterPotentiallyRunningAfterLC(err, transformedRecord) { + if (err) { return done(err); } + + // Return the new record. 
+ return done(undefined, transformedRecord); + + });// + + });// + });// + });// + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/destroy-one.js b/lib/waterline/methods/destroy-one.js new file mode 100644 index 000000000..83120902f --- /dev/null +++ b/lib/waterline/methods/destroy-one.js @@ -0,0 +1,195 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('destroyOne'); + + +/** + * destroyOne() + * + * Destroy a single record that matches the specified criteria, returning + * the destroyed record. + * + * @experimental + * + * TODO: document further + */ + +module.exports = function destroyOne(criteria, explicitCbMaybe, metaContainer){ + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Potentially build an omen for use below. + var omenMaybe = flaverr.omen(destroyOne); + + // Build initial query. 
+ var query = { + method: 'destroyOne', + using: modelIdentity, + criteria: criteria, + meta: metaContainer + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // N/A + // (there are no out-of-order, optional arguments) + + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not specified, then build a new Deferred and bail now. + // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... 
+
+      //  ███████╗██╗  ██╗███████╗ ██████╗██╗   ██╗████████╗███████╗
+      //  ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║   ██║╚══██╔══╝██╔════╝
+      //  █████╗   ╚███╔╝ █████╗  ██║     ██║   ██║   ██║   █████╗
+      //  ██╔══╝   ██╔██╗ ██╔══╝  ██║     ██║   ██║   ██║   ██╔══╝
+      //  ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝   ██║   ███████╗
+      //  ╚══════╝╚═╝  ╚═╝╚══════╝ ╚═════╝ ╚═════╝    ╚═╝   ╚══════╝
+
+      //  ╔═╗╔═╗╦═╗╔═╗╔═╗  ┌─┐┌┬┐┌─┐┌─┐┌─┐  ┌┬┐┬ ┬┌─┐  ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+      //  ╠╣ ║ ║╠╦╝║ ╦║╣   └─┐ │ ├─┤│ ┬├┤    │ ││││ │  │─┼┐│ │├┤ ├┬┘└┬┘
+      //  ╚  ╚═╝╩╚═╚═╝╚═╝  └─┘ ┴ ┴ ┴└─┘└─┘   ┴ └┴┘└─┘  └─┘└└─┘└─┘┴└─ ┴
+      //
+      // Forge a stage 2 query (aka logical protostatement)
+      // This ensures a normalized format.
+
+      try {
+        forgeStageTwoQuery(query, orm);
+      } catch (err) {
+        switch (err.code) {
+          case 'E_INVALID_CRITERIA':
+            return done(
+              flaverr({
+                name: 'UsageError',
+                code: err.code,
+                details: err.details,
+                message:
+                  'Invalid criteria.\n'+
+                  'Details:\n'+
+                  '  '+err.details+'\n'
+              }, omenMaybe)
+            );
+
+          case 'E_NOOP':
+            // Since `destroyOne()` resolves with at most one record, the
+            // appropriate no-op result is always `undefined` (regardless of
+            // the `fetch` meta key, unlike `.destroy()`).
+            var noopResult = undefined;
+            return done(undefined, noopResult);
+
+          default:
+            return done(err);
+        }
+      }
+
+      // Do a .count() to ensure that there is no more than one matching record.
+      // FUTURE: Make this transactional, if supported by the underlying adapter.
+      var modifiedCriteriaForCount = _.omit(query.criteria, ['select', 'omit', 'limit', 'skip', 'sort']);
+      WLModel.count(modifiedCriteriaForCount, function _afterCounting(err, total) {
+        if (err) {
+          return done(err);
+        }
+
+        // If more than one matching record was found, then consider this an error.
+ if (total > 1) { + return done(flaverr({ + message: + 'Preventing `.'+query.method+'()`: found too many ('+total+') matching records.\n'+ + '\n'+ + 'Criteria used:\n'+ + '···\n'+ + util.inspect(modifiedCriteriaForCount,{depth:5})+'\n'+ + '···' + }, omenMaybe)); + }//-• + + // Build a modified shallow clone of the originally-provided `meta` from + // userland, but that also has `fetch: true`. + var modifiedMetaForDestroy = _.extend({}, query.meta || {}, { + fetch: true + }); + + var modifiedCriteriaForDestroy = _.omit(query.criteria, ['select', 'omit', 'limit', 'skip', 'sort']); + WLModel.destroy(modifiedCriteriaForDestroy, function _afterDestroying(err, affectedRecords) { + if (err) { + return done(err); + } + + // Note that we always get `affectedRecords` here because "fetch" is enabled. + return done(undefined, affectedRecords[0]); + + }, modifiedMetaForDestroy);//_∏_ + }, query.meta);//_∏_ + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. 
+ _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/destroy.js b/lib/waterline/methods/destroy.js new file mode 100644 index 000000000..31543dc92 --- /dev/null +++ b/lib/waterline/methods/destroy.js @@ -0,0 +1,576 @@ +/** + * Module Dependencies + */ + +var util = require('util'); +var async = require('async'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeAdapterError = require('../utils/query/forge-adapter-error'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var forgeStageThreeQuery = require('../utils/query/forge-stage-three-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var processAllRecords = require('../utils/query/process-all-records'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('destroy'); + + + +/** + * destroy() + * + * Destroy records that match the specified criteria. + * + * ``` + * // Destroy all bank accounts with more than $32,000 in them. + * BankAccount.destroy().where({ + * balance: { '>': 32000 } + * }).exec(function(err) { + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Dictionary?} criteria + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Dictionary?} criteria + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function destroy(/* criteria, explicitCbMaybe, metaContainer */) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(destroy); + + // Build initial query. + var query = { + method: 'destroy', + using: modelIdentity, + criteria: undefined, + meta: undefined + }; + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // FUTURE: when time allows, update this to match the "VARIADICS" format + // used in the other model methods. + + // The explicit callback, if one was provided. + var explicitCbMaybe; + + // Handle double meaning of first argument: + // + // • destroy(criteria, ...) + if (!_.isFunction(arguments[0])) { + query.criteria = arguments[0]; + explicitCbMaybe = arguments[1]; + query.meta = arguments[2]; + } + // • destroy(explicitCbMaybe, ...) 
+ else { + explicitCbMaybe = arguments[0]; + query.meta = arguments[1]; + } + + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not specified, then build a new Deferred and bail now. + // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... + + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + // + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + // This ensures a normalized format. 
+ try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + case 'E_INVALID_CRITERIA': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid criteria.\n'+ + 'Details:\n'+ + ' '+e.details+'\n' + }, omen) + ); + + case 'E_NOOP': + // Determine the appropriate no-op result. + // If `fetch` meta key is set, use `[]`-- otherwise use `undefined`. + // + // > Note that future versions might simulate output from the raw driver. + // > (e.g. `{ numRecordsDestroyed: 0 }`) + // > See: https://github.com/treelinehq/waterline-query-docs/blob/master/docs/results.md#destroy + var noopResult = undefined; + if (query.meta && query.meta.fetch) { + noopResult = []; + }//>- + return done(undefined, noopResult); + + default: + return done(e); + } + } + + // ╦ ╦╔═╗╔╗╔╔╦╗╦ ╔═╗ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ╠═╣╠═╣║║║ ║║║ ║╣ BEFORE │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ╩ ╩╩ ╩╝╚╝═╩╝╩═╝╚═╝ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + // Determine what to do about running any lifecycle callback. + (function _runBeforeLC(proceed) { + // If the `skipAllLifecycleCallbacks` meta flag was set, don't run the lifecycle callback. + if (query.meta && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, query); + } + + // If there is no relevant LC, then just proceed. + if (!_.has(WLModel._callbacks, 'beforeDestroy')) { + return proceed(undefined, query); + } + + // But otherwise, run it. + WLModel._callbacks.beforeDestroy(query.criteria, function (err){ + if (err) { return proceed(err); } + return proceed(undefined, query); + }); + + })(function _afterRunningBeforeLC(err, query) { + if (err) { + return done(err); + } + + // ┬ ┌─┐┌─┐┬┌─┬ ┬┌─┐ ┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐ + // │ │ ││ │├┴┐│ │├─┘ ├─┤ ││├─┤├─┘ │ ├┤ ├┬┘ + // ┴─┘└─┘└─┘┴ ┴└─┘┴ ┴ ┴─┴┘┴ ┴┴ ┴ └─┘┴└─ + // Look up the appropriate adapter to use for this model. + + // Get a reference to the adapter. 
+ var adapter = WLModel._adapter; + if (!adapter) { + // ^^One last sanity check to make sure the adapter exists-- again, for compatibility's sake. + return done(new Error('Consistency violation: Cannot find adapter for model (`' + modelIdentity + '`). This model appears to be using datastore `'+WLModel.datastore+'`, but the adapter for that datastore cannot be located.')); + } + + // Verify the adapter has a `destroy` method. + if (!adapter.destroy) { + return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `destroy` method.')); + } + + // If `cascade` is enabled, do an extra assertion... + if (query.meta && query.meta.cascade){ + + // First, a sanity check to ensure the adapter has a `find` method too. + if (!adapter.find) { + return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `find` method, but that method is mandatory to be able to use `cascade: true`.')); + } + + }//>- + + + + // ================================================================================ + // FUTURE: potentially bring this back (but also would need the `omit clause`) + // ================================================================================ + // // Before we get to forging again, save a copy of the stage 2 query's + // // `select` clause. We'll need this later on when processing the resulting + // // records, and if we don't copy it now, it might be damaged by the forging. + // // + // // > Note that we don't need a deep clone. + // // > (That's because the `select` clause is only 1 level deep.) 
+ // var s2QSelectClause = _.clone(query.criteria.select); + // ================================================================================ + + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // Now, destructively forge this S2Q into a S3Q. + try { + query = forgeStageThreeQuery({ + stageTwoQuery: query, + identity: modelIdentity, + transformer: WLModel._transformer, + originalModels: orm.collections + }); + } catch (e) { return done(e); } + + + // ┬┌─┐ ╔═╗╔═╗╔═╗╔═╗╔═╗╔╦╗╔═╗ ┌─┐┌┐┌┌─┐┌┐ ┬ ┌─┐┌┬┐ ┌┬┐┬ ┬┌─┐┌┐┌ + // │├┤ ║ ╠═╣╚═╗║ ╠═╣ ║║║╣ ├┤ │││├─┤├┴┐│ ├┤ ││ │ ├─┤├┤ │││ + // ┴└ ╚═╝╩ ╩╚═╝╚═╝╩ ╩═╩╝╚═╝ └─┘┘└┘┴ ┴└─┘┴─┘└─┘─┴┘┘ ┴ ┴ ┴└─┘┘└┘ + // ┌─┐┬┌┐┌┌┬┐ ╦╔╦╗╔═╗ ┌┬┐┌─┐ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬ + // ├┤ ││││ ││ ║ ║║╚═╗ │ │ │ ││├┤ └─┐ │ ├┬┘│ │└┬┘ + // └ ┴┘└┘─┴┘ ╩═╩╝╚═╝ ┴ └─┘ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ + (function _maybeFindIdsToDestroy(proceed) { + + // If `cascade` meta key is NOT enabled, then just proceed. + if (!query.meta || !query.meta.cascade) { + return proceed(); + } + + // Look up the ids of records that will be destroyed. + // (We need these because, later, since `cascade` is enabled, we'll need + // to empty out all of their associated collections.) + // + // > FUTURE: instead of doing this, consider forcing `fetch: true` in the + // > implementation of `.destroy()` when `cascade` meta key is enabled (mainly + // > for consistency w/ the approach used in createEach()/create()) + + // To do this, we'll grab the appropriate adapter method and call it with a stage 3 + // "find" query, using almost exactly the same QKs as in the incoming "destroy". + // The only tangible difference is that its criteria has a `select` clause so that + // records only contain the primary key field (by column name, of course.) 
+ var pkColumnName = WLModel.schema[WLModel.primaryKey].columnName; + if (!pkColumnName) { + return done(new Error('Consistency violation: model `' + WLModel.identity + '` schema has no primary key column name!')); + } + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // > Note: We have to look up the column name this way (instead of simply using the + // > getAttribute() utility) because it is currently only fully normalized on the + // > `schema` dictionary-- the model's attributes don't necessarily have valid, + // > normalized column names. For more context, see: + // > https://github.com/balderdashy/waterline/commit/19889b7ee265e9850657ec2b4c7f3012f213a0ae#commitcomment-20668097 + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + adapter.find(WLModel.datastore, { + method: 'find', + using: query.using, + criteria: { + where: query.criteria.where, + skip: query.criteria.skip, + limit: query.criteria.limit, + sort: query.criteria.sort, + select: [ pkColumnName ] + }, + meta: query.meta //<< this is how we know that the same db connection will be used + }, function _afterPotentiallyFindingIdsToDestroy(err, pRecords) { + if (err) { + err = forgeAdapterError(err, omen, 'find', modelIdentity, orm); + return proceed(err); + } + + // Slurp out just the array of ids (pk values), and send that back. + var ids = _.pluck(pRecords, pkColumnName); + return proceed(undefined, ids); + + });// + + })(function _afterPotentiallyLookingUpRecordsToCascade(err, idsOfRecordsBeingDestroyedMaybe) { + if (err) { return done(err); } + + + // Now we'll actually perform the `destroy`. + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Call the `destroy` adapter method. 
+ adapter.destroy(WLModel.datastore, query, function _afterTalkingToAdapter(err, rawAdapterResult) { + if (err) { + err = forgeAdapterError(err, omen, 'destroy', modelIdentity, orm); + return done(err); + }//-• + + + // ╦═╗╔═╗╦╔╗╔ ╔╦╗╔═╗╦ ╦╔╗╔ ╔╦╗╔═╗╔═╗╔╦╗╦═╗╦ ╦╔═╗╔╦╗╦╔═╗╔╗╔ ┌─┐┌┐┌┌┬┐┌─┐ + // ╠╦╝╠═╣║║║║ ║║║ ║║║║║║║ ║║║╣ ╚═╗ ║ ╠╦╝║ ║║ ║ ║║ ║║║║ │ ││││ │ │ │ + // ╩╚═╩ ╩╩╝╚╝ ═╩╝╚═╝╚╩╝╝╚╝ ═╩╝╚═╝╚═╝ ╩ ╩╚═╚═╝╚═╝ ╩ ╩╚═╝╝╚╝ └─┘┘└┘ ┴ └─┘ + // ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ ┌─ ┬ ┌─┐ ┌─┐┌─┐┌─┐┌─┐┌─┐┌┬┐┌─┐ ─┐ + // ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││└─┐ │ │ ├┤ │ ├─┤└─┐│ ├─┤ ││├┤ │ + // ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘└─┘ └─ ┴o└─┘o └─┘┴ ┴└─┘└─┘┴ ┴─┴┘└─┘ ─┘ + (function _maybeWipeAssociatedCollections(proceed) { + + // If `cascade` meta key is NOT enabled, then just proceed. + if (!query.meta || !query.meta.cascade) { + return proceed(); + } + + // Otherwise, then we should have the records we looked up before. + // (Here we do a quick sanity check.) + if (!_.isArray(idsOfRecordsBeingDestroyedMaybe)) { + return proceed(new Error('Consistency violation: Should have an array of records looked up before! But instead, got: '+util.inspect(idsOfRecordsBeingDestroyedMaybe, {depth: 5})+'')); + } + + // --• + // Now we'll clear out collections belonging to the specified records. + // (i.e. use `replaceCollection` to wipe them all out to be `[]`) + + + // First, if there are no target records, then gracefully bail without complaint. + // (i.e. this is a no-op) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Revisit this and verify that it's unnecessary. While this isn't a bad micro-optimization, + // its existence makes it seem like this wouldn't work or would cause a warning or something. And it + // really shouldn't be necessary. (It's doubtful that it adds any real tangible performance benefit anyway.) 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (idsOfRecordsBeingDestroyedMaybe.length === 0) { + return proceed(); + }//-• + + // Otherwise, we have work to do. + // + // Run .replaceCollection() for each associated collection of the targets, wiping them all out. + // (if n..m, this destroys junction records; otherwise, it's n..1, so this just nulls out the other side) + // + // > Note that we pass through `meta` here, ensuring that the same db connection is used, if possible. + async.each(_.keys(WLModel.attributes), function _eachAttribute(attrName, next) { + + var attrDef = WLModel.attributes[attrName]; + + // Skip everything other than collection attributes. + if (!attrDef.collection){ return next(); } + + // But otherwise, this is a collection attribute. So wipe it. + WLModel.replaceCollection(idsOfRecordsBeingDestroyedMaybe, attrName, [], function (err) { + if (err) { + if (err.name === 'PropagationError') { + return next(flaverr({ + name: err.name, + code: err.code, + message: 'Failed to run the "cascade" polyfill. Could not propagate the potential '+ + 'destruction of '+(idsOfRecordsBeingDestroyedMaybe.length===1?'this '+WLModel.identity+' record':('these '+idsOfRecordsBeingDestroyedMaybe.length+' '+WLModel.identity+' records'))+'.\n'+ + 'Details:\n'+ + ' '+err.message+'\n'+ + '\n'+ + 'This error originated from the fact that the "cascade" polyfill was enabled for this query.\n'+ + 'Tip: Try reordering your .destroy() calls.\n'+ + ' [?] 
See https://sailsjs.com/support for more help.\n' + }, omen)); + }//• + else { return next(err); } + }//• + + return next(); + + }, query.meta);// + + },// ~∞%° + function _afterwards(err) { + if (err) { return proceed(err); } + + return proceed(); + + });// + + })(function _afterPotentiallyWipingCollections(err) {// ~∞%° + if (err) { + return done(err); + } + + // ╔╦╗╦═╗╔═╗╔╗╔╔═╗╔═╗╔═╗╦═╗╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐ ┌┐ ┬ ┬┌┬┐ ┌─┐┌┐┌┬ ┬ ┬ ┬┌─┐ + // ║ ╠╦╝╠═╣║║║╚═╗╠╣ ║ ║╠╦╝║║║ ├┬┘├┤ │ │ │├┬┘ ││└─┐ ├┴┐│ │ │ │ │││││ └┬┘ │├┤ + // ╩ ╩╚═╩ ╩╝╚╝╚═╝╚ ╚═╝╩╚═╩ ╩ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘ooo└─┘└─┘ ┴ └─┘┘└┘┴─┘┴ ┴└ + // ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬ ┬ ┬┌─┐┌─┐ ┌─┐┌─┐┌┬┐ ┌┬┐┌─┐ ┌┬┐┬─┐┬ ┬┌─┐ + // ╠╣ ║╣ ║ ║ ╠═╣ │││├┤ │ ├─┤ ├┴┐├┤ └┬┘ │││├─┤└─┐ └─┐├┤ │ │ │ │ │ ├┬┘│ │├┤ + // ╚ ╚═╝ ╩ ╚═╝╩ ╩ ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴ └┴┘┴ ┴└─┘ └─┘└─┘ ┴ ┴ └─┘ ┴ ┴└─└─┘└─┘ + (function _maybeTransformRecords(proceed){ + + // If `fetch` was not enabled, return. + if (!_.has(query.meta, 'fetch') || query.meta.fetch === false) { + + // > Note: This `if` statement is a convenience, for cases where the result from + // > the adapter may have been coerced from `undefined` to `null` automatically. + // > (we want it to be `undefined` still, for consistency) + if (_.isNull(rawAdapterResult)) { + return proceed(); + }//-• + + if (!_.isUndefined(rawAdapterResult)) { + console.warn('\n'+ + 'Warning: Unexpected behavior in database adapter:\n'+ + 'Since `fetch` is NOT enabled, this adapter (for datastore `'+WLModel.datastore+'`)\n'+ + 'should NOT have sent back anything as the 2nd argument when triggering the callback\n'+ + 'from its `destroy` method. But it did!\n'+ + '\n'+ + '(Displaying this warning to help avoid confusion and draw attention to the bug.\n'+ + 'Specifically, got:\n'+ + util.inspect(rawAdapterResult, {depth:5})+'\n'+ + '(Ignoring it and proceeding anyway...)'+'\n' + ); + }//>- + + // Continue on. 
+ return proceed(); + + }//-• + + // IWMIH then we know that `fetch: true` meta key was set, and so the + // adapter should have sent back an array. + + // Verify that the raw result from the adapter is an array. + if (!_.isArray(rawAdapterResult)) { + return proceed(new Error( + 'Unexpected behavior in database adapter: Since `fetch: true` was enabled, this adapter '+ + '(for datastore `'+WLModel.datastore+'`) should have sent back an array of records as the 2nd argument when triggering '+ + 'the callback from its `destroy` method. But instead, got: '+util.inspect(rawAdapterResult, {depth:5})+'' + )); + }//-• + + // Attempt to convert the column names in each record back into attribute names. + var transformedRecords; + try { + transformedRecords = rawAdapterResult.map(function(record) { + return WLModel._transformer.unserialize(record); + }); + } catch (e) { return proceed(e); } + + // Check the records to verify compliance with the adapter spec, + // as well as any issues related to stale data that might not have been + // been migrated to keep up with the logical schema (`type`, etc. in + // attribute definitions). + try { + processAllRecords(transformedRecords, query.meta, modelIdentity, orm); + } catch (e) { return proceed(e); } + + // Now continue on. + return proceed(undefined, transformedRecords); + + })(function (err, transformedRecordsMaybe){ + if (err) { + return done(err); + } + + // ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ╠═╣╠╣ ║ ║╣ ╠╦╝ ││├┤ └─┐ │ ├┬┘│ │└┬┘ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ╩ ╩╚ ╩ ╚═╝╩╚═ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + // Run "after" lifecycle callback AGAIN and AGAIN- once for each record. + // ============================================================ + async.each(transformedRecordsMaybe, function _eachRecord(record, next) { + + // If the `skipAllLifecycleCallbacks` meta flag was set, don't run any of + // the methods. 
+ if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return next(); + } + + // Skip "after" lifecycle callback, if not defined. + if (!_.has(WLModel._callbacks, 'afterDestroy')) { + return next(); + } + + // Otherwise run it. + WLModel._callbacks.afterDestroy(record, function _afterMaybeRunningAfterDestroyForThisRecord(err) { + if (err) { + return next(err); + } + + return next(); + }); + + },// ~∞%° + function _afterIteratingOverRecords(err) { + if (err) { + return done(err); + } + + return done(undefined, transformedRecordsMaybe); + });//_∏_ (†: async.each() -- ran "after" lifecycle callback on each record) + });//_∏_ (†: after determining (and potentially transforming) the result from the adapter) + });//_∏_ (†: _afterPotentiallyWipingCollections) + });//_∏_ (adapter.destroy) + }); //_∏_ (†: after potentially looking up records to cascade) + }); //_∏_ (†: "before" LC) + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. 
+ _wlQueryInfo: query, + + }) + + + );// + +}; diff --git a/lib/waterline/methods/find-one.js b/lib/waterline/methods/find-one.js new file mode 100644 index 000000000..a68b5cec2 --- /dev/null +++ b/lib/waterline/methods/find-one.js @@ -0,0 +1,354 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var helpFind = require('../utils/query/help-find'); +var processAllRecords = require('../utils/query/process-all-records'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('findOne'); + + + +/** + * findOne() + * + * Find the record matching the specified criteria. + * + * ``` + * // Look up the bank account with exactly $34,986 in it. + * BankAccount.findOne().where({ + * balance: { '>': 34986 } + * }).exec(function(err, bankAccount) { + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Dictionary?} criteria + * + * @param {Dictionary} populates + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Dictionary?} criteria + * @qkey {Dictionary?} populates + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function findOne( /* criteria?, populates?, explicitCbMaybe?, meta? */ ) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callbacks below. + var omen = buildOmen(findOne); + + // Build query w/ initial, universal keys. + var query = { + method: 'findOne', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + // + // > Note that we define `args` so that we can insulate access + // > to the arguments provided to this function. + var args = arguments; + (function _handleVariadicUsage() { + // The metadata container, if one was provided. 
+ var _meta; + + + // Handle first argument: + // + // • findOne(criteria, ...) + query.criteria = args[0]; + + + // Handle double meaning of second argument: + // + // • findOne(..., populates, explicitCbMaybe, _meta) + var is2ndArgDictionary = (_.isObject(args[1]) && !_.isFunction(args[1]) && !_.isArray(args[1])); + if (is2ndArgDictionary) { + query.populates = args[1]; + explicitCbMaybe = args[2]; + _meta = args[3]; + } + // • findOne(..., explicitCbMaybe, _meta) + else { + explicitCbMaybe = args[1]; + _meta = args[2]; + } + + // Fold in `_meta`, if relevant. + if (_meta) { + query.meta = _meta; + } // >- + + })(); + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_CRITERIA': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid criteria.\n' + + 'Details:\n' + + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_POPULATES': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid populate(s).\n' + + 'Details:\n' + + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_NOOP': + return done(undefined, undefined); + + default: + return done(e); + } + } // >-• + + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╔╗ ╔═╗╔═╗╔═╗╦═╗╔═╗ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ├─┤├─┤│││ │││ ├┤ ╠╩╗║╣ ╠╣ ║ ║╠╦╝║╣ │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╚═╝╚═╝╚ ╚═╝╩╚═╚═╝ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + // Determine what to do about running any lifecycle callbacks + (function _maybeRunBeforeLC(proceed){ + + // If the `skipAllLifecycleCallbacks` meta key was enabled, then don't run this LC. 
+ if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, query); + }//-• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: This is where the `beforeFindOne()` lifecycle callback would go + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + return proceed(undefined, query); + + })(function _afterPotentiallyRunningBeforeLC(err, query) { + if (err) { + return done(err); + } + + // ================================================================================ + // FUTURE: potentially bring this back (but also would need the `omit clause`) + // ================================================================================ + // // Before we get to forging again, save a copy of the stage 2 query's + // // `select` clause. We'll need this later on when processing the resulting + // // records, and if we don't copy it now, it might be damaged by the forging. + // // + // // > Note that we don't need a deep clone. + // // > (That's because the `select` clause is only 1 level deep.) + // var s2QSelectClause = _.clone(query.criteria.select); + // ================================================================================ + + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Use `helpFind()` to forge stage 3 quer(y/ies) and then call the appropriate adapters' method(s). + // > Note: `helpFind` is responsible for running the `transformer`. + // > (i.e. so that column names are transformed back into attribute names) + helpFind(WLModel, query, omen, function _afterFetchingRecords(err, populatedRecords) { + if (err) { + return done(err); + }//-• + // console.log('result from operation runner:', record); + + // If more than one matching record was found, then consider this an error. 
+ if (populatedRecords.length > 1) { + return done(flaverr({ + message: + 'More than one matching record found for `'+ + modelIdentity[0].toUpperCase()+modelIdentity.substring(1)+ + '.findOne()`:\n'+ + '···\n'+ + _.pluck(populatedRecords, WLModel.primaryKey)+'\n'+ + '···\n'+ + '\n'+ + 'Criteria used:\n'+ + '···\n'+ + util.inspect(query.criteria, {depth:5})+'\n'+ + '···' + }, omen)); + }//-• + + // Check and see if we actually found a record. + var thePopulatedRecord = _.first(populatedRecords); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Allow a `mustExist: true` meta key to be specified, probably via the use of a simple new query + // method-- something like `.mustExist()`. If set, then if the record is not found, bail with an error. + // This is just a nicety to simplify some of the more annoyingly repetitive userland code that one needs + // to write in a Node/Sails app. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // If so... + if (thePopulatedRecord) { + + // Check the record to verify compliance with the adapter spec, + // as well as any issues related to stale data that might not have been + // been migrated to keep up with the logical schema (`type`, etc. in + // attribute definitions). + try { + processAllRecords([ thePopulatedRecord ], query.meta, modelIdentity, orm); + } catch (e) { return done(e); } + + }//>- + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ├─┤├─┤│││ │││ ├┤ ╠═╣╠╣ ║ ║╣ ╠╦╝ │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╩ ╩╚ ╩ ╚═╝╩╚═ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + (function _maybeRunAfterLC(proceed){ + + // If the `skipAllLifecycleCallbacks` meta key was enabled, then don't run this LC. 
+ if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, thePopulatedRecord); + }//-• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: This is where the `afterFindOne()` lifecycle callback would go + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + return proceed(undefined, thePopulatedRecord); + + })(function _afterPotentiallyRunningAfterLC(err, thePopulatedRecord){ + if (err) { return done(err); } + + // All done. + return done(undefined, thePopulatedRecord); + + });// + }); // + }); // + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/find-or-create.js b/lib/waterline/methods/find-or-create.js new file mode 100644 index 000000000..c92bf55c5 --- /dev/null +++ b/lib/waterline/methods/find-or-create.js @@ -0,0 +1,293 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('findOrCreate'); + + + +/** + * findOrCreate() + * + * Find the record matching the specified criteria. If no record exists or more + * than one record matches the criteria, an error will be returned. 
+ *
+ * ```
+ * // Ensure a pet with type dog exists
+ * PetType.findOrCreate({ type: 'dog' }, { name: 'Pretend pet type', type: 'dog' })
+ * .exec(function(err, petType, wasCreated) {
+ * // ...
+ * });
+ * ```
+ *
+ * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ *
+ * Usage without deferred object:
+ * ================================================
+ *
+ * @param {Dictionary?} criteria
+ *
+ * @param {Dictionary} newRecord
+ *
+ * @param {Function?} explicitCbMaybe
+ * Callback function to run when query has either finished successfully or errored.
+ * (If unspecified, will return a Deferred object instead of actually doing anything.)
+ *
+ * @param {Ref?} meta
+ * For internal use.
+ *
+ * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided
+ *
+ * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ *
+ * The underlying query keys:
+ * ==============================
+ *
+ * @qkey {Dictionary?} criteria
+ * @qkey {Dictionary?} newRecord
+ *
+ * @qkey {Dictionary?} meta
+ * @qkey {String} using
+ * @qkey {String} method
+ *
+ * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ */
+
+module.exports = function findOrCreate( /* criteria?, newRecord?, explicitCbMaybe?, meta? */ ) {
+
+ // Verify `this` refers to an actual Sails/Waterline model.
+ verifyModelMethodContext(this);
+
+ // Set up a few, common local vars for convenience / familiarity.
+ var WLModel = this;
+ var orm = this.waterline;
+ var modelIdentity = this.identity;
+
+ // Build an omen for potential use in the asynchronous callback below.
+ var omen = buildOmen(findOrCreate);
+
+ // Build query w/ initial, universal keys. 
+ var query = { + method: 'findOrCreate', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + // + // > Note that we define `args` to minimize the chance of this "variadics" code + // > introducing any unoptimizable performance problems. For details, see: + // > https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#32-leaking-arguments + // > •=> `.length` is just an integer, this doesn't leak the `arguments` object itself + // > •=> `i` is always valid index in the arguments object + var args = new Array(arguments.length); + for (var i = 0; i < args.length; ++i) { + args[i] = arguments[i]; + } + + // • findOrCreate(criteria, newRecord, explicitCbMaybe, ...) 
+ query.criteria = args[0]; + query.newRecord = args[1]; + explicitCbMaybe = args[2]; + query.meta = args[3]; + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + case 'E_INVALID_CRITERIA': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid criteria.\n' + + 'Details:\n' + + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_NEW_RECORDS': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid new record(s).\n'+ + 'Details:\n'+ + ' '+e.details+'\n' + }, omen) + ); + case 'E_NOOP': + // If the criteria is deemed to be a no-op, then normalize it into a standard format. + // This way, it will continue to represent a no-op as we proceed below, so the `findOne()` + // call will also come back with an E_NOOP, and so then it will go on to do a `.create()`. + // And most importantly, this way we don't have to worry about the case where the no-op + // was caused by an edge case like `false` (we need to be able to munge the criteria -- + // i.e. deleting the `limit`). + var STD_NOOP_CRITERIA = { where: { or: [] } }; + query.criteria = STD_NOOP_CRITERIA; + break; + + default: + return done(e); + } + }// >-• + + + // Remove the `limit`, `skip`, and `sort` clauses so that our findOne query is valid. + // (This is because they were automatically attached above.) 
+ delete query.criteria.limit; + delete query.criteria.skip; + delete query.criteria.sort; + + // ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌─┐┬┌┐┌┌┬┐ ┌─┐┌┐┌┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ ├┤ ││││ ││ │ ││││├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ └ ┴┘└┘─┴┘ └─┘┘└┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // Note that we pass in `meta` here, which ensures we're on the same db connection. + // (provided one was explicitly passed in!) + WLModel.findOne(query.criteria, function _afterPotentiallyFinding(err, foundRecord) { + if (err) { + return done(err); + } + + // Note that we pass through a flag as the third argument to our callback, + // indicating whether a new record was created. + if (foundRecord) { + return done(undefined, foundRecord, false); + } + + // So that the create query is valid, check if the primary key value was + // automatically set to `null` by FS2Q (i.e. because it was unspecified.) + // And if so, remove it. + // + // > IWMIH, we know this was automatic because, if `null` had been + // > specified explicitly, it would have already caused an error in + // > our call to FS2Q above (`null` is NEVER a valid PK value) + var pkAttrName = WLModel.primaryKey; + var wasPKValueCoercedToNull = _.isNull(query.newRecord[pkAttrName]); + if (wasPKValueCoercedToNull) { + delete query.newRecord[pkAttrName]; + } + + // Build a modified shallow clone of the originally-provided `meta` from + // userland, but that also has `fetch: true` and the private/experimental + // flag, `skipEncryption: true`. For context on the bit about encryption, + // see: https://github.com/balderdashy/sails/issues/4302#issuecomment-363883885 + // > PLEASE DO NOT RELY ON `skipEncryption` IN YOUR OWN CODE- IT COULD CHANGE + // > AT ANY TIME AND BREAK YOUR APP OR PLUGIN! 
+ var modifiedMetaForCreate = _.extend({}, query.meta || {}, { + fetch: true, + skipEncryption: true + }); + + // ╔═╗═╗ ╦╔═╗╔═╗╦ ╦╔╦╗╔═╗ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ║╣ ╔╩╦╝║╣ ║ ║ ║ ║ ║╣ │ ├┬┘├┤ ├─┤ │ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╩ ╚═╚═╝╚═╝╚═╝ ╩ ╚═╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘ └─┘└└─┘└─┘┴└─ ┴ + WLModel.create(query.newRecord, function _afterCreating(err, createdRecord) { + if (err) { + return done(err); + } + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Instead of preventing projections (`omit`/`select`) for findOrCreate, + // instead allow them and just modify the newly created record after the fact + // (i.e. trim properties in-memory). + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // Pass the newly-created record to our callback. + // > Note we set the `wasCreated` flag to `true` in this case. + return done(undefined, createdRecord, true); + + }, modifiedMetaForCreate);// + }, query.meta);// + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. 
+ _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/find.js b/lib/waterline/methods/find.js new file mode 100644 index 000000000..e9680daf8 --- /dev/null +++ b/lib/waterline/methods/find.js @@ -0,0 +1,312 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var helpFind = require('../utils/query/help-find'); +var processAllRecords = require('../utils/query/process-all-records'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('find'); + + +/** + * find() + * + * Find records that match the specified criteria. + * + * ``` + * // Look up all bank accounts with more than $32,000 in them. + * BankAccount.find().where({ + * balance: { '>': 32000 } + * }).exec(function(err, bankAccounts) { + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Dictionary?} criteria + * + * @param {Dictionary} populates + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Dictionary?} criteria + * @qkey {Dictionary?} populates + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function find( /* criteria?, populates?, explicitCbMaybe?, meta? */ ) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callbacks below. + var omen = buildOmen(find); + + // Build query w/ initial, universal keys. + var query = { + method: 'find', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + // + // > Note that we define `args` to minimize the chance of this "variadics" code + // > introducing any unoptimizable performance problems. 
For details, see: + // > https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#32-leaking-arguments + // > •=> `.length` is just an integer, this doesn't leak the `arguments` object itself + // > •=> `i` is always valid index in the arguments object + var args = new Array(arguments.length); + for (var i = 0; i < args.length; ++i) { + args[i] = arguments[i]; + } + + // • find(explicitCbMaybe, ...) + if (args.length >= 1 && _.isFunction(args[0])) { + explicitCbMaybe = args[0]; + query.meta = args[1]; + } + // • find(criteria, explicitCbMaybe, ...) + else if (args.length >= 2 && _.isFunction(args[1])) { + query.criteria = args[0]; + explicitCbMaybe = args[1]; + query.meta = args[2]; + } + // • find() + // • find(criteria) + // • find(criteria, populates, ...) + else { + query.criteria = args[0]; + query.populates = args[1]; + explicitCbMaybe = args[2]; + query.meta = args[3]; + } + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not specified, then build a new Deferred and bail now. 
+ // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... + + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_CRITERIA': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid criteria.\n' + + 'Details:\n' + + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_POPULATES': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid populate(s).\n' + + 'Details:\n' + + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_NOOP': + return done(undefined, []); + + default: + return done(e); + } + } // >-• + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╔╗ ╔═╗╔═╗╔═╗╦═╗╔═╗ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ├─┤├─┤│││ │││ ├┤ ╠╩╗║╣ ╠╣ ║ ║╠╦╝║╣ │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╚═╝╚═╝╚ ╚═╝╩╚═╚═╝ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + // Determine what to do about running any lifecycle callbacks + (function _maybeRunBeforeLC(proceed) { + // If the `skipAllLifecycleCallbacks` meta flag was set, don't run any of + // the methods. 
+ if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, query); + } + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: This is where the `beforeFind()` lifecycle callback would go + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + return proceed(undefined, query); + + })(function _afterPotentiallyRunningBeforeLC(err, query) { + if (err) { + return done(err); + } + + + // ================================================================================ + // FUTURE: potentially bring this back (but also would need the `omit clause`) + // ================================================================================ + // // Before we get to forging again, save a copy of the stage 2 query's + // // `select` clause. We'll need this later on when processing the resulting + // // records, and if we don't copy it now, it might be damaged by the forging. + // // + // // > Note that we don't need a deep clone. + // // > (That's because the `select` clause is only 1 level deep.) + // var s2QSelectClause = _.clone(query.criteria.select); + // ================================================================================ + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Use `helpFind()` to forge stage 3 quer(y/ies) and then call the appropriate adapters' method(s). + // > Note: `helpFind` is responsible for running the `transformer`. + // > (i.e. so that column names are transformed back into attribute names, amongst other things) + helpFind(WLModel, query, omen, function _afterFetchingRecords(err, populatedRecords) { + if (err) { + return done(err); + }//-• + + // Perform post-processing on the populated (no longer "physical"!) records. 
+ try { + processAllRecords(populatedRecords, query.meta, modelIdentity, orm); + } catch (err) { return done(err); } + + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ├─┤├─┤│││ │││ ├┤ ╠═╣╠╣ ║ ║╣ ╠╦╝ │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╩ ╩╚ ╩ ╚═╝╩╚═ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + (function _maybeRunAfterLC(proceed){ + + // If the `skipAllLifecycleCallbacks` meta key was enabled, then don't run this LC. + if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, populatedRecords); + }//-• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: This is where the `afterFind()` lifecycle callback would go + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + return proceed(undefined, populatedRecords); + + })(function _afterPotentiallyRunningAfterLC(err, populatedRecords) { + if (err) { return done(err); } + + // All done. + return done(undefined, populatedRecords); + + });// + }); // + }); // + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. 
+ _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/remove-from-collection.js b/lib/waterline/methods/remove-from-collection.js new file mode 100644 index 000000000..5f0721d3e --- /dev/null +++ b/lib/waterline/methods/remove-from-collection.js @@ -0,0 +1,505 @@ +/** + * Module dependencies + */ + +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var async = require('async'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('removeFromCollection'); + + + +/** + * removeFromCollection() + * + * Remove a subset of the members from the specified collection in each of the target record(s). + * + * ``` + * // For users 3 and 4, remove pets 99 and 98 from their "pets" collection. + * // > (if either user record does not actually have one of those pets in its "pets", + * // > then we just silently skip over it) + * User.removeFromCollection([3,4], 'pets', [99,98]).exec(...); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Array?|String?|Number?} targetRecordIds + * + * @param {String?} collectionAttrName + * + * @param {Array?} associatedIds + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Array|String|Number} targetRecordIds + * The primary key value(s) (i.e. ids) for the parent record(s). + * Must be a number or string; e.g. '507f191e810c19729de860ea' or 49 + * Or an array of numbers or strings; e.g. ['507f191e810c19729de860ea', '14832ace0c179de897'] or [49, 32, 37] + * If an empty array (`[]`) is specified, then this is a no-op. + * + * @qkey {String} collectionAttrName + * The name of the collection association (e.g. "pets") + * + * @qkey {Array} associatedIds + * The primary key values (i.e. ids) for the associated child records to remove from the collection. + * Must be an array of numbers or strings; e.g. ['334724948aca33ea0f13', '913303583e0af031358bac931'] or [18, 19] + * If an empty array (`[]`) is specified, then this is a no-op. + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function removeFromCollection(/* targetRecordIds?, collectionAttrName?, associatedIds?, explicitCbMaybe?, meta? */) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(removeFromCollection); + + // Build query w/ initial, universal keys. 
+ var query = { + method: 'removeFromCollection', + using: modelIdentity + }; + + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback) + // + // > Note that we define `args` so that we can insulate access + // > to the arguments provided to this function. + var args = arguments; + (function _handleVariadicUsage(){ + + // The metadata container, if one was provided. + var _meta; + + + // Handle first two arguments: + // (both of which always have exactly one meaning) + // + // • removeFromCollection(targetRecordIds, collectionAttrName, ...) + query.targetRecordIds = args[0]; + query.collectionAttrName = args[1]; + + + // Handle double meaning of third argument, & then handle the rest: + // + // • removeFromCollection(____, ____, associatedIds, explicitCbMaybe, _meta) + var is3rdArgArray = !_.isUndefined(args[2]); + if (is3rdArgArray) { + query.associatedIds = args[2]; + explicitCbMaybe = args[3]; + _meta = args[4]; + } + // • removeFromCollection(____, ____, explicitCbMaybe, _meta) + else { + explicitCbMaybe = args[2]; + _meta = args[3]; + } + + // Fold in `_meta`, if relevant. 
+ if (!_.isUndefined(_meta)) { + query.meta = _meta; + } // >- + + })(); + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_TARGET_RECORD_IDS': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The target record ids (i.e. first argument) passed to `.removeFromCollection()` '+ + 'should be the ID (or IDs) of target records whose collection will be modified.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_COLLECTION_ATTR_NAME': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The collection attr name (i.e. second argument) to `.removeFromCollection()` should '+ + 'be the name of a collection association from this model.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_ASSOCIATED_IDS': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The associated ids (i.e. using `.members()`, or the third argument) passed to `.removeFromCollection()` should be '+ + 'the ID (or IDs) of associated records to remove.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_NOOP': + return done(); + // ^ tolerate no-ops -- i.e. 
empty array of target record ids or empty array of associated ids (members) + + case 'E_INVALID_META': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: e.message + }, omen) + ); + // ^ when the standard usage error message is good enough as-is, without any further customization + + default: + return done(e); + // ^ when an internal, miscellaneous, or unexpected error occurs + + } + } // >-• + + + // ┌┐┌┌─┐┬ ┬ ╔═╗╔═╗╔╦╗╦ ╦╔═╗╦ ╦ ╦ ╦ ┌┬┐┌─┐┬ ┬┌─ ┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌┬┐┌┐ ┌─┐ + // ││││ ││││ ╠═╣║ ║ ║ ║╠═╣║ ║ ╚╦╝ │ ├─┤│ ├┴┐ │ │ │ │ ├─┤├┤ ││├┴┐└─┐ + // ┘└┘└─┘└┴┘ ╩ ╩╚═╝ ╩ ╚═╝╩ ╩╩═╝╩═╝╩ ┴ ┴ ┴┴─┘┴ ┴ ┴ └─┘ ┴ ┴ ┴└─┘ ─┴┘└─┘└─┘ + (function (proceed) { + + // Get the model being used as the parent + var WLModel = orm.collections[query.using]; + try { assert.equal(query.using.toLowerCase(), query.using, '`query.using` (identity) should have already been normalized before getting here! But it was not: '+query.using); } catch (e) { return proceed(e); } + + // Look up the association by name in the schema definition. + var schemaDef = WLModel.schema[query.collectionAttrName]; + + // Look up the associated collection using the schema def which should have + // join tables normalized + var WLChild = orm.collections[schemaDef.collection]; + try { + assert.equal(schemaDef.collection.toLowerCase(), schemaDef.collection, '`schemaDef.collection` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.collection); + assert.equal(schemaDef.referenceIdentity.toLowerCase(), schemaDef.referenceIdentity, '`schemaDef.referenceIdentity` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.referenceIdentity); + assert.equal(Object.getPrototypeOf(WLChild).identity.toLowerCase(), Object.getPrototypeOf(WLChild).identity, '`Object.getPrototypeOf(WLChild).identity` (identity) should have already been normalized before getting here! 
But it was not: '+Object.getPrototypeOf(WLChild).identity); + } catch (e) { return proceed(e); } + + // Flag to determine if the WLChild is a manyToMany relation + var manyToMany = false; + + // Check if the schema references something other than the WLChild + if (schemaDef.referenceIdentity !== Object.getPrototypeOf(WLChild).identity) { + manyToMany = true; + WLChild = orm.collections[schemaDef.referenceIdentity]; + } + + // Check if the child is a join table + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + manyToMany = true; + } + + // Check if the child is a through table + if (_.has(Object.getPrototypeOf(WLChild), 'throughTable') && _.keys(WLChild.throughTable).length) { + manyToMany = true; + } + + // Ensure the query skips lifecycle callbacks + // Build a modified shallow clone of the originally-provided `meta` + var modifiedMeta = _.extend({}, query.meta || {}, { skipAllLifecycleCallbacks: true }); + + + // ██╗███╗ ██╗ ███╗ ███╗██╗ + // ██╔╝████╗ ██║ ████╗ ████║╚██╗ + // ██║ ██╔██╗ ██║ ██╔████╔██║ ██║ + // ██║ ██║╚██╗██║ ██║╚██╔╝██║ ██║ + // ╚██╗██║ ╚████║██╗██╗██║ ╚═╝ ██║██╔╝ + // ╚═╝╚═╝ ╚═══╝╚═╝╚═╝╚═╝ ╚═╝╚═╝ + // + // ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗ ████████╗ ██████╗ ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗ + // ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝ ╚══██╔══╝██╔═══██╗ ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝ + // ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝ ██║ ██║ ██║ ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝ + // ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝ ██║ ██║ ██║ ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝ + // ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║ ██║ ╚██████╔╝ ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║ + // ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // If the collection uses a join table, build a query that removes the records + // from the table. 
+ if (manyToMany) { + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌─┐┌┐┌┌─┐┌─┐ ┌┬┐┌─┐┌─┐┌─┐┬┌┐┌┌─┐ + // ╠╩╗║ ║║║ ║║ ├┬┘├┤ ├┤ ├┤ ├┬┘├┤ ││││ ├┤ │││├─┤├─┘├─┘│││││ ┬ + // ╚═╝╚═╝╩╩═╝═╩╝ ┴└─└─┘└ └─┘┴└─└─┘┘└┘└─┘└─┘ ┴ ┴┴ ┴┴ ┴ ┴┘└┘└─┘ + // + // Maps out the parent and child attribute names to use for the query. + var parentReference; + var childReference; + + // Find the parent reference + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + + // Assumes the generated junction table will only ever have two foreign key + // values. Should be safe for now and any changes would need to be made in + // Waterline-Schema where a map could be formed anyway. + _.each(WLChild.schema, function(wlsAttrDef, key) { + if (!_.has(wlsAttrDef, 'references')) { + return; + } + + // If this is the piece of the join table, set the parent reference. + if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName === schemaDef.on) { + parentReference = key; + } + }); + + } + // If it's a through table, grab the parent and child reference from the + // through table mapping that was generated by Waterline-Schema. + else if (_.has(Object.getPrototypeOf(WLChild), 'throughTable')) { + + childReference = WLChild.throughTable[WLModel.identity + '.' + query.collectionAttrName]; + _.each(WLChild.throughTable, function(rhs, key) { + if (key !== WLModel.identity + '.' + query.collectionAttrName) { + parentReference = rhs; + } + }); + + }//>- + + // Find the child reference in a junction table + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + + // Assumes the generated junction table will only ever have two foreign key + // values. Should be safe for now and any changes would need to be made in + // Waterline-Schema where a map could be formed anyway. + _.each(WLChild.schema, function(wlsAttrDef, key) { + if (!_.has(wlsAttrDef, 'references')) { + return; + } + + // If this is the other piece of the join table, set the child reference. 
+ if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName !== schemaDef.on) { + childReference = key; + } + });// + + }//>- + + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴ (S) + // + // If only a single targetRecordId is used, this can be proceed in a single + // query. Otherwise multiple queries will be needed - one for each parent. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Combine this bit into one single query using something like: + // ``` + // { or: [ { and: [{..},{..:{in:[..]}}] }, { and: [{..},{..:{in: [..]}}] }, ... ] } + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // Build an array to hold `where` clauses for all records being removed. + // For each target record, build a constraint destroy query for the associated records. + var joinRecordWhereClauses = []; + _.each(query.targetRecordIds, function(targetId) { + var whereClauseForTarget = {}; + whereClauseForTarget[parentReference] = targetId; + whereClauseForTarget[childReference] = { in: query.associatedIds }; + joinRecordWhereClauses.push(whereClauseForTarget); + }); + + // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴ + async.each(joinRecordWhereClauses, function(whereClause, next) { + + WLChild.destroy(whereClause, function(err){ + if (err) { return next(err); } + return next(); + }, modifiedMeta); + + },// ~∞%° + function _after(err) { + if (err) { return proceed(err); } + return proceed(); + });// + + return; + }//_∏_. 
+ + + // ██╗███╗ ██╗ ██╗██╗ + // ██╔╝████╗ ██║ ███║╚██╗ + // ██║ ██╔██╗ ██║ ╚██║ ██║ + // ██║ ██║╚██╗██║ ██║ ██║ + // ╚██╗██║ ╚████║██╗██╗██║██╔╝ + // ╚═╝╚═╝ ╚═══╝╚═╝╚═╝╚═╝╚═╝ + // + // ██████╗ ███████╗██╗ ██████╗ ███╗ ██╗ ██████╗ ███████╗ ████████╗ ██████╗ + // ██╔══██╗██╔════╝██║ ██╔═══██╗████╗ ██║██╔════╝ ██╔════╝ ╚══██╔══╝██╔═══██╗ + // ██████╔╝█████╗ ██║ ██║ ██║██╔██╗ ██║██║ ███╗███████╗ ██║ ██║ ██║ + // ██╔══██╗██╔══╝ ██║ ██║ ██║██║╚██╗██║██║ ██║╚════██║ ██║ ██║ ██║ + // ██████╔╝███████╗███████╗╚██████╔╝██║ ╚████║╚██████╔╝███████║ ██║ ╚██████╔╝ + // ╚═════╝ ╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝ ╚══════╝ ╚═╝ ╚═════╝ + // + // Otherwise, this association is exclusive-- so rather than deleting junction records, we'll need + // to update the child records themselves, nulling out their foreign key value (aka singular, "model", association). + + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴ + // + // Build up criteria that selects child records. + var criteria = { where: {} }; + criteria.where[WLChild.primaryKey] = query.associatedIds; + criteria.where[schemaDef.via] = query.targetRecordIds; + + // Build up the values to set (we'll null out the other side). + var valuesToUpdate = {}; + valuesToUpdate[schemaDef.via] = null; + + + // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴ + WLChild.update(criteria, valuesToUpdate, function(err){ + if (err) { return proceed(err); } + + return proceed(); + + }, modifiedMeta);// + + })(function (err) { + if (err) { return done(err); } + + // IWMIH, everything worked! + // > Note that we do not send back a result of any kind-- this it to reduce the likelihood + // > writing userland code that relies undocumented/experimental output. + return done(); + + });// + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. 
+ _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/replace-collection.js b/lib/waterline/methods/replace-collection.js new file mode 100644 index 000000000..fd9679041 --- /dev/null +++ b/lib/waterline/methods/replace-collection.js @@ -0,0 +1,623 @@ +/** + * Module dependencies + */ + +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('replaceCollection'); + + + +/** + * replaceCollection() + * + * Replace all members of the specified collection in each of the target record(s). + * + * ``` + * // For users 3 and 4, change their "pets" collection to contain ONLY pets 99 and 98. + * User.replaceCollection([3,4], 'pets', [99,98]).exec(...); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Array?|String?|Number?} targetRecordIds + * + * @param {String?} collectionAttrName + * + * @param {Array?} associatedIds + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Array|String|Number} targetRecordIds + * The primary key value(s) (i.e. ids) for the parent record(s). + * Must be a number or string; e.g. '507f191e810c19729de860ea' or 49 + * Or an array of numbers or strings; e.g. ['507f191e810c19729de860ea', '14832ace0c179de897'] or [49, 32, 37] + * If an empty array (`[]`) is specified, then this is a no-op. + * + * @qkey {String} collectionAttrName + * The name of the collection association (e.g. "pets") + * + * @qkey {Array} associatedIds + * The primary key values (i.e. ids) for the child records that will be the new members of the association. + * Must be an array of numbers or strings; e.g. ['334724948aca33ea0f13', '913303583e0af031358bac931'] or [18, 19] + * Specify an empty array (`[]`) to completely wipe out the collection's contents. + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function replaceCollection(/* targetRecordIds?, collectionAttrName?, associatedIds?, explicitCbMaybe?, meta? */) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(replaceCollection); + + // Build query w/ initial, universal keys. 
+ var query = { + method: 'replaceCollection', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback) + // + // > Note that we define `args` so that we can insulate access + // > to the arguments provided to this function. + var args = arguments; + (function _handleVariadicUsage(){ + + // The metadata container, if one was provided. + var _meta; + + + // Handle first two arguments: + // (both of which always have exactly one meaning) + // + // • replaceCollection(targetRecordIds, collectionAttrName, ...) + query.targetRecordIds = args[0]; + query.collectionAttrName = args[1]; + + + // Handle double meaning of third argument, & then handle the rest: + // + // • replaceCollection(____, ____, associatedIds, explicitCbMaybe, _meta) + var is3rdArgArray = !_.isUndefined(args[2]); + if (is3rdArgArray) { + query.associatedIds = args[2]; + explicitCbMaybe = args[3]; + _meta = args[4]; + } + // • replaceCollection(____, ____, explicitCbMaybe, _meta) + else { + explicitCbMaybe = args[2]; + _meta = args[3]; + } + + // Fold in `_meta`, if relevant. 
+ if (!_.isUndefined(_meta)) { + query.meta = _meta; + } // >- + + })(); + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_TARGET_RECORD_IDS': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The target record ids (i.e. first argument) passed to `.replaceCollection()` '+ + 'should be the ID (or IDs) of compatible target records whose collection will '+ + 'be modified.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_COLLECTION_ATTR_NAME': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The collection attr name (i.e. second argument) to `.replaceCollection()` should '+ + 'be the name of a collection association from this model.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_INVALID_ASSOCIATED_IDS': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The associated ids (i.e. using `.members()`, or the third argument) passed to `.replaceCollection()` should be '+ + 'the ID (or IDs) of associated records to use.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + + case 'E_NOOP': + return done(); + // ^ tolerate no-ops -- i.e. 
empty array of target record ids + + case 'E_INVALID_META': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: e.message + }, omen) + ); + // ^ when the standard usage error message is good enough as-is, without any further customization + + default: + return done(e); + // ^ when an internal, miscellaneous, or unexpected error occurs + + } + } // >-• + + + // ┌┐┌┌─┐┬ ┬ ╔═╗╔═╗╔╦╗╦ ╦╔═╗╦ ╦ ╦ ╦ ┌┬┐┌─┐┬ ┬┌─ ┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌┬┐┌┐ ┌─┐ + // ││││ ││││ ╠═╣║ ║ ║ ║╠═╣║ ║ ╚╦╝ │ ├─┤│ ├┴┐ │ │ │ │ ├─┤├┤ ││├┴┐└─┐ + // ┘└┘└─┘└┴┘ ╩ ╩╚═╝ ╩ ╚═╝╩ ╩╩═╝╩═╝╩ ┴ ┴ ┴┴─┘┴ ┴ ┴ └─┘ ┴ ┴ ┴└─┘ ─┴┘└─┘└─┘ + (function (proceed){ + + // Get the model being used as the parent + var WLModel = orm.collections[query.using]; + try { assert.equal(query.using.toLowerCase(), query.using, '`query.using` (identity) should have already been normalized before getting here! But it was not: '+query.using); } catch (e) { return proceed(e); } + + // Look up the association by name in the schema definition. + var schemaDef = WLModel.schema[query.collectionAttrName]; + + // Look up the associated collection using the schema def which should have + // join tables normalized + var WLChild = orm.collections[schemaDef.collection]; + try { + assert.equal(schemaDef.collection.toLowerCase(), schemaDef.collection, '`schemaDef.collection` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.collection); + assert.equal(schemaDef.referenceIdentity.toLowerCase(), schemaDef.referenceIdentity, '`schemaDef.referenceIdentity` (identity) should have already been normalized before getting here! But it was not: '+schemaDef.referenceIdentity); + assert.equal(Object.getPrototypeOf(WLChild).identity.toLowerCase(), Object.getPrototypeOf(WLChild).identity, '`Object.getPrototypeOf(WLChild).identity` (identity) should have already been normalized before getting here! 
But it was not: '+Object.getPrototypeOf(WLChild).identity); + } catch (e) { return proceed(e); } + + // Flag to determine if the WLChild is a manyToMany relation + var manyToMany = false; + + // Check if the schema references something other than the WLChild + if (schemaDef.referenceIdentity !== Object.getPrototypeOf(WLChild).identity) { + manyToMany = true; + WLChild = orm.collections[schemaDef.referenceIdentity]; + } + + // Check if the child is a join table + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + manyToMany = true; + } + + // Check if the child is a through table + if (_.has(Object.getPrototypeOf(WLChild), 'throughTable') && _.keys(WLChild.throughTable).length) { + manyToMany = true; + } + + + // Ensure the query skips lifecycle callbacks + // Build a modified shallow clone of the originally-provided `meta` + var modifiedMeta = _.extend({}, query.meta || {}, { skipAllLifecycleCallbacks: true }); + + + + // ██╗███╗ ██╗ ███╗ ███╗██╗ + // ██╔╝████╗ ██║ ████╗ ████║╚██╗ + // ██║ ██╔██╗ ██║ ██╔████╔██║ ██║ + // ██║ ██║╚██╗██║ ██║╚██╔╝██║ ██║ + // ╚██╗██║ ╚████║██╗██╗██║ ╚═╝ ██║██╔╝ + // ╚═╝╚═╝ ╚═══╝╚═╝╚═╝╚═╝ ╚═╝╚═╝ + // + // ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗ ████████╗ ██████╗ ███╗ ███╗ █████╗ ███╗ ██╗██╗ ██╗ + // ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝ ╚══██╔══╝██╔═══██╗ ████╗ ████║██╔══██╗████╗ ██║╚██╗ ██╔╝ + // ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝ ██║ ██║ ██║ ██╔████╔██║███████║██╔██╗ ██║ ╚████╔╝ + // ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝ ██║ ██║ ██║ ██║╚██╔╝██║██╔══██║██║╚██╗██║ ╚██╔╝ + // ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║ ██║ ╚██████╔╝ ██║ ╚═╝ ██║██║ ██║██║ ╚████║ ██║ + // ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // If the collection uses a join table, build a query that removes the records + // from the table. 
+ if (manyToMany) { + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌─┐┌┐┌┌─┐┌─┐ ┌┬┐┌─┐┌─┐┌─┐┬┌┐┌┌─┐ + // ╠╩╗║ ║║║ ║║ ├┬┘├┤ ├┤ ├┤ ├┬┘├┤ ││││ ├┤ │││├─┤├─┘├─┘│││││ ┬ + // ╚═╝╚═╝╩╩═╝═╩╝ ┴└─└─┘└ └─┘┴└─└─┘┘└┘└─┘└─┘ ┴ ┴┴ ┴┴ ┴ ┴┘└┘└─┘ + // + // Maps out the parent and child attribute names to use for the query. + var parentReference; + var childReference; + + // Find the parent reference + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + // Assumes the generated junction table will only ever have two foreign key + // values. Should be safe for now and any changes would need to be made in + // Waterline-Schema where a map could be formed anyway. + _.each(WLChild.schema, function(wlsAttrDef, key) { + if (!_.has(wlsAttrDef, 'references')) { + return; + } + + // If this is the piece of the join table, set the parent reference. + if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName === schemaDef.on) { + parentReference = key; + } + }); + } + // If it's a through table, grab the parent and child reference from the + // through table mapping that was generated by Waterline-Schema. + else if (_.has(Object.getPrototypeOf(WLChild), 'throughTable')) { + childReference = WLChild.throughTable[WLModel.identity + '.' + query.collectionAttrName]; + _.each(WLChild.throughTable, function(rhs, key) { + if (key !== WLModel.identity + '.' + query.collectionAttrName) { + parentReference = rhs; + } + }); + }//>- + + + + // Find the child reference in a junction table + if (_.has(Object.getPrototypeOf(WLChild), 'junctionTable') && WLChild.junctionTable) { + // Assumes the generated junction table will only ever have two foreign key + // values. Should be safe for now and any changes would need to be made in + // Waterline-Schema where a map could be formed anyway. + _.each(WLChild.schema, function(wlsAttrDef, key) { + if (!_.has(wlsAttrDef, 'references')) { + return; + } + + // If this is the other piece of the join table, set the child reference. 
+ if (_.has(wlsAttrDef, 'columnName') && wlsAttrDef.columnName !== schemaDef.on) { + childReference = key; + } + }); + } + + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╩╗║ ║║║ ║║ ││├┤ └─┐ │ ├┬┘│ │└┬┘ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╚═╝╩╩═╝═╩╝ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ └─┘└└─┘└─┘┴└─ ┴ + // + // When replacing a collection, the first step is to remove all the records + // for the target id's in the join table. + var criteriaOfDestruction = { + where: {} + }; + + criteriaOfDestruction.where[parentReference] = { + in: query.targetRecordIds + }; + + // Don't worry about fetching + modifiedMeta.fetch = false; + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┬┌┐┌┌─┐┌─┐┬─┐┌┬┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╩╗║ ║║║ ║║ ││││└─┐├┤ ├┬┘ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚═╝╚═╝╩╩═╝═╩╝ ┴┘└┘└─┘└─┘┴└─ ┴ └─┘└└─┘└─┘┴└─ ┴ + // + // Then build up an insert query for creating the new join table records. + var insertRecords = []; + + // For each target record, build an insert query for the associated records. + _.each(query.targetRecordIds, function(targetId) { + _.each(query.associatedIds, function(associatedId) { + var record = {}; + record[parentReference] = targetId; + record[childReference] = associatedId; + insertRecords.push(record); + }); + }); + + + // ╦═╗╦ ╦╔╗╔ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╦╝║ ║║║║ ││├┤ └─┐ │ ├┬┘│ │└┬┘ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╩╚═╚═╝╝╚╝ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ └─┘└└─┘└─┘┴└─ ┴ + WLChild.destroy(criteriaOfDestruction, function $afterDestroyingChildRecords(err) { + if (err) { return proceed(err); } + + // If there were no associated id's to insert, exit out + if (!query.associatedIds.length) { + return proceed(); + } + + // ╦═╗╦ ╦╔╗╔ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╦╝║ ║║║║ │ ├┬┘├┤ ├─┤ │ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╩╚═╚═╝╝╚╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘ └─┘└└─┘└─┘┴└─ ┴ + WLChild.createEach(insertRecords, proceed, modifiedMeta); + + }, modifiedMeta); + + return; + }//-• + + + // ██╗███╗ ██╗ ██╗██╗ + // ██╔╝████╗ ██║ ███║╚██╗ + // ██║ ██╔██╗ ██║ ╚██║ ██║ + // ██║ ██║╚██╗██║ ██║ ██║ + // ╚██╗██║ 
╚████║██╗██╗██║██╔╝ + // ╚═╝╚═╝ ╚═══╝╚═╝╚═╝╚═╝╚═╝ + // + // ██████╗ ███████╗██╗ ██████╗ ███╗ ██╗ ██████╗ ███████╗ ████████╗ ██████╗ + // ██╔══██╗██╔════╝██║ ██╔═══██╗████╗ ██║██╔════╝ ██╔════╝ ╚══██╔══╝██╔═══██╗ + // ██████╔╝█████╗ ██║ ██║ ██║██╔██╗ ██║██║ ███╗███████╗ ██║ ██║ ██║ + // ██╔══██╗██╔══╝ ██║ ██║ ██║██║╚██╗██║██║ ██║╚════██║ ██║ ██║ ██║ + // ██████╔╝███████╗███████╗╚██████╔╝██║ ╚████║╚██████╔╝███████║ ██║ ╚██████╔╝ + // ╚═════╝ ╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝ ╚══════╝ ╚═╝ ╚═════╝ + // + // Otherwise the child records need to have their foreign keys updated to reflect the + // new realities of the association. We'll either (A) set the new child records to + // have the same fk and null out any other existing child records or (B) just null out + // all existing child records. That's because there should only ever be either (A) exactly + // one target record with >=1 new child records to associate or (B) >=1 target records with + // zero new child records to associate (i.e. a null-out) + if (query.targetRecordIds.length >= 2 && query.associatedIds.length > 0) { return proceed(new Error('Consistency violation: Too many target record ids and associated ids-- should never have been possible, because this query should have been halted when it was being forged at stage 2.')); } + if (query.targetRecordIds.length === 0) { return proceed(new Error('Consistency violation: No target record ids-- should never have been possible, because this query should have been halted when it was being forged at stage 2.')); } + + + // First, check whether the foreign key attribute is required/optional so that we know whether + // it's safe to null things out without checking for collisions beforehand. 
+ var isFkAttributeOptional = !WLChild.attributes[schemaDef.via].required; + (function(proceed){ + if (isFkAttributeOptional) { + return proceed(undefined, 0); + }//• + + var potentialCollisionCriteria = { where: {} }; + potentialCollisionCriteria.where[schemaDef.via] = { in: query.targetRecordIds }; + potentialCollisionCriteria.where[WLChild.primaryKey] = { nin: query.associatedIds }; + WLChild.count(potentialCollisionCriteria, function(err, total) { + if (err) { return proceed(err); } + return proceed(undefined, total); + });//_∏_ + + })(function (err, numCollisions) { + if (err) { return proceed(err); } + + if (!isFkAttributeOptional && numCollisions > 0) { + return proceed(flaverr({ + name: 'PropagationError', + code: 'E_COLLISIONS_WHEN_NULLING_OUT_REQUIRED_FK', + message: + 'Cannot '+(query.associatedIds.length===0?'wipe':'replace')+' the contents of '+ + 'association (`'+query.collectionAttrName+'`) because there '+ + (numCollisions===1?('is one conflicting '+WLChild.identity+' record'):('are '+numCollisions+' conflicting '+WLChild.identity+' records'))+' '+ + 'whose `'+schemaDef.via+'` cannot be set to `null`. (That attribute is required.)' + // For example, if you have a car with four tires, and you set out + // to replace the four old tires with only three new ones, then you'll need to + // destroy the spare tire before attempting to call `Car.replaceCollection()`) + // ^^ Actually maybe just do that last bit in FS2Q (see other note there) + }, omen)); + }//• + + // So to recap: IWMIH we know that one of two things is true. 
+ // + // Either: + // (A) there are >=1 associated record ids, but EXACTLY ONE target record id (**null out fks for existing associated records except for the new ones, then set all the new ones to the same value**), or + // (B) there is >=1 target record id, but ZERO associated record ids (**just null out fks for all existing associated records**) + // + // ╦═╗╦ ╦╔╗╔ ┌─┐┌─┐┬─┐┌┬┐┬┌─┐┬ ┌┐┌┬ ┬┬ ┬ ┌─┐┬ ┬┌┬┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ ┌┬┐┬ ┬┌─┐┌┐┌ + // ╠╦╝║ ║║║║ ├─┘├─┤├┬┘ │ │├─┤│ ││││ ││ │───│ ││ │ │ │─┼┐│ │├┤ ├┬┘└┬┘ │ ├─┤├┤ │││ + // ╩╚═╚═╝╝╚╝ ┴ ┴ ┴┴└─ ┴ ┴┴ ┴┴─┘ ┘└┘└─┘┴─┘┴─┘ └─┘└─┘ ┴ └─┘└└─┘└─┘┴└─ ┴┘ ┴ ┴ ┴└─┘┘└┘ + // ┌─┐┌┐┌┌─┐┌┬┐┬ ┬┌─┐┬─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ ┌┬┐┌─┐ ┌─┐┌─┐┌┬┐ ┌─┐┌─┐┬─┐┌─┐┬┌─┐┌┐┌ ┬┌─┌─┐┬ ┬┌─┐ + // ├─┤││││ │ │ ├─┤├┤ ├┬┘ │─┼┐│ │├┤ ├┬┘└┬┘ │ │ │ └─┐├┤ │ ├┤ │ │├┬┘├┤ ││ ┬│││ ├┴┐├┤ └┬┘└─┐ + // ┴ ┴┘└┘└─┘ ┴ ┴ ┴└─┘┴└─ └─┘└└─┘└─┘┴└─ ┴ ┴ └─┘ └─┘└─┘ ┴ └ └─┘┴└─└─┘┴└─┘┘└┘ ┴ ┴└─┘ ┴ └─┘ + // We'll start with scenario A, where we first null out the fk on any existing records + // other than the new ones, then update all the foreign key values for new associated + // records to point to one particular parent record (aka target record). + if (query.associatedIds.length > 0) { + + // console.log('** partial null-out ** # collisions:', numCollisions); + + var partialNullOutCriteria = { where: {} }; + partialNullOutCriteria.where[WLChild.primaryKey] = { nin: query.associatedIds }; + partialNullOutCriteria.where[schemaDef.via] = query.targetRecordIds[0]; + // ^^ we know there has to be exactly one target record id at this point + // (see assertions above) so this is safe. + + var partialNullOutVts = {}; + partialNullOutVts[schemaDef.via] = null; + + // If the FK attribute is required, then we've already looked up the # of collisions, + // so we can use that as an optimization to decide whether we can skip past this query + // altogether. (If we already know there are no collisions, there's nothing to "null out"!) 
+ if (!isFkAttributeOptional && numCollisions === 0) { + // > To accomplish this, we just use an empty "values to set" query key to make + // > this first query into a no-op. This saves us doing yet another self-calling + // > function. (One day, when the world has entirely switched to Node >= 7.9, + // > we could just use `await` for all this exciting stuff.) + partialNullOutVts = {}; + }//fi + + WLChild.update(partialNullOutCriteria, partialNullOutVts, function(err) { + if (err) { return proceed(err); } + + var newFkUpdateCriteria = { where: {} }; + newFkUpdateCriteria.where[WLChild.primaryKey] = { in: query.associatedIds }; + + var newFkUpdateVts = {}; + newFkUpdateVts[schemaDef.via] = query.targetRecordIds[0]; + // ^^ we know there has to be exactly one target record id at this point + // (see assertions above) so this is safe. + + WLChild.update(newFkUpdateCriteria, newFkUpdateVts, function(err) { + if (err) { return proceed(err); } + + return proceed(); + }, modifiedMeta);//_∏_ + + }, modifiedMeta);//_∏_ + + }//‡ + // ╦═╗╦ ╦╔╗╔ ┌┐ ┬ ┌─┐┌┐┌┬┌─┌─┐┌┬┐ ┌┐┌┬ ┬┬ ┬ ┌─┐┬ ┬┌┬┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╦╝║ ║║║║ ├┴┐│ ├─┤│││├┴┐├┤ │ ││││ ││ │ │ ││ │ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╩╚═╚═╝╝╚╝ └─┘┴─┘┴ ┴┘└┘┴ ┴└─┘ ┴ ┘└┘└─┘┴─┘┴─┘ └─┘└─┘ ┴ └─┘└└─┘└─┘┴└─ ┴ + // Alternatively, we'll go with scenario B, where we potentially null all the fks out. + else { + // console.log('** BLANKET NULL-OUT ** # collisions:', numCollisions); + + // If the FK attribute is required, then we've already looked up the # of collisions, + // so we can use that as an optimization to decide whether we can skip past this query + // altogether. (If we already know there are no collisions, there's nothing to "null out"!) 
+ if (!isFkAttributeOptional && numCollisions === 0) { + return proceed(); + }//• + + // Otherwise, proceed with the "null out" + var nullOutCriteria = { where: {} }; + nullOutCriteria.where[schemaDef.via] = { in: query.targetRecordIds }; + + var blanketNullOutVts = {}; + blanketNullOutVts[schemaDef.via] = null; + + WLChild.update(nullOutCriteria, blanketNullOutVts, function(err) { + if (err) { return proceed(err); } + return proceed(); + }, modifiedMeta);//_∏_ + + }//fi + + // (Reminder: don't put any code down here!) + + });//_∏_ + + })(function (err) { + if (err) { return done(err); } + + // IWMIH, everything worked! + // > Note that we do not send back a result of any kind-- this is to reduce the likelihood of + // > writing userland code that relies on undocumented/experimental output. + return done(); + + });// + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. 
+ _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/stream.js b/lib/waterline/methods/stream.js new file mode 100644 index 000000000..54194b00b --- /dev/null +++ b/lib/waterline/methods/stream.js @@ -0,0 +1,520 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); +var async = require('async'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('stream'); +var STRIP_COMMENTS_RX = /(\/\/.*$)|(\/\*[\s\S]*?\*\/)|(\s*=[^,\)]*(('(?:\\'|[^'\r\n])*')|("(?:\\"|[^"\r\n])*"))|(\s*=[^,\)]*))/mg; + + + +/** + * stream() + * + * Iterate over individual records (or batches of records) that match + * the specified criteria, populating associations if instructed. + * + * ``` + * BlogPost.stream() + * .limit(50000) + * .sort('title ASC') + * .eachRecord(function (blogPost, next){ ... }) + * .exec(function (err){ ... }); + * + * // For more usage info (/history), see: + * // https://gist.github.com/mikermcneil/d1e612cd1a8564a79f61e1f556fc49a6#examples + * ``` + * + * ---------------------------------- + * ~• This is the "new .stream()". •~ + * ---------------------------------- + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Dictionary?} criteria + * + * @param {Function?} eachRecordFn + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) 
+ * + * @param {Ref?} meta + * For internal use. + * + * @param {Dictionary} moreQueryKeys + * For internal use. + * (A dictionary of query keys.) + * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Dictionary?} criteria + * + * @qkey {Dictionary?} populates + * + * @qkey {Function?} eachRecordFn + * An iteratee function to run for each record. + * (If specified, then `eachBatchFn` should not ALSO be set.) + * + * @qkey {Function?} eachBatchFn + * An iteratee function to run for each batch of records. + * (If specified, then `eachRecordFn` should not ALSO be set.) + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function stream( /* criteria?, eachRecordFn?, explicitCbMaybe?, meta?, moreQueryKeys? */ ) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Potentially build an omen here for potential use in an + // asynchronous callback below if/when an error occurs. This would + // provide for a better stack trace, since it would be based off of + // the original method call, rather than containing extra stack entries + // from various utilities calling each other within Waterline itself. + // + // > Note that it'd need to be passed in to the other model methods that + // > get called internally. 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // Build query w/ initial, universal keys. + var query = { + method: 'stream', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + // + // > Note that we define `args` to minimize the chance of this "variadics" code + // > introducing any unoptimizable performance problems. For details, see: + // > https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#32-leaking-arguments + // > •=> `.length` is just an integer, this doesn't leak the `arguments` object itself + // > •=> `i` is always valid index in the arguments object + var args = new Array(arguments.length); + for (var i = 0; i < args.length; ++i) { + args[i] = arguments[i]; + } + + // • stream(eachRecordFn, ..., ..., ...) + // • stream(eachRecordFn, explicitCbMaybe, ..., ...) + if (args.length >= 1 && _.isFunction(args[0])) { + query.eachRecordFn = args[0]; + explicitCbMaybe = args[1]; + query.meta = args[2]; + if (args[3]) { + _.extend(query, args[3]); + } + } + // • stream(criteria, ..., ..., ..., ...) + // • stream(criteria, eachRecordFn, ..., ..., ...) 
+ // • stream() + else { + query.criteria = args[0]; + query.eachRecordFn = args[1]; + explicitCbMaybe = args[2]; + query.meta = args[3]; + if (args[4]) { + _.extend(query, args[4]); + } + } + + + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_STREAM_ITERATEE': + return done( + flaverr( + { + name: 'UsageError', + code: e.code, + details: e.details, + }, + new Error( + 'Missing or invalid iteratee function for `.stream()`.\n'+ + 'Details:\n' + + ' ' + e.details + '\n' + ) + ) + ); + + case 'E_INVALID_CRITERIA': + case 'E_INVALID_POPULATES': + case 'E_INVALID_META': + return done(e); + // ^ when the standard usage error is good enough as-is, without any further customization + + case 'E_NOOP': + return done(); + + default: + return done(e); + // ^ when an internal, miscellaneous, or unexpected error occurs + + } + } //>-• + + + + // ┌┐┌┌─┐┬ ┬ ╔═╗╔═╗╔╦╗╦ ╦╔═╗╦ ╦ ╦ ╦ ┌┬┐┌─┐┬ ┬┌─ ┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌┬┐┌┐ ┌─┐ + // ││││ ││││ ╠═╣║ ║ ║ ║╠═╣║ ║ ╚╦╝ │ ├─┤│ ├┴┐ │ │ │ │ ├─┤├┤ ││├┴┐└─┐ + // ┘└┘└─┘└┴┘ ╩ ╩╚═╝ ╩ ╚═╝╩ ╩╩═╝╩═╝╩ ┴ ┴ ┴┴─┘┴ ┴ ┴ └─┘ ┴ ┴ ┴└─┘ ─┴┘└─┘└─┘ + // + // When running a `.stream()`, Waterline grabs batches (pages) of 30 + // records at a time, by default. (This can be overridden using the + // "batchSize" meta key.) + var DEFAULT_BATCH_SIZE = 30; + var batchSize = (query.meta && query.meta.batchSize !== undefined) ? query.meta.batchSize : DEFAULT_BATCH_SIZE; + + // A flag that will be set to true after we've reached the VERY last batch. + var reachedLastBatch; + + // The index of the current batch. 
+ var i = 0; + + + async.whilst(function _checkHasntReachedLastBatchYet(){ + if (!reachedLastBatch) { return true; } + else { return false; } + },// ~∞%° + function _beginBatchMaybe(next) { + + // 0 => 15 + // 15 => 15 + // 30 => 15 + // 45 => 5 + // 50 + var numRecordsLeftUntilAbsLimit = query.criteria.limit - ( i*batchSize ); + var limitForThisBatch = Math.min(numRecordsLeftUntilAbsLimit, batchSize); + var skipForThisBatch = query.criteria.skip + ( i*batchSize ); + // |_initial offset + |_relative offset from end of previous batch + + + // If we've exceeded the absolute limit, then we go ahead and stop. + if (limitForThisBatch <= 0) { + reachedLastBatch = true; + return next(); + }//-• + + // Build the criteria + deferred object to do a `.find()` for this batch. + var criteriaForThisBatch = { + skip: skipForThisBatch, + limit: limitForThisBatch, + sort: query.criteria.sort, + select: query.criteria.select, + omit: query.criteria.omit, + where: query.criteria.where + }; + // console.log('---iterating---'); + // console.log('i:',i); + // console.log(' batchSize:',batchSize); + // console.log(' query.criteria.limit:',query.criteria.limit); + // console.log(' query.criteria.skip:',query.criteria.skip); + // console.log(' query.criteria.sort:',query.criteria.sort); + // console.log(' query.criteria.where:',query.criteria.where); + // console.log(' query.criteria.select:',query.criteria.select); + // console.log(' query.criteria.omit:',query.criteria.omit); + // console.log(' --'); + // console.log(' criteriaForThisBatch.limit:',criteriaForThisBatch.limit); + // console.log(' criteriaForThisBatch.skip:',criteriaForThisBatch.skip); + // console.log(' criteriaForThisBatch.sort:',criteriaForThisBatch.sort); + // console.log(' criteriaForThisBatch.where:',criteriaForThisBatch.where); + // console.log(' criteriaForThisBatch.select:',criteriaForThisBatch.select); + // console.log(' criteriaForThisBatch.omit:',criteriaForThisBatch.omit); + // console.log('---•••••••••---'); + 
var deferredForThisBatch = WLModel.find(criteriaForThisBatch); + + _.each(query.populates, function (assocCriteria, assocName){ + deferredForThisBatch = deferredForThisBatch.populate(assocName, assocCriteria); + }); + + // Pass through `meta` so we're sure to use the same db connection + // and settings (esp. relevant if we happen to be inside a transaction). + // > Note that we trim out `batchSize` to avoid tripping assertions about + // > method compatibility. + deferredForThisBatch.meta(query.meta ? _.omit(query.meta, ['batchSize']) : undefined); + + deferredForThisBatch.exec(function (err, batchOfRecords){ + if (err) { return next(err); } + + // If there were no records returned, then we have already reached the last batch of results. + // (i.e. it was the previous batch-- since this batch was empty) + // In this case, we'll set the `reachedLastBatch` flag and trigger our callback, + // allowing `async.whilst()` to call _its_ callback, which will pass control back + // to userland. + if (batchOfRecords.length === 0) { + reachedLastBatch = true; + return next(); + }// --• + + // But otherwise, we need to go ahead and call the appropriate + // iteratee for this batch. If it's eachBatchFn, we'll call it + // once. If it's eachRecordFn, we'll call it once per record. + (function _makeCallOrCallsToAppropriateIteratee(proceed){ + + // Check if the iteratee declares a callback parameter + var seemsToExpectCallback = (function(){ + var fn = query.eachBatchFn || query.eachRecordFn; + var fnStr = fn.toString().replace(STRIP_COMMENTS_RX, ''); + var parametersAsString = fnStr.slice(fnStr.indexOf('(')+1, fnStr.indexOf(')')); + // console.log(':seemsToExpectCallback:',parametersAsString, !!parametersAsString.match(/\,\s*([^,\{\}\[\]\s]+)\s*$/)); + return !! parametersAsString.match(/\,\s*([^,\{\}\[\]\s]+)\s*$/); + })();//† + + // If an `eachBatchFn` iteratee was provided, we'll call it. 
+ // > At this point we already know it's a function, because + // > we validated usage at the very beginning. + if (query.eachBatchFn) { + + // Note that, if you try to call next() more than once in the iteratee, Waterline + // logs a warning explaining what's up, ignoring all subsequent calls to next() + // that occur after the first. + var didIterateeAlreadyHalt; + try { + var promiseMaybe = query.eachBatchFn(batchOfRecords, function (err) { + if (!seemsToExpectCallback) { return proceed(new Error('Unexpected attempt to invoke callback. Since this per-batch iteratee function does not appear to expect a callback parameter, this stub callback was provided instead. Please either explicitly list the callback parameter among the arguments or change this code to no longer use a callback.')); }//• + if (err) { return proceed(err); }//• + if (didIterateeAlreadyHalt) { + console.warn( + 'Warning: The per-batch iteratee provided to `.stream()` triggered its callback \n'+ + 'again-- after already triggering it once! Please carefully check your iteratee\'s \n'+ + 'code to figure out why this is happening. (Ignoring this subsequent invocation...)' + ); + return; + }//-• + didIterateeAlreadyHalt = true; + return proceed(); + });//_∏_ + + // Take care of unhandled promise rejections from `await` (if appropriate) + if (query.eachBatchFn.constructor.name === 'AsyncFunction') { + if (!seemsToExpectCallback) { + promiseMaybe = promiseMaybe.then(function(){ + didIterateeAlreadyHalt = true; + proceed(); + });//_∏_ + }//fi + promiseMaybe.catch(function(e){ proceed(e); });//_∏_ + } else { + if (!seemsToExpectCallback) { + didIterateeAlreadyHalt = true; + return proceed(); + } + } + + } catch (e) { return proceed(e); }//>-• + + return; + }//_∏_. + + + // Otherwise `eachRecordFn` iteratee must have been provided. + // We'll call it once per record in this batch. 
+ // > We validated usage at the very beginning, so we know that + // > one or the other iteratee must have been provided as a + // > valid function if we made it here. + async.eachSeries(batchOfRecords, function _eachRecordInBatch(record, next) { + // Note that, if you try to call next() more than once in the iteratee, Waterline + // logs a warning explaining what's up, ignoring all subsequent calls to next() + // that occur after the first. + var didIterateeAlreadyHalt; + try { + var promiseMaybe = query.eachRecordFn(record, function (err) { + if (!seemsToExpectCallback) { return next(new Error('Unexpected attempt to invoke callback. Since this per-record iteratee function does not appear to expect a callback parameter, this stub callback was provided instead. Please either explicitly list the callback parameter among the arguments or change this code to no longer use a callback.')); }//• + if (err) { return next(err); } + + if (didIterateeAlreadyHalt) { + console.warn( + 'Warning: The per-record iteratee provided to `.stream()` triggered its callback\n'+ + 'again-- after already triggering it once! Please carefully check your iteratee\'s\n'+ + 'code to figure out why this is happening. 
(Ignoring this subsequent invocation...)' + ); + return; + }//-• + + didIterateeAlreadyHalt = true; + + return next(); + + });//_∏_ + + // Take care of unhandled promise rejections from `await` (if appropriate) + if (query.eachRecordFn.constructor.name === 'AsyncFunction') { + if (!seemsToExpectCallback) { + promiseMaybe = promiseMaybe.then(function(){ + didIterateeAlreadyHalt = true; + next(); + });//_∏_ + }//fi + promiseMaybe.catch(function(e){ next(e); });//_∏_ + } else { + if (!seemsToExpectCallback) { + didIterateeAlreadyHalt = true; + return next(); + } + }//fl + + } catch (e) { return next(e); } + + },// ~∞%° + function _afterIteratingOverRecordsInBatch(err) { + if (err) { return proceed(err); } + + return proceed(); + + });// + + })(function _afterCallingIteratee(err){ + if (err) { + return next(err); + } + + // Increment the batch counter. + i++; + + // On to the next batch! + return next(); + + });// + + });// + + },// ~∞%° + function _afterAsyncWhilst(err) { + if (err) { return done(err); }//-• + + // console.log('finished `.whilst()` successfully'); + return done(); + + });// + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; + + + + + +/** + * ad hoc demonstration... 
+ */ + +/*``` +theOrm = { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, age: { type: 'number', required: false }, foo: { type: 'string', required: true }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: false}, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: false } } }; +// ^^ except use a real ORM instance +testStream = require('./lib/waterline/methods/stream'); +testStream = require('@sailshq/lodash').bind(testStream, { waterline: theOrm, identity: 'user' }); +testStream({}, function (record, next){ return next(); }, console.log) +```*/ + + +// Or using `sails console` in a sample app: +// ``` +// Product.stream({where: {luckyNumber: 29}}).eachBatch(function(record, next){console.log('batch:', record); return next(); }).then(function(){ console.log('ok.', arguments); }).catch(function(){ console.log('uh oh!!!!', arguments); }) +// ``` diff --git a/lib/waterline/methods/sum.js b/lib/waterline/methods/sum.js new file mode 100644 index 000000000..231ca5e80 --- /dev/null +++ b/lib/waterline/methods/sum.js @@ -0,0 +1,300 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeAdapterError = require('../utils/query/forge-adapter-error'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var forgeStageThreeQuery = require('../utils/query/forge-stage-three-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('sum'); + + + +/** + * sum() + * + * Get the aggregate sum of the specified attribute across all matching records. 
+ * + * ``` + * // The cumulative account balance of all bank accounts that have + * // less than $32,000, or that are flagged as "suspended". + * BankAccount.sum('balance').where({ + * or: [ + * { balance: { '<': 32000 } }, + * { suspended: true } + * ] + * }).exec(function (err, total){ + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {String?} numericAttrName + * + * @param {Dictionary?} criteria + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. + * + * @param {Dictionary} moreQueryKeys + * For internal use. + * (A dictionary of query keys.) + * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {String} numericAttrName + * The name of a numeric attribute. + * (Must be declared as `type: 'number'`.) + * + * @qkey {Dictionary?} criteria + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function sum( /* numericAttrName?, criteria?, explicitCbMaybe?, meta?, moreQueryKeys? */ ) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. 
+ var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(sum); + + // Build query w/ initial, universal keys. + var query = { + method: 'sum', + using: modelIdentity + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + + // The `explicitCbMaybe` callback, if one was provided. + var explicitCbMaybe; + + // Handle the various supported usage possibilities + // (locate the `explicitCbMaybe` callback, and extend the `query` dictionary) + // + // > Note that we define `args` to minimize the chance of this "variadics" code + // > introducing any unoptimizable performance problems. For details, see: + // > https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#32-leaking-arguments + // > •=> `.length` is just an integer, this doesn't leak the `arguments` object itself + // > •=> `i` is always valid index in the arguments object + var args = new Array(arguments.length); + for (var i = 0; i < args.length; ++i) { + args[i] = arguments[i]; + } + + // • sum(numericAttrName, explicitCbMaybe, ..., ...) + if (args.length >= 2 && _.isFunction(args[1])) { + query.numericAttrName = args[0]; + explicitCbMaybe = args[1]; + query.meta = args[2]; + if (args[3]) { _.extend(query, args[3]); } + } + // • sum(numericAttrName, criteria, ..., ..., ...) 
+ else { + query.numericAttrName = args[0]; + query.criteria = args[1]; + explicitCbMaybe = args[2]; + query.meta = args[3]; + if (args[4]) { _.extend(query, args[4]); } + } + + // Due to the somewhat unusual variadic usage of this method, and because + // parley doesn't enforce this itself for performance reasons, make sure the + // explicit callback argument is a function, if provided. + if (explicitCbMaybe !== undefined && !_.isFunction(explicitCbMaybe)) { + throw flaverr({ + name: 'UsageError', + message: + '`.sum()` received an explicit callback function argument... but it '+ + 'was not a function: '+explicitCbMaybe + }, omen); + }//• + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If an explicit callback function was specified, then immediately run the logic below + // and trigger the explicit callback when the time comes. Otherwise, build and return + // a new Deferred now. (If/when the Deferred is executed, the logic below will run.) + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that it's time to actually do some stuff. + // So... 
+ // + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_NUMERIC_ATTR_NAME': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'The numeric attr name (i.e. first argument) to `.sum()` should '+ + 'be the name of an attribute in this model which is defined with `type: \'number\'`.\n'+ + 'Details:\n'+ + ' ' + e.details + '\n' + }, omen) + ); + // ^ custom override for the standard usage error. Note that we use `.details` to get at + // the underlying, lower-level error message (instead of logging redundant stuff from + // the envelope provided by the default error msg.) 
+ + case 'E_INVALID_CRITERIA': + case 'E_INVALID_META': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: e.message + }, omen) + ); + // ^ when the standard usage error message is good enough as-is, without any further customization + + case 'E_NOOP': + return done(undefined, 0); + + default: + return done(e); + // ^ when an internal, miscellaneous, or unexpected error occurs + } + } // >-• + + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + try { + query = forgeStageThreeQuery({ + stageTwoQuery: query, + identity: modelIdentity, + transformer: WLModel._transformer, + originalModels: orm.collections + }); + } catch (e) { return done(e); } + + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Grab the appropriate adapter method and call it. + var adapter = WLModel._adapter; + if (!adapter.sum) { + return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.')); + } + + adapter.sum(WLModel.datastore, query, function _afterTalkingToAdapter(err, sum) { + if (err) { + err = forgeAdapterError(err, omen, 'sum', modelIdentity, orm); + return done(err); + }//-• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Log a warning like the ones in `process-all-records` if + // the sum sent back by the adapter turns out to be something other + // than a number (for example, the naive behavior of a MySQL adapter + // in circumstances where criteria does not match any records); i.e. 
+ // ``` + // !_.isNumber(sum) || sum === Infinity || sum === -Infinity || _.isNaN(sum) + // ```` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + return done(undefined, sum); + + });// + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/update-one.js b/lib/waterline/methods/update-one.js new file mode 100644 index 000000000..4da505fdb --- /dev/null +++ b/lib/waterline/methods/update-one.js @@ -0,0 +1,219 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('updateOne'); + + + +/** + * updateOne() + * + * Update a single record that matches the specified criteria, patching it with + * the provided values and returning the updated record. + * + * @experimental + * + * TODO: document further + */ + +module.exports = function updateOne(criteria, valuesToSet, explicitCbMaybe, metaContainer) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + // Potentially build an omen for use below. + var omenMaybe = flaverr.omen(updateOne); + + // Build initial query. 
+ var query = { + method: 'updateOne', + using: modelIdentity, + criteria: criteria, + valuesToSet: valuesToSet, + meta: metaContainer + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // N/A + // (there are no out-of-order, optional arguments) + + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not specified, then build a new Deferred and bail now. + // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... 
+ + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + // This ensures a normalized format. + + try { + // Skip encryption on first forgeStageTwoQuery + // call to prevent encrypted validation errors on + // second call: https://github.com/balderdashy/sails/issues/6939 + query.meta = _.extend({}, query.meta || {}, { + skipEncryption: true + }); + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + case 'E_INVALID_CRITERIA': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid criteria.\n'+ + 'Details:\n'+ + ' '+e.details+'\n' + }, omenMaybe) + ); + + case 'E_INVALID_VALUES_TO_SET': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Cannot perform update with the provided values.\n'+ + 'Details:\n'+ + ' '+e.details+'\n' + }, omenMaybe) + ); + + case 'E_NOOP': + var noopResult = undefined; + return done(undefined, noopResult); + + default: + return done(e); + } + } + + // Do a .count() to ensure that there are ≤1 matching records. + // FUTURE: Make this transactional, if supported by the underlying adapter. + var modifiedCriteriaForCount = _.omit(query.criteria, ['select', 'omit', 'limit', 'skip', 'sort']); + WLModel.count(modifiedCriteriaForCount, function _afterCounting(err, total) { + if (err) { + return done(err); + } + + // If more than one matching record was found, then consider this an error. 
+ if (total > 1) { + return done(flaverr({ + message: + 'Preventing `.'+query.method+'()`: found too many ('+total+') matching records.\n'+ + '\n'+ + 'Criteria used:\n'+ + '···\n'+ + util.inspect(modifiedCriteriaForCount,{depth:5})+'\n'+ + '···' + }, omenMaybe)); + }//-• + + // Build a modified shallow clone of the originally-provided `meta` from + // userland, but that also has `fetch: true` and the private/experimental + // flag, `skipEncryption: false`. For context on the bit about encryption, + // see: https://github.com/balderdashy/sails/issues/4302#issuecomment-363883885 + // > PLEASE DO NOT RELY ON `skipEncryption` IN YOUR OWN CODE- IT COULD CHANGE + // > AT ANY TIME AND BREAK YOUR APP OR PLUGIN! + var modifiedMetaForUpdate = _.extend({}, query.meta || {}, { + fetch: true, + skipEncryption: false + }); + + var modifiedCriteriaForUpdate = _.omit(query.criteria, ['select', 'omit', 'limit', 'skip', 'sort']); + WLModel.update(modifiedCriteriaForUpdate, query.valuesToSet, function _afterUpdating(err, affectedRecords) { + if (err) { + return done(err); + } + + // Note that we always get `affectedRecords` here because "fetch" is enabled. + return done(undefined, affectedRecords[0]); + + }, modifiedMetaForUpdate);//_∏_ + }, query.meta);//_∏_ + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. 
+ _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/update.js b/lib/waterline/methods/update.js new file mode 100644 index 000000000..61bdc7859 --- /dev/null +++ b/lib/waterline/methods/update.js @@ -0,0 +1,467 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var async = require('async'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var parley = require('parley'); +var buildOmen = require('../utils/query/build-omen'); +var forgeAdapterError = require('../utils/query/forge-adapter-error'); +var forgeStageTwoQuery = require('../utils/query/forge-stage-two-query'); +var forgeStageThreeQuery = require('../utils/query/forge-stage-three-query'); +var getQueryModifierMethods = require('../utils/query/get-query-modifier-methods'); +var processAllRecords = require('../utils/query/process-all-records'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * Module constants + */ + +var DEFERRED_METHODS = getQueryModifierMethods('update'); + + + +/** + * update() + * + * Update records that match the specified criteria, patching them with + * the provided values. + * + * ``` + * // Forgive all debts: Zero out bank accounts with less than $0 in them. + * BankAccount.update().where({ + * balance: { '<': 0 } + * }).set({ + * balance: 0 + * }).exec(function(err) { + * // ... + * }); + * ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * Usage without deferred object: + * ================================================ + * + * @param {Dictionary} criteria + * + * @param {Dictionary} valuesToSet + * + * @param {Function?} explicitCbMaybe + * Callback function to run when query has either finished successfully or errored. + * (If unspecified, will return a Deferred object instead of actually doing anything.) + * + * @param {Ref?} meta + * For internal use. 
+ * + * @returns {Ref?} Deferred object if no `explicitCbMaybe` callback was provided + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * The underlying query keys: + * ============================== + * + * @qkey {Dictionary?} criteria + * @qkey {Dictionary?} valuesToSet + * + * @qkey {Dictionary?} meta + * @qkey {String} using + * @qkey {String} method + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function update(criteria, valuesToSet, explicitCbMaybe, metaContainer) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var WLModel = this; + var orm = this.waterline; + var modelIdentity = this.identity; + + + // Build an omen for potential use in the asynchronous callback below. + var omen = buildOmen(update); + + // Build initial query. 
+ var query = { + method: 'update', + using: modelIdentity, + criteria: criteria, + valuesToSet: valuesToSet, + meta: metaContainer + }; + + + // ██╗ ██╗ █████╗ ██████╗ ██╗ █████╗ ██████╗ ██╗ ██████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██║██╔══██╗██╔══██╗██║██╔════╝██╔════╝ + // ██║ ██║███████║██████╔╝██║███████║██║ ██║██║██║ ███████╗ + // ╚██╗ ██╔╝██╔══██║██╔══██╗██║██╔══██║██║ ██║██║██║ ╚════██║ + // ╚████╔╝ ██║ ██║██║ ██║██║██║ ██║██████╔╝██║╚██████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═════╝ ╚═╝ ╚═════╝╚══════╝ + // + // N/A + // (there are no out-of-order, optional arguments) + + + + // ██████╗ ███████╗███████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗ + // ██║ ██║█████╗ █████╗ █████╗ ██████╔╝ + // ██║ ██║██╔══╝ ██╔══╝ ██╔══╝ ██╔══██╗ + // ██████╔╝███████╗██║ ███████╗██║ ██║ + // ╚═════╝ ╚══════╝╚═╝ ╚══════╝╚═╝ ╚═╝ + // + // ██╗███╗ ███╗ █████╗ ██╗ ██╗██████╗ ███████╗██╗ + // ██╔╝████╗ ████║██╔══██╗╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██║ ██╔████╔██║███████║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║ ██║╚██╔╝██║██╔══██║ ╚██╔╝ ██╔══██╗██╔══╝ ██║ + // ╚██╗██║ ╚═╝ ██║██║ ██║ ██║ ██████╔╝███████╗██╔╝ + // ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ + // + // ┌┐ ┬ ┬┬┬ ┌┬┐ ┬ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌ ┌┐┌┌─┐┬ ┬ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬─┐┌─┐┌┬┐ + // ├┴┐│ │││ ││ ┌┼─ ├┬┘├┤ │ │ │├┬┘│││ │││├┤ │││ ││├┤ ├┤ ├┤ ├┬┘├┬┘├┤ ││ + // └─┘└─┘┴┴─┘─┴┘ └┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘ ┘└┘└─┘└┴┘ ─┴┘└─┘└ └─┘┴└─┴└─└─┘─┴┘ + // ┌─ ┬┌─┐ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ─┐ + // │─── │├┤ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ ───│ + // └─ ┴└ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ ─┘ + // If a callback function was not specified, then build a new Deferred and bail now. + // + // > This method will be called AGAIN automatically when the Deferred is executed. + // > and next time, it'll have a callback. + return parley( + + function (done){ + + // Otherwise, IWMIH, we know that a callback was specified. + // So... 
+ + // ███████╗██╗ ██╗███████╗ ██████╗██╗ ██╗████████╗███████╗ + // ██╔════╝╚██╗██╔╝██╔════╝██╔════╝██║ ██║╚══██╔══╝██╔════╝ + // █████╗ ╚███╔╝ █████╗ ██║ ██║ ██║ ██║ █████╗ + // ██╔══╝ ██╔██╗ ██╔══╝ ██║ ██║ ██║ ██║ ██╔══╝ + // ███████╗██╔╝ ██╗███████╗╚██████╗╚██████╔╝ ██║ ███████╗ + // ╚══════╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝ + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ││││ │ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ └┴┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // + // Forge a stage 2 query (aka logical protostatement) + // This ensures a normalized format. + + try { + forgeStageTwoQuery(query, orm); + } catch (e) { + switch (e.code) { + case 'E_INVALID_CRITERIA': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Invalid criteria.\n'+ + 'Details:\n'+ + ' '+e.details+'\n' + }, omen) + ); + + case 'E_INVALID_VALUES_TO_SET': + return done( + flaverr({ + name: 'UsageError', + code: e.code, + details: e.details, + message: + 'Cannot perform update with the provided values.\n'+ + 'Details:\n'+ + ' '+e.details+'\n' + }, omen) + ); + + case 'E_NOOP': + // Determine the appropriate no-op result. + // If `fetch` meta key is set, use `[]`-- otherwise use `undefined`. + // + // > Note that future versions might simulate output from the raw driver. + // > (e.g. `{ numRecordsUpdated: 0 }`) + // > See: https://github.com/treelinehq/waterline-query-docs/blob/master/docs/results.md#update + var noopResult = undefined; + if (query.meta && query.meta.fetch) { + noopResult = []; + }//>- + return done(undefined, noopResult); + + default: + return done(e); + } + } + + + // ╦ ╦╔═╗╔╗╔╔╦╗╦ ╔═╗ ┬ ┬┌─┐┌─┐┌─┐┬ ┬┌─┐┬ ┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ╠═╣╠═╣║║║ ║║║ ║╣ BEFORE │ │├┤ ├┤ │ └┬┘│ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ╩ ╩╩ ╩╝╚╝═╩╝╩═╝╚═╝ ┴─┘┴└ └─┘└─┘ ┴ └─┘┴─┘└─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + // Run the "before" lifecycle callback, if appropriate. 
+ (function(proceed) { + // If the `skipAllLifecycleCallbacks` meta flag was set, don't run any of + // the methods. + if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return proceed(undefined, query); + } + + if (!_.has(WLModel._callbacks, 'beforeUpdate')) { + return proceed(undefined, query); + } + + WLModel._callbacks.beforeUpdate(query.valuesToSet, function(err){ + if (err) { return proceed(err); } + return proceed(undefined, query); + }); + + })(function(err, query) { + if (err) { + return done(err); + } + + // ================================================================================ + // FUTURE: potentially bring this back (but also would need the `omit clause`) + // ================================================================================ + // // Before we get to forging again, save a copy of the stage 2 query's + // // `select` clause. We'll need this later on when processing the resulting + // // records, and if we don't copy it now, it might be damaged by the forging. + // // + // // > Note that we don't need a deep clone. + // // > (That's because the `select` clause is only 1 level deep.) + // var s2QSelectClause = _.clone(query.criteria.select); + // ================================================================================ + + + // ╔═╗╦ ╦╔═╗╔═╗╦╔═ ┌─┐┌─┐┬─┐ ┌─┐┌┐┌┬ ┬ + // ║ ╠═╣║╣ ║ ╠╩╗ ├┤ │ │├┬┘ ├─┤│││└┬┘ + // ╚═╝╩ ╩╚═╝╚═╝╩ ╩ └ └─┘┴└─ ┴ ┴┘└┘ ┴ + // ┌─┐┌─┐┬ ┬ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ ┬─┐┌─┐┌─┐┌─┐┌┬┐┌─┐ + // │ │ ││ │ ├┤ │ │ ││ ││││ ├┬┘├┤ └─┐├┤ │ └─┐ + // └─┘└─┘┴─┘┴─┘└─┘└─┘ ┴ ┴└─┘┘└┘ ┴└─└─┘└─┘└─┘ ┴ └─┘ + // Also removes them from the valuesToSet before sending to the adapter. + var collectionResets = {}; + _.each(WLModel.attributes, function _eachKnownAttrDef(attrDef, attrName) { + if (attrDef.collection) { + + // Only track a reset if a value was explicitly specified for this collection assoc. 
+ // (All we have to do is just check for truthiness, since we've already done FS2Q at this point) + if (query.valuesToSet[attrName]) { + collectionResets[attrName] = query.valuesToSet[attrName]; + + // Remove the collection value from the valuesToSet because the adapter + // doesn't need to do anything during the initial update. + delete query.valuesToSet[attrName]; + } + + } + });// + + // Hold a variable for the queries `meta` property that could possibly be + // changed by us later on. + var modifiedMetaForCollectionResets; + + // If any collection resets were specified, force `fetch: true` (meta key) + // so that we can use it below. + if (_.keys(collectionResets).length > 0) { + // Build a modified shallow clone of the originally-provided `meta` + // that also has `fetch: true`. + modifiedMetaForCollectionResets = _.extend({}, query.meta || {}, { fetch: true }); + }//>- + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┌┬┐┌─┐┌─┐┌─┐ ┌┬┐┬ ┬┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ + // ╠╣ ║ ║╠╦╝║ ╦║╣ └─┐ │ ├─┤│ ┬├┤ │ ├─┤├┬┘├┤ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘ + // ╚ ╚═╝╩╚═╚═╝╚═╝ └─┘ ┴ ┴ ┴└─┘└─┘ ┴ ┴ ┴┴└─└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴ + // Now, destructively forge this S2Q into a S3Q. + try { + query = forgeStageThreeQuery({ + stageTwoQuery: query, + identity: modelIdentity, + transformer: WLModel._transformer, + originalModels: orm.collections + }); + } catch (e) { return done(e); } + + + // ┌─┐┌─┐┌┐┌┌┬┐ ┌┬┐┌─┐ ╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗ + // └─┐├┤ │││ ││ │ │ │ ╠═╣ ║║╠═╣╠═╝ ║ ║╣ ╠╦╝ + // └─┘└─┘┘└┘─┴┘ ┴ └─┘ ╩ ╩═╩╝╩ ╩╩ ╩ ╚═╝╩╚═ + // Grab the appropriate adapter method and call it. 
+ var adapter = WLModel._adapter; + if (!adapter.update) { + return done(new Error('The adapter used by this model (`' + modelIdentity + '`) doesn\'t support the `'+query.method+'` method.')); + } + + // Allow the query to possibly use the modified meta + if (modifiedMetaForCollectionResets) { + query.meta = modifiedMetaForCollectionResets; + } + + adapter.update(WLModel.datastore, query, function _afterTalkingToAdapter(err, rawAdapterResult) { + if (err) { + err = forgeAdapterError(err, omen, 'update', modelIdentity, orm); + return done(err); + }//-• + + + // ╔═╗╔╦╗╔═╗╔═╗ ╔╗╔╔═╗╦ ╦ ┬ ┬┌┐┌┬ ┌─┐┌─┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬ + // ╚═╗ ║ ║ ║╠═╝ ║║║║ ║║║║ │ │││││ ├┤ └─┐└─┐ ╠╣ ║╣ ║ ║ ╠═╣ │││├┤ │ ├─┤ ├┴┐├┤ └┬┘ + // ╚═╝ ╩ ╚═╝╩ ╝╚╝╚═╝╚╩╝ooo └─┘┘└┘┴─┘└─┘└─┘└─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴ + // ┬ ┬┌─┐┌─┐ ┌─┐┌─┐┌┬┐ ┌┬┐┌─┐ ┌┬┐┬─┐┬ ┬┌─┐ + // │││├─┤└─┐ └─┐├┤ │ │ │ │ │ ├┬┘│ │├┤ + // └┴┘┴ ┴└─┘ └─┘└─┘ ┴ ┴ └─┘ ┴ ┴└─└─┘└─┘ + var fetch = modifiedMetaForCollectionResets || (_.has(query.meta, 'fetch') && query.meta.fetch); + // If `fetch` was not enabled, return. + if (!fetch) { + + // > Note: This `if` statement is a convenience, for cases where the result from + // > the adapter may have been coerced from `undefined` to `null` automatically. + // > (we want it to be `undefined` still, for consistency) + if (_.isNull(rawAdapterResult)) { + return done(); + }//-• + + if (!_.isUndefined(rawAdapterResult)) { + console.warn('\n'+ + 'Warning: Unexpected behavior in database adapter:\n'+ + 'Since `fetch` is NOT enabled, this adapter (for datastore `'+WLModel.datastore+'`)\n'+ + 'should NOT have sent back anything as the 2nd argument when triggering the callback\n'+ + 'from its `update` method. But it did! And since it\'s an array, displaying this\n'+ + 'warning to help avoid confusion and draw attention to the bug. 
Specifically, got:\n'+ + util.inspect(rawAdapterResult, {depth:5})+'\n'+ + '(Ignoring it and proceeding anyway...)'+'\n' + ); + }//>- + + return done(); + + }//-• + + + // IWMIH then we know that `fetch: true` meta key was set, and so the + // adapter should have sent back an array. + + // Verify that the raw result from the adapter is an array. + if (!_.isArray(rawAdapterResult)) { + return done(new Error( + 'Unexpected behavior in database adapter: Since `fetch: true` was enabled, this adapter '+ + '(for datastore `'+WLModel.datastore+'`) should have sent back an array of records as the '+ + '2nd argument when triggering the callback from its `update` method. But instead, got: '+ + util.inspect(rawAdapterResult, {depth:5})+'' + )); + }//-• + + // Unserialize each record + var transformedRecords; + try { + // Attempt to convert the column names in each record back into attribute names. + transformedRecords = rawAdapterResult.map(function(record) { + return WLModel._transformer.unserialize(record); + }); + } catch (e) { return done(e); } + + + // Check the records to verify compliance with the adapter spec, + // as well as any issues related to stale data that might not have been + // been migrated to keep up with the logical schema (`type`, etc. in + // attribute definitions). 
+ try { + processAllRecords(transformedRecords, query.meta, modelIdentity, orm); + } catch (e) { return done(e); } + + + // ┌─┐┌─┐┬ ┬ ╦═╗╔═╗╔═╗╦ ╔═╗╔═╗╔═╗ ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗╦╔═╗╔╗╔ ┌─┐┌─┐┬─┐ + // │ ├─┤│ │ ╠╦╝║╣ ╠═╝║ ╠═╣║ ║╣ ║ ║ ║║ ║ ║╣ ║ ║ ║║ ║║║║ ├┤ │ │├┬┘ + // └─┘┴ ┴┴─┘┴─┘ ╩╚═╚═╝╩ ╩═╝╩ ╩╚═╝╚═╝ ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ ╩╚═╝╝╚╝ └ └─┘┴└─ + // ┌─┐─┐ ┬┌─┐┬ ┬┌─┐┬┌┬┐┬ ┬ ┬ ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐┌┬┐ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ + // ├┤ ┌┴┬┘├─┘│ ││ │ │ │ └┬┘───└─┐├─┘├┤ │ │├┤ │├┤ ││ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││└─┐ + // └─┘┴ └─┴ ┴─┘┴└─┘┴ ┴ ┴─┘┴ └─┘┴ └─┘└─┘┴└ ┴└─┘─┴┘ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘└─┘ + var targetIds = _.pluck(transformedRecords, WLModel.primaryKey); + async.each(_.keys(collectionResets), function _eachReplaceCollectionOp(collectionAttrName, next) { + + WLModel.replaceCollection(targetIds, collectionAttrName, collectionResets[collectionAttrName], function(err){ + if (err) { return next(err); } + return next(); + }, query.meta); + + },// ~∞%° + function _afterReplacingAllCollections(err) { + if (err) { return done(err); } + + + // ╔═╗╔═╗╔╦╗╔═╗╦═╗ ┬ ┬┌─┐┌┬┐┌─┐┌┬┐┌─┐ ┌─┐┌─┐┬ ┬ ┌┐ ┌─┐┌─┐┬┌─ + // ╠═╣╠╣ ║ ║╣ ╠╦╝ │ │├─┘ ││├─┤ │ ├┤ │ ├─┤│ │ ├┴┐├─┤│ ├┴┐ + // ╩ ╩╚ ╩ ╚═╝╩╚═ └─┘┴ ─┴┘┴ ┴ ┴ └─┘ └─┘┴ ┴┴─┘┴─┘└─┘┴ ┴└─┘┴ ┴ + // Run "after" lifecycle callback AGAIN and AGAIN- once for each record. + // ============================================================ + // FUTURE: look into this + // (we probably shouldn't call this again and again-- + // plus what if `fetch` is not in use and you want to use an LC? + // Then again- the right answer isn't immediately clear. And it + // probably not worth breaking compatibility until we have a much + // better solution) + // ============================================================ + async.each(transformedRecords, function _eachRecord(record, next) { + + // If the `skipAllLifecycleCallbacks` meta flag was set, don't run any of + // the methods. 
+ if (_.has(query.meta, 'skipAllLifecycleCallbacks') && query.meta.skipAllLifecycleCallbacks) { + return next(); + } + + // Skip "after" lifecycle callback, if not defined. + if (!_.has(WLModel._callbacks, 'afterUpdate')) { + return next(); + } + + // Otherwise run it. + WLModel._callbacks.afterUpdate(record, function _afterMaybeRunningAfterUpdateForThisRecord(err) { + if (err) { + return next(err); + } + + return next(); + }); + + },// ~∞%° + function _afterIteratingOverRecords(err) { + if (err) { + return done(err); + } + + return done(undefined, transformedRecords); + + });// + + });// + });// + });// + + }, + + + explicitCbMaybe, + + + _.extend(DEFERRED_METHODS, { + + // Provide access to this model for use in query modifier methods. + _WLModel: WLModel, + + // Set up initial query metadata. + _wlQueryInfo: query, + + }) + + );// + +}; diff --git a/lib/waterline/methods/validate.js b/lib/waterline/methods/validate.js new file mode 100644 index 000000000..a0b5e063c --- /dev/null +++ b/lib/waterline/methods/validate.js @@ -0,0 +1,172 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var normalizeValueToSet = require('../utils/query/private/normalize-value-to-set'); +var verifyModelMethodContext = require('../utils/query/verify-model-method-context'); + + +/** + * validate() + * + * Verify that a value would be valid for a given attribute, then return it, loosely coerced. + * + * > Note that this validates the value in the same way it would be checked + * > if it was passed in to an `.update()` query-- NOT a `.create()`!! + * + * ``` + * // Check the given string and return a normalized version. 
+ * var normalizedBalance = BankAccount.validate('balance', '349.86'); + * //=> 349.86 + * + * // Note that if normalization is not possible, this throws: + * var normalizedBalance; + * try { + * normalizedBalance = BankAccount.validate('balance', '$349.86'); + * } catch (e) { + * switch (e.code) { + * case 'E_': + * console.log(e); + * // => '[Error: Invalid `bankAccount`]' + * throw e; + * default: throw e; + * } + * } + * + * // IWMIH, then it was valid...although it may have been normalized a bit (potentially in-place). + * + * ``` + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {String} attrName + * The name of the attribute to validate against. + * + * @param {Ref} value + * The value to validate/normalize. + * + * -- + * + * @returns {Ref} + * The successfully-normalized value. (MAY or MAY NOT be the same as the original reference.) + * + * -- + * + * @throws {Error} If it encounters incompatible usage in the provided `value`, + * including e.g. the case where an invalid value is specified for + * an association. + * @property {String} code + * - E_HIGHLY_IRREGULAR + * + * + * @throws {Error} If the provided `value` has an incompatible data type. + * | @property {String} code + * | - E_TYPE + * | @property {String} expectedType + * | - string + * | - number + * | - boolean + * | - json + * | + * | This is only versus the attribute's declared "type", or other similar type safety issues -- + * | certain failed checks for associations result in a different error code (see above). + * | + * | Remember: + * | This is the case where a _completely incorrect type of data_ was passed in. + * | This is NOT a high-level "anchor" validation failure! 
(see below for that) + * | > Unlike anchor validation errors, this exception should never be negotiated/parsed/used + * | > for delivering error messages to end users of an application-- it is carved out + * | > separately purely to make things easier to follow for the developer. + * + * + * @throws {Error} If the provided `value` fails the requiredness guarantee of the corresponding attribute. + * | @property {String} code + * | - E_REQUIRED + * + * + * @throws {Error} If the provided `value` violates one or more of the high-level validation rules + * | configured for the corresponding attribute. + * | @property {String} code + * | - E_VIOLATES_RULES + * | @property {Array} ruleViolations + * | e.g. + * | ``` + * | [ + * | { + * | rule: 'minLength', //(isEmail/isNotEmptyString/max/isNumber/etc) + * | message: 'Too few characters (max 30)' + * | } + * | ] + * | ``` + * + * + * @throws {Error} If anything else unexpected occurs. + * + */ + +module.exports = function validate(attrName, value) { + + // Verify `this` refers to an actual Sails/Waterline model. + verifyModelMethodContext(this); + + // Set up a few, common local vars for convenience / familiarity. + var orm = this.waterline; + var modelIdentity = this.identity; + + if (!_.isString(attrName)) { + throw flaverr({ name: 'UsageError' }, new Error( + 'Please specify the name of the attribute to validate against (1st argument).' + )); + }//-• + + var normalizedVal; + try { + normalizedVal = normalizeValueToSet(value, attrName, modelIdentity, orm); + } catch (e) { + switch (e.code) { + + // If it is determined that this should be ignored, it's either because + // the attr is outside of the schema or the value is undefined. In this + // case, set it to `undefined` and then continue on ahead to the checks + // below. 
+ case 'E_SHOULD_BE_IGNORED': + normalizedVal = undefined; + break; + + // Violated the attribute's validation ruleset + case 'E_VIOLATES_RULES': + throw e; + + // Failed requireness guarantee + case 'E_REQUIRED': + throw e; + + // Failed type safety check + case 'E_TYPE': + throw e; + + // Miscellaneous incompatibility + case 'E_HIGHLY_IRREGULAR': + throw e; + + // Unexpected error + default: + throw e; + } + }//>-• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: expand this logic so that it can work like it does for `.create()` + // (in addition or instead of just working like it does for .update()) + // + // That entails applying required and defaultsTo down here at the bottom, + // and figuring out what makes sense to do for the auto timestamps. Note + // that we'll also need to change the `false` flag above to `true` (the one + // we pass in to normalizeValueToSet) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // Return normalized value. 
+ return normalizedVal; + +}; diff --git a/lib/waterline/model/index.js b/lib/waterline/model/index.js deleted file mode 100644 index 688c49d7a..000000000 --- a/lib/waterline/model/index.js +++ /dev/null @@ -1,134 +0,0 @@ - -/** - * Module dependencies - */ - -var _ = require('lodash'); -var Bluebird = require('bluebird'); -var Model = require('./lib/model'); -var defaultMethods = require('./lib/defaultMethods'); -var internalMethods = require('./lib/internalMethods'); - -/** - * Build Extended Model Prototype - * - * @param {Object} context - * @param {Object} mixins - * @return {Object} - * @api public - */ - -module.exports = function(context, mixins) { - - /** - * Extend the model prototype with default instance methods - */ - - var prototypeFns = { - - toObject: function() { - return new defaultMethods.toObject(context, this); - }, - - save: function(options, cb) { - return new defaultMethods.save(context, this, options, cb); - }, - - destroy: function(cb) { - return new defaultMethods.destroy(context, this, cb); - }, - - _defineAssociations: function() { - new internalMethods.defineAssociations(context, this); - }, - - _normalizeAssociations: function() { - new internalMethods.normalizeAssociations(context, this); - }, - - _cast: function(values) { - _.keys(context._attributes).forEach(function(key) { - var type = context._attributes[key].type; - - // Attempt to parse Array or JSON type - if (type === 'array' || type === 'json') { - if (!_.isString(values[key])) return; - try { - values[key] = JSON.parse(values[key]); - } catch(e) { - return; - } - } - - // Convert booleans back to true/false - if (type === 'boolean') { - var val = values[key]; - if (val === 0) values[key] = false; - if (val === 1) values[key] = true; - } - - }); - }, - - /** - * Model.validate() - * - * Takes the currently set attributes and validates the model - * Shorthand for Model.validate({ attributes }, cb) - * - * @param {Function} callback - (err) - * @return {Promise} - */ - - 
validate: function(cb) { - // Collect current values - var values = this.toObject(); - - if (cb) { - context.validate(values, function(err) { - if (err) { return cb(err); } - cb(); - }); - return; - } else { - return new Bluebird(function(resolve, reject) { - context.validate(values, function(err) { - if (err) { return reject(err); } - resolve(); - }); - }); - } - } - - }; - - // If any of the attributes are protected, the default toJSON method should - // remove them. - var protectedAttributes = _.compact(_.map(context._attributes, function(attr, key) {return attr.protected ? key : undefined;})); - - prototypeFns.toJSON = function() { - var obj = this.toObject(); - - if (protectedAttributes.length) { - _.each(protectedAttributes, function(key) { - delete obj[key]; - }); - } - - // Remove toJSON from the result, to prevent infinite recursion with - // msgpack or other recursive object transformation tools. - // - // Causes issues if set to null and will error in Sails if we delete it because blueprints call it. 
- // - // obj.toJSON = null; - - return obj; - }; - - var prototype = _.extend(prototypeFns, mixins); - - var model = Model.extend(prototype); - - // Return the extended model for use in Waterline - return model; -}; diff --git a/lib/waterline/model/lib/association.js b/lib/waterline/model/lib/association.js deleted file mode 100644 index 1b812c6c9..000000000 --- a/lib/waterline/model/lib/association.js +++ /dev/null @@ -1,63 +0,0 @@ - -/** - * Handles an Association - */ - -var Association = module.exports = function() { - this.addModels = []; - this.removeModels = []; - this.value = []; -}; - -/** - * Set Value - * - * @param {Number|Object} value - * @api private - */ - -Association.prototype._setValue = function(value) { - if (Array.isArray(value)) { - this.value = value; - return; - } - - this.value = this.value = [value]; -}; - -/** - * Get Value - * - * @api private - */ - -Association.prototype._getValue = function() { - var self = this; - var value = this.value; - - // Attach association methods to values array - // This allows access using the getter and the desired - // API for synchronously adding and removing associations. 
- - value.add = function add(obj) { - if (Array.isArray(obj)) { - obj.forEach(function(el) { - self.addModels.push(el); - }); - } else { - self.addModels.push(obj); - } - }; - - value.remove = function remove(obj) { - if (Array.isArray(obj)) { - obj.forEach(function(el) { - self.removeModels.push(el); - }); - } else { - self.removeModels.push(obj); - } - }; - - return value; -}; diff --git a/lib/waterline/model/lib/associationMethods/add.js b/lib/waterline/model/lib/associationMethods/add.js deleted file mode 100644 index e494492d8..000000000 --- a/lib/waterline/model/lib/associationMethods/add.js +++ /dev/null @@ -1,394 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var async = require('async'); -var utils = require('../../../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; - -/** - * Add associations for a model. - * - * If an object was used a new record should be created and linked to the parent. - * If only a primary key was used then the record should only be linked to the parent. - * - * Called in the model instance context. - * - * @param {Object} collection - * @param {Object} proto - * @param {Object} records - * @param {Function} callback - */ - -var Add = module.exports = function(collection, proto, records, cb) { - - this.collection = collection; - this.proto = proto; - this.failedTransactions = []; - this.primaryKey = null; - - var values = proto.toObject(); - var attributes = collection.waterline.schema[collection.identity].attributes; - - this.primaryKey = this.findPrimaryKey(attributes, values); - - if (!this.primaryKey) { - return cb(new Error('No Primary Key set to associate the record with! ' + - 'Try setting an attribute as a primary key or include an ID property.')); - } - - if (!proto.toObject()[this.primaryKey]) { - return cb(new Error('No Primary Key set to associate the record with! 
' + - 'Primary Key must have a value, it can\'t be an optional value.')); - } - - // Loop through each of the associations on this model and add any associations - // that have been specified. Do this in series and limit the actual saves to 10 - // at a time so that connection pools are not exhausted. - // - // In the future when transactions are available this will all be done on a single - // connection and can be re-written. - - this.createCollectionAssociations(records, cb); -}; - -/** - * Find Primary Key - * - * @param {Object} attributes - * @param {Object} values - * @api private - */ - -Add.prototype.findPrimaryKey = function(attributes, values) { - var primaryKey = null; - - for (var attribute in attributes) { - if (hasOwnProperty(attributes[attribute], 'primaryKey') && attributes[attribute].primaryKey) { - primaryKey = attribute; - break; - } - } - - // If no primary key check for an ID property - if (!primaryKey && hasOwnProperty(values, 'id')) primaryKey = 'id'; - - return primaryKey; -}; - -/** - * Create Collection Associations - * - * @param {Object} records - * @param {Function} callback - * @api private - */ - -Add.prototype.createCollectionAssociations = function(records, cb) { - var self = this; - - async.eachSeries(Object.keys(records), function(associationKey, next) { - self.createAssociations(associationKey, records[associationKey], next); - }, - - function(err) { - if (err || self.failedTransactions.length > 0) { - return cb(null, self.failedTransactions); - } - - cb(); - }); -}; - -/** - * Create Records for an Association property on a collection - * - * @param {String} key - * @param {Array} records - * @param {Function} callback - * @api private - */ - -Add.prototype.createAssociations = function(key, records, cb) { - var self = this; - - // Grab the collection the attribute references - // this allows us to make a query on it - var attribute = this.collection._attributes[key]; - var collectionName = attribute.collection.toLowerCase(); - 
var associatedCollection = this.collection.waterline.collections[collectionName]; - var relatedPK = _.find(associatedCollection.attributes, { primaryKey: true }); - var schema = this.collection.waterline.schema[this.collection.identity].attributes[key]; - - // Limit Adds to 10 at a time to prevent the connection pool from being exhausted - async.eachLimit(records, 10, function(association, next) { - - // If an object was passed in it should be created. - // This allows new records to be created through the association interface - if (association !== null && typeof association === 'object' && Object.keys(association).length > 0) { - - // If a custom PK was used on the associated collection and it's not - // autoIncrementing, create the record. This allows nested - // creates to work when custom PK's are used. - if (!relatedPK || !relatedPK.autoIncrement && !associatedCollection.autoPK) { - return self.createNewRecord(associatedCollection, schema, association, key, next); - } - - // Check if the record contains a primary key, if so just link the values - if (hasOwnProperty(association, associatedCollection.primaryKey)) { - var pk = associatedCollection.primaryKey; - return self.updateRecord(associatedCollection, schema, association[pk], key, next); - } - - return self.createNewRecord(associatedCollection, schema, association, key, next); - } - - // If the value is a primary key just update the association's foreign key - // This will either create the new association through a foreign key or re-associatiate - // with another collection. 
- self.updateRecord(associatedCollection, schema, association, key, next); - - }, cb); -}; - -/** - * Create A New Record - * - * @param {Object} collection - * @param {Object} attribute - * @param {Object} values - * @param {Function} callback - * @api private - */ - -Add.prototype.createNewRecord = function(collection, attribute, values, key, cb) { - var self = this; - - // Check if this is a many-to-many by looking at the junctionTable flag - var schema = this.collection.waterline.schema[attribute.collection.toLowerCase()]; - var junctionTable = schema.junctionTable || schema.throughTable; - - // If this isn't a many-to-many then add the foreign key in to the values - if (!junctionTable) { - values[attribute.onKey] = this.proto[this.primaryKey]; - } - - collection.create(values, function(err, record) { - if (err) { - - // If no via was specified and the insert failed on a one-to-many build up an error message that - // properly reflects the error. - if (!junctionTable && !hasOwnProperty(attribute, 'via')) { - err = new Error('You attempted to create a has many relationship but didn\'t link the two ' + - 'atttributes together. 
Please setup a link using the via keyword.'); - } - - self.failedTransactions.push({ - type: 'insert', - collection: collection.identity, - values: values, - err: err - }); - } - - // if no junction table then return - if (!junctionTable) return cb(); - - // if junction table but there was an error don't try and link the records - if (err) return cb(); - - // Find the collection's Primary Key value - var primaryKey = self.findPrimaryKey(collection._attributes, record.toObject()); - - if (!primaryKey) { - self.failedTransactions.push({ - type: 'insert', - collection: collection.identity, - values: {}, - err: new Error('No Primary Key value was found on the joined collection') - }); - } - - // Find the Many To Many Collection - var joinCollection = self.collection.waterline.collections[attribute.collection.toLowerCase()]; - - // The related record was created now the record in the junction table - // needs to be created to link the two records - self.createManyToMany(joinCollection, attribute, record[primaryKey], key, cb); - }); -}; - -/** - * Update A Record - * - * @param {Object} collection - * @param {Object} attribute - * @param {Object} values - * @param {Function} callback - * @api private - */ - -Add.prototype.updateRecord = function(collection, attribute, pk, key, cb) { - var self = this; - - // Check if this is a many-to-many by looking at the junctionTable flag - var schema = this.collection.waterline.schema[attribute.collection.toLowerCase()]; - var junctionTable = schema.junctionTable || schema.throughTable; - - // If so build out the criteria and create a new record in the junction table - if (junctionTable) { - var joinCollection = this.collection.waterline.collections[attribute.collection.toLowerCase()]; - return this.createManyToMany(joinCollection, attribute, pk, key, cb); - } - - // Grab the associated collection's primaryKey - var attributes = this.collection.waterline.schema[collection.identity].attributes; - var associationKey = 
this.findPrimaryKey(attributes, attributes); - - if (!associationKey) { - return cb(new Error('No Primary Key defined on the child record you ' + - 'are trying to associate the record with! Try setting an attribute as a primary key or ' + - 'include an ID property.')); - } - - // Build up criteria and updated values used to update the record - var criteria = {}; - var _values = {}; - - criteria[associationKey] = pk; - _values[attribute.onKey] = this.proto[this.primaryKey]; - - collection.update(criteria, _values, function(err) { - - if (err) { - self.failedTransactions.push({ - type: 'update', - collection: collection.identity, - criteria: criteria, - values: _values, - err: err - }); - } - - cb(); - }); -}; - -/** - * Create A Many To Many Join Table Record - * - * @param {Object} collection - * @param {Object} attribute - * @param {Object} values - * @param {Function} callback - * @api private - */ - -Add.prototype.createManyToMany = function(collection, attribute, pk, key, cb) { - var self = this; - - // Grab the associated collection's primaryKey - var collectionAttributes = this.collection.waterline.schema[attribute.collection.toLowerCase()]; - var associationKey = collectionAttributes.attributes[attribute.on].via; - - // If this is a throughTable, look into the meta data cache for what key to use - if (collectionAttributes.throughTable) { - var cacheKey = collectionAttributes.throughTable[attribute.on + '.' + key]; - if (!cacheKey) { - return cb(new Error('Unable to find the proper cache key in the through table definition')); - } - - associationKey = cacheKey; - } - - if (!associationKey) { - return cb(new Error('No Primary Key set on the child record you ' + - 'are trying to associate the record with! 
Try setting an attribute as a primary key or ' + - 'include an ID property.')); - } - - // Build up criteria and updated values used to create the record - var criteria = {}; - var _values = {}; - - criteria[associationKey] = pk; - criteria[attribute.onKey] = this.proto[this.primaryKey]; - _values = _.clone(criteria); - - async.auto({ - - validateAssociation: function(next) { - var associatedCollectionName = collectionAttributes.attributes[associationKey].references; - var associatedCollection = self.collection.waterline.collections[associatedCollectionName]; - var primaryKey = self.findPrimaryKey(associatedCollection.attributes, {}); - var _criteria = {}; - _criteria[primaryKey] = pk; - - associatedCollection.findOne(_criteria, function(err, record) { - if (err) return next(err); - if (!record) { - return next(new Error('Associated Record For ' + associatedCollectionName + - ' with ' + primaryKey + ' = ' + pk + ' No Longer Exists')); - } - - next(); - }); - }, - - validateRecord: function(next) { - - // First look up the record to ensure it doesn't exist - collection.findOne(criteria, function(err, val) { - if (err || val) { - return next(new Error('Trying to \'.add()\' an instance which already exists!')); - } - next(); - }); - }, - - createRecord: ['validateAssociation', 'validateRecord', function(next) { - collection.create(_values, next); - }] - - }, function(err) { - if (err) { - self.failedTransactions.push({ - type: 'insert', - collection: collection.identity, - criteria: criteria, - values: _values, - err: err - }); - } - - return cb(); - - }); -}; - -/** - * Find Association Key - * - * @param {Object} collection - * @return {String} - * @api private - */ - -Add.prototype.findAssociationKey = function(collection) { - var associationKey = null; - - for (var attribute in collection.attributes) { - var attr = collection.attributes[attribute]; - var identity = this.collection.identity; - - if (!hasOwnProperty(attr, 'references')) continue; - var 
attrCollection = attr.references; - - if (attrCollection !== identity) { - associationKey = attr.columnName; - } - } - - return associationKey; -}; diff --git a/lib/waterline/model/lib/associationMethods/remove.js b/lib/waterline/model/lib/associationMethods/remove.js deleted file mode 100644 index cfecf34a0..000000000 --- a/lib/waterline/model/lib/associationMethods/remove.js +++ /dev/null @@ -1,294 +0,0 @@ -var _ = require('lodash'); -var async = require('async'); -var utils = require('../../../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; - -/** - * Remove associations from a model. - * - * Accepts a primary key value of an associated record that already exists in the database. - * - * - * @param {Object} collection - * @param {Object} proto - * @param {Object} records - * @param {Function} callback - */ - -var Remove = module.exports = function(collection, proto, records, cb) { - - this.collection = collection; - this.proto = proto; - this.failedTransactions = []; - this.primaryKey = null; - - var values = proto.toObject(); - var attributes = collection.waterline.schema[collection.identity].attributes; - - this.primaryKey = this.findPrimaryKey(attributes, values); - - if (!this.primaryKey) { - return cb(new Error('No Primary Key set to associate the record with! ' + - 'Try setting an attribute as a primary key or include an ID property.')); - } - - if (!proto.toObject()[this.primaryKey]) { - return cb(new Error('No Primary Key set to associate ' + - 'the record with! Primary Key must have a value, it can\'t be an optional value.')); - } - - // Loop through each of the associations on this model and remove any associations - // that have been specified. Do this in series and limit the actual saves to 10 - // at a time so that connection pools are not exhausted. - // - // In the future when transactions are available this will all be done on a single - // connection and can be re-written. 
- this.removeCollectionAssociations(records, cb); -}; - -/** - * Find Primary Key - * - * @param {Object} attributes - * @param {Object} values - * @api private - */ - -Remove.prototype.findPrimaryKey = function(attributes, values) { - var primaryKey = null; - - for (var attribute in attributes) { - if (hasOwnProperty(attributes[attribute], 'primaryKey') && attributes[attribute].primaryKey) { - primaryKey = attribute; - break; - } - } - - // If no primary key check for an ID property - if (!primaryKey && hasOwnProperty(values, 'id')) primaryKey = 'id'; - - return primaryKey; -}; - -/** - * Remove Collection Associations - * - * @param {Object} records - * @param {Function} callback - * @api private - */ - -Remove.prototype.removeCollectionAssociations = function(records, cb) { - var self = this; - - async.eachSeries(_.keys(records), function(associationKey, next) { - self.removeAssociations(associationKey, records[associationKey], next); - }, - - function(err) { - if (err || self.failedTransactions.length > 0) { - return cb(null, self.failedTransactions); - } - - cb(); - }); -}; - -/** - * Remove Associations - * - * @param {String} key - * @param {Array} records - * @param {Function} callback - * @api private - */ - -Remove.prototype.removeAssociations = function(key, records, cb) { - var self = this; - - // Grab the collection the attribute references - // this allows us to make a query on it - var attribute = this.collection._attributes[key]; - var collectionName = attribute.collection.toLowerCase(); - var associatedCollection = this.collection.waterline.collections[collectionName]; - var schema = this.collection.waterline.schema[this.collection.identity].attributes[key]; - - // Limit Removes to 10 at a time to prevent the connection pool from being exhausted - async.eachLimit(records, 10, function(associationId, next) { - self.removeRecord(associatedCollection, schema, associationId, key, next); - }, cb); - -}; - -/** - * Remove A Single Record - * - * @param 
{Object} collection - * @param {Object} attribute - * @param {Object} values - * @param {Function} callback - * @api private - */ - -Remove.prototype.removeRecord = function(collection, attribute, associationId, key, cb) { - var self = this; - - // Validate `values` is a correct primary key format - var validAssociationKey = this.validatePrimaryKey(associationId); - - if (!validAssociationKey) { - this.failedTransactions.push({ - type: 'remove', - collection: collection.identity, - values: associationId, - err: new Error('Remove association only accepts a single primary key value') - }); - - return cb(); - } - - // Check if this is a many-to-many by looking at the junctionTable flag - var schema = this.collection.waterline.schema[attribute.collection.toLowerCase()]; - var junctionTable = schema.junctionTable || schema.throughTable; - - // If so build out the criteria and remove a record from the junction table - if (junctionTable) { - var joinCollection = this.collection.waterline.collections[attribute.collection.toLowerCase()]; - return this.removeManyToMany(joinCollection, attribute, associationId, key, cb); - } - - // Grab the associated collection's primaryKey - var attributes = this.collection.waterline.schema[collection.identity].attributes; - var associationKey = this.findPrimaryKey(attributes, attributes); - - if (!associationKey) { - return cb(new Error('No Primary Key defined on the child record you ' + - 'are trying to un-associate the record with! 
Try setting an attribute as a primary key or ' + - 'include an ID property.')); - } - - // Build up criteria and updated values used to update the record - var criteria = {}; - var _values = {}; - - criteria[associationKey] = associationId; - _values[attribute.on] = null; - - collection.update(criteria, _values, function(err) { - - if (err) { - self.failedTransactions.push({ - type: 'update', - collection: collection.identity, - criteria: criteria, - values: _values, - err: err - }); - } - - cb(); - }); -}; - -/** - * Validate A Primary Key - * - * Only support primary keys being passed in to the remove function. Check if it's a mongo - * id or anything that has a toString method. - * - * @param {Integer|String} key - * @return {Boolean} - * @api private - */ - -Remove.prototype.validatePrimaryKey = function(key) { - var validAssociation = false; - - // Attempt to see if the value is an ID and resembles a MongoID - if (_.isString(key) && utils.matchMongoId(key)) validAssociation = true; - - // Check it can be turned into a string - if (key && key.toString() !== '[object Object]') validAssociation = true; - - return validAssociation; -}; - -/** - * Remove A Many To Many Join Table Record - * - * @param {Object} collection - * @param {Object} attribute - * @param {Object} values - * @param {Function} callback - * @api private - */ - -Remove.prototype.removeManyToMany = function(collection, attribute, pk, key, cb) { - var self = this; - - // Grab the associated collection's primaryKey - var collectionAttributes = this.collection.waterline.schema[attribute.collection.toLowerCase()]; - var associationKey = collectionAttributes.attributes[attribute.on].via; - - // If this is a throughTable, look into the meta data cache for what key to use - if (collectionAttributes.throughTable) { - var cacheKey = collectionAttributes.throughTable[attribute.on + '.' 
+ key]; - if (!cacheKey) { - return cb(new Error('Unable to find the proper cache key in the through table definition')); - } - - associationKey = cacheKey; - } - - if (!associationKey) { - return cb(new Error('No Primary Key set on the child record you ' + - 'are trying to associate the record with! Try setting an attribute as a primary key or ' + - 'include an ID property.')); - } - - // Build up criteria and updated values used to create the record - var criteria = {}; - criteria[associationKey] = pk; - criteria[attribute.on] = this.proto[this.primaryKey]; - - // Run a destroy on the join table record - collection.destroy(criteria, function(err) { - - if (err) { - self.failedTransactions.push({ - type: 'destroy', - collection: collection.identity, - criteria: criteria, - err: err - }); - } - - cb(); - }); -}; - -/** - * Find Association Key - * - * @param {Object} collection - * @return {String} - * @api private - */ - -Remove.prototype.findAssociationKey = function(collection) { - var associationKey = null; - - for (var attribute in collection.attributes) { - var attr = collection.attributes[attribute]; - var identity = this.collection.identity; - - if (!hasOwnProperty(attr, 'references')) continue; - var attrCollection = attr.references.toLowerCase(); - - if (attrCollection !== identity) { - associationKey = attr.columnName; - } - } - - return associationKey; -}; diff --git a/lib/waterline/model/lib/associationMethods/update.js b/lib/waterline/model/lib/associationMethods/update.js deleted file mode 100644 index b805132d8..000000000 --- a/lib/waterline/model/lib/associationMethods/update.js +++ /dev/null @@ -1,100 +0,0 @@ - -/** - * Module dependencies - */ - -var _ = require('lodash'); -var utils = require('../../../utils/helpers'); -var nestedOperations = require('../../../utils/nestedOperations'); -var hop = utils.object.hasOwnProperty; - -/** - * Update the current instance with the currently set values - * - * Called in the model instance context. 
- * - * @param {Object} collection - * @param {Object} proto - * @param {Array} mutatedModels - * @param {Function} callback - */ - -var Update = module.exports = function(collection, proto, mutatedModels, cb) { - - var values = typeof proto.toObject === 'function' ? proto.toObject() : proto; - var attributes = collection.waterline.schema[collection.identity].attributes; - var primaryKey = this.findPrimaryKey(attributes, values); - - if (!primaryKey) { - return cb(new Error('No Primary Key set to update the record with! ' + - 'Try setting an attribute as a primary key or include an ID property.')); - } - - if (!values[primaryKey]) { - return cb(new Error('No Primary Key set to update the record with! ' + - 'Primary Key must have a value, it can\'t be an optional value.')); - } - - // Build Search Criteria - var criteria = {}; - criteria[primaryKey] = values[primaryKey]; - - // Clone values so they can be mutated - var _values = _.cloneDeep(values); - - // For any nested model associations (objects not collection arrays) that were not changed, - // lets set the value to just the foreign key so that an update query is not performed on the - // associatied model. - var keys = _.keys(_values); - keys.forEach(function(key) { - - // Nix any collection attributes so that they do not get sync'd during the update process. - // One reason for this is that the result set is not guaranteed to be complete, - // so the sync could exclude items. 
- if (attributes[key] && hop(attributes[key], 'collection') && attributes[key].collection) { - - delete _values[key]; - return; - } - - // If the key was changed, keep it expanded - if (mutatedModels.indexOf(key) !== -1) return; - - // Reduce it down to a foreign key value - var vals = {}; - vals[key] = _values[key]; - - // Delete and replace the value with a reduced version - delete _values[key]; - var reduced = nestedOperations.reduceAssociations.call(collection, collection.identity, collection.waterline.schema, vals); - _values = _.merge(_values, reduced); - }); - - // Update the collection with the new values - collection.update(criteria, _values, cb); -}; - - -/** - * Find Primary Key - * - * @param {Object} attributes - * @param {Object} values - * @api private - */ - -Update.prototype.findPrimaryKey = function(attributes, values) { - var primaryKey = null; - - for (var attribute in attributes) { - if (hop(attributes[attribute], 'primaryKey') && attributes[attribute].primaryKey) { - primaryKey = attribute; - break; - } - } - - // If no primary key check for an ID property - if (!primaryKey && hop(values, 'id')) primaryKey = 'id'; - - return primaryKey; -}; diff --git a/lib/waterline/model/lib/defaultMethods/destroy.js b/lib/waterline/model/lib/defaultMethods/destroy.js deleted file mode 100644 index ae0ce61db..000000000 --- a/lib/waterline/model/lib/defaultMethods/destroy.js +++ /dev/null @@ -1,109 +0,0 @@ - -/** - * Module dependencies - */ - -var utils = require('../../../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; -var defer = require('../../../utils/defer'); -var noop = function() {}; - -/** - * Model.destroy() - * - * Destroys an instance of a model - * - * @param {Object} context, - * @param {Object} proto - * @param {Function} callback - * @return {Promise} - * @api public - */ - -var Destroy = module.exports = function(context, proto, cb) { - - var deferred; - var err; - - if (typeof cb !== 'function') { - deferred = defer(); - 
} - - cb = cb || noop; - - var values = proto.toObject(); - var attributes = context.waterline.schema[context.identity].attributes; - var primaryKey = this.findPrimaryKey(attributes, values); - - if (!primaryKey) { - err = new Error('No Primary Key set to update the record with! ' + - 'Try setting an attribute as a primary key or include an ID property.'); - - if (deferred) { - deferred.reject(err); - } - - return cb(err); - } - - if (!values[primaryKey]) { - err = new Error('No Primary Key set to update the record with! ' + - 'Primary Key must have a value, it can\'t be an optional value.'); - - if (deferred) { - deferred.reject(err); - } - - return cb(err); - } - - // Build Search Criteria - var criteria = {}; - criteria[primaryKey] = values[primaryKey]; - - // Execute Query - context.destroy(criteria, function(err, status) { - if (err) { - - if (deferred) { - deferred.reject(err); - } - - return cb(err); - } - - if (deferred) { - deferred.resolve(status); - } - - cb.apply(this, arguments); - }); - - if (deferred) { - return deferred.promise; - } -}; - -/** - * Find Primary Key - * - * @param {Object} attributes - * @param {Object} values - * @api private - */ - -Destroy.prototype.findPrimaryKey = function(attributes, values) { - var primaryKey = null; - - for (var attribute in attributes) { - if (hasOwnProperty(attributes[attribute], 'primaryKey') && attributes[attribute].primaryKey) { - primaryKey = attribute; - break; - } - } - - // If no primary key check for an ID property - if (!primaryKey && hasOwnProperty(values, 'id')) primaryKey = 'id'; - - return primaryKey; -}; diff --git a/lib/waterline/model/lib/defaultMethods/index.js b/lib/waterline/model/lib/defaultMethods/index.js deleted file mode 100644 index e0e06fb4f..000000000 --- a/lib/waterline/model/lib/defaultMethods/index.js +++ /dev/null @@ -1,10 +0,0 @@ - -/** - * Export Default Methods - */ - -module.exports = { - toObject: require('./toObject'), - destroy: require('./destroy'), - save: 
require('./save') -}; diff --git a/lib/waterline/model/lib/defaultMethods/save.js b/lib/waterline/model/lib/defaultMethods/save.js deleted file mode 100644 index 331f2cfad..000000000 --- a/lib/waterline/model/lib/defaultMethods/save.js +++ /dev/null @@ -1,220 +0,0 @@ -var _ = require('lodash'); -var async = require('async'); -var deep = require('deep-diff'); -var updateInstance = require('../associationMethods/update'); -var addAssociation = require('../associationMethods/add'); -var removeAssociation = require('../associationMethods/remove'); -var hop = require('../../../utils/helpers').object.hasOwnProperty; -var defer = require('../../../utils/defer'); -var WLError = require('../../../error/WLError'); -var noop = function() {}; - -/** - * Model.save() - * - * Takes the currently set attributes and updates the database. - * Shorthand for Model.update({ attributes }, cb) - * - * @param {Object} context - * @param {Object} proto - * @param {Function} callback - * @param {Object} options - * @return {Promise} - * @api public - */ - -module.exports = function(context, proto, options, cb) { - - var deferred; - - if (typeof options === 'function') { - cb = options; - options = {}; - } - - if (typeof cb !== 'function') { - deferred = defer(); - } - - cb = cb || noop; - - /** - * TO-DO: - * This should all be wrapped in a transaction. That's coming next but for the meantime - * just hope we don't get in a nasty state where the operation fails! - */ - - var mutatedModels = []; - - async.auto({ - - // Compare any populated model values to their current state. - // If they have been mutated then the values will need to be synced. 
- compareModelValues: function(next) { - var modelKeys = Object.keys(proto.associationsCache); - - async.each(modelKeys, function(key, nextKey) { - if (!hop(proto, key) || proto[key] === undefined) { - return async.setImmediate(function() { - nextKey(); - }); - } - - var currentVal = proto[key]; - var previousVal = proto.associationsCache[key]; - - // Normalize previousVal to an object - if (Array.isArray(previousVal)) { - previousVal = previousVal[0]; - } - - if (deep(currentVal, previousVal)) { - mutatedModels.push(key); - } - - return async.setImmediate(function() { - nextKey(); - }); - }, next); - }, - - // Update The Current Record - updateRecord: ['compareModelValues', function(next) { - - // Shallow clone proto.toObject() to remove all the functions - var data = _.clone(proto.toObject()); - - new updateInstance(context, data, mutatedModels, function(err, data) { - next(err, data); - }); - }], - - - // Build a set of associations to add and remove. - // These are populated from using model[associationKey].add() and - // model[associationKey].remove(). 
- buildAssociationOperations: ['compareModelValues', function(next) { - - // Build a dictionary to hold operations based on association key - var operations = { - addKeys: {}, - removeKeys: {} - }; - - Object.keys(proto.associations).forEach(function(key) { - - // Ignore belongsTo associations - if (proto.associations[key].hasOwnProperty('model')) return; - - // Grab what records need adding - if (proto.associations[key].addModels.length > 0) { - operations.addKeys[key] = proto.associations[key].addModels; - } - - // Grab what records need removing - if (proto.associations[key].removeModels.length > 0) { - operations.removeKeys[key] = proto.associations[key].removeModels; - } - }); - - return async.setImmediate(function() { - return next(null, operations); - }); - - }], - - // Create new associations for each association key - addAssociations: ['buildAssociationOperations', 'updateRecord', function(next, results) { - var keys = results.buildAssociationOperations.addKeys; - return new addAssociation(context, proto, keys, function(err, failedTransactions) { - if (err) return next(err); - - // reset addKeys - for (var key in results.buildAssociationOperations.addKeys) { - proto.associations[key].addModels = []; - } - - next(null, failedTransactions); - }); - }], - - // Remove associations for each association key - // Run after the addAssociations so that the connection pools don't get exhausted. - // Once transactions are ready we can remove this restriction as they will be run on the same - // connection. 
- removeAssociations: ['buildAssociationOperations', 'addAssociations', function(next, results) { - var keys = results.buildAssociationOperations.removeKeys; - return new removeAssociation(context, proto, keys, function(err, failedTransactions) { - if (err) return next(err); - - // reset removeKeys - for (var key in results.buildAssociationOperations.removeKeys) { - proto.associations[key].removeModels = []; - } - - next(null, failedTransactions); - }); - }] - - }, - - function(err, results) { - if (err) { - if (deferred) { - deferred.reject(err); - } - return cb(err); - } - - // Collect all failed transactions if any - var failedTransactions = []; - var error; - - if (results.addAssociations) { - failedTransactions = failedTransactions.concat(results.addAssociations); - } - - if (results.removeAssociations) { - failedTransactions = failedTransactions.concat(results.removeAssociations); - } - - if (failedTransactions.length > 0) { - error = new Error('Some associations could not be added or destroyed during save().'); - error.failedTransactions = failedTransactions; - - if (deferred) { - deferred.reject(new WLError(error)); - } - return cb(new WLError(error)); - } - - if (!results.updateRecord.length) { - error = new Error('Error updating a record.'); - if (deferred) { - deferred.reject(new WLError(error)); - } - return cb(new WLError(error)); - } - - // Reset the model attribute values with the new values. - // This is needed because you could have a lifecycle callback that has - // changed the data since last time you accessed it. 
- // Attach attributes to the model instance - var newData = results.updateRecord[0]; - _.each(newData, function(val, key) { - proto[key] = val; - }); - - // If a promise, resolve it - if (deferred) { - deferred.resolve(); - } - - // Return the callback - return cb(); - }); - - if (deferred) { - return deferred.promise; - } -}; diff --git a/lib/waterline/model/lib/defaultMethods/toObject.js b/lib/waterline/model/lib/defaultMethods/toObject.js deleted file mode 100644 index 2baf4824b..000000000 --- a/lib/waterline/model/lib/defaultMethods/toObject.js +++ /dev/null @@ -1,257 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); -var utils = require('../../../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; - -/** - * Model.toObject() - * - * Returns an object containing just the model values. Useful for doing - * operations on the current values minus the instance methods. - * - * @param {Object} context, Waterline collection instance - * @param {Object} proto, model prototype - * @api public - * @return {Object} - */ - -var toObject = module.exports = function(context, proto) { - - var self = this; - - this.context = context; - this.proto = proto; - - // Hold joins used in the query - this.usedJoins = []; - - // Create an object that can hold the values to be returned - this.object = {}; - - // Run methods to add and modify values to the above object - this.addAssociations(); - this.addProperties(); - this.makeObject(); - this.filterJoins(); - this.filterFunctions(); - - // Ok now we want to create a POJO that can be serialized for use in a response. - // This is after all usually called in a toJSON method so lets make sure its all - // good in there. This could be faster and safer I recon. 
- try { - - // Stringify/parse the object - var _obj = JSON.parse(JSON.stringify(this.object)); - - return _obj; - - // Return a nicer error message than just throwing the json parse message - } catch (e) { - var err = new Error(); - err.message = 'There was an error turning the model into an object.'; - err.data = self.object; - throw err; - } -}; - - -/** - * Add Association Keys - * - * If a showJoins flag is active, add all association keys. - * - * @param {Object} keys - * @api private - */ - -toObject.prototype.addAssociations = function() { - var self = this; - - if (!this.proto._properties) return; - if (!this.proto._properties.showJoins) return; - - // Copy prototype over for attributes - for (var association in this.proto.associations) { - - // Handle hasMany attributes - if (hasOwnProperty(this.proto.associations[association], 'value')) { - - var records = []; - var values = this.proto.associations[association].value; - - values.forEach(function(record) { - if (typeof record !== 'object') return; - // Since `typeof null` === `"object"`, we should also check for that case: - if (record === null) return; - var item = Object.create(record.__proto__); - Object.keys(record).forEach(function(key) { - item[key] = _.cloneDeep(record[key]); - }); - records.push(item); - }); - - this.object[association] = records; - continue; - } - - // Handle belongsTo attributes - var record = this.proto[association]; - var item; - - // Check if the association foreign key is a date. If so set the object's - // association and continue. Manual check here is needed because _.isObject - // matches dates and you will end up with a loop that never exits. - if (_.isDate(record)) { - - item = new Date(record); - _.extend(item.__proto__ , record.__proto__); - - this.object[association] = item; - } - - // Is the record is a populated object, create a new object from it. - // _.isObject() does not match null, so we're good here. 
- else if (_.isObject(record) && !Array.isArray(record)) { - - item = Object.create(record.__proto__); - - Object.keys(record).forEach(function(key) { - item[key] = record[key]; - }); - - this.object[association] = item; - } - - else if (!_.isUndefined(record)) { - this.object[association] = record; - } - } -}; - -/** - * Add Properties - * - * Copies over non-association attributes to the newly created object. - * - * @api private - */ - -toObject.prototype.addProperties = function() { - var self = this; - - Object.keys(this.proto).forEach(function(key) { - if (hasOwnProperty(self.object, key)) return; - self.object[key] = self.proto[key]; - }); - -}; - -/** - * Make Object - * - * Runs toJSON on all associated values - * - * @api private - */ - -toObject.prototype.makeObject = function() { - var self = this; - - if (!this.proto._properties) return; - if (!this.proto._properties.showJoins) return; - - // Handle Joins - Object.keys(this.proto.associations).forEach(function(association) { - - // Don't run toJSON on records that were not populated - if (!self.proto._properties || !self.proto._properties.joins) return; - - // Build up a join key name based on the attribute's model/collection name - var joinsName = association; - if (self.context._attributes[association].model) joinsName = self.context._attributes[association].model.toLowerCase(); - if (self.context._attributes[association].collection) joinsName = self.context._attributes[association].collection.toLowerCase(); - - // Check if the join was used - if (self.proto._properties.joins.indexOf(joinsName) < 0 && self.proto._properties.joins.indexOf(association) < 0) return; - self.usedJoins.push(association); - - // Call toJSON on each associated record - if (Array.isArray(self.object[association])) { - var records = []; - - self.object[association].forEach(function(item) { - if (!hasOwnProperty(item.__proto__, 'toJSON')) return; - records.push(item.toJSON()); - }); - - self.object[association] = records; - 
return; - } - - if (!self.object[association]) return; - - // Association was null or not valid - // (don't try to `hasOwnProperty` it so we don't throw) - if (typeof self.object[association] !== 'object') { - self.object[association] = self.object[association]; - return; - } - - if (!hasOwnProperty(self.object[association].__proto__, 'toJSON')) return; - self.object[association] = self.object[association].toJSON(); - }); - -}; - -/** - * Remove Non-Joined Associations - * - * @api private - */ - -toObject.prototype.filterJoins = function() { - var attributes = this.context._attributes; - var properties = this.proto._properties; - - for (var attribute in attributes) { - if (!hasOwnProperty(attributes[attribute], 'model') && !hasOwnProperty(attributes[attribute], 'collection')) continue; - - // If no properties and a collection attribute, delete the association and return - if (!properties && hasOwnProperty(attributes[attribute], 'collection')) { - delete this.object[attribute]; - continue; - } - - // If showJoins is false remove the association object - if (properties && !properties.showJoins) { - - // Don't delete belongs to keys - if (!attributes[attribute].model) delete this.object[attribute]; - } - - if (properties && properties.joins) { - if (this.usedJoins.indexOf(attribute) < 0) { - - // Don't delete belongs to keys - if (!attributes[attribute].model) delete this.object[attribute]; - } - } - } -}; - -/** - * Filter Functions - * - * @api private - */ - -toObject.prototype.filterFunctions = function() { - for (var key in this.object) { - if (typeof this.object[key] === 'function') { - delete this.object[key]; - } - } -}; diff --git a/lib/waterline/model/lib/internalMethods/defineAssociations.js b/lib/waterline/model/lib/internalMethods/defineAssociations.js deleted file mode 100644 index fc89f6f25..000000000 --- a/lib/waterline/model/lib/internalMethods/defineAssociations.js +++ /dev/null @@ -1,134 +0,0 @@ - -/** - * Module dependencies - */ - -var _ = 
require('lodash'); -var Association = require('../association'); -var utils = require('../../../utils/helpers'); -var hasOwnProperty = utils.object.hasOwnProperty; - -/** - * Add association getters and setters for any has_many - * attributes. - * - * @param {Object} context - * @param {Object} proto - * @api private - */ - -var Define = module.exports = function(context, proto) { - var self = this; - - this.proto = proto; - - // Build Associations Listing - Object.defineProperty(proto, 'associations', { - enumerable: false, - writable: true, - value: {} - }); - - // Build associations cache to hold original values. - // Used to check if values have been mutated and need to be synced when - // a model.save call is made. - Object.defineProperty(proto, 'associationsCache', { - enumerable: false, - writable: true, - value: {} - }); - - var attributes = context._attributes || {}; - var collections = this.collectionKeys(attributes); - var models = this.modelKeys(attributes); - - if (collections.length === 0 && models.length === 0) return; - - // Create an Association getter and setter for each collection - collections.forEach(function(collection) { - self.buildHasManyProperty(collection); - }); - - // Attach Models to the prototype and set in the associations object - models.forEach(function(model) { - self.buildBelongsToProperty(model); - }); -}; - -/** - * Find Collection Keys - * - * @param {Object} attributes - * @api private - * @return {Array} - */ - -Define.prototype.collectionKeys = function(attributes) { - var collections = []; - - // Find any collection keys - for (var attribute in attributes) { - if (!hasOwnProperty(attributes[attribute], 'collection')) continue; - collections.push(_.cloneDeep(attribute)); - } - - return collections; -}; - -/** - * Find Model Keys - * - * @param {Object} attributes - * @api private - * @return {Array} - */ - -Define.prototype.modelKeys = function(attributes) { - var models = []; - - // Find any collection keys - for (var 
attribute in attributes) { - if (!hasOwnProperty(attributes[attribute], 'model')) continue; - models.push({ key: _.cloneDeep(attribute), val: _.cloneDeep(attributes[attribute]) }); - } - - return models; -}; - -/** - * Create Getter/Setter for hasMany associations - * - * @param {String} collection - * @api private - */ - -Define.prototype.buildHasManyProperty = function(collection) { - var self = this; - - // Attach to a non-enumerable property - this.proto.associations[collection] = new Association(); - - // Attach getter and setter to the model - Object.defineProperty(this.proto, collection, { - set: function(val) { self.proto.associations[collection]._setValue(val); }, - get: function() { return self.proto.associations[collection]._getValue(); }, - enumerable: true, - configurable: true - }); -}; - -/** - * Add belongsTo attributes to associations object - * - * @param {String} collection - * @api private - */ - -Define.prototype.buildBelongsToProperty = function(model) { - - // Attach to a non-enumerable property - this.proto.associations[model.key] = model.val; - - // Build a cache for this model - this.proto.associationsCache[model.key] = {}; -}; diff --git a/lib/waterline/model/lib/internalMethods/index.js b/lib/waterline/model/lib/internalMethods/index.js deleted file mode 100644 index 609a2d2fc..000000000 --- a/lib/waterline/model/lib/internalMethods/index.js +++ /dev/null @@ -1,9 +0,0 @@ - -/** - * Export Internal Methods - */ - -module.exports = { - normalizeAssociations: require('./normalizeAssociations'), - defineAssociations: require('./defineAssociations') -}; diff --git a/lib/waterline/model/lib/internalMethods/normalizeAssociations.js b/lib/waterline/model/lib/internalMethods/normalizeAssociations.js deleted file mode 100644 index 03d38cd35..000000000 --- a/lib/waterline/model/lib/internalMethods/normalizeAssociations.js +++ /dev/null @@ -1,61 +0,0 @@ - -/** - * Check and normalize belongs_to and has_many association keys - * - * Ensures that a 
belongs_to association is an object and that a has_many association - * is an array. - * - * @param {Object} context, - * @param {Object} proto - * @api private - */ - -var Normalize = module.exports = function(context, proto) { - - this.proto = proto; - - var attributes = context.waterline.collections[context.identity].attributes || {}; - - this.collections(attributes); - this.models(attributes); -}; - -/** - * Normalize Collection Attribute to Array - * - * @param {Object} attributes - * @api private - */ - -Normalize.prototype.collections = function(attributes) { - for (var attribute in attributes) { - - // If attribute is not a collection, it doesn't need normalizing - if (!attributes[attribute].collection) continue; - - // Sets the attribute as an array if it's not already - if (this.proto[attribute] && !Array.isArray(this.proto[attribute])) { - this.proto[attribute] = [this.proto[attribute]]; - } - } -}; - -/** - * Normalize Model Attribute to Object - * - * @param {Object} attributes - * @api private - */ - -Normalize.prototype.models = function(attributes) { - for (var attribute in attributes) { - - // If attribute is not a model, it doesn't need normalizing - if (!attributes[attribute].model) continue; - - // Sets the attribute to the first item in the array if it's an array - if (this.proto[attribute] && Array.isArray(this.proto[attribute])) { - this.proto[attribute] = this.proto[attribute][0]; - } - } -}; diff --git a/lib/waterline/model/lib/model.js b/lib/waterline/model/lib/model.js deleted file mode 100644 index 062afe2ed..000000000 --- a/lib/waterline/model/lib/model.js +++ /dev/null @@ -1,81 +0,0 @@ - -/** - * Dependencies - */ - -var extend = require('../../utils/extend'); -var _ = require('lodash'); -var util = require('util'); - -/** - * A Basic Model Interface - * - * Initialize a new Model with given params - * - * @param {Object} attrs - * @param {Object} options - * @return {Object} - * @api public - * - * var Person = Model.prototype; - * 
var person = new Person({ name: 'Foo Bar' }); - * person.name # => 'Foo Bar' - */ - -var Model = module.exports = function(attrs, options) { - var self = this; - - attrs = attrs || {}; - options = options || {}; - - // Store options as properties - Object.defineProperty(this, '_properties', { - enumerable: false, - writable: false, - value: options - }); - - // Cast things that need to be cast - this._cast(attrs); - - // Build association getters and setters - this._defineAssociations(); - - // Attach attributes to the model instance - for (var key in attrs) { - this[key] = attrs[key]; - - if (this.associationsCache.hasOwnProperty(key)) { - this.associationsCache[key] = _.cloneDeep(attrs[key]); - } - } - - // Normalize associations - this._normalizeAssociations(); - - - /** - * Log output - * @return {String} output when this model is util.inspect()ed - * (usually with console.log()) - */ - - Object.defineProperty(this, 'inspect', { - enumerable: false, - configurable: false, - writable: false, - value: function() { - var output; - try { - output = self.toObject(); - } catch (e) {} - - return output ? util.inspect(output) : self; - } - }); - - return this; -}; - -// Make Extendable -Model.extend = extend; diff --git a/lib/waterline/query/adapters.js b/lib/waterline/query/adapters.js deleted file mode 100644 index fc7cd5ba0..000000000 --- a/lib/waterline/query/adapters.js +++ /dev/null @@ -1,44 +0,0 @@ -/** - * Mixes Custom Non-CRUD Adapter Methods into the prototype. 
- */ - -module.exports = function() { - var self = this; - - Object.keys(this.connections).forEach(function(conn) { - - var adapter = self.connections[conn]._adapter || {}; - - Object.keys(adapter).forEach(function(key) { - - // Ignore the Identity Property - if (['identity', 'tableName'].indexOf(key) >= 0) return; - - // Don't override keys that already exists - if (self[key]) return; - - // Don't override a property, only functions - if (typeof adapter[key] != 'function') { - self[key] = adapter[key]; - return; - } - - // Apply the Function with passed in args and set this.identity as - // the first argument - self[key] = function() { - - var tableName = self.tableName || self.identity; - - // If this is the teardown method, just pass in the connection name, - // otherwise pass the connection and the tableName - var defaultArgs = key === 'teardown' ? [conn] : [conn, tableName]; - - // Concat self.identity with args (must massage arguments into a proper array) - // Use a normalized _tableName set in the core module. 
- var args = defaultArgs.concat(Array.prototype.slice.call(arguments)); - return adapter[key].apply(self, args); - }; - }); - }); - -}; diff --git a/lib/waterline/query/aggregate.js b/lib/waterline/query/aggregate.js deleted file mode 100644 index f5ba75afa..000000000 --- a/lib/waterline/query/aggregate.js +++ /dev/null @@ -1,239 +0,0 @@ -/** - * Aggregate Queries - */ - -var async = require('async'); -var _ = require('lodash'); -var usageError = require('../utils/usageError'); -var utils = require('../utils/helpers'); -var normalize = require('../utils/normalize'); -var callbacks = require('../utils/callbacksRunner'); -var Deferred = require('./deferred'); -var hasOwnProperty = utils.object.hasOwnProperty; - -module.exports = { - - /** - * Create an Array of records - * - * @param {Array} array of values to create - * @param {Function} callback - * @return Deferred object if no callback - */ - - createEach: function(valuesList, cb, metaContainer) { - var self = this; - - // Handle Deferred where it passes criteria first - if(_.isPlainObject(arguments[0]) && _.isArray(arguments[1])) { - valuesList = arguments[1]; - cb = arguments[2]; - } - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.createEach, {}, valuesList); - } - - // Validate Params - var usage = utils.capitalize(this.identity) + '.createEach(valuesList, callback)'; - - if (!valuesList) return usageError('No valuesList specified!', usage, cb); - if (!Array.isArray(valuesList)) return usageError('Invalid valuesList specified (should be an array!)', usage, cb); - if (typeof cb !== 'function') return usageError('Invalid callback specified!', usage, cb); - - var errStr = _validateValues(_.cloneDeep(valuesList)); - if (errStr) return usageError(errStr, usage, cb); - - // Handle undefined values - var filteredValues = _.filter(valuesList, function(value) { - return value !== undefined; - }); - - // Create will take care of cloning values so original 
isn't mutated - async.map(filteredValues, function(data, next) { - self.create(data, next, metaContainer); - }, cb); - }, - - /** - * Iterate through a list of objects, trying to find each one - * For any that don't exist, create them - * - * @param {Object} criteria - * @param {Array} valuesList - * @param {Function} callback - * @return Deferred object if no callback - */ - - findOrCreateEach: function(criteria, valuesList, cb, metaContainer) { - var self = this; - - if (typeof valuesList === 'function') { - cb = valuesList; - valuesList = null; - } - - // Normalize criteria - criteria = normalize.criteria(criteria); - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.findOrCreateEach, criteria, valuesList); - } - - // Validate Params - var usage = utils.capitalize(this.identity) + '.findOrCreateEach(criteria, valuesList, callback)'; - - if (typeof cb !== 'function') return usageError('Invalid callback specified!', usage, cb); - if (!criteria) return usageError('No criteria specified!', usage, cb); - if (!Array.isArray(criteria)) return usageError('No criteria specified!', usage, cb); - if (!valuesList) return usageError('No valuesList specified!', usage, cb); - if (!Array.isArray(valuesList)) return usageError('Invalid valuesList specified (should be an array!)', usage, cb); - - var errStr = _validateValues(valuesList); - if (errStr) return usageError(errStr, usage, cb); - - // Validate each record in the array and if all are valid - // pass the array to the adapter's findOrCreateEach method - var validateItem = function(item, next) { - _validate.call(self, item, next); - }; - - - async.each(valuesList, validateItem, function(err) { - if (err) return cb(err); - - // Transform Values - var transformedValues = []; - - valuesList.forEach(function(value) { - - // Transform values - value = self._transformer.serialize(value); - - // Clean attributes - value = self._schema.cleanValues(value); - 
transformedValues.push(value); - }); - - // Set values array to the transformed array - valuesList = transformedValues; - - // Transform Search Criteria - var transformedCriteria = []; - - criteria.forEach(function(value) { - value = self._transformer.serialize(value); - transformedCriteria.push(value); - }); - - // Set criteria array to the transformed array - criteria = transformedCriteria; - - // Pass criteria and attributes to adapter definition - self.adapter.findOrCreateEach(criteria, valuesList, function(err, values) { - if (err) return cb(err); - - // Unserialize Values - var unserializedValues = []; - - values.forEach(function(value) { - value = self._transformer.unserialize(value); - unserializedValues.push(value); - }); - - // Set values array to the transformed array - values = unserializedValues; - - // Run AfterCreate Callbacks - async.each(values, function(item, next) { - callbacks.afterCreate(self, item, next); - }, function(err) { - if (err) return cb(err); - - var models = []; - - // Make each result an instance of model - values.forEach(function(value) { - models.push(new self._model(value)); - }); - - cb(null, models); - }); - }, metaContainer); - }); - } -}; - - -/** - * Validate valuesList - * - * @param {Array} valuesList - * @return {String} - * @api private - */ - -function _validateValues(valuesList) { - var err; - - for (var i = 0; i < valuesList.length; i++) { - if (valuesList[i] !== Object(valuesList[i])) { - err = 'Invalid valuesList specified (should be an array of valid values objects!)'; - } - } - - return err; -} - - -/** - * Validate values and add in default values - * - * @param {Object} record - * @param {Function} cb - * @api private - */ - -function _validate(record, cb) { - var self = this; - - // Set Default Values if available - for (var key in self.attributes) { - if (!record[key] && record[key] !== false && hasOwnProperty(self.attributes[key], 'defaultsTo')) { - var defaultsTo = self.attributes[key].defaultsTo; - 
record[key] = typeof defaultsTo === 'function' ? defaultsTo.call(record) : _.clone(defaultsTo); - } - } - - // Cast values to proper types (handle numbers as strings) - record = self._cast.run(record); - - async.series([ - - // Run Validation with Validation LifeCycle Callbacks - function(next) { - callbacks.validate(self, record, true, next); - }, - - // Before Create Lifecycle Callback - function(next) { - callbacks.beforeCreate(self, record, next); - } - - ], function(err) { - if (err) return cb(err); - - // Automatically add updatedAt and createdAt (if enabled) - if (self.autoCreatedAt) { - record[self.autoCreatedAt] = new Date(); - } - - if (self.autoUpdatedAt) { - record[self.autoUpdatedAt] = new Date(); - } - - cb(); - }); -} diff --git a/lib/waterline/query/composite.js b/lib/waterline/query/composite.js deleted file mode 100644 index 6bb9c26c8..000000000 --- a/lib/waterline/query/composite.js +++ /dev/null @@ -1,89 +0,0 @@ -/** - * Composite Queries - */ - -var async = require('async'); -var _ = require('lodash'); -var usageError = require('../utils/usageError'); -var utils = require('../utils/helpers'); -var normalize = require('../utils/normalize'); -var Deferred = require('./deferred'); -var hasOwnProperty = utils.object.hasOwnProperty; - -module.exports = { - - /** - * Find or Create a New Record - * - * @param {Object} search criteria - * @param {Object} values to create if no record found - * @param {Function} callback - * @return Deferred object if no callback - */ - - findOrCreate: function(criteria, values, cb, metaContainer) { - var self = this; - - if (typeof values === 'function') { - cb = values; - values = null; - } - - // If no criteria is specified, bail out with a vengeance. 
- var usage = utils.capitalize(this.identity) + '.findOrCreate([criteria], values, callback)'; - if (typeof cb == 'function' && (!criteria || criteria.length === 0)) { - return usageError('No criteria option specified!', usage, cb); - } - - // Normalize criteria - criteria = normalize.criteria(criteria); - // If no values were specified, use criteria - if (!values) values = criteria.where ? criteria.where : criteria; - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.findOrCreate, criteria, values); - } - - // This is actually an implicit call to findOrCreateEach - if (Array.isArray(criteria) && Array.isArray(values)) { - return this.findOrCreateEach(criteria, values, cb); - } - - if (typeof cb !== 'function') return usageError('Invalid callback specified!', usage, cb); - - // Try a find first. - var q = this.find(criteria); - - if(metaContainer) { - q.meta(metaContainer); - } - - q.exec(function(err, results) { - if (err) return cb(err); - - if (results && results.length !== 0) { - - // Unserialize values - results = self._transformer.unserialize(results[0]); - - // Return an instance of Model - var model = new self._model(results); - return cb(null, model); - } - - // Create a new record if nothing is found. 
- var q2 = self.create(values); - - if(metaContainer) { - q2.meta(metaContainer); - } - - q2.exec(function(err, result) { - if (err) return cb(err); - return cb(null, result); - }); - }); - } - -}; diff --git a/lib/waterline/query/ddl.js b/lib/waterline/query/ddl.js deleted file mode 100644 index ed63d096e..000000000 --- a/lib/waterline/query/ddl.js +++ /dev/null @@ -1,31 +0,0 @@ -/** - * DDL Queries - */ - -module.exports = { - - /** - * Describe a collection - */ - - describe: function(cb) { - this.adapter.describe(cb); - }, - - /** - * Alter a table/set/etc - */ - - alter: function(attributes, cb) { - this.adapter.alter(attributes, cb); - }, - - /** - * Drop a table/set/etc - */ - - drop: function(cb) { - this.adapter.drop(cb); - } - -}; diff --git a/lib/waterline/query/deferred.js b/lib/waterline/query/deferred.js deleted file mode 100644 index b795e3228..000000000 --- a/lib/waterline/query/deferred.js +++ /dev/null @@ -1,617 +0,0 @@ -/** - * Deferred Object - * - * Used for building up a Query - */ - -var util = require('util'); -var Promise = require('bluebird'); -var _ = require('lodash'); -var normalize = require('../utils/normalize'); -var utils = require('../utils/helpers'); -var acyclicTraversal = require('../utils/acyclicTraversal'); -var hasOwnProperty = utils.object.hasOwnProperty; - -// Alias "catch" as "fail", for backwards compatibility with projects -// that were created using Q -Promise.prototype.fail = Promise.prototype.catch; - -var Deferred = module.exports = function(context, method, criteria, values) { - - if (!context) return new Error('Must supply a context to a new Deferred object. Usage: new Deferred(context, method, criteria)'); - if (!method) return new Error('Must supply a method to a new Deferred object. 
Usage: new Deferred(context, method, criteria)'); - - this._context = context; - this._method = method; - this._criteria = criteria; - this._values = values || null; - - this._deferred = null; // deferred object for promises - - return this; -}; - - -/** - * Add join clause(s) to the criteria object to populate - * the specified alias all the way down (or at least until a - * circular pattern is detected.) - * - * @param {String} keyName [the initial alias aka named relation] - * @param {Object} criteria [optional] - * @return this - * @chainable - * - * WARNING: - * This method is not finished yet!! - */ -Deferred.prototype.populateDeep = function(keyName, criteria) { - - // The identity of the initial model - var identity = this._context.identity; - - // The input schema - var schema = this._context.waterline.schema; - - // Kick off recursive function to traverse the schema graph. - var plan = acyclicTraversal(schema, identity, keyName); - - // TODO: convert populate plan into a join plan - // this._criteria.joins = .... - - // TODO: also merge criteria object into query - - return this; -}; - -/** - * Populate all associations of a collection. - * - * @return this - * @chainable - */ -Deferred.prototype.populateAll = function(criteria) { - var self = this; - this._context.associations.forEach(function(association) { - self.populate(association.alias, criteria); - }); - return this; - -}; - -/** - * Add a `joins` clause to the criteria object. - * - * Used for populating associations. 
- * - * @param {String|Array} key, the key to populate or array of string keys - * @return this - * @chainable - */ - -Deferred.prototype.populate = function(keyName, criteria) { - - var self = this; - var joins = []; - var pk = 'id'; - var attr; - var join; - - // Adds support for arrays into keyName so that a list of - // populates can be passed - if (_.isArray(keyName)) { - keyName.forEach(function(populate) { - self.populate(populate, criteria); - }); - return this; - } - - // Normalize sub-criteria - try { - criteria = normalize.criteria(criteria); - - //////////////////////////////////////////////////////////////////////// - // TODO: - // instead of doing this, target the relevant pieces of code - // with weird expectations and teach them a lesson - // e.g. `lib/waterline/query/finders/operations.js:665:12` - // (delete userCriteria.sort) - // - // Except make sure `where` exists - criteria.where = criteria.where === false ? false : (criteria.where || {}); - //////////////////////////////////////////////////////////////////////// - - } catch (e) { - throw new Error( - 'Could not parse sub-criteria passed to ' + - util.format('`.populate("%s")`', keyName) + - '\nSub-criteria:\n' + util.inspect(criteria, false, null) + - '\nDetails:\n' + util.inspect(e, false, null) - ); - } - - try { - - // Set the attr value to the generated schema attribute - attr = this._context.waterline.schema[this._context.identity].attributes[keyName]; - - // Get the current collection's primary key attribute - Object.keys(this._context._attributes).forEach(function(key) { - if (hasOwnProperty(self._context._attributes[key], 'primaryKey') && self._context._attributes[key].primaryKey) { - pk = self._context._attributes[key].columnName || key; - } - }); - - if (!attr) { - throw new Error( - 'In ' + util.format('`.populate("%s")`', keyName) + - ', attempting to populate an attribute that doesn\'t exist' - ); - } - - // Grab the key being populated to check if it is a has many to belongs to 
- // If it's a belongs_to the adapter needs to know that it should replace the foreign key - // with the associated value. - var parentKey = this._context.waterline.collections[this._context.identity].attributes[keyName]; - - // Build the initial join object that will link this collection to either another collection - // or to a junction table. - join = { - parent: this._context.identity, - parentKey: attr.columnName || pk, - child: attr.references, - childKey: attr.on, - alias: keyName, - removeParentKey: !!parentKey.model, - model: !!hasOwnProperty(parentKey, 'model'), - collection: !!hasOwnProperty(parentKey, 'collection') - }; - - // Build select object to use in the integrator - var select = []; - var customSelect = criteria.select && _.isArray(criteria.select); - _.each(this._context.waterline.schema[attr.references].attributes, function(val, key) { - // Ignore virtual attributes - if(_.has(val, 'collection')) { - return; - } - - // Check if the user has defined a custom select and if so normalize it - if(customSelect && !_.includes(criteria.select, key)) { - return; - } - - if (!_.has(val, 'columnName')) { - select.push(key); - return; - } - - select.push(val.columnName); - }); - - // Ensure the PK and FK on the child are always selected - otherwise things - // like the integrator won't work correctly - var childPk; - _.each(this._context.waterline.schema[attr.references].attributes, function(val, key) { - if(_.has(val, 'primaryKey') && val.primaryKey) { - childPk = val.columnName || key; - } - }); - - select.push(childPk); - - // Add the foreign key for collections - if(join.collection) { - select.push(attr.on); - } - - join.select = select; - - var schema = this._context.waterline.schema[attr.references]; - var reference = null; - - // If linking to a junction table the attributes shouldn't be included in the return value - if (schema.junctionTable) { - join.select = false; - reference = _.find(schema.attributes, function(attribute) { - return 
attribute.references && attribute.columnName !== attr.on; - }); - } else if (schema.throughTable && schema.throughTable[self._context.identity + '.' + keyName]) { - join.select = false; - reference = schema.attributes[schema.throughTable[self._context.identity + '.' + keyName]]; - } - - joins.push(join); - - // If a junction table is used add an additional join to get the data - if (reference && hasOwnProperty(attr, 'on')) { - var selects = []; - _.each(this._context.waterline.schema[reference.references].attributes, function(val, key) { - // Ignore virtual attributes - if(_.has(val, 'collection')) { - return; - } - - // Check if the user has defined a custom select and if so normalize it - if(customSelect && !_.includes(criteria.select, key)) { - return; - } - - if (!_.has(val, 'columnName')) { - selects.push(key); - return; - } - - selects.push(val.columnName); - }); - - // Ensure the PK and FK are always selected - otherwise things like the - // integrator won't work correctly - _.each(this._context.waterline.schema[reference.references].attributes, function(val, key) { - if(_.has(val, 'primaryKey') && val.primaryKey) { - childPk = val.columnName || key; - } - }); - - selects.push(childPk); - - join = { - parent: attr.references, - parentKey: reference.columnName, - child: reference.references, - childKey: reference.on, - select: _.uniq(selects), - alias: keyName, - junctionTable: true, - removeParentKey: !!parentKey.model, - model: false, - collection: true - }; - - joins.push(join); - } - - // Append the criteria to the correct join if available - if (criteria && joins.length > 1) { - joins[1].criteria = criteria; - joins[1].criteria.select = join.select; - } else if (criteria) { - joins[0].criteria = criteria; - joins[0].criteria.select = join.select; - } - - // Set the criteria joins - this._criteria.joins = Array.prototype.concat(this._criteria.joins || [], joins); - - return this; - } catch (e) { - throw new Error( - 'Encountered unexpected error while 
building join instructions for ' + - util.format('`.populate("%s")`', keyName) + - '\nDetails:\n' + - util.inspect(e, false, null) - ); - } -}; - -/** - * Add projections to the parent - * - * @param {Array} attributes to select - * @return this - */ - -Deferred.prototype.select = function(attributes) { - if(!_.isArray(attributes)) { - attributes = [attributes]; - } - - var select = this._criteria.select || []; - select = select.concat(attributes); - this._criteria.select = _.uniq(select); - - return this; -}; - -/** - * Add a Where clause to the criteria object - * - * @param {Object} criteria to append - * @return this - */ - -Deferred.prototype.where = function(criteria) { - - if (!criteria) return this; - - // If the criteria is an array of objects, wrap it in an "or" - if (Array.isArray(criteria) && _.all(criteria, function(crit) {return _.isObject(crit);})) { - criteria = {or: criteria}; - } - - // Normalize criteria - criteria = normalize.criteria(criteria); - - // Wipe out the existing WHERE clause if the specified criteria ends up `false` - // (since neither could match anything) - if (criteria === false) { - this._criteria = false; - } - - if (!criteria || !criteria.where) return this; - - if (!this._criteria) this._criteria = {}; - var where = this._criteria.where || {}; - - // Merge with existing WHERE clause - Object.keys(criteria.where).forEach(function(key) { - where[key] = criteria.where[key]; - }); - - this._criteria.where = where; - - return this; -}; - -/** - * Add a Limit clause to the criteria object - * - * @param {Integer} number to limit - * @return this - */ - -Deferred.prototype.limit = function(limit) { - this._criteria.limit = limit; - - return this; -}; - -/** - * Add a Skip clause to the criteria object - * - * @param {Integer} number to skip - * @return this - */ - -Deferred.prototype.skip = function(skip) { - this._criteria.skip = skip; - - return this; -}; - -/** - * Add a Paginate clause to the criteria object - * - * This is 
syntatical sugar that calls skip and - * limit from a single function. - * - * @param {Object} page and limit - * @return this - */ -Deferred.prototype.paginate = function(options) { - var defaultLimit = 10; - - if (options === undefined) options = { page: 0, limit: defaultLimit }; - - var page = options.page || 0; - var limit = options.limit || defaultLimit; - var skip = 0; - - if (page > 0 && limit === 0) skip = page - 1; - if (page > 0 && limit > 0) skip = (page * limit) - limit; - - this - .skip(skip) - .limit(limit); - - return this; -}; - -/** - * Add a groupBy clause to the criteria object - * - * @param {Array|Arguments} Keys to group by - * @return this - */ -Deferred.prototype.groupBy = function() { - buildAggregate.call(this, 'groupBy', Array.prototype.slice.call(arguments)); - return this; -}; - - -/** - * Add a Sort clause to the criteria object - * - * @param {String|Object} key and order - * @return this - */ - -Deferred.prototype.sort = function(criteria) { - - if (!criteria) return this; - - // Normalize criteria - criteria = normalize.criteria({ sort: criteria }); - - var sort = this._criteria.sort || {}; - - Object.keys(criteria.sort).forEach(function(key) { - sort[key] = criteria.sort[key]; - }); - - this._criteria.sort = sort; - - return this; -}; - -/** - * Add a Sum clause to the criteria object - * - * @param {Array|Arguments} Keys to sum over - * @return this - */ -Deferred.prototype.sum = function() { - buildAggregate.call(this, 'sum', Array.prototype.slice.call(arguments)); - return this; -}; - -/** - * Add an Average clause to the criteria object - * - * @param {Array|Arguments} Keys to average over - * @return this - */ -Deferred.prototype.average = function() { - buildAggregate.call(this, 'average', Array.prototype.slice.call(arguments)); - return this; -}; - -/** - * Add a min clause to the criteria object - * - * @param {Array|Arguments} Keys to min over - * @return this - */ -Deferred.prototype.min = function() { - 
buildAggregate.call(this, 'min', Array.prototype.slice.call(arguments)); - return this; -}; - -/** - * Add a min clause to the criteria object - * - * @param {Array|Arguments} Keys to min over - * @return this - */ -Deferred.prototype.max = function() { - buildAggregate.call(this, 'max', Array.prototype.slice.call(arguments)); - return this; -}; - - -/** - * Add values to be used in update or create query - * - * @param {Object, Array} values - * @return this - */ - -Deferred.prototype.set = function(values) { - this._values = values; - - return this; -}; - -/** - * Pass metadata down to the adapter that won't be processed or touched by Waterline - */ - -Deferred.prototype.meta = function(data) { - this._meta = data; - return this; -}; - -/** - * Execute a Query using the method passed into the - * constuctor. - * - * @param {Function} callback - * @return callback with parameters (err, results) - */ - -Deferred.prototype.exec = function(cb) { - if (_.isUndefined(cb)) { - console.log( - 'Error: No callback supplied. Please define a callback function when executing a query. '+ - 'See http://sailsjs.com/docs/reference/waterline-orm/queries/exec for help.' - ); - return; - } - - var isValidCb = _.isFunction(cb) || (_.isObject(cb) && !_.isArray(cb)); - if (!isValidCb) { - console.log( - 'Error: Sorry, `.exec()` doesn\'t know how to handle a callback like that:\n'+ - util.inspect(cb, {depth: null})+'\n'+ - 'Instead, please provide a callback function when executing a query. '+ - 'See http://sailsjs.com/docs/reference/waterline-orm/queries/exec for help.' - ); - return; - } - - // Otherwise, the provided callback function is pretty cool, and all is right and well. 
- - // Normalize callback/switchback - cb = normalize.callback(cb); - - // Set up arguments + callback - var args = [this._criteria, cb]; - if (this._values) args.splice(1, 0, this._values); - - // If there is a meta value, throw it on the very end - if(this._meta) { - args.push(this._meta); - } - - // Pass control to the adapter with the appropriate arguments. - this._method.apply(this._context, args); -}; - -/** - * Executes a Query, and returns a promise - */ - -Deferred.prototype.toPromise = function() { - if (!this._deferred) { - this._deferred = Promise.promisify(this.exec).bind(this)(); - } - return this._deferred; -}; - -/** - * Executes a Query, and returns a promise that applies cb/ec to the - * result/error. - */ - -Deferred.prototype.then = function(cb, ec) { - return this.toPromise().then(cb, ec); -}; - -/** - * Applies results to function fn.apply, and returns a promise - */ - -Deferred.prototype.spread = function(cb) { - return this.toPromise().spread(cb); -}; - -/** - * returns a promise and gets resolved with error - */ - -Deferred.prototype.catch = function(cb) { - return this.toPromise().catch(cb); -}; - - -/** - * Alias "catch" as "fail" - */ -Deferred.prototype.fail = Deferred.prototype.catch; - -/** - * Build An Aggregate Criteria Option - * - * @param {String} key - * @api private - */ - -function buildAggregate(key, args) { - - // If passed in a list, set that as the min criteria - if (args[0] instanceof Array) { - args = args[0]; - } - - this._criteria[key] = args || {}; -} diff --git a/lib/waterline/query/dql/count.js b/lib/waterline/query/dql/count.js deleted file mode 100644 index e8ef48327..000000000 --- a/lib/waterline/query/dql/count.js +++ /dev/null @@ -1,60 +0,0 @@ -/** - * Module Dependencies - */ - -var _ = require('lodash'); -var usageError = require('../../utils/usageError'); -var utils = require('../../utils/helpers'); -var normalize = require('../../utils/normalize'); -var Deferred = require('../deferred'); - -/** - * Count of 
Records - * - * @param {Object} criteria - * @param {Object} options - * @param {Function} callback - * @return Deferred object if no callback - */ - -module.exports = function(criteria, options, cb, metaContainer) { - var usage = utils.capitalize(this.identity) + '.count([criteria],[options],callback)'; - - if (typeof criteria === 'function') { - cb = criteria; - criteria = null; - options = null; - } - - if (typeof options === 'function') { - cb = options; - options = null; - } - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.count, criteria); - } - - // Normalize criteria and fold in options - criteria = normalize.criteria(criteria); - - // If there was something defined in the criteria that would return no results, don't even - // run the query and just return 0 - if (criteria === false) { - return cb(null, 0); - } - - if (_.isObject(options) && _.isObject(criteria)) { - criteria = _.extend({}, criteria, options); - } - - if (_.isFunction(criteria) || _.isFunction(options)) { - return usageError('Invalid options specified!', usage, cb); - } - - // Transform Search Criteria - criteria = this._transformer.serialize(criteria); - - this.adapter.count(criteria, cb, metaContainer); -}; diff --git a/lib/waterline/query/dql/create.js b/lib/waterline/query/dql/create.js deleted file mode 100644 index d63008757..000000000 --- a/lib/waterline/query/dql/create.js +++ /dev/null @@ -1,257 +0,0 @@ -/** - * Module Dependencies - */ - -var async = require('async'); -var _ = require('lodash'); -var utils = require('../../utils/helpers'); -var Deferred = require('../deferred'); -var callbacks = require('../../utils/callbacksRunner'); -var nestedOperations = require('../../utils/nestedOperations'); -var hop = utils.object.hasOwnProperty; - - -/** - * Create a new record - * - * @param {Object || Array} values for single model or array of multiple values - * @param {Function} callback - * @return Deferred object if no 
callback - */ - -module.exports = function(values, cb, metaContainer) { - - var self = this; - - // Handle Deferred where it passes criteria first - if(_.isPlainObject(arguments[0]) && (_.isPlainObject(arguments[1]) || _.isArray(arguments[1]))) { - values = arguments[1]; - cb = arguments[2]; - } - - - // Loop through values and pull out any buffers before cloning - var bufferValues = {}; - - _.each(_.keys(values), function(key) { - if (Buffer.isBuffer(values[key])) { - bufferValues[key] = values[key]; - } - }); - - values = _.cloneDeep(values) || {}; - - // Replace clone keys with the buffer values - _.each(_.keys(bufferValues), function(key) { - values[key] = bufferValues[key]; - }); - - // Remove all undefined values - if (_.isArray(values)) { - values = _.remove(values, undefined); - } - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.create, {}, values); - } - - - // Handle Array of values - if (Array.isArray(values)) { - return this.createEach(values, cb, metaContainer); - } - - // Process Values - var valuesObject = processValues.call(this, values); - - // Create any of the belongsTo associations and set the foreign key values - createBelongsTo.call(this, valuesObject, function(err) { - if (err) return cb(err); - - beforeCallbacks.call(self, valuesObject, function(err) { - if (err) return cb(err); - createValues.call(self, valuesObject, cb, metaContainer); - }, metaContainer); - }); -}; - - -/** - * Process Values - * - * @param {Object} values - * @return {Object} - */ - -function processValues(values) { - - // Set Default Values if available - for (var key in this.attributes) { - if ((!hop(values, key) || values[key] === undefined) && hop(this.attributes[key], 'defaultsTo')) { - var defaultsTo = this.attributes[key].defaultsTo; - values[key] = typeof defaultsTo === 'function' ? 
defaultsTo.call(values) : _.clone(defaultsTo); - } - } - - // Pull out any associations in the values - var _values = _.cloneDeep(values); - var associations = nestedOperations.valuesParser.call(this, this.identity, this.waterline.schema, values); - - // Replace associated models with their foreign key values if available. - // Unless the association has a custom primary key (we want to create the object) - values = nestedOperations.reduceAssociations.call(this, this.identity, this.waterline.schema, values, 'create'); - - // Cast values to proper types (handle numbers as strings) - values = this._cast.run(values); - - return { values: values, originalValues: _values, associations: associations }; -} - -/** - * Create BelongsTo Records - * - */ - -function createBelongsTo(valuesObject, cb, metaContainer) { - var self = this; - - async.each(valuesObject.associations.models, function(item, next) { - - // Check if value is an object. If not don't try and create it. - if (!_.isPlainObject(valuesObject.values[item])) return next(); - - // Check for any transformations - var attrName = hop(self._transformer._transformations, item) ? self._transformer._transformations[item] : item; - - var attribute = self._schema.schema[attrName]; - var modelName; - - if (hop(attribute, 'collection')) modelName = attribute.collection; - if (hop(attribute, 'model')) modelName = attribute.model; - if (!modelName) return next(); - - var model = self.waterline.collections[modelName]; - var pkValue = valuesObject.originalValues[item][model.primaryKey]; - - var criteria = {}; - criteria[model.primaryKey] = pkValue; - - // If a pkValue if found, do a findOrCreate and look for a record matching the pk. 
- var query; - if (pkValue) { - query = model.findOrCreate(criteria, valuesObject.values[item]); - } else { - query = model.create(valuesObject.values[item]); - } - - if(metaContainer) { - query.meta(metaContainer); - } - - query.exec(function(err, val) { - if (err) return next(err); - - // attach the new model's pk value to the original value's key - var pk = val[model.primaryKey]; - - valuesObject.values[item] = pk; - next(); - }); - - }, cb); -} - -/** - * Run Before* Lifecycle Callbacks - * - * @param {Object} valuesObject - * @param {Function} cb - */ - -function beforeCallbacks(valuesObject, cb) { - var self = this; - - async.series([ - - // Run Validation with Validation LifeCycle Callbacks - function(cb) { - callbacks.validate(self, valuesObject.values, false, cb); - }, - - // Before Create Lifecycle Callback - function(cb) { - callbacks.beforeCreate(self, valuesObject.values, cb); - } - - ], cb); - -} - -/** - * Create Parent Record and any associated values - * - * @param {Object} valuesObject - * @param {Function} cb - */ - -function createValues(valuesObject, cb, metaContainer) { - var self = this; - var date; - - // Automatically add updatedAt and createdAt (if enabled) - if (self.autoCreatedAt) { - if (!valuesObject.values[self.autoCreatedAt]) { - date = date || new Date(); - valuesObject.values[self.autoCreatedAt] = date; - } - } - - if (self.autoUpdatedAt) { - if (!valuesObject.values[self.autoUpdatedAt]) { - date = date || new Date(); - valuesObject.values[self.autoUpdatedAt] = date; - } - } - - // Transform Values - valuesObject.values = self._transformer.serialize(valuesObject.values); - - // Clean attributes - valuesObject.values = self._schema.cleanValues(valuesObject.values); - - // Pass to adapter here - self.adapter.create(valuesObject.values, function(err, values) { - if (err) { - if (typeof err === 'object') { err.model = self._model.globalId; } - return cb(err); - } - - // Unserialize values - values = 
self._transformer.unserialize(values); - - // If no associations were used, run after - if (valuesObject.associations.collections.length === 0) return after(values); - - var parentModel = new self._model(values); - nestedOperations.create.call(self, parentModel, valuesObject.originalValues, valuesObject.associations.collections, function(err) { - if (err) return cb(err); - - return after(parentModel.toObject()); - }); - - - function after(values) { - - // Run After Create Callbacks - callbacks.afterCreate(self, values, function(err) { - if (err) return cb(err); - - // Return an instance of Model - var model = new self._model(values); - cb(null, model); - }); - } - - }, metaContainer); -} diff --git a/lib/waterline/query/dql/destroy.js b/lib/waterline/query/dql/destroy.js deleted file mode 100644 index 04562cd08..000000000 --- a/lib/waterline/query/dql/destroy.js +++ /dev/null @@ -1,140 +0,0 @@ -/** - * Module Dependencies - */ - -var async = require('async'); -var _ = require('lodash'); -var usageError = require('../../utils/usageError'); -var utils = require('../../utils/helpers'); -var normalize = require('../../utils/normalize'); -var Deferred = require('../deferred'); -var getRelations = require('../../utils/getRelations'); -var callbacks = require('../../utils/callbacksRunner'); -var hasOwnProperty = utils.object.hasOwnProperty; - -/** - * Destroy a Record - * - * @param {Object} criteria to destroy - * @param {Function} callback - * @return Deferred object if no callback - */ - -module.exports = function(criteria, cb, metaContainer) { - var self = this; - var pk; - - if (typeof criteria === 'function') { - cb = criteria; - criteria = {}; - } - - // Check if criteria is an integer or string and normalize criteria - // to object, using the specified primary key field. 
- criteria = normalize.expandPK(self, criteria); - - // Normalize criteria - criteria = normalize.criteria(criteria); - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.destroy, criteria); - } - - var usage = utils.capitalize(this.identity) + '.destroy([options], callback)'; - if (typeof cb !== 'function') return usageError('Invalid callback specified!', usage, cb); - - // If there was something defined in the criteria that would return no results, don't even - // run the query and just return an empty result set. - if (criteria === false) { - return cb(null, []); - } - - callbacks.beforeDestroy(self, criteria, function(err) { - if (err) return cb(err); - - // Transform Search Criteria - criteria = self._transformer.serialize(criteria); - - // Pass to adapter - self.adapter.destroy(criteria, function(err, result) { - if (err) return cb(err); - - // Look for any m:m associations and destroy the value in the join table - var relations = getRelations({ - schema: self.waterline.schema, - parentCollection: self.identity - }); - - if (relations.length === 0) return after(); - - // Find the collection's primary key - for (var key in self.attributes) { - if (!self.attributes[key].hasOwnProperty('primaryKey')) continue; - - // Check if custom primaryKey value is falsy - if (!self.attributes[key].primaryKey) continue; - - if (self.attributes[key].columnName) { - pk = self.attributes[key].columnName; - } else { - pk = key; - } - - break; - } - - function destroyJoinTableRecords(item, next) { - var collection = self.waterline.collections[item]; - var refKey; - - Object.keys(collection._attributes).forEach(function(key) { - var attr = collection._attributes[key]; - if (attr.references !== self.identity) return; - refKey = key; - }); - - // If no refKey return, this could leave orphaned join table values but it's better - // than crashing. 
- if (!refKey) return next(); - - // Make sure we don't return any undefined pks - var mappedValues = result.reduce(function(memo, vals) { - if (vals[pk] !== undefined) { - memo.push(vals[pk]); - } - return memo; - }, []); - - var criteria = {}; - - if (mappedValues.length > 0) { - criteria[refKey] = mappedValues; - var q = collection.destroy(criteria); - - if(metaContainer) { - q.meta(metaContainer); - } - - q.exec(next); - } else { - return next(); - } - - } - - async.each(relations, destroyJoinTableRecords, function(err) { - if (err) return cb(err); - after(); - }); - - function after() { - callbacks.afterDestroy(self, result, function(err) { - if (err) return cb(err); - cb(null, result); - }); - } - - }, metaContainer); - }); -}; diff --git a/lib/waterline/query/dql/index.js b/lib/waterline/query/dql/index.js deleted file mode 100644 index 0ecdb5df7..000000000 --- a/lib/waterline/query/dql/index.js +++ /dev/null @@ -1,12 +0,0 @@ - -/** - * Export DQL Methods - */ - -module.exports = { - create: require('./create'), - update: require('./update'), - destroy: require('./destroy'), - count: require('./count'), - join: require('./join') -}; diff --git a/lib/waterline/query/dql/join.js b/lib/waterline/query/dql/join.js deleted file mode 100644 index 1ded4fb9b..000000000 --- a/lib/waterline/query/dql/join.js +++ /dev/null @@ -1,10 +0,0 @@ -/** - * Join - * - * Join with another collection - * (use optimized join in adapter if one was provided) - */ - -module.exports = function(collection, fk, pk, cb, metaContainer) { - this._adapter.join(collection, fk, pk, cb, metaContainer); -}; diff --git a/lib/waterline/query/dql/update.js b/lib/waterline/query/dql/update.js deleted file mode 100644 index 27c59d509..000000000 --- a/lib/waterline/query/dql/update.js +++ /dev/null @@ -1,279 +0,0 @@ -/** - * Module Dependencies - */ - -var async = require('async'); -var _ = require('lodash'); -var usageError = require('../../utils/usageError'); -var utils = 
require('../../utils/helpers'); -var normalize = require('../../utils/normalize'); -var Deferred = require('../deferred'); -var callbacks = require('../../utils/callbacksRunner'); -var nestedOperations = require('../../utils/nestedOperations'); -var hop = utils.object.hasOwnProperty; - - -/** - * Update all records matching criteria - * - * @param {Object} criteria - * @param {Object} values - * @param {Function} cb - * @return Deferred object if no callback - */ - -module.exports = function(criteria, values, cb, metaContainer) { - - var self = this; - - if (typeof criteria === 'function') { - cb = criteria; - criteria = null; - } - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.update, criteria, values); - } - - // If there was something defined in the criteria that would return no results, don't even - // run the query and just return an empty result set. - if (criteria === false) { - return cb(null, []); - } - - // Ensure proper function signature - var usage = utils.capitalize(this.identity) + '.update(criteria, values, callback)'; - if (!values) return usageError('No updated values specified!', usage, cb); - - // Format Criteria and Values - var valuesObject = prepareArguments.call(this, criteria, values); - - // Create any of the belongsTo associations and set the foreign key values - createBelongsTo.call(this, valuesObject, function(err) { - if (err) return cb(err); - - beforeCallbacks.call(self, valuesObject.values, function(err) { - if (err) return cb(err); - updateRecords.call(self, valuesObject, cb, metaContainer); - }); - }, metaContainer); -}; - - -/** - * Prepare Arguments - * - * @param {Object} criteria - * @param {Object} values - * @return {Object} - */ - -function prepareArguments(criteria, values) { - - // Check if options is an integer or string and normalize criteria - // to object, using the specified primary key field. 
- criteria = normalize.expandPK(this, criteria); - - // Normalize criteria - criteria = normalize.criteria(criteria); - - // Pull out any associations in the values - var _values = _.cloneDeep(values); - var associations = nestedOperations.valuesParser.call(this, this.identity, this.waterline.schema, values); - - // Replace associated models with their foreign key values if available. - // Unless the association has a custom primary key (we want to create the object) - values = nestedOperations.reduceAssociations.call(this, this.identity, this.waterline.schema, values, 'update'); - - // Cast values to proper types (handle numbers as strings) - values = this._cast.run(values); - - return { - criteria: criteria, - values: values, - originalValues: _values, - associations: associations - }; -} - -/** - * Create BelongsTo Records - * - */ - -function createBelongsTo(valuesObject, cb, metaContainer) { - var self = this; - - async.each(valuesObject.associations.models.slice(0), function(item, next) { - - // Check if value is an object. If not don't try and create it. - if (!_.isPlainObject(valuesObject.values[item])) return next(); - - // Check for any transformations - var attrName = hop(self._transformer._transformations, item) ? self._transformer._transformations[item] : item; - - var attribute = self._schema.schema[attrName]; - var modelName; - - if (hop(attribute, 'collection')) modelName = attribute.collection; - if (hop(attribute, 'model')) modelName = attribute.model; - if (!modelName) return next(); - - var model = self.waterline.collections[modelName]; - var pkValue = valuesObject.originalValues[item][model.primaryKey]; - - var criteria = {}; - - var pkField = hop(model._transformer._transformations, model.primaryKey) ? model._transformer._transformations[model.primaryKey] : model.primaryKey; - - criteria[pkField] = pkValue; - - // If a pkValue if found, do a findOrCreate and look for a record matching the pk. 
- var query; - if (pkValue) { - query = model.findOrCreate(criteria, valuesObject.values[item]); - } else { - query = model.create(valuesObject.values[item]); - } - - if(metaContainer) { - query.meta(metaContainer); - } - - query.exec(function(err, val) { - if (err) return next(err); - - // attach the new model's pk value to the original value's key - var pk = val[model.primaryKey]; - - valuesObject.values[item] = pk; - - // now we have pk value attached, remove it from models - _.remove(valuesObject.associations.models, function(_item) { return _item == item; }); - next(); - }); - - }, cb); -} - -/** - * Run Before* Lifecycle Callbacks - * - * @param {Object} values - * @param {Function} cb - */ - -function beforeCallbacks(values, cb) { - var self = this; - - async.series([ - - // Run Validation with Validation LifeCycle Callbacks - function(cb) { - callbacks.validate(self, values, true, cb); - }, - - // Before Update Lifecycle Callback - function(cb) { - callbacks.beforeUpdate(self, values, cb); - } - - ], cb); -} - -/** - * Update Records - * - * @param {Object} valuesObjecy - * @param {Function} cb - */ - -function updateRecords(valuesObject, cb, metaContainer) { - var self = this; - - // Automatically change updatedAt (if enabled) - if (this.autoUpdatedAt) { - // take into account that the autoUpdateAt attribute may be a string with a different column name - valuesObject.values[self.autoUpdatedAt] = new Date(); - } - - // Transform Values - valuesObject.values = this._transformer.serialize(valuesObject.values); - - // Clean attributes - valuesObject.values = this._schema.cleanValues(valuesObject.values); - - // Transform Search Criteria - valuesObject.criteria = self._transformer.serialize(valuesObject.criteria); - - - // Pass to adapter - self.adapter.update(valuesObject.criteria, valuesObject.values, function(err, values) { - if (err) { - if (typeof err === 'object') { err.model = self._model.globalId; } - return cb(err); - } - - // If values is not an 
array, return an array - if (!Array.isArray(values)) values = [values]; - - // Unserialize each value - var transformedValues = values.map(function(value) { - return self._transformer.unserialize(value); - }); - - // Update any nested associations and run afterUpdate lifecycle callbacks for each parent - updatedNestedAssociations.call(self, valuesObject, transformedValues, function(err) { - if (err) return cb(err); - - async.each(transformedValues, function(record, callback) { - callbacks.afterUpdate(self, record, callback); - }, function(err) { - if (err) return cb(err); - - var models = transformedValues.map(function(value) { - return new self._model(value); - }); - - cb(null, models); - }); - }); - - }, metaContainer); -} - -/** - * Update Nested Associations - * - * @param {Object} valuesObject - * @param {Object} values - * @param {Function} cb - */ - -function updatedNestedAssociations(valuesObject, values, cb) { - - var self = this; - var associations = valuesObject.associations || {}; - - // Only attempt nested updates if values are an object or an array - associations.models = _.filter(associations.models, function(model) { - var vals = valuesObject.originalValues[model]; - return _.isPlainObject(vals) || Array.isArray(vals); - }); - - // If no associations were used, return callback - if (associations.collections.length === 0 && associations.models.length === 0) { - return cb(); - } - - // Create an array of model instances for each parent - var parents = values.map(function(val) { - return new self._model(val); - }); - - // Update any nested associations found in the values object - var args = [parents, valuesObject.originalValues, valuesObject.associations, cb]; - nestedOperations.update.apply(self, args); - -} diff --git a/lib/waterline/query/finders/basic.js b/lib/waterline/query/finders/basic.js deleted file mode 100644 index a74a7696a..000000000 --- a/lib/waterline/query/finders/basic.js +++ /dev/null @@ -1,470 +0,0 @@ -/** - * Basic Finder Queries 
- */ - -var usageError = require('../../utils/usageError'); -var utils = require('../../utils/helpers'); -var normalize = require('../../utils/normalize'); -var sorter = require('../../utils/sorter'); -var Deferred = require('../deferred'); -var Joins = require('./joins'); -var Operations = require('./operations'); -var Integrator = require('../integrator'); -var waterlineCriteria = require('waterline-criteria'); -var _ = require('lodash'); -var async = require('async'); -var hasOwnProperty = utils.object.hasOwnProperty; - -module.exports = { - - /** - * Find a single record that meets criteria - * - * @param {Object} criteria to search - * @param {Function} callback - * @return Deferred object if no callback - */ - - findOne: function(criteria, cb, metaContainer) { - var self = this; - - if (typeof criteria === 'function') { - cb = criteria; - criteria = null; - } - - // If the criteria is an array of objects, wrap it in an "or" - if (Array.isArray(criteria) && _.all(criteria, function(crit) {return _.isObject(crit);})) { - criteria = {or: criteria}; - } - - // Check if criteria is an integer or string and normalize criteria - // to object, using the specified primary key field. 
- criteria = normalize.expandPK(self, criteria); - - // Normalize criteria - criteria = normalize.criteria(criteria); - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.findOne, criteria); - } - - // Transform Search Criteria - criteria = self._transformer.serialize(criteria); - - // If a projection is being used, ensure that the Primary Key is included - if(criteria.select) { - _.each(this._schema.schema, function(val, key) { - if (_.has(val, 'primaryKey') && val.primaryKey) { - criteria.select.push(key); - } - }); - - criteria.select = _.uniq(criteria.select); - } - - // serialize populated object - if (criteria.joins) { - criteria.joins.forEach(function(join) { - if (join.criteria && join.criteria.where) { - var joinCollection = self.waterline.collections[join.child]; - join.criteria.where = joinCollection._transformer.serialize(join.criteria.where); - } - }); - } - - // If there was something defined in the criteria that would return no results, don't even - // run the query and just return an empty result set. - if (criteria === false || criteria.where === null) { - // Build Default Error Message - var err = '.findOne() requires a criteria. If you want the first record try .find().limit(1)'; - return cb(new Error(err)); - } - - // Build up an operations set - var operations = new Operations(self, criteria, 'findOne', metaContainer); - - // Run the operations - operations.run(function(err, values) { - if (err) return cb(err); - if (!values.cache) return cb(); - - // If no joins are used grab the only item from the cache and pass to the returnResults - // function. - if (!criteria.joins) { - values = values.cache[self.identity]; - return returnResults(values); - } - - // If the values are already combined, return the results - if (values.combined) { - return returnResults(values.cache[self.identity]); - } - - // Find the primaryKey of the current model so it can be passed down to the integrator. 
- // Use 'id' as a good general default; - var primaryKey = 'id'; - - Object.keys(self._schema.schema).forEach(function(key) { - if (self._schema.schema[key].hasOwnProperty('primaryKey') && self._schema.schema[key].primaryKey) { - primaryKey = key; - } - }); - - - // Perform in-memory joins - Integrator(values.cache, criteria.joins, primaryKey, function(err, results) { - if (err) return cb(err); - if (!results) return cb(); - - // We need to run one last check on the results using the criteria. This allows a self - // association where we end up with two records in the cache both having each other as - // embedded objects and we only want one result. However we need to filter any join criteria - // out of the top level where query so that searchs by primary key still work. - var tmpCriteria = _.cloneDeep(criteria.where); - if (!tmpCriteria) tmpCriteria = {}; - - criteria.joins.forEach(function(join) { - if (!hasOwnProperty(join, 'alias')) return; - - // Check for `OR` criteria - if (hasOwnProperty(tmpCriteria, 'or')) { - tmpCriteria.or.forEach(function(search) { - if (!hasOwnProperty(search, join.alias)) return; - delete search[join.alias]; - }); - return; - } - - if (!hasOwnProperty(tmpCriteria, join.alias)) return; - delete tmpCriteria[join.alias]; - }); - - // Pass results into Waterline-Criteria - var _criteria = { where: tmpCriteria }; - results = waterlineCriteria('parent', { parent: results }, _criteria).results; - - results.forEach(function(res) { - - // Go Ahead and perform any sorts on the associated data - criteria.joins.forEach(function(join) { - if (!join.criteria) return; - var c = normalize.criteria(join.criteria); - if (!c.sort) return; - - var alias = join.alias; - res[alias] = sorter(res[alias], c.sort); - }); - }); - - returnResults(results); - }); - - function returnResults(results) { - - if (!results) return cb(); - - // Normalize results to an array - if (!Array.isArray(results) && results) results = [results]; - - // Unserialize each of the 
results before attempting any join logic on them - var unserializedModels = []; - results.forEach(function(result) { - unserializedModels.push(self._transformer.unserialize(result)); - }); - - var models = []; - var joins = criteria.joins ? criteria.joins : []; - var data = new Joins(joins, unserializedModels, self.identity, self._schema.schema, self.waterline.collections); - - // If `data.models` is invalid (not an array) return early to avoid getting into trouble. - if (!data || !data.models || !data.models.forEach) { - return cb(new Error('Values returned from operations set are not an array...')); - } - - // Create a model for the top level values - data.models.forEach(function(model) { - models.push(new self._model(model, data.options)); - }); - - cb(null, models[0]); - } - }); - }, - - /** - * Find All Records that meet criteria - * - * @param {Object} search criteria - * @param {Object} options - * @param {Function} callback - * @return Deferred object if no callback - */ - - find: function(criteria, options, cb, metaContainer) { - var self = this; - var usage = utils.capitalize(this.identity) + '.find([criteria],[options]).exec(callback|switchback)'; - - if (typeof criteria === 'function') { - cb = criteria; - criteria = null; - - if(arguments.length === 1) { - options = null; - } - } - - // If options is a function, we want to check for any more values before nulling - // them out or overriding them. - if (typeof options === 'function') { - - // If cb also exists it means there is a metaContainer value - if (cb) { - metaContainer = cb; - cb = options; - options = null; - } else { - cb = options; - options = null; - } - - } - - // If the criteria is an array of objects, wrap it in an "or" - if (Array.isArray(criteria) && _.all(criteria, function(crit) {return _.isObject(crit);})) { - criteria = {or: criteria}; - } - - // Check if criteria is an integer or string and normalize criteria - // to object, using the specified primary key field. 
    // Normalize a primary-key shorthand (integer/string criteria) into a
    // criteria object keyed by this model's primary key.
    criteria = normalize.expandPK(self, criteria);

    // Normalize criteria into the canonical `{ where: ... }` form.
    criteria = normalize.criteria(criteria);

    // Validate arguments: by this point neither criteria nor options may still
    // be a function (callbacks were shifted out of them above).
    if (typeof criteria === 'function' || typeof options === 'function') {
      return usageError('Invalid options specified!', usage, cb);
    }

    // Return a Deferred (promise-style usage) when no callback was supplied.
    if (typeof cb !== 'function') {
      return new Deferred(this, this.find, criteria, options);
    }

    // `normalize.criteria` returns `false` when the criteria can never match
    // anything; skip the adapter round-trip and return an empty result set.
    if (criteria === false) {
      return cb(null, []);
    }

    // Fold plain-object `options` into the criteria object.
    if (options === Object(options) && criteria === Object(criteria)) {
      criteria = _.extend({}, criteria, options);
    }

    // If a projection (`select`) is being used, ensure that the primary key is
    // always included (joins and model instantiation rely on it).
    if(criteria.select) {
      _.each(this._schema.schema, function(val, key) {
        if (_.has(val, 'primaryKey') && val.primaryKey) {
          criteria.select.push(key);
        }
      });

      criteria.select = _.uniq(criteria.select);
    }

    // Transform search criteria from attribute names to column names.
    if (!self._transformer) {
      throw new Error('Waterline can not access transformer-- maybe the context of the method is being overridden?');
    }

    criteria = self._transformer.serialize(criteria);

    // Serialize each populated (join) child's WHERE clause using the child
    // collection's own transformer, since column names may differ per model.
    if (criteria.joins) {
      criteria.joins.forEach(function(join) {
        if (join.criteria && join.criteria.where) {
          var joinCollection = self.waterline.collections[join.child];
          join.criteria.where = joinCollection._transformer.serialize(join.criteria.where);
        }
      });
    }

    // Build up an operations set (may span multiple connections/adapters).
    var operations = new Operations(self, criteria, 'find', metaContainer);

    // Run the operations
    operations.run(function(err, values) {
      if (err) return cb(err);
      if (!values.cache) return cb();

      // If no joins are used, grab the current collection's items from the
      // cache and pass them straight to returnResults.
      if (!criteria.joins) {
        values = values.cache[self.identity];
        return returnResults(values);
      }

      // If the values are already combined (native join), return the results.
      if (values.combined) {
        return returnResults(values.cache[self.identity]);
      }

      // Find the primary key of the current model so it can be passed down to
      // the integrator.  Use 'id' as a reasonable general default.
      var primaryKey = 'id';

      Object.keys(self._schema.schema).forEach(function(key) {
        if (self._schema.schema[key].hasOwnProperty('primaryKey') && self._schema.schema[key].primaryKey) {
          primaryKey = key;
        }
      });

      // Perform in-memory joins
      Integrator(values.cache, criteria.joins, primaryKey, function(err, results) {
        if (err) return cb(err);
        if (!results) return cb();

        // We need to run one last check on the results using the criteria.
        // This allows a self-association where we end up with two records in
        // the cache, both having each other as embedded objects, and we only
        // want one result.  However we need to filter any join criteria out of
        // the top-level WHERE query so that searches by primary key still work.
        var tmpCriteria = _.cloneDeep(criteria.where);
        if (!tmpCriteria) tmpCriteria = {};

        criteria.joins.forEach(function(join) {
          if (!hasOwnProperty(join, 'alias')) return;

          // Check for `OR` criteria: strip the join alias from every branch.
          if (hasOwnProperty(tmpCriteria, 'or')) {
            tmpCriteria.or.forEach(function(search) {
              if (!hasOwnProperty(search, join.alias)) return;
              delete search[join.alias];
            });
            return;
          }

          if (!hasOwnProperty(tmpCriteria, join.alias)) return;
          delete tmpCriteria[join.alias];
        });

        // Re-filter the integrated results in memory via Waterline-Criteria,
        // using the alias-stripped criteria built above.
        var _criteria = { where: tmpCriteria };
        results = waterlineCriteria('parent', { parent: results }, _criteria).results;

        // Serialize values coming from an in-memory join before modelizing
        results.forEach(function(res) {

          // Perform any sorts requested on the associated (populated) data.
          criteria.joins.forEach(function(join) {
            if (!join.criteria) return;
            var c = normalize.criteria(join.criteria);
            var alias = join.alias;
            if (c.sort) {
              res[alias] = sorter(res[alias], c.sort);
            }

            // If a junction table was used we need to do limit and skip
            // in-memory.  Paginated populates are painful here and need work;
            // hopefully this gets redone in WL2.  If you need paginated
            // populates, try to keep all the tables in the query on the same
            // connection so it can be done in a single query.
- if (!join.junctionTable) return; - - if (c.skip) { - res[alias].splice(0, c.skip); - } - - if (c.limit) { - res[alias] = _.take(res[alias], c.limit); - } - }); - }); - - returnResults(results); - }); - - function returnResults(results) { - - if (!results) return cb(null, []); - - // Normalize results to an array - if (!Array.isArray(results) && results) results = [results]; - - // Unserialize each of the results before attempting any join logic on them - var unserializedModels = []; - - if (results) { - results.forEach(function(result) { - unserializedModels.push(self._transformer.unserialize(result)); - }); - } - - var models = []; - var joins = criteria.joins ? criteria.joins : []; - var data = new Joins(joins, unserializedModels, self.identity, self._schema.schema, self.waterline.collections); - - // NOTE: - // If a "belongsTo" (i.e. HAS_FK) association is null, should it be transformed into - // an empty array here? That is not what is happening currently, and it can cause - // unexpected problems when implementing the native join method as an adapter implementor. - // ~Mike June 22, 2014 - - // If `data.models` is invalid (not an array) return early to avoid getting into trouble. - if (!data || !data.models || !data.models.forEach) { - return cb(new Error('Values returned from operations set are not an array...')); - } - - // Create a model for the top level values - data.models.forEach(function(model) { - models.push(new self._model(model, data.options)); - }); - - - cb(null, models); - } - - }); - }, - - where: function() { - this.find.apply(this, Array.prototype.slice.call(arguments)); - }, - - select: function() { - this.find.apply(this, Array.prototype.slice.call(arguments)); - }, - - - /** - * findAll - * [[ Deprecated! 
]] - * - * @param {Object} criteria - * @param {Object} options - * @param {Function} cb - */ - findAll: function(criteria, options, cb) { - if (typeof criteria === 'function') { - cb = criteria; - criteria = null; - options = null; - } - - if (typeof options === 'function') { - cb = options; - options = null; - } - - // Return Deferred or pass to adapter - if (typeof cb !== 'function') { - return new Deferred(this, this.findAll, criteria); - } - - cb(new Error('In Waterline >= 0.9, findAll() has been deprecated in favor of find().' + - '\nPlease visit the migration guide at http://sailsjs.org for help upgrading.')); - } - -}; diff --git a/lib/waterline/query/finders/dynamicFinders.js b/lib/waterline/query/finders/dynamicFinders.js deleted file mode 100644 index b8330ce64..000000000 --- a/lib/waterline/query/finders/dynamicFinders.js +++ /dev/null @@ -1,290 +0,0 @@ -/** - * Dynamic Queries - * - * Query the collection using the name of the attribute directly - */ - -var _ = require('lodash'); -var usageError = require('../../utils/usageError'); -var utils = require('../../utils/helpers'); -var normalize = require('../../utils/normalize'); -var hasOwnProperty = utils.object.hasOwnProperty; - -var finder = module.exports = {}; - -/** - * buildDynamicFinders - * - * Attaches shorthand dynamic methods to the prototype for each attribute - * in the schema. 
- */ - -finder.buildDynamicFinders = function() { - var self = this; - - // For each defined attribute, create a dynamic finder function - Object.keys(this._attributes).forEach(function(attrName) { - - // Check if attribute is an association, if so generate limited dynamic finders - if (hasOwnProperty(self._schema.schema[attrName], 'foreignKey')) { - if (self.associationFinders !== false) { - self.generateAssociationFinders(attrName); - } - return; - } - - var capitalizedMethods = ['findOneBy*', 'findOneBy*In', 'findOneBy*Like', 'findBy*', 'findBy*In', - 'findBy*Like', 'countBy*', 'countBy*In', 'countBy*Like']; - - var lowercasedMethods = ['*StartsWith', '*Contains', '*EndsWith']; - - - if (self.dynamicFinders !== false) { - capitalizedMethods.forEach(function(method) { - self.generateDynamicFinder(attrName, method); - }); - lowercasedMethods.forEach(function(method) { - self.generateDynamicFinder(attrName, method, true); - }); - } - }); -}; - - -/** - * generateDynamicFinder - * - * Creates a dynamic method based off the schema. Used for shortcuts for various - * methods where a criteria object can automatically be built. - * - * @param {String} attrName - * @param {String} method - * @param {Boolean} dont capitalize the attrName or do, defaults to false - */ - -finder.generateDynamicFinder = function(attrName, method, dontCapitalize) { - var self = this; - var criteria; - - // Capitalize Attribute Name for camelCase - var preparedAttrName = dontCapitalize ? attrName : utils.capitalize(attrName); - - // Figure out actual dynamic method name by injecting attribute name - var actualMethodName = method.replace(/\*/g, preparedAttrName); - - // Assign this finder to the collection - this[actualMethodName] = function dynamicMethod(value, options, cb) { - - if (typeof options === 'function') { - cb = options; - options = null; - } - - options = options || {}; - - var usage = utils.capitalize(self.identity) + '.' 
+ actualMethodName + '(someValue,[options],callback)'; - - if (typeof value === 'undefined') return usageError('No value specified!', usage, cb); - if (options.where) return usageError('Cannot specify `where` option in a dynamic ' + method + '*() query!', usage, cb); - - // Build criteria query and submit it - options.where = {}; - options.where[attrName] = value; - - switch (method) { - - - /////////////////////////////////////// - // Finders - /////////////////////////////////////// - - - case 'findOneBy*': - case 'findOneBy*In': - return self.findOne(options, cb); - - case 'findOneBy*Like': - criteria = _.extend(options, { - where: { - like: options.where - } - }); - - return self.findOne(criteria, cb); - - - /////////////////////////////////////// - // Aggregate Finders - /////////////////////////////////////// - - - case 'findBy*': - case 'findBy*In': - return self.find(options, cb); - - case 'findBy*Like': - criteria = _.extend(options, { - where: { - like: options.where - } - }); - - return self.find(criteria, cb); - - - /////////////////////////////////////// - // Count Finders - /////////////////////////////////////// - - - case 'countBy*': - case 'countBy*In': - return self.count(options, cb); - - case 'countBy*Like': - criteria = _.extend(options, { - where: { - like: options.where - } - }); - - return self.count(criteria, cb); - - - /////////////////////////////////////// - // Searchers - /////////////////////////////////////// - - case '*StartsWith': - return self.startsWith(options, cb); - - case '*Contains': - return self.contains(options, cb); - - case '*EndsWith': - return self.endsWith(options, cb); - } - }; -}; - - -/** - * generateAssociationFinders - * - * Generate Dynamic Finders for an association. - * Adds a .findBy() method for has_one and belongs_to associations. 
- * - * @param {String} attrName, the column name of the attribute - */ - -finder.generateAssociationFinders = function(attrName) { - var self = this; - var name, model; - - // Find the user defined key for this attrName, look in self defined columnName - // properties and if that's not set see if the generated columnName matches the attrName - for (var key in this._attributes) { - - // Cache the value - var cache = this._attributes[key]; - - if (!hasOwnProperty(cache, 'model')) continue; - - if (cache.model.toLowerCase() + '_id' === attrName) { - name = key; - model = cache.model; - } - } - - if (!name || !model) return; - - // Build a findOneBy dynamic finder that forces a join on the association - this['findOneBy' + utils.capitalize(name)] = function dynamicAssociationMethod(value, cb) { - - // Check proper usage - var usage = utils.capitalize(self.identity) + '.' + 'findBy' + utils.capitalize(name) + - '(someValue, callback)'; - - if (typeof value === 'undefined') return usageError('No value specified!', usage, cb); - if (typeof value === 'function') return usageError('No value specified!', usage, cb); - - var criteria = associationQueryCriteria(self, value, attrName); - return this.findOne(criteria, cb); - }; - - // Build a findBy dynamic finder that forces a join on the association - this['findBy' + utils.capitalize(name)] = function dynamicAssociationMethod(value, cb) { - - // Check proper usage - var usage = utils.capitalize(self.identity) + '.' 
+ 'findBy' + utils.capitalize(name) + - '(someValue, callback)'; - - if (typeof value === 'undefined') return usageError('No value specified!', usage, cb); - if (typeof value === 'function') return usageError('No value specified!', usage, cb); - - var criteria = associationQueryCriteria(self, value, attrName); - return this.find(criteria, cb); - }; -}; - - -/** - * Build Join Array - */ - -function buildJoin() { - var self = this; - var pk, attr; - - // Set the attr value to the generated schema attribute - attr = self.waterline.schema[self.identity].attributes[name]; - - // Get the current collection's primary key attribute - Object.keys(self._attributes).forEach(function(key) { - if (hasOwnProperty(self._attributes[key], 'primaryKey') && self._attributes[key].primaryKey) { - pk = key; - } - }); - - if (!attr) throw new Error('Attempting to populate an attribute that doesn\'t exist'); - - // Grab the key being populated to check if it is a has many to belongs to - // If it's a belongs_to the adapter needs to know that it should replace the foreign key - // with the associated value. - var parentKey = self.waterline.collections[self.identity].attributes[name]; - - - // Build the initial join object that will link this collection to either another collection - // or to a junction table. 
- var join = { - parent: self._tableName, - parentKey: attr.columnName || pk, - child: attr.references, - childKey: attr.on, - select: true, - removeParentKey: !!parentKey.model - }; - - return join; -} - -/** - * Query Criteria Builder for associations - */ - -function associationQueryCriteria(context, value, attrName) { - - // Build a criteria object - var criteria = { - where: {}, - joins: [] - }; - - // Build a join condition - var join = buildJoin.call(context); - criteria.joins.push(join); - - // Add where values - criteria.where[attrName] = value; - return criteria; -} diff --git a/lib/waterline/query/finders/helpers.js b/lib/waterline/query/finders/helpers.js deleted file mode 100644 index 4176351c2..000000000 --- a/lib/waterline/query/finders/helpers.js +++ /dev/null @@ -1,74 +0,0 @@ -/** - * Finder Helper Queries - * - * (these call other collection-level methods) - */ - -var usageError = require('../../utils/usageError'); -var utils = require('../../utils/helpers'); -var normalize = require('../../utils/normalize'); - -module.exports = { - - // Return models where ALL of the specified attributes match queryString - - findOneLike: function(criteria, options, cb) { - var usage = utils.capitalize(this.identity) + '.findOneLike([criteria],[options],callback)'; - - // Normalize criteria - criteria = normalize.likeCriteria(criteria, this._schema.schema); - if (!criteria) return usageError('Criteria must be an object!', usage, cb); - - this.findOne(criteria, options, cb); - }, - - findLike: function(criteria, options, cb) { - var usage = utils.capitalize(this.identity) + '.findLike([criteria],[options],callback)'; - - // Normalize criteria - criteria = normalize.likeCriteria(criteria, this._schema.schema); - if (!criteria) return usageError('Criteria must be an object!', usage, cb); - - this.find(criteria, options, cb); - }, - - // Return models where >= 1 of the specified attributes start with queryString - startsWith: function(criteria, options, cb) { - var 
usage = utils.capitalize(this.identity) + '.startsWith([criteria],[options],callback)'; - - criteria = normalize.likeCriteria(criteria, this._schema.schema, function applyStartsWith(criteria) { - return criteria + '%'; - }); - - if (!criteria) return usageError('Criteria must be an object!', usage, cb); - - this.find(criteria, options, cb); - }, - - // Return models where >= 1 of the specified attributes end with queryString - endsWith: function(criteria, options, cb) { - var usage = utils.capitalize(this.identity) + '.startsWith([criteria],[options],callback)'; - - criteria = normalize.likeCriteria(criteria, this._schema.schema, function applyEndsWith(criteria) { - return '%' + criteria; - }); - - if (!criteria) return usageError('Criteria must be an object!', usage, cb); - - this.find(criteria, options, cb); - }, - - // Return models where >= 1 of the specified attributes contain queryString - contains: function(criteria, options, cb) { - var usage = utils.capitalize(this.identity) + '.startsWith([criteria],[options],callback)'; - - criteria = normalize.likeCriteria(criteria, this._schema.schema, function applyContains(criteria) { - return '%' + criteria + '%'; - }); - - if (!criteria) return usageError('Criteria must be an object!', usage, cb); - - this.find(criteria, options, cb); - } - -}; diff --git a/lib/waterline/query/finders/joins.js b/lib/waterline/query/finders/joins.js deleted file mode 100644 index aca6f2e77..000000000 --- a/lib/waterline/query/finders/joins.js +++ /dev/null @@ -1,231 +0,0 @@ -/** - * Module Dependencies - */ - -var _ = require('lodash'); -var utils = require('../../utils/helpers'); -var hop = utils.object.hasOwnProperty; - -/** - * Logic For Handling Joins inside a Query Results Object - */ - -var Joins = module.exports = function(joins, values, identity, schema, collections) { - - this.identity = identity; - - // Hold Joins specified in the criteria - this.joins = joins || []; - - // Hold the result values - this.values = values || 
[]; - - // Hold the overall schema - this.schema = schema || {}; - - // Hold all the Waterline collections so we can make models - this.collections = collections || {}; - - // Build up modelOptions - this.modelOptions(); - - // Modelize values - this.models = this.makeModels(); - - return this; -}; - -/** - * Build up Join Options that will be passed down to a Model instance. - * - * @api private - */ - -Joins.prototype.modelOptions = function modelOptions() { - - var self = this; - var joins; - - // Build Model Options, determines what associations to render in toObject - this.options = { - showJoins: !!this.joins - }; - - // If no joins were used, just return - if (!this.joins) return; - - // Map out join names to pass down to the model instance - joins = this.joins.filter(function(join) { - - // If the value is not being selected, don't add it to the array - if (!join.select) return false; - - return join; - }); - - // Map out join key names and attach to the options object. - // For normal assoiciations, use the child table name that is being joined. For many-to-many - // associations the child table name won't work so grab the alias used and use that for the - // join name. It will be the one that is transformed. - this.options.joins = joins.map(function(join) { - var child = []; - // If a junctionTable was not used, return the child table - if (!join.junctionTable) return join.child; - - // Find the original alias for the join - self.joins.forEach(function(j) { - if (j.child !== join.parent) return; - child.push(j.alias); - }); - - // If a child was found, return it otherwise just return the original child join - if (child) return child; - return join.child; - }); - - // Flatten joins - this.options.joins = _.uniq(_.flatten(this.options.joins)); -}; - -/** - * Transform Values into instantiated Models. 
- * - * @return {Array} - * @api private - */ - -Joins.prototype.makeModels = function makeModels() { - - var self = this; - var models = []; - var model; - - // If values are invalid (not an array), return them early. - if (!this.values || !this.values.forEach) return this.values; - - // Make each result an instance of model - this.values.forEach(function(value) { - model = self.modelize(value); - models.push(model); - }); - - return models; -}; - -/** - * Handle a single Result and inspect it's values for anything - * that needs to become a Model instance. - * - * @param {Object} value - * @return {Object} - * @api private - */ - -Joins.prototype.modelize = function modelize(value) { - var self = this; - - // Look at each key in the object and see if it was used in a join - Object.keys(value).forEach(function(key) { - - var joinKey = false; - var attr, - usedInJoin; - - // If showJoins wasn't set or no joins were found there is nothing to modelize - if (!self.options.showJoins || !self.options.joins) return; - - // Look at the schema for an attribute and check if it's a foreign key - // or a virtual hasMany collection attribute - - // Check if there is a transformation on this attribute - var transformer = self.collections[self.identity]._transformer._transformations; - if (hop(transformer, key)) { - attr = self.schema[transformer[key]]; - } else { - attr = self.schema[key]; - } - - // If an attribute was found but it's not a model, this means it's a normal - // key/value attribute and not an association so there is no need to modelize it. - if (attr && !attr.hasOwnProperty('model')) return; - - // If the attribute has a `model` property, the joinKey is the collection of the model - if (attr && attr.hasOwnProperty('model')) joinKey = attr.model; - - // If the attribute is a foreign key but it was not populated, just leave the foreign key - // as it is and don't try and modelize it. 
- if (joinKey && self.options.joins.indexOf(joinKey) < 0) return; - - // Check if the key was used in a join - usedInJoin = self.checkForJoin(key); - - // If the attribute wasn't used in the join, don't turn it into a model instance. - // NOTE: Not sure if this is correct or not? - if (!usedInJoin.used) return; - - // If the attribute is an array of child values, for each one make a model out of it. - if (Array.isArray(value[key])) { - - var records = []; - - value[key].forEach(function(val) { - var collection, - model; - - // If there is a joinKey this means it's a belongsTo association so the collection - // containing the proper model will be the name of the joinKey model. - if (joinKey) { - collection = self.collections[joinKey]; - val = collection._transformer.unserialize(val); - model = new collection._model(val, { showJoins: false }); - return records.push(model); - } - - // Otherwise look at the join used and determine which key should be used to get - // the proper model from the collections. - collection = self.collections[usedInJoin.join.child]; - val = collection._transformer.unserialize(val); - model = new collection._model(val, { showJoins: false }); - return records.push(model); - }); - - // Set the value to the array of model values - value[key] = records; - return; - } - - // If the value isn't an array it's a populated foreign key so modelize it and attach - // it directly on the attribute - collection = self.collections[joinKey]; - value[key] = collection._transformer.unserialize(value[key]); - value[key] = new collection._model(value[key], { showJoins: false }); - }); - - return value; -}; - -/** - * Test if an attribute was used in a join. - * Requires generating a key to test against an attribute because the model process - * will be run before any transformations have taken place. 
- * - * @param {String} key - * @return {Object} - * @api private - */ - -Joins.prototype.checkForJoin = function checkForJoin(key) { - - var generatedKey; - var usedInJoin = false; - var relatedJoin; - - // Loop through each join and see if the given key matches a join used - this.joins.forEach(function(join) { - if (join.alias !== key) return; - usedInJoin = true; - relatedJoin = join; - }); - - return { used: usedInJoin, join: relatedJoin }; -}; diff --git a/lib/waterline/query/finders/operations.js b/lib/waterline/query/finders/operations.js deleted file mode 100644 index c5cb8803f..000000000 --- a/lib/waterline/query/finders/operations.js +++ /dev/null @@ -1,724 +0,0 @@ - -/** - * Module Dependencies - */ - -var _ = require('lodash'); -var async = require('async'); -var utils = require('../../utils/helpers'); -var normalize = require('../../utils/normalize'); -var hasOwnProperty = utils.object.hasOwnProperty; - -/** - * Builds up a set of operations to perform based on search criteria. - * - * This allows the ability to do cross-adapter joins as well as fake joins - * on adapters that haven't implemented the join interface yet. 
- */ - -var Operations = module.exports = function(context, criteria, parent, metaContainer) { - - // Build up a cache - this.cache = {}; - - // Set context - this.context = context; - - // Set criteria - this.criteria = criteria; - - // Set parent - this.parent = parent; - - this.metaContainer = metaContainer; - - // Hold a default value for pre-combined results (native joins) - this.preCombined = false; - - // Seed the Cache - this._seedCache(); - - // Build Up Operations - this.operations = this._buildOperations(); - - return this; -}; - - -/* - *********************************************************************************** - * PUBLIC METHODS - ***********************************************************************************/ - - -/** - * Run Operations - * - * Execute a set of generated operations returning an array of results that can - * joined in-memory to build out a valid results set. - * - * @param {Function} cb - * @api public - */ - -Operations.prototype.run = function run(cb) { - - var self = this; - - // Grab the parent operation, it will always be the very first operation - var parentOp = this.operations.shift(); - - // Run The Parent Operation - this._runOperation(parentOp.collection, parentOp.method, parentOp.criteria, function(err, results) { - - if (err) return cb(err); - - // Set the cache values - self.cache[parentOp.collection] = results; - - // If results are empty, or we're already combined, nothing else to so do return - if (!results || self.preCombined) return cb(null, { combined: true, cache: self.cache }); - - // Run child operations and populate the cache - self._execChildOpts(results, function(err) { - if (err) return cb(err); - cb(null, { combined: self.preCombined, cache: self.cache }); - }); - - }); - -}; - - -/* - *********************************************************************************** - * PRIVATE METHODS - ***********************************************************************************/ - - -/** - * Seed Cache 
with empty values. - * - * For each Waterline Collection set an empty array of values into the cache. - * - * @api private - */ - -Operations.prototype._seedCache = function _seedCache() { - var self = this; - - // Fill the cache with empty values for each collection - Object.keys(this.context.waterline.schema).forEach(function(key) { - self.cache[key] = []; - }); -}; - -/** - * Build up the operations needed to perform the query based on criteria. - * - * @return {Array} - * @api private - */ - -Operations.prototype._buildOperations = function _buildOperations() { - var operations = []; - - // Check if joins were used, if not only a single operation is needed on a single connection - if (!hasOwnProperty(this.criteria, 'joins')) { - - // Grab the collection - var collection = this.context.waterline.collections[this.context.identity]; - - // Find the name of the connection to run the query on using the dictionary - var connectionName = collection.adapterDictionary[this.parent]; - if (!connectionName) connectionName = collection.adapterDictionary.find; - - operations.push({ - connection: connectionName, - collection: this.context.identity, - method: this.parent, - criteria: this.criteria - }); - - return operations; - } - - // Joins were used in this operation. Lets grab the connections needed for these queries. It may - // only be a single connection in a simple case or it could be multiple connections in some cases. - var connections = this._getConnections(); - - // Now that all the connections are created, build up operations needed to accomplish the end - // goal of getting all the results no matter which connection they are on. To do this, - // figure out if a connection supports joins and if so pass down a criteria object containing - // join instructions. If joins are not supported by a connection, build a series of operations - // to achieve the end result. 
- operations = this._stageOperations(connections); - - return operations; -}; - -/** - * Stage Operation Sets - * - * @param {Object} connections - * @api private - */ - -Operations.prototype._stageOperations = function _stageOperations(connections) { - - var self = this; - var operations = []; - - // Build the parent operation and set it as the first operation in the array - operations = operations.concat(this._createParentOperation(connections)); - - // Parent Connection Name - var parentConnection = this.context.adapterDictionary[this.parent]; - - // Parent Operation - var parentOperation = operations[0]; - - // For each additional connection build operations - Object.keys(connections).forEach(function(connection) { - - // Ignore the connection used for the parent operation if a join can be used on it. - // This means all of the operations for the query can take place on a single connection - // using a single query. - if (connection === parentConnection && parentOperation.method === 'join') return; - - // Operations are needed that will be run after the parent operation has been completed. - // If there are more than a single join, set the parent join and build up children operations. - // This occurs in a many-to-many relationship when a join table is needed. - - // Criteria is omitted until after the parent operation has been run so that an IN query can - // be formed on child operations. 
- - var localOpts = []; - - connections[connection].joins.forEach(function(join, idx) { - - var optCollection = self.context.waterline.collections[join.child]; - var optConnectionName = optCollection.adapterDictionary['find']; - - var operation = { - connection: optConnectionName, - collection: join.child, - method: 'find', - join: join - }; - - // If this is the first join, it can't have any parents - if (idx === 0) { - localOpts.push(operation); - return; - } - - // Look into the previous operations and see if this is a child of any of them - var child = false; - localOpts.forEach(function(localOpt) { - if (localOpt.join.child !== join.parent) return; - localOpt.child = operation; - child = true; - }); - - if (child) return; - localOpts.push(operation); - }); - - operations = operations.concat(localOpts); - }); - - return operations; -}; - -/** - * Create The Parent Operation - * - * @param {Object} connections - * @return {Object} - * @api private - */ - -Operations.prototype._createParentOperation = function _createParentOperation(connections) { - - var nativeJoin = this.context.adapter.hasJoin(); - var operation, - connectionName, - connection; - - // If the parent supports native joins, check if all the joins on the connection can be - // run on the same connection and if so just send the entire criteria down to the connection. - if (nativeJoin) { - - connectionName = this.context.adapterDictionary.join; - connection = connections[connectionName]; - - // Hold any joins that can't be run natively on this connection - var unsupportedJoins = false; - - // Pull out any unsupported joins - connection.joins.forEach(function(join) { - if (connection.collections.indexOf(join.child) > -1) return; - unsupportedJoins = true; - }); - - // If all the joins were supported then go ahead and build an operation. 
- if (!unsupportedJoins) { - operation = [{ - connection: connectionName, - collection: this.context.identity, - method: 'join', - criteria: this.criteria - }]; - - // Set the preCombined flag - this.preCombined = true; - - return operation; - } - } - - // Remove the joins from the criteria object, this will be an in-memory join - var tmpCriteria = _.cloneDeep(this.criteria); - delete tmpCriteria.joins; - connectionName = this.context.adapterDictionary[this.parent]; - - // If findOne was used, use the same connection `find` is on. - if (this.parent === 'findOne' && !connectionName) { - connectionName = this.context.adapterDictionary.find; - } - - connection = connections[connectionName]; - - operation = [{ - connection: connectionName, - collection: this.context.identity, - method: this.parent, - criteria: tmpCriteria - }]; - - return operation; -}; - - -/** - * Get the connections used in this query and the join logic for each piece. - * - * @return {Object} - * @api private - */ - -Operations.prototype._getConnections = function _getConnections() { - - var self = this; - var connections = {}; - - // Default structure for connection objects - var defaultConnection = { - collections: [], - children: [], - joins: [] - }; - - // For each join build a connection item to build up an entire collection/connection registry - // for this query. Using this, queries should be able to be seperated into discrete queries - // which can be run on connections in parallel. 
- this.criteria.joins.forEach(function(join) { - var connection; - var parentConnection; - var childConnection; - - function getConnection(collName) { - var collection = self.context.waterline.collections[collName]; - var connectionName = collection.adapterDictionary['find']; - connections[connectionName] = connections[connectionName] || _.cloneDeep(defaultConnection); - return connections[connectionName]; - } - - // If this join is a junctionTable, find the parent operation and add it to that connections - // children instead of creating a new operation on another connection. This allows cross-connection - // many-to-many joins to be used where the join relies on the results of the parent operation - // being run first. - - if (join.junctionTable) { - - // Find the previous join - var parentJoin = _.find(self.criteria.joins, function(otherJoin) { - return otherJoin.child == join.parent; - }); - - // Grab the parent join connection - var parentJoinConnection = getConnection(parentJoin.parent); - - // Find the connection the parent and child collections belongs to - parentConnection = getConnection(join.parent); - childConnection = getConnection(join.child); - - // Update the registry - parentConnection.collections.push(join.parent); - childConnection.collections.push(join.child); - parentConnection.children.push(join.parent); - - // Ensure the arrays are made up only of unique values - parentConnection.collections = _.uniq(parentConnection.collections); - childConnection.collections = _.uniq(childConnection.collections); - parentConnection.children = _.uniq(parentConnection.children); - - // Add the join to the correct joins array. We want it to be on the same - // connection as the operation before so the timing is correct. 
- parentJoinConnection.joins = parentJoinConnection.joins.concat(join); - - // Build up the connection registry like normal - } else { - parentConnection = getConnection(join.parent); - childConnection = getConnection(join.child); - - parentConnection.collections.push(join.parent); - childConnection.collections.push(join.child); - parentConnection.joins = parentConnection.joins.concat(join); - } - - }); - return connections; -}; - - -/** - * Run An Operation - * - * Performs an operation and runs a supplied callback. - * - * @param {Object} collectionName - * @param {String} method - * @param {Object} criteria - * @param {Function} cb - * - * @api private - */ - -Operations.prototype._runOperation = function _runOperation(collectionName, method, criteria, cb) { - - // Ensure the collection exist - if (!hasOwnProperty(this.context.waterline.collections, collectionName)) { - return cb(new Error('Invalid Collection specfied in operation.')); - } - - // Find the connection object to run the operation - var collection = this.context.waterline.collections[collectionName]; - - // Run the operation - collection.adapter[method](criteria, cb, this.metaContainer); - -}; - -/** - * Execute Child Operations - * - * If joins are used and an adapter doesn't support them, there will be child operations that will - * need to be run. Parse each child operation and run them along with any tree joins and return - * an array of children results that can be combined with the parent results. - * - * @param {Array} parentResults - * @param {Function} cb - */ - -Operations.prototype._execChildOpts = function _execChildOpts(parentResults, cb) { - - var self = this; - - // Build up a set of child operations that will need to be run - // based on the results returned from the parent operation. 
- this._buildChildOpts(parentResults, function(err, opts) { - if (err) return cb(err); - - // Run the generated operations in parallel - async.each(opts, function(item, next) { - self._collectChildResults(item, next); - }, cb); - }); - -}; - -/** - * Build Child Operations - * - * Using the results of a parent operation, build up a set of operations that contain criteria - * based on what is returned from a parent operation. These can be arrays containing more than - * one operation for each child, which will happen when "join tables" would be used. - * - * Each set should be able to be run in parallel. - * - * @param {Array} parentResults - * @param {Function} cb - * @return {Array} - * @api private - */ - -Operations.prototype._buildChildOpts = function _buildChildOpts(parentResults, cb) { - - var self = this; - var opts = []; - - // Build up operations that can be run in parallel using the results of the parent operation - async.each(this.operations, function(item, next) { - - var localOpts = []; - var parents = []; - var idx = 0; - - // Go through all the parent records and build up an array of keys to look in. This - // will be used in an IN query to grab all the records needed for the "join". 
- parentResults.forEach(function(result) { - - if (!hasOwnProperty(result, item.join.parentKey)) return; - if (result[item.join.parentKey] === null || typeof result[item.join.parentKey] === undefined) return; - parents.push(result[item.join.parentKey]); - - }); - - // If no parents match the join criteria, don't build up an operation - if (parents.length === 0) return next(); - - // Build up criteria that will be used inside an IN query - var criteria = {}; - criteria[item.join.childKey] = parents; - - var _tmpCriteria = {}; - - // Check if the join contains any criteria - if (item.join.criteria) { - var userCriteria = _.cloneDeep(item.join.criteria); - _tmpCriteria = _.cloneDeep(userCriteria); - _tmpCriteria = normalize.criteria(_tmpCriteria); - - // Ensure `where` criteria is properly formatted - if (hasOwnProperty(userCriteria, 'where')) { - if (userCriteria.where === undefined) { - delete userCriteria.where; - } else { - - // If an array of primary keys was passed in, normalize the criteria - if (Array.isArray(userCriteria.where)) { - var pk = self.context.waterline.collections[item.join.child].primaryKey; - var obj = {}; - obj[pk] = _.clone(userCriteria.where); - userCriteria.where = obj; - } - } - } - - - criteria = _.merge(userCriteria, { where: criteria }); - } - - // Normalize criteria - criteria = normalize.criteria(criteria); - - // If criteria contains a skip or limit option, an operation will be needed for each parent. - if (hasOwnProperty(_tmpCriteria, 'skip') || hasOwnProperty(_tmpCriteria, 'limit')) { - parents.forEach(function(parent) { - - var tmpCriteria = _.cloneDeep(criteria); - tmpCriteria.where[item.join.childKey] = parent; - - // Mixin the user defined skip and limit - if (hasOwnProperty(_tmpCriteria, 'skip')) tmpCriteria.skip = _tmpCriteria.skip; - if (hasOwnProperty(_tmpCriteria, 'limit')) tmpCriteria.limit = _tmpCriteria.limit; - - // Build a simple operation to run with criteria from the parent results. 
- // Give it an ID so that children operations can reference it if needed. - localOpts.push({ - id: idx, - collection: item.collection, - method: item.method, - criteria: tmpCriteria, - join: item.join - }); - - }); - } else { - - // Build a simple operation to run with criteria from the parent results. - // Give it an ID so that children operations can reference it if needed. - localOpts.push({ - id: idx, - collection: item.collection, - method: item.method, - criteria: criteria, - join: item.join - }); - - } - - // If there are child records, add the opt but don't add the criteria - if (!item.child) { - opts.push(localOpts); - return next(); - } - - localOpts.push({ - collection: item.child.collection, - method: item.child.method, - parent: idx, - join: item.child.join - }); - - // Add the local opt to the opts array - opts.push(localOpts); - - next(); - }, function(err) { - cb(err, opts); - }); -}; - -/** - * Collect Child Operation Results - * - * Run a set of child operations and return the results in a namespaced array - * that can later be used to do an in-memory join. - * - * @param {Array} opts - * @param {Function} cb - * @api private - */ - -Operations.prototype._collectChildResults = function _collectChildResults(opts, cb) { - - var self = this; - var intermediateResults = []; - var i = 0; - - if (!opts || opts.length === 0) return cb(null, {}); - - // Run the operations and any child operations in series so that each can access the - // results of the previous operation. 
- async.eachSeries(opts, function(opt, next) { - self._runChildOperations(intermediateResults, opt, function(err, values) { - if (err) return next(err); - - // If there are multiple operations and we are on the first one lets put the results - // into an intermediate results array - if (opts.length > 1 && i === 0) { - intermediateResults = intermediateResults.concat(values); - } - - // Add values to the cache key - self.cache[opt.collection] = self.cache[opt.collection] || []; - self.cache[opt.collection] = self.cache[opt.collection].concat(values); - - // Ensure the values are unique - var pk = self._findCollectionPK(opt.collection); - self.cache[opt.collection] = _.uniq(self.cache[opt.collection], pk); - - i++; - next(); - }); - }, cb); - -}; - -/** - * Run A Child Operation - * - * Executes a child operation and appends the results as a namespaced object to the - * main operation results object. - * - * @param {Object} optResults - * @param {Object} opt - * @param {Function} callback - * @api private - */ - -Operations.prototype._runChildOperations = function _runChildOperations(intermediateResults, opt, cb) { - var self = this; - - // Check if value has a parent, if so a join table was used and we need to build up dictionary - // values that can be used to join the parent and the children together. 
- - // If the operation doesn't have a parent operation run it - if (!hasOwnProperty(opt, 'parent')) { - return self._runOperation(opt.collection, opt.method, opt.criteria, function(err, values) { - if (err) return cb(err); - cb(null, values); - }); - } - - // If the operation has a parent, look into the optResults and build up a criteria - // object using the results of a previous operation - var parents = []; - - // Normalize to array - var res = _.cloneDeep(intermediateResults); - - // Build criteria that can be used with an `in` query - res.forEach(function(result) { - parents.push(result[opt.join.parentKey]); - }); - - var criteria = {}; - criteria[opt.join.childKey] = parents; - - // Check if the join contains any criteria - if (opt.join.criteria) { - var userCriteria = _.cloneDeep(opt.join.criteria); - - // Ensure `where` criteria is properly formatted - if (hasOwnProperty(userCriteria, 'where')) { - if (userCriteria.where === undefined) { - delete userCriteria.where; - } - } - - delete userCriteria.sort; - delete userCriteria.skip; - delete userCriteria.limit; - - criteria = _.merge({}, userCriteria, { where: criteria }); - } - - criteria = normalize.criteria(criteria); - - // Empty the cache for the join table so we can only add values used - var cacheCopy = _.cloneDeep(self.cache[opt.join.parent]); - self.cache[opt.join.parent] = []; - - self._runOperation(opt.collection, opt.method, criteria, function(err, values) { - if (err) return cb(err); - - // Build up the new join table result - values.forEach(function(val) { - cacheCopy.forEach(function(copy) { - if (copy[opt.join.parentKey] === val[opt.join.childKey]) self.cache[opt.join.parent].push(copy); - }); - }); - - // Ensure the values are unique - var pk = self._findCollectionPK(opt.join.parent); - self.cache[opt.join.parent] = _.uniq(self.cache[opt.join.parent], pk); - - cb(null, values); - }); -}; - -/** - * Find A Collection's Primary Key - * - * @param {String} collectionName - * @api private - * 
@return {String} - */ - -Operations.prototype._findCollectionPK = function _findCollectionPK(collectionName) { - var pk; - - for (var attribute in this.context.waterline.collections[collectionName]._attributes) { - var attr = this.context.waterline.collections[collectionName]._attributes[attribute]; - if (hasOwnProperty(attr, 'primaryKey') && attr.primaryKey) { - pk = attr.columnName || attribute; - break; - } - } - - return pk || null; -}; diff --git a/lib/waterline/query/index.js b/lib/waterline/query/index.js deleted file mode 100644 index ed0ef2231..000000000 --- a/lib/waterline/query/index.js +++ /dev/null @@ -1,92 +0,0 @@ -/** - * Dependencies - */ - -var _ = require('lodash'); -var extend = require('../utils/extend'); -var AdapterBase = require('../adapter'); -var utils = require('../utils/helpers'); -var AdapterMixin = require('./adapters'); -var hop = utils.object.hasOwnProperty; - -/** - * Query - */ - -var Query = module.exports = function() { - - // Create a reference to an internal Adapter Base - this.adapter = new AdapterBase({ - connections: this.connections, - query: this, - collection: this.tableName || this.identity, - identity: this.identity, - dictionary: this.adapterDictionary - }); - - // Mixin Custom Adapter Functions. - AdapterMixin.call(this); - - // Generate Dynamic Finders - this.buildDynamicFinders(); -}; - - -/** - * Automigrate - * - * @param {Function} cb - */ -Query.prototype.sync = function(cb) { - var self = this; - - // If any adapters used in this collection have syncable turned off set migrate to safe. - // - // I don't think a collection would ever need two adapters where one needs migrations and - // the other doesn't but it may be a possibility. The way the auto-migrations work now doesn't - // allow for this either way so this should be good. We will probably need to revist this soonish - // however and take a pass at getting something working for better migration systems. 
- // - particlebanana - - _.keys(this.connections).forEach(function(connectionName) { - var adapter = self.connections[connectionName]._adapter; - - // If not syncable, don't sync - if (hop(adapter, 'syncable') && !adapter.syncable) { - self.migrate = 'safe'; - } - }); - - // Assign synchronization behavior depending on migrate option in collection - if (this.migrate && ['drop', 'alter', 'create', 'safe'].indexOf(this.migrate) > -1) { - - // Determine which sync strategy to use - var strategyMethodName = 'migrate' + utils.capitalize(this.migrate); - - // Run automigration strategy - this.adapter[strategyMethodName](function(err) { - if (err) return cb(err); - cb(); - }); - } - - // Throw Error - else cb(new Error('Invalid `migrate` strategy defined for collection. Must be one of the following: drop, alter, create, safe')); -}; - - -_.extend( - Query.prototype, - require('./validate'), - require('./ddl'), - require('./dql'), - require('./aggregate'), - require('./composite'), - require('./finders/basic'), - require('./finders/helpers'), - require('./finders/dynamicFinders'), - require('./stream') -); - -// Make Extendable -Query.extend = extend; diff --git a/lib/waterline/query/integrator/JOIN_INSTRUCTIONS.md b/lib/waterline/query/integrator/JOIN_INSTRUCTIONS.md deleted file mode 100644 index 17c00ca9a..000000000 --- a/lib/waterline/query/integrator/JOIN_INSTRUCTIONS.md +++ /dev/null @@ -1,37 +0,0 @@ - - -## Join Syntax - -```javascript -// A join instruction object -{ - - // The attributes to pluck from results. - // - // By default, should include all attributes of child collection, e.g. - // `populate('friends')` might result in: - // [ 'name', 'email', 'age', 'favoriteColor', 'id' ] - // - // Or it can be explicitly specified, e.g. - // `populate('friends', { select: ['name', 'favoriteColor'] } ))` might result in: - select: ['name', 'favoriteColor'], - - // join subcriteria-- (e.g. 
populate('friends', { age: { '>' : 40 } } )) - // this should be handled by the individual queries themselves - where: { age: { '>' : 40 } }, - - // limit, skip, and sort are expected to be handled by the individual queries themselves - // other options-- - // e.g. populate('friends', {limit: 30, skip: 0, sort: 'name ASC' }) - limit: 30, - skip: 0, - sort: 'name ASC' - - // Existing alias, parent/child key and table name data: - alias: 'friends', // the `alias`/ name of association-- (e.g. populate('friends') ) - parent: 'message', // left table name - parentKey: 'id', // left table PK -OR- left table FK -> right table - child: 'message_to_user', // right table name - childKey: 'message_id' // right table PK -OR- right table's FK -> left table -} -``` diff --git a/lib/waterline/query/integrator/_join.js b/lib/waterline/query/integrator/_join.js deleted file mode 100644 index f7a9694c1..000000000 --- a/lib/waterline/query/integrator/_join.js +++ /dev/null @@ -1,105 +0,0 @@ -/** - * Module dependencies - */ -var anchor = require('anchor'); -var _ = require('lodash'); -var partialJoin = require('./_partialJoin'); - - -/** - * _join - * - * @api private - * - * Helper method- can perform and inner -OR- outer join. 
- * - * @option {String|Boolean} outer [whether to do an outer join, and if so the direction ("left"|"right")] - * @option {Array} parent [rows from the "lefthand table"] - * @option {Array} child [rows from the "righthand table"] - * @option {String} parentKey [primary key of the "lefthand table"] - * @option {String} childKey [foreign key from the "righthand table" to the "lefthand table"] - * @option {String} childNamespace [string prepended to child attribute keys (default='.')] - * - * @return {Array} new joined row data - * - * @throws {Error} on invalid input - * - * @synchronous - */ -module.exports = function _join(options) { - - - // Usage - var invalid = false; - invalid = invalid || anchor(options).to({ - type: 'object' - }); - - // Tolerate `right` and `left` usage - _.defaults(options, { - parent: options.left, - child: options.right, - parentKey: options.leftKey, - childKey: options.rightKey, - childNamespace: options.childNamespace || '.' - }); - - invalid = invalid || anchor(options.parent).to({ - type: 'array' - }); - invalid = invalid || anchor(options.child).to({ - type: 'array' - }); - invalid = invalid || anchor(options.parentKey).to({ - type: 'string' - }); - invalid = invalid || anchor(options.childKey).to({ - type: 'string' - }); - - invalid = invalid || (options.outer === 'right' ? - new Error('Right joins not supported yet.') : false); - - if (invalid) throw invalid; - - - var resultSet = _.reduce(options.parent, function eachParentRow(memo, parentRow) { - - // For each childRow whose childKey matches - // this parentRow's parentKey... 
- var foundMatch = _.reduce(options.child, function eachChildRow(hasFoundMatchYet, childRow) { - - var newRow = partialJoin({ - parentRow: parentRow, - childRow: childRow, - parentKey: options.parentKey, - childKey: options.childKey, - childNamespace: options.childNamespace - }); - - // console.log('PARENT ROW: ', parentRow); - // console.log('CHILD ROW: ', childRow); - // console.log('JOIN ROW: ', newRow); - - // Save the new row for the join result if it exists - // and mark the match as found - if (newRow) { - memo.push(newRow); - return true; - } - return hasFoundMatchYet; - }, false); - - // If this is a left outer join and we didn't find a match - // for this parentRow, add it to the result set anyways - if (!foundMatch && options.outer === 'left') { - memo.push(_.cloneDeep(parentRow)); - } - - return memo; - }, []); - - // console.log('JOIN RESULT SET::', resultSet); - return resultSet; - -}; diff --git a/lib/waterline/query/integrator/_partialJoin.js b/lib/waterline/query/integrator/_partialJoin.js deleted file mode 100644 index d8a875b77..000000000 --- a/lib/waterline/query/integrator/_partialJoin.js +++ /dev/null @@ -1,91 +0,0 @@ -/** - * Module dependencies - */ -var assert = require('assert'); -var _ = require('lodash'); - - -/** - * _partialJoin - * - * @api private - * - * Check whether two rows match on the specified keys, - * and if they do, merge `parentRow` into a copy of `childRow` - * and return it (omit `childRow`'s key, since it === `parentRow`'s). - * - * Hypothetically, this function could be operated by a stream, - * but in the case of a left outer join, at least, the final - * result set cannot be accurately known until both the complete - * contents of both the `left` and `right` data set have been checked. 
- * - * An optimization from polynomial to logarithmic computational - * complexity could potentially be achieved by taking advantage - * of the known L[k..l] and R[m..n] values as each new L[i] or R[j] - * arrives from a stream, but a comparably-sized cache would have to - * be maintained, so we'd still be stuck with polynomial memory usage. - * i.e. O( |R|*|L| ) This could be resolved by batching-- e.g. grab the - * first 3000 parent and child rows, join matches together, discard - * the unneeded data, and repeat. - * - * Anyways, worth investigating, since this is a hot code path for - * cross-adapter joins. - * - * - * Usage: - * - * partialJoin({ - * parentRow: { id: 5, name: 'Lucy', email: 'lucy@fakemail.org' } - * childRow: { owner_id: 5, name: 'Rover', breed: 'Australian Shepherd' } - * parentKey: 'id' - * childKey: 'owner_id', - * childNamespace: '.' - * }) - * - * @param {Object} options - * @return {Object|False} If false, don't save the join row. - * @synchronous - */ -module.exports = function partialJoin(options) { - - // Usage - var invalid = false; - invalid = invalid || !_.isObject(options); - invalid = invalid || !_.isString(options.parentKey); - invalid = invalid || !_.isString(options.childKey); - invalid = invalid || !_.isObject(options.parentRow); - invalid = invalid || !_.isObject(options.childRow); - assert(!invalid); - - var CHILD_ATTR_PREFIX = (options.childNamespace || '.'); - - // If the rows aren't a match, bail out - if ( - options.childRow[options.childKey] !== - options.parentRow[options.parentKey] - ) { - return false; - } - - // deep clone the childRow, then delete `childKey` in the copy. 
- var newJoinRow = _.cloneDeep(options.childRow); - // console.log('deleting childKEy :: ',options.childKey); - // var _childKeyValue = newJoinRow[options.childKey]; - // delete newJoinRow[options.childKey]; - - // namespace the remaining attributes in childRow - var namespacedJoinRow = {}; - _.each(newJoinRow, function(value, key) { - var namespacedKey = CHILD_ATTR_PREFIX + key; - namespacedJoinRow[namespacedKey] = value; - }); - - - // Merge namespaced values from current parentRow into the copy. - _.merge(namespacedJoinRow, options.parentRow); - - - // Return the newly joined row. - return namespacedJoinRow; -}; - diff --git a/lib/waterline/query/integrator/index.js b/lib/waterline/query/integrator/index.js deleted file mode 100644 index 40dddffb1..000000000 --- a/lib/waterline/query/integrator/index.js +++ /dev/null @@ -1,208 +0,0 @@ -/** - * Module dependencies - */ -var anchor = require('anchor'); -var _ = require('lodash'); -var leftOuterJoin = require('./leftOuterJoin'); -var innerJoin = require('./innerJoin'); -var populate = require('./populate'); - - -/** - * Query Integrator - * - * Combines the results from multiple child queries into - * the final return format using an in-memory join. - * Final step in fulfilling a `.find()` with one or more - * `populate(alias[n])` modifiers. - * - * > Why is this asynchronous? - * > - * > While this function isn't doing anything strictly - * > asynchronous, it still expects a callback to enable - * > future use of `process[setImmediate|nextTick]()` as - * > an optimization. 
- * - * @param {Object} cache - * @param {Array} joinInstructions - see JOIN_INSTRUCTIONS.md - * @callback {Function} cb(err, results) - * @param {Error} - * @param {Array} [results, complete w/ populations] - * - * @throws {Error} on invalid input - * @asynchronous - */ -module.exports = function integrate(cache, joinInstructions, primaryKey, cb) { - - // Ensure valid usage - var invalid = false; - invalid = invalid || anchor(cache).to({ type: 'object' }); - invalid = invalid || anchor(joinInstructions).to({ type: 'array' }); - invalid = invalid || anchor(joinInstructions[0]).to({ type: 'object' }); - invalid = invalid || anchor(joinInstructions[0].parent).to({ type: 'string' }); - invalid = invalid || anchor(cache[joinInstructions[0].parent]).to({ type: 'object' }); - invalid = invalid || typeof primaryKey !== 'string'; - invalid = invalid || typeof cb !== 'function'; - if (invalid) return cb(invalid); - - - // Constant: String prepended to child attribute keys for use in namespacing. - var CHILD_ATTR_PREFIX = '.'; - var GRANDCHILD_ATTR_PREFIX = '..'; - - - // We'll reuse the cached data from the `parent` table modifying it in-place - // and returning it as our result set. (`results`) - var results = cache[ joinInstructions[0].parent ]; - - // Group the joinInstructions array by alias, then interate over each one - // s.t. `instructions` in our lambda function contains a list of join instructions - // for the particular `populate` on the specified key (i.e. alias). - // - // Below, `results` are mutated inline. 
- _.each(_.groupBy(joinInstructions, 'alias'), - function eachAssociation(instructions, alias) { - - var parentPK, fkToParent, fkToChild, childPK; - - // N..N Association - if (instructions.length === 2) { - - // Name keys explicitly - // (makes it easier to see what's going on) - parentPK = instructions[0].parentKey; - fkToParent = instructions[0].childKey; - fkToChild = instructions[1].parentKey; - childPK = instructions[1].childKey; - - // console.log('\n\n------------:: n..m leftOuterJoin ::--------\n', - // leftOuterJoin({ - // left: cache[instructions[0].parent], - // right: cache[instructions[0].child], - // leftKey: parentPK, - // rightKey: fkToParent - // }) - // ); - // console.log('------------:: / ::--------\n'); - - // console.log('\n\n------------:: n..m childRows ::--------\n',innerJoin({ - // left: leftOuterJoin({ - // left: cache[instructions[0].parent], - // right: cache[instructions[0].child], - // leftKey: parentPK, - // rightKey: fkToParent - // }), - // right: cache[instructions[1].child], - // leftKey: CHILD_ATTR_PREFIX+fkToChild, - // rightKey: childPK, - // childNamespace: GRANDCHILD_ATTR_PREFIX - // })); - // console.log('------------:: / ::--------\n'); - - // Calculate and sanitize join data, - // then shove it into the parent results under `alias` - populate({ - parentRows: results, - alias: alias, - - childRows: innerJoin({ - left: leftOuterJoin({ - left: cache[instructions[0].parent], - right: cache[instructions[0].child], - leftKey: parentPK, - rightKey: fkToParent - }), - right: cache[instructions[1].child], - leftKey: CHILD_ATTR_PREFIX + fkToChild, - rightKey: childPK, - childNamespace: GRANDCHILD_ATTR_PREFIX - }), - - parentPK: parentPK, // e.g. `id` (of message) - fkToChild: CHILD_ATTR_PREFIX + fkToChild, // e.g. `user_id` (of join table) - childPK: GRANDCHILD_ATTR_PREFIX + childPK, // e.g. 
`id` (of user) - - childNamespace: GRANDCHILD_ATTR_PREFIX - }); - - // 1 ..N Association - } else if (instructions.length === 1) { - - // Name keys explicitly - // (makes it easier to see what's going on) - parentPK = primaryKey; - fkToParent = parentPK; - fkToChild = instructions[0].parentKey; - childPK = instructions[0].childKey; - - // Determine if this is a "hasOne" or a "belongsToMany" - // if the parent's primary key is the same as the fkToChild, it must be belongsToMany - if (parentPK === fkToChild) { - // In belongsToMany case, fkToChild needs prefix because it's actually the - // console.log('belongsToMany'); - fkToChild = CHILD_ATTR_PREFIX + fkToChild; - // "hasOne" case - } else { - // console.log('hasOne'); - } - - // var childRows = innerJoin({ - // left: cache[instructions[0].parent], - // right: cache[instructions[0].child], - // leftKey: instructions[0].parentKey, - // rightKey: instructions[0].childKey - // }); - - // console.log('1..N JOIN--------------\n',instructions,'\n^^^^^^^^^^^^^^^^^^^^^^'); - // console.log('1..N KEYS--------------\n',{ - // parentPK: parentPK, - // fkToParent: fkToParent, - // fkToChild: fkToChild, - // childPK: childPK, - // },'\n^^^^^^^^^^^^^^^^^^^^^^'); - // console.log('1..N CHILD ROWS--------\n',childRows); - - // Calculate and sanitize join data, - // then shove it into the parent results under `alias` - populate({ - parentRows: results, - alias: alias, - - childRows: innerJoin({ - left: cache[instructions[0].parent], - right: cache[instructions[0].child], - leftKey: instructions[0].parentKey, - rightKey: instructions[0].childKey - }), - - parentPK: fkToParent, // e.g. `id` (of message) - fkToChild: fkToChild, // e.g. `from` - childPK: childPK, // e.g. `id` (of user) - - childNamespace: CHILD_ATTR_PREFIX - }); - - // If the alias isn't the same as the parent_key AND removeParentKey is set to true - // in the instructions this means that we are turning a FK into an embedded record and a - // columnName was used. 
We need to move the values attached to the alias property to - // the parent key value. If we don't then when we run the transformer everything would get crazy. - if (alias !== instructions[0].parentKey && instructions[0].removeParentKey === true) { - - results = _.map(results, function(result) { - result[instructions[0].parentKey] = result[alias]; - delete result[alias]; - return result; - }); - - } - } - - } - ); - - - // And call the callback - // (the final joined data is in the cache -- also referenced by `results`) - return cb(null, results); - -}; diff --git a/lib/waterline/query/integrator/innerJoin.js b/lib/waterline/query/integrator/innerJoin.js deleted file mode 100644 index 405293c61..000000000 --- a/lib/waterline/query/integrator/innerJoin.js +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Module dependencies - */ -var join = require('./_join'); - - -/** - * Inner join - * - * Return a result set with data from child and parent - * merged on childKey===parentKey, where t.e. exactly one - * entry for each match. - * - * @option {Array} parent [rows from the "lefthand table"] - * @option {Array} child [rows from the "righthand table"] - * @option {String} parentKey [primary key of the "lefthand table"] - * @option {String} childKey [foreign key from the "righthand table" to the "lefthand table"] - * @return {Array} [a new array of joined row data] - * - * @throws {Error} on invalid input - * @synchronous - */ -module.exports = function leftOuterJoin(options) { - options.outer = false; - return join(options); -}; diff --git a/lib/waterline/query/integrator/leftOuterJoin.js b/lib/waterline/query/integrator/leftOuterJoin.js deleted file mode 100644 index aa2065fe1..000000000 --- a/lib/waterline/query/integrator/leftOuterJoin.js +++ /dev/null @@ -1,26 +0,0 @@ -/** - * Module dependencies - */ -var join = require('./_join'); - - -/** - * Left outer join - * - * Return a result set with data from child and parent - * merged on childKey===parentKey, where t.e. 
at least one - * entry for each row of parent (unmatched columns in child are null). - * - * @option {Array} parent [rows from the "lefthand table"] - * @option {Array} child [rows from the "righthand table"] - * @option {String} parentKey [primary key of the "lefthand table"] - * @option {String} childKey [foreign key from the "righthand table" to the "lefthand table"] - * @return {Array} [a new array of joined row data] - * - * @throws {Error} on invalid input - * @synchronous - */ -module.exports = function leftOuterJoin(options) { - options.outer = 'left'; - return join(options); -}; diff --git a/lib/waterline/query/integrator/populate.js b/lib/waterline/query/integrator/populate.js deleted file mode 100644 index 93f6212ff..000000000 --- a/lib/waterline/query/integrator/populate.js +++ /dev/null @@ -1,109 +0,0 @@ -/** - * Module dependencies - */ -var _ = require('lodash'); - - -/** - * populate() - * - * Destructive mapping of `parentRows` to include a new key, `alias`, - * which is an ordered array of child rows. 
- * - * @option [{Object}] parentRows - the parent rows the joined rows will be folded into - * @option {String} alias - the alias of the association - * @option [{Object}] childRows - the unfolded result set from the joins - * - * @option {String} parentPK - the primary key of the parent table (optional- only needed for M..N associations) - * @option {String} fkToChild - the foreign key associating a row with the child table - * @option {String} childPK - the primary key of the child table - * - * @option [{String}] childNamespace- attributes to keep - * - * @return {*Object} reference to `parentRows` - */ -module.exports = function populate(options) { - - var parentRows = options.parentRows; - var alias = options.alias; - var childRows = options.childRows; - - var parentPK = options.parentPK; - var childPK = options.childPK; - var fkToChild = options.fkToChild; - var fkToParent = parentPK;// At least for all use cases currently, `fkToParent` <=> `parentPK` - - var childNamespace = options.childNamespace || ''; - - return _.map(parentRows, function _insertJoinedResults(parentRow) { - - // Gather the subset of child rows associated with the current parent row - var associatedChildRows = _.where(childRows, - // { (parentPK): (parentRow[(parentPK)]) }, e.g. { id: 3 } - _cons(fkToParent, parentRow[parentPK]) - ); - - // Clone the `associatedChildRows` to avoid mutating the original - // `childRows` in the cache. - associatedChildRows = _.cloneDeep(associatedChildRows); - - // Stuff the sanitized associated child rows into the parent row. - parentRow[alias] = - _.reduce(associatedChildRows, function(memo, childRow) { - - // Ignore child rows without an appropriate foreign key - // to an instance in the REAL child collection. 
- if (!childRow[childNamespace + childPK] && !childRow[childPK]) return memo; - - // Rename childRow's [fkToChild] key to [childPK] - // (so that it will have the proper primary key attribute for its collection) - var childPKValue = childRow[fkToChild]; - childRow[childPK] = childPKValue; - - // Determine if we have any double nested attributes. - // These would come from m:m joins - var doubleNested = _.find(childRow, function(name, key) { - return _.startsWith(key, '..'); - }); - - // Grab all the keys that start with a dot or double dot depending on - // the status of doubleNested - childRow = _.pick(childRow, function(name, key) { - if (doubleNested) { - return _.startsWith(key, '..'); - } else { - return _.startsWith(key, '.'); - } - }); - - var _origChildRow = childRow; - - // Strip off childNamespace prefix - childRow = {}; - var PREFIX_REGEXP = new RegExp('^' + childNamespace + ''); - _.each(_origChildRow, function(attrValue, attrName) { - var unprefixedKey = attrName.replace(PREFIX_REGEXP, ''); - childRow[unprefixedKey] = attrValue; - }); - - // Build the set of rows to stuff into our parent row. - memo.push(childRow); - return memo; - }, []); - - return parentRow; - }); -}; - - -/** - * Dumb little helper because I hate naming anonymous objects just to use them once. 
- * - * @return {Object} [a tuple] - * @api private - */ -function _cons(key, value) { - var obj = {}; - obj[key] = value; - return obj; -} diff --git a/lib/waterline/query/stream.js b/lib/waterline/query/stream.js deleted file mode 100644 index c069fd4d5..000000000 --- a/lib/waterline/query/stream.js +++ /dev/null @@ -1,48 +0,0 @@ -/** - * Streaming Queries - */ - -var usageError = require('../utils/usageError'); -var utils = require('../utils/helpers'); -var normalize = require('../utils/normalize'); -var ModelStream = require('../utils/stream'); - -module.exports = { - - /** - * Stream a Result Set - * - * @param {Object} criteria - * @param {Object} transformation, defaults to JSON - */ - - stream: function(criteria, transformation, metaContainer) { - var self = this; - - var usage = utils.capitalize(this.identity) + '.stream([criteria],[options])'; - - // Normalize criteria and fold in options - criteria = normalize.criteria(criteria); - - // Transform Search Criteria - criteria = self._transformer.serialize(criteria); - - // Configure stream to adapter, kick off fetch, and return stream object - // so that user code can use it as it fires data events - var stream = new ModelStream(transformation); - - // very important to wait until next tick before triggering adapter - // otherwise write() and end() won't fire properly - process.nextTick(function() { - - // Write once immediately to force prefix in case no models are returned - stream.write(); - - // Trigger Adapter Method - self.adapter.stream(criteria, stream, metaContainer); - }); - - return stream; - } - -}; diff --git a/lib/waterline/query/validate.js b/lib/waterline/query/validate.js deleted file mode 100644 index f15c202e4..000000000 --- a/lib/waterline/query/validate.js +++ /dev/null @@ -1,84 +0,0 @@ -/** - * Validation - * - * Used in create and update methods validate a model - * Can also be used independently - */ - -var _ = require('lodash'); -var WLValidationError = 
require('../error/WLValidationError'); -var async = require('async'); - -module.exports = { - - validate: function(values, presentOnly, cb) { - var self = this; - - // Handle optional second arg - if (typeof presentOnly === 'function') { - cb = presentOnly; - presentOnly = false; - } - - async.series([ - - // Run Before Validate Lifecycle Callbacks - function(cb) { - var runner = function(item, callback) { - item.call(self, values, function(err) { - if (err) return callback(err); - callback(); - }); - }; - - async.eachSeries(self._callbacks.beforeValidate, runner, function(err) { - if (err) return cb(err); - cb(); - }); - }, - - // Run Validation - function(cb) { - self._validator.validate(values, presentOnly, function _afterValidating(err, invalidAttributes) { - // If fatal error occurred, handle it accordingly. - if (err) { - return cb(err); - } - - // Otherwise, check out the invalid attributes that were sent back. - // - // Create validation error here - // (pass in the invalid attributes as well as the collection's globalId) - if (invalidAttributes) { - return cb(new WLValidationError({ - invalidAttributes: invalidAttributes, - model: self.globalId || self.adapter.identity - })); - } - - cb(); - }); - }, - - // Run After Validate Lifecycle Callbacks - function(cb) { - var runner = function(item, callback) { - item(values, function(err) { - if (err) return callback(err); - callback(); - }); - }; - - async.eachSeries(self._callbacks.afterValidate, runner, function(err) { - if (err) return cb(err); - cb(); - }); - } - - ], function(err) { - if (err) return cb(err); - cb(); - }); - } - -}; diff --git a/lib/waterline/utils/acyclicTraversal.js b/lib/waterline/utils/acyclicTraversal.js deleted file mode 100644 index 2a0d39ce2..000000000 --- a/lib/waterline/utils/acyclicTraversal.js +++ /dev/null @@ -1,110 +0,0 @@ -/** - * Module dependencies - */ - -var _ = require('lodash'); - - -/** - * Traverse the schema to build a populate plan object - * that will populate 
every relation, sub-relation, and so on - * reachable from the initial model and relation at least once - * (perhaps most notable is that this provides access to most - * related data without getting caught in loops.) - * - * @param {[type]} schema [description] - * @param {[type]} initialModel [description] - * @param {[type]} initialRelation [description] - * @return {[type]} [description] - */ -module.exports = function acyclicTraversal(schema, initialModel, initialRelation) { - - // Track the edges which have already been traversed - var alreadyTraversed = [ - // { - // relation: initialRelation, - // model: initialModel - // } - ]; - - return traverseSchemaGraph(initialModel, initialRelation); - - /** - * Recursive function - * @param {[type]} modelIdentity [description] - * @param {[type]} nameOfRelation [description] - * @return {[type]} [description] - */ - function traverseSchemaGraph(modelIdentity, nameOfRelation) { - - var currentModel = schema[modelIdentity]; - var currentAttributes = currentModel.attributes; - - var isRedundant; - - // If this relation has already been traversed, return. - // (i.e. `schema.attributes.modelIdentity.nameOfRelation`) - isRedundant = _.findWhere(alreadyTraversed, { - alias: nameOfRelation, - model: modelIdentity - }); - - if (isRedundant) return; - - // Push this relation onto the `alreadyTraversed` stack. - alreadyTraversed.push({ - alias: nameOfRelation, - model: modelIdentity - }); - - - var relation = currentAttributes[nameOfRelation]; - if (!relation) throw new Error('Unknown relation in schema: ' + modelIdentity + '.' + nameOfRelation); - var identityOfRelatedModel = relation.model || relation.collection; - - // Get the related model - var relatedModel = schema[identityOfRelatedModel]; - - // If this relation is a collection with a `via` back-reference, - // push it on to the `alreadyTraversed` stack. 
- // (because the information therein is probably redundant) - // TODO: evaluate this-- it may or may not be a good idea - // (but I think it's a nice touch) - if (relation.via) { - alreadyTraversed.push({ - alias: relation.via, - model: identityOfRelatedModel - }); - } - - // Lookup ALL the relations OF THE RELATED model. - var relations = - _(relatedModel.attributes).reduce(function buildSubsetOfAssociations(relations, attrDef, attrName) { - if (_.isObject(attrDef) && (attrDef.model || attrDef.collection)) { - relations.push(_.merge({ - alias: attrName, - identity: attrDef.model || attrDef.collection, - cardinality: attrDef.model ? 'model' : 'collection' - }, attrDef)); - return relations; - } - return relations; - }, []); - - // Return a piece of the result plan by calling `traverseSchemaGraph` - // on each of the RELATED model's relations. - return _.reduce(relations, function(resultPlanPart, relation) { - - // Recursive step - resultPlanPart[relation.alias] = traverseSchemaGraph(identityOfRelatedModel, relation.alias); - - // Trim undefined result plan parts - if (resultPlanPart[relation.alias] === undefined) { - delete resultPlanPart[relation.alias]; - } - - return resultPlanPart; - }, {}); - } - -}; diff --git a/lib/waterline/utils/callbacks.js b/lib/waterline/utils/callbacks.js deleted file mode 100644 index 0f8377c65..000000000 --- a/lib/waterline/utils/callbacks.js +++ /dev/null @@ -1,14 +0,0 @@ -/** - * Lifecycle Callbacks Allowed - */ - -module.exports = [ - 'beforeValidate', - 'afterValidate', - 'beforeUpdate', - 'afterUpdate', - 'beforeCreate', - 'afterCreate', - 'beforeDestroy', - 'afterDestroy' -]; diff --git a/lib/waterline/utils/callbacksRunner.js b/lib/waterline/utils/callbacksRunner.js deleted file mode 100644 index 296e67c95..000000000 --- a/lib/waterline/utils/callbacksRunner.js +++ /dev/null @@ -1,140 +0,0 @@ -/** - * Module Dependencies - */ - -var async = require('async'); - -/** - * Run Lifecycle Callbacks - */ - -var runner = 
module.exports = {}; - - -/** - * Run Validation Callbacks - * - * @param {Object} context - * @param {Object} values - * @param {Boolean} presentOnly - * @param {Function} cb - * @api public - */ - -runner.validate = function(context, values, presentOnly, cb) { - context.validate(values, presentOnly, cb); -}; - - -/** - * Run Before Create Callbacks - * - * @param {Object} context - * @param {Object} values - * @param {Function} cb - * @api public - */ - -runner.beforeCreate = function(context, values, cb) { - - var fn = function(item, next) { - item.call(context, values, next); - }; - - async.eachSeries(context._callbacks.beforeCreate, fn, cb); -}; - - -/** - * Run After Create Callbacks - * - * @param {Object} context - * @param {Object} values - * @param {Function} cb - * @api public - */ - -runner.afterCreate = function(context, values, cb) { - - var fn = function(item, next) { - item.call(context, values, next); - }; - - async.eachSeries(context._callbacks.afterCreate, fn, cb); -}; - - -/** - * Run Before Update Callbacks - * - * @param {Object} context - * @param {Object} values - * @param {Function} cb - * @api public - */ - -runner.beforeUpdate = function(context, values, cb) { - - var fn = function(item, next) { - item.call(context, values, next); - }; - - async.eachSeries(context._callbacks.beforeUpdate, fn, cb); -}; - - -/** - * Run After Update Callbacks - * - * @param {Object} context - * @param {Object} values - * @param {Function} cb - * @api public - */ - -runner.afterUpdate = function(context, values, cb) { - - var fn = function(item, next) { - item.call(context, values, next); - }; - - async.eachSeries(context._callbacks.afterUpdate, fn, cb); -}; - - -/** - * Run Before Destroy Callbacks - * - * @param {Object} context - * @param {Object} criteria - * @param {Function} cb - * @api public - */ - -runner.beforeDestroy = function(context, criteria, cb) { - - var fn = function(item, next) { - item.call(context, criteria, next); - }; - - 
async.eachSeries(context._callbacks.beforeDestroy, fn, cb); -}; - - -/** - * Run After Destroy Callbacks - * - * @param {Object} context - * @param {Object} values - * @param {Function} cb - * @api public - */ - -runner.afterDestroy = function(context, values, cb) { - - var fn = function(item, next) { - item.call(context, values, next); - }; - - async.eachSeries(context._callbacks.afterDestroy, fn, cb); -}; diff --git a/lib/waterline/utils/defer.js b/lib/waterline/utils/defer.js deleted file mode 100644 index feab09367..000000000 --- a/lib/waterline/utils/defer.js +++ /dev/null @@ -1,16 +0,0 @@ -var Promise = require('bluebird'); - -module.exports = function defer() { - var resolve, reject; - - var promise = new Promise(function() { - resolve = arguments[0]; - reject = arguments[1]; - }); - - return { - resolve: resolve, - reject: reject, - promise: promise - }; -}; diff --git a/lib/waterline/utils/extend.js b/lib/waterline/utils/extend.js deleted file mode 100644 index dde786bc8..000000000 --- a/lib/waterline/utils/extend.js +++ /dev/null @@ -1,31 +0,0 @@ -/** - * Extend Method - * - * Taken from Backbone Source: - * http://backbonejs.org/docs/backbone.html#section-189 - */ - -var _ = require('lodash'); - -module.exports = function(protoProps, staticProps) { - var parent = this; - var child; - - if (protoProps && _.has(protoProps, 'constructor')) { - child = protoProps.constructor; - } else { - child = function() { return parent.apply(this, arguments); }; - } - - _.extend(child, parent, staticProps); - - var Surrogate = function() { this.constructor = child; }; - Surrogate.prototype = parent.prototype; - child.prototype = new Surrogate(); - - if (protoProps) _.extend(child.prototype, protoProps); - - child.__super__ = parent.prototype; - - return child; -}; diff --git a/lib/waterline/utils/getRelations.js b/lib/waterline/utils/getRelations.js deleted file mode 100644 index 404d2f92a..000000000 --- a/lib/waterline/utils/getRelations.js +++ /dev/null @@ -1,29 +0,0 
@@ -/** - * getRelations - * - * Find any `junctionTables` that reference the parent collection. - * - * @param {[type]} options [description] - * @option parentCollection - * @option schema - * @return {[type]} [relations] - */ - -module.exports = function getRelations(options) { - - var schema = options.schema; - var relations = []; - - Object.keys(schema).forEach(function(collection) { - var collectionSchema = schema[collection]; - if (!collectionSchema.hasOwnProperty('junctionTable')) return; - - Object.keys(collectionSchema.attributes).forEach(function(key) { - if (!collectionSchema.attributes[key].hasOwnProperty('foreignKey')) return; - if (collectionSchema.attributes[key].references !== options.parentCollection) return; - relations.push(collection); - }); - }); - - return relations; -}; diff --git a/lib/waterline/utils/helpers.js b/lib/waterline/utils/helpers.js deleted file mode 100644 index e2bd4fa71..000000000 --- a/lib/waterline/utils/helpers.js +++ /dev/null @@ -1,89 +0,0 @@ - -/** - * Module Dependencies - */ - -var _ = require('lodash'); - -/** - * Equivalent to _.objMap, _.map for objects, keeps key/value associations - * - * Should be deprecated. - * - * @api public - */ -exports.objMap = function objMap(input, mapper, context) { - return _.reduce(input, function(obj, v, k) { - obj[k] = mapper.call(context, v, k, input); - return obj; - }, {}, context); -}; - -/** - * Run a method meant for a single object on a object OR array - * For an object, run the method and return the result. - * For a list, run the method on each item return the resulting array. - * For anything else, return it silently. - * - * Should be deprecated. 
- * - * @api public - */ - -exports.pluralize = function pluralize(collection, application) { - if (Array.isArray(collection)) return _.map(collection, application); - if (_.isObject(collection)) return application(collection); - return collection; -}; - -/** - * _.str.capitalize - * - * @param {String} str - * @return {String} - * @api public - */ - -exports.capitalize = function capitalize(str) { - str = str === null ? '' : String(str); - return str.charAt(0).toUpperCase() + str.slice(1); -}; - -/** - * ignore - */ - -exports.object = {}; - -/** - * Safer helper for hasOwnProperty checks - * - * @param {Object} obj - * @param {String} prop - * @return {Boolean} - * @api public - */ - -var hop = Object.prototype.hasOwnProperty; -exports.object.hasOwnProperty = function(obj, prop) { - if (obj === null || obj === undefined) return false; - return hop.call(obj, prop); -}; - -/** - * Check if an ID resembles a Mongo BSON ID. - * Can't use the `hop` helper above because BSON ID's will have their own hasOwnProperty value. - * - * @param {String} id - * @return {Boolean} - * @api public - */ - -exports.matchMongoId = function matchMongoId(id) { - // id must be truthy- and either BE a string, or be an object - // with a toString method. - if (!id || - !(_.isString(id) || (_.isObject(id) || _.isFunction(id.toString))) - ) return false; - else return /^[a-fA-F0-9]{24}$/.test(id.toString()); -}; diff --git a/lib/waterline/utils/nestedOperations/create.js b/lib/waterline/utils/nestedOperations/create.js deleted file mode 100644 index b00af5388..000000000 --- a/lib/waterline/utils/nestedOperations/create.js +++ /dev/null @@ -1,62 +0,0 @@ -/** - * Module Dependencies - */ - -var _ = require('lodash'); -var hasOwnProperty = require('../helpers').object.hasOwnProperty; - -/** - * Queue up .add() operations on a model instance for any nested association - * values in a .create() query. 
- * - * @param {Object} parentModel - * @param {Object} values - * @param {Object} associations - * @param {Function} cb - * @api private - */ - -module.exports = function(parentModel, values, associations, cb) { - var self = this; - - // For each association, grab the primary key value and normalize into model.add methods - associations.forEach(function(association) { - var attribute = self.waterline.schema[self.identity].attributes[association]; - var modelName; - - if (hasOwnProperty(attribute, 'collection')) modelName = attribute.collection; - - if (!modelName) return; - - // Grab the relation's PK - var related = self.waterline.collections[modelName]; - var relatedPK = _.find(related.attributes, { primaryKey: true }); - - // Get the attribute's name - var pk = self.waterline.collections[modelName].primaryKey; - - var optValues = values[association]; - if (!optValues) return; - if (!_.isArray(optValues)) { - optValues = _.isString(optValues) ? optValues.split(',') : [optValues]; - } - optValues.forEach(function(val) { - - // If value is not an object, queue up an add - if (!_.isPlainObject(val)) return parentModel[association].add(val); - - // If value is an object, check if a primary key is defined - // If a custom PK was used and it's not autoIncrementing and the record - // is being created then go ahead and don't reduce it. This allows nested - // creates to work when custom PK's are used. - if (relatedPK.autoIncrement && related.autoPK && hasOwnProperty(val, pk)) { - return parentModel[association].add(val[pk]); - } else { - parentModel[association].add(val); - } - }); - }); - - // Save the parent model - parentModel.save(cb); -}; diff --git a/lib/waterline/utils/nestedOperations/index.js b/lib/waterline/utils/nestedOperations/index.js deleted file mode 100644 index af36fff4f..000000000 --- a/lib/waterline/utils/nestedOperations/index.js +++ /dev/null @@ -1,10 +0,0 @@ -/** - * Handlers for parsing nested associations within create/update values. 
- */ - -module.exports = { - reduceAssociations: require('./reduceAssociations'), - valuesParser: require('./valuesParser'), - create: require('./create'), - update: require('./update') -}; diff --git a/lib/waterline/utils/nestedOperations/reduceAssociations.js b/lib/waterline/utils/nestedOperations/reduceAssociations.js deleted file mode 100644 index c2a8210db..000000000 --- a/lib/waterline/utils/nestedOperations/reduceAssociations.js +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Module Dependencies - */ - -var hop = require('../helpers').object.hasOwnProperty; -var _ = require('lodash'); -var assert = require('assert'); -var util = require('util'); - -/** - * Traverse an object representing values replace associated objects with their - * foreign keys. - * - * @param {String} model - * @param {Object} schema - * @param {Object} values - * @return {Object} - * @api private - */ - - -module.exports = function(model, schema, values, method) { - var self = this; - - Object.keys(values).forEach(function(key) { - - // Check to see if this key is a foreign key - var attribute = schema[model].attributes[key]; - - // If not a plainObject, check if this is a model instance and has a toObject method - if (!_.isPlainObject(values[key])) { - if (_.isObject(values[key]) && !Array.isArray(values[key]) && values[key].toObject && typeof values[key].toObject === 'function') { - values[key] = values[key].toObject(); - } else { - return; - } - } - // Check that this user-specified value is not NULL - if (values[key] === null) return; - - // Check that this user-specified value actually exists - // as an attribute in `model`'s schema. 
- // If it doesn't- just ignore it - if (typeof attribute !== 'object') return; - - if (!hop(values[key], attribute.on)) return; - - // Look and see if the related model has a custom primary key AND that - // the intended method is "create" - var related = self.waterline.collections[attribute.references]; - var relatedPK = _.find(related.attributes, { primaryKey: true }); - - // If a custom PK was used and it's not autoIncrementing and the record - // is being created then go ahead and don't reduce it. This allows nested - // creates to work when custom PK's are used. - if (!relatedPK.autoIncrement && !related.autoPK && method && (method == 'create' || method == 'update')) { - return; - } - - // Otherwise reduce the association like normal - var fk = values[key][attribute.on]; - values[key] = fk; - - }); - - return values; -}; diff --git a/lib/waterline/utils/nestedOperations/update.js b/lib/waterline/utils/nestedOperations/update.js deleted file mode 100644 index a2266d20e..000000000 --- a/lib/waterline/utils/nestedOperations/update.js +++ /dev/null @@ -1,556 +0,0 @@ -/** - * Module Dependencies - */ - -var _ = require('lodash'); -var async = require('async'); -var hop = require('../helpers').object.hasOwnProperty; - - -/** - * Update nested associations. Will take a values object and perform updating and - * creating of all the nested associations. It's the same as syncing so it will first - * remove any associations related to the parent and then "sync" the new associations. 
- * - * @param {Array} parents - * @param {Object} values - * @param {Object} associations - * @param {Function} cb - */ - -module.exports = function(parents, values, associations, cb) { - - var self = this; - - // Combine model and collection associations - associations = associations.collections.concat(associations.models); - - // Build up .add and .update operations for each association - var operations = buildOperations.call(self, parents, associations, values); - - // Now that our operations are built, lets go through and run any updates. - // Then for each parent, find all the current associations and remove them then add - // all the new associations in using .add() - sync.call(self, parents, operations, cb); - -}; - - -/** - * Build Up Operations (add and update) - * - * @param {Array} associations - * @param {Object} values - * @return {Object} - */ - -function buildOperations(parents, associations, values) { - - var self = this; - var operations = {}; - - // For each association, grab the primary key value and normalize into model.add methods - associations.forEach(function(association) { - - var optValues = values[association]; - - // If values are being nulled out just return. This is used when removing foreign - // keys on the parent model. - if (optValues === null) return; - - // Pull out any association values that have primary keys, these will need to be updated. All - // values can be added for each parent however. - operations[association] = { - add: [], - update: [] - }; - - // Normalize optValues to an array - if (!Array.isArray(optValues)) optValues = [optValues]; - queueOperations.call(self, parents, association, operations[association], optValues); - }); - - return operations; -} - -/** - * Queue Up Operations. - * - * Takes the array normalized association values and queues up - * operations for the specific association. 
- * - * @param {String} association - * @param {Object} operation - * @param {Array} values - */ - -function queueOperations(parents, association, operation, values) { - - var self = this; - var attribute = self.waterline.schema[self.identity].attributes[association]; - var modelName; - - if (hop(attribute, 'collection')) modelName = attribute.collection; - if (hop(attribute, 'foreignKey')) modelName = attribute.references; - if (!modelName) return; - - var collection = self.waterline.collections[modelName]; - - // Grab the relation's PK - var relatedPK = _.find(collection.attributes, { primaryKey: true }); - var relatedPkName = collection.primaryKey; - - // If this is a join table, we can just queue up operations on the parent - // for this association. - if (collection.junctionTable) { - - // For each parent, queue up any .add() operations - parents.forEach(function(parent) { - values.forEach(function(val) { - if (!hop(parent, association)) return; - if (typeof parent[association].add !== 'function') return; - parent[association].add(val); - }); - }); - - return; - } - - values.forEach(function(val) { - - // Check the values and see if the model's primary key is given. If so look into - // the schema attribute and check if this is a collection or model attribute. If it's - // a collection attribute lets update the child record and if it's a model attribute, - // update the child and set the parent's foreign key value to the new primary key. - // - // If a custom PK was used and it's not autoIncrementing add the record. This - // allows nested creates to work when custom PK's are used. 
- if (!relatedPK.autoIncrement && !collection.autoPK) { - operation.add.push(val); - return; - } - - // If it's missing a PK queue up an add - if (!hop(val, relatedPkName)) { - operation.add.push(val); - return; - } - - // Build up the criteria that will be used to update the child record - var criteria = {}; - criteria[relatedPkName] = val[relatedPkName]; - - // Queue up the update operation - operation.update.push({ model: modelName, criteria: criteria, values: val }); - - // Check if the parents foreign key needs to be updated - if (!hop(attribute, 'foreignKey')) { - operation.add.push(val[relatedPkName]); - return; - } - - // Set the new foreign key value for each parent - parents.forEach(function(parent) { - parent[association] = val[relatedPkName]; - }); - - }); -} - -/** - * Sync Associated Data - * - * Using the operations, lets go through and run any updates on any nested object with - * primary keys. This ensures that all the data passed up is persisted. Then for each parent, - * find all the current associations and unlink them and then add all the new associations - * in using .add(). This ensures that whatever is passed in to an update is what the value will - * be when queried again. 
- * - * @param {Object} operations - * @param {Function} cb - */ - -function sync(parents, operations, cb) { - var self = this; - - async.auto({ - - // Update any nested associations - update: function(next) { - updateRunner.call(self, parents, operations, next); - }, - - // For each parent, unlink all the associations currently set - unlink: ['update', function(next) { - unlinkRunner.call(self, parents, operations, next); - }], - - // For each parent found, link any associations passed in by either creating - // the new record or linking an existing record - link: ['unlink', function(next) { - linkRunner.call(self, parents, operations, next); - }] - - }, cb); -} - - -//////////////////////////////////////////////////////////////////////////////////////// -// .sync() - Async Auto Runners -//////////////////////////////////////////////////////////////////////////////////////// - - -/** - * Run Update Operations. - * - * Uses the information stored in an operation to perform a .update() on the - * associated model using the new values. - * - * @param {Object} operation - * @param {Function} cb - */ - -function updateRunner(parents, operations, cb) { - - var self = this; - - // There will be an array of update operations inside of a namespace. Use this to run - // an update on the model instance of the association. - function associationLoop(association, next) { - async.each(operations[association].update, update, next); - } - - function update(operation, next) { - var model = self.waterline.collections[operation.model]; - model.update(operation.criteria, operation.values).exec(next); - } - - // Operations are namespaced under an association key. So run each association's updates - // in parallel for now. May need to be limited in the future but all adapters should - // support connection pooling. - async.each(Object.keys(operations), associationLoop, cb); - -} - - -/** - * Unlink Associated Records. 
- * - * For each association passed in to the update we are essentially replacing the - * association's value. In order to do this we first need to clear out any associations - * that currently exist. - * - * @param {Object} operations - * @param {Function} cb - */ - -function unlinkRunner(parents, operations, cb) { - - var self = this; - - // Given a parent, build up remove operations and run them. - function unlinkParentAssociations(parent, next) { - var opts = buildParentRemoveOperations.call(self, parent, operations); - removeOperationRunner.call(self, opts, next); - } - - async.each(parents, unlinkParentAssociations, cb); -} - - -/** - * Link Associated Records - * - * Given a set of operations, associate the records with the parent records. This - * can be done by either creating join table records or by setting foreign keys. - * It defaults to a parent.add() method for most situations. - * - * @param {Object} operations - * @param {Function} cb - */ - -function linkRunner(parents, operations, cb) { - - var self = this; - - function linkChildRecords(parent, next) { - - // Queue up `.add()` operations on the parent model and figure out - // which records need to be created. - // - // If an .add() method is available always use it. If this is a nested model an .add() - // method won't be available so queue up a create operation. - var recordsToCreate = buildParentLinkOperations.call(self, parent, operations); - - // Create the new records and update the parent with the new foreign key - // values that may have been set when creating child records. - createNewRecords.call(self, parent, recordsToCreate, function(err) { - if (err) return next(err); - updateParentRecord(parent, cb); - }); - } - - // Update the parent record one last time. This ensures a model attribute (single object) - // on the parent can create a new record and then set the parent's foreign key value to - // the newly created child record's primary key. 
- // - // Example: - // Parent.update({ - // name: 'foo', - // nestedModel: { - // name: 'bar' - // } - // }) - // - // The above query would create the new nested model and then set the parent's nestedModel - // value to the newly created model's primary key. - // - // We then run a .save() to persist any .add() records that may have been used. The update and - // .save() are used instead of a find and then save because it's the same amount of queries - // and it's easier to take advantage of all that the .add() method gives us. - // - // - // TO-DO: - // Make this much smarter to reduce the amount of queries that need to be run. We should probably - // be able to at least cut this in half! - // - function updateParentRecord(parent, next) { - - var criteria = {}; - var model = self.waterline.collections[self.identity]; - - criteria[self.primaryKey] = parent[self.primaryKey]; - var pValues = parent.toObject(); - - model.update(criteria, pValues).exec(function(err) { - if (err) return next(err); - - // Call .save() to persist any .add() functions that may have been used. - parent.save(next); - }); - } - - async.each(parents, linkChildRecords, cb); -} - - -//////////////////////////////////////////////////////////////////////////////////////// -// .sync() - Helper Functions -//////////////////////////////////////////////////////////////////////////////////////// - - -/** - * Build up operations for performing unlinks. - * - * Given a parent and a set of operations, queue up operations to either - * remove join table records or null out any foreign keys on an child model. - * - * @param {Object} parent - * @param {Object} operations - * @return {Array} - */ - -function buildParentRemoveOperations(parent, operations) { - - var self = this; - var opts = []; - - // Inspect the association and see if this relationship has a joinTable. - // If so create an operation criteria that clears all matching records from the - // table. 
If it doesn't have a join table, build an operation criteria that - // nulls out the foreign key on matching records. - Object.keys(operations).forEach(function(association) { - - var criteria = {}; - var searchCriteria = {}; - var attribute = self.waterline.schema[self.identity].attributes[association]; - - ///////////////////////////////////////////////////////////////////////// - // Parent Record: - // If the foreign key is stored on the parent side, null it out - ///////////////////////////////////////////////////////////////////////// - - if (hop(attribute, 'foreignKey')) { - - // Set search criteria where primary key is equal to the parents primary key - searchCriteria[self.primaryKey] = parent[self.primaryKey]; - - // Store any information we may need to build up an operation. - // Use the `nullify` key to show we want to perform an update and not a destroy. - criteria = { - model: self.identity, - criteria: searchCriteria, - keyName: association, - nullify: true - }; - - opts.push(criteria); - return; - } - - ///////////////////////////////////////////////////////////////////////// - // Child Record: - // Lookup the attribute on the other side of the association on in the - // case of a m:m association the child table will be the join table. - ///////////////////////////////////////////////////////////////////////// - - var child = self.waterline.schema[attribute.collection]; - var childAttribute = child.attributes[attribute.onKey]; - - // Set the search criteria to use the collection's `via` key and the parent's primary key. - searchCriteria[attribute.on] = parent[self.primaryKey]; - - // If the childAttribute stores the foreign key, find all children with the - // foreignKey equal to the parent's primary key and null them out or in the case of - // a `junctionTable` flag destroy them. - if (hop(childAttribute, 'foreignKey')) { - - // Store any information needed to perform the query. Set nullify to false if - // a `junctionTable` property is found. 
- criteria = { - model: child.identity, - criteria: searchCriteria, - keyName: attribute.on, - nullify: !hop(child, 'junctionTable') - }; - - - opts.push(criteria); - return; - } - }); - - return opts; -} - - -/** - * Remove Operation Runner - * - * Given a criteria object matching a remove operation, perform the - * operation using waterline collection instances. - * - * @param {Array} operations - * @param {Function} callback - */ - -function removeOperationRunner(operations, cb) { - - var self = this; - - function runner(operation, next) { - var values = {}; - - // If nullify is false, run a destroy method using the criteria to destroy - // the join table records. - if (!operation.nullify) { - self.waterline.collections[operation.model].destroy(operation.criteria).exec(next); - return; - } - - // Run an update operation to set the foreign key to null on all the - // associated child records. - values[operation.keyName] = null; - - self.waterline.collections[operation.model].update(operation.criteria, values).exec(next); - } - - - // Run the operations - async.each(operations, runner, cb); -} - - -/** - * Build up operations for performing links. - * - * Given a parent and a set of operations, queue up operations to associate two - * records together. This could be using the parent's `.add()` method which handles - * the logic for us or building up a `create` operation that we can run to create the - * associated record with the correct foreign key set. - * - * @param {Object} parent - * @param {Object} operations - * @return {Object} - */ - -function buildParentLinkOperations(parent, operations) { - - var recordsToCreate = {}; - - // Determine whether to use the parent association's `.add()` function - // or whether to queue up a create operation. - function determineOperation(association, opt) { - - // Check if the association has an `add` method, if so use it. 
- if (hop(parent[association], 'add')) { - parent[association].add(opt); - return; - } - - recordsToCreate[association] = recordsToCreate[association] || []; - recordsToCreate[association].push(opt); - } - - // For each operation look at all the .add operations and determine - // what to do with them. - Object.keys(operations).forEach(function(association) { - operations[association].add.forEach(function(opt) { - determineOperation(association, opt); - }); - }); - - return recordsToCreate; -} - - -/** - * Create New Records. - * - * Given an object of association records to create, perform a create - * on the child model and set the parent's foreign key to the newly - * created record's primary key. - * - * @param {Object} parent - * @param {Object} recordsToCreate - * @param {Function} cb - */ - -function createNewRecords(parent, recordsToCreate, cb) { - - var self = this; - - // For each association, run the createRecords function - // in the model context. - function mapAssociations(association, next) { - - // First, pull the model attribute's referenced (foreign) collection - var attribute = self.waterline.schema[self.identity].attributes[association]; - var referencedCollection = attribute.references; - - var model = self.waterline.collections[referencedCollection]; - var records = recordsToCreate[association]; - - function createRunner(record, nextRecord) { - var args = [parent, association, record, nextRecord]; - createRecord.apply(model, args); - } - - async.each(records, createRunner, next); - } - - // Create a record and set the parent's foreign key to the - // newly created record's primary key. 
- function createRecord(parent, association, record, next) { - var self = this; - - this.create(record).exec(function(err, val) { - if (err) return next(err); - parent[association] = val[self.primaryKey]; - next(); - }); - } - - - async.each(Object.keys(recordsToCreate), mapAssociations, cb); -} diff --git a/lib/waterline/utils/nestedOperations/valuesParser.js b/lib/waterline/utils/nestedOperations/valuesParser.js deleted file mode 100644 index a500c7abf..000000000 --- a/lib/waterline/utils/nestedOperations/valuesParser.js +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Module Dependencies - */ - -var hasOwnProperty = require('../helpers').object.hasOwnProperty; - -/** - * Traverse an object representing values and map out any associations. - * - * @param {String} model - * @param {Object} schema - * @param {Object} values - * @return {Object} - * @api private - */ - - -module.exports = function(model, schema, values) { - var self = this; - - // Pick out the top level associations - var associations = { - collections: [], - models: [] - }; - - Object.keys(values).forEach(function(key) { - - // Ignore values equal to null - if (values[key] === null) return; - - // Ignore joinTables - if (hasOwnProperty(schema[model], 'junctionTable')) return; - if (!hasOwnProperty(schema[model].attributes, key)) return; - - var attribute = schema[model].attributes[key]; - if (!hasOwnProperty(attribute, 'collection') && !hasOwnProperty(attribute, 'foreignKey')) return; - - if (hasOwnProperty(attribute, 'collection')) associations.collections.push(key); - if (hasOwnProperty(attribute, 'foreignKey')) associations.models.push(key); - - }); - - return associations; -}; diff --git a/lib/waterline/utils/normalize.js b/lib/waterline/utils/normalize.js deleted file mode 100644 index 475644ce0..000000000 --- a/lib/waterline/utils/normalize.js +++ /dev/null @@ -1,436 +0,0 @@ -var _ = require('lodash'); -var util = require('./helpers'); -var hop = util.object.hasOwnProperty; -var switchback = 
require('switchback'); -var errorify = require('../error'); -var WLUsageError = require('../error/WLUsageError'); - -module.exports = { - - // Expand Primary Key criteria into objects - expandPK: function(context, options) { - - // Default to id as primary key - var pk = 'id'; - - // If autoPK is not used, attempt to find a primary key - if (!context.autoPK) { - // Check which attribute is used as primary key - for (var key in context.attributes) { - if (!util.object.hasOwnProperty(context.attributes[key], 'primaryKey')) continue; - - // Check if custom primaryKey value is falsy - if (!context.attributes[key].primaryKey) continue; - - // If a custom primary key is defined, use it - pk = key; - break; - } - } - - // Check if options is an integer or string and normalize criteria - // to object, using the specified primary key field. - if (_.isNumber(options) || _.isString(options) || Array.isArray(options)) { - // Temporary store the given criteria - var pkCriteria = _.clone(options); - - // Make the criteria object, with the primary key - options = {}; - options[pk] = pkCriteria; - } - - // If we're querying by primary key, create a coercion function for it - // depending on the data type of the key - if (options && options[pk]) { - - var coercePK; - if(!context.attributes[pk]) { - return pk; - } - - if (context.attributes[pk].type == 'integer') { - coercePK = function(pk) {return +pk;}; - } else if (context.attributes[pk].type == 'string') { - coercePK = function(pk) {return String(pk).toString();}; - - // If the data type is unspecified, return the key as-is - } else { - coercePK = function(pk) {return pk;}; - } - - // If the criteria is an array of PKs, coerce them all - if (Array.isArray(options[pk])) { - options[pk] = options[pk].map(coercePK); - - // Otherwise just coerce the one - } else { - if (!_.isObject(options[pk])) { - options[pk] = coercePK(options[pk]); - } - } - - } - - return options; - - }, - - // Normalize the different ways of specifying 
criteria into a uniform object - criteria: function(origCriteria) { - var criteria = _.cloneDeep(origCriteria); - - // If original criteria is already false, keep it that way. - if (criteria === false) return criteria; - - if (!criteria) { - return { - where: null - }; - } - - // Let the calling method normalize array criteria. It could be an IN query - // where we need the PK of the collection or a .findOrCreateEach - if (Array.isArray(criteria)) return criteria; - - // Empty undefined values from criteria object - _.each(criteria, function(val, key) { - if (_.isUndefined(val)) criteria[key] = null; - }); - - // Convert non-objects (ids) into a criteria - // TODO: use customizable primary key attribute - if (!_.isObject(criteria)) { - criteria = { - id: +criteria || criteria - }; - } - - if (_.isObject(criteria) && !criteria.where && criteria.where !== null) { - criteria = { where: criteria }; - } - - // Return string to indicate an error - if (!_.isObject(criteria)) throw new WLUsageError('Invalid options/criteria :: ' + criteria); - - // If criteria doesn't seem to contain operational keys, assume all the keys are criteria - if (!criteria.where && !criteria.joins && !criteria.join && !criteria.limit && !criteria.skip && - !criteria.sort && !criteria.sum && !criteria.average && - !criteria.groupBy && !criteria.min && !criteria.max && !criteria.select) { - - // Delete any residuals and then use the remaining keys as attributes in a criteria query - delete criteria.where; - delete criteria.joins; - delete criteria.join; - delete criteria.limit; - delete criteria.skip; - delete criteria.sort; - criteria = { - where: criteria - }; - - // If where is null, turn it into an object - } else if (_.isNull(criteria.where)) criteria.where = {}; - - - // Move Limit, Skip, sort outside the where criteria - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'limit')) { - criteria.limit = parseInt(_.clone(criteria.where.limit), 10); - if (criteria.limit 
< 0) criteria.limit = 0; - delete criteria.where.limit; - } else if (hop(criteria, 'limit')) { - criteria.limit = parseInt(criteria.limit, 10); - if (criteria.limit < 0) criteria.limit = 0; - } - - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'skip')) { - criteria.skip = parseInt(_.clone(criteria.where.skip), 10); - if (criteria.skip < 0) criteria.skip = 0; - delete criteria.where.skip; - } else if (hop(criteria, 'skip')) { - criteria.skip = parseInt(criteria.skip, 10); - if (criteria.skip < 0) criteria.skip = 0; - } - - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'sort')) { - criteria.sort = _.clone(criteria.where.sort); - delete criteria.where.sort; - } - - // Pull out aggregation keys from where key - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'sum')) { - criteria.sum = _.clone(criteria.where.sum); - delete criteria.where.sum; - } - - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'average')) { - criteria.average = _.clone(criteria.where.average); - delete criteria.where.average; - } - - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'groupBy')) { - criteria.groupBy = _.clone(criteria.where.groupBy); - delete criteria.where.groupBy; - } - - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'min')) { - criteria.min = _.clone(criteria.where.min); - delete criteria.where.min; - } - - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'max')) { - criteria.max = _.clone(criteria.where.max); - delete criteria.where.max; - } - - if (hop(criteria, 'where') && criteria.where !== null && hop(criteria.where, 'select') || hop(criteria, 'select')) { - - if(criteria.where.select) { - criteria.select = _.clone(criteria.where.select); - } - - // If the select contains a '*' then remove the whole projection, a '*' - // will always return all records. 
- if(!_.isArray(criteria.select)) { - criteria.select = [criteria.select]; - } - - if(_.includes(criteria.select, '*')) { - delete criteria.select; - } - - delete criteria.where.select; - } - - // If WHERE is {}, always change it back to null - if (criteria.where && _.keys(criteria.where).length === 0) { - criteria.where = null; - } - - // If an IN was specified in the top level query and is an empty array, we can return an - // empty object without running the query because nothing will match anyway. Let's return - // false from here so the query knows to exit out. - if (criteria.where) { - var falsy = false; - Object.keys(criteria.where).forEach(function(key) { - if (Array.isArray(criteria.where[key]) && criteria.where[key].length === 0) { - falsy = true; - } - }); - - if (falsy) return false; - } - - // If an IN was specified inside an OR clause and is an empty array, remove it because nothing will - // match it anyway and it can prevent errors in the adapters - if (criteria.where && hop(criteria.where, 'or')) { - - // Ensure `or` is an array - if (!_.isArray(criteria.where.or)) { - throw new WLUsageError('An `or` clause in a query should be specified as an array of subcriteria'); - } - - var _clone = _.cloneDeep(criteria.where.or); - criteria.where.or.forEach(function(clause, i) { - Object.keys(clause).forEach(function(key) { - if (Array.isArray(clause[key]) && clause[key].length === 0) { - _clone.splice(i, 1); - } - }); - }); - - criteria.where.or = _clone; - } - - // Normalize sort criteria - if (hop(criteria, 'sort') && criteria.sort !== null) { - - // Split string into attr and sortDirection parts (default to 'asc') - if (_.isString(criteria.sort)) { - var parts = criteria.sort.split(' '); - - // Set default sort to asc - parts[1] = parts[1] ? 
parts[1].toLowerCase() : 'asc'; - - // Expand criteria.sort into object - criteria.sort = {}; - criteria.sort[parts[0]] = parts[1]; - } - - // normalize ASC/DESC notation - Object.keys(criteria.sort).forEach(function(attr) { - - if (_.isString(criteria.sort[attr])) { - criteria.sort[attr] = criteria.sort[attr].toLowerCase(); - - // Throw error on invalid sort order - if (criteria.sort[attr] !== 'asc' && criteria.sort[attr] !== 'desc') { - throw new WLUsageError('Invalid sort criteria :: ' + criteria.sort); - } - } - - if (criteria.sort[attr] === 'asc') criteria.sort[attr] = 1; - if (criteria.sort[attr] === 'desc') criteria.sort[attr] = -1; - }); - - // normalize binary sorting criteria - Object.keys(criteria.sort).forEach(function(attr) { - if (criteria.sort[attr] === 0) criteria.sort[attr] = -1; - }); - - // Verify that user either specified a proper object - // or provided explicit comparator function - if (!_.isObject(criteria.sort) && !_.isFunction(criteria.sort)) { - throw new WLUsageError('Invalid sort criteria for ' + attrName + ' :: ' + direction); - } - } - - return criteria; - }, - - // Normalize the capitalization and % wildcards in a like query - // Returns false if criteria is invalid, - // otherwise returns normalized criteria obj. 
- // Enhancer is an optional function to run on each criterion to preprocess the string - likeCriteria: function(criteria, attributes, enhancer) { - - // Only accept criteria as an object - if (criteria !== Object(criteria)) return false; - - criteria = _.clone(criteria); - - if (!criteria.where) criteria = { where: criteria }; - - // Apply enhancer to each - if (enhancer) criteria.where = util.objMap(criteria.where, enhancer); - - criteria.where = { like: criteria.where }; - - return criteria; - }, - - - // Normalize a result set from an adapter - resultSet: function(resultSet) { - - // Ensure that any numbers that can be parsed have been - return util.pluralize(resultSet, numberizeModel); - }, - - - /** - * Normalize the different ways of specifying callbacks in built-in Waterline methods. - * Switchbacks vs. Callbacks (but not deferred objects/promises) - * - * @param {Function|Handlers} cb - * @return {Handlers} - */ - callback: function(cb) { - - // Build modified callback: - // (only works for functions currently) - var wrappedCallback; - if (_.isFunction(cb)) { - wrappedCallback = function(err) { - - // If no error occurred, immediately trigger the original callback - // without messing up the context or arguments: - if (!err) { - return applyInOriginalCtx(cb, arguments); - } - - // If an error argument is present, upgrade it to a WLError - // (if it isn't one already) - err = errorify(err); - - var modifiedArgs = Array.prototype.slice.call(arguments, 1); - modifiedArgs.unshift(err); - - // Trigger callback without messing up the context or arguments: - return applyInOriginalCtx(cb, modifiedArgs); - }; - } - - - // - // TODO: Make it clear that switchback support it experimental. - // - // Push switchback support off until >= v0.11 - // or at least add a warning about it being a `stage 1: experimental` - // feature. 
- // - - if (!_.isFunction(cb)) wrappedCallback = cb; - return switchback(wrappedCallback, { - invalid: 'error', // Redirect 'invalid' handler to 'error' handler - error: function _defaultErrorHandler() { - console.error.apply(console, Array.prototype.slice.call(arguments)); - } - }); - - - // ???? - // TODO: determine support target for 2-way switchback usage - // ???? - - // Allow callback to be -HANDLED- in different ways - // at the app-level. - // `cb` may be passed in (at app-level) as either: - // => an object of handlers - // => or a callback function - // - // If a callback function was provided, it will be - // automatically upgraded to a simplerhandler object. - // var cb_fromApp = switchback(cb); - - // Allow callback to be -INVOKED- in different ways. - // (adapter def) - // var cb_fromAdapter = cb_fromApp; - - } -}; - -// If any attribute looks like a number, but it's a string -// cast it to a number -function numberizeModel(model) { - return util.objMap(model, numberize); -} - - -// If specified attr looks like a number, but it's a string, cast it to a number -function numberize(attr) { - if (_.isString(attr) && isNumbery(attr) && parseInt(attr, 10) < Math.pow(2, 53)) return +attr; - else return attr; -} - -// Returns whether this value can be successfully parsed as a finite number -function isNumbery(value) { - return Math.pow(+value, 2) > 0; -} - -// Replace % with %%% -function escapeLikeQuery(likeCriterion) { - return likeCriterion.replace(/[^%]%[^%]/g, '%%%'); -} - -// Replace %%% with % -function unescapeLikeQuery(likeCriterion) { - return likeCriterion.replace(/%%%/g, '%'); -} - - -/** - * Like _.partial, but accepts an array of arguments instead of - * comma-seperated args (if _.partial is `call`, this is `apply`.) - * The biggest difference from `_.partial`, other than the usage, - * is that this helper actually CALLS the partially applied function. - * - * This helper is mainly useful for callbacks. 
- * - * @param {Function} fn [description] - * @param {[type]} args [description] - * @return {[type]} [description] - */ - -function applyInOriginalCtx(fn, args) { - return (_.partial.apply(null, [fn].concat(Array.prototype.slice.call(args))))(); -} diff --git a/lib/waterline/utils/ontology/README.md b/lib/waterline/utils/ontology/README.md new file mode 100644 index 000000000..2117fc2b4 --- /dev/null +++ b/lib/waterline/utils/ontology/README.md @@ -0,0 +1,3 @@ +# utils/ontology/ + +Utilities for accessing information about the logical state of the ORM. This consists of things like accessors for live WLModels, attribute definitions, etc., and other looker-uppers (e.g. `isCapableOfOptimizedPopulate()`). diff --git a/lib/waterline/utils/ontology/get-attribute.js b/lib/waterline/utils/ontology/get-attribute.js new file mode 100644 index 000000000..b5c39be69 --- /dev/null +++ b/lib/waterline/utils/ontology/get-attribute.js @@ -0,0 +1,169 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var getModel = require('./get-model'); + + +/** + * Module constants + */ + +var KNOWN_ATTR_TYPES = ['string', 'number', 'boolean', 'json', 'ref']; + + +/** + * getAttribute() + * + * Look up an attribute definition (by name) from the specified model. + * Usable with normal attributes AND with associations. + * + * > Note that we do a few quick assertions in the process, purely as sanity checks + * > and to help prevent bugs. If any of these fail, then it means there is some + * > unhandled usage error, or a bug going on elsewhere in Waterline. + * + * ------------------------------------------------------------------------------------------ + * @param {String} attrName + * The name of the attribute (e.g. "id" or "favoriteBrands") + * > Useful for looking up the Waterline model and accessing its attribute definitions. 
+ * + * @param {String} modelIdentity + * The identity of the model this is referring to (e.g. "pet" or "user") + * > Useful for looking up the Waterline model and accessing its attribute definitions. + * + * @param {Ref} orm + * The Waterline ORM instance. + * ------------------------------------------------------------------------------------------ + * @returns {Ref} [the attribute definition (a direct reference to it, so be careful!!)] + * ------------------------------------------------------------------------------------------ + * @throws {Error} If no such model exists. + * E_MODEL_NOT_REGISTERED + * + * @throws {Error} If no such attribute exists. + * E_ATTR_NOT_REGISTERED + * + * @throws {Error} If anything else goes wrong. + * ------------------------------------------------------------------------------------------ + */ + +module.exports = function getAttribute(attrName, modelIdentity, orm) { + + // ================================================================================================ + // Check that the provided `attrName` is valid. + // (`modelIdentity` and `orm` will be automatically checked by calling `getModel()`) + // + // > Note that this attr name MIGHT be empty string -- although it should never be. + // > (we prevent against that elsewhere) + if (!_.isString(attrName)) { + throw new Error('Consistency violation: `attrName` must be a string.'); + } + // ================================================================================================ + + + // Try to look up the Waterline model. + // + // > Note that, in addition to being the model definition, this + // > "WLModel" is actually the hydrated model object (fka a "Waterline collection") + // > which has methods like `find`, `create`, etc. + var WLModel = getModel(modelIdentity, orm); + + // Try to look up the attribute definition. 
+ var attrDef = WLModel.attributes[attrName]; + if (_.isUndefined(attrDef)) { + throw flaverr('E_ATTR_NOT_REGISTERED', new Error('No such attribute (`'+attrName+'`) exists in model (`'+modelIdentity+'`).')); + } + + // ================================================================================================ + // This section consists of more sanity checks for the attribute definition: + + if (!_.isObject(attrDef) || _.isArray(attrDef) || _.isFunction(attrDef)) { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) corresponds with a CORRUPTED attribute definition: '+util.inspect(attrDef, {depth:5})+''); + } + + // Some basic sanity checks that this is a valid model association. + // (note that we don't get too deep here-- though we could) + if (!_.isUndefined(attrDef.model)) { + if(!_.isString(attrDef.model) || attrDef.model === '') { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) has an invalid `model` property. If specified, `model` should be a non-empty string. But instead, got: '+util.inspect(attrDef.model, {depth:5})+''); + } + if (!_.isUndefined(attrDef.via)){ + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) is an association, because it declares a `model`. But with a "model" association, the `via` property should always be undefined. But instead, it is: '+util.inspect(attrDef.via, {depth:5})+''); + } + if (!_.isUndefined(attrDef.dominant)){ + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) is an association, because it declares a `model`. But with a "model" association, the `dominant` property should always be undefined. 
But instead, it is: '+util.inspect(attrDef.dominant, {depth:5})+''); + } + try { + getModel(attrDef.model, orm); + } catch (e){ throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) is an association, because it declares a `model`. But the other model it references (`'+attrDef.model+'`) is missing or invalid. Details: '+e.stack); } + } + // Some basic sanity checks that this is a valid collection association. + // (note that we don't get too deep here-- though we could) + else if (!_.isUndefined(attrDef.collection)) { + if (!_.isString(attrDef.collection) || attrDef.collection === '') { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) has an invalid `collection` property. If specified, `collection` should be a non-empty string. But instead, got: '+util.inspect(attrDef.collection, {depth:5})+''); + } + + var OtherWLModel; + try { + OtherWLModel = getModel(attrDef.collection, orm); + } catch (e){ throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) is an association, because it declares a `collection`. But the other model it references (`'+attrDef.collection+'`) is missing or invalid. Details: '+e.stack); } + + if (!_.isUndefined(attrDef.via)) { + if (!_.isString(attrDef.via) || attrDef.via === '') { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) has an invalid `via` property. If specified, `via` should be a non-empty string. But instead, got: '+util.inspect(attrDef.via, {depth:5})+''); + } + + // Note that we don't call getAttribute recursively. (That would be madness.) + // We also don't check for reciprocity on the other side. + // Instead, we simply do a watered down check. + // > waterline-schema goes much deeper here. + // > Remember, these are just sanity checks for development. 
+ if (!_.isUndefined(attrDef.through)) { + + var ThroughWLModel; + try { + ThroughWLModel = getModel(attrDef.through, orm); + } catch (e){ throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) is a "through" association, because it declares a `through`. But the junction model it references as "through" (`'+attrDef.through+'`) is missing or invalid. Details: '+e.stack); } + + if (!ThroughWLModel.attributes[attrDef.via]) { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) is a "through" association, because it declares a `through`. But the association\'s specified `via` ('+attrDef.via+'`) does not correspond with a recognized attribute on the junction model (`'+attrDef.through+'`)'); + } + if (!ThroughWLModel.attributes[attrDef.via].model) { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) is a "through" association, but its specified `via` ('+attrDef.via+'`) corresponds with an unexpected attribute on the junction model (`'+attrDef.through+'`). The attribute referenced by `via` should be a singular ("model") association, but instead, got: '+util.inspect(ThroughWLModel.attributes[attrDef.via],{depth: 5})+''); + } + + } + else { + + if (!OtherWLModel.attributes[attrDef.via]) { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) is an association, because it declares a `collection`. But that association also specifies a `via` ('+attrDef.via+'`) which does not correspond with a recognized attribute on the other model (`'+attrDef.collection+'`)'); + } + + } + }// + }// + // Otherwise, check that this is a valid, miscellaneous attribute. 
+ else { + if(!_.isString(attrDef.type) || attrDef.type === '') { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) has an invalid `type` property. If specified, `type` should be a non-empty string. But instead, got: '+util.inspect(attrDef.type, {depth:5})+''); + } + if(!_.contains(KNOWN_ATTR_TYPES, attrDef.type)) { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) has an unrecognized `type`: `'+attrDef.type+'`.'); + } + if (!_.isBoolean(attrDef.required)) { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) has an unrecognized `required` property in its definition. By this time, it should always be true or false. But instead, got: '+util.inspect(attrDef.required, {depth:5})+''); + } + if (attrDef.required && !_.isUndefined(attrDef.defaultsTo)) { + throw new Error('Consistency violation: The referenced attribute (`'+attrName+'`, from model `'+modelIdentity+'`) has `required: true`, but it also specifies a `defaultsTo`. This should never have been allowed-- defaultsTo should be undefined! But instead, got: '+util.inspect(attrDef.defaultsTo, {depth:5})+''); + } + } + // ================================================================================================ + + //-• + // Send back a reference to this attribute definition. + return attrDef; + +}; diff --git a/lib/waterline/utils/ontology/get-model.js b/lib/waterline/utils/ontology/get-model.js new file mode 100644 index 000000000..1458e8f0c --- /dev/null +++ b/lib/waterline/utils/ontology/get-model.js @@ -0,0 +1,110 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); + + +/** + * getModel() + * + * Look up a Waterline model by identity. 
+ * + * > Note that we do a few quick assertions in the process, purely as sanity checks + * > and to help prevent bugs. If any of these fail, then it means there is some + * > unhandled usage error, or a bug going on elsewhere in Waterline. + * + * ------------------------------------------------------------------------------------------ + * @param {String} modelIdentity + * The identity of the model this is referring to (e.g. "pet" or "user") + * > Useful for looking up the Waterline model and accessing its attribute definitions. + * + * @param {Ref} orm + * The Waterline ORM instance. + * ------------------------------------------------------------------------------------------ + * @returns {Ref} [the Waterline model] + * ------------------------------------------------------------------------------------------ + * @throws {Error} If no such model exists. + * E_MODEL_NOT_REGISTERED + * + * @throws {Error} If anything else goes wrong. + * ------------------------------------------------------------------------------------------ + */ + +module.exports = function getModel(modelIdentity, orm) { + + // ================================================================================================ + // Check that this utility function is being used properly, and that the provided `modelIdentity` and `orm` are valid. + if (!_.isString(modelIdentity) || modelIdentity === '') { + throw new Error('Consistency violation: `modelIdentity` must be a non-empty string. 
Instead got: '+modelIdentity); + } + var isORMDictionary = _.isObject(orm) && !_.isArray(orm) && !_.isFunction(orm); + if (!isORMDictionary) { + throw new Error('Consistency violation: `orm` must be a valid Waterline ORM instance (must be a dictionary)'); + } + + var doesORMHaveValidCollectionsDictionary = _.isObject(orm.collections) && !_.isArray(orm.collections) && !_.isFunction(orm.collections); + if (!doesORMHaveValidCollectionsDictionary) { + throw new Error('Consistency violation: `orm` must be a valid Waterline ORM instance (must have a dictionary of "collections")'); + } + // ================================================================================================ + + + // Try to look up the Waterline model. + // + // > Note that, in addition to being the model definition, this + // > "WLModel" is actually the hydrated model object (fka a "Waterline collection") + // > which has methods like `find`, `create`, etc. + var WLModel = orm.collections[modelIdentity]; + if (_.isUndefined(WLModel)) { + throw flaverr('E_MODEL_NOT_REGISTERED', new Error('The provided `modelIdentity` references a model (`'+modelIdentity+'`) which is not registered in this `orm`.')); + } + + + // ================================================================================================ + // Finally, do a couple of quick sanity checks on the registered + // Waterline model, such as verifying that it declares an extant, + // valid primary key attribute. + + var isWLModelDictionary = _.isObject(WLModel) && !_.isArray(WLModel) && !_.isFunction(WLModel); + if (!isWLModelDictionary) { + throw new Error('Consistency violation: All model definitions must be dictionaries, but somehow, the referenced Waterline model (`'+modelIdentity+'`) seems to have become corrupted. 
Here it is: '+util.inspect(WLModel, {depth: 1})); + } + + var doesWLModelHaveValidAttributesDictionary = _.isObject(WLModel.attributes) && !_.isArray(WLModel.attributes) && !_.isFunction(WLModel.attributes); + if (!doesWLModelHaveValidAttributesDictionary) { + throw new Error('Consistency violation: All model definitions must have a dictionary of `attributes`. But somehow, the referenced Waterline model (`'+modelIdentity+'`) seems to have become corrupted and has a missing or invalid `attributes` property. Here is the Waterline model: '+util.inspect(WLModel, {depth: 1})); + } + + var doesWLModelHaveValidPrimaryKeySetting = _.isString(WLModel.primaryKey); + if (!doesWLModelHaveValidPrimaryKeySetting) { + throw new Error('Consistency violation: The referenced Waterline model (`'+modelIdentity+'`) defines an invalid `primaryKey` setting. Should be a string (the name of the primary key attribute), but instead, it is: '+util.inspect(WLModel.primaryKey, {depth:5})); + } + + // Now a few more checks for the primary key attribute. 
+ var pkAttrDef = WLModel.attributes[WLModel.primaryKey]; + if (_.isUndefined(pkAttrDef)) { + throw new Error('Consistency violation: The referenced Waterline model (`'+modelIdentity+'`) declares `primaryKey: \''+WLModel.primaryKey+'\'`, yet there is no `'+WLModel.primaryKey+'` attribute defined in the model!'); + } + + var isPkAttrDefDictionary = _.isObject(pkAttrDef) && !_.isArray(pkAttrDef) && !_.isFunction(pkAttrDef); + if (!isPkAttrDefDictionary) { + throw new Error('Consistency violation: The `primaryKey` (`'+WLModel.primaryKey+'`) in the referenced Waterline model (`'+modelIdentity+'`) corresponds with a CORRUPTED attribute definition: '+util.inspect(pkAttrDef, {depth:5})+'\n(^^this should have been caught already!)'); + } + + if (!_.isBoolean(pkAttrDef.required)) { + throw new Error('Consistency violation: The `primaryKey` (`'+WLModel.primaryKey+'`) in the referenced Waterline model (`'+modelIdentity+'`) corresponds with a CORRUPTED attribute definition '+util.inspect(pkAttrDef, {depth:5})+'\n(^^this should have been caught already! `required` must be either true or false!)'); + } + + if (pkAttrDef.type !== 'number' && pkAttrDef.type !== 'string') { + throw new Error('Consistency violation: The `primaryKey` (`'+WLModel.primaryKey+'`) in the referenced Waterline model (`'+modelIdentity+'`) corresponds with an INCOMPATIBLE attribute definition. In order to be used as the logical primary key, the referenced attribute should declare itself `type: \'string\'` or `type: \'number\'`...but instead its `type` is: '+util.inspect(pkAttrDef.type, {depth:5})+'\n(^^this should have been caught already!)'); + } + // ================================================================================================ + + + // Send back a reference to this Waterline model. 
+ return WLModel; + +}; diff --git a/lib/waterline/utils/ontology/is-capable-of-optimized-populate.js b/lib/waterline/utils/ontology/is-capable-of-optimized-populate.js new file mode 100644 index 000000000..0e541681e --- /dev/null +++ b/lib/waterline/utils/ontology/is-capable-of-optimized-populate.js @@ -0,0 +1,147 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var getModel = require('./get-model'); +var getAttribute = require('./get-attribute'); + + +/** + * isCapableOfOptimizedPopulate() + * + * Determine whether this association fully supports optimized populate. + * + * > Note that, if this is a plural association (a `collection` assoc. that is pointed at + * > by `via` on the other side, or for which there IS no "other side"), then there will be + * > a junction model in play. For this utility to return `true`, that junction model must + * > also be on the same datastore! + * + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @param {String} attrName [the name of the association in question] + * @param {String} modelIdentity [the identity of the model this association belongs to] + * @param {Ref} orm [the Waterline ORM instance] + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @returns {Boolean} + */ + +module.exports = function isCapableOfOptimizedPopulate(attrName, modelIdentity, orm) { + + if (!_.isString(attrName)) { + throw new Error('Consistency violation: Must specify `attrName` as a string. But instead, got: '+util.inspect(attrName, {depth:5})+''); + } + if (!_.isString(modelIdentity)) { + throw new Error('Consistency violation: Must specify `modelIdentity` as a string. 
But instead, got: '+util.inspect(modelIdentity, {depth:5})+''); + } + if (_.isUndefined(orm)) { + throw new Error('Consistency violation: Must pass in `orm` (a reference to the Waterline ORM instance). But instead, got: '+util.inspect(orm, {depth:5})+''); + } + + + // ╦ ╔═╗╔═╗╦╔═ ╦ ╦╔═╗ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌ ┬ ┌┬┐┌─┐┌┬┐┌─┐┬ ┌─┐ + // ║ ║ ║║ ║╠╩╗ ║ ║╠═╝ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││ ┌┼─ ││││ │ ││├┤ │ └─┐ + // ╩═╝╚═╝╚═╝╩ ╩ ╚═╝╩ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘ └┘ ┴ ┴└─┘─┴┘└─┘┴─┘└─┘ + + // Look up the containing model for this association, and the attribute definition itself. + var PrimaryWLModel = getModel(modelIdentity, orm); + var attrDef = getAttribute(attrName, modelIdentity, orm); + + assert(attrDef.model || attrDef.collection, 'Attempting to check whether attribute `'+attrName+'` of model `'+modelIdentity+'` is capable of optimized populate, but it\'s not even an association!'); + + // Look up the other, associated model. + var otherModelIdentity = attrDef.model ? attrDef.model : attrDef.collection; + var OtherWLModel = getModel(otherModelIdentity, orm); + + + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┬ ┬┬ ┬┌─┐┌┬┐┬ ┬┌─┐┬─┐ ╔═╗╦ ╦ ┌┬┐┌─┐┌┬┐┌─┐┬ ┌─┐ + // │ ├─┤├┤ │ ├┴┐ │││├─┤├┤ │ ├─┤├┤ ├┬┘ ╠═╣║ ║ ││││ │ ││├┤ │ └─┐ + // └─┘┴ ┴└─┘└─┘┴ ┴ └┴┘┴ ┴└─┘ ┴ ┴ ┴└─┘┴└─ ╩ ╩╩═╝╩═╝ ┴ ┴└─┘─┴┘└─┘┴─┘└─┘ + // ┌─┐┬─┐┌─┐ ┬ ┬┌─┐┬┌┐┌┌─┐ ┌┬┐┬ ┬┌─┐ ╔═╗╔═╗╔╦╗╔═╗ ╔╦╗╔═╗╔╦╗╔═╗╔═╗╔╦╗╔═╗╦═╗╔═╗ + // ├─┤├┬┘├┤ │ │└─┐│││││ ┬ │ ├─┤├┤ ╚═╗╠═╣║║║║╣ ║║╠═╣ ║ ╠═╣╚═╗ ║ ║ ║╠╦╝║╣ + // ┴ ┴┴└─└─┘ └─┘└─┘┴┘└┘└─┘ ┴ ┴ ┴└─┘ ╚═╝╩ ╩╩ ╩╚═╝ ═╩╝╩ ╩ ╩ ╩ ╩╚═╝ ╩ ╚═╝╩╚═╚═╝ + + // Determine if the two models are using the same datastore. 
+ var isUsingSameDatastore = (PrimaryWLModel.datastore === OtherWLModel.datastore); + + // Sanity check + if (!_.isString(PrimaryWLModel.datastore) || !_.isString(OtherWLModel.datastore)) { + throw new Error('Consistency violation: Outdated semantics (see https://github.com/balderdashy/waterline/commit/ecd3e1c8f05e27a3b0c1ea4f08a73a0b4ad83c07#commitcomment-20271012) The `datastore` property should be a string, not an array or whatever else. But for either the `'+PrimaryWLModel.identity+'` or `'+OtherWLModel.identity+'` model, it is not!'); + } + + + + // Now figure out if this association is using a junction (aka "many to many"), + // and if so, which model it is. + // > If it is not using a junction, we'll leave `JunctionWLModel` as undefined. + // ------ + var JunctionWLModel; + // To accomplish this, we'll grab the already-mapped relationship info (attached by wl-schema + // to models, as the `schema` property). If our directly-related model (as mapped by WL-schema + // has a `junctionTable` flag or a `throughTable` dictionary, then we can safely say this association + // is using a junction, and that this directly-related model is indeed that junction. + var junctionOrOtherModelIdentity = PrimaryWLModel.schema[attrName].referenceIdentity; + var JunctionOrOtherWLModel = getModel(junctionOrOtherModelIdentity, orm); + var arcaneProto = Object.getPrototypeOf(JunctionOrOtherWLModel); + if (_.isBoolean(arcaneProto.junctionTable) || _.isPlainObject(arcaneProto.throughTable)) { + JunctionWLModel = JunctionOrOtherWLModel; + }//>- + // ----- + + // If there is a junction, make sure to factor that in too. + // (It has to be using the same datastore as the other two for it to count.) 
+ if (JunctionWLModel) { + isUsingSameDatastore = isUsingSameDatastore && (JunctionWLModel.datastore === PrimaryWLModel.datastore); + + // Sanity check + if (!_.isString(JunctionWLModel.datastore)) { + throw new Error('Consistency violation: Outdated semantics (see https://github.com/balderdashy/waterline/commit/ecd3e1c8f05e27a3b0c1ea4f08a73a0b4ad83c07#commitcomment-20271012) The `datastore` property should be a string, not an array or whatever else. But for the `'+JunctionWLModel.identity+'` model, it is not!'); + } + + }//>- + + // Now, if any of the models involved is using a different datastore, then bail. + if (!isUsingSameDatastore) { + return false; + }//-• + + + // --• + // IWMIH, we know that this association is using exactly ONE datastore. + // And we even know that datastore's name. + // + // (remember, we just checked to verify that they're exactly the same above-- so we could have grabbed + // this datastore name from ANY of the involved models) + var relevantDatastoreName = PrimaryWLModel.datastore; + + // Sanity check + if (!_.isString(PrimaryWLModel.datastore)) { + throw new Error('Consistency violation: Outdated semantics (see https://github.com/balderdashy/waterline/commit/ecd3e1c8f05e27a3b0c1ea4f08a73a0b4ad83c07#commitcomment-20271012) The `datastore` property should be a string, not an array or whatever else. But for the `'+PrimaryWLModel.identity+'` model, it is not!'); + } + + // Another sanity check + assert(_.isString(relevantDatastoreName)); + + + // Finally, now that we know which datastore we're dealing with, check to see if that datastore's + // configured adapter supports optimized populates. + var doesDatastoreSupportOptimizedPopulates = PrimaryWLModel._adapter.join; + + // If not, then we're done. + if (!doesDatastoreSupportOptimizedPopulates) { + return false; + }//-• + + // IWMIH, then we know that all involved models in this query share a datastore, and that the datastore's + // adapter supports optimized populates. 
So we return true! + return true; + +}; + + +// Quick test: +/*``` +require('./lib/waterline/utils/ontology/is-capable-of-optimized-populate')('pets', 'user', { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, age: { type: 'number', required: false }, foo: { type: 'string', required: true }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: true}, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: true } } }); +```*/ diff --git a/lib/waterline/utils/ontology/is-exclusive.js b/lib/waterline/utils/ontology/is-exclusive.js new file mode 100644 index 000000000..51c784777 --- /dev/null +++ b/lib/waterline/utils/ontology/is-exclusive.js @@ -0,0 +1,92 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var getAttribute = require('./get-attribute'); + + +/** + * isExclusive() + * + * Determine whether this association is "exclusive" -- meaning that it is + * a two-way, plural ("collection") association, whose `via` points at a + * singular ("model") on the other side. + * + * > Note that "through" associations do not count. Although the "via" does + * > refer to a singular ("model") association in the intermediate junction + * > model, the underlying logical association is still non-exclusive. + * > i.e. the same child record can be added to the "through" association + * > of multiple different parent records. 
+ * + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @param {String} attrName [the name of the association in question] + * @param {String} modelIdentity [the identity of the model this association belongs to] + * @param {Ref} orm [the Waterline ORM instance] + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @returns {Boolean} + */ + +module.exports = function isExclusive(attrName, modelIdentity, orm) { + + if (!_.isString(attrName)) { + throw new Error('Consistency violation: Must specify `attrName` as a string. But instead, got: '+util.inspect(attrName, {depth:5})+''); + } + if (!_.isString(modelIdentity)) { + throw new Error('Consistency violation: Must specify `modelIdentity` as a string. But instead, got: '+util.inspect(modelIdentity, {depth:5})+''); + } + if (_.isUndefined(orm)) { + throw new Error('Consistency violation: Must pass in `orm` (a reference to the Waterline ORM instance). But instead, got: '+util.inspect(orm, {depth:5})+''); + } + + + // ╦ ╔═╗╔═╗╦╔═ ╦ ╦╔═╗ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌ ┬ ┌┬┐┌─┐┌┬┐┌─┐┬ ┌─┐ + // ║ ║ ║║ ║╠╩╗ ║ ║╠═╝ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││ ┌┼─ ││││ │ ││├┤ │ └─┐ + // ╩═╝╚═╝╚═╝╩ ╩ ╚═╝╩ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘ └┘ ┴ ┴└─┘─┴┘└─┘┴─┘└─┘ + + // Look up the containing model for this association, and the attribute definition itself. 
+ var attrDef = getAttribute(attrName, modelIdentity, orm); + + assert(attrDef.model || attrDef.collection, 'Attempting to check whether attribute `'+attrName+'` of model `'+modelIdentity+'` is an "exclusive" association, but it\'s not even an association in the first place!'); + + + + // ┌┐┌┌─┐┬ ┬ ╔═╗╦ ╦╔═╗╔═╗╦╔═ ╦╔╦╗ ╔═╗╦ ╦╔╦╗ + // ││││ ││││ ║ ╠═╣║╣ ║ ╠╩╗ ║ ║ ║ ║║ ║ ║ + // ┘└┘└─┘└┴┘┘ ╚═╝╩ ╩╚═╝╚═╝╩ ╩ ╩ ╩ ╚═╝╚═╝ ╩ + + // If this association is singular, then it is not exclusive. + if (!attrDef.collection) { + return false; + }//-• + + // If it has no `via`, then it is not two-way, and also not exclusive. + if (!attrDef.via) { + return false; + }//-• + + // If it has a "through" junction model defined, then it is not exclusive. + if (attrDef.through) { + return false; + }//-• + + // If its `via` points at a plural association, then it is not exclusive. + // > Note that, to do this, we look up the attribute on the OTHER model + // > that is pointed at by THIS association's `via`. + var viaAttrDef = getAttribute(attrDef.via, attrDef.collection, orm); + if (viaAttrDef.collection) { + return false; + }//-• + + // Otherwise, its `via` must be pointing at a singular association, so it's exclusive! 
+ return true; + +}; + + +// Quick test: +/*``` +require('./lib/waterline/utils/ontology/is-exclusive')('pets', 'user', { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, age: { type: 'number', required: false }, foo: { type: 'string', required: true }, pets: { collection: 'pet', via: 'owner' } }, primaryKey: 'id', hasSchema: true}, pet: { attributes: { id: { type:'number', required: true, unique: true }, owner: { model: 'user' } }, primaryKey: 'id', hasSchema: true } } }); +```*/ diff --git a/lib/waterline/utils/query/build-omen.js b/lib/waterline/utils/query/build-omen.js new file mode 100644 index 000000000..9ce3134e7 --- /dev/null +++ b/lib/waterline/utils/query/build-omen.js @@ -0,0 +1,43 @@ +/** + * Module dependencies + */ + +var flaverr = require('flaverr'); + + +/** + * buildOmen() + * + * Build an omen, an Error instance defined ahead of time in order to grab a stack trace. + * (used for providing a better experience when viewing the stack trace of errors + * that come from one or more asynchronous ticks down the line; e.g. uniqueness errors) + * + * > Note that the Error returned by this utility can only be used once. + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @param {Function} caller + * The function to use for context. + * The stack trace of the omen will be snipped based on the instruction where + * this "caller" function was invoked. + * + * @returns {Error} + * The new omen (an Error instance.) + */ +module.exports = function buildOmen(caller){ + + var omen = flaverr({}, new Error('omen'), caller); + return omen; + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: do something fancier here, or where this is called, to keep track of the omen so that it + // can support both sorts of usages (Deferred and explicit callback.) 
+ // + // This way, it could do an even better job of reporting exactly where the error came from in + // userland code as the very first entry in the stack trace. e.g. + // ``` + // var omen = flaverr({}, new Error('omen'), Deferred.prototype.exec); + // // ^^ but would need to pass through the original omen or something + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + +}; diff --git a/lib/waterline/utils/query/forge-adapter-error.js b/lib/waterline/utils/query/forge-adapter-error.js new file mode 100644 index 000000000..3d4a77f16 --- /dev/null +++ b/lib/waterline/utils/query/forge-adapter-error.js @@ -0,0 +1,373 @@ +/** + * Module dependencies + */ + +var assert = require('assert'); +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var getModel = require('../ontology/get-model'); + + +/** + * forgeAdapterError() + * + * Given a raw error from the adapter, convert it into a normalized, higher-level Error instance + * with a better stack trace. + * + * > This includes potentially examining its `footprint` property. + * > For more info on the lower-level driver specification, from whence this error originates, see: + * > https://github.com/treelinehq/waterline-query-docs/blob/a0689b6a6536a3c196dff6a9528f2ef72d4f6b7d/docs/errors.md#notunique + * > + * > Note that after calling this utility, the provided `omen` must NEVER BE USED AGAIN! + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Ref} originalError [The original error from the adapter] + * @param {Ref} omen [Used purely for improving the quality of the stack trace. Should be an error instance w/ its stack trace already adjusted.] 
+ * @param {String} adapterMethodName [The name of the adapter method] + * @param {String} modelIdentity [The identity of the originating model] + * @param {Ref} orm [The current ORM instance] + * + * @returns {Error} the new error + * @property {Ref} raw [The original error, just as it came] + * @property {String} modelIdentity [The identity of the originating model] + * @property {Function?} toJSON [Might be included, but only if this is a recognized error] + * @property {String?} code [Might be included, but only if this is a recognized error (e.g. "E_UNIQUE")] + * @property {Array?} attrNames [Might be included if this is an E_UNIQUE error] + * @of {String} + * + * > Note that if any internal error occurs, this utility still returns an Error + * > instance rather than throwing. Just note that the Error will not necessarily + * > have any of the standard properties above. (This is purely to avoid the burden + * > of an extra try/catch in code that calls this utility.) + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ +module.exports = function forgeAdapterError(err, omen, adapterMethodName, modelIdentity, orm){ + + try { + // Sanity checks + assert(err, 'Should never call `forgeAdapterError` with a falsy first argument!'); + assert(_.isError(omen), 'An already-set-up, generic uniqueness error should be provided (in the second argument) to this utility. This is for use as an omen, to improve the quality of the stack trace.'); + assert(_.isString(adapterMethodName) && adapterMethodName, 'Unexpected third argument to`forgeAdapterError`! Expecting non-empty string.'); + + // Look up model. + var WLModel = getModel(modelIdentity, orm); + + // If this is an Error-like object (e.g. from bluebird) but not technically + // valid for _.isError(), then parse it to obtain the underlying Error. 
+ err = flaverr.parseError(err) || err; + + // Call a self-invoking function which determines the customizations that we'll need + // to fold into this particular adapter error below. + // + // > Note that it is NOT THE RESPONSIBILITY OF THIS SELF-INVOKING FUNCTION to new up an + // > Error instance, and also that OTHER PROPERTIES ARE FOLDED IN AFTERWARDS! The only + // > reason this code is extrapolated is to reduce the likelihood of accidentally using + // > the wrong stack trace as adapter errors are added on in the future. + var customizations = (function(){ + + // ███╗ ██╗ ██████╗ ████████╗ █████╗ ███╗ ██╗ ███████╗██████╗ ██████╗ ██████╗ ██████╗ + // ████╗ ██║██╔═══██╗╚══██╔══╝ ██╔══██╗████╗ ██║ ██╔════╝██╔══██╗██╔══██╗██╔═══██╗██╔══██╗ + // ██╔██╗ ██║██║ ██║ ██║ ███████║██╔██╗ ██║ █████╗ ██████╔╝██████╔╝██║ ██║██████╔╝ + // ██║╚██╗██║██║ ██║ ██║ ██╔══██║██║╚██╗██║ ██╔══╝ ██╔══██╗██╔══██╗██║ ██║██╔══██╗ + // ██║ ╚████║╚██████╔╝ ██║ ██║ ██║██║ ╚████║ ███████╗██║ ██║██║ ██║╚██████╔╝██║ ██║ + // ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═══╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ + // + // If the incoming `err` is not an error instance, then handle it as a special case. + // (this should never happen) + if (!_.isError(err)) { + return { + + message: 'Malformed error from adapter: Should always be an Error instance, '+ + 'but instead, got:\n'+ + '```\n'+ + util.inspect(err, {depth:5})+'\n'+ + '```' + + }; + }//-• + + + // IWMIH, it's a valid Error instance. 
+ + // ███╗ ███╗██╗███████╗███████╗██╗███╗ ██╗ ██████╗ + // ████╗ ████║██║██╔════╝██╔════╝██║████╗ ██║██╔════╝ + // ██╔████╔██║██║███████╗███████╗██║██╔██╗ ██║██║ ███╗ + // ██║╚██╔╝██║██║╚════██║╚════██║██║██║╚██╗██║██║ ██║ + // ██║ ╚═╝ ██║██║███████║███████║██║██║ ╚████║╚██████╔╝ + // ╚═╝ ╚═╝╚═╝╚══════╝╚══════╝╚═╝╚═╝ ╚═══╝ ╚═════╝ + // + // ███████╗ ██████╗ ██████╗ ████████╗██████╗ ██████╗ ██╗███╗ ██╗████████╗ + // ██╔════╝██╔═══██╗██╔═══██╗╚══██╔══╝██╔══██╗██╔══██╗██║████╗ ██║╚══██╔══╝ + // █████╗ ██║ ██║██║ ██║ ██║ ██████╔╝██████╔╝██║██╔██╗ ██║ ██║ + // ██╔══╝ ██║ ██║██║ ██║ ██║ ██╔═══╝ ██╔══██╗██║██║╚██╗██║ ██║ + // ██║ ╚██████╔╝╚██████╔╝ ██║ ██║ ██║ ██║██║██║ ╚████║ ██║ + // ╚═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // If it doesn't have a footprint, then this is some miscellaneous error from the adapter. + // Still, wrap it up before sending it back. + if (!err.footprint) { + return { + + message: 'Unexpected error from database adapter: '+err.message + + }; + }//-• + + + // ██╗███╗ ██╗██╗ ██╗ █████╗ ██╗ ██╗██████╗ + // ██║████╗ ██║██║ ██║██╔══██╗██║ ██║██╔══██╗ + // ██║██╔██╗ ██║██║ ██║███████║██║ ██║██║ ██║ + // ██║██║╚██╗██║╚██╗ ██╔╝██╔══██║██║ ██║██║ ██║ + // ██║██║ ╚████║ ╚████╔╝ ██║ ██║███████╗██║██████╔╝ + // ╚═╝╚═╝ ╚═══╝ ╚═══╝ ╚═╝ ╚═╝╚══════╝╚═╝╚═════╝ + // + // ███████╗ ██████╗ ██████╗ ████████╗██████╗ ██████╗ ██╗███╗ ██╗████████╗ + // ██╔════╝██╔═══██╗██╔═══██╗╚══██╔══╝██╔══██╗██╔══██╗██║████╗ ██║╚══██╔══╝ + // █████╗ ██║ ██║██║ ██║ ██║ ██████╔╝██████╔╝██║██╔██╗ ██║ ██║ + // ██╔══╝ ██║ ██║██║ ██║ ██║ ██╔═══╝ ██╔══██╗██║██║╚██╗██║ ██║ + // ██║ ╚██████╔╝╚██████╔╝ ██║ ██║ ██║ ██║██║██║ ╚████║ ██║ + // ╚═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // If it has an invalid footprint (not a dictionary, or missing the fundamentals), + // then handle it as a special case. This should never happen. 
+ if (!_.isObject(err.footprint) || !_.isString(err.footprint.identity) || err.footprint.identity === '') { + return { + + message: 'Malformed error from adapter: If Error has a `footprint`, it should be a dictionary '+ + 'with a valid `identity`. But instead, the error\'s `footprint` is:\n'+ + '```\n'+ + util.inspect(err.footprint, {depth:5})+'\n'+ + '```' + + }; + }//-• + + + + // IWMIH, it's an Error instance with a superficially-valid footprint. + switch (err.footprint.identity) { + + // ███╗ ██╗ ██████╗ ████████╗ ██╗ ██╗███╗ ██╗██╗ ██████╗ ██╗ ██╗███████╗ + // ████╗ ██║██╔═══██╗╚══██╔══╝ ██║ ██║████╗ ██║██║██╔═══██╗██║ ██║██╔════╝ + // ██╔██╗ ██║██║ ██║ ██║ ██║ ██║██╔██╗ ██║██║██║ ██║██║ ██║█████╗ + // ██║╚██╗██║██║ ██║ ██║ ██║ ██║██║╚██╗██║██║██║▄▄ ██║██║ ██║██╔══╝ + // ██║ ╚████║╚██████╔╝ ██║ ╚██████╔╝██║ ╚████║██║╚██████╔╝╚██████╔╝███████╗ + // ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚══▀▀═╝ ╚═════╝ ╚══════╝ + // + // If this appears to be a uniqueness constraint violation error, then... + case 'notUnique': return (function(){ + + // ┌─┐┌─┐┌─┐┌┬┐┌─┐┬─┐┬┌┐┌┌┬┐ ┬┌─┐ ┌┬┐┬┌─┐┌─┐┬┌┐┌┌─┐ ╦╔═╔═╗╦ ╦╔═╗ + // ├┤ │ ││ │ │ ├─┘├┬┘││││ │ │└─┐ ││││└─┐└─┐│││││ ┬ ╠╩╗║╣ ╚╦╝╚═╗ + // └ └─┘└─┘ ┴ ┴ ┴└─┴┘└┘ ┴ ┴└─┘ ┴ ┴┴└─┘└─┘┴┘└┘└─┘ ╩ ╩╚═╝ ╩ ╚═╝ + if (!_.isArray(err.footprint.keys)) { + return { + + message: 'Malformed error from adapter: Since `footprint.identity` is "notUnique", '+ + 'this error\'s footprint should have an array of `keys`! But instead, the error\'s '+ + '`footprint.keys` is:\n'+ + '```\n'+ + util.inspect(err.footprint.keys, {depth:5})+'\n'+ + '```' + + }; + }//-• + + // But otherwise, it looks good, so we'll go on to forge it into a uniqueness error. 
+ + + // ┌─┐┌─┐┌─┐┌┬┐┌─┐┬─┐┬┌┐┌┌┬┐ ┬┌─┐ ┌─┐┬─┐┌─┐┌─┐┌─┐┬─┐┬ ┬ ┬ ┌─┐┌─┐┬─┐┌┬┐┌─┐┌┬┐┌┬┐┌─┐┌┬┐ + // ├┤ │ ││ │ │ ├─┘├┬┘││││ │ │└─┐ ├─┘├┬┘│ │├─┘├┤ ├┬┘│ └┬┘ ├┤ │ │├┬┘│││├─┤ │ │ ├┤ ││ + // └ └─┘└─┘ ┴ ┴ ┴└─┴┘└┘ ┴ ┴└─┘ ┴ ┴└─└─┘┴ └─┘┴└─┴─┘┴ └ └─┘┴└─┴ ┴┴ ┴ ┴ ┴ └─┘─┴┘ + // Determine the standard customizations for this kind of error, mapping the `footprint.keys` + // (~=column names) back to attribute names, and attaching a `toJSON()` function. + + // Format the `attrNames` property of our error by parsing `footprint.keys`. + // Along the way, also track any unmatched keys. + var namesOfOffendingAttrs = []; + var unmatchedKeys = []; + _.each(err.footprint.keys, function(key){ + + // Find matching attr name. + var matchingAttrName; + _.any(WLModel.schema, function(wlsAttr, attrName) { + + var attrDef = WLModel.attributes[attrName]; + assert(attrDef, 'Attribute (`'+attrName+'`) is corrupted! This attribute exists as a WLS attr in `schema`, so it should always exist in `attributes` as well-- but it does not! If you are seeing this message, it probably means your model (`'+modelIdentity+'`) has become corrupted.'); + + // If this is a plural association, then skip it. + // (it is impossible for a key from this error to match up with one of these-- they don't even have column names) + if (attrDef.collection) { return; } + + // Otherwise, we can expect a valid column name to exist. + assert(wlsAttr.columnName, 'The normalized `schema` of model `'+modelIdentity+'` has an attribute (`'+attrName+'`) with no `columnName`. But at this point, every WLS-normalized attribute should have a column name! (If you are seeing this error, the model definition may have been corrupted in-memory-- or there might be a bug in WL schema.)'); + + if (wlsAttr.columnName === key) { + matchingAttrName = attrName; + return true; + } + });// + + // Push it on, if it could be found. + if (matchingAttrName) { + namesOfOffendingAttrs.push(matchingAttrName); + } + // Otherwise track this as an unmatched key. 
+ else { + unmatchedKeys.push(key); + } + + });// + + + // If there were any unmatched keys, log a warning and silently ignore them. + if (unmatchedKeys.length > 0) { + console.warn('\n'+ + 'Warning: Adapter sent back a uniqueness error, but that error references key(s) ('+unmatchedKeys+') which cannot\n'+ + 'be matched up with the column names of any attributes in this model (`'+modelIdentity+'`). This probably\n'+ + 'means there is a bug in this adapter.\n'+ + '(Note for adapter implementors: If your adapter doesn\'t support granular reporting of the keys violated\n'+ + 'in uniqueness errors, then just use an empty array for the `keys` property of this error.)\n'+ + '(Proceeding anyway as if these keys weren\'t included...)\n' + ); + }//>- + + + // Build the customizations for our uniqueness error. + return { + message: 'Would violate uniqueness constraint-- a record already exists with conflicting value(s).', + code: 'E_UNIQUE', + attrNames: namesOfOffendingAttrs, + toJSON: function (){ + return { + code: this.code, + message: this.message, + modelIdentity: this.modelIdentity, + attrNames: this.attrNames, + }; + } + }; + + })(); + + // ███╗ ██╗ ██████╗ ███████╗██╗ ██╗ ██████╗██╗ ██╗ + // ████╗ ██║██╔═══██╗ ██╔════╝██║ ██║██╔════╝██║ ██║ + // ██╔██╗ ██║██║ ██║ ███████╗██║ ██║██║ ███████║ + // ██║╚██╗██║██║ ██║ ╚════██║██║ ██║██║ ██╔══██║ + // ██║ ╚████║╚██████╔╝ ███████║╚██████╔╝╚██████╗██║ ██║ + // ╚═╝ ╚═══╝ ╚═════╝ ╚══════╝ ╚═════╝ ╚═════╝╚═╝ ╚═╝ + // + // ██████╗ ██╗ ██╗██╗ ██╗███████╗██╗ ██████╗ █████╗ ██╗ + // ██╔══██╗██║ ██║╚██╗ ██╔╝██╔════╝██║██╔════╝██╔══██╗██║ + // ██████╔╝███████║ ╚████╔╝ ███████╗██║██║ ███████║██║ + // ██╔═══╝ ██╔══██║ ╚██╔╝ ╚════██║██║██║ ██╔══██║██║ + // ██║ ██║ ██║ ██║ ███████║██║╚██████╗██║ ██║███████╗ + // ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═════╝╚═╝ ╚═╝╚══════╝ + // + // ███╗ ███╗ ██████╗ ██████╗ ███████╗██╗ + // ████╗ ████║██╔═══██╗██╔══██╗██╔════╝██║ + // ██╔████╔██║██║ ██║██║ ██║█████╗ ██║ + // ██║╚██╔╝██║██║ ██║██║ ██║██╔══╝ ██║ + 
// ██║ ╚═╝ ██║╚██████╔╝██████╔╝███████╗███████╗ + // ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ + // + case 'noSuchPhysicalModel': return (function(){ + return { + message: 'Database says there is no such table/collection/etc. '+err.message, + code: 'E_NO_SUCH_PHYSICAL_MODEL', + toJSON: function (){ + return { + code: this.code, + message: this.message, + modelIdentity: this.modelIdentity, + }; + } + }; + })(); + + + // ██████╗ █████╗ ████████╗ ██████╗██╗ ██╗ █████╗ ██╗ ██╗ + // ██╔════╝██╔══██╗╚══██╔══╝██╔════╝██║ ██║██╔══██╗██║ ██║ + // ██║ ███████║ ██║ ██║ ███████║███████║██║ ██║ + // ██║ ██╔══██║ ██║ ██║ ██╔══██║██╔══██║██║ ██║ + // ╚██████╗██║ ██║ ██║ ╚██████╗██║ ██║██║ ██║███████╗███████╗ + // ╚═════╝╚═╝ ╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚══════╝ + // + case 'catchall': return (function(){ + return { + message: 'Unexpected error from database adapter: '+err.message + }; + })(); + + + // ██╗ ██╗███╗ ██╗██████╗ ███████╗ ██████╗ ██████╗ ██████╗ ███╗ ██╗██╗███████╗███████╗██████╗ + // ██║ ██║████╗ ██║██╔══██╗██╔════╝██╔════╝██╔═══██╗██╔════╝ ████╗ ██║██║╚══███╔╝██╔════╝██╔══██╗ + // ██║ ██║██╔██╗ ██║██████╔╝█████╗ ██║ ██║ ██║██║ ███╗██╔██╗ ██║██║ ███╔╝ █████╗ ██║ ██║ + // ██║ ██║██║╚██╗██║██╔══██╗██╔══╝ ██║ ██║ ██║██║ ██║██║╚██╗██║██║ ███╔╝ ██╔══╝ ██║ ██║ + // ╚██████╔╝██║ ╚████║██║ ██║███████╗╚██████╗╚██████╔╝╚██████╔╝██║ ╚████║██║███████╗███████╗██████╔╝ + // ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝╚══════╝╚══════╝╚═════╝ + // + // ███████╗ ██████╗ ██████╗ ████████╗██████╗ ██████╗ ██╗███╗ ██╗████████╗ + // ██╔════╝██╔═══██╗██╔═══██╗╚══██╔══╝██╔══██╗██╔══██╗██║████╗ ██║╚══██╔══╝ + // █████╗ ██║ ██║██║ ██║ ██║ ██████╔╝██████╔╝██║██╔██╗ ██║ ██║ + // ██╔══╝ ██║ ██║██║ ██║ ██║ ██╔═══╝ ██╔══██╗██║██║╚██╗██║ ██║ + // ██║ ╚██████╔╝╚██████╔╝ ██║ ██║ ██║ ██║██║██║ ╚████║ ██║ + // ╚═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // Handle unrecognized footprint identity as a special case. (This should never happen.) 
+ default: return { + + message: + 'Malformed error from adapter: If Error has a `footprint`, it should be a dictionary with a recognized `identity`. '+ + 'But this error\'s footprint identity (`'+err.footprint.identity+'`) is not recognized.' + + }; + + }// + + })();// + + assert(_.isObject(customizations) && !_.isError(customizations), 'At this point, `customizations` should be a dictionary, but it should not be an Error instance!'); + + + // ██████╗ ██╗ ██╗██╗██╗ ██████╗ ██╗ + // ██╔══██╗██║ ██║██║██║ ██╔══██╗ ██║ + // ██████╔╝██║ ██║██║██║ ██║ ██║ ████████╗ + // ██╔══██╗██║ ██║██║██║ ██║ ██║ ██╔═██╔═╝ + // ██████╔╝╚██████╔╝██║███████╗██████╔╝ ██████║ + // ╚═════╝ ╚═════╝ ╚═╝╚══════╝╚═════╝ ╚═════╝ + // + // ██████╗ ███████╗████████╗██╗ ██╗██████╗ ███╗ ██╗ ███████╗██╗███╗ ██╗ █████╗ ██╗ + // ██╔══██╗██╔════╝╚══██╔══╝██║ ██║██╔══██╗████╗ ██║ ██╔════╝██║████╗ ██║██╔══██╗██║ + // ██████╔╝█████╗ ██║ ██║ ██║██████╔╝██╔██╗ ██║ █████╗ ██║██╔██╗ ██║███████║██║ + // ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗██║╚██╗██║ ██╔══╝ ██║██║╚██╗██║██╔══██║██║ + // ██║ ██║███████╗ ██║ ╚██████╔╝██║ ██║██║ ╚████║ ██║ ██║██║ ╚████║██║ ██║███████╗ + // ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝╚══════╝ + // + // ███████╗██████╗ ██████╗ ██████╗ ██████╗ + // ██╔════╝██╔══██╗██╔══██╗██╔═══██╗██╔══██╗ + // █████╗ ██████╔╝██████╔╝██║ ██║██████╔╝ + // ██╔══╝ ██╔══██╗██╔══██╗██║ ██║██╔══██╗ + // ███████╗██║ ██║██║ ██║╚██████╔╝██║ ██║ + // ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ + // + // Tack on the baseline customizations that are used in every adapter error. + _.extend(customizations, { + name: 'AdapterError', + adapterMethodName: adapterMethodName, + modelIdentity: modelIdentity, + raw: err + }); + + // Then build and return the final error. + // + // > Remember: This cannibalizes the `omen` that was passed in! 
+ return flaverr(customizations, omen); + + } catch (e) { + return new Error('Consistency violation: Waterline encountered an unexpected internal error: '+e.stack); + } + +}; diff --git a/lib/waterline/utils/query/forge-stage-three-query.js b/lib/waterline/utils/query/forge-stage-three-query.js new file mode 100644 index 000000000..5e9c99d3a --- /dev/null +++ b/lib/waterline/utils/query/forge-stage-three-query.js @@ -0,0 +1,711 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); + +// ███████╗ ██████╗ ██████╗ ██████╗ ███████╗ ███████╗████████╗ █████╗ ██████╗ ███████╗ +// ██╔════╝██╔═══██╗██╔══██╗██╔════╝ ██╔════╝ ██╔════╝╚══██╔══╝██╔══██╗██╔════╝ ██╔════╝ +// █████╗ ██║ ██║██████╔╝██║ ███╗█████╗ ███████╗ ██║ ███████║██║ ███╗█████╗ +// ██╔══╝ ██║ ██║██╔══██╗██║ ██║██╔══╝ ╚════██║ ██║ ██╔══██║██║ ██║██╔══╝ +// ██║ ╚██████╔╝██║ ██║╚██████╔╝███████╗ ███████║ ██║ ██║ ██║╚██████╔╝███████╗ +// ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝ ╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚══════╝ +// +// ████████╗██╗ ██╗██████╗ ███████╗███████╗ ██████╗ ██╗ ██╗███████╗██████╗ ██╗ ██╗ +// ╚══██╔══╝██║ ██║██╔══██╗██╔════╝██╔════╝ ██╔═══██╗██║ ██║██╔════╝██╔══██╗╚██╗ ██╔╝ +// ██║ ███████║██████╔╝█████╗ █████╗ ██║ ██║██║ ██║█████╗ ██████╔╝ ╚████╔╝ +// ██║ ██╔══██║██╔══██╗██╔══╝ ██╔══╝ ██║▄▄ ██║██║ ██║██╔══╝ ██╔══██╗ ╚██╔╝ +// ██║ ██║ ██║██║ ██║███████╗███████╗ ╚██████╔╝╚██████╔╝███████╗██║ ██║ ██║ +// ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚══════╝ ╚══▀▀═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═╝ +// + + +/** + * forgeStageThreeQuery() + * + * @param {Dictionary} stageTwoQuery + * @param {String} identity + * @param {Ref} transformer + * @param {Dictionary} originalModels + * + * @return {Dictionary} [the stage 3 query] + */ +module.exports = function forgeStageThreeQuery(options) { + // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ + // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐ + // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘ + if (!_.has(options, 
'stageTwoQuery') || !_.isPlainObject(options.stageTwoQuery)) { + throw new Error('Invalid options passed to `.buildStageThreeQuery()`. Missing or invalid `stageTwoQuery` option.'); + } + + if (!_.has(options, 'identity') || !_.isString(options.identity)) { + throw new Error('Invalid options passed to `.buildStageThreeQuery()`. Missing or invalid `identity` option.'); + } + + if (!_.has(options, 'transformer') || !_.isObject(options.transformer)) { + throw new Error('Invalid options passed to `.buildStageThreeQuery()`. Missing or invalid `transformer` option.'); + } + + if (!_.has(options, 'originalModels') || !_.isPlainObject(options.originalModels)) { + throw new Error('Invalid options passed to `.buildStageThreeQuery()`. Missing or invalid `originalModels` option.'); + } + + // Store the options to prevent typing so much + var s3Q = options.stageTwoQuery; + var identity = options.identity; + var transformer = options.transformer; + var originalModels = options.originalModels; + + + // ╔═╗╦╔╗╔╔╦╗ ┌┬┐┌─┐┌┬┐┌─┐┬ + // ╠╣ ║║║║ ║║ ││││ │ ││├┤ │ + // ╚ ╩╝╚╝═╩╝ ┴ ┴└─┘─┴┘└─┘┴─┘ + // Grab the current model definition. It will be used in all sorts of ways. + var model = originalModels[identity]; + if (!model) { + throw new Error('A model with the identity ' + identity + ' could not be found in the schema. 
Perhaps the wrong schema was used?'); + } + + // ╔═╗╦╔╗╔╔╦╗ ┌─┐┬─┐┬┌┬┐┌─┐┬─┐┬ ┬ ┬┌─┌─┐┬ ┬ + // ╠╣ ║║║║ ║║ ├─┘├┬┘││││├─┤├┬┘└┬┘ ├┴┐├┤ └┬┘ + // ╚ ╩╝╚╝═╩╝ ┴ ┴└─┴┴ ┴┴ ┴┴└─ ┴ ┴ ┴└─┘ ┴ + // Get the current model's primary key attribute + var modelPrimaryKey = model.primaryKey; + + + // ╔╦╗╦═╗╔═╗╔╗╔╔═╗╔═╗╔═╗╦═╗╔╦╗ ┬ ┬┌─┐┬┌┐┌┌─┐ + // ║ ╠╦╝╠═╣║║║╚═╗╠╣ ║ ║╠╦╝║║║ │ │└─┐│││││ ┬ + // ╩ ╩╚═╩ ╩╝╚╝╚═╝╚ ╚═╝╩╚═╩ ╩ └─┘└─┘┴┘└┘└─┘ + s3Q.using = model.tableName; + + + // ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗ + // ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝ + // ██║ ██████╔╝█████╗ ███████║ ██║ █████╗ + // ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝ + // ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗ + // ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ + // + // For `create` queries, the values need to be run through the transformer. + if (s3Q.method === 'create') { + // Validate that there is a `newRecord` key on the object + if (!_.has(s3Q, 'newRecord') || !_.isPlainObject(s3Q.newRecord)) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the values set for the record.' + )); + } + + try { + transformer.serializeValues(s3Q.newRecord); + } catch (e) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the values set for the record.\n'+ + 'Details:\n'+ + e.message + )); + } + + return s3Q; + } + + + // ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗ ███████╗ █████╗ ██████╗██╗ ██╗ + // ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝ ██╔════╝██╔══██╗██╔════╝██║ ██║ + // ██║ ██████╔╝█████╗ ███████║ ██║ █████╗ █████╗ ███████║██║ ███████║ + // ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝ ██╔══╝ ██╔══██║██║ ██╔══██║ + // ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗ ███████╗██║ ██║╚██████╗██║ ██║ + // ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ + // + // For `createEach` queries, the values of each record need to be run through the transformer. 
+ if (s3Q.method === 'createEach') { + // Validate that there is a `newRecord` key on the object + if (!_.has(s3Q, 'newRecords') || !_.isArray(s3Q.newRecords)) { + throw flaverr('E_INVALID_RECORDS', new Error( + 'Failed process the values set for the record.' + )); + } + + // Transform each new record. + _.each(s3Q.newRecords, function(record) { + try { + transformer.serializeValues(record); + } catch (e) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the values set for the record.\n'+ + 'Details:\n'+ + e.message + )); + } + }); + + return s3Q; + } + + + // ██╗ ██╗██████╗ ██████╗ █████╗ ████████╗███████╗ + // ██║ ██║██╔══██╗██╔══██╗██╔══██╗╚══██╔══╝██╔════╝ + // ██║ ██║██████╔╝██║ ██║███████║ ██║ █████╗ + // ██║ ██║██╔═══╝ ██║ ██║██╔══██║ ██║ ██╔══╝ + // ╚██████╔╝██║ ██████╔╝██║ ██║ ██║ ███████╗ + // ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝ + // + // For `update` queries, both the values and the criteria need to be run + // through the transformer. + if (s3Q.method === 'update') { + // Validate that there is a `valuesToSet` key on the object + if (!_.has(s3Q, 'valuesToSet') || !_.isPlainObject(s3Q.valuesToSet)) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the values set for the record.' + )); + } + + // Validate that there is a `criteria` key on the object + if (!_.has(s3Q, 'criteria') || !_.isPlainObject(s3Q.criteria)) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the criteria for the record.' + )); + } + + // Transform the values to set to use column names instead of attribute names. 
+ try { + transformer.serializeValues(s3Q.valuesToSet); + } catch (e) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the values set for the record.\n'+ + 'Details:\n'+ + e.message + )); + } + + // Transform the criteria into column names + try { + s3Q.criteria.where = transformer.serializeCriteria(s3Q.criteria.where); + } catch (e) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the criteria for the record.\n'+ + 'Details:\n'+ + e.message + )); + } + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // TODO: Probably rip this next bit out, since `sort` isn't supported + // for update & destroy queries anyway (that's already been validated + // in FS2Q at this point.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // Transform sort clauses into column names + if (!_.isUndefined(s3Q.criteria.sort) && s3Q.criteria.sort.length) { + s3Q.criteria.sort = _.map(s3Q.criteria.sort, function(sortClause) { + var comparatorTarget = _.first(_.keys(sortClause)); + var attrName = _.first(comparatorTarget.split(/\./)); + var sortDirection = sortClause[comparatorTarget]; + + var sort = {}; + var columnName = model.schema[attrName].columnName; + sort[[columnName].concat(comparatorTarget.split(/\./).slice(1)).join('.')] = sortDirection; + return sort; + }); + } + + // Remove any invalid properties + delete s3Q.criteria.omit; + delete s3Q.criteria.select; + + return s3Q; + } + + + // ██████╗ ███████╗███████╗████████╗██████╗ ██████╗ ██╗ ██╗ + // ██╔══██╗██╔════╝██╔════╝╚══██╔══╝██╔══██╗██╔═══██╗╚██╗ ██╔╝ + // ██║ ██║█████╗ ███████╗ ██║ ██████╔╝██║ ██║ ╚████╔╝ + // ██║ ██║██╔══╝ ╚════██║ ██║ ██╔══██╗██║ ██║ ╚██╔╝ + // ██████╔╝███████╗███████║ ██║ ██║ ██║╚██████╔╝ ██║ + // ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ + // + // For `destroy` queries, the criteria needs to be run through the transformer. 
+ if (s3Q.method === 'destroy') { + // Validate that there is a `criteria` key on the object + if (!_.has(s3Q, 'criteria') || !_.isPlainObject(s3Q.criteria)) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the criteria for the record.' + )); + } + + // Transform the criteria into column names + try { + s3Q.criteria.where = transformer.serializeCriteria(s3Q.criteria.where); + } catch (e) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the criteria for the record.\n'+ + 'Details:\n'+ + e.message + )); + } + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // TODO: Probably rip this next bit out, since `sort` isn't supported + // for update & destroy queries anyway (that's already been validated + // in FS2Q at this point.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // Transform sort clauses into column names + if (!_.isUndefined(s3Q.criteria.sort) && s3Q.criteria.sort.length) { + s3Q.criteria.sort = _.map(s3Q.criteria.sort, function(sortClause) { + var comparatorTarget = _.first(_.keys(sortClause)); + var attrName = _.first(comparatorTarget.split(/\./)); + var sortDirection = sortClause[comparatorTarget]; + + var sort = {}; + var columnName = model.schema[attrName].columnName; + sort[[columnName].concat(comparatorTarget.split(/\./).slice(1)).join('.')] = sortDirection; + return sort; + }); + } + + // Remove any invalid properties + delete s3Q.criteria.omit; + delete s3Q.criteria.select; + + return s3Q; + } + + + // ███████╗██╗███╗ ██╗██████╗ + // ██╔════╝██║████╗ ██║██╔══██╗ + // █████╗ ██║██╔██╗ ██║██║ ██║ + // ██╔══╝ ██║██║╚██╗██║██║ ██║ + // ██║ ██║██║ ╚████║██████╔╝ + // ╚═╝ ╚═╝╚═╝ ╚═══╝╚═════╝ + // + // Build join instructions and transform criteria to column names. 
+ if (s3Q.method === 'find' || s3Q.method === 'findOne') { + s3Q.method = 'find'; + + // ╔╗ ╦ ╦╦╦ ╔╦╗ ┬┌─┐┬┌┐┌ ┬┌┐┌┌─┐┌┬┐┬─┐┬ ┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ + // ╠╩╗║ ║║║ ║║ ││ │││││ ││││└─┐ │ ├┬┘│ ││ │ ││ ││││└─┐ + // ╚═╝╚═╝╩╩═╝═╩╝ └┘└─┘┴┘└┘ ┴┘└┘└─┘ ┴ ┴└─└─┘└─┘ ┴ ┴└─┘┘└┘└─┘ + // Build the JOIN logic for the population + // (And also: identify attribute names of singular associations for use below when expanding `select` clause criteria) + var joins = []; + var singularAssocAttrNames = []; + _.each(s3Q.populates, function(populateCriteria, populateAttribute) { + // If the populationCriteria is a boolean, make sure it's not a falsy value. + if (!populateCriteria) { + return; + } + + if (_.isPlainObject(populateCriteria) && !_.keys(populateCriteria).length) { + return; + } + + // If the populate criteria is a truthy boolean, expand it out to {} + if (_.isBoolean(populateCriteria)) { + populateCriteria = {}; + } + + try { + // Find the normalized schema value for the populated attribute + var attrDefToPopulate = model.attributes[populateAttribute]; + var schemaAttribute = model.schema[populateAttribute]; + + if (!attrDefToPopulate) { + throw new Error('In ' + util.format('`.populate("%s")`', populateAttribute) + ', attempting to populate an attribute that doesn\'t exist'); + } + + // Grab the key being populated from the original model definition to check + // if it is a has many or belongs to. If it's a belongs_to the adapter needs + // to know that it should replace the foreign key with the associated value. 
+ var parentAttr = originalModels[identity].schema[populateAttribute]; + + // If this is a singular association, track it for use below + // (when processing projections in the top-level criteria) + if (parentAttr.model) { + singularAssocAttrNames.push(populateAttribute); + } + + if (parentAttr.collection && schemaAttribute.columnName) { + console.warn('Ignoring `columnName` setting for collection `' + attrDefToPopulate + '` on attribute `' + populateAttribute + '`.'); + delete schemaAttribute.columnName; + } + + // Build the initial join object that will link this collection to either another collection + // or to a junction table. + var join = { + parentCollectionIdentity: identity, + parent: s3Q.using, + parentAlias: s3Q.using + '__' + populateAttribute, + // For singular associations, the populated attribute will have a schema (since it represents + // a real column). For plural associations, we'll use the primary key column of the parent table. + parentKey: schemaAttribute.columnName || model.schema[modelPrimaryKey].columnName, + childCollectionIdentity: parentAttr.referenceIdentity, + child: parentAttr.references, + childAlias: parentAttr.references + '__' + populateAttribute, + childKey: parentAttr.on, + alias: populateAttribute, + removeParentKey: !!parentAttr.foreignKey, + model: !!_.has(parentAttr, 'model'), + collection: !!_.has(parentAttr, 'collection'), + criteria: _.clone(populateCriteria) + }; + + // Build select object to use in the integrator + var select = []; + var customSelect = populateCriteria.select && _.isArray(populateCriteria.select); + + // Expand out the `*` criteria + if (customSelect && populateCriteria.select.length === 1 && _.first(populateCriteria.select) === '*') { + customSelect = false; + } + + _.each(originalModels[parentAttr.referenceIdentity].schema, function(val, key) { + // Ignore virtual attributes + if(_.has(val, 'collection')) { + return; + } + + // Check if the user has defined a custom select + if(customSelect && 
!_.includes(populateCriteria.select, key)) { + return; + } + + // Add the key to the select + select.push(key); + }); + + // Ensure the primary key and foreign key on the child are always selected. + // otherwise things like the integrator won't work correctly + var childPk = originalModels[parentAttr.referenceIdentity].primaryKey; + select.push(childPk); + + // Add the foreign key for collections so records can be turned into nested + // objects. + if (join.collection) { + select.push(parentAttr.on); + } + + // Make sure the join's select is unique + join.criteria.select = _.uniq(select); + + // Find the schema of the model the attribute references + var referencedSchema = originalModels[parentAttr.referenceIdentity]; + var reference = null; + + // If linking to a junction table, the attributes shouldn't be included in the return value + if (referencedSchema.junctionTable) { + join.select = false; + reference = _.find(referencedSchema.schema, function(referencedPhysicalAttr) { + return referencedPhysicalAttr.references && referencedPhysicalAttr.columnName !== schemaAttribute.on; + }); + } + // If it's a through table, treat it the same way as a junction table for now + else if (referencedSchema.throughTable && referencedSchema.throughTable[identity + '.' + populateAttribute]) { + join.select = false; + reference = referencedSchema.schema[referencedSchema.throughTable[identity + '.' 
+ populateAttribute]]; + } + + // Otherwise apply any omits to the selected attributes + else { + if (populateCriteria.omit && _.isArray(populateCriteria.omit) && populateCriteria.omit.length) { + _.each(populateCriteria.omit, function(omitValue) { + _.pull(join.criteria.select, omitValue); + }); + } + // Remove omit from populate criteria + delete populateCriteria.omit; + } + + // Add the first join + joins.push(join); + + // If a junction table is used, add an additional join to get the data + if (reference && _.has(schemaAttribute, 'on')) { + var selects = []; + _.each(originalModels[reference.referenceIdentity].schema, function(val, key) { + // Ignore virtual attributes + if(_.has(val, 'collection')) { + return; + } + + // Check if the user has defined a custom select and if so normalize it + if(customSelect && !_.includes(populateCriteria.select, key)) { + return; + } + + // Add the value to the select + selects.push(key); + }); + + // Apply any omits to the selected attributes + if (populateCriteria.omit && populateCriteria.omit.length) { + _.each(populateCriteria.omit, function(omitValue) { + _.pull(selects, omitValue); + }); + } + + // Ensure the primary key and foreign are always selected. 
Otherwise things like the + // integrator won't work correctly + childPk = originalModels[reference.referenceIdentity].primaryKey; + selects.push(childPk); + + join = { + parentCollectionIdentity: schemaAttribute.referenceIdentity, + parent: schemaAttribute.references, + parentAlias: schemaAttribute.references + '__' + populateAttribute, + parentKey: reference.columnName, + childCollectionIdentity: reference.referenceIdentity, + child: reference.references, + childAlias: reference.references + '__' + populateAttribute, + childKey: reference.on, + alias: populateAttribute, + junctionTable: true, + removeParentKey: !!parentAttr.foreignKey, + model: false, + collection: true, + criteria: _.clone(populateCriteria) + }; + + join.criteria.select = _.uniq(selects); + + joins.push(join); + } + + // Append the criteria to the correct join if available + if (populateCriteria && joins.length > 1) { + joins[1].criteria = _.extend({}, joins[1].criteria); + delete joins[0].criteria; + } else if (populateCriteria) { + joins[0].criteria = _.extend({}, joins[0].criteria); + } + + // Set the criteria joins + s3Q.joins = s3Q.joins || []; + s3Q.joins = s3Q.joins.concat(joins); + + // Clear out the joins + joins = []; + + } catch (e) { + throw new Error( + 'Encountered unexpected error while building join instructions for ' + + util.format('`.populate("%s")`', populateAttribute) + + '\nDetails:\n' + + util.inspect(e, {depth:null}) + ); + } + }); // + + // Replace populates on the s3Q with joins + delete s3Q.populates; + + // Ensure a joins array exists + if (!_.has(s3Q, 'joins')) { + s3Q.joins = []; + } + + + // ╔═╗╔═╗╦ ╔═╗╔═╗╔╦╗ ╔═╗╔╦╗╦╔╦╗ ┌─ ┌─┐┬─┐┌─┐ ┬┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ ─┐ + // ╚═╗║╣ ║ ║╣ ║ ║ ║ ║║║║║ ║ │ ├─┘├┬┘│ │ │├┤ │ │ ││ ││││└─┐ │ + // ╚═╝╚═╝╩═╝╚═╝╚═╝ ╩ooo╚═╝╩ ╩╩ ╩ └─ ┴ ┴└─└─┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘└─┘ ─┘ + // If the model's hasSchema value is set to false AND it has the default `select` clause (i.e. `['*']`), + // remove the select. 
+  if ((model.hasSchema === false && (_.indexOf(s3Q.criteria.select, '*') > -1)) || (s3Q.meta && s3Q.meta.skipExpandingDefaultSelectClause)) {
+    delete s3Q.criteria.select;
+  }
+
+  if (s3Q.criteria.select) {
+
+    // If an EXPLICIT `select` clause is being used, ensure that the primary key
+    // of the model is included. (This gets converted to its proper columnName below.)
+    //
+    // > Why do this?
+    // > The primary key is always required in Waterline for further processing needs.
+    if (!_.contains(s3Q.criteria.select, '*')) {
+
+      s3Q.criteria.select.push(model.primaryKey);
+
+    }//‡
+    // Otherwise, `select: ['*']` is in use, so expand it out into column names.
+    // This makes it much easier to work with in adapters, and to dynamically modify
+    // the select statement to alias values as needed when working with populates.
+    else {
+      // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+      // FUTURE: consider doing this in-place instead:
+      // (just need to verify that it'd be safe to change re polypopulates)
+      // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+      var selectedKeys = [];
+      _.each(model.attributes, function(val, key) {
+        if (!_.has(val, 'collection')) {
+          selectedKeys.push(key);
+        }
+      });
+      s3Q.criteria.select = selectedKeys;
+    }
+
+    // Apply any omits to the selected attributes
+    if (s3Q.criteria.omit.length > 0) {
+      _.each(s3Q.criteria.omit, function(omitAttrName) {
+        _.pull(s3Q.criteria.select, omitAttrName);
+      });
+    }
+
+    // If this query is populating any singular associations, then make sure
+    // their foreign keys are included. (Remember, we already calculated this above)
+    // > We do this using attribute names because we're about to transform everything
+    // > to column names anyway momentarily.
+ if (singularAssocAttrNames.length > 0) { + _.each(singularAssocAttrNames, function (attrName){ + s3Q.criteria.select.push(attrName); + }); + } + + // Just an additional check after modifying the select to make sure + // that it only contains unique values. + s3Q.criteria.select = _.uniq(s3Q.criteria.select); + + // Finally, transform the `select` clause into column names + s3Q.criteria.select = _.map(s3Q.criteria.select, function(attrName) { + return model.schema[attrName].columnName; + }); + + }//>- + + // Remove `omit` clause, since it's no longer relevant for the FS3Q. + delete s3Q.criteria.omit; + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Keep `omit` (see https://trello.com/c/b57sDgVr/124-adapter-spec-change-to-allow-for-more-flexible-base-values) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + // ╔═╗╔═╗╦═╗╔╦╗ + // ╚═╗║ ║╠╦╝ ║ + // ╚═╝╚═╝╩╚═ ╩ + // Transform the `sort` clause into column names + if (!_.isUndefined(s3Q.criteria.sort) && s3Q.criteria.sort.length) { + s3Q.criteria.sort = _.map(s3Q.criteria.sort, function(sortClause) { + var comparatorTarget = _.first(_.keys(sortClause)); + var attrName = _.first(comparatorTarget.split(/\./)); + var sortDirection = sortClause[comparatorTarget]; + + var sort = {}; + var columnName = model.schema[attrName].columnName; + sort[[columnName].concat(comparatorTarget.split(/\./).slice(1)).join('.')] = sortDirection; + return sort; + }); + } + + // ╦ ╦╦ ╦╔═╗╦═╗╔═╗ + // ║║║╠═╣║╣ ╠╦╝║╣ + // ╚╩╝╩ ╩╚═╝╩╚═╚═╝ + // Transform the `where` clause into column names + try { + s3Q.criteria.where = transformer.serializeCriteria(s3Q.criteria.where); + } catch (e) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the criteria for the record.\n'+ + 'Details:\n'+ + e.message + )); + } + + // Now, in the subcriteria `where` clause(s), if relevant: + // + // Transform any populate...where clauses to use the correct columnName values + _.each(s3Q.joins, 
function(join) { + + var joinCollection = originalModels[join.childCollectionIdentity]; + + // Ensure a join criteria exists + join.criteria = join.criteria || {}; + join.criteria = joinCollection._transformer.serializeCriteria(join.criteria); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // TODO -- is this necessary? Leaving in so that many-to-many tests pass. + // + // Note that this is NOT necessary as part of: + // https://github.com/balderdashy/waterline/blob/7f58b07be54542f4e127c2dc29cf80ce2110f32a/lib/waterline/utils/query/forge-stage-two-query.js#L763-L766 + // ^^the implementation of that is in help-find now. + // + // That said, removing this might still break it/other things. So that needs to be double-checked. + // Either way, it'd be good to add some clarification here. + // ``` + // If the join's `select` is false, leave it that way and eliminate the join criteria. + if (join.select === false) { + delete join.criteria.select; + return; + } + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // Ensure the join select doesn't contain duplicates + join.criteria.select = _.uniq(join.criteria.select); + delete join.select; + + }); + + // console.log('\n\n****************************\n\n\n********\nStage 3 query: ',util.inspect(s3Q,{depth:5}),'\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^'); + return s3Q; + } + + + // █████╗ ██████╗ ██████╗ ██████╗ ███████╗ ██████╗ █████╗ ████████╗██╗ ██████╗ ███╗ ██╗███████╗ + // ██╔══██╗██╔════╝ ██╔════╝ ██╔══██╗██╔════╝██╔════╝ ██╔══██╗╚══██╔══╝██║██╔═══██╗████╗ ██║██╔════╝ + // ███████║██║ ███╗██║ ███╗██████╔╝█████╗ ██║ ███╗███████║ ██║ ██║██║ ██║██╔██╗ ██║███████╗ + // ██╔══██║██║ ██║██║ ██║██╔══██╗██╔══╝ ██║ ██║██╔══██║ ██║ ██║██║ ██║██║╚██╗██║╚════██║ + // ██║ ██║╚██████╔╝╚██████╔╝██║ ██║███████╗╚██████╔╝██║ ██║ ██║ ██║╚██████╔╝██║ ╚████║███████║ + // ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ 
╚═╝ ╚═══╝╚══════╝ + // + // For `avg` and `sum` queries, the criteria needs to be run through the transformer. + if (s3Q.method === 'avg' || s3Q.method === 'sum' || s3Q.method === 'count') { + // Validate that there is a `criteria` key on the object + if (!_.has(s3Q, 'criteria') || !_.isPlainObject(s3Q.criteria)) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the criteria for the record.' + )); + } + + // Transform the criteria into column names + try { + s3Q.criteria = transformer.serializeCriteria(s3Q.criteria); + } catch (e) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the criteria for the record.\n'+ + 'Details:\n'+ + e.message + )); + } + + // Transform the numericAttrName into a column name using a nasty hack. + try { + var _tmpNumericKeyNameHolder = {}; + _tmpNumericKeyNameHolder[s3Q.numericAttrName] = ''; + transformer.serializeValues(_tmpNumericKeyNameHolder); + s3Q.numericAttrName = _.first(_.keys(_tmpNumericKeyNameHolder)); + } catch (e) { + throw flaverr('E_INVALID_RECORD', new Error( + 'Failed process the criteria for the record.\n'+ + 'Details:\n'+ + e.message + )); + } + + // Remove any invalid properties + delete s3Q.criteria.omit; + delete s3Q.criteria.select; + delete s3Q.criteria.where.populates; + + if (s3Q.method === 'count') { + delete s3Q.criteria.skip; + delete s3Q.criteria.sort; + delete s3Q.criteria.limit; + } + + return s3Q; + } + + + // If the method wasn't recognized, throw an error + throw flaverr('E_INVALID_QUERY', new Error( + 'Invalid query method set - `' + s3Q.method + '`.' 
+ )); +}; diff --git a/lib/waterline/utils/query/forge-stage-two-query.js b/lib/waterline/utils/query/forge-stage-two-query.js new file mode 100644 index 000000000..820b7521e --- /dev/null +++ b/lib/waterline/utils/query/forge-stage-two-query.js @@ -0,0 +1,1907 @@ +/** + * Module dependencies + */ + +var assert = require('assert'); +var util = require('util'); +var _ = require('@sailshq/lodash'); +var getModel = require('../ontology/get-model'); +var getAttribute = require('../ontology/get-attribute'); +var isCapableOfOptimizedPopulate = require('../ontology/is-capable-of-optimized-populate'); +var isExclusive = require('../ontology/is-exclusive'); +var normalizePkValueOrValues = require('./private/normalize-pk-value-or-values'); +var normalizeCriteria = require('./private/normalize-criteria'); +var normalizeNewRecord = require('./private/normalize-new-record'); +var normalizeValueToSet = require('./private/normalize-value-to-set'); +var buildUsageError = require('./private/build-usage-error'); +var isSafeNaturalNumber = require('./private/is-safe-natural-number'); + + +/** + * forgeStageTwoQuery() + * + * Normalize and validate userland query keys (called a "stage 1 query" -- see `ARCHITECTURE.md`) + * i.e. these are things like `criteria` or `populates` that are passed in, either explicitly or + * implicitly, to a static model method (fka "collection method") such as `.find()`. + * + * > This DOES NOT RETURN ANYTHING! Instead, it modifies the provided "stage 1 query" in-place. + * > And when this is finished, the provided "stage 1 query" will be a normalized, validated + * > "stage 2 query" - aka logical protostatement. + * > + * > ALSO NOTE THAT THIS IS NOT ALWAYS IDEMPOTENT!! (Consider encryption.) + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Dictionary} query [A stage 1 query to destructively mutate into a stage 2 query.] 
+ * | @property {String} method + * | @property {Dictionary} meta + * | @property {String} using + * | + * |...PLUS a number of other potential properties, depending on the "method". (see below) + * + * + * @param {Ref} orm + * The Waterline ORM instance. + * > Useful for accessing the model definitions, datastore configurations, etc. + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @throws {Error} If it encounters irrecoverable problems or unsupported usage in the provided query keys. + * @property {String} name (Always "UsageError") + * @property {String} code + * One of: + * - E_INVALID_META (universal) + * - E_INVALID_CRITERIA + * - E_INVALID_POPULATES + * - E_INVALID_NUMERIC_ATTR_NAME + * - E_INVALID_STREAM_ITERATEE (for `eachBatchFn` & `eachRecordFn`) + * - E_INVALID_NEW_RECORD + * - E_INVALID_NEW_RECORDS + * - E_INVALID_VALUES_TO_SET + * - E_INVALID_TARGET_RECORD_IDS + * - E_INVALID_COLLECTION_ATTR_NAME + * - E_INVALID_ASSOCIATED_IDS + * - E_NOOP (relevant for various different methods, like find/count/addToCollection/etc.) + * @property {String} details + * The lower-level, original error message, without any sort of "Invalid yada yada. Details: ..." wrapping. + * Use this property to create custom messages -- for example: + * ``` + * new Error(e.details); + * ``` + * @property {String} message + * The standard `message` property of any Error-- just note that this Error's `message` is composed + * from an original, lower-level error plus a template (see buildUsageError() for details.) + * @property {String} stack + * The standard `stack` property, like any Error. Combines name + message + stack trace. 
+ * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @throws {Error} If anything else unexpected occurs + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ +module.exports = function forgeStageTwoQuery(query, orm) { + // if (process.env.NODE_ENV !== 'production') { + // console.time('forgeStageTwoQuery'); + // } + + + // Create a JS timestamp to represent the current (timezone-agnostic) date+time. + var theMomentBeforeFS2Q = Date.now(); + // ^^ -- -- -- -- -- -- -- -- -- -- -- -- -- + // Since Date.now() has trivial performance impact, we generate our + // JS timestamp up here no matter what, just in case we end up needing + // it later for `autoCreatedAt` or `autoUpdatedAt`, in situations where + // we might need to automatically add it in multiple spots (such as + // in `newRecords`, when processing a `.createEach()`.) + // + // > Benchmark: + // > • Absolute: ~0.021ms + // > • Relative: http://jsben.ch/#/TOF9y (vs. 
`(new Date()).getTime()`) + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- + + + + // ██████╗██╗ ██╗███████╗ ██████╗██╗ ██╗ ████████╗██╗ ██╗███████╗ + // ██╔════╝██║ ██║██╔════╝██╔════╝██║ ██╔╝ ╚══██╔══╝██║ ██║██╔════╝ + // ██║ ███████║█████╗ ██║ █████╔╝ ██║ ███████║█████╗ + // ██║ ██╔══██║██╔══╝ ██║ ██╔═██╗ ██║ ██╔══██║██╔══╝ + // ╚██████╗██║ ██║███████╗╚██████╗██║ ██╗ ██║ ██║ ██║███████╗ + // ╚═════╝╚═╝ ╚═╝╚══════╝ ╚═════╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚══════╝ + // + // ███████╗███████╗███████╗███████╗███╗ ██╗████████╗██╗ █████╗ ██╗ ███████╗ + // ██╔════╝██╔════╝██╔════╝██╔════╝████╗ ██║╚══██╔══╝██║██╔══██╗██║ ██╔════╝ + // █████╗ ███████╗███████╗█████╗ ██╔██╗ ██║ ██║ ██║███████║██║ ███████╗ + // ██╔══╝ ╚════██║╚════██║██╔══╝ ██║╚██╗██║ ██║ ██║██╔══██║██║ ╚════██║ + // ███████╗███████║███████║███████╗██║ ╚████║ ██║ ██║██║ ██║███████╗███████║ + // ╚══════╝╚══════╝╚══════╝╚══════╝╚═╝ ╚═══╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚══════╝ + + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ╦ ╦╔═╗╦╔╗╔╔═╗ + // │ ├─┤├┤ │ ├┴┐ ║ ║╚═╗║║║║║ ╦ + // └─┘┴ ┴└─┘└─┘┴ ┴ ╚═╝╚═╝╩╝╚╝╚═╝ + // Always check `using`. + if (!_.isString(query.using) || query.using === '') { + throw new Error( + 'Consistency violation: Every stage 1 query should include a property called `using` as a non-empty string.'+ + ' But instead, got: ' + util.inspect(query.using, {depth:5}) + ); + }//-• + + + // Look up the Waterline model for this query. + // > This is so that we can reference the original model definition. 
+ var WLModel; + try { + WLModel = getModel(query.using, orm); + } catch (e) { + switch (e.code) { + case 'E_MODEL_NOT_REGISTERED': throw new Error('Consistency violation: The specified `using` ("'+query.using+'") does not match the identity of any registered model.'); + default: throw e; + } + }// + + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ╔╦╗╔═╗╔╦╗╦ ╦╔═╗╔╦╗ + // │ ├─┤├┤ │ ├┴┐ ║║║║╣ ║ ╠═╣║ ║ ║║ + // └─┘┴ ┴└─┘└─┘┴ ┴ ╩ ╩╚═╝ ╩ ╩ ╩╚═╝═╩╝ + // ┬ ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┌─┐┌─┐┬─┐ ┌─┐─┐ ┬┌┬┐┬─┐┌─┐┌┐┌┌─┐┌─┐┬ ┬┌─┐ ┬┌─┌─┐┬ ┬┌─┐ + // ┌┼─ │ ├─┤├┤ │ ├┴┐ ├┤ │ │├┬┘ ├┤ ┌┴┬┘ │ ├┬┘├─┤│││├┤ │ ││ │└─┐ ├┴┐├┤ └┬┘└─┐ + // └┘ └─┘┴ ┴└─┘└─┘┴ ┴ └ └─┘┴└─ └─┘┴ └─ ┴ ┴└─┴ ┴┘└┘└─┘└─┘└─┘└─┘ ┴ ┴└─┘ ┴ └─┘┘ + // ┬ ┌┬┐┌─┐┌┬┐┌─┐┬─┐┌┬┐┬┌┐┌┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ ┬┌─┌─┐┬ ┬┌─┐ + // ┌┼─ ││├┤ │ ├┤ ├┬┘│││││││├┤ │─┼┐│ │├┤ ├┬┘└┬┘ ├┴┐├┤ └┬┘└─┐ + // └┘ ─┴┘└─┘ ┴ └─┘┴└─┴ ┴┴┘└┘└─┘ └─┘└└─┘└─┘┴└─ ┴ ┴ ┴└─┘ ┴ └─┘ + // Always check `method`. + if (!_.isString(query.method) || query.method === '') { + throw new Error( + 'Consistency violation: Every stage 1 query should include a property called `method` as a non-empty string.'+ + ' But instead, got: ' + util.inspect(query.method, {depth:5}) + ); + }//-• + + + + + // Determine the set of acceptable query keys for the specified `method`. + // (and, in the process, verify that we recognize this method in the first place) + var queryKeys = (function _getQueryKeys (){ + + switch(query.method) { + + case 'find': return [ 'criteria', 'populates' ]; + case 'findOne': return [ 'criteria', 'populates' ]; + case 'stream': return [ 'criteria', 'populates', 'eachRecordFn', 'eachBatchFn' ]; + case 'count': return [ 'criteria' ]; + case 'sum': return [ 'numericAttrName', 'criteria' ]; + case 'avg': return [ 'numericAttrName', 'criteria' ]; + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: consider renaming "numericAttrName" to something like "targetField" + // so that it's more descriptive even after being forged as part of a s3q. 
+ // But note that this would be a pretty big change throughout waterline core, + // possibly other utilities, as well as being a breaking change to the spec + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + case 'create': return [ 'newRecord' ]; + case 'createEach': return [ 'newRecords' ]; + case 'findOrCreate': return [ 'criteria', 'newRecord' ]; + + case 'update': return [ 'criteria', 'valuesToSet' ]; + case 'updateOne': return [ 'criteria', 'valuesToSet' ]; + case 'destroy': return [ 'criteria' ]; + case 'destroyOne': return [ 'criteria' ]; + case 'archive': return [ 'criteria' ]; + case 'archiveOne': return [ 'criteria' ]; + case 'addToCollection': return [ 'targetRecordIds', 'collectionAttrName', 'associatedIds' ]; + case 'removeFromCollection': return [ 'targetRecordIds', 'collectionAttrName', 'associatedIds' ]; + case 'replaceCollection': return [ 'targetRecordIds', 'collectionAttrName', 'associatedIds' ]; + + default: + throw new Error('Consistency violation: Unrecognized `method` ("'+query.method+'")'); + + } + + })();// + + + // > Note: + // > + // > It's OK if keys are missing at this point. We'll do our best to + // > infer a reasonable default, when possible. In some cases, it'll + // > still fail validation later, but in other cases, it'll pass. + // > + // > Anyway, that's all handled below. + + + // Now check that we see ONLY the expected keys for that method. + // (i.e. there should never be any miscellaneous stuff hanging out on the stage1 query dictionary) + + // We start off by building up an array of legal keys, starting with the universally-legal ones. + var allowedKeys = [ + 'meta', + 'using', + 'method' + ].concat(queryKeys); + + + // Then finally, we check that no extraneous keys are present. 
+ var extraneousKeys = _.difference(_.keys(query), allowedKeys); + if (extraneousKeys.length > 0) { + throw new Error('Consistency violation: Provided "stage 1 query" contains extraneous top-level keys: '+extraneousKeys); + } + + + + + + // ███╗ ███╗███████╗████████╗ █████╗ + // ████╗ ████║██╔════╝╚══██╔══╝██╔══██╗ + // ██╔████╔██║█████╗ ██║ ███████║ + // ██║╚██╔╝██║██╔══╝ ██║ ██╔══██║ + // ██║ ╚═╝ ██║███████╗ ██║ ██║ ██║ + // ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ + // + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ╔╦╗╔═╗╔╦╗╔═╗ ┌─ ┬┌─┐ ┌─┐┬─┐┌─┐┬ ┬┬┌┬┐┌─┐┌┬┐ ─┐ + // │ ├─┤├┤ │ ├┴┐ ║║║║╣ ║ ╠═╣ │ │├┤ ├─┘├┬┘│ │└┐┌┘│ ││├┤ ││ │ + // └─┘┴ ┴└─┘└─┘┴ ┴ ╩ ╩╚═╝ ╩ ╩ ╩ └─ ┴└ ┴ ┴└─└─┘ └┘ ┴─┴┘└─┘─┴┘ ─┘ + // If specified, check that `meta` is a dictionary. + if (!_.isUndefined(query.meta)) { + + if (!_.isObject(query.meta) || _.isArray(query.meta) || _.isFunction(query.meta)) { + throw buildUsageError( + 'E_INVALID_META', + 'If `meta` is provided, it should be a dictionary (i.e. a plain JavaScript object). '+ + 'But instead, got: ' + util.inspect(query.meta, {depth:5})+'', + query.using + ); + }//-• + + }//>-• + + + // Now check a few different model settings that correspond with `meta` keys, + // and set the relevant `meta` keys accordingly. + // + // > Remember, we rely on waterline-schema to have already validated + // > these model settings when the ORM was first initialized. + + // ┌─┐┌─┐┌─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐┌┐┌ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬┌─┐ + // │ ├─┤└─┐│ ├─┤ ││├┤ │ ││││ ││├┤ └─┐ │ ├┬┘│ │└┬┘ ┌┘ + // └─┘┴ ┴└─┘└─┘┴ ┴─┴┘└─┘ └─┘┘└┘ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ o + if (query.method === 'destroy' && !_.isUndefined(WLModel.cascadeOnDestroy)) { + if (!_.isBoolean(WLModel.cascadeOnDestroy)) { + throw new Error('Consistency violation: If specified, expecting `cascadeOnDestroy` model setting to be `true` or `false`. But instead, got: '+util.inspect(WLModel.cascadeOnDestroy, {depth:5})+''); + } + + if (!query.meta || query.meta.cascade === undefined) { + // Only bother setting the `cascade` meta key if the model setting is `true`. 
+ // (because otherwise it's `false`, which is the default anyway) + if (WLModel.cascadeOnDestroy) { + query.meta = query.meta || {}; + query.meta.cascade = WLModel.cascadeOnDestroy; + } + }//fi + + }//>- + + + // ┌─┐┌─┐┌┬┐┌─┐┬ ┬ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐ ┌─┐┌┐┌ ┬ ┬┌─┐┌┬┐┌─┐┌┬┐┌─┐┌─┐ + // ├┤ ├┤ │ │ ├─┤ ├┬┘├┤ │ │ │├┬┘ ││└─┐ │ ││││ │ │├─┘ ││├─┤ │ ├┤ ┌┘ + // └ └─┘ ┴ └─┘┴ ┴ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘ └─┘┘└┘ └─┘┴ ─┴┘┴ ┴ ┴ └─┘ o + if (query.method === 'update' && !_.isUndefined(WLModel.fetchRecordsOnUpdate)) { + if (!_.isBoolean(WLModel.fetchRecordsOnUpdate)) { + throw new Error('Consistency violation: If specified, expecting `fetchRecordsOnUpdate` model setting to be `true` or `false`. But instead, got: '+util.inspect(WLModel.fetchRecordsOnUpdate, {depth:5})+''); + } + + if (!query.meta || query.meta.fetch === undefined) { + // Only bother setting the `fetch` meta key if the model setting is `true`. + // (because otherwise it's `false`, which is the default anyway) + if (WLModel.fetchRecordsOnUpdate) { + query.meta = query.meta || {}; + query.meta.fetch = WLModel.fetchRecordsOnUpdate; + } + }//fi + + }//>- + + // ┌─┐┌─┐┌┬┐┌─┐┬ ┬ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐ ┌─┐┌┐┌ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬┌─┐ + // ├┤ ├┤ │ │ ├─┤ ├┬┘├┤ │ │ │├┬┘ ││└─┐ │ ││││ ││├┤ └─┐ │ ├┬┘│ │└┬┘ ┌┘ + // └ └─┘ ┴ └─┘┴ ┴ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘ └─┘┘└┘ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ o + if (query.method === 'destroy' && !_.isUndefined(WLModel.fetchRecordsOnDestroy)) { + if (!_.isBoolean(WLModel.fetchRecordsOnDestroy)) { + throw new Error('Consistency violation: If specified, expecting `fetchRecordsOnDestroy` model setting to be `true` or `false`. But instead, got: '+util.inspect(WLModel.fetchRecordsOnDestroy, {depth:5})+''); + } + + if (!query.meta || query.meta.fetch === undefined) { + // Only bother setting the `fetch` meta key if the model setting is `true`. 
+ // (because otherwise it's `false`, which is the default anyway) + if (WLModel.fetchRecordsOnDestroy) { + query.meta = query.meta || {}; + query.meta.fetch = WLModel.fetchRecordsOnDestroy; + } + }//fi + + }//>- + + // ┌─┐┌─┐┌┬┐┌─┐┬ ┬ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐ ┌─┐┌┐┌ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐┌─┐ + // ├┤ ├┤ │ │ ├─┤ ├┬┘├┤ │ │ │├┬┘ ││└─┐ │ ││││ │ ├┬┘├┤ ├─┤ │ ├┤ ┌┘ + // └ └─┘ ┴ └─┘┴ ┴ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘ └─┘┘└┘ └─┘┴└─└─┘┴ ┴ ┴ └─┘ o + if (query.method === 'create' && !_.isUndefined(WLModel.fetchRecordsOnCreate)) { + if (!_.isBoolean(WLModel.fetchRecordsOnCreate)) { + throw new Error('Consistency violation: If specified, expecting `fetchRecordsOnCreate` model setting to be `true` or `false`. But instead, got: '+util.inspect(WLModel.fetchRecordsOnCreate, {depth:5})+''); + } + + if (!query.meta || query.meta.fetch === undefined) { + // Only bother setting the `fetch` meta key if the model setting is `true`. + // (because otherwise it's `false`, which is the default anyway) + if (WLModel.fetchRecordsOnCreate) { + query.meta = query.meta || {}; + query.meta.fetch = WLModel.fetchRecordsOnCreate; + } + }//fi + + }//>- + + // ┌─┐┌─┐┌┬┐┌─┐┬ ┬ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐ ┌─┐┌┐┌ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌─┐┌─┐┌─┐┬ ┬┌─┐ + // ├┤ ├┤ │ │ ├─┤ ├┬┘├┤ │ │ │├┬┘ ││└─┐ │ ││││ │ ├┬┘├┤ ├─┤ │ ├┤ ├┤ ├─┤│ ├─┤ ┌┘ + // └ └─┘ ┴ └─┘┴ ┴ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘ └─┘┘└┘ └─┘┴└─└─┘┴ ┴ ┴ └─┘ └─┘┴ ┴└─┘┴ ┴ o + if (query.method === 'createEach' && !_.isUndefined(WLModel.fetchRecordsOnCreateEach)) { + if (!_.isBoolean(WLModel.fetchRecordsOnCreateEach)) { + throw new Error('Consistency violation: If specified, expecting `fetchRecordsOnCreateEach` model setting to be `true` or `false`. But instead, got: '+util.inspect(WLModel.fetchRecordsOnCreateEach, {depth:5})+''); + } + + if (!query.meta || query.meta.fetch === undefined) { + // Only bother setting the `fetch` meta key if the model setting is `true`. 
+ // (because otherwise it's `false`, which is the default anyway) + if (WLModel.fetchRecordsOnCreateEach) { + query.meta = query.meta || {}; + query.meta.fetch = WLModel.fetchRecordsOnCreateEach; + } + } + + }//>- + + + // ┌─┐┬─┐┌─┐┌─┐┌─┐┌─┐┌─┐┌┬┐┌─┐ ┌┐┌┌─┐┌┐┌ ┌─┐┌┐ ┬┌─┐┌─┐┌┬┐ ┬┌┬┐ ┌┬┐┌─┐┬ ┌─┐┬─┐┌─┐┌┐┌┌─┐┌─┐ + // ├─┘├┬┘│ │├─┘├─┤│ ┬├─┤ │ ├┤ ││││ ││││───│ │├┴┐ │├┤ │ │───│ ││ │ │ ││ ├┤ ├┬┘├─┤││││ ├┤ + // ┴ ┴└─└─┘┴ ┴ ┴└─┘┴ ┴ ┴ └─┘ ┘└┘└─┘┘└┘ └─┘└─┘└┘└─┘└─┘ ┴ ┴─┴┘ ┴ └─┘┴─┘└─┘┴└─┴ ┴┘└┘└─┘└─┘ + // ┌┬┐┌─┐┌┬┐┌─┐┬ ┌─┐┌─┐┌┬┐┌┬┐┬┌┐┌┌─┐ ┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐ ┌─┐┌─┐┌─┐┬─┐┌─┐┌─┐┬─┐┬┌─┐┌┬┐┌─┐ + // ││││ │ ││├┤ │ └─┐├┤ │ │ │││││ ┬ │ │ │ │ ├─┤├┤ ├─┤├─┘├─┘├┬┘│ │├─┘├┬┘│├─┤ │ ├┤ + // ┴ ┴└─┘─┴┘└─┘┴─┘ └─┘└─┘ ┴ ┴ ┴┘└┘└─┘ ┴ └─┘ ┴ ┴ ┴└─┘ ┴ ┴┴ ┴ ┴└─└─┘┴ ┴└─┴┴ ┴ ┴ └─┘ + // ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬ ┌─ ┌─┐┌─┐┬─┐ ┌┬┐┌─┐┌┐┌┌─┐┌─┐ ─┐ + // │││├┤ │ ├─┤ ├┴┐├┤ └┬┘ │ ├┤ │ │├┬┘ ││││ │││││ ┬│ │ │ + // ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴ └─ └ └─┘┴└─ ┴ ┴└─┘┘└┘└─┘└─┘ ─┘ + // Set the `modelsNotUsingObjectIds` meta key of the query based on + // the `dontUseObjectIds` model setting of relevant models. + // + // Note that if no models have this flag set, the meta key won't be set at all. + // This avoids the weirdness of seeing this key pop up in a query for a non-mongo adapter. + // + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Remove the need for this mongo-specific code by respecting this model setting + // in the adapter itself. (To do that, Waterline needs to be sending down actual WL models + // though. See the waterline.js file in this repo for notes about that.) 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + (function() { + var modelsNotUsingObjectIds = _.reduce(orm.collections, function(memo, WLModel) { + if (WLModel.dontUseObjectIds === true) { memo.push(WLModel.identity); } + return memo; + }, []); + if (modelsNotUsingObjectIds.length > 0) { + query.meta = query.meta || {}; + query.meta.modelsNotUsingObjectIds = modelsNotUsingObjectIds; + } + })(); + + + // Next, check specific, common `meta` keys, to make sure they're valid. + // > (Not all `meta` keys can be checked, obviously, because there could be **anything** + // > in there, such as meta keys proprietary to particular adapters. But certain core + // > `meta` keys can be properly verified. Currently, we only validate _some_ of the + // > ones that are more commonly used.) + + if (query.meta !== undefined) { + + // ┌─┐┌─┐┌┬┐┌─┐┬ ┬ + // ├┤ ├┤ │ │ ├─┤ + // └ └─┘ ┴ └─┘┴ ┴ + if (query.meta.fetch !== undefined) { + + if (!_.isBoolean(query.meta.fetch)) { + throw buildUsageError( + 'E_INVALID_META', + 'If provided, `fetch` should be either `true` or `false`.', + query.using + ); + }//• + + // If this is a findOrCreate/updateOne/destroyOne/archiveOne query, + // make sure that the `fetch` meta key hasn't been explicitly set + // (because that wouldn't make any sense). + if (_.contains(['findOrCreate', 'updateOne', 'destroyOne', 'archiveOne'], query.method)) { + console.warn( + 'warn: `fetch` is unnecessary when calling .'+query.method+'(). '+ + 'If successful, .'+query.method+'() *always* returns the affected record.' + ); + }//fi + + }//fi + + + // ┌┬┐┬ ┬┌┬┐┌─┐┌┬┐┌─┐ ┌─┐┬─┐┌─┐┌─┐ + // ││││ │ │ ├─┤ │ ├┤ ├─┤├┬┘│ ┬└─┐ + // ┴ ┴└─┘ ┴ ┴ ┴ ┴ └─┘ ┴ ┴┴└─└─┘└─┘ + // + // EXPERIMENTAL: The `mutateArgs` meta key enabled optimizations by preventing + // unnecessary cloning of arguments. + // + // > Note that this is ONLY respected at the stage 2 level! 
+ // > That is, it doesn't matter if this meta key is set or not when you call adapters. + // + // > PLEASE DO NOT RELY ON `mutateArgs` IN YOUR OWN CODE- IT COULD CHANGE + // > AT ANY TIME AND BREAK YOUR APP OR PLUGIN! + if (query.meta.mutateArgs !== undefined) { + + if (!_.isBoolean(query.meta.mutateArgs)) { + throw buildUsageError( + 'E_INVALID_META', + 'If provided, `mutateArgs` should be either `true` or `false`.', + query.using + ); + }//• + + }//fi + + + // ┌┬┐┌─┐┌─┐┬─┐┬ ┬┌─┐┌┬┐ + // ││├┤ │ ├┬┘└┬┘├─┘ │ + // ─┴┘└─┘└─┘┴└─ ┴ ┴ ┴ + if (query.meta.decrypt !== undefined) { + + if (!_.isBoolean(query.meta.decrypt)) { + throw buildUsageError( + 'E_INVALID_META', + 'If provided, `decrypt` should be either `true` or `false`.', + query.using + ); + }//• + + }//fi + + + // ┌─┐┌┐┌┌─┐┬─┐┬ ┬┌─┐┌┬┐┬ ┬┬┌┬┐┬ ┬ + // ├┤ ││││ ├┬┘└┬┘├─┘ │ ││││ │ ├─┤ + // └─┘┘└┘└─┘┴└─ ┴ ┴ ┴ └┴┘┴ ┴ ┴ ┴ + if (query.meta.encryptWith !== undefined) { + + if (!query.meta.encryptWith || !_.isString(query.meta.encryptWith)) { + throw buildUsageError( + 'E_INVALID_META', + 'If provided, `encryptWith` should be a non-empty string (the name of '+ + 'one of the configured data encryption keys).', + query.using + ); + }//• + + }//fi + + // ┌─┐┬┌─┬┌─┐┌─┐┌┐┌┌─┐┬─┐┬ ┬┌─┐┌┬┐┬┌─┐┌┐┌ + // └─┐├┴┐│├─┘├┤ ││││ ├┬┘└┬┘├─┘ │ ││ ││││ + // └─┘┴ ┴┴┴ └─┘┘└┘└─┘┴└─ ┴ ┴ ┴ ┴└─┘┘└┘ + // + // EXPERIMENTAL: The `skipEncryption` meta key prevents encryption. + // (see the implementation of findOrCreate() for more information) + // + // > PLEASE DO NOT RELY ON `skipEncryption` IN YOUR OWN CODE- IT COULD + // > CHANGE AT ANY TIME AND BREAK YOUR APP OR PLUGIN! 
+ if (query.meta.skipEncryption !== undefined) { + + if (!_.isBoolean(query.meta.skipEncryption)) { + throw buildUsageError( + 'E_INVALID_META', + 'If provided, `skipEncryption` should be true or false.', + query.using + ); + }//• + + }//fi + + // ┌┐ ┌─┐┌┬┐┌─┐┬ ┬┌─┐┬┌─┐┌─┐ + // ├┴┐├─┤ │ │ ├─┤└─┐│┌─┘├┤ + // └─┘┴ ┴ ┴ └─┘┴ ┴└─┘┴└─┘└─┘ + if (query.meta.batchSize !== undefined) { + + if (!_.isNumber(query.meta.batchSize) || !isSafeNaturalNumber(query.meta.batchSize)) { + throw buildUsageError( + 'E_INVALID_META', + 'If provided, `batchSize` should be a whole, positive, safe, and natural integer. '+ + 'Instead, got '+util.inspect(query.meta.batchSize, {depth: null})+'.', + query.using + ); + }//• + + if (query.method !== 'stream') { + // FUTURE: consider changing this usage error to a warning instead. + throw buildUsageError( + 'E_INVALID_META', + '`batchSize` cannot be used with .'+query.method+'() -- it is only compatible '+ + 'with the .stream() model method.', + query.using + ); + }//• + + }//fi + + // … + + }//fi + + + + + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + + + // ██████╗██████╗ ██╗████████╗███████╗██████╗ ██╗ █████╗ + // ██╔════╝██╔══██╗██║╚══██╔══╝██╔════╝██╔══██╗██║██╔══██╗ + // ██║ ██████╔╝██║ ██║ █████╗ ██████╔╝██║███████║ + // ██║ ██╔══██╗██║ ██║ ██╔══╝ ██╔══██╗██║██╔══██║ + 
// ╚██████╗██║ ██║██║ ██║ ███████╗██║ ██║██║██║ ██║ + // ╚═════╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝ + // + if (_.contains(queryKeys, 'criteria')) { + + + // ╔═╗╔═╗╔═╗╔═╗╦╔═╗╦ ╔═╗╔═╗╔═╗╔═╗╔═╗ + // ╚═╗╠═╝║╣ ║ ║╠═╣║ ║ ╠═╣╚═╗║╣ ╚═╗ + // ╚═╝╩ ╚═╝╚═╝╩╩ ╩╩═╝ ╚═╝╩ ╩╚═╝╚═╝╚═╝ + // ┌─ ┬ ┌─┐ ┬ ┬┌┐┌┌─┐┬ ┬┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐┌┬┐ ┌─┐┌─┐┌┬┐┌┐ ┬┌┐┌┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ ┌─┐┌─┐ + // │─── │ ├┤ │ ││││└─┐│ │├─┘├─┘│ │├┬┘ │ ├┤ ││ │ │ ││││├┴┐││││├─┤ │ ││ ││││└─┐ │ │├┤ + // └─ ┴o└─┘o └─┘┘└┘└─┘└─┘┴ ┴ └─┘┴└─ ┴ └─┘─┴┘ └─┘└─┘┴ ┴└─┘┴┘└┘┴ ┴ ┴ ┴└─┘┘└┘└─┘ └─┘└ + // ┌─┐┌─┐┬─┐┌┬┐┌─┐┬┌┐┌ ┌─┐┬─┐┬┌┬┐┌─┐┬─┐┬┌─┐ ┌─┐┬ ┌─┐┬ ┬┌─┐┌─┐┌─┐ ┌─┐┌─┐┬─┐ + // │ ├┤ ├┬┘ │ ├─┤││││ │ ├┬┘│ │ ├┤ ├┬┘│├─┤ │ │ ├─┤│ │└─┐├┤ └─┐ ├┤ │ │├┬┘ + // └─┘└─┘┴└─ ┴ ┴ ┴┴┘└┘ └─┘┴└─┴ ┴ └─┘┴└─┴┴ ┴ └─┘┴─┘┴ ┴└─┘└─┘└─┘└─┘ └ └─┘┴└─ + // ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐ ┌┬┐┌─┐┌┬┐┌─┐┬ ┌┬┐┌─┐┌┬┐┬ ┬┌─┐┌┬┐┌─┐ ─┐ + // └─┐├─┘├┤ │ │├┤ ││ ││││ │ ││├┤ │ │││├┤ │ ├─┤│ │ ││└─┐ ───│ + // └─┘┴ └─┘└─┘┴└ ┴└─┘ ┴ ┴└─┘─┴┘└─┘┴─┘ ┴ ┴└─┘ ┴ ┴ ┴└─┘─┴┘└─┘ ─┘ + // + // Next, handle a few special cases that we are careful to fail loudly about. + // + // > Because if we don't, it can cause major confusion. Think about it: in some cases, + // > certain usage can seem intuitive, and like a reasonable enough thing to try out... + // > ...but it might actually be unsupported. + // > + // > When you do try it out, unless it fails LOUDLY, then you could easily end + // > up believing that it is actually doing something. And then, as is true when + // > working w/ any library or framework, you end up with all sorts of weird superstitions + // > and false assumptions that take a long time to wring out of your code base. + // > So let's do our best to prevent that. + + // + // > WARNING: + // > It is really important that we do this BEFORE we normalize the criteria! + // > (Because by then, it'll be too late to tell what was and wasn't included + // > in the original, unnormalized criteria dictionary.) 
+ // + + // If the criteria explicitly specifies `select` or `omit`, then make sure the query method + // is actually compatible with those clauses. + if (_.isObject(query.criteria) && !_.isArray(query.criteria) && (!_.isUndefined(query.criteria.select) || !_.isUndefined(query.criteria.omit))) { + + var PROJECTION_COMPATIBLE_METHODS = ['find', 'findOne', 'stream']; + var isCompatibleWithProjections = _.contains(PROJECTION_COMPATIBLE_METHODS, query.method); + if (!isCompatibleWithProjections) { + throw buildUsageError('E_INVALID_CRITERIA', 'Cannot use `select`/`omit` with this method (`'+query.method+'`).', query.using); + } + + }//>-• + + // If the criteria explicitly specifies `limit`, `skip`, or `sort`, then make sure + // the query method is actually compatible with those clauses. + if (_.isObject(query.criteria) && !_.isArray(query.criteria) && (!_.isUndefined(query.criteria.limit) || !_.isUndefined(query.criteria.skip) || !_.isUndefined(query.criteria.sort))) { + + var PAGINATION_COMPATIBLE_METHODS = ['find', 'stream']; + var isCompatibleWithLimit = _.contains(PAGINATION_COMPATIBLE_METHODS, query.method); + if (!isCompatibleWithLimit) { + throw buildUsageError('E_INVALID_CRITERIA', 'Cannot use `limit`, `skip`, or `sort` with this method (`'+query.method+'`).', query.using); + } + + }//>-• + + // If the criteria is not defined, then in most cases, we treat it like `{}`. + // BUT if this query will be running as a result of an `update()`, or a `destroy()`, + // or an `.archive()`, then we'll be a bit more picky in order to prevent accidents. + if (_.isUndefined(query.criteria) && (query.method === 'update' || query.method === 'destroy' || query.method === 'archive')) { + + throw buildUsageError('E_INVALID_CRITERIA', 'Cannot use this method (`'+query.method+'`) with a criteria of `undefined`. 
(This is just a simple failsafe to help protect your data: if you really want to '+query.method+' ALL records, no problem-- please just be explicit and provide a criteria of `{}`.)', query.using); + + }//>-• + + + + // ╔╦╗╔═╗╔═╗╔═╗╦ ╦╦ ╔╦╗ + // ║║║╣ ╠╣ ╠═╣║ ║║ ║ + // ═╩╝╚═╝╚ ╩ ╩╚═╝╩═╝╩ + // Tolerate this being left undefined by inferring a reasonable default. + // (This will be further processed below.) + if (_.isUndefined(query.criteria)) { + query.criteria = {}; + }//>- + + + + + // ╔╗╔╔═╗╦═╗╔╦╗╔═╗╦ ╦╔═╗╔═╗ ┬ ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ + // ║║║║ ║╠╦╝║║║╠═╣║ ║╔═╝║╣ ┌┼─ ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ + // ╝╚╝╚═╝╩╚═╩ ╩╩ ╩╩═╝╩╚═╝╚═╝ └┘ ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ + // Validate and normalize the provided `criteria`. + try { + query.criteria = normalizeCriteria(query.criteria, query.using, orm, query.meta); + } catch (e) { + switch (e.code) { + + case 'E_HIGHLY_IRREGULAR': + throw buildUsageError('E_INVALID_CRITERIA', e.message, query.using); + + case 'E_WOULD_RESULT_IN_NOTHING': + throw buildUsageError('E_NOOP', 'The provided criteria would not match any records. '+e.message, query.using); + + // If no error code (or an unrecognized error code) was specified, + // then we assume that this was a spectacular failure do to some + // kind of unexpected, internal error on our part. 
+ default: + throw new Error('Consistency violation: Encountered unexpected internal error when attempting to normalize/validate the provided criteria:\n```\n'+util.inspect(query.criteria, {depth:5})+'\n```\nAnd here is the actual error itself:\n```\n'+e.stack+'\n```'); + } + }//>-• + + + // ┌─┐┬ ┬ ┬┌─┐┬ ┬┌─┐ ┌─┐┌─┐┬─┐┌─┐┌─┐ ╦ ╦╔╦╗╦╔╦╗ ┌┬┐┌─┐ ╔╦╗╦ ╦╔═╗ + // ├─┤│ │││├─┤└┬┘└─┐ ├┤ │ │├┬┘│ ├┤ ║ ║║║║║ ║ │ │ │ ║ ║║║║ ║ + // ┴ ┴┴─┘└┴┘┴ ┴ ┴ └─┘ └ └─┘┴└─└─┘└─┘ ╩═╝╩╩ ╩╩ ╩ ┴ └─┘ ╩ ╚╩╝╚═╝ + // ┌─ ┬┌─┐ ┌┬┐┬ ┬┬┌─┐ ┬┌─┐ ┌─┐ ╔═╗╦╔╗╔╔╦╗ ╔═╗╔╗╔╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ ─┐ + // │─── │├┤ │ ├─┤│└─┐ │└─┐ ├─┤ ╠╣ ║║║║ ║║ ║ ║║║║║╣ │─┼┐│ │├┤ ├┬┘└┬┘ ───│ + // └─ ┴└ ┴ ┴ ┴┴└─┘ ┴└─┘ ┴ ┴ ╚ ╩╝╚╝═╩╝ ╚═╝╝╚╝╚═╝ └─┘└└─┘└─┘┴└─ ┴ ─┘ + // Last but not least, if the current method is `findOne`, then set `limit: 2`. + // + // > This is a performance/stability check that prevents accidentally fetching the entire database + // > with queries like `.findOne({})`. If > 1 record is found, the findOne will fail w/ an error + // > anyway, so it only makes sense to fetch _just enough_. + if (query.method === 'findOne') { + + query.criteria.limit = 2; + + }//>- + + // ┌─┐┌┐┌┌─┐┬ ┬┬─┐┌─┐ ╦ ╦╦ ╦╔═╗╦═╗╔═╗ ┌─┐┬ ┌─┐┬ ┬┌─┐┌─┐ ┬┌─┐ ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐ + // ├┤ │││└─┐│ │├┬┘├┤ ║║║╠═╣║╣ ╠╦╝║╣ │ │ ├─┤│ │└─┐├┤ │└─┐ └─┐├─┘├┤ │ │├┤ ││ + // └─┘┘└┘└─┘└─┘┴└─└─┘ ╚╩╝╩ ╩╚═╝╩╚═╚═╝ └─┘┴─┘┴ ┴└─┘└─┘└─┘ ┴└─┘ └─┘┴ └─┘└─┘┴└ ┴└─┘ + // ┌─ ┬┌─┐ ┌┬┐┬ ┬┬┌─┐ ┬┌─┐ ┌─┐ \│/╔═╗╔╗╔╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ ─┐ + // │─── │├┤ │ ├─┤│└─┐ │└─┐ ├─┤ ─ ─║ ║║║║║╣ │─┼┐│ │├┤ ├┬┘└┬┘ ───│ + // └─ ┴└ ┴ ┴ ┴┴└─┘ ┴└─┘ ┴ ┴ o/│\╚═╝╝╚╝╚═╝ └─┘└└─┘└─┘┴└─ ┴ ─┘ + // If this is a `findOne`/`updateOne`/`destroyOne`/`archiveOne` query, + // and the `where` clause is not defined, or if it is `{}`, then fail + // with a usage error (for clarity's sake). 
+ if (_.contains(['findOne','updateOne','destroyOne','archiveOne'], query.method) && _.isEqual(query.criteria.where, {})) { + + throw buildUsageError( + 'E_INVALID_CRITERIA', + 'Cannot `'+query.method+'()` without specifying a more specific `where` clause (the provided `where` clause, `{}`, is too broad).'+ + (query.method === 'findOne' ? ' (If you want to work around this, use `.find().limit(1)`.)' : ''), + query.using + ); + + }//>-• + + + }// >-• + + + + + // ██████╗ ██████╗ ██████╗ ██╗ ██╗██╗ █████╗ ████████╗███████╗███████╗ + // ██╔══██╗██╔═══██╗██╔══██╗██║ ██║██║ ██╔══██╗╚══██╔══╝██╔════╝██╔════╝ + // ██████╔╝██║ ██║██████╔╝██║ ██║██║ ███████║ ██║ █████╗ ███████╗ + // ██╔═══╝ ██║ ██║██╔═══╝ ██║ ██║██║ ██╔══██║ ██║ ██╔══╝ ╚════██║ + // ██║ ╚██████╔╝██║ ╚██████╔╝███████╗██║ ██║ ██║ ███████╗███████║ + // ╚═╝ ╚═════╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝╚══════╝ + // + // Validate/normalize the `populates` query key. + // + // > NOTE: At this point, we know that the `criteria` query key has already been checked/normalized. + if (_.contains(queryKeys, 'populates')) { + + // Tolerate this being left undefined by inferring a reasonable default. + if (_.isUndefined(query.populates)) { + query.populates = {}; + }//>- + + // Verify that `populates` is a dictionary. + if (!_.isObject(query.populates) || _.isArray(query.populates) || _.isFunction(query.populates)) { + throw buildUsageError( + 'E_INVALID_POPULATES', + '`populates` must be a dictionary. But instead, got: '+util.inspect(query.populates, {depth: 1}), + query.using + ); + }//-• + + + // For each key in our `populates` dictionary... + _.each(_.keys(query.populates), function (populateAttrName) { + + // For convenience/consistency, if the RHS of this "populate" directive was set + // to `false`/`undefined`, understand it to mean the same thing as if this particular + // populate directive wasn't included in the first place. 
In other words, strip + // this key from the `populates` dictionary and just return early. + if (query.populates[populateAttrName] === false || _.isUndefined(query.populates[populateAttrName])) { + delete query.populates[populateAttrName]; + return; + }//-• + + + + + // ┬ ┌─┐┌─┐┬┌─ ┬ ┬┌─┐ ╔═╗╔╦╗╔╦╗╦═╗ ╔╦╗╔═╗╔═╗ ┌─┐┌─┐┬─┐ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌ + // │ │ ││ │├┴┐ │ │├─┘ ╠═╣ ║ ║ ╠╦╝ ║║║╣ ╠╣ ├┤ │ │├┬┘ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││ + // ┴─┘└─┘└─┘┴ ┴ └─┘┴ ╩ ╩ ╩ ╩ ╩╚═ ═╩╝╚═╝╚ └ └─┘┴└─ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘ + // Look up the attribute definition for the association being populated. + // (at the same time, validating that an association by this name actually exists in this model definition.) + var populateAttrDef; + try { + populateAttrDef = getAttribute(populateAttrName, query.using, orm); + } catch (e) { + switch (e.code) { + case 'E_ATTR_NOT_REGISTERED': + throw buildUsageError( + 'E_INVALID_POPULATES', + 'Could not populate `'+populateAttrName+'`. '+ + 'There is no attribute named `'+populateAttrName+'` defined in this model.', + query.using + ); + default: throw new Error('Consistency violation: When attempting to populate `'+populateAttrName+'` for this model (`'+query.using+'`), an unexpected error occurred looking up the association\'s definition. This SHOULD never happen. Here is the original error:\n```\n'+e.stack+'\n```'); + } + }// + + + // ┬ ┌─┐┌─┐┬┌─ ┬ ┬┌─┐ ┬┌┐┌┌─┐┌─┐ ┌─┐┌┐┌ ┌┬┐┬ ┬┌─┐ ╔═╗╔╦╗╦ ╦╔═╗╦═╗ ╔╦╗╔═╗╔╦╗╔═╗╦ + // │ │ ││ │├┴┐ │ │├─┘ ││││├┤ │ │ │ ││││ │ ├─┤├┤ ║ ║ ║ ╠═╣║╣ ╠╦╝ ║║║║ ║ ║║║╣ ║ + // ┴─┘└─┘└─┘┴ ┴ └─┘┴ ┴┘└┘└ └─┘ └─┘┘└┘ ┴ ┴ ┴└─┘ ╚═╝ ╩ ╩ ╩╚═╝╩╚═ ╩ ╩╚═╝═╩╝╚═╝╩═╝ + // Determine the identity of the other (associated) model, then use that to make + // sure that the other model's definition is actually registered in our `orm`. 
+ var otherModelIdentity; + if (populateAttrDef.model) { + otherModelIdentity = populateAttrDef.model; + }//‡ + else if (populateAttrDef.collection) { + otherModelIdentity = populateAttrDef.collection; + }//‡ + // Otherwise, this query is invalid, since the attribute with this name is + // neither a "collection" nor a "model" association. + else { + throw buildUsageError( + 'E_INVALID_POPULATES', + 'Could not populate `'+populateAttrName+'`. '+ + 'The attribute named `'+populateAttrName+'` defined in this model (`'+query.using+'`) '+ + 'is not defined as a "collection" or "model" association, and thus cannot '+ + 'be populated. Instead, its definition looks like this:\n'+ + util.inspect(populateAttrDef, {depth: 1}), + query.using + ); + }//>-• + + + + // ┬ ┬┌─┐ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦ ╦ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦╔═╗ + // └┐┌┘└─┐ ╠═╝╠╦╝║║║║╠═╣╠╦╝╚╦╝ ║ ╠╦╝║ ║ ║╣ ╠╦╝║╠═╣ + // └┘ └─┘o ╩ ╩╚═╩╩ ╩╩ ╩╩╚═ ╩ ╚═╝╩╚═╩ ╩ ╚═╝╩╚═╩╩ ╩ + + // If trying to populate an association that is ALSO being omitted (in the primary criteria), + // then we say this is invalid. + // + // > We know that the primary criteria has been normalized already at this point. + // > Note: You can NEVER `select` or `omit` plural associations anyway, but that's + // > already been dealt with above from when we normalized the criteria. + if (_.contains(query.criteria.omit, populateAttrName)) { + throw buildUsageError( + 'E_INVALID_POPULATES', + 'Could not populate `'+populateAttrName+'`. '+ + 'This query also indicates that this attribute should be omitted. '+ + 'Cannot populate AND omit an association at the same time!', + query.using + ); + }//-• + + // If trying to populate an association that was included in an explicit `select` clause + // in the primary criteria, then gracefully modify that select clause so that it is NOT included. + // (An explicit `select` clause is only used for singular associations that AREN'T populated.) + // + // > We know that the primary criteria has been normalized already at this point. 
+ if (query.criteria.select[0] !== '*' && _.contains(query.criteria.select, populateAttrName)) { + _.remove(query.criteria.select, populateAttrName); + }//>- + + + // If trying to populate an association that was ALSO included in an explicit + // `sort` clause in the primary criteria, then don't allow this to be populated. + // + // > We know that the primary criteria has been normalized already at this point. + var isMentionedInPrimarySort = _.any(query.criteria.sort, function (comparatorDirective){ + var sortBy = _.keys(comparatorDirective)[0]; + return (sortBy === populateAttrName); + }); + if (isMentionedInPrimarySort) { + throw buildUsageError( + 'E_INVALID_POPULATES', + 'Could not populate `'+populateAttrName+'`. '+ + 'Cannot populate AND sort by an association at the same time!', + query.using + ); + }//>- + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // Similar to the above... + // + // FUTURE: Verify that trying to populate a association that was ALSO referenced somewhere + // from within the `where` clause in the primary criteria (i.e. as an fk) works properly. + // (This is an uncommon use case, and is not currently officially supported.) + // + // > Note that we already throw out any attempts to filter based on a plural ("collection") + // > association, whether it's populated or not-- but that's taken care of separately in + // > normalizeCriteria(). + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┌┬┐┬ ┬┌─┐ ╦═╗╦ ╦╔═╗ + // │ ├─┤├┤ │ ├┴┐ │ ├─┤├┤ ╠╦╝╠═╣╚═╗ + // └─┘┴ ┴└─┘└─┘┴ ┴ ┴ ┴ ┴└─┘ ╩╚═╩ ╩╚═╝ + + // If this is a singular ("model") association, then it should always have + // an empty dictionary on the RHS. (For this type of association, there is + // always either exactly one associated record, or none of them.) 
+ if (populateAttrDef.model) { + + // Tolerate a subcriteria of `{}`, interpreting it to mean that there is + // really no criteria at all, and that we should just use `true` (the + // default "enabled" value for singular "model" associations.) + if (_.isEqual(query.populates[populateAttrName], {})) { + query.populates[populateAttrName] = true; + } + // Otherwise, this simply must be `true`. Otherwise it's invalid. + else { + + if (query.populates[populateAttrName] !== true) { + throw buildUsageError( + 'E_INVALID_POPULATES', + 'Could not populate `'+populateAttrName+'`. '+ + 'This is a singular ("model") association, which means it never refers to '+ + 'more than _one_ associated record. So passing in subcriteria (i.e. as '+ + 'the second argument to `.populate()`) is not supported for this association, '+ + 'since it generally wouldn\'t make any sense. But that\'s the trouble-- it '+ + 'looks like some sort of a subcriteria (or something) _was_ provided!\n'+ + '(Note that subcriterias consisting ONLY of `omit` or `select` are a special '+ + 'case that _does_ make sense. This usage will be supported in a future version '+ + 'of Waterline.)\n'+ + '\n'+ + 'Here\'s what was passed in:\n'+ + util.inspect(query.populates[populateAttrName], {depth: 5}), + query.using + ); + }//-• + + }//>-• + + } + // Otherwise, this is a plural ("collection") association, so we'll need to + // validate and fully-normalize the provided subcriteria. + else { + + // For compatibility, interpet a subcriteria of `true` to mean that there + // is really no subcriteria at all, and that we should just use the default (`{}`). + // > This will be further expanded into a fully-formed criteria dictionary shortly. + if (query.populates[populateAttrName] === true) { + query.populates[populateAttrName] = {}; + }//>- + + // Track whether `sort` was effectively omitted from the subcriteria. + // (this is used just a little ways down below.) 
+ // + // > Be sure to see "FUTURE (1)" for details about how we might improve this in + // > the future-- it's not a 100% accurate or clean check right now!! + var isUsingDefaultSort = ( + !_.isObject(query.populates[populateAttrName]) || + _.isUndefined(query.populates[populateAttrName].sort) || + _.isEqual(query.populates[populateAttrName].sort, []) + ); + + // Validate and normalize the provided subcriteria. + try { + query.populates[populateAttrName] = normalizeCriteria(query.populates[populateAttrName], otherModelIdentity, orm, query.meta); + } catch (e) { + switch (e.code) { + + case 'E_HIGHLY_IRREGULAR': + throw buildUsageError( + 'E_INVALID_POPULATES', + 'Could not use the specified subcriteria for populating `'+populateAttrName+'`: '+e.message, + // (Tip: Instead of that ^^^, when debugging Waterline itself, replace `e.message` with `e.stack`) + query.using + ); + + case 'E_WOULD_RESULT_IN_NOTHING': + // If the criteria indicates this populate would result in nothing, then set it to + // `false` - a special value indicating that it is a no-op. + // > • In Waterline's operation builder, whenever we see a subcriteria of `false`, + // > we simply skip the populate (i.e. don't factor it in to our stage 3 queries) + // > • And in the transformer, whenever we're putting back together a result set, + // > and we see a subcriteria of `false` from the original stage 2 query, then + // > we ensure that the virtual attributes comes back set to `[]` in the resulting + // > record. + query.populates[populateAttrName] = false; + + // And then return early from this iteration of our loop to skip further checks + // for this populate (since they won't be relevant anyway) + return; + + // If no error code (or an unrecognized error code) was specified, + // then we assume that this was a spectacular failure do to some + // kind of unexpected, internal error on our part. 
+ default: + throw new Error('Consistency violation: Encountered unexpected internal error when attempting to normalize/validate the provided criteria for populating `'+populateAttrName+'`:\n```\n'+util.inspect(query.populates[populateAttrName], {depth:5})+'\n```\nThe following error occurred:\n```\n'+e.stack+'\n```'); + } + }//>-• + + + // ┌─┐┬─┐┌─┐┌┬┐┬ ┬┌─┐┌┬┐┬┌─┐┌┐┌ ┌─┐┬ ┬┌─┐┌─┐┬┌─ + // ├─┘├┬┘│ │ │││ ││ │ ││ ││││ │ ├─┤├┤ │ ├┴┐ + // ┴ ┴└─└─┘─┴┘└─┘└─┘ ┴ ┴└─┘┘└┘ └─┘┴ ┴└─┘└─┘┴ ┴ + // ┌─┐┌─┐┬─┐ ╔╗╔╔═╗╔╗╔ ╔═╗╔═╗╔╦╗╦╔╦╗╦╔═╗╔═╗╔╦╗ ┌─┐┌─┐┌─┐┬ ┬┬ ┌─┐┌┬┐┌─┐┌─┐ + // ├┤ │ │├┬┘ ║║║║ ║║║║───║ ║╠═╝ ║ ║║║║║╔═╝║╣ ║║ ├─┘│ │├─┘│ ││ ├─┤ │ ├┤ └─┐ + // └ └─┘┴└─ ╝╚╝╚═╝╝╚╝ ╚═╝╩ ╩ ╩╩ ╩╩╚═╝╚═╝═╩╝ ┴ └─┘┴ └─┘┴─┘┴ ┴ ┴ └─┘└─┘ + // ┌┬┐┬ ┬┌─┐┌┬┐ ╔═╗╦ ╔═╗╔═╗ ╦ ╦╔═╗╔═╗ ╔═╗╦ ╦╔╗ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦╔═╗ + // │ ├─┤├─┤ │ ╠═╣║ ╚═╗║ ║ ║ ║╚═╗║╣ ╚═╗║ ║╠╩╗║ ╠╦╝║ ║ ║╣ ╠╦╝║╠═╣ + // ┴ ┴ ┴┴ ┴ ┴ ╩ ╩╩═╝╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═╝╚═╝╚═╝╩╚═╩ ╩ ╚═╝╩╚═╩╩ ╩ + // In production, if this check fails, a warning will be logged. + + // Determine if we are populating an association that does not support a fully-optimized populate. + var isAssociationFullyCapable = isCapableOfOptimizedPopulate(populateAttrName, query.using, orm); + + // If so, then make sure we are not attempting to perform a "dangerous" populate-- + // that is, one that is not currently safe using our built-in joining shim. + // (This is related to memory usage, and is a result of the shim's implementation.) + if (!isAssociationFullyCapable) { + + var subcriteria = query.populates[populateAttrName]; + var isPotentiallyDangerous = ( + subcriteria.skip !== 0 || + subcriteria.limit !== (Number.MAX_SAFE_INTEGER||9007199254740991) || + !isUsingDefaultSort + ); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // > FUTURE (1): make this check more restrictive-- not EVERYTHING it prevents is actually + // > dangerous given the current implementation of the shim. 
But in the mean time, + // > better to err on the safe side. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // > FUTURE (2): overcome this by implementing a more complicated batching strategy-- however, + // > this is not a priority right now, since this is only an issue for xD/A associations, + // > which will likely never come up for the majority of applications. Our focus is on the + // > much more common real-world scenario of populating across associations in the same database. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + if (isPotentiallyDangerous) { + + if (process.env.NODE_ENV === 'production') { + console.warn('\n'+ + 'Warning: Attempting to populate `'+populateAttrName+'` with the specified subcriteria,\n'+ + 'but this MAY NOT BE SAFE, depending on the number of records stored in your models.\n'+ + 'Since this association does not support optimized populates (i.e. 
it spans multiple '+'\n'+ + 'datastores, or uses an adapter that does not support native joins), it is not a good '+'\n'+ + 'idea to populate it along with a subcriteria that uses `limit`, `skip`, and/or `sort`-- '+'\n'+ + 'at least not in a production environment.\n'+ + '\n'+ + 'This is because, to satisfy the specified `limit`/`skip`/`sort`, many additional records\n'+ + 'may need to be fetched along the way -- perhaps enough of them to overflow RAM on your server.\n'+ + '\n'+ + 'If you are just using sails-disk during development, or are certain this is not a problem\n'+ + 'based on your application\'s requirements, then you can safely ignore this message.\n'+ + 'But otherwise, to overcome this, either (A) remove or change this subcriteria and approach\n'+ + 'this query a different way (such as multiple separate queries or a native query), or\n'+ + '(B) configure all involved models to use the same datastore, and/or switch to an adapter\n'+ + 'like sails-mysql or sails-postgresql that supports native joins.\n'+ + ' [?] 
See https://sailsjs.com/support for help.\n' + ); + }//fi + + }//fi + + }//fi + + + + }// + + + });// + + }//>-• + + + + + + + + + + // ███╗ ██╗██╗ ██╗███╗ ███╗███████╗██████╗ ██╗ ██████╗ + // ████╗ ██║██║ ██║████╗ ████║██╔════╝██╔══██╗██║██╔════╝ + // ██╔██╗ ██║██║ ██║██╔████╔██║█████╗ ██████╔╝██║██║ + // ██║╚██╗██║██║ ██║██║╚██╔╝██║██╔══╝ ██╔══██╗██║██║ + // ██║ ╚████║╚██████╔╝██║ ╚═╝ ██║███████╗██║ ██║██║╚██████╗ + // ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝╚═╝ ╚═════╝ + // + // █████╗ ████████╗████████╗██████╗ ███╗ ██╗ █████╗ ███╗ ███╗███████╗ + // ██╔══██╗╚══██╔══╝╚══██╔══╝██╔══██╗ ████╗ ██║██╔══██╗████╗ ████║██╔════╝ + // ███████║ ██║ ██║ ██████╔╝ ██╔██╗ ██║███████║██╔████╔██║█████╗ + // ██╔══██║ ██║ ██║ ██╔══██╗ ██║╚██╗██║██╔══██║██║╚██╔╝██║██╔══╝ + // ██║ ██║ ██║ ██║ ██║ ██║ ██║ ╚████║██║ ██║██║ ╚═╝ ██║███████╗ + // ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝ + // + if (_.contains(queryKeys, 'numericAttrName')) { + + if (_.isUndefined(query.numericAttrName)) { + throw buildUsageError( + 'E_INVALID_NUMERIC_ATTR_NAME', + 'Please specify `numericAttrName` (required for this variety of query).', + query.using + ); + } + + if (!_.isString(query.numericAttrName)) { + throw buildUsageError( + 'E_INVALID_NUMERIC_ATTR_NAME', + 'Instead of a string, got: '+util.inspect(query.numericAttrName,{depth:5}), + query.using + ); + } + + // Validate that an attribute by this name actually exists in this model definition. 
+ var numericAttrDef; + try { + numericAttrDef = getAttribute(query.numericAttrName, query.using, orm); + } catch (e) { + switch (e.code) { + case 'E_ATTR_NOT_REGISTERED': + throw buildUsageError( + 'E_INVALID_NUMERIC_ATTR_NAME', + 'There is no attribute named `'+query.numericAttrName+'` defined in this model.', + query.using + ); + default: throw e; + } + }// + + + // If this attempts to use a singular (`model`) association that happens to also + // correspond with an associated model that has a `type: 'number'` primary key, then + // STILL THROW -- but just use a more explicit error message explaining the reason this + // is not allowed (i.e. because it doesn't make any sense to get the sum or average of + // a bunch of ids... and more often than not, this scenario happens due to mistakes in + // userland code. We have yet to see a use case where this is necessary.) + var isSingularAssociationToModelWithNumericPk = numericAttrDef.model && (getAttribute(getModel(numericAttrDef.model, orm).primaryKey, numericAttrDef.model, orm).type === 'number'); + if (isSingularAssociationToModelWithNumericPk) { + throw buildUsageError( + 'E_INVALID_NUMERIC_ATTR_NAME', + 'While the attribute named `'+query.numericAttrName+'` defined in this model IS guaranteed '+ + 'to be a number (because it is a singular association to a model w/ a numeric primary key), '+ + 'it almost certainly shouldn\'t be used for this purpose. If you are seeing this error message, '+ + 'it is likely due to a mistake in userland code, so please check your query.', + query.using + ); + }//-• + + // Validate that the attribute with this name is a number. 
+ if (numericAttrDef.type !== 'number') { + throw buildUsageError( + 'E_INVALID_NUMERIC_ATTR_NAME', + 'The attribute named `'+query.numericAttrName+'` defined in this model is not guaranteed to be a number '+ + '(it should declare `type: \'number\'`).', + query.using + ); + } + + }//>-• + + + + + + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + + + + // ███████╗ █████╗ ██████╗██╗ ██╗ ██████╗ ███████╗ ██████╗ ██████╗ ██████╗ ██████╗ + // ██╔════╝██╔══██╗██╔════╝██║ ██║ ██╔══██╗██╔════╝██╔════╝██╔═══██╗██╔══██╗██╔══██╗ + // █████╗ ███████║██║ ███████║ ██████╔╝█████╗ ██║ ██║ ██║██████╔╝██║ ██║ + // ██╔══╝ ██╔══██║██║ ██╔══██║ ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗██║ ██║ + // ███████╗██║ ██║╚██████╗██║ ██║ ██║ ██║███████╗╚██████╗╚██████╔╝██║ ██║██████╔╝ + // ╚══════╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═════╝ + // + // ██╗ ███████╗ █████╗ ██████╗██╗ ██╗ ██████╗ █████╗ ████████╗ ██████╗██╗ ██╗ + // ██╔╝ ██╔════╝██╔══██╗██╔════╝██║ ██║ ██╔══██╗██╔══██╗╚══██╔══╝██╔════╝██║ ██║ + // ██╔╝ █████╗ ███████║██║ ███████║ ██████╔╝███████║ ██║ ██║ ███████║ + // ██╔╝ ██╔══╝ ██╔══██║██║ ██╔══██║ ██╔══██╗██╔══██║ ██║ ██║ ██╔══██║ + // ██╔╝ ███████╗██║ ██║╚██████╗██║ ██║ ██████╔╝██║ ██║ ██║ ╚██████╗██║ ██║ + // ╚═╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ + // + // 
██╗███████╗██╗ ██╗███╗ ██╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗███████╗██╗ + // ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║██╔════╝╚██╗ + // ██║ █████╗ ██║ ██║██╔██╗ ██║██║ ██║ ██║██║ ██║██╔██╗ ██║███████╗ ██║ + // ██║ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██║ ██║██║ ██║██║╚██╗██║╚════██║ ██║ + // ╚██╗██║ ╚██████╔╝██║ ╚████║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║███████║██╔╝ + // ╚═╝╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚══════╝╚═╝ + // + // If we are expecting either eachBatchFn or eachRecordFn, then make sure + // one or the other is set... but not both! And make sure that, whichever + // one is specified, it is a function. + // + // > This is only a problem if BOTH `eachRecordFn` and `eachBatchFn` are + // > left undefined, or if they are BOTH set. (i.e. xor) + // > See https://gist.github.com/mikermcneil/d1e612cd1a8564a79f61e1f556fc49a6#edge-cases--details + if (_.contains(queryKeys, 'eachRecordFn') || _.contains(queryKeys, 'eachBatchFn')) { + + // -> Both functions were defined + if (!_.isUndefined(query.eachRecordFn) && !_.isUndefined(query.eachBatchFn)) { + + throw buildUsageError( + 'E_INVALID_STREAM_ITERATEE', + 'An iteratee function should be passed in to `.stream()` via either ' + + '`.eachRecord()` or `.eachBatch()` -- but never both. 
Please set one or the other.', + query.using + ); + + } + // -> Only `eachRecordFn` was defined + else if (!_.isUndefined(query.eachRecordFn)) { + + if (!_.isFunction(query.eachRecordFn)) { + throw buildUsageError( + 'E_INVALID_STREAM_ITERATEE', + 'For `eachRecordFn`, instead of a function, got: '+util.inspect(query.eachRecordFn,{depth:5}), + query.using + ); + } + + } + // -> Only `eachBatchFn` was defined + else if (!_.isUndefined(query.eachBatchFn)) { + + if (!_.isFunction(query.eachBatchFn)) { + throw buildUsageError( + 'E_INVALID_STREAM_ITERATEE', + 'For `eachBatchFn`, instead of a function, got: '+util.inspect(query.eachBatchFn,{depth:5}), + query.using + ); + } + + } + // -> Both were left undefined + else { + + throw buildUsageError( + 'E_INVALID_STREAM_ITERATEE', + 'Either `eachRecordFn` or `eachBatchFn` should be defined, but neither of them are.', + query.using + ); + + } + + }//>-• + + + + + + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + + + + + // ███╗ ██╗███████╗██╗ ██╗ ██████╗ ███████╗ ██████╗ ██████╗ ██████╗ ██████╗ + // ████╗ ██║██╔════╝██║ ██║ ██╔══██╗██╔════╝██╔════╝██╔═══██╗██╔══██╗██╔══██╗ + // ██╔██╗ ██║█████╗ ██║ █╗ ██║ ██████╔╝█████╗ ██║ ██║ ██║██████╔╝██║ ██║ + // ██║╚██╗██║██╔══╝ ██║███╗██║ ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗██║ ██║ + // ██║ ╚████║███████╗╚███╔███╔╝ 
██║ ██║███████╗╚██████╗╚██████╔╝██║ ██║██████╔╝ + // ╚═╝ ╚═══╝╚══════╝ ╚══╝╚══╝ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═════╝ + if (_.contains(queryKeys, 'newRecord')) { + + // If this was provided as an array, apprehend it before calling our `normalizeNewRecord()` , + // in order to log a slightly more specific error message. + if (_.isArray(query.newRecord)) { + throw buildUsageError( + 'E_INVALID_NEW_RECORD', + 'Got an array, but expected new record to be provided as a dictionary (plain JavaScript object). '+ + 'Array usage is no longer supported as of Sails v1.0 / Waterline 0.13. Instead, please explicitly '+ + 'call `.createEach()`.', + query.using + ); + }//-• + + try { + query.newRecord = normalizeNewRecord(query.newRecord, query.using, orm, theMomentBeforeFS2Q, query.meta); + } catch (e) { + switch (e.code){ + + case 'E_TYPE': + case 'E_REQUIRED': + case 'E_VIOLATES_RULES': + throw buildUsageError('E_INVALID_NEW_RECORD', e.message, query.using); + + case 'E_HIGHLY_IRREGULAR': + throw buildUsageError('E_INVALID_NEW_RECORD', e.message, query.using); + + default: throw e; + } + }// + + }//>-• + + + + + + // ███╗ ██╗███████╗██╗ ██╗ ██████╗ ███████╗ ██████╗ ██████╗ ██████╗ ██████╗ ███████╗ + // ████╗ ██║██╔════╝██║ ██║ ██╔══██╗██╔════╝██╔════╝██╔═══██╗██╔══██╗██╔══██╗██╔════╝ + // ██╔██╗ ██║█████╗ ██║ █╗ ██║ ██████╔╝█████╗ ██║ ██║ ██║██████╔╝██║ ██║███████╗ + // ██║╚██╗██║██╔══╝ ██║███╗██║ ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗██║ ██║╚════██║ + // ██║ ╚████║███████╗╚███╔███╔╝ ██║ ██║███████╗╚██████╗╚██████╔╝██║ ██║██████╔╝███████║ + // ╚═╝ ╚═══╝╚══════╝ ╚══╝╚══╝ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═════╝ ╚══════╝ + if (_.contains(queryKeys, 'newRecords')) { + + if (_.isUndefined(query.newRecords)) { + throw buildUsageError('E_INVALID_NEW_RECORDS', 'Please specify `newRecords`.', query.using); + }//-• + + if (!_.isArray(query.newRecords)) { + throw buildUsageError( + 'E_INVALID_NEW_RECORDS', + 'Expecting an array but instead, got: 
'+util.inspect(query.newRecords,{depth:5}), + query.using + ); + }//-• + + // If the array of new records contains any `undefined` items, strip them out. + // + // > Note that this does not work: + // > ``` + // > _.remove(query.newRecords, undefined); + // > ``` + _.remove(query.newRecords, function (newRecord){ + return _.isUndefined(newRecord); + }); + + // If the array is empty, bail out now with an E_NOOP error. + // (This will actually not be interpreted as an error. We will just + // pretend it worked.) + // + // > Note that we do this AFTER stripping undefineds. + if (query.newRecords.length === 0) { + throw buildUsageError('E_NOOP', 'No things to create were provided.', query.using); + }//-• + + // Ensure no two items in the `newRecords` array point to the same object reference. + // Why? Multiple references to the same object can get tangly and cause problems downstream + // in Waterline, such as this confusing error message: https://github.com/balderdashy/sails/issues/7266 + // + // On the other hand, simply using `.uniq()` to deduplicate can be somewhat unexpected behavior. + // (Imagine using `let x = {}; await Widget.createEach([x,x,x,x]);` to create four widgets. + // It would be a surprise if it only created one widget.) + if (query.newRecords.length !== _.uniq(query.newRecords).length) { + throw buildUsageError( + 'E_INVALID_NEW_RECORDS', + 'Two or more of the items in the provided array of new records are actually references '+ + 'to the same JavaScript object (`.createEach(x,y,x)`). This is too ambiguous, since it '+ + 'could mean creating much more or much less data than intended. Instead, pass in distinct '+ + 'dictionaries for each new record you would like to create (`.createEach({},{},x,y,z)`).', + query.using + ); + }//-• + + // Validate and normalize each new record in the provided array. 
+ query.newRecords = _.map(query.newRecords, function (newRecord){ + + try { + return normalizeNewRecord(newRecord, query.using, orm, theMomentBeforeFS2Q, query.meta); + } catch (e) { + switch (e.code){ + + case 'E_TYPE': + case 'E_REQUIRED': + case 'E_VIOLATES_RULES': + throw buildUsageError( + 'E_INVALID_NEW_RECORDS', + 'Could not use one of the provided new records: '+e.message, + query.using + ); + + case 'E_HIGHLY_IRREGULAR': + throw buildUsageError( + 'E_INVALID_NEW_RECORDS', + 'Could not use one of the provided new records: '+e.message, + query.using + ); + + default: throw e; + } + }// + + });// + + }//>-• + + + + + + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + + + // ██╗ ██╗ █████╗ ██╗ ██╗ ██╗███████╗███████╗ + // ██║ ██║██╔══██╗██║ ██║ ██║██╔════╝██╔════╝ + // ██║ ██║███████║██║ ██║ ██║█████╗ ███████╗ + // ╚██╗ ██╔╝██╔══██║██║ ██║ ██║██╔══╝ ╚════██║ + // ╚████╔╝ ██║ ██║███████╗╚██████╔╝███████╗███████║ + // ╚═══╝ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚══════╝╚══════╝ + // + // ████████╗ ██████╗ ███████╗███████╗████████╗ + // ╚══██╔══╝██╔═══██╗ ██╔════╝██╔════╝╚══██╔══╝ + // ██║ ██║ ██║ ███████╗█████╗ ██║ + // ██║ ██║ ██║ ╚════██║██╔══╝ ██║ + // ██║ ╚██████╔╝ ███████║███████╗ ██║ + // ╚═╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ + if (_.contains(queryKeys, 'valuesToSet')) { + + if 
(!_.isObject(query.valuesToSet) || _.isFunction(query.valuesToSet) || _.isArray(query.valuesToSet)) { + throw buildUsageError( + 'E_INVALID_VALUES_TO_SET', + 'Expecting a dictionary (plain JavaScript object) but instead, got: '+util.inspect(query.valuesToSet,{depth:5}), + query.using + ); + }//-• + + // Now loop over and check every key specified in `valuesToSet`. + _.each(_.keys(query.valuesToSet), function (attrNameToSet){ + + // Validate & normalize this value. + // > Note that we could explicitly NOT allow literal arrays of pks to be provided + // > for collection attributes (plural associations) -- by passing in `false`. + // > That said, we currently still allow this. + try { + query.valuesToSet[attrNameToSet] = normalizeValueToSet(query.valuesToSet[attrNameToSet], attrNameToSet, query.using, orm, query.meta); + } catch (e) { + switch (e.code) { + + // If its RHS should be ignored (e.g. because it is `undefined`), then delete this key and bail early. + case 'E_SHOULD_BE_IGNORED': + delete query.valuesToSet[attrNameToSet]; + return; + + + case 'E_TYPE': + case 'E_REQUIRED': + case 'E_VIOLATES_RULES': + throw buildUsageError( + 'E_INVALID_VALUES_TO_SET', + 'Could not use specified `'+attrNameToSet+'`. '+e.message, + query.using + ); + + // For future reference, here are the additional properties we might expose: + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // • For E_TYPE: + // ``` + // throw flaverr({ + // code: 'E_TYPE', + // attrName: attrNameToSet, + // expectedType: e.expectedType + // }, new Error( + // 'The wrong type of data was specified for `'+attrNameToSet+'`. '+e.message + // )); + // ``` + // + // • For E_VIOLATES_RULES: + // ``` + // assert(_.isArray(e.ruleViolations) && e.ruleViolations.length > 0, 'This error should ALWAYS have a non-empty array as its `ruleViolations` property. 
But instead, its `ruleViolations` property is: '+e.ruleViolations+'\nAlso, for completeness/context, here is the error\'s complete stack: '+e.stack); + // throw flaverr({ + // code: 'E_VIOLATES_RULES', + // attrName: attrNameToSet, + // ruleViolations: ruleViolations + // }, new Error( + // 'Could not use specified `'+attrNameToSet+'`. '+e.message + // )); + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + case 'E_HIGHLY_IRREGULAR': + throw buildUsageError( + 'E_INVALID_VALUES_TO_SET', + 'Could not use specified `'+attrNameToSet+'`. '+e.message, + query.using + ); + + default: + throw e; + } + }// + + });// + + + // Now, for each `autoUpdatedAt` attribute, check if there was a corresponding value provided. + // If not, then set the current timestamp as the value being set on the RHS. + _.each(WLModel.attributes, function (attrDef, attrName) { + if (!attrDef.autoUpdatedAt) { return; } + if (!_.isUndefined(query.valuesToSet[attrName])) { return; } + + // -• IWMIH, this is an attribute that has `autoUpdatedAt: true`, + // and no value was explicitly provided for it. + assert(attrDef.type === 'number' || attrDef.type === 'string' || attrDef.type === 'ref', 'If an attribute has `autoUpdatedAt: true`, then it should always have either `type: \'string\'`, `type: \'number\'` or `type: \'ref\'`. But the definition for attribute (`'+attrName+'`) has somehow gotten into this state! This should be impossible, but it has both `autoUpdatedAt: true` AND `type: \''+attrDef.type+'\'`'); + + // Set the value equal to the current timestamp, using the appropriate format. 
+ if (attrDef.type === 'string') { + query.valuesToSet[attrName] = (new Date(theMomentBeforeFS2Q)).toJSON(); + } + else if (attrDef.type === 'ref') { + query.valuesToSet[attrName] = new Date(theMomentBeforeFS2Q); + } + else { + query.valuesToSet[attrName] = theMomentBeforeFS2Q; + } + + });// + + }//>-• + + + + + + + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + + + + + + + // ██████╗ ██████╗ ██╗ ██╗ ███████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗ + // ██╔════╝██╔═══██╗██║ ██║ ██╔════╝██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║ + // ██║ ██║ ██║██║ ██║ █████╗ ██║ ██║ ██║██║ ██║██╔██╗ ██║ + // ██║ ██║ ██║██║ ██║ ██╔══╝ ██║ ██║ ██║██║ ██║██║╚██╗██║ + // ╚██████╗╚██████╔╝███████╗███████╗███████╗╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║ + // ╚═════╝ ╚═════╝ ╚══════╝╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ + // + // █████╗ ████████╗████████╗██████╗ ███╗ ██╗ █████╗ ███╗ ███╗███████╗ + // ██╔══██╗╚══██╔══╝╚══██╔══╝██╔══██╗ ████╗ ██║██╔══██╗████╗ ████║██╔════╝ + // ███████║ ██║ ██║ ██████╔╝ ██╔██╗ ██║███████║██╔████╔██║█████╗ + // ██╔══██║ ██║ ██║ ██╔══██╗ ██║╚██╗██║██╔══██║██║╚██╔╝██║██╔══╝ + // ██║ ██║ ██║ ██║ ██║ ██║ ██║ ╚████║██║ ██║██║ ╚═╝ ██║███████╗ + // ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝ + // Look up the association by this name in this model definition. 
+ if (_.contains(queryKeys, 'collectionAttrName')) { + + if (!_.isString(query.collectionAttrName)) { + throw buildUsageError( + 'E_INVALID_COLLECTION_ATTR_NAME', + 'Instead of a string, got: '+util.inspect(query.collectionAttrName,{depth:5}), + query.using + ); + } + + // Validate that an association by this name actually exists in this model definition. + var associationDef; + try { + associationDef = getAttribute(query.collectionAttrName, query.using, orm); + } catch (e) { + switch (e.code) { + case 'E_ATTR_NOT_REGISTERED': + throw buildUsageError( + 'E_INVALID_COLLECTION_ATTR_NAME', + 'There is no attribute named `'+query.collectionAttrName+'` defined in this model.', + query.using + ); + default: throw e; + } + }// + + // Validate that the association with this name is a plural ("collection") association. + if (!associationDef.collection) { + throw buildUsageError( + 'E_INVALID_COLLECTION_ATTR_NAME', + 'The attribute named `'+query.collectionAttrName+'` defined in this model is not a plural ("collection") association.', + query.using + ); + } + + }//>-• + + + + + + + // ████████╗ █████╗ ██████╗ ██████╗ ███████╗████████╗ + // ╚══██╔══╝██╔══██╗██╔══██╗██╔════╝ ██╔════╝╚══██╔══╝ + // ██║ ███████║██████╔╝██║ ███╗█████╗ ██║ + // ██║ ██╔══██║██╔══██╗██║ ██║██╔══╝ ██║ + // ██║ ██║ ██║██║ ██║╚██████╔╝███████╗ ██║ + // ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝ ╚═════╝ ╚══════╝ ╚═╝ + // + // ██████╗ ███████╗ ██████╗ ██████╗ ██████╗ ██████╗ ██╗██████╗ ███████╗ + // ██╔══██╗██╔════╝██╔════╝██╔═══██╗██╔══██╗██╔══██╗ ██║██╔══██╗██╔════╝ + // ██████╔╝█████╗ ██║ ██║ ██║██████╔╝██║ ██║ ██║██║ ██║███████╗ + // ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗██║ ██║ ██║██║ ██║╚════██║ + // ██║ ██║███████╗╚██████╗╚██████╔╝██║ ██║██████╔╝ ██║██████╔╝███████║ + // ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═════╝ ╚═╝╚═════╝ ╚══════╝ + if (_.contains(queryKeys, 'targetRecordIds')) { + + + // ╔╗╔╔═╗╦═╗╔╦╗╔═╗╦ ╦╔═╗╔═╗ ┬ ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐ ┌─┐┬┌─ ┬ ┬┌─┐┬ ┌─┐ + // ║║║║ ║╠╦╝║║║╠═╣║ ║╔═╝║╣ ┌┼─ ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ 
├─┤└─┐ ├─┘├┴┐ └┐┌┘├─┤│ └─┐ + // ╝╚╝╚═╝╩╚═╩ ╩╩ ╩╩═╝╩╚═╝╚═╝ └┘ ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ ┴ ┴└─┘ ┴ ┴ ┴ └┘ ┴ ┴┴─┘└─┘ + // Normalize (and validate) the specified target record pk values. + // (if a singular string or number was provided, this converts it into an array.) + // + // > Note that this ensures that they match the expected type indicated by this + // > model's primary key attribute. + try { + var pkAttrDef = getAttribute(WLModel.primaryKey, query.using, orm); + query.targetRecordIds = normalizePkValueOrValues(query.targetRecordIds, pkAttrDef.type); + } catch(e) { + switch (e.code) { + + case 'E_INVALID_PK_VALUE': + throw buildUsageError( + 'E_INVALID_TARGET_RECORD_IDS', + e.message, + query.using + ); + + default: + throw e; + + } + }//< / catch : normalizePkValueOrValues > + + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╔╗╔╔═╗ ╔═╗╔═╗ + // ├─┤├─┤│││ │││ ├┤ ║║║║ ║───║ ║╠═╝ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╝╚╝╚═╝ ╚═╝╩ + // No query that takes target record ids is meaningful without any of said ids. + if (query.targetRecordIds.length === 0) { + throw buildUsageError('E_NOOP', 'No target record ids were provided.', query.using); + }//-• + + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╔═╗╔═╗╔═╗╔═╗╦╔═╗╦ ╔═╗╔═╗╔═╗╔═╗ + // ├─┤├─┤│││ │││ ├┤ ╚═╗╠═╝║╣ ║ ║╠═╣║ ║ ╠═╣╚═╗║╣ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╚═╝╩ ╚═╝╚═╝╩╩ ╩╩═╝ ╚═╝╩ ╩╚═╝╚═╝ + // ┌─┐┌─┐┬─┐ ╔═╗═╗ ╦╔═╗╦ ╦ ╦╔═╗╦╦ ╦╔═╗ ┌┬┐┬ ┬┌─┐ ┬ ┬┌─┐┬ ┬ ┌─┐┌─┐┌─┐┌─┐┌─┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ + // ├┤ │ │├┬┘ ║╣ ╔╩╦╝║ ║ ║ ║╚═╗║╚╗╔╝║╣ │ ││││ │───│││├─┤└┬┘ ├─┤└─┐└─┐│ ││ │├─┤ │ ││ ││││└─┐ + // └ └─┘┴└─ ╚═╝╩ ╚═╚═╝╩═╝╚═╝╚═╝╩ ╚╝ ╚═╝┘ ┴ └┴┘└─┘ └┴┘┴ ┴ ┴ ┴ ┴└─┘└─┘└─┘└─┘┴┴ ┴ ┴ ┴└─┘┘└┘└─┘ + // Next, handle one other special case that we are careful to fail loudly about. + + // If this query's method is `addToCollection` or `replaceCollection`, and if there is MORE THAN ONE target record, + // AND if there is AT LEAST ONE associated id... 
+ var isRelevantMethod = (query.method === 'addToCollection' || query.method === 'replaceCollection'); + var isTryingToSetOneOrMoreAssociatedIds = _.isArray(query.associatedIds) && query.associatedIds.length > 0; + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // ^^Note: If there are zero associated ids, this query may still fail a bit later because of + // physical-layer constraints or Waterline's cascade polyfill (e.g. if the foreign key + // attribute happens to have required: true). Where possible, checks to protect against this + // live in the implementation of the `.replaceCollection()` method. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (query.targetRecordIds.length > 1 && isRelevantMethod && isTryingToSetOneOrMoreAssociatedIds) { + + // Now check to see if this is a two-way, exclusive association. + // If so, then this query is impossible. + // + // > Note that, IWMIH, we already know this association is plural + // > (we checked that above when validating `collectionAttrName`) + var isAssociationExclusive = isExclusive(query.collectionAttrName, query.using, orm); + + if (isAssociationExclusive) { + throw buildUsageError( + 'E_INVALID_TARGET_RECORD_IDS', + 'The `'+query.collectionAttrName+'` association of the `'+query.using+'` model is exclusive, meaning that associated child '+ + 'records cannot belong to the `'+query.collectionAttrName+'` collection of more than one `'+query.using+'` record. '+ + 'You are seeing this error because this query would have tried to share the same child record(s) across the `'+query.collectionAttrName+'` '+ + 'collections of 2 or more different `'+query.using+'` records. To resolve this error, change the query, or change your models '+ + 'to make this association non-exclusive (i.e. use `collection` & `via` on the other side of the association, instead of `model`.) 
'+ + 'In other words, imagine trying to run a query like `Car.replaceCollection([1,2], \'wheels\', [99, 98])`. If a wheel always belongs '+ + 'to one particular car via `wheels`, then this query would be impossible. To make it possible, you\'d have to change your models so '+ + 'that each wheel is capable of being associated with more than one car.', + query.using + ); + }//-• + + }//>-• + + + }//>-• + + + + + + + + + + // █████╗ ███████╗███████╗ ██████╗ ██████╗██╗ █████╗ ████████╗███████╗██████╗ + // ██╔══██╗██╔════╝██╔════╝██╔═══██╗██╔════╝██║██╔══██╗╚══██╔══╝██╔════╝██╔══██╗ + // ███████║███████╗███████╗██║ ██║██║ ██║███████║ ██║ █████╗ ██║ ██║ + // ██╔══██║╚════██║╚════██║██║ ██║██║ ██║██╔══██║ ██║ ██╔══╝ ██║ ██║ + // ██║ ██║███████║███████║╚██████╔╝╚██████╗██║██║ ██║ ██║ ███████╗██████╔╝ + // ╚═╝ ╚═╝╚══════╝╚══════╝ ╚═════╝ ╚═════╝╚═╝╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═════╝ + // + // ██╗██████╗ ███████╗ + // ██║██╔══██╗██╔════╝ + // ██║██║ ██║███████╗ + // ██║██║ ██║╚════██║ + // ██║██████╔╝███████║ + // ╚═╝╚═════╝ ╚══════╝ + if (_.contains(queryKeys, 'associatedIds')) { + + // Look up the ASSOCIATED Waterline model for this query, based on the `collectionAttrName`. + // Then use that to look up the declared type of its primary key. + // + // > Note that, if there are any problems that would prevent us from doing this, they + // > should have already been caught above, and we should never have made it to this point + // > in the code. So i.e. we can proceed with certainty that the model will exist. + // > And since its definition will have already been verified for correctness when + // > initializing Waterline, we can safely assume that it has a primary key, etc. 
+ var associatedPkType = (function(){ + var _associationDef = getAttribute(query.collectionAttrName, query.using, orm); + var _otherModelIdentity = _associationDef.collection; + var AssociatedModel = getModel(_otherModelIdentity, orm); + var _associatedPkDef = getAttribute(AssociatedModel.primaryKey, _otherModelIdentity, orm); + return _associatedPkDef.type; + })(); + + + // ╔╗╔╔═╗╦═╗╔╦╗╔═╗╦ ╦╔═╗╔═╗ ┬ ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐ ┌─┐┬┌─ ┬ ┬┌─┐┬ ┌─┐ + // ║║║║ ║╠╦╝║║║╠═╣║ ║╔═╝║╣ ┌┼─ ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ ├─┤└─┐ ├─┘├┴┐ └┐┌┘├─┤│ └─┐ + // ╝╚╝╚═╝╩╚═╩ ╩╩ ╩╩═╝╩╚═╝╚═╝ └┘ ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ ┴ ┴└─┘ ┴ ┴ ┴ └┘ ┴ ┴┴─┘└─┘ + // Validate the provided set of associated record ids. + // (if a singular string or number was provided, this converts it into an array.) + // + // > Note that this ensures that they match the expected type indicated by this + // > model's primary key attribute. + try { + query.associatedIds = normalizePkValueOrValues(query.associatedIds, associatedPkType); + } catch(e) { + switch (e.code) { + + case 'E_INVALID_PK_VALUE': + throw buildUsageError('E_INVALID_ASSOCIATED_IDS', e.message, query.using); + + default: + throw e; + + } + }//< / catch :: normalizePkValueOrValues > + + + // ╔═╗╔═╗╔═╗╔═╗╦╔═╗╦ ╔═╗╔═╗╔═╗╔═╗╔═╗ + // ╚═╗╠═╝║╣ ║ ║╠═╣║ ║ ╠═╣╚═╗║╣ ╚═╗ + // ╚═╝╩ ╚═╝╚═╝╩╩ ╩╩═╝ ╚═╝╩ ╩╚═╝╚═╝╚═╝ + // ┌─ ┬ ┌─┐ ┬ ┬┌┐┌┌─┐┬ ┬┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐┌┬┐ ┌─┐┌─┐┌┬┐┌┐ ┬┌┐┌┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ + // │─── │ ├┤ │ ││││└─┐│ │├─┘├─┘│ │├┬┘ │ ├┤ ││ │ │ ││││├┴┐││││├─┤ │ ││ ││││└─┐ + // └─ ┴o└─┘o └─┘┘└┘└─┘└─┘┴ ┴ └─┘┴└─ ┴ └─┘─┴┘ └─┘└─┘┴ ┴└─┘┴┘└┘┴ ┴ ┴ ┴└─┘┘└┘└─┘ + // ┌─┐┌─┐┬─┐ ┌─┐┌─┐┬─┐┌┬┐┌─┐┬┌┐┌ ┌┬┐┌─┐┌┬┐┌─┐┬ ┌┬┐┌─┐┌┬┐┬ ┬┌─┐┌┬┐┌─┐ ─┐ + // ├┤ │ │├┬┘ │ ├┤ ├┬┘ │ ├─┤││││ ││││ │ ││├┤ │ │││├┤ │ ├─┤│ │ ││└─┐ ───│ + // └ └─┘┴└─ └─┘└─┘┴└─ ┴ ┴ ┴┴┘└┘ ┴ ┴└─┘─┴┘└─┘┴─┘ ┴ ┴└─┘ ┴ ┴ ┴└─┘─┴┘└─┘ ─┘ + // + // Handle the case where this is a no-op. + // An empty array is only a no-op if this query's method is `removeFromCollection` or `addToCollection`. 
+ var isQueryMeaningfulWithNoAssociatedIds = (query.method === 'removeFromCollection' || query.method === 'addToCollection'); + if (query.associatedIds.length === 0 && isQueryMeaningfulWithNoAssociatedIds) { + throw buildUsageError('E_NOOP', 'No associated ids were provided.', query.using); + }//-• + + }//>-• + + + + + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- + //-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- - + // if (process.env.NODE_ENV !== 'production') { + // console.timeEnd('forgeStageTwoQuery'); + // } + + // console.log('\n\n****************************\n\n\n********\nStage 2 query: ',util.inspect(query,{depth:5}),'\n^^^^^^^^^^^^^^^^^^^^^^^^^^^^'); + + // -- + // The provided "stage 1 query guts" dictionary is now a logical protostatement ("stage 2 query"). + // + // Do not return anything. + return; + +}; + + + + + + + +/** + * To quickly do an ad-hoc test of this utility from the Node REPL... + * (~7ms latency, Nov 22, 2016) + */ + +/*``` +q = { using: 'user', method: 'find', criteria: {where: {id: '3d'}, limit: 3} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true } }, primaryKey: 'id', hasSchema: false } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + + +/** + * Now a slightly more complex example... 
+ * (~8ms latency, Nov 22, 2016) + */ + +/*``` +q = { using: 'user', method: 'find', populates: {pets: {}}, criteria: {where: {id: '3d'}, limit: 3} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: false }, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: true } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + + +/** + * Now a simple `create`... + * (also demonstrates behavior of createdAt/updatedAt on create) + */ + +/*``` +q = { using: 'user', method: 'create', newRecord: { id: 3, age: 32, foo: 4 } }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, createdAt: { autoCreatedAt: true, type: 'string' }, updatedAt: { autoUpdatedAt: true, type: 'number' }, age: { type: 'number', required: false }, foo: { type: 'string', required: true }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: true}, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: true } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + + +/** + * Now a simple `update`... 
+ * (also demonstrates behavior of updatedAt on update) + */ + +/*``` +q = { using: 'user', method: 'update', valuesToSet: { id: 'asdfasdf', age: 32, foo: 4 } }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, createdAt: { autoCreatedAt: true, required: false, type: 'string' }, updatedAt: { autoUpdatedAt: true, required: false, type: 'number' }, age: { type: 'number', required: false }, foo: { type: 'string', required: true }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: true}, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: true } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + + +/** + * Mongo-style `sort` clause semantics... + */ + +/*``` +q = { using: 'user', method: 'update', criteria: { sort: { age: -1 } }, valuesToSet: { id: 'wat', age: null, foo: 4 } }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, age: { type: 'number', required: false, defaultsTo: 99 }, foo: { type: 'string', required: true }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: true}, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: true } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + +/** + * `where` fracturing... + */ + +/*``` +q = { using: 'user', method: 'find', criteria: {where: {id: '3d', foo: 'bar'}, limit: 3} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true } }, primaryKey: 'id', hasSchema: false } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + +/** + * Another fracturing test case, this time with fracturing of modifiers within a multi-key, complex filter... 
+ */ + +/*``` +q = { using: 'user', method: 'find', criteria: {where: {id: '3d', foo: { startsWith: 'b', contains: 'bar'} }, limit: 3} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true } }, primaryKey: 'id', hasSchema: false } } }); console.log(util.inspect(q,{depth:7})); +```*/ + +/** + * to demonstrate that you cannot both populate AND sort by an attribute at the same time... + */ + +/*``` +q = { using: 'user', method: 'find', populates: {mom: {}, pets: { sort: [{id: 'DESC'}] }}, criteria: {where: {}, limit: 3, sort: 'mom ASC'} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, mom: { model: 'user' }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: false }, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: true } } }); console.log(util.inspect(q,{depth:5})); +```*/ + +/** + * to demonstrate that you cannot sort by a plural association... + */ + +/*``` +q = { using: 'user', method: 'find', populates: {pets: { sort: [{id: 'DESC'}] }}, criteria: {where: {and: [{id: '3d'}, {or: [{id: 'asdf'}]} ]}, limit: 3, sort: 'pets asc'} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: false }, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: true } } }); console.log(util.inspect(q,{depth:5})); +```*/ + +/** + * to demonstrate constraint normalization, and that it DOES NOT do full pk values checks... 
+ * (this is on purpose -- see https://docs.google.com/spreadsheets/d/1whV739iW6O9SxRZLCIe2lpvuAUqm-ie7j7tn_Pjir3s/edit#gid=1814738146) + */ + +/*``` +q = { using: 'user', method: 'find', criteria: {where: {id: '3.5'}, limit: 3} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: false } } }); console.log(util.inspect(q,{depth:5})); +```*/ + +/** + * to demonstrate schema-aware normalization of modifiers... + */ + +/*``` +q = { using: 'user', method: 'find', criteria: {where: {id: { '>': '5' } }, limit: 3} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: false } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + + +/** + * to demonstrate expansion and escaping in string search modifiers... + */ + +/*``` +q = { using: 'user', method: 'find', criteria: {where: {foo: { 'contains': '100%' } }, limit: 3} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: false } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + + +/** + * to demonstrate how Date instances behave in criteria, and how they depend on the schema... 
+ */ + +/*``` +q = { using: 'user', method: 'find', criteria: {where: {foo: { '>': new Date() }, createdAt: { '>': new Date() }, updatedAt: { '>': new Date() } }, limit: 3} }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'number', required: true, unique: true }, createdAt: { type: 'number', required: false }, updatedAt: { type: 'string', required: false } }, primaryKey: 'id', hasSchema: false } } }); console.log(util.inspect(q,{depth:5})); +```*/ + + + +/** + * to demonstrate propagation of cascadeOnDestroy and fetchRecordsOnDestroy model settings + */ + +/*``` +q = { using: 'user', method: 'destroy', criteria: { sort: 'age DESC' } }; require('./lib/waterline/utils/query/forge-stage-two-query')(q, { collections: { user: { attributes: { id: { type: 'string', required: true, unique: true }, age: { type: 'number', required: false, defaultsTo: 99 }, foo: { type: 'string', required: true }, pets: { collection: 'pet' } }, primaryKey: 'id', hasSchema: true, fetchRecordsOnDestroy: true, cascadeOnDestroy: true}, pet: { attributes: { id: { type:'number', required: true, unique: true } }, primaryKey: 'id', hasSchema: true } } }); console.log(util.inspect(q,{depth:5})); +```*/ diff --git a/lib/waterline/utils/query/get-query-modifier-methods.js b/lib/waterline/utils/query/get-query-modifier-methods.js new file mode 100644 index 000000000..8e0bb2b14 --- /dev/null +++ b/lib/waterline/utils/query/get-query-modifier-methods.js @@ -0,0 +1,764 @@ +/** + * Module dependencies + */ + +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var expandWhereShorthand = require('./private/expand-where-shorthand'); + + +/** + * Module constants + */ + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// FUTURE: Consider pulling these out into their own files. 
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + +var BASELINE_Q_METHODS = { + + /** + * Pass special metadata (a dictionary of "meta keys") down to Waterline core, + * and all the way to the adapter that won't be processed or touched by Waterline. + * + * > Note that we use `_wlQueryInfo.meta` internally because we're already using + * > `.meta()` as a method! In an actual S2Q, this key continues to be called `meta`. + */ + + meta: function(metadata) { + + // If meta already exists, merge on top of it. + // (this is important for when this method is combined with other things + // like .usingConnection() that mutate meta keys) + if (this._wlQueryInfo.meta) { + _.extend(this._wlQueryInfo.meta, metadata); + } + else { + this._wlQueryInfo.meta = metadata; + } + + return this; + }, + + + /** + * Pass an active database connection down to the query. + */ + + usingConnection: function(db) { + this._wlQueryInfo.meta = this._wlQueryInfo.meta || {}; + this._wlQueryInfo.meta.leasedConnection = db; + return this; + } + +}; + + + + +var STREAM_Q_METHODS = { + + /** + * Add an iteratee to the query + * + * @param {Function} iteratee + * @returns {Query} + */ + + eachRecord: function(iteratee) { + assert(this._wlQueryInfo.method === 'stream', 'Cannot chain `.eachRecord()` onto the `.'+this._wlQueryInfo.method+'()` method. The `.eachRecord()` method is only chainable to `.stream()`. (In fact, this shouldn\'t even be possible! So the fact that you are seeing this message at all is, itself, likely due to a bug in Waterline.)'); + + this._wlQueryInfo.eachRecordFn = iteratee; + return this; + }, + + /** + * Add an iteratee to the query + * + * @param {Number|Function} batchSizeOrIteratee + * @param {Function} iteratee + * @returns {Query} + */ + + eachBatch: function(batchSizeOrIteratee, iteratee) { + assert(this._wlQueryInfo.method === 'stream', 'Cannot chain `.eachBatch()` onto the `.'+this._wlQueryInfo.method+'()` method. 
The `.eachBatch()` method is only chainable to `.stream()`. (In fact, this shouldn\'t even be possible! So the fact that you are seeing this message at all is, itself, likely due to a bug in Waterline.)'); + + if (arguments.length > 2) { + throw new Error('Invalid usage for `.eachBatch()` -- no more than 2 arguments should be passed in.'); + }//• + + if (iteratee === undefined) { + this._wlQueryInfo.eachBatchFn = batchSizeOrIteratee; + } else { + this._wlQueryInfo.eachBatchFn = iteratee; + + // Apply custom batch size: + // > If meta already exists, merge on top of it. + // > (this is important for when this method is combined with .meta()/.usingConnection()/etc) + if (this._wlQueryInfo.meta) { + _.extend(this._wlQueryInfo.meta, { batchSize: batchSizeOrIteratee }); + } + else { + this._wlQueryInfo.meta = { batchSize: batchSizeOrIteratee }; + } + } + + return this; + }, + +}; + +var SET_Q_METHODS = { + + /** + * Add values to be used in update or create query + * + * @param {Dictionary} values + * @returns {Query} + */ + + set: function(values) { + + if (this._wlQueryInfo.method === 'create') { + console.warn( + 'Deprecation warning: In future versions of Waterline, the use of .set() with .create()\n'+ + 'will no longer be supported. In the past, you could use .set() to provide the initial\n'+ + 'skeleton of a new record to create (like `.create().set({})`)-- but really .set() should\n'+ + 'only be used with .update(). So instead, please change this code so that it just passes in\n'+ + 'the initial new record as the first argument to `.create().`' + ); + this._wlQueryInfo.newRecord = values; + } + else if (this._wlQueryInfo.method === 'createEach') { + console.warn( + 'Deprecation warning: In future versions of Waterline, the use of .set() with .createEach()\n'+ + 'will no longer be supported. 
In the past, you could use .set() to provide an array of\n'+ + 'new records to create (like `.createEach().set([{}, {}])`)-- but really .set() was designed\n'+ + 'to be used with .update() only. So instead, please change this code so that it just\n'+ + 'passes in the initial new record as the first argument to `.createEach().`' + ); + this._wlQueryInfo.newRecords = values; + } + else { + this._wlQueryInfo.valuesToSet = values; + } + + return this; + + }, + +}; + +var COLLECTION_Q_METHODS = { + + /** + * Add associated IDs to the query + * + * @param {Array} associatedIds + * @returns {Query} + */ + + members: function(associatedIds) { + this._wlQueryInfo.associatedIds = associatedIds; + return this; + }, + +}; + + + +var POPULATE_Q_METHODS = { + + + /** + * Modify this query so that it populates all associations (singular and plural). + * + * @returns {Query} + */ + populateAll: function() { + var pleaseDoNotUseThisArgument = arguments[0]; + + if (!_.isUndefined(pleaseDoNotUseThisArgument)) { + console.warn( + 'Deprecation warning: Passing in an argument to `.populateAll()` is no longer supported.\n'+ + '(But interpreting this usage the original way for you this time...)\n'+ + 'Note: If you really want to use the _exact same_ criteria for simultaneously populating multiple\n'+ + 'different plural ("collection") associations, please use separate calls to `.populate()` instead.\n'+ + 'Or, alternatively, instead of using `.populate()`, you can choose to call `.find()`, `.findOne()`,\n'+ + 'or `.stream()` with a dictionary (plain JS object) as the second argument, where each key is the\n'+ + 'name of an association, and each value is either:\n'+ + ' • true (for singular aka "model" associations), or\n'+ + ' • a criteria dictionary (for plural aka "collection" associations)\n' + ); + }//>- + + var self = this; + this._WLModel.associations.forEach(function (associationInfo) { + self.populate(associationInfo.alias, pleaseDoNotUseThisArgument); + }); + return this; + }, + 
+ /** + * .populate() + * + * Set the `populates` key for this query. + * + * > Used for populating associations. + * + * @param {String|Array} key, the key to populate or array of string keys + * @returns {Query} + */ + + populate: function(keyName, subcriteria) { + + assert(this._wlQueryInfo.method === 'find' || this._wlQueryInfo.method === 'findOne' || this._wlQueryInfo.method === 'stream', 'Cannot chain `.populate()` onto the `.'+this._wlQueryInfo.method+'()` method. (In fact, this shouldn\'t even be possible! So the fact that you are seeing this message at all is, itself, likely due to a bug in Waterline.)'); + + // Backwards compatibility for arrays passed in as `keyName`. + if (_.isArray(keyName)) { + console.warn( + 'Deprecation warning: `.populate()` no longer accepts an array as its first argument.\n'+ + 'Please use separate calls to `.populate()` instead. Or, alternatively, instead of\n'+ + 'using `.populate()`, you can choose to call `.find()`, `.findOne()` or `.stream()`\n'+ + 'with a dictionary (plain JS object) as the second argument, where each key is the\n'+ + 'name of an association, and each value is either:\n'+ + ' • true (for singular aka "model" associations), or\n'+ + ' • a criteria dictionary (for plural aka "collection" associations)\n'+ + '(Interpreting this usage the original way for you this time...)\n' + ); + var self = this; + _.each(keyName, function(populate) { + self.populate(populate, subcriteria); + }); + return this; + }//-• + + // Verify that we're dealing with a semi-reasonable string. + // (This is futher validated) + if (!keyName || !_.isString(keyName)) { + throw new Error('Invalid usage for `.populate()` -- first argument should be the name of an assocation.'); + } + + // If this is the first time, make the `populates` query key an empty dictionary. + if (_.isUndefined(this._wlQueryInfo.populates)) { + this._wlQueryInfo.populates = {}; + } + + // Then, if subcriteria was specified, use it. 
+ if (!_.isUndefined(subcriteria)){ + this._wlQueryInfo.populates[keyName] = subcriteria; + } + else { + // (Note: even though we set {} regardless, even when it should really be `true` + // if it's a singular association, that's ok because it gets silently normalized + // in FS2Q.) + this._wlQueryInfo.populates[keyName] = {}; + } + + return this; + }, + +}; + + + +var PAGINATION_Q_METHODS = { + + /** + * Add a `limit` clause to the query's criteria. + * + * @param {Number} number to limit + * @returns {Query} + */ + + limit: function(limit) { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.limit = limit; + + return this; + }, + + /** + * Add a `skip` clause to the query's criteria. + * + * @param {Number} number to skip + * @returns {Query} + */ + + skip: function(skip) { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.skip = skip; + + return this; + }, + + + /** + * .paginate() + * + * Add a `skip`+`limit` clause to the query's criteria + * based on the specified page number (and optionally, + * the page size, which defaults to 30 otherwise.) + * + * > This method is really just a little dollop of syntactic sugar. + * + * ``` + * Show.find({ category: 'home-and-garden' }) + * .paginate(0) + * .exec(...) + * ``` + * + * -OR- (for backwards compat.) + * ``` + * Show.find({ category: 'home-and-garden' }) + * .paginate({ page: 0, limit: 30 }) + * .exec(...) + * ``` + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @param {Number} pageNumOrOpts + * @param {Number?} pageSize + * + * -OR- + * + * @param {Number|Dictionary} pageNumOrOpts + * @property {Number} page [the page num. 
(backwards compat.)] + * @property {Number?} limit [the page size (backwards compat.)] + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @returns {Query} + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + paginate: function(pageNumOrOpts, pageSize) { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + // Interpret page number. + var pageNum; + // If not specified... + if (_.isUndefined(pageNumOrOpts)) { + console.warn( + 'Please always specify a `page` when calling .paginate() -- for example:\n'+ + '```\n'+ + 'var first30Boats = await Boat.find()\n'+ + '.sort(\'wetness DESC\')\n'+ + '.paginate(0, 30)\n'+ + '```\n'+ + '(In the mean time, assuming the first page (#0)...)' + ); + pageNum = 0; + } + // If dictionary... (temporary backwards-compat.) + else if (_.isObject(pageNumOrOpts)) { + pageNum = pageNumOrOpts.page || 0; + console.warn( + 'Deprecation warning: Passing in a dictionary (plain JS object) to .paginate()\n'+ + 'is no longer supported -- instead, please use:\n'+ + '```\n'+ + '.paginate(pageNum, pageSize)\n'+ + '```\n'+ + '(In the mean time, interpreting this as page #'+pageNum+'...)' + ); + } + // Otherwise, assume it's the proper usage. + else { + pageNum = pageNumOrOpts; + } + + + // Interpret the page size (number of records per page). + if (!_.isUndefined(pageSize)) { + if (!_.isNumber(pageSize)) { + console.warn( + 'Unrecognized usage for .paginate() -- if specified, 2nd argument (page size)\n'+ + 'should be a number like 10 (otherwise, it defaults to 30).\n'+ + '(Ignoring this and switching to a page size of 30 automatically...)' + ); + pageSize = 30; + } + } + else if (_.isObject(pageNumOrOpts) && !_.isUndefined(pageNumOrOpts.limit)) { + // Note: IWMIH, then we must have already logged a deprecation warning above-- + // so no need to do it again. 
+ pageSize = pageNumOrOpts.limit || 30; + } + else { + // Note that this default is the same as the default batch size used by `.stream()`. + pageSize = 30; + } + + // If page size is Infinity, then bail out now without doing anything. + // (Unless of course, this is a page other than the first-- that would be an error, + // because ordinals beyond infinity don't exist in real life) + if (pageSize === Infinity) { + if (pageNum !== 0) { + console.warn( + 'Unrecognized usage for .paginate() -- if 2nd argument (page size) is Infinity,\n'+ + 'then the 1st argument (page num) must be zero, indicating the first page.\n'+ + '(Ignoring this and using page zero w/ an infinite page size automatically...)' + ); + } + return this; + }//-• + + // Now, apply the page size as the limit, and compute & apply the appropriate `skip`. + // (REMEMBER: pages are now zero-indexed!) + this + .skip(pageNum * pageSize) + .limit(pageSize); + + return this; + }, + + + /** + * Add a `sort` clause to the criteria object + * + * @param {Ref} sortClause + * @returns {Query} + */ + + sort: function(sortClause) { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.sort = sortClause; + + return this; + }, +}; + + + +var PROJECTION_Q_METHODS = { + + + /** + * Add projections to the query. + * + * @param {Array} attributes to select + * @returns {Query} + */ + + select: function(selectAttributes) { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.select = selectAttributes; + + return this; + }, + + /** + * Add an omit clause to the query's criteria. 
+ * + * @param {Array} attributes to select + * @returns {Query} + */ + omit: function(omitAttributes) { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.omit = omitAttributes; + + return this; + }, + +}; + + + +var FILTER_Q_METHODS = { + + + /** + * Add a `where` clause to the query's criteria. + * + * @param {Dictionary} criteria to append + * @returns {Query} + */ + + where: function(whereCriteria) { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.where = whereCriteria; + + return this; + }, + +}; + + + +var FETCH_Q_METHODS = { + + + /** + * Add `fetch: true` to the query's `meta`. + * + * @returns {Query} + */ + + fetch: function() { + + if (arguments.length > 0) { + throw new Error('Invalid usage for `.fetch()` -- no arguments should be passed in.'); + } + + // If meta already exists, merge on top of it. + // (this is important for when this method is combined with .meta()/.usingConnection()/etc) + if (this._wlQueryInfo.meta) { + _.extend(this._wlQueryInfo.meta, { fetch: true }); + } + else { + this._wlQueryInfo.meta = { fetch: true }; + } + + return this; + }, + +}; + + + +var DECRYPT_Q_METHODS = { + + + /** + * Add `decrypt: true` to the query's `meta`. + * + * @returns {Query} + */ + + decrypt: function() { + + if (arguments.length > 0) { + throw new Error('Invalid usage for `.decrypt()` -- no arguments should be passed in.'); + } + + // If meta already exists, merge on top of it. 
+ // (this is important for when this method is combined with .meta()/.usingConnection()/etc) + if (this._wlQueryInfo.meta) { + _.extend(this._wlQueryInfo.meta, { decrypt: true }); + } + else { + this._wlQueryInfo.meta = { decrypt: true }; + } + + return this; + }, + + +}; + + +// ██╗ ██╗███╗ ██╗███████╗██╗ ██╗██████╗ ██████╗ ██████╗ ██████╗ ████████╗███████╗██████╗ +// ██║ ██║████╗ ██║██╔════╝██║ ██║██╔══██╗██╔══██╗██╔═══██╗██╔══██╗╚══██╔══╝██╔════╝██╔══██╗ +// ██║ ██║██╔██╗ ██║███████╗██║ ██║██████╔╝██████╔╝██║ ██║██████╔╝ ██║ █████╗ ██║ ██║ +// ██║ ██║██║╚██╗██║╚════██║██║ ██║██╔═══╝ ██╔═══╝ ██║ ██║██╔══██╗ ██║ ██╔══╝ ██║ ██║ +// ╚██████╔╝██║ ╚████║███████║╚██████╔╝██║ ██║ ╚██████╔╝██║ ██║ ██║ ███████╗██████╔╝ +// ╚═════╝ ╚═╝ ╚═══╝╚══════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═════╝ +// +// ███╗ ███╗███████╗████████╗██╗ ██╗ ██████╗ ██████╗ ███████╗ +// ████╗ ████║██╔════╝╚══██╔══╝██║ ██║██╔═══██╗██╔══██╗██╔════╝ +// ██╔████╔██║█████╗ ██║ ███████║██║ ██║██║ ██║███████╗ +// ██║╚██╔╝██║██╔══╝ ██║ ██╔══██║██║ ██║██║ ██║╚════██║ +// ██║ ╚═╝ ██║███████╗ ██║ ██║ ██║╚██████╔╝██████╔╝███████║ +// ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝ +// +var OLD_AGGREGATION_Q_METHODS = { + + /** + * Add the (NO LONGER SUPPORTED) `sum` clause to the criteria. + * + * > This is allowed through purposely, in order to trigger + * > the proper query error in FS2Q. + * + * @returns {Query} + */ + sum: function() { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.sum = arguments[0]; + + return this; + }, + + /** + * Add the (NO LONGER SUPPORTED) `avg` clause to the criteria. + * + * > This is allowed through purposely, in order to trigger + * > the proper query error in FS2Q. 
+ * + * @returns {Query} + */ + avg: function() { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.avg = arguments[0]; + + return this; + }, + + + /** + * Add the (NO LONGER SUPPORTED) `min` clause to the criteria. + * + * > This is allowed through purposely, in order to trigger + * > the proper query error in FS2Q. + * + * @returns {Query} + */ + min: function() { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.min = arguments[0]; + + return this; + }, + + /** + * Add the (NO LONGER SUPPORTED) `max` clause to the criteria. + * + * > This is allowed through purposely, in order to trigger + * > the proper query error in FS2Q. + * + * @returns {Query} + */ + max: function() { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.max = arguments[0]; + + return this; + }, + + /** + * Add the (NO LONGER SUPPORTED) `groupBy` clause to the criteria. + * + * > This is allowed through purposely, in order to trigger + * > the proper query error in FS2Q. + */ + groupBy: function() { + + if (!this._alreadyInitiallyExpandedCriteria) { + this._wlQueryInfo.criteria = expandWhereShorthand(this._wlQueryInfo.criteria); + this._alreadyInitiallyExpandedCriteria = true; + }//>- + + this._wlQueryInfo.criteria.groupBy = arguments[0]; + + return this; + }, + +}; + + + + + + + +/** + * getQueryModifierMethods() + * + * Return a dictionary containing the appropriate query (Deferred) methods + * for the specified category (i.e. model method name). 
+ * + * > For example, calling `getQueryModifierMethods('find')` returns a dictionary + * > of methods like `where` and `select`, as well as the usual suspects + * > like `meta` and `usingConnection`. + * > + * > This never returns generic, universal Deferred methods; i.e. `exec`, + * > `then`, `catch`, and `toPromise`. Those are expected to be supplied + * > by parley. + * + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @param {String} category + * The name of the model method this query is for. + * + * @returns {Dictionary} + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ +module.exports = function getQueryModifierMethods(category){ + + assert(category && _.isString(category), 'A category must be provided as a valid string.'); + + // Set up the initial state of the dictionary that we'll be returning. + var queryMethods = {}; + + // No matter what category this is, we always begin with certain baseline methods. 
+ _.extend(queryMethods, BASELINE_Q_METHODS); + + // But from there, the methods become category specific: + switch (category) { + case 'find': _.extend(queryMethods, FILTER_Q_METHODS, PAGINATION_Q_METHODS, OLD_AGGREGATION_Q_METHODS, PROJECTION_Q_METHODS, POPULATE_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'findOne': _.extend(queryMethods, FILTER_Q_METHODS, PROJECTION_Q_METHODS, POPULATE_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'stream': _.extend(queryMethods, FILTER_Q_METHODS, PAGINATION_Q_METHODS, PROJECTION_Q_METHODS, POPULATE_Q_METHODS, STREAM_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'count': _.extend(queryMethods, FILTER_Q_METHODS); break; + case 'sum': _.extend(queryMethods, FILTER_Q_METHODS); break; + case 'avg': _.extend(queryMethods, FILTER_Q_METHODS); break; + + case 'create': _.extend(queryMethods, SET_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'createEach': _.extend(queryMethods, SET_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'findOrCreate': _.extend(queryMethods, FILTER_Q_METHODS, SET_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + + case 'update': _.extend(queryMethods, FILTER_Q_METHODS, SET_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'updateOne': _.extend(queryMethods, FILTER_Q_METHODS, SET_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'destroy': _.extend(queryMethods, FILTER_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'destroyOne': _.extend(queryMethods, FILTER_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'archive': _.extend(queryMethods, FILTER_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + case 'archiveOne': _.extend(queryMethods, FILTER_Q_METHODS, FETCH_Q_METHODS, DECRYPT_Q_METHODS); break; + + case 'addToCollection': _.extend(queryMethods, COLLECTION_Q_METHODS); break; + case 'removeFromCollection': _.extend(queryMethods, COLLECTION_Q_METHODS); break; + case 'replaceCollection': _.extend(queryMethods, 
COLLECTION_Q_METHODS); break; + + default: throw new Error('Consistency violation: Unrecognized category (model method name): `'+category+'`'); + } + + // Now that we're done, return the new dictionary of methods. + return queryMethods; + +}; diff --git a/lib/waterline/utils/query/help-find.js b/lib/waterline/utils/query/help-find.js new file mode 100644 index 000000000..4debf0951 --- /dev/null +++ b/lib/waterline/utils/query/help-find.js @@ -0,0 +1,758 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var async = require('async'); +var forgeAdapterError = require('./forge-adapter-error'); +var forgeStageThreeQuery = require('./forge-stage-three-query'); +var getModel = require('../ontology/get-model'); +var getAttribute = require('../ontology/get-attribute'); + +/** + * helpFind() + * + * Given a stage 2 "find" or "findOne" query, build and execute a sequence + * of generated stage 3 queries (aka "find" operations)-- and then run them. + * If disparate data sources need to be used, then perform in-memory joins + * as needed. Afterwards, transform the normalized result set into an array + * of records, and (potentially) populate them. + * + * > Fun facts: + * > • This is used for `.find()` and `.findOne()` queries. + * > • This file is sometimes informally known as the "operations runner". + * > • If particlebanana and mikermcneil were trees and you chopped us down, + * > there would be charred, black rings for the months in 2013-2016 we + * > spent figuring out the original implementation of the code in this + * > file, and in the integrator. + * > • It's a key piece of the puzzle when it comes to populating records + * > using the populate polyfill-- for example, when performing any + * > cross-datastore/adapter (xD/A) joins. + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Ref} WLModel + * The live Waterline model. 
+ *
+ * @param {Dictionary} s2q
+ *        Stage two query.
+ *
+ * @param {Error} omen
+ *        Used purely for improving the quality of the stack trace.
+ *        Should be an error instance w/ its stack trace already adjusted.
+ *
+ * @param {Function} done
+ *        @param {Error?} err [if an error occurred]
+ *        @param {Array} records
+ *
+ * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ */
+
+module.exports = function helpFind(WLModel, s2q, omen, done) {
+
+  if (!_.isFunction(done)) {
+    throw new Error('Consistency violation: `done` (4th argument) should be a function');
+  }
+  if (!WLModel) {
+    return done(new Error('Consistency violation: Live Waterline model should be provided as the 1st argument'));
+  }
+  if (!s2q) {
+    return done(new Error('Consistency violation: Stage two query (S2Q) should be provided as the 2nd argument'));
+  }
+  if (!omen) {
+    return done(new Error('Consistency violation: Omen should be provided as the 3rd argument'));
+  }
+
+  // Set up a few, common local vars for convenience / familiarity.
+  var orm = WLModel.waterline;
+
+  // Keep track of any populates which were explicitly set to `false`.
+  // (This is a special indicator that FS2Q adds when a particular subcriteria
+  // turns out to be a no-op. This is important so that we make sure to still
+  // explicitly attach the appropriate base value for the association-- for
+  // example an empty array `[]`. This avoids breaking any userland code which
+  // might be relying on the datatype, such as a `.length`, a `x[n]`, or a loop.)
+  var populatesExplicitlySetToFalse = [];
+  for (var assocAttrName in s2q.populates) {
+    var subcriteria = s2q.populates[assocAttrName];
+    if (subcriteria === false) {
+      populatesExplicitlySetToFalse.push(assocAttrName);
+    }
+  }//∞
+
+  // Build an initial stage three query (s3q) from the incoming stage 2 query (s2q).
+ var parentQuery = forgeStageThreeQuery({ + stageTwoQuery: s2q, + identity: WLModel.identity, + transformer: WLModel._transformer, + originalModels: WLModel.waterline.collections + }); + + // Expose a reference to the entire set of all WL models available + // in the current ORM instance. + var collections = WLModel.waterline.collections; + + var parentDatastoreName = WLModel.datastore; + + // Get a reference to the parent adapter. + var parentAdapter = WLModel._adapter; + + // Now, run whatever queries we need, and merge the results together. + (function _getPopulatedRecords(proceed){ + + // ┌┬┐┌─┐ ┬ ┬┌─┐ ┌┐┌┌─┐┌─┐┌┬┐ ┌─┐┬ ┬┬┌┬┐┌─┐ + // │││ │ │││├┤ │││├┤ ├┤ ││ └─┐├─┤││││ ┌┘ + // ─┴┘└─┘ └┴┘└─┘ ┘└┘└─┘└─┘─┴┘ └─┘┴ ┴┴┴ ┴ o + // First, determine if the parent model's adapter can handle all of the joining. + var doJoinsInParentAdapter = (function () { + // First of all, there must be joins in the query to make this relevant. + return (parentQuery.joins && parentQuery.joins.length) && + // Second, the adapter must support native joins. + _.isFunction(WLModel._adapter.join) && + // And lastly, all of the child models must be on the same datastore. + _.all(parentQuery.joins, function(join) { + // Check the child table in the join (we've already checked the parent table, + // either in a previous iteration or because it's the main parent). + return collections[join.childCollectionIdentity].datastore === WLModel.datastore; + }); + })(); + + // ┌┬┐┌─┐ ┌┐┌┌─┐┌┬┐┬┬ ┬┌─┐ ┬┌─┐┬┌┐┌┌─┐ + // │││ │ │││├─┤ │ │└┐┌┘├┤ ││ │││││└─┐ + // ─┴┘└─┘ ┘└┘┴ ┴ ┴ ┴ └┘ └─┘ └┘└─┘┴┘└┘└─┘ + // If the adapter can handle all of the joining of records itself, great -- we'll just + // send it the one stage 3 query, get the populated records back and continue on. + if (doJoinsInParentAdapter) { + // Run the stage 3 query and proceed. 
+ parentAdapter.join(parentDatastoreName, parentQuery, function (err, rawResultFromAdapter) { + if (err) { + err = forgeAdapterError(err, omen, 'join', WLModel.identity, orm); + return proceed(err); + } + + return proceed(undefined, rawResultFromAdapter); + + });//_∏_ + } + //‡ + // ┬ ┬┌─┐ ┬ ┬┌─┐┌─┐ ┌┐┌┌─┐ ┬┌─┐┬┌┐┌┌─┐ + // │││├┤ ├─┤├─┤┌─┘ ││││ │ ││ │││││└─┐ + // └┴┘└─┘ ┴ ┴┴ ┴└─┘ ┘└┘└─┘ └┘└─┘┴┘└┘└─┘ + // If there are no joins, just run the `find` method on the parent adapter, get the + // results and proceed. + else if (!_.isArray(parentQuery.joins) || parentQuery.joins.length === 0) { + parentAdapter.find(parentDatastoreName, parentQuery, function (err, rawResultFromAdapter) { + if (err) { + err = forgeAdapterError(err, omen, 'find', WLModel.identity, orm); + return proceed(err); + } + + return proceed(undefined, rawResultFromAdapter); + + });//_∏_ + } + //‡ + // ┌┬┐┌─┐ ┬┌─┐┬┌┐┌┌─┐ ┬ ┬┬┌┬┐┬ ┬ ┌─┐┬ ┬┬┌┬┐ + // │││ │ ││ │││││└─┐ ││││ │ ├─┤ └─┐├─┤││││ + // ─┴┘└─┘ └┘└─┘┴┘└┘└─┘ └┴┘┴ ┴ ┴ ┴ └─┘┴ ┴┴┴ ┴ + // Otherwise we have some joining to do... + else { + + // First step -- group all of the joins by alias. 
+ var joinsByAlias = _.groupBy(parentQuery.joins, function(join) { return join.alias; }); + + // console.log('joinsByAlias', require('util').inspect(joinsByAlias, {depth: null})); + + // Example entry in `joinsByAlias`: + // pets: + // [ { parentCollectionIdentity: 'user', + // parent: 'user', + // parentAlias: 'user__pets', + // parentKey: 'id', + // childCollectionIdentity: 'pet_owners__user_pets', + // child: 'pet_owners__user_pets', + // childAlias: 'pet_owners__user_pets__pets', + // childKey: 'user_pets', + // alias: 'pets', + // removeParentKey: false, + // model: false, + // collection: true, + // select: false }, + // { parentCollectionIdentity: 'pet_owners__user_pets', + // parent: 'pet_owners__user_pets', + // parentAlias: 'pet_owners__user_pets__pets', + // parentKey: 'pet_owners', + // childCollectionIdentity: 'pet', + // child: 'pet', + // childAlias: 'pet__pets', + // childKey: 'id', + // alias: 'pets', + // junctionTable: true, + // removeParentKey: false, + // model: false, + // collection: true, + // criteria: + // { sort: [ { name: 'DESC' } ], + // select: [ 'id', 'name' ], + // where: {}, + // limit: 9007199254740991, + // skip: 0 } } ], + + // Next, run the parent query and get the initial results. Just to be safe, we'll create a copy + // of the parent query _without_ the joins array, in case the underlying adapter is sneaky and + // tries to do joins even in its `find` method. + var parentQueryWithoutJoins = _.omit(parentQuery, 'joins'); + parentAdapter.find(parentDatastoreName, parentQueryWithoutJoins, function (err, parentResults) { + if (err) { + err = forgeAdapterError(err, omen, 'find', WLModel.identity, orm); + return proceed(err); + } + + // Now that we have the parent query results, we'll run each set of joins and integrate. + async.reduce(_.keys(joinsByAlias), parentResults, function(populatedParentRecords, alias, nextSetOfJoins) { + + // Get the set of joins for this alias. 
+ var aliasJoins = joinsByAlias[alias]; + + // ┌┬┐┌─┐┌┐┌┬ ┬ ┌┬┐┌─┐ ┌┬┐┌─┐┌┐┌┬ ┬ ┌─┐┬─┐ ┬ ┬┬┌─┐┬ ┌─┐┌─┐┌─┐ + // │││├─┤│││└┬┘───│ │ │───│││├─┤│││└┬┘ │ │├┬┘ └┐┌┘│├─┤│ ├┤ └─┐└─┐ + // ┴ ┴┴ ┴┘└┘ ┴ ┴ └─┘ ┴ ┴┴ ┴┘└┘ ┴ └─┘┴└─ └┘ ┴┴ ┴┴─┘└─┘└─┘└─┘ + // If there's two joins in the set, we're using a junction table. + if (aliasJoins.length === 2) { + + // The first query we want to run is from the parent table to the junction table. + var firstJoin = _.first(_.remove(aliasJoins, function(join) { return join.parentCollectionIdentity === WLModel.identity; })); + + // The remaining join is to the child table. + var secondJoin = aliasJoins[0]; + + // Get a reference to the junction table model. + var junctionTableModel = collections[firstJoin.childCollectionIdentity]; + var junctionTablePrimaryKeyName = junctionTableModel.primaryKey; + var junctionTablePrimaryKeyColumnName = junctionTableModel.schema[junctionTablePrimaryKeyName].columnName; + + // Start building the query to the junction table. + var junctionTableQuery = { + using: firstJoin.child, + method: 'find', + criteria: { + where: { + and: [] + }, + skip: 0, + limit: Number.MAX_SAFE_INTEGER||9007199254740991, + select: [junctionTablePrimaryKeyColumnName, firstJoin.childKey, secondJoin.parentKey] + }, + meta: parentQuery.meta, + }; + + // Add an empty "sort" clause to the criteria. + junctionTableQuery.criteria.sort = []; + + // Grab all of the primary keys found in the parent query, build them into an + // `in` constraint, then push that on as a conjunct for the junction table query's + // criteria. + var junctionTableQueryInConjunct = {}; + junctionTableQueryInConjunct[firstJoin.childKey] = {in: _.pluck(parentResults, firstJoin.parentKey)}; + junctionTableQuery.criteria.where.and.push(junctionTableQueryInConjunct); + + // We now have a valid "stage 3" query, so let's run that and get the junction table results. + // First, figure out what datastore the junction table is on. 
+ var junctionTableDatastoreName = junctionTableModel.datastore; + // Next, get the adapter for that datastore. + var junctionTableAdapter = junctionTableModel._adapter; + // Finally, run the query on the adapter. + junctionTableAdapter.find(junctionTableDatastoreName, junctionTableQuery, function(err, junctionTableResults) { + if (err) { + // Note that we're careful to use the identity, not the table name! + err = forgeAdapterError(err, omen, 'find', junctionTableModel.identity, orm); + return nextSetOfJoins(err); + } + + // Okay! We have a set of records from the junction table. + // For example: + // [ { user_pets: 1, pet_owners: 1 }, { user_pets: 1, pet_owners: 2 }, { user_pets: 2, pet_owners: 3 } ] + // Now, for each parent PK in that result set (e.g. each value of `user_pets` above), we'll build + // and run a query on the child table using all of the associated child pks (e.g. `1` and `2`), applying + // the skip, limit and sort (if any) provided in the subcriteria from the user's `.populate()`. + + // Get a reference to the child table model. + var childTableModel = collections[secondJoin.childCollectionIdentity]; + + // Figure out what datastore the child table is on. + var childTableDatastoreName = childTableModel.datastore; + + // Get the adapter for that datastore. + var childTableAdapter = childTableModel._adapter; + + // Inherit the `meta` properties from the parent query. + var meta = parentQuery.meta; + + // Start a base query object for the child table. We'll use a copy of this with modified + // "in" constraint for each query to the child table (one per unique parent ID in the join results). + var baseChildTableQuery = { + using: secondJoin.child, + method: 'find', + criteria: { + where: { + and: [] + } + }, + meta: meta + }; + + // If the user added a "where" clause, add it to our "and" + if (_.keys(secondJoin.criteria.where).length > 0) { + // If the "where" clause has an "and" predicate already, concatenate it with our "and". 
+ if (secondJoin.criteria.where.and) { + baseChildTableQuery.criteria.where.and = baseChildTableQuery.criteria.where.and.concat(secondJoin.criteria.where.and); + } + else { + // Otherwise push the whole "where" clause in to the "and" array as a new conjunct. + // This handles cases like `populate('pets', {name: 'alice'})` AS WELL AS + // cases like `populate('pets', {or: [ {name: 'alice'}, {name: 'mr bailey'} ]})` + baseChildTableQuery.criteria.where.and.push(secondJoin.criteria.where); + } + } + + // If the user's subcriteria contained a `skip`, add it to our criteria. + // Otherwise use the default. + if (!_.isUndefined(secondJoin.criteria.skip)) { + baseChildTableQuery.criteria.skip = secondJoin.criteria.skip; + } else { + baseChildTableQuery.criteria.skip = 0; + } + + // If the user's subcriteria contained a `limit`, add it to our criteria. + // Otherwise use the default. + if (!_.isUndefined(secondJoin.criteria.limit)) { + baseChildTableQuery.criteria.limit = secondJoin.criteria.limit; + } else { + baseChildTableQuery.criteria.limit = Number.MAX_SAFE_INTEGER||9007199254740991; + } + + // If the user's subcriteria contained a `sort`, add it to our criteria. + // Otherwise use the default. + if (!_.isUndefined(secondJoin.criteria.sort)) { + baseChildTableQuery.criteria.sort = secondJoin.criteria.sort; + } + else { + baseChildTableQuery.criteria.sort = []; + } + + // If the user's subcriteria contained a `select`, add it to our criteria. + // Otherwise leave it as `undefined` (necessary for `schema: false` dbs). + if (!_.isUndefined(secondJoin.criteria.select)) { + baseChildTableQuery.criteria.select = secondJoin.criteria.select; + } + + // Get the unique parent primary keys from the junction table result. + var parentPks = _.uniq(_.pluck(junctionTableResults, firstJoin.childKey)); + + // Loop over those parent primary keys and do one query to the child table per parent, + // collecting the results in a dictionary organized by parent PK. 
+ async.reduce(parentPks, {}, function(memo, parentPk, nextParentPk) { + + var childTableQuery = _.cloneDeep(baseChildTableQuery); + + // Get all the records in the junction table result where the value of the foreign key + // to the parent table is equal to the parent table primary key value we're currently looking at. + // For example, if parentPK is 2, get records from pet_owners__user_pets where `user_pets` == 2. + var junctionTableRecordsForThisParent = _.filter(junctionTableResults, function(record) { + return record[firstJoin.childKey] === parentPk; + }); + + // Get the child table primary keys to look for by plucking the value of the foreign key to + // the child table from the filtered record set we just created. + var childPks = _.pluck(junctionTableRecordsForThisParent, secondJoin.parentKey); + + // Create an `in` constraint that looks for just those primary key values, + // then push it on to the child table query as a conjunct. + var childInConjunct = {}; + childInConjunct[secondJoin.childKey] = {in: childPks}; + childTableQuery.criteria.where.and.push(childInConjunct); + + // We now have another valid "stage 3" query, so let's run that and get the child table results. + // Finally, run the query on the adapter. + childTableAdapter.find(childTableDatastoreName, childTableQuery, function(err, childTableResults) { + if (err) { + // Note that we're careful to use the identity, not the table name! + err = forgeAdapterError(err, omen, 'find', childTableModel.identity, orm); + return nextParentPk(err); + } + + // Add these results to the child table results dictionary, under the current parent's pk. + memo[parentPk] = childTableResults; + + // Continue! + return nextParentPk(undefined, memo); + + }); // + + + }, function _afterGettingChildRecords(err, childRecordsByParent) { + if (err) { return nextSetOfJoins(err); } + + // Get the name of the primary key of the parent table. 
+            var parentKey = firstJoin.parentKey;
+
+            // Loop through the current populated parent records.
+            _.each(populatedParentRecords, function(parentRecord) {
+
+              // Get the current parent record's primary key value.
+              var parentPk = parentRecord[parentKey];
+
+              // If we have child records for this parent, attach them.
+              parentRecord[alias] = childRecordsByParent[parentPk] || [];
+
+            });
+
+            return nextSetOfJoins(undefined, populatedParentRecords);
+
+          }); //
+
+        }); //
+
+
+      } //
+
+      // ┌┬┐┌─┐ ┌─┐┌┐┌┌─┐ ┌─┐┬─┐ ┌┬┐┌─┐ ┌┬┐┌─┐┌┐┌┬ ┬ ┬ ┬┬┌┬┐┬ ┬ ┬ ┬┬┌─┐
+      // │ │ │ │ ││││├┤ │ │├┬┘ │ │ │───│││├─┤│││└┬┘ ││││ │ ├─┤ └┐┌┘│├─┤
+      // ┴ └─┘ └─┘┘└┘└─┘ └─┘┴└─ ┴ └─┘ ┴ ┴┴ ┴┘└┘ ┴ └┴┘┴ ┴ ┴ ┴ └┘ ┴┴ ┴
+      // Otherwise, if there's one join in the set: no junction table.
+      else if (aliasJoins.length === 1) {
+
+        // Get a reference to the single join we're doing.
+        var singleJoin = aliasJoins[0];
+
+        // Get a reference to the child table model.
+        var childTableModel = collections[singleJoin.childCollectionIdentity];
+
+        // Figure out what datastore the child table is on.
+        var childTableDatastoreName = childTableModel.datastore;
+
+        // Get the adapter for that datastore.
+        var childTableAdapter = childTableModel._adapter;
+
+        // Inherit the `meta` properties from the parent query.
+        var meta = parentQuery.meta;
+
+        // Start a base query object for the child table. We'll use a copy of this with modified
+        // "in" criteria for each query to the child table (one per unique parent ID in the join results).
+        var baseChildTableQuery = {
+          using: singleJoin.child,
+          method: 'find',
+          criteria: {
+            where: {
+              and: []
+            }
+          },
+          meta: meta
+        };
+
+        // If the user added a "where" clause, add it to our "and".
+        if (_.keys(singleJoin.criteria.where).length > 0) {
+          // If the "where" clause has an "and" modifier already, just push it onto our "and".
+ if (singleJoin.criteria.where.and) { + baseChildTableQuery.criteria.where.and = baseChildTableQuery.criteria.where.and.concat(singleJoin.criteria.where.and); + } else { + // Otherwise push the whole "where" clause in to the "and" array. + // This handles cases like `populate('pets', {name: 'alice'})` AS WELL AS + // cases like `populate('pets', {or: [ {name: 'alice'}, {name: 'mr bailey'} ]})` + baseChildTableQuery.criteria.where.and.push(singleJoin.criteria.where); + } + } + + // If the user added a skip, add it to our criteria. + // Otherwise use the default. + if (!_.isUndefined(singleJoin.criteria.skip)) { + baseChildTableQuery.criteria.skip = singleJoin.criteria.skip; + } else { + baseChildTableQuery.criteria.skip = 0; + } + + // If the user added a limit, add it to our criteria. + // Otherwise use the default. + if (!_.isUndefined(singleJoin.criteria.limit)) { + baseChildTableQuery.criteria.limit = singleJoin.criteria.limit; + } else { + baseChildTableQuery.criteria.limit = Number.MAX_SAFE_INTEGER||9007199254740991; + } + + // If the user added a sort, add it to our criteria. + // Otherwise use the default. + if (!_.isUndefined(singleJoin.criteria.sort)) { + baseChildTableQuery.criteria.sort = singleJoin.criteria.sort; + } + else { + baseChildTableQuery.criteria.sort = []; + } + + // If the user's subcriteria contained a `select`, add it to our criteria. + // Otherwise leave it as `undefined` (necessary for `schema: false` dbs). + if (!_.isUndefined(singleJoin.criteria.select)) { + baseChildTableQuery.criteria.select = singleJoin.criteria.select; + } + + // Loop over those parent primary keys and do one query to the child table per parent, + // collecting the results in a dictionary organized by parent PK. + async.map(populatedParentRecords, function(parentRecord, nextParentRecord) { + + // If the parent's foreign key value is undefined, just set the value to null or [] + // depending on what kind of association it is. 
This can happen when using a pre-existing + // schemaless database with Sails, such that some parent records don't have the foreign key field + // set at all (as opposed to having it set to `null`, which is what Sails does for you). + // + // Besides acting as an optimization, this avoids errors for adapters that don't tolerate + // undefined values in `where` clauses (see https://github.com/balderdashy/waterline/issues/1501) + // + // Note that an adapter should never need to deal with an undefined value in a "where" clause. No constraint in a where clause + // should ever be undefined (because the adapter always receives a fully-formed S3Q) + // (https://github.com/balderdashy/waterline/commit/1aebb9eecb24efbccfc996ec881f9dc497dbb0e0#commitcomment-23776777) + if (_.isUndefined(parentRecord[singleJoin.parentKey])) { + if (singleJoin.collection === true) { + parentRecord[alias] = []; + } else { + parentRecord[singleJoin.parentKey] = null; + } + // Avoid blowing up the stack (https://github.com/caolan/async/issues/696) + setImmediate(function() { + return nextParentRecord(undefined, parentRecord); + }); + return; + } + + // Start with a copy of the base query. + var childTableQuery = _.cloneDeep(baseChildTableQuery); + + // Create a conjunct that will look for child records whose join key value matches + // this parent record's PK value, then push that on to our `and` predicate. + var pkConjunct = {}; + pkConjunct[singleJoin.childKey] = parentRecord[singleJoin.parentKey]; + childTableQuery.criteria.where.and.push(pkConjunct); + + // We now have another valid "stage 3" query, so let's run that and get the child table results. + childTableAdapter.find(childTableDatastoreName, childTableQuery, function(err, childTableResults) { + if (err) { + err = forgeAdapterError(err, omen, 'find', childTableModel.identity, orm); + return nextParentRecord(err); + } + + // If this is a to-many join, add the results to the alias on the parent record. 
+ if (singleJoin.collection === true) { + parentRecord[alias] = childTableResults || []; + } + + // Otherwise, if this is a to-one join, add the single result to the join key column + // on the parent record. This will be normalized to an attribute name later, + // in `_afterGettingPopulatedPhysicalRecords`. + else { + parentRecord[singleJoin.parentKey] = childTableResults[0] || null; + } + + // Continue! + return nextParentRecord(undefined, parentRecord); + + }); // + + }, function _afterAsyncMap(err, result){ + if (err) { return nextSetOfJoins(err); } + return nextSetOfJoins(undefined, result); + });// + + } // + + // Otherwise, if we don't have either 1 or 2 joins for the alias. That's a prOblEm!!?! + else { + return nextSetOfJoins(new Error('Consistency violation: the alias `' + alias + '` should have either 1 or 2 joins, but instead had ' + aliasJoins.length + '!')); + } + + }, function _afterAsyncReduce(err, result) { + if (err) { return proceed(err); } + return proceed(undefined, result); + }); // + + }); // + + } // + + }) (function _afterGettingPopulatedPhysicalRecords (err, populatedRecords){ + + if (err) { return done(err); } + + // + // At this point, the records we've located are populated, but still "physical", + // meaning that they reference column names instead of attribute names (where relevant). + // + + // ┌┬┐┬─┐┌─┐┌┐┌┌─┐┌─┐┌─┐┬─┐┌┬┐ ┌─┐┌─┐┌─┐┬ ┬┬ ┌─┐┌┬┐┌─┐┌┬┐ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐ + // │ ├┬┘├─┤│││└─┐├┤ │ │├┬┘│││ ├─┘│ │├─┘│ ││ ├─┤ │ ├┤ ││ ├┬┘├┤ │ │ │├┬┘ ││└─┐ + // ┴ ┴└─┴ ┴┘└┘└─┘└ └─┘┴└─┴ ┴ ┴ └─┘┴ └─┘┴─┘┴ ┴ ┴ └─┘─┴┘ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘ + // Transform column names into attribute names for each of the result records, + // mutating them inline. + + // First, perform the transformation at the top level. + populatedRecords = _.map(populatedRecords, function(populatedPhysicalRecord) { + return WLModel._transformer.unserialize(populatedPhysicalRecord); + }); + + // + // At this point, we now have partially transformed records. 
+ // We still need to transform column names into attribute names for any&all + // nested child records too! + // + + // If the parent query did not specify joins, then short circuit to an empty array + // for our purposes below. + var joins = parentQuery.joins ? parentQuery.joins : []; + + // Sanity check: + if (!_.isArray(joins)) { + return done(new Error('Consistency violation: `joins` must be an array at this point. But instead, somehow it is this: ' + util.inspect(joins, { + depth: 5 + }) + '')); + }//-• + + // Now, perform the transformation for each and every nested child record, if relevant: + try { + // Process each record and look to see if there is anything to transform + // Look at each key in the object and see if it was used in a join + _.each(populatedRecords, function(record) { + _.each(_.keys(record), function(key) { + var attr = WLModel.schema[key]; + + // Skip unrecognized attributes. + if (!attr) { + return; + }//-• + + // If an attribute was found in the WL schema report, and it's not a singular + // or plural assoc., this means this value is for a normal, everyday attribute, + // and not an association of any sort. So in that case, there is no need to + // transform it. (We can just bail and skip on ahead.) + if (!_.has(attr, 'foreignKey') && !_.has(attr, 'collection')) { + return; + }//-• + + // Ascertain whether this attribute refers to a populate collection, and if so, + // get the identity of the child model in the join. + var joinModelIdentity = (function() { + + // Find the joins (if any) in this query that refer to the current attribute. + var joinsUsingThisAlias = _.where(joins, { alias: key }); + + // If there are no such joins, return `false`, signalling that we can continue to the next + // key in the record (there's nothing to transform). + if (joinsUsingThisAlias.length === 0) { + return false; + } + + // Get the reference identity. 
+          var referenceIdentity = attr.referenceIdentity;
+
+          // If there are two joins referring to this attribute, it means a junction table is being used.
+          // We don't want to do transformations using the junction table model, so find the join that
+          // has the junction table as the parent, and get the child identity.
+          if (joinsUsingThisAlias.length === 2) {
+            return _.find(joins, { parentCollectionIdentity: referenceIdentity }).childCollectionIdentity;
+          }
+
+          // Otherwise return the identity specified by `referenceIdentity`, which should be that of the child model.
+          else {
+            return referenceIdentity;
+          }
+
+        })();
+
+        // If the attribute references another identity, but no joins were made in this query using
+        // that identity (i.e. it was not populated), just leave the foreign key as it is and don't try
+        // and do any transformation to it.
+        if (joinModelIdentity === false) {
+          return;
+        }
+
+        var WLChildModel = getModel(joinModelIdentity, orm);
+
+        // If the value isn't an array, it must be a populated singular association
+        // (i.e. from a foreign key). So in that case, we'll just transform the
+        // child record and then attach it directly on the parent record.
+        if (!_.isArray(record[key])) {
+
+          if (!_.isNull(record[key]) && !_.isObject(record[key])) {
+            throw new Error('Consistency violation: IWMIH, `record[\''+key+'\']` should always be either `null` (if populating failed) or a dictionary (if it worked). But instead, got: '+util.inspect(record[key], {depth: 5})+'');
+          }
+
+          record[key] = WLChildModel._transformer.unserialize(record[key]);
+          return;
+        }//-•
+
+
+        // Otherwise the attribute is an array (presumably of populated child records).
+        // (We'll transform each and every one.)
+        var transformedChildRecords = [];
+        _.each(record[key], function(originalChildRecord) {
+
+          // Transform the child record.
+ var transformedChildRecord; + + transformedChildRecord = WLChildModel._transformer.unserialize(originalChildRecord); + + // Finally, push the transformed child record onto our new array. + transformedChildRecords.push(transformedChildRecord); + + });// + + // Set the RHS of this key to either a single record or the array of transformedChildRecords + // (whichever is appropriate for this association). + if (_.has(attr, 'foreignKey')) { + record[key] = _.first(transformedChildRecords); + } else { + record[key] = transformedChildRecords; + } + + // If `undefined` is specified explicitly, use `null` instead. + if (_.isUndefined(record[key])) { + record[key] = null; + }//>- + + });//∞ + });//∞ + } catch (err) { return done(err); } + + // Sanity check: + // If `populatedRecords` is invalid (not an array) return early to avoid getting into trouble. + if (!_.isArray(populatedRecords)) { + return done(new Error('Consistency violation: Result from helpFind() utility should be an array, but instead got: ' + util.inspect(populatedRecords, { + depth: 5 + }) + '')); + } //-• + + // Now, last of all, loop through any populates with special subcriteria of `false` + // and attach the appropriate base value for each populated field in each of the + // final result records. (Remember, we figured this out at the top of this file, + // so we don't have to worry about the query potentially having changed.) + if (populatesExplicitlySetToFalse.length > 0) { + + _.each(populatedRecords, function(record) { + _.each(populatesExplicitlySetToFalse, function(attrName) { + var attrDef = getAttribute(attrName, WLModel.identity, orm); + if (attrDef.collection) { + record[attrName] = []; + } + else { + record[attrName] = null; + } + });//∞ + });//∞ + + }//fi + + // That's it! 
+ return done(undefined, populatedRecords); + + }); // + +}; diff --git a/lib/waterline/utils/query/private/GENERIC_HELP_SUFFIX.string.js b/lib/waterline/utils/query/private/GENERIC_HELP_SUFFIX.string.js new file mode 100644 index 000000000..abfa86ce5 --- /dev/null +++ b/lib/waterline/utils/query/private/GENERIC_HELP_SUFFIX.string.js @@ -0,0 +1,18 @@ +/** + * A generic help suffix for use in error messages. + * + * @type {String} + */ + +module.exports = ' [?] See https://sailsjs.com/support for help.'; +// module.exports = '--\n'+ +// 'Read more (or ask for help):\n'+ +// ' • https://sailsjs.com/support\n'+ +// ' • https://sailsjs.com/docs/concepts/models-and-orm/query-language\n'+ +// ' • https://sailsjs.com/docs/concepts/models-and-orm\n'+ +// ' • https://sailsjs.com/docs/reference/waterline-orm\n'+ +// ''; + +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// FUTURE: Potentially build a more helpful landing page with the above links +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/lib/waterline/utils/query/private/build-usage-error.js b/lib/waterline/utils/query/private/build-usage-error.js new file mode 100644 index 000000000..fdc2c15d8 --- /dev/null +++ b/lib/waterline/utils/query/private/build-usage-error.js @@ -0,0 +1,197 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var GENERIC_HELP_SUFFIX = require('./GENERIC_HELP_SUFFIX.string'); + + + +/** + * Module constants + */ + + + +// Precompiled error message templates, one for each variety of recognized usage error. +// (Precompiled by Lodash into callable functions that return strings. Pass in `details` to use.) 
+var USAGE_ERR_MSG_TEMPLATES = { + + E_NOOP: _.template( + 'Query is a no-op.\n'+ + '(It would have no effect and retrieve no useful information.)\n'+ + '\n'+ + 'Details:\n'+ + ' <%= details %>'+ + '\n' + // =============================================================================================== + // NOTE: this error (^^^^^^) is so that there's some kind of default handling for + // the no-op case. This generic error is not always relevant or good. And anyway, + // most methods should handle E_NOOP explicitly. + // + // For example, if `.findOne()` notices an E_NOOP when forging, it simply swallows the error + // and calls its callback in exactly the same way as it would if specified criteria MIGHT have + // matched something but didn't. The fact that it NEVER could have matched anything doesn't + // particularly matter from a userland perspective, and since certain parts of `criteria` and + // other query keys are often built _dynamically_, any more aggressive failure in this case would + // be inconsiderate, at best. Thus it makes sense to avoid considering this an error whenever + // possible. 
+ // =============================================================================================== + ), + + E_INVALID_META: _.template( + 'Invalid value provided for `meta`.\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_CRITERIA: _.template( + 'Invalid criteria.\n'+ + 'Refer to the docs for up-to-date info on query language syntax:\n'+ + 'https://sailsjs.com/docs/concepts/models-and-orm/query-language\n'+ + '\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_POPULATES: _.template( + 'Invalid populate(s).\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_NUMERIC_ATTR_NAME: _.template( + 'Invalid numeric attr name.\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_STREAM_ITERATEE: _.template( + 'Invalid iteratee function.\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_NEW_RECORD: _.template( + 'Invalid initial data for new record.\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_NEW_RECORDS: _.template( + 'Invalid initial data for new records.\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_VALUES_TO_SET: _.template( + 'Invalid data-- cannot perform update with the provided values.\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_TARGET_RECORD_IDS: _.template( + 'Invalid target record id(s).\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_COLLECTION_ATTR_NAME: _.template( + 'Invalid collection attr name.\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), + + E_INVALID_ASSOCIATED_IDS: _.template( + 'Invalid associated id(s).\n'+ + 'Details:\n'+ + ' <%= details %>\n'+ + GENERIC_HELP_SUFFIX + ), +}; + + + +/** + * buildUsageError() + * + * Build a new Error instance from the provided metadata. 
+ * + * > Currently, this is designed for use with the `forgeStageTwoQuery()` utility, and its recognized + * > error codes are all related to that use case. But the idea is that, over time, this can also + * > be used with any other sorts of new, end-developer-facing usage errors. + * + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @param {String} code [e.g. 'E_INVALID_CRITERIA'] + * @param {String} details [e.g. 'The provided criteria contains an unrecognized property (`foo`):\n\'bar\''] + * @param {String} modelIdentity [e.g. 'user'] + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @returns {Error} + * @property {String} name (==> 'UsageError') + * @property {String} message [composed from `details` and a built-in template] + * @property {String} stack [built automatically by `new Error()`] + * @property {String} code [the specified `code`] + * @property {String} details [the specified `details`] + * + * > The returned Error will have normalized properties and a standard, + * > nicely-formatted error message built from stitching together the + * > provided pieces of information. + * > + * > Note that, until we do automatic munging of stack traces, using + * > this utility adds another internal item to the top of the trace. 
+ */ + +module.exports = function buildUsageError(code, details, modelIdentity) { + + // Sanity checks + if (!_.isString(code)) { + throw new Error('Consistency violation: `code` must be provided as a string, but instead, got: '+util.inspect(code, {depth:5})+''); + } + if (!_.isString(details)) { + throw new Error('Consistency violation: `details` must be provided as a string, but instead got: '+util.inspect(details, {depth:5})+''); + } + if (!_.isString(modelIdentity)) { + throw new Error('Consistency violation: `modelIdentity` must be provided as a string, but instead, got: '+util.inspect(code, {depth:5})+''); + } + + + // Look up standard template for this particular error code. + if (!USAGE_ERR_MSG_TEMPLATES[code]) { + throw new Error('Consistency violation: Unrecognized error code: '+code); + } + + // Build error message. + var errorMessage = USAGE_ERR_MSG_TEMPLATES[code]({ + details: details + }); + + // Instantiate Error. + // (This builds the stack trace.) + var err = new Error(errorMessage); + + // Flavor the error with the appropriate `code`, direct access to the provided `details`, + // and a consistent "name" (i.e. so it reads nicely when logged.) + err = flaverr({ + name: 'UsageError', + code: code, + details: details, + modelIdentity: modelIdentity + }, err); + + // That's it! + // Send it on back. 
+ return err; + +}; diff --git a/lib/waterline/utils/query/private/expand-where-shorthand.js b/lib/waterline/utils/query/private/expand-where-shorthand.js new file mode 100644 index 000000000..3a71d9528 --- /dev/null +++ b/lib/waterline/utils/query/private/expand-where-shorthand.js @@ -0,0 +1,86 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); + + + +/** + * Module constants + */ + +var RECOGNIZED_S2Q_CRITERIA_CLAUSE_NAMES = ['where', 'limit', 'skip', 'sort', 'select', 'omit']; + + + +/** + * expandWhereShorthand() + * + * Return a new dictionary wrapping the provided `where` clause, or if the + * provided dictionary already contains a criteria clause (`where`, `limit`, etc), + * then just return it as-is. + * + * + * > This handles implicit `where` clauses provided instead of criteria. + * > + * > If the provided criteria dictionary DOES NOT contain the names of ANY known + * > criteria clauses (like `where`, `limit`, etc.) as properties, then we can + * > safely assume that it is relying on shorthand: i.e. simply specifying what + * > would normally be the `where` clause, but at the top level. + * + * + * > Note that, _in addition_ to calling this utility from FS2Q, it is sometimes + * > necessary to call this directly from relevant methods. That's because FS2Q + * > normalization does not occur until we _actually_ execute the query, and in + * > the mean time, we provide deferred methods for building criteria piece by piece. 
+ * > In other words, we need to allow for hybrid usage like: + * > ``` + * > User.find({ name: 'Santa' }).limit(30) + * > ``` + * > + * > And: + * > ``` + * > User.find().limit(30) + * > ``` + * > + * > ...in addition to normal usage like this: + * > ``` + * > User.find({ limit: 30 }).where({ name: 'Santa', age: { '>': 1000 } }) + * > ``` + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @param {Ref?} criteria + * @returns {Dictionary} + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function expandWhereShorthand(criteria){ + + + if (_.isUndefined(criteria)) { + + criteria = {}; + + } + else if (!_.isObject(criteria)) { + + criteria = { + where: criteria + }; + + } + else { + + var recognizedClauses = _.intersection(_.keys(criteria), RECOGNIZED_S2Q_CRITERIA_CLAUSE_NAMES); + if (recognizedClauses.length === 0) { + criteria = { + where: criteria + }; + } + + } + + return criteria; + +}; diff --git a/lib/waterline/utils/query/private/is-safe-natural-number.js b/lib/waterline/utils/query/private/is-safe-natural-number.js new file mode 100644 index 000000000..2a59d272d --- /dev/null +++ b/lib/waterline/utils/query/private/is-safe-natural-number.js @@ -0,0 +1,44 @@ +/** + * Module dependencies + */ + +var lodash4IsSafeInteger = require('lodash.issafeinteger'); + + +/** + * isSafeNaturalNumber() + * + * Determine whether this value is a safe, natural number: + * • `safe` | `<= Number.MAX_SAFE_INTEGER` (see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/MAX_SAFE_INTEGER) + * • `natural` | `> 0 && !== Infinity && !== NaN && Math.floor(x) === x` (positive, non-zero, finite, round number. 
In other words, no funny business -- aka "positive, non-zero integer") + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @param {Ref} value + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @returns {Boolean} + */ + +module.exports = function isSafeNaturalNumber(value) { + + // Return false for: + // • NaN + // • Infinity / -Infinity + // • 0 / -0 + // • fractions + // • negative integers + // • and integers greater than `Number.MAX_SAFE_INTEGER` + // + // Otherwise, return true! + // + // > For more on `Number.isSafeInteger()`, check out MDN: + // > https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger + + // Note that, eventually, we can just do: + // ``` + // return Number.isSafeInteger(value) && value > 0; + // ``` + + // But for compatibility with legacy versions of Node.js, we do: + // (implementation borrowed from https://github.com/lodash/lodash/blob/4.17.2/lodash.js#L12094) + return lodash4IsSafeInteger(value) && value > 0; + +}; diff --git a/lib/waterline/utils/query/private/is-valid-attribute-name.js b/lib/waterline/utils/query/private/is-valid-attribute-name.js new file mode 100644 index 000000000..ca825a1cc --- /dev/null +++ b/lib/waterline/utils/query/private/is-valid-attribute-name.js @@ -0,0 +1,49 @@ +/** + * Module dependencies + */ + +var _ = require('@sailshq/lodash'); + + +/** + * Module constants + */ + +var RX_IS_VALID_ECMASCRIPT_5_1_VAR_NAME = 
/^(?!(?:do|if|in|for|let|new|try|var|case|else|enum|eval|false|null|this|true|void|with|break|catch|class|const|super|throw|while|yield|delete|export|import|public|return|static|switch|typeof|default|extends|finally|package|private|continue|debugger|function|arguments|interface|protected|implements|instanceof)$)[$A-Z\_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0\u08a2-\u08ac\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0977\u0979-\u097f\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d60\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0
f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f0\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19c1-\u19c7\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2e2f\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fcc\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua697\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua78e\ua790-\ua793\ua7a0-\ua7aa\ua7f8-\ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\u
aa80-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc][$A-Z\_a-z\xaa\xb5\xba\xc0-\xd6\xd8-\xf6\xf8-\u02c1\u02c6-\u02d1\u02e0-\u02e4\u02ec\u02ee\u0370-\u0374\u0376\u0377\u037a-\u037d\u0386\u0388-\u038a\u038c\u038e-\u03a1\u03a3-\u03f5\u03f7-\u0481\u048a-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05d0-\u05ea\u05f0-\u05f2\u0620-\u064a\u066e\u066f\u0671-\u06d3\u06d5\u06e5\u06e6\u06ee\u06ef\u06fa-\u06fc\u06ff\u0710\u0712-\u072f\u074d-\u07a5\u07b1\u07ca-\u07ea\u07f4\u07f5\u07fa\u0800-\u0815\u081a\u0824\u0828\u0840-\u0858\u08a0\u08a2-\u08ac\u0904-\u0939\u093d\u0950\u0958-\u0961\u0971-\u0977\u0979-\u097f\u0985-\u098c\u098f\u0990\u0993-\u09a8\u09aa-\u09b0\u09b2\u09b6-\u09b9\u09bd\u09ce\u09dc\u09dd\u09df-\u09e1\u09f0\u09f1\u0a05-\u0a0a\u0a0f\u0a10\u0a13-\u0a28\u0a2a-\u0a30\u0a32\u0a33\u0a35\u0a36\u0a38\u0a39\u0a59-\u0a5c\u0a5e\u0a72-\u0a74\u0a85-\u0a8d\u0a8f-\u0a91\u0a93-\u0aa8\u0aaa-\u0ab0\u0ab2\u0ab3\u0ab5-\u0ab9\u0abd\u0ad0\u0ae0\u0ae1\u0b05-\u0b0c\u0b0f\u0b10\u0b13-\u0b28\u0b2a-\u0b30\u0b32\u0b33\u0b35-\u0b39\u0b3d\u0b5c\u0b5d\u0b5f-\u0b61\u0b71\u0b83\u0b85-\u0b8a\u0b8e-\u0b90\u0b92-\u0b95\u0b99\u0b9a\u0b9c\u0b9e\u0b9f\u0ba3\u0ba4\u0ba8-\u0baa\u0bae-\u0bb9\u0bd0\u0c05-\u0c0c\u0c0e-\u0c10\u0c12-\u0c28\u0c2a-\u0c33\u0c35-\u0c39\u0c3d\u0c58\u0c59\u0c60\u0c61\u0c85-\u0c8c\u0c8e-\u0c90\u0c92-\u0ca8\u0caa-\u0cb3\u0cb5-\u0cb9\u0cbd\u0cde\u0ce0\u0ce1\u0cf1\u0cf2\u0d05-\u0d0c\u0d0e-\u0d10\u0d12-\u0d3a\u0d3d\u0d4e\u0d60\u0d61\u0d7a-\u0d7f\u0d85-\u0d96\u0d9a-\u0db1\u0db3-\u0dbb\u0dbd\u0dc0-\u0dc6\u0e01-\u0e30\u0e32\u0e33\u0e40-\u
0e46\u0e81\u0e82\u0e84\u0e87\u0e88\u0e8a\u0e8d\u0e94-\u0e97\u0e99-\u0e9f\u0ea1-\u0ea3\u0ea5\u0ea7\u0eaa\u0eab\u0ead-\u0eb0\u0eb2\u0eb3\u0ebd\u0ec0-\u0ec4\u0ec6\u0edc-\u0edf\u0f00\u0f40-\u0f47\u0f49-\u0f6c\u0f88-\u0f8c\u1000-\u102a\u103f\u1050-\u1055\u105a-\u105d\u1061\u1065\u1066\u106e-\u1070\u1075-\u1081\u108e\u10a0-\u10c5\u10c7\u10cd\u10d0-\u10fa\u10fc-\u1248\u124a-\u124d\u1250-\u1256\u1258\u125a-\u125d\u1260-\u1288\u128a-\u128d\u1290-\u12b0\u12b2-\u12b5\u12b8-\u12be\u12c0\u12c2-\u12c5\u12c8-\u12d6\u12d8-\u1310\u1312-\u1315\u1318-\u135a\u1380-\u138f\u13a0-\u13f4\u1401-\u166c\u166f-\u167f\u1681-\u169a\u16a0-\u16ea\u16ee-\u16f0\u1700-\u170c\u170e-\u1711\u1720-\u1731\u1740-\u1751\u1760-\u176c\u176e-\u1770\u1780-\u17b3\u17d7\u17dc\u1820-\u1877\u1880-\u18a8\u18aa\u18b0-\u18f5\u1900-\u191c\u1950-\u196d\u1970-\u1974\u1980-\u19ab\u19c1-\u19c7\u1a00-\u1a16\u1a20-\u1a54\u1aa7\u1b05-\u1b33\u1b45-\u1b4b\u1b83-\u1ba0\u1bae\u1baf\u1bba-\u1be5\u1c00-\u1c23\u1c4d-\u1c4f\u1c5a-\u1c7d\u1ce9-\u1cec\u1cee-\u1cf1\u1cf5\u1cf6\u1d00-\u1dbf\u1e00-\u1f15\u1f18-\u1f1d\u1f20-\u1f45\u1f48-\u1f4d\u1f50-\u1f57\u1f59\u1f5b\u1f5d\u1f5f-\u1f7d\u1f80-\u1fb4\u1fb6-\u1fbc\u1fbe\u1fc2-\u1fc4\u1fc6-\u1fcc\u1fd0-\u1fd3\u1fd6-\u1fdb\u1fe0-\u1fec\u1ff2-\u1ff4\u1ff6-\u1ffc\u2071\u207f\u2090-\u209c\u2102\u2107\u210a-\u2113\u2115\u2119-\u211d\u2124\u2126\u2128\u212a-\u212d\u212f-\u2139\u213c-\u213f\u2145-\u2149\u214e\u2160-\u2188\u2c00-\u2c2e\u2c30-\u2c5e\u2c60-\u2ce4\u2ceb-\u2cee\u2cf2\u2cf3\u2d00-\u2d25\u2d27\u2d2d\u2d30-\u2d67\u2d6f\u2d80-\u2d96\u2da0-\u2da6\u2da8-\u2dae\u2db0-\u2db6\u2db8-\u2dbe\u2dc0-\u2dc6\u2dc8-\u2dce\u2dd0-\u2dd6\u2dd8-\u2dde\u2e2f\u3005-\u3007\u3021-\u3029\u3031-\u3035\u3038-\u303c\u3041-\u3096\u309d-\u309f\u30a1-\u30fa\u30fc-\u30ff\u3105-\u312d\u3131-\u318e\u31a0-\u31ba\u31f0-\u31ff\u3400-\u4db5\u4e00-\u9fcc\ua000-\ua48c\ua4d0-\ua4fd\ua500-\ua60c\ua610-\ua61f\ua62a\ua62b\ua640-\ua66e\ua67f-\ua697\ua6a0-\ua6ef\ua717-\ua71f\ua722-\ua788\ua78b-\ua78e\ua790-\ua793\ua7a0-\ua7aa\ua7f8-\
ua801\ua803-\ua805\ua807-\ua80a\ua80c-\ua822\ua840-\ua873\ua882-\ua8b3\ua8f2-\ua8f7\ua8fb\ua90a-\ua925\ua930-\ua946\ua960-\ua97c\ua984-\ua9b2\ua9cf\uaa00-\uaa28\uaa40-\uaa42\uaa44-\uaa4b\uaa60-\uaa76\uaa7a\uaa80-\uaaaf\uaab1\uaab5\uaab6\uaab9-\uaabd\uaac0\uaac2\uaadb-\uaadd\uaae0-\uaaea\uaaf2-\uaaf4\uab01-\uab06\uab09-\uab0e\uab11-\uab16\uab20-\uab26\uab28-\uab2e\uabc0-\uabe2\uac00-\ud7a3\ud7b0-\ud7c6\ud7cb-\ud7fb\uf900-\ufa6d\ufa70-\ufad9\ufb00-\ufb06\ufb13-\ufb17\ufb1d\ufb1f-\ufb28\ufb2a-\ufb36\ufb38-\ufb3c\ufb3e\ufb40\ufb41\ufb43\ufb44\ufb46-\ufbb1\ufbd3-\ufd3d\ufd50-\ufd8f\ufd92-\ufdc7\ufdf0-\ufdfb\ufe70-\ufe74\ufe76-\ufefc\uff21-\uff3a\uff41-\uff5a\uff66-\uffbe\uffc2-\uffc7\uffca-\uffcf\uffd2-\uffd7\uffda-\uffdc0-9\u0300-\u036f\u0483-\u0487\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u0669\u0670\u06d6-\u06dc\u06df-\u06e4\u06e7\u06e8\u06ea-\u06ed\u06f0-\u06f9\u0711\u0730-\u074a\u07a6-\u07b0\u07c0-\u07c9\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0859-\u085b\u08e4-\u08fe\u0900-\u0903\u093a-\u093c\u093e-\u094f\u0951-\u0957\u0962\u0963\u0966-\u096f\u0981-\u0983\u09bc\u09be-\u09c4\u09c7\u09c8\u09cb-\u09cd\u09d7\u09e2\u09e3\u09e6-\u09ef\u0a01-\u0a03\u0a3c\u0a3e-\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a66-\u0a71\u0a75\u0a81-\u0a83\u0abc\u0abe-\u0ac5\u0ac7-\u0ac9\u0acb-\u0acd\u0ae2\u0ae3\u0ae6-\u0aef\u0b01-\u0b03\u0b3c\u0b3e-\u0b44\u0b47\u0b48\u0b4b-\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b66-\u0b6f\u0b82\u0bbe-\u0bc2\u0bc6-\u0bc8\u0bca-\u0bcd\u0bd7\u0be6-\u0bef\u0c01-\u0c03\u0c3e-\u0c44\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0c66-\u0c6f\u0c82\u0c83\u0cbc\u0cbe-\u0cc4\u0cc6-\u0cc8\u0cca-\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0ce6-\u0cef\u0d02\u0d03\u0d3e-\u0d44\u0d46-\u0d48\u0d4a-\u0d4d\u0d57\u0d62\u0d63\u0d66-\u0d6f\u0d82\u0d83\u0dca\u0dcf-\u0dd4\u0dd6\u0dd8-\u0ddf\u0df2\u0df3\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0e50-\u0e59\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0ed0-\u0ed9\u0f18\u0f19\u0f20-\u0f29\u0f35\u0f37\u0f39\u0f
3e\u0f3f\u0f71-\u0f84\u0f86\u0f87\u0f8d-\u0f97\u0f99-\u0fbc\u0fc6\u102b-\u103e\u1040-\u1049\u1056-\u1059\u105e-\u1060\u1062-\u1064\u1067-\u106d\u1071-\u1074\u1082-\u108d\u108f-\u109d\u135d-\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b4-\u17d3\u17dd\u17e0-\u17e9\u180b-\u180d\u1810-\u1819\u18a9\u1920-\u192b\u1930-\u193b\u1946-\u194f\u19b0-\u19c0\u19c8\u19c9\u19d0-\u19d9\u1a17-\u1a1b\u1a55-\u1a5e\u1a60-\u1a7c\u1a7f-\u1a89\u1a90-\u1a99\u1b00-\u1b04\u1b34-\u1b44\u1b50-\u1b59\u1b6b-\u1b73\u1b80-\u1b82\u1ba1-\u1bad\u1bb0-\u1bb9\u1be6-\u1bf3\u1c24-\u1c37\u1c40-\u1c49\u1c50-\u1c59\u1cd0-\u1cd2\u1cd4-\u1ce8\u1ced\u1cf2-\u1cf4\u1dc0-\u1de6\u1dfc-\u1dff\u200c\u200d\u203f\u2040\u2054\u20d0-\u20dc\u20e1\u20e5-\u20f0\u2cef-\u2cf1\u2d7f\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua620-\ua629\ua66f\ua674-\ua67d\ua69f\ua6f0\ua6f1\ua802\ua806\ua80b\ua823-\ua827\ua880\ua881\ua8b4-\ua8c4\ua8d0-\ua8d9\ua8e0-\ua8f1\ua900-\ua909\ua926-\ua92d\ua947-\ua953\ua980-\ua983\ua9b3-\ua9c0\ua9d0-\ua9d9\uaa29-\uaa36\uaa43\uaa4c\uaa4d\uaa50-\uaa59\uaa7b\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uaaeb-\uaaef\uaaf5\uaaf6\uabe3-\uabea\uabec\uabed\uabf0-\uabf9\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\ufe33\ufe34\ufe4d-\ufe4f\uff10-\uff19\uff3f]*$/; +// (^adapted from https://github.com/mikermcneil/machinepack-javascript/blob/master/machines/validate-varname.js) + + +/** + * isValidAttributeName() + * + * Determine whether this value is valid for use as a Waterline attribute name. 
+ * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @param {Ref} hypotheticalAttrName + * --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + * @returns {Boolean} + */ + +module.exports = function isValidAttributeName(hypotheticalAttrName) { + + if (!_.isString(hypotheticalAttrName)) { + return false; + }//-• + + if (hypotheticalAttrName === '') { + return false; + }//-• + + if (!hypotheticalAttrName.match(RX_IS_VALID_ECMASCRIPT_5_1_VAR_NAME)) { + return false; + }//-• + + // For compatibility: Don't allow an attribute named "toJSON" or "toObject" + // (regardless of how it is capitalized) + if (hypotheticalAttrName.match(/^toJSON$/i) || hypotheticalAttrName.match(/^toObject$/i)) { + return false; + }//-• + + // IWMIH, then the specified value seems to be a perfectly valid name for a Waterline attribute. + return true; + +}; diff --git a/lib/waterline/utils/query/private/normalize-comparison-value.js b/lib/waterline/utils/query/private/normalize-comparison-value.js new file mode 100644 index 000000000..363337c91 --- /dev/null +++ b/lib/waterline/utils/query/private/normalize-comparison-value.js @@ -0,0 +1,200 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var rttc = require('rttc'); +var getModel = require('../../ontology/get-model'); +var getAttribute = require('../../ontology/get-attribute'); + + +/** + * normalizeComparisonValue() + * + * Validate and normalize the provided value vs. a particular attribute, + * taking `type` into account, as well as whether the referenced attribute is + * a singular association or a primary key. 
And if no such attribute exists, + * then this at least ensure the value is JSON-compatible. + * ------------------------------------------------------------------------------------------ + * This utility is for the purposes of `normalizeConstraint()` (e.g. within `where` clause) + * so does not care about required/defaultsTo/etc. It is used for eq constraints, `in` and + * `nin` modifiers, as well as comparison modifiers like `!=`, `<`, `>`, `<=`, and `>=`. + * + * > • It always tolerates `null` (& does not care about required/defaultsTo/etc.) + * > • Collection attrs are never allowed. + * > (Attempting to use one will cause this to throw a consistency violation error + * > so i.e. it should be checked beforehand.) + * ------------------------------------------------------------------------------------------ + * @param {Ref} value + * The eq constraint or modifier to normalize. + * + * @param {String} attrName + * The name of the attribute to check against. + * + * @param {String} modelIdentity + * The identity of the model the attribute belongs to (e.g. "pet" or "user") + * + * @param {Ref} orm + * The Waterline ORM instance. + * ------------------------------------------------------------------------------------------ + * @returns {Ref} + * The provided value, now normalized and guaranteed to match the specified attribute. + * This might be the same original reference, or it might not. + * ------------------------------------------------------------------------------------------ + * @throws {Error} if invalid and cannot be coerced + * @property {String} code (=== "E_VALUE_NOT_USABLE") + * ------------------------------------------------------------------------------------------ + * @throws {Error} If anything unexpected happens, e.g. bad usage, or a failed assertion. 
+ * ------------------------------------------------------------------------------------------ + */ + +module.exports = function normalizeComparisonValue (value, attrName, modelIdentity, orm){ + if (!_.isString(attrName)) { throw new Error('Consistency violation: This internal utility must always be called with a valid second argument (the attribute name). But instead, got: '+util.inspect(attrName, {depth:5})+''); } + if (!_.isString(modelIdentity)) { throw new Error('Consistency violation: This internal utility must always be called with a valid third argument (the model identity). But instead, got: '+util.inspect(modelIdentity, {depth:5})+''); } + if (!_.isObject(orm)) { throw new Error('Consistency violation: This internal utility must always be called with a valid fourth argument (the orm instance). But instead, got: '+util.inspect(orm, {depth:5})+''); } + + + if (_.isUndefined(value)) { + throw flaverr('E_VALUE_NOT_USABLE', new Error( + 'Cannot compare vs. `undefined`!\n'+ + '--\n'+ + 'Usually, this just means there is some kind of logic error in the code that builds this `where` clause. '+ + 'On the other hand, if you purposely built this query with `undefined`, bear in mind that you\'ll '+ + 'need to be more explicit: When comparing "emptiness" in a `where` clause, specify null, empty string (\'\'), '+ + '0, or false.\n'+ + '--' + )); + }//-• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Maybe make the RTTC validation in this file strict (instead of also performing light coercion). + // On one hand, it's better to show an error than have experience of fetching stuff from the database + // be inconsistent with what you can search for. But on the other hand, it's nice to have Waterline + // automatically coerce the string "4" into the number 4 (and vice versa) within an eq constraint. 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + // Look up the primary Waterline model and attribute. + var WLModel = getModel(modelIdentity, orm); + + // Try to look up the attribute definition. + // (`attrDef` will either be a valid attribute or `undefined`) + var attrDef = WLModel.attributes[attrName]; + + // If this attribute exists, ensure that it is not a plural association. + if (attrDef) { + assert(!attrDef.collection, 'Should not call this internal utility on a plural association (i.e. `collection` attribute).'); + } + + + // ╔╗╔╦ ╦╦ ╦ + // ║║║║ ║║ ║ + // ╝╚╝╚═╝╩═╝╩═╝ + if (_.isNull(value)) { + + // `null` is always allowed as a constraint. + + }//‡ + // ┌─┐┌─┐┬─┐ ╦ ╦╔╗╔╦═╗╔═╗╔═╗╔═╗╔═╗╔╗╔╦╔═╗╔═╗╔╦╗ ╔═╗╔╦╗╔╦╗╦═╗╦╔╗ ╦ ╦╔╦╗╔═╗ + // ├┤ │ │├┬┘ ║ ║║║║╠╦╝║╣ ║ ║ ║║ ╦║║║║╔═╝║╣ ║║ ╠═╣ ║ ║ ╠╦╝║╠╩╗║ ║ ║ ║╣ + // └ └─┘┴└─ ╚═╝╝╚╝╩╚═╚═╝╚═╝╚═╝╚═╝╝╚╝╩╚═╝╚═╝═╩╝ ╩ ╩ ╩ ╩ ╩╚═╩╚═╝╚═╝ ╩ ╚═╝ + // If unrecognized, normalize the value as if there was a matching attribute w/ `type: 'json'`. + // > This is because we don't want to leave potentially-circular/crazy constraints + // > in the criteria unless they correspond w/ `type: 'ref'` attributes. + else if (!attrDef) { + + try { + value = rttc.validate('json', value); + } catch (e) { + switch (e.code) { + + case 'E_INVALID': + throw flaverr('E_VALUE_NOT_USABLE', new Error( + 'There is no such attribute declared by this model... which is fine, '+ + 'because the model supports unrecognized attributes (`schema: false`). '+ + 'However, all comparison values in constraints for unrecognized attributes '+ + 'must be JSON-compatible, and this one is not. 
'+e.message + )); + + default: + throw e; + } + }//>-• + + }//‡ + // ┌─┐┌─┐┬─┐ ╔═╗╦╔╗╔╔═╗╦ ╦╦ ╔═╗╦═╗ ╔═╗╔═╗╔═╗╔═╗╔═╗╦╔═╗╔╦╗╦╔═╗╔╗╔ + // ├┤ │ │├┬┘ ╚═╗║║║║║ ╦║ ║║ ╠═╣╠╦╝ ╠═╣╚═╗╚═╗║ ║║ ║╠═╣ ║ ║║ ║║║║ + // └ └─┘┴└─ ╚═╝╩╝╚╝╚═╝╚═╝╩═╝╩ ╩╩╚═ ╩ ╩╚═╝╚═╝╚═╝╚═╝╩╩ ╩ ╩ ╩╚═╝╝╚╝ + else if (attrDef.model) { + + + // Ensure that this is a valid primary key value for the associated model. + var associatedPkType = getAttribute(getModel(attrDef.model, orm).primaryKey, attrDef.model, orm).type; + try { + // Note: While searching for an fk of 3.3 would be weird, we don't + // use the `normalizePKValue()` utility here. Instead we simply + // use rttc.validate(). + // + // > (This is just to allow for edge cases where the schema changed + // > and some records in the db were not migrated properly.) + value = rttc.validate(associatedPkType, value); + } catch (e) { + switch (e.code) { + + case 'E_INVALID': + throw flaverr('E_VALUE_NOT_USABLE', new Error( + 'The corresponding attribute (`'+attrName+'`) is a singular ("model") association, '+ + 'but the provided value does not match the declared type of the primary key attribute '+ + 'for the associated model (`'+attrDef.model+'`). '+ + e.message + )); + + default: + throw e; + + } + }// + + }//‡ + // ┌─┐┌─┐┬─┐ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦ ╦ ╦╔═╔═╗╦ ╦ ╔═╗╔╦╗╔╦╗╦═╗╦╔╗ ╦ ╦╔╦╗╔═╗ + // ├┤ │ │├┬┘ ╠═╝╠╦╝║║║║╠═╣╠╦╝╚╦╝ ╠╩╗║╣ ╚╦╝ ╠═╣ ║ ║ ╠╦╝║╠╩╗║ ║ ║ ║╣ + // └ └─┘┴└─ ╩ ╩╚═╩╩ ╩╩ ╩╩╚═ ╩ ╩ ╩╚═╝ ╩ ╩ ╩ ╩ ╩ ╩╚═╩╚═╝╚═╝ ╩ ╚═╝ + // ┌─┐┬─┐ ╔╦╗╦╔═╗╔═╗╔═╗╦ ╦ ╔═╗╔╗╔╔═╗╔═╗╦ ╦╔═╗ ╔═╗╔╦╗╔╦╗╦═╗╦╔╗ ╦ ╦╔╦╗╔═╗ + // │ │├┬┘ ║║║║╚═╗║ ║╣ ║ ║ ╠═╣║║║║╣ ║ ║║ ║╚═╗ ╠═╣ ║ ║ ╠╦╝║╠╩╗║ ║ ║ ║╣ + // └─┘┴└─ ╩ ╩╩╚═╝╚═╝╚═╝╩═╝╩═╝╩ ╩╝╚╝╚═╝╚═╝╚═╝╚═╝ ╩ ╩ ╩ ╩ ╩╚═╩╚═╝╚═╝ ╩ ╚═╝ + // + // Note that even though primary key values have additional rules on top of basic + // RTTC type validation, we still treat them the same for our purposes here. + // > (That's because we want you to be able to search for things in the database + // > that you might not necessarily be possible to create/update in Waterline.) 
+ else { + if (!_.isString(attrDef.type) || attrDef.type === '') { + throw new Error('Consistency violation: There is no way this attribute (`'+attrName+'`) should have been allowed to be registered with neither a `type`, `model`, nor `collection`! Here is the attr def: '+util.inspect(attrDef, {depth:5})+''); + } + + try { + value = rttc.validate(attrDef.type, value); + } catch (e) { + switch (e.code) { + + case 'E_INVALID': + throw flaverr('E_VALUE_NOT_USABLE', new Error( + 'Does not match the declared data type of the corresponding attribute. '+e.message + )); + + default: + throw e; + } + }// + + }//>- + + + // Return the normalized value. + return value; + +}; + diff --git a/lib/waterline/utils/query/private/normalize-constraint.js b/lib/waterline/utils/query/private/normalize-constraint.js new file mode 100644 index 000000000..218e36c1b --- /dev/null +++ b/lib/waterline/utils/query/private/normalize-constraint.js @@ -0,0 +1,873 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var rttc = require('rttc'); +var getModel = require('../../ontology/get-model'); +var getAttribute = require('../../ontology/get-attribute'); +var isValidAttributeName = require('./is-valid-attribute-name'); +var normalizeComparisonValue = require('./normalize-comparison-value'); + + +/** + * Module constants + */ + + +// Deprecated aliases +// (note that some aliases may not be listed here-- for example, +// `not` can actually be an alias for `nin`.) +var MODIFIER_ALIASES = { + lessThan: '<', + lessThanOrEqual: '<=', + greaterThan: '>', + greaterThanOrEqual: '>=', + not: '!=', + '!': '!=', + '!==': '!=' +}; + + +// The official set of supported modifiers. 
+var MODIFIER_KINDS = { + '<': true, + '<=': true, + '>': true, + '>=': true, + + '!=': true, + + 'nin': true, + 'in': true, + + 'like': true, + 'contains': true, + 'startsWith': true, + 'endsWith': true +}; + + +/** + * normalizeConstraint() + * + * Validate and normalize the provided constraint target (LHS), as well as the RHS. + * + * ------------------------------------------------------------------------------------------ + * @param {Ref} constraintRhs [may be MUTATED IN PLACE!] + * + * @param {String} constraintTarget + * The LHS of this constraint; usually, the attribute name it is referring to (unless + * the model is `schema: false` or the constraint is invalid). + * + * @param {String} modelIdentity + * The identity of the model this contraint is referring to (e.g. "pet" or "user") + * > Useful for looking up the Waterline model and accessing its attribute definitions. + * + * @param {Ref} orm + * The Waterline ORM instance. + * > Useful for accessing the model definitions. + * + * @param {Dictionary?} meta + * The contents of the `meta` query key, if one was provided. + * > Useful for propagating query options to low-level utilities like this one. + * ------------------------------------------------------------------------------------------ + * @returns {Dictionary|String|Number|Boolean|JSON} + * The constraint (potentially the same ref), guaranteed to be valid for a stage 2 query. 
+ * This will always be either a complex constraint (dictionary), or an eq constraint (a + * primitive-- string/number/boolean/null) + * ------------------------------------------------------------------------------------------ + * @throws {Error} if the provided constraint cannot be normalized + * @property {String} code (=== "E_CONSTRAINT_NOT_USABLE") + * ------------------------------------------------------------------------------------------ + * @throws {Error} If the provided constraint would match everything + * @property {String} code (=== "E_CONSTRAINT_WOULD_MATCH_EVERYTHING") + * ------------------------------------------------------------------------------------------ + * @throws {Error} If the provided constraint would NEVER EVER match anything + * @property {String} code (=== "E_CONSTRAINT_WOULD_MATCH_NOTHING") + * ------------------------------------------------------------------------------------------ + * @throws {Error} If anything unexpected happens, e.g. bad usage, or a failed assertion. + * ------------------------------------------------------------------------------------------ + */ + +module.exports = function normalizeConstraint (constraintRhs, constraintTarget, modelIdentity, orm, meta){ + if (_.isUndefined(constraintRhs)) { + throw new Error('Consistency violation: The internal normalizeConstraint() utility must always be called with a first argument (the RHS of the constraint to normalize). But instead, got: '+util.inspect(constraintRhs, {depth:5})+''); + } + if (!_.isString(constraintTarget)) { + throw new Error('Consistency violation: The internal normalizeConstraint() utility must always be called with a valid second argument (a string). But instead, got: '+util.inspect(constraintTarget, {depth:5})+''); + } + if (!_.isString(modelIdentity)) { + throw new Error('Consistency violation: The internal normalizeConstraint() utility must always be called with a valid third argument (a string). 
But instead, got: '+util.inspect(modelIdentity, {depth:5})+''); + } + + + // Look up the Waterline model for this query. + var WLModel = getModel(modelIdentity, orm); + + // Before we look at the constraint's RHS, we'll check the key (the constraint target) + // to be sure it is valid for this model. + // (in the process, we look up the expected type for the corresponding attribute, + // so that we have something to validate against) + var attrName; + + var isDeepTarget; + var deepTargetHops; + if (_.isString(constraintTarget)){ + deepTargetHops = constraintTarget.split(/\./); + isDeepTarget = (deepTargetHops.length > 1); + } + + if (isDeepTarget) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Replace this opt-in experimental support with official support for + // deep targets for constraints: i.e. dot notation for lookups within JSON embeds. + // This will require additional tests + docs, as well as a clear way of indicating + // whether a particular adapter supports this feature so that proper error messages + // can be displayed otherwise. + // (See https://github.com/balderdashy/waterline/pull/1519) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (!meta || !meta.enableExperimentalDeepTargets) { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Cannot use dot notation in a constraint target without enabling experimental support '+ + 'for "deep targets". Please try again with `.meta({enableExperimentalDeepTargets:true})`.' + )); + }//• + + attrName = deepTargetHops[0]; + } + else { + attrName = constraintTarget; + } + + // Try to look up the definition of the attribute that this constraint is referring to. + var attrDef; + try { + attrDef = getAttribute(attrName, modelIdentity, orm); + } catch (e){ + switch (e.code) { + case 'E_ATTR_NOT_REGISTERED': + // If no matching attr def exists, then just leave `attrDef` undefined + // and continue... 
for now anyway. + break; + default: throw e; + } + }// + + // If model is `schema: true`... + if (WLModel.hasSchema === true) { + + // Make sure this matched a recognized attribute name. + if (!attrDef) { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + '`'+attrName+'` is not a recognized attribute for this '+ + 'model (`'+modelIdentity+'`). And since the model declares `schema: true`, '+ + 'this is not allowed.' + )); + }//-• + + } + // Else if model is `schema: false`... + else if (WLModel.hasSchema === false) { + + // Make sure this is at least a valid name for a Waterline attribute. + if (!isValidAttributeName(attrName)) { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + '`'+attrName+'` is not a valid name for an attribute in Waterline. '+ + 'Even though this model (`'+modelIdentity+'`) declares `schema: false`, '+ + 'this is not allowed.' + )); + }//-• + + } else { throw new Error('Consistency violation: Every instantiated Waterline model should always have a `hasSchema` property as either `true` or `false` (should have been derived from the `schema` model setting when Waterline was being initialized). But somehow, this model (`'+modelIdentity+'`) ended up with `hasSchema: '+util.inspect(WLModel.hasSchema, {depth:5})+'`'); } + + + + // If this attribute is a plural (`collection`) association, then reject it out of hand. + // (filtering by plural associations is not supported, regardless of what constraint you're using.) + if (attrDef && attrDef.collection) { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Cannot filter by `'+attrName+'` because it is a plural association (which wouldn\'t make sense).' + )); + }//-• + + + if (isDeepTarget) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: See the other note above. This is still experimental. 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (isDeepTarget && attrDef && attrDef.type !== 'json' && attrDef.type !== 'ref') { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Cannot use dot notation in a constraint for the `'+attrName+'` attribute. '+ + (attrDef.model||attrDef.collection? + 'Dot notation is not currently supported for "whose" lookups across associations '+ + '(see https://github.com/balderdashy/waterline/pull/1519 for details).' + : + 'Dot notation is only supported for fields which might potentially contain embedded JSON.' + ) + )); + }//• + }//fi + + + // If this attribute is a singular (`model`) association, then look up + // the reciprocal model def, as well as its primary attribute def. + var Reciprocal; + var reciprocalPKA; + if (attrDef && attrDef.model) { + Reciprocal = getModel(attrDef.model, orm); + reciprocalPKA = getAttribute(Reciprocal.primaryKey, attrDef.model, orm); + }//>- + + + + // ███████╗██╗ ██╗ ██████╗ ██████╗ ████████╗██╗ ██╗ █████╗ ███╗ ██╗██████╗ + // ██╔════╝██║ ██║██╔═══██╗██╔══██╗╚══██╔══╝██║ ██║██╔══██╗████╗ ██║██╔══██╗ + // ███████╗███████║██║ ██║██████╔╝ ██║ ███████║███████║██╔██╗ ██║██║ ██║ + // ╚════██║██╔══██║██║ ██║██╔══██╗ ██║ ██╔══██║██╔══██║██║╚██╗██║██║ ██║ + // ███████║██║ ██║╚██████╔╝██║ ██║ ██║ ██║ ██║██║ ██║██║ ╚████║██████╔╝ + // ╚══════╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝╚═════╝ + // + // ███████╗ ██████╗ ██████╗ ██╗███╗ ██╗ + // ██╔════╝██╔═══██╗██╔══██╗ ██║████╗ ██║ + // █████╗ ██║ ██║██████╔╝ █████╗██║██╔██╗ ██║█████╗ + // ██╔══╝ ██║ ██║██╔══██╗ ╚════╝██║██║╚██╗██║╚════╝ + // ██║ ╚██████╔╝██║ ██║ ██║██║ ╚████║ + // ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═══╝ + // + // ██████╗ ██████╗ ███╗ ██╗███████╗████████╗██████╗ █████╗ ██╗███╗ ██╗████████╗ + // ██╔════╝██╔═══██╗████╗ ██║██╔════╝╚══██╔══╝██╔══██╗██╔══██╗██║████╗ ██║╚══██╔══╝ + // ██║ ██║ ██║██╔██╗ ██║███████╗ ██║ ██████╔╝███████║██║██╔██╗ ██║ ██║ + // ██║ ██║ ██║██║╚██╗██║╚════██║ ██║ 
██╔══██╗██╔══██║██║██║╚██╗██║ ██║ + // ╚██████╗╚██████╔╝██║ ╚████║███████║ ██║ ██║ ██║██║ ██║██║██║ ╚████║ ██║ + // ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // If this is "IN" shorthand (an array)... + if (_.isArray(constraintRhs)) { + + // Normalize this into a complex constraint with an `in` modifier. + var inConstraintShorthandArray = constraintRhs; + constraintRhs = { in: inConstraintShorthandArray }; + + }//>- + + + + + + + + + + + // ██████╗ ██████╗ ███╗ ███╗██████╗ ██╗ ███████╗██╗ ██╗ + // ██╔════╝██╔═══██╗████╗ ████║██╔══██╗██║ ██╔════╝╚██╗██╔╝ + // ██║ ██║ ██║██╔████╔██║██████╔╝██║ █████╗ ╚███╔╝ + // ██║ ██║ ██║██║╚██╔╝██║██╔═══╝ ██║ ██╔══╝ ██╔██╗ + // ╚██████╗╚██████╔╝██║ ╚═╝ ██║██║ ███████╗███████╗██╔╝ ██╗ + // ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚══════╝╚══════╝╚═╝ ╚═╝ + // + // ██████╗ ██████╗ ███╗ ██╗███████╗████████╗██████╗ █████╗ ██╗███╗ ██╗████████╗ + // ██╔════╝██╔═══██╗████╗ ██║██╔════╝╚══██╔══╝██╔══██╗██╔══██╗██║████╗ ██║╚══██╔══╝ + // ██║ ██║ ██║██╔██╗ ██║███████╗ ██║ ██████╔╝███████║██║██╔██╗ ██║ ██║ + // ██║ ██║ ██║██║╚██╗██║╚════██║ ██║ ██╔══██╗██╔══██║██║██║╚██╗██║ ██║ + // ╚██████╗╚██████╔╝██║ ╚████║███████║ ██║ ██║ ██║██║ ██║██║██║ ╚████║ ██║ + // ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // If this is a complex constraint (a dictionary)... + if (_.isObject(constraintRhs) && !_.isFunction(constraintRhs) && !_.isArray(constraintRhs)) { + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┬ ┬ ┌┬┐┬┌─┐┌┬┐┬┌─┐┌┐┌┌─┐┬─┐┬ ┬ + // ├─┤├─┤│││ │││ ├┤ ├┤ │││├─┘ │ └┬┘ ││││ │ ││ ││││├─┤├┬┘└┬┘ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ └─┘┴ ┴┴ ┴ ┴ ─┴┘┴└─┘ ┴ ┴└─┘┘└┘┴ ┴┴└─ ┴ + // An empty dictionary (or a dictionary w/ an unrecognized modifier key) + // is never allowed as a complex constraint. + var numKeys = _.keys(constraintRhs).length; + if (numKeys === 0) { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'If specifying a complex constraint, there should always be at least one modifier. 
But the constraint provided as `'+constraintTarget+'` has no keys-- it is just `{}`, an empty dictionary (aka plain JavaScript object).' + )); + }//-• + + if (numKeys !== 1) { + throw new Error('Consistency violation: If provided as a dictionary, the constraint RHS passed in to the internal normalizeConstraint() utility must always have exactly one key. (Should have been normalized already.) But instead, got: '+util.inspect(constraintRhs, {depth:5})+''); + } + + // Determine what kind of modifier this constraint has, and get a reference to the modifier's RHS. + // > Note that we HAVE to set `constraint[modifierKind]` any time we make a by-value change. + // > We take care of this at the bottom of this section. + var modifierKind = _.keys(constraintRhs)[0]; + var modifier = constraintRhs[modifierKind]; + + + + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ┌─┐┬ ┬┌─┐┌─┐┌─┐┌─┐ + // ├─┤├─┤│││ │││ ├┤ ├─┤│ │├─┤└─┐├┤ └─┐ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ┴ ┴┴─┘┴┴ ┴└─┘└─┘└─┘ + // Handle simple modifier aliases, for compatibility. + if (!MODIFIER_KINDS[modifierKind] && MODIFIER_ALIASES[modifierKind]) { + var originalModifierKind = modifierKind; + delete constraintRhs[originalModifierKind]; + modifierKind = MODIFIER_ALIASES[originalModifierKind]; + constraintRhs[modifierKind] = modifier; + + console.warn(); + console.warn( + 'Deprecated: The `where` clause of this query contains '+'\n'+ + 'a `'+originalModifierKind+'` modifier (for `'+constraintTarget+'`). But as of Sails v1.0,'+'\n'+ + 'this modifier is deprecated. (Please use `'+modifierKind+'` instead.)\n'+ + 'This was automatically normalized on your behalf for the'+'\n'+ + 'sake of compatibility, but please change this ASAP.'+'\n'+ + '> Warning: This backwards compatibility may be removed\n'+ + '> in a future release of Sails/Waterline. If this usage\n'+ + '> is left unchanged, then queries like this one may eventually \n'+ + '> fail with an error.' 
+ ); + console.warn(); + + }//>- + + // Understand the "!=" modifier as "nin" if it was provided as an array. + if (modifierKind === '!=' && _.isArray(modifier)) { + delete constraintRhs[modifierKind]; + modifierKind = 'nin'; + constraintRhs[modifierKind] = modifier; + }//>- + + + + // + // --• At this point, we're doing doing uninformed transformations of the constraint. + // i.e. while, in some cases, the code below changes the `modifierKind`, the + // following if/else statements are effectively a switch statement. So in other + // words, any transformations going on are specific to a particular `modifierKind`. + // + + + + // ╔╗╔╔═╗╔╦╗ ╔═╗╔═╗ ╦ ╦╔═╗╦ + // ║║║║ ║ ║ ║╣ ║═╬╗║ ║╠═╣║ + // ╝╚╝╚═╝ ╩ ╚═╝╚═╝╚╚═╝╩ ╩╩═╝ + if (modifierKind === '!=') { + + // Ensure this modifier is valid, normalizing it if possible. + try { + modifier = normalizeComparisonValue(modifier, constraintTarget, modelIdentity, orm); + } catch (e) { + switch (e.code) { + case 'E_VALUE_NOT_USABLE': throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error('Invalid `!=` ("not equal") modifier. '+e.message)); + default: throw e; + } + }//>-• + + }//‡ + // ╦╔╗╔ + // ║║║║ + // ╩╝╚╝ + else if (modifierKind === 'in') { + + if (!_.isArray(modifier)) { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'An `in` modifier should always be provided as an array. '+ + 'But instead, for the `in` modifier at `'+constraintTarget+'`, got: '+ + util.inspect(modifier, {depth:5})+'' + )); + }//-• + + // Strip undefined items. + _.remove(modifier, function (item) { return item === undefined; }); + + // If this modifier is now an empty array, then bail with a special exception. + if (modifier.length === 0) { + throw flaverr('E_CONSTRAINT_WOULD_MATCH_NOTHING', new Error( + 'Since this `in` modifier is an empty array, it would match nothing.' + )); + }//-• + + // Ensure that each item in the array matches the expected data type for the attribute. 
+ modifier = _.map(modifier, function (item){ + + // First, ensure this is not `null`. + // (We never allow items in the array to be `null`.) + if (_.isNull(item)){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Got unsupported value (`null`) in an `in` modifier array. Please use `or: [{ '+constraintTarget+': null }, ...]` instead.' + )); + }//-• + + // Ensure this item is valid, normalizing it if possible. + try { + item = normalizeComparisonValue(item, constraintTarget, modelIdentity, orm); + } catch (e) { + switch (e.code) { + case 'E_VALUE_NOT_USABLE': throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error('Invalid item within `in` modifier array. '+e.message)); + default: throw e; + } + }//>-• + + return item; + + });// + + }//‡ + // ╔╗╔╦╔╗╔ + // ║║║║║║║ + // ╝╚╝╩╝╚╝ + else if (modifierKind === 'nin') { + + if (!_.isArray(modifier)) { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'A `nin` ("not in") modifier should always be provided as an array. '+ + 'But instead, for the `nin` modifier at `'+constraintTarget+'`, got: '+ + util.inspect(modifier, {depth:5})+'' + )); + }//-• + + // Strip undefined items. + _.remove(modifier, function (item) { return item === undefined; }); + + // If this modifier is now an empty array, then bail with a special exception. + if (modifier.length === 0) { + throw flaverr('E_CONSTRAINT_WOULD_MATCH_EVERYTHING', new Error( + 'Since this `nin` ("not in") modifier is an empty array, it would match ANYTHING.' + )); + }//-• + + // Ensure that each item in the array matches the expected data type for the attribute. + modifier = _.map(modifier, function (item){ + + // First, ensure this is not `null`. + // (We never allow items in the array to be `null`.) + if (_.isNull(item)){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Got unsupported value (`null`) in a `nin` ("not in") modifier array. Please use `or: [{ '+constraintTarget+': { \'!=\': null }, ...]` instead.' 
+ )); + }//-• + + // Ensure this item is valid, normalizing it if possible. + try { + item = normalizeComparisonValue(item, constraintTarget, modelIdentity, orm); + } catch (e) { + switch (e.code) { + case 'E_VALUE_NOT_USABLE': throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error('Invalid item within `nin` ("not in") modifier array. '+e.message)); + default: throw e; + } + }//>-• + + return item; + + });// + + }//‡ + // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗╦═╗ ╔╦╗╦ ╦╔═╗╔╗╔ + // ║ ╦╠╦╝║╣ ╠═╣ ║ ║╣ ╠╦╝ ║ ╠═╣╠═╣║║║ + // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝╩╚═ ╩ ╩ ╩╩ ╩╝╚╝ + // `>` ("greater than") + else if (modifierKind === '>') { + + // If it matches a known attribute, verify that the attribute does not declare + // itself `type: 'boolean'` (it wouldn't make any sense to attempt that) + if (attrDef && attrDef.type === 'boolean'){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'A `>` ("greater than") modifier cannot be used with a boolean attribute. (Please use `or` instead.)' + )); + }//-• + + // Ensure this modifier is valid, normalizing it if possible. + // > Note that, in addition to using the standard utility, we also verify that this + // > was not provided as `null`. (It wouldn't make any sense.) + try { + + if (_.isNull(modifier)){ + throw flaverr('E_VALUE_NOT_USABLE', new Error( + '`null` is not supported with comparison modifiers. '+ + 'Please use `or: [{ '+constraintTarget+': { \'!=\': null }, ...]` instead.' + )); + }//-• + + modifier = normalizeComparisonValue(modifier, constraintTarget, modelIdentity, orm); + + } catch (e) { + switch (e.code) { + case 'E_VALUE_NOT_USABLE': throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error('Invalid `>` ("greater than") modifier. 
'+e.message)); + default: throw e; + } + }//>-• + + }//‡ + // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗╦═╗ ╔╦╗╦ ╦╔═╗╔╗╔ ╔═╗╦═╗ ╔═╗╔═╗ ╦ ╦╔═╗╦ + // ║ ╦╠╦╝║╣ ╠═╣ ║ ║╣ ╠╦╝ ║ ╠═╣╠═╣║║║ ║ ║╠╦╝ ║╣ ║═╬╗║ ║╠═╣║ + // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝╩╚═ ╩ ╩ ╩╩ ╩╝╚╝ ╚═╝╩╚═ ╚═╝╚═╝╚╚═╝╩ ╩╩═╝ + // `>=` ("greater than or equal") + else if (modifierKind === '>=') { + + // If it matches a known attribute, verify that the attribute does not declare + // itself `type: 'boolean'` (it wouldn't make any sense to attempt that) + if (attrDef && attrDef.type === 'boolean'){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'A `>=` ("greater than or equal") modifier cannot be used with a boolean attribute. (Please use `or` instead.)' + )); + }//-• + + // Ensure this modifier is valid, normalizing it if possible. + // > Note that, in addition to using the standard utility, we also verify that this + // > was not provided as `null`. (It wouldn't make any sense.) + try { + + if (_.isNull(modifier)){ + throw flaverr('E_VALUE_NOT_USABLE', new Error( + '`null` is not supported with comparison modifiers. '+ + 'Please use `or: [{ '+constraintTarget+': { \'!=\': null }, ...]` instead.' + )); + }//-• + + modifier = normalizeComparisonValue(modifier, constraintTarget, modelIdentity, orm); + + } catch (e) { + switch (e.code) { + case 'E_VALUE_NOT_USABLE': throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error('Invalid `>=` ("greater than or equal") modifier. '+e.message)); + default: throw e; + } + }//>-• + + }//‡ + // ╦ ╔═╗╔═╗╔═╗ ╔╦╗╦ ╦╔═╗╔╗╔ + // ║ ║╣ ╚═╗╚═╗ ║ ╠═╣╠═╣║║║ + // ╩═╝╚═╝╚═╝╚═╝ ╩ ╩ ╩╩ ╩╝╚╝ + // `<` ("less than") + else if (modifierKind === '<') { + + // If it matches a known attribute, verify that the attribute does not declare + // itself `type: 'boolean'` (it wouldn't make any sense to attempt that) + if (attrDef && attrDef.type === 'boolean'){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'A `<` ("less than") modifier cannot be used with a boolean attribute. 
(Please use `or` instead.)' + )); + }//-• + + // Ensure this modifier is valid, normalizing it if possible. + // > Note that, in addition to using the standard utility, we also verify that this + // > was not provided as `null`. (It wouldn't make any sense.) + try { + + if (_.isNull(modifier)){ + throw flaverr('E_VALUE_NOT_USABLE', new Error( + '`null` is not supported with comparison modifiers. '+ + 'Please use `or: [{ '+constraintTarget+': { \'!=\': null }, ...]` instead.' + )); + }//-• + + modifier = normalizeComparisonValue(modifier, constraintTarget, modelIdentity, orm); + + } catch (e) { + switch (e.code) { + case 'E_VALUE_NOT_USABLE': throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error('Invalid `<` ("less than") modifier. '+e.message)); + default: throw e; + } + }//>-• + + }//‡ + // ╦ ╔═╗╔═╗╔═╗ ╔╦╗╦ ╦╔═╗╔╗╔ ╔═╗╦═╗ ╔═╗╔═╗ ╦ ╦╔═╗╦ + // ║ ║╣ ╚═╗╚═╗ ║ ╠═╣╠═╣║║║ ║ ║╠╦╝ ║╣ ║═╬╗║ ║╠═╣║ + // ╩═╝╚═╝╚═╝╚═╝ ╩ ╩ ╩╩ ╩╝╚╝ ╚═╝╩╚═ ╚═╝╚═╝╚╚═╝╩ ╩╩═╝ + // `<=` ("less than or equal") + else if (modifierKind === '<=') { + + // If it matches a known attribute, verify that the attribute does not declare + // itself `type: 'boolean'` (it wouldn't make any sense to attempt that) + if (attrDef && attrDef.type === 'boolean'){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'A `<=` ("less than or equal") modifier cannot be used with a boolean attribute. (Please use `or` instead.)' + )); + }//-• + + // Ensure this modifier is valid, normalizing it if possible. + // > Note that, in addition to using the standard utility, we also verify that this + // > was not provided as `null`. (It wouldn't make any sense.) + try { + + if (_.isNull(modifier)){ + throw flaverr('E_VALUE_NOT_USABLE', new Error( + '`null` is not supported with comparison modifiers. '+ + 'Please use `or: [{ '+constraintTarget+': { \'!=\': null }, ...]` instead.' 
+ )); + }//-• + + modifier = normalizeComparisonValue(modifier, constraintTarget, modelIdentity, orm); + + } catch (e) { + switch (e.code) { + case 'E_VALUE_NOT_USABLE': throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error('Invalid `<=` ("less than or equal") modifier. '+e.message)); + default: throw e; + } + }//>-• + + }//‡ + // ╔═╗╔═╗╔╗╔╔╦╗╔═╗╦╔╗╔╔═╗ + // ║ ║ ║║║║ ║ ╠═╣║║║║╚═╗ + // ╚═╝╚═╝╝╚╝ ╩ ╩ ╩╩╝╚╝╚═╝ + else if (modifierKind === 'contains') { + + // If it matches a known attribute, verify that the attribute + // does not declare itself `type: 'boolean'` or `type: 'number'`; + // and also, if it is a singular association, that the associated + // model's primary key value is not a number either. + if (attrDef && ( + attrDef.type === 'number' || + attrDef.type === 'boolean' || + (attrDef.model && reciprocalPKA.type === 'number') + )){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'A `contains` (i.e. string search) modifier cannot be used with a '+ + 'boolean or numeric attribute (it wouldn\'t make any sense).' + )); + }//>-• + + // Ensure that this modifier is a string, normalizing it if possible. + // (note that this explicitly forbids the use of `null`) + try { + modifier = rttc.validate('string', modifier); + } catch (e) { + switch (e.code) { + + case 'E_INVALID': + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Invalid `contains` (string search) modifier. '+e.message + )); + + default: + throw e; + } + }// + + + // If this modifier is the empty string (''), then it means that + // this constraint would match EVERYTHING. + if (modifier === '') { + throw flaverr('E_CONSTRAINT_WOULD_MATCH_EVERYTHING', new Error( + 'Since this `contains` (string search) modifier was provided as '+ + '`\'\'` (empty string), it would match ANYTHING!' + )); + }//-• + + // Convert this modifier into a `like`, making the necessary adjustments. + // + // > This involves escaping any existing occurences of '%', + // > converting them to '\\%' instead. 
+ // > (It's actually just one backslash, but...you know...strings ) + delete constraintRhs[modifierKind]; + modifierKind = 'like'; + modifier = modifier.replace(/%/g,'\\%'); + modifier = '%'+modifier+'%'; + constraintRhs[modifierKind] = modifier; + + }//‡ + // ╔═╗╔╦╗╔═╗╦═╗╔╦╗╔═╗ ╦ ╦╦╔╦╗╦ ╦ + // ╚═╗ ║ ╠═╣╠╦╝ ║ ╚═╗ ║║║║ ║ ╠═╣ + // ╚═╝ ╩ ╩ ╩╩╚═ ╩ ╚═╝ ╚╩╝╩ ╩ ╩ ╩ + else if (modifierKind === 'startsWith') { + + // If it matches a known attribute, verify that the attribute + // does not declare itself `type: 'boolean'` or `type: 'number'`; + // and also, if it is a singular association, that the associated + // model's primary key value is not a number either. + if (attrDef && ( + attrDef.type === 'number' || + attrDef.type === 'boolean' || + (attrDef.model && reciprocalPKA.type === 'number') + )){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'A `startsWith` (i.e. string search) modifier cannot be used with a '+ + 'boolean or numeric attribute (it wouldn\'t make any sense).' + )); + }//>-• + + // Ensure that this modifier is a string, normalizing it if possible. + // (note that this explicitly forbids the use of `null`) + try { + modifier = rttc.validate('string', modifier); + } catch (e) { + switch (e.code) { + + case 'E_INVALID': + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Invalid `startsWith` (string search) modifier. '+e.message + )); + + default: + throw e; + } + }// + + // If this modifier is the empty string (''), then it means that + // this constraint would match EVERYTHING. + if (modifier === '') { + throw flaverr('E_CONSTRAINT_WOULD_MATCH_EVERYTHING', new Error( + 'Since this `startsWith` (string search) modifier was provided as '+ + '`\'\'` (empty string), it would match ANYTHING!' + )); + }//-• + + // Convert this modifier into a `like`, making the necessary adjustments. + // + // > This involves escaping any existing occurences of '%', + // > converting them to '\\%' instead. 
+ // > (It's actually just one backslash, but...you know...strings ) + delete constraintRhs[modifierKind]; + modifierKind = 'like'; + modifier = modifier.replace(/%/g,'\\%'); + modifier = modifier+'%'; + constraintRhs[modifierKind] = modifier; + + }//‡ + // ╔═╗╔╗╔╔╦╗╔═╗ ╦ ╦╦╔╦╗╦ ╦ + // ║╣ ║║║ ║║╚═╗ ║║║║ ║ ╠═╣ + // ╚═╝╝╚╝═╩╝╚═╝ ╚╩╝╩ ╩ ╩ ╩ + else if (modifierKind === 'endsWith') { + + // If it matches a known attribute, verify that the attribute + // does not declare itself `type: 'boolean'` or `type: 'number'`; + // and also, if it is a singular association, that the associated + // model's primary key value is not a number either. + if (attrDef && ( + attrDef.type === 'number' || + attrDef.type === 'boolean' || + (attrDef.model && reciprocalPKA.type === 'number') + )){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'An `endsWith` (i.e. string search) modifier cannot be used with a '+ + 'boolean or numeric attribute (it wouldn\'t make any sense).' + )); + }//>-• + + // Ensure that this modifier is a string, normalizing it if possible. + // (note that this explicitly forbids the use of `null`) + try { + modifier = rttc.validate('string', modifier); + } catch (e) { + switch (e.code) { + + case 'E_INVALID': + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Invalid `endsWith` (string search) modifier. '+e.message + )); + + default: + throw e; + } + }// + + // If this modifier is the empty string (''), then it means that + // this constraint would match EVERYTHING. + if (modifier === '') { + throw flaverr('E_CONSTRAINT_WOULD_MATCH_EVERYTHING', new Error( + 'Since this `endsWith` (string search) modifier was provided as '+ + '`\'\'` (empty string), it would match ANYTHING!' + )); + }//-• + + // Convert this modifier into a `like`, making the necessary adjustments. + // + // > This involves escaping any existing occurences of '%', + // > converting them to '\\%' instead. 
+ // > (It's actually just one backslash, but...you know...strings ) + delete constraintRhs[modifierKind]; + modifierKind = 'like'; + modifier = modifier.replace(/%/g,'\\%'); + modifier = '%'+modifier; + constraintRhs[modifierKind] = modifier; + + }//‡ + // ╦ ╦╦╔═╔═╗ + // ║ ║╠╩╗║╣ + // ╩═╝╩╩ ╩╚═╝ + else if (modifierKind === 'like') { + + // If it matches a known attribute, verify that the attribute + // does not declare itself `type: 'boolean'` or `type: 'number'`; + // and also, if it is a singular association, that the associated + // model's primary key value is not a number either. + if (attrDef && ( + attrDef.type === 'number' || + attrDef.type === 'boolean' || + (attrDef.model && reciprocalPKA.type === 'number') + )){ + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'A `like` (i.e. SQL-style "LIKE") modifier cannot be used with a '+ + 'boolean or numeric attribute (it wouldn\'t make any sense).' + )); + }//>-• + + // Strictly verify that this modifier is a string. + // > You should really NEVER use anything other than a non-empty string for + // > `like`, because of the special % syntax. So we won't try to normalize + // > for you. + if (!_.isString(modifier) || modifier === '') { + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Invalid `like` (i.e. SQL-style "LIKE") modifier. Should be provided as '+ + 'a non-empty string, using `%` symbols as wildcards, but instead, got: '+ + util.inspect(modifier,{depth: 5})+'' + )); + }//-• + + // If this modifier is '%%', then it means that this `like` constraint + // would match EVERYTHING. + if (modifier === '%%') { + throw flaverr('E_CONSTRAINT_WOULD_MATCH_EVERYTHING', new Error( + 'Since this `like` (string search) modifier was provided as '+ + '`%%`, it would match ANYTHING!' 
+ )); + }//-• + + }//‡ + // ┬ ┬┌┐┌┬─┐┌─┐┌─┐┌─┐┌─┐┌┐┌┬┌─┐┌─┐┌┬┐ ┌┬┐┌─┐┌┬┐┬┌─┐┬┌─┐┬─┐ + // │ ││││├┬┘├┤ │ │ ││ ┬││││┌─┘├┤ ││ ││││ │ │││├┤ │├┤ ├┬┘ + // └─┘┘└┘┴└─└─┘└─┘└─┘└─┘┘└┘┴└─┘└─┘─┴┘ ┴ ┴└─┘─┴┘┴└ ┴└─┘┴└─ + // A complex constraint must always contain a recognized modifier. + else { + + throw flaverr('E_CONSTRAINT_NOT_USABLE', new Error( + 'Unrecognized modifier (`'+modifierKind+'`) within provided constraint for `'+constraintTarget+'`.' + )); + + }//>-• + + + // Just in case we made a by-value change above, set our potentially-modified modifier + // on the constraint. + constraintRhs[modifierKind] = modifier; + + } + // ███████╗ ██████╗ ██████╗ ██████╗ ███╗ ██╗███████╗████████╗██████╗ █████╗ ██╗███╗ ██╗████████╗ + // ██╔════╝██╔═══██╗ ██╔════╝██╔═══██╗████╗ ██║██╔════╝╚══██╔══╝██╔══██╗██╔══██╗██║████╗ ██║╚══██╔══╝ + // █████╗ ██║ ██║ ██║ ██║ ██║██╔██╗ ██║███████╗ ██║ ██████╔╝███████║██║██╔██╗ ██║ ██║ + // ██╔══╝ ██║▄▄ ██║ ██║ ██║ ██║██║╚██╗██║╚════██║ ██║ ██╔══██╗██╔══██║██║██║╚██╗██║ ██║ + // ███████╗╚██████╔╝ ╚██████╗╚██████╔╝██║ ╚████║███████║ ██║ ██║ ██║██║ ██║██║██║ ╚████║ ██║ + // ╚══════╝ ╚══▀▀═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚═╝ ╚═══╝ ╚═╝ + // + // Otherwise, ensure that this constraint is a valid eq constraint, including schema-aware + // normalization vs. the attribute def. + // + // > If there is no attr def, then check that it's a string, number, boolean, or `null`. + else { + + // Ensure the provided eq constraint is valid, normalizing it if possible. + try { + constraintRhs = normalizeComparisonValue(constraintRhs, constraintTarget, modelIdentity, orm); + } catch (e) { + switch (e.code) { + case 'E_VALUE_NOT_USABLE': throw flaverr('E_CONSTRAINT_NOT_USABLE', e); + default: throw e; + } + }//>-• + + }//>- + + // Return the normalized constraint. 
+ return constraintRhs; + +}; + diff --git a/lib/waterline/utils/query/private/normalize-criteria.js b/lib/waterline/utils/query/private/normalize-criteria.js new file mode 100644 index 000000000..afbfcc8a5 --- /dev/null +++ b/lib/waterline/utils/query/private/normalize-criteria.js @@ -0,0 +1,962 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var getModel = require('../../ontology/get-model'); +var getAttribute = require('../../ontology/get-attribute'); +var isSafeNaturalNumber = require('./is-safe-natural-number'); +var isValidAttributeName = require('./is-valid-attribute-name'); +var normalizeWhereClause = require('./normalize-where-clause'); +var normalizeSortClause = require('./normalize-sort-clause'); + + +/** + * Module constants + */ + +var NAMES_OF_RECOGNIZED_CLAUSES = ['where', 'limit', 'skip', 'sort', 'select', 'omit']; + + +/** + * normalizeCriteria() + * + * Validate and normalize the provided value (`criteria`), hammering it destructively + * into the standardized format suitable to be part of a "stage 2 query" (see ARCHITECTURE.md). + * This allows us to present it in a normalized fashion to lifecycle callbacks, as well to + * other internal utilities within Waterline. + * + * Since the provided value _might_ be a string, number, or some other primitive that is + * NOT passed by reference, this function has a return value: a dictionary (plain JavaScript object). + * But realize that this is only to allow for a handful of edge cases. Most of the time, the + * provided value will be irreversibly mutated in-place, AS WELL AS returned. + * + * -- + * + * There are many criteria normalization steps performed by Waterline. + * But this function only performs some of them. 
+ * + * It DOES: + * (•) validate the criteria's format (particularly the `where` clause) + * (•) normalize the structure of the criteria (particularly the `where` clause) + * (•) ensure defaults exist for `limit`, `skip`, `sort`, `select`, and `omit` + * (•) apply (logical, not physical) schema-aware validations and normalizations + * + * It DOES NOT: + * (x) transform attribute names to column names + * (x) check that the criteria isn't trying to use features which are not supported by the adapter(s) + * + * -- + * + * @param {Ref} criteria + * The original criteria (i.e. from a "stage 1 query"). + * > WARNING: + * > IN SOME CASES (BUT NOT ALL!), THE PROVIDED CRITERIA WILL + * > UNDERGO DESTRUCTIVE, IN-PLACE CHANGES JUST BY PASSING IT + * > IN TO THIS UTILITY. + * + * @param {String} modelIdentity + * The identity of the model this criteria is referring to (e.g. "pet" or "user") + * > Useful for looking up the Waterline model and accessing its attribute definitions. + * + * @param {Ref} orm + * The Waterline ORM instance. + * > Useful for accessing the model definitions. + * + * @param {Dictionary?} meta + * The contents of the `meta` query key, if one was provided. + * > Useful for propagating query options to low-level utilities like this one. + * + * -- + * + * @returns {Dictionary} + * The successfully-normalized criteria, ready for use in a stage 2 query. + * + * + * @throws {Error} If it encounters irrecoverable problems or unsupported usage in + * the provided criteria, including e.g. an invalid constraint is specified + * for an association. + * @property {String} code + * - E_HIGHLY_IRREGULAR + * + * + * @throws {Error} If the criteria indicates that it should never match anything. + * @property {String} code + * - E_WOULD_RESULT_IN_NOTHING + * + * + * @throws {Error} If anything else unexpected occurs. + */ +module.exports = function normalizeCriteria(criteria, modelIdentity, orm, meta) { + + // Sanity checks. 
+ // > These are just some basic, initial usage assertions to help catch + // > bugs during development of Waterline core. + // + // At this point, `criteria` MUST NOT be undefined. + // (Any defaulting related to that should be taken care of before calling this function.) + if (_.isUndefined(criteria)) { + throw new Error('Consistency violation: `criteria` should never be `undefined` when it is passed in to the normalizeCriteria() utility.'); + } + + + + // Look up the Waterline model for this query. + // > This is so that we can reference the original model definition. + var WLModel; + try { + WLModel = getModel(modelIdentity, orm); + } catch (e) { + switch (e.code) { + case 'E_MODEL_NOT_REGISTERED': throw new Error('Consistency violation: '+e.message); + default: throw e; + } + }// + + + + + // ████████╗ ██████╗ ██████╗ ██╗ ███████╗██╗ ██╗███████╗██╗ + // ╚══██╔══╝██╔═══██╗██╔══██╗ ██║ ██╔════╝██║ ██║██╔════╝██║ + // ██║ ██║ ██║██████╔╝█████╗██║ █████╗ ██║ ██║█████╗ ██║ + // ██║ ██║ ██║██╔═══╝ ╚════╝██║ ██╔══╝ ╚██╗ ██╔╝██╔══╝ ██║ + // ██║ ╚██████╔╝██║ ███████╗███████╗ ╚████╔╝ ███████╗███████╗ + // ╚═╝ ╚═════╝ ╚═╝ ╚══════╝╚══════╝ ╚═══╝ ╚══════╝╚══════╝ + // + // ███████╗ █████╗ ███╗ ██╗██╗████████╗██╗███████╗ █████╗ ████████╗██╗ ██████╗ ███╗ ██╗ + // ██╔════╝██╔══██╗████╗ ██║██║╚══██╔══╝██║╚══███╔╝██╔══██╗╚══██╔══╝██║██╔═══██╗████╗ ██║ + // ███████╗███████║██╔██╗ ██║██║ ██║ ██║ ███╔╝ ███████║ ██║ ██║██║ ██║██╔██╗ ██║ + // ╚════██║██╔══██║██║╚██╗██║██║ ██║ ██║ ███╔╝ ██╔══██║ ██║ ██║██║ ██║██║╚██╗██║ + // ███████║██║ ██║██║ ╚████║██║ ██║ ██║███████╗██║ ██║ ██║ ██║╚██████╔╝██║ ╚████║ + // ╚══════╝╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ + // + + + // ╔═╗╔═╗╔╦╗╔═╗╔═╗╔╦╗╦╔╗ ╦╦ ╦╔╦╗╦ ╦ (COMPATIBILITY) + // ║ ║ ║║║║╠═╝╠═╣ ║ ║╠╩╗║║ ║ ║ ╚╦╝ + // ╚═╝╚═╝╩ ╩╩ ╩ ╩ ╩ ╩╚═╝╩╩═╝╩ ╩ ╩ + // ┌─ ┌┬┐┌─┐┌─┐┬ ┬ ┬┬ ┌─┐┌─┐┬ ┌─┐┌─┐ ┬ ┬┌─┐ ┌┬┐┬┌─┐┌─┐ ┌─┐┌─┐┬ ┌─┐┌─┐┬ ┬ ─┐ + // │─── │ │ │├─┘│ └┐┌┘│ ├┤ ├─┤│ └─┐├┤ └┐┌┘└─┐ ││││└─┐│ ├┤ ├─┤│ └─┐├┤ └┬┘ ───│ + // └─ ┴ 
└─┘┴ ┴─┘└┘ ┴─┘ └ ┴ ┴┴─┘└─┘└─┘ └┘ └─┘o ┴ ┴┴└─┘└─┘ └ ┴ ┴┴─┘└─┘└─┘ ┴ ─┘ + + // If criteria is `false`, then we take that to mean that this is a special reserved + // criteria (Ø) that will never match any records. + if (criteria === false) { + throw flaverr('E_WOULD_RESULT_IN_NOTHING', new Error( + 'In previous versions of Waterline, a criteria of `false` indicated that '+ + 'the specified query should simulate no matches. Now, it is up to the method. '+ + 'Be aware that support for using `false` in userland criterias may be completely '+ + 'removed in a future release of Sails/Waterline.' + )); + }//-• + + // If criteria is otherwise falsey (false, null, empty string, NaN, zero, negative zero) + // then understand it to mean the empty criteria (`{}`), which simulates ALL matches. + // Note that backwards-compatible support for this could be removed at any time! + if (!criteria) { + console.warn( + 'Deprecated: In previous versions of Waterline, the specified criteria '+ + '(`'+util.inspect(criteria,{depth:5})+'`) would match ALL records in '+ + 'this model. If that is what you are intending to happen, then please pass '+ + 'in `{}` instead, or simply omit the `criteria` dictionary altogether-- both of '+ + 'which are more explicit and future-proof ways of doing the same thing.\n'+ + '> Warning: This backwards compatibility will be removed\n'+ + '> in a future release of Sails/Waterline. If this usage\n'+ + '> is left unchanged, then queries like this one will eventually \n'+ + '> fail with an error.' 
+ ); + criteria = {}; + }//>- + + + + // ┌┐┌┌─┐┬─┐┌┬┐┌─┐┬ ┬┌─┐┌─┐ ╔═╗╦╔═╦ ╦ ┌─┐┬─┐ ╦╔╗╔ ┌─┐┬ ┬┌─┐┬─┐┌┬┐┬ ┬┌─┐┌┐┌┌┬┐ + // ││││ │├┬┘│││├─┤│ │┌─┘├┤ ╠═╝╠╩╗╚╗╔╝ │ │├┬┘ ║║║║ └─┐├─┤│ │├┬┘ │ ├─┤├─┤│││ ││ + // ┘└┘└─┘┴└─┴ ┴┴ ┴┴─┘┴└─┘└─┘ ╩ ╩ ╩ ╚╝ └─┘┴└─ ╩╝╚╝ └─┘┴ ┴└─┘┴└─ ┴ ┴ ┴┴ ┴┘└┘─┴┘ + // ┌─ ┌┬┐┌─┐┌─┐┬ ┬ ┬┬ ┌─┐┌┬┐┬─┐ ┌┐┌┬ ┬┌┬┐ ┌─┐┬─┐ ┌─┐┬─┐┬─┐┌─┐┬ ┬ ─┐ + // │─── │ │ │├─┘│ └┐┌┘│ └─┐ │ ├┬┘ ││││ ││││ │ │├┬┘ ├─┤├┬┘├┬┘├─┤└┬┘ ───│ + // └─ ┴ └─┘┴ ┴─┘└┘ ┴─┘ └─┘ ┴ ┴└─┘ ┘└┘└─┘┴ ┴┘ └─┘┴└─ ┴ ┴┴└─┴└─┴ ┴ ┴ ─┘ + // + // If the provided criteria is an array, string, or number, then we'll be able + // to understand it as a primary key, or as an array of primary key values. + if (_.isArray(criteria) || _.isNumber(criteria) || _.isString(criteria)) { + + var topLvlPkValuesOrPkValue = criteria; + + // So expand that into the beginnings of a proper criteria dictionary. + // (This will be further normalized throughout the rest of this file-- + // this is just enough to get us to where we're working with a dictionary.) + criteria = {}; + criteria.where = {}; + criteria.where[WLModel.primaryKey] = topLvlPkValuesOrPkValue; + + }//>- + + + // ┬ ┬┌─┐┬─┐┬┌─┐┬ ┬ ╔═╗╦╔╗╔╔═╗╦ ┌┬┐┌─┐┌─┐ ┬ ┬ ┬┬ ┌┬┐┌─┐┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐┌─┐ + // └┐┌┘├┤ ├┬┘│├┤ └┬┘ ╠╣ ║║║║╠═╣║ │ │ │├─┘───│ └┐┌┘│ ││├─┤ │ ├─┤ │ └┬┘├─┘├┤ + // └┘ └─┘┴└─┴└ ┴ ╚ ╩╝╚╝╩ ╩╩═╝ ┴ └─┘┴ ┴─┘└┘ ┴─┘ ─┴┘┴ ┴ ┴ ┴ ┴ ┴ ┴ ┴ └─┘ + // + // IWMIH and the provided criteria is anything OTHER than a proper dictionary, + // (e.g. if it's a function or regexp or something) then that means it is invalid. + if (!_.isObject(criteria) || _.isArray(criteria) || _.isFunction(criteria)){ + throw flaverr('E_HIGHLY_IRREGULAR', new Error('The provided criteria is invalid. 
Should be a dictionary (plain JavaScript object), but instead got: '+util.inspect(criteria, {depth:5})+'')); + }//-• + + + // ╔═╗╔═╗╔╦╗╔═╗╔═╗╔╦╗╦╔╗ ╦╦ ╦╔╦╗╦ ╦ (COMPATIBILITY) + // ║ ║ ║║║║╠═╝╠═╣ ║ ║╠╩╗║║ ║ ║ ╚╦╝ + // ╚═╝╚═╝╩ ╩╩ ╩ ╩ ╩ ╩╚═╝╩╩═╝╩ ╩ ╩ + // ┌─┐┌─┐┌─┐┬─┐┌─┐┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐ ┬ ┬┌─┐┬─┐┬┌─ ┌┬┐┬┌─┐┌─┐┌─┐┬─┐┌─┐┌┐┌┌┬┐┬ ┬ ┬ ┌┐┌┌─┐┬ ┬ + // ├─┤│ ┬│ ┬├┬┘├┤ │ ┬├─┤ │ ││ ││││└─┐ ││││ │├┬┘├┴┐ │││├┤ ├┤ ├┤ ├┬┘├┤ │││ │ │ └┬┘ ││││ ││││ + // ┴ ┴└─┘└─┘┴└─└─┘└─┘┴ ┴ ┴ ┴└─┘┘└┘└─┘ └┴┘└─┘┴└─┴ ┴ ─┴┘┴└ └ └─┘┴└─└─┘┘└┘ ┴ ┴─┘┴ ┘└┘└─┘└┴┘ + // + // If we see `sum`, `average`, `min`, `max`, or `groupBy`, throw a + // fatal error to explain what's up, and also to suggest a suitable + // alternative. + // + // > Support for basic aggregations via criteria clauses was removed + // > in favor of new model methods in Waterline v0.13. Specifically + // > for `min`, `max`, and `groupBy`, for which there are no new model + // > methods, we recommend using native queries (aka "stage 5 queries"). + // > (Note that, in the future, you will also be able to do the same thing + // > using Waterline statements, aka "stage 4 queries". But as of Nov 2016, + // > they only support the basic aggregations: count, sum, and avg.) + + + if (!_.isUndefined(criteria.groupBy)) { + // ^^ + // Note that `groupBy` comes first, since it might have been used in conjunction + // with the others (and if it was, you won't be able to do whatever it is you're + // trying to do using the approach suggested by the other compatibility errors + // below.) + throw new Error( + 'The `groupBy` clause is no longer supported in Sails/Waterline.\n'+ + 'In previous versions, `groupBy` could be provided in a criteria '+ + 'to perform an aggregation query. But as of Sails v1.0/Waterline v0.13, the '+ + 'usage has changed. 
Now, to run aggregate queries using the `groupBy` operator, '+ + 'use a native query instead.\n'+ + '\n'+ + 'Alternatively, if you are using `groupBy` as a column/attribute name then '+ + 'please be advised that some things won\'t work as expected.\n'+ + '\n'+ + 'For more info, visit:\n'+ + 'http://sailsjs.com/docs/upgrading/to-v1.0' + ); + }//-• + + if (!_.isUndefined(criteria.sum)) { + throw new Error( + 'The `sum` clause is no longer supported in Sails/Waterline.\n'+ + 'In previous versions, `sum` could be provided in a criteria '+ + 'to perform an aggregation query. But as of Sails v1.0/Waterline v0.13, the '+ + 'usage has changed. Now, to sum the value of an attribute across multiple '+ + 'records, use the `.sum()` model method.\n'+ + '\n'+ + 'For example:\n'+ + '```\n'+ + '// Get the cumulative account balance of all bank accounts that '+'\n'+ + '// have less than $32,000, or that are flagged as "suspended".'+'\n'+ + 'BankAccount.sum(\'balance\').where({'+'\n'+ + ' or: ['+'\n'+ + ' { balance: { \'<\': 32000 } },'+'\n'+ + ' { suspended: true }'+'\n'+ + ' ]'+'\n'+ + '}).exec(function (err, total){'+'\n'+ + ' // ...'+'\n'+ + '});'+'\n'+ + '```\n'+ + 'Alternatively, if you are using `sum` as a column/attribute name then '+ + 'please be advised that some things won\'t work as expected.\n'+ + '\n'+ + 'For more info, see:\n'+ + 'http://sailsjs.com/docs/reference/waterline-orm/models/sum' + ); + }//-• + + if (!_.isUndefined(criteria.average)) { + throw new Error( + 'The `average` clause is no longer supported in Sails/Waterline.\n'+ + 'In previous versions, `average` could be provided in a criteria '+ + 'to perform an aggregation query. But as of Sails v1.0/Waterline v0.13, the '+ + 'usage has changed. 
Now, to calculate the mean value of an attribute across '+ + 'multiple records, use the `.avg()` model method.\n'+ + '\n'+ + 'For example:\n'+ + '```\n'+ + '// Get the average balance of bank accounts owned by people between '+'\n'+ + '// the ages of 35 and 45.'+'\n'+ + 'BankAccount.avg(\'balance\').where({'+'\n'+ + ' ownerAge: { \'>=\': 35, \'<=\': 45 }'+'\n'+ + '}).exec(function (err, averageBalance){'+'\n'+ + ' // ...'+'\n'+ + '});'+'\n'+ + '```\n'+ + 'Alternatively, if you are using `average` as a column/attribute name then '+ + 'please be advised that some things won\'t work as expected.\n'+ + '\n'+ + 'For more info, see:\n'+ + 'http://sailsjs.com/docs/reference/waterline-orm/models/avg' + ); + }//-• + + if (!_.isUndefined(criteria.min)) { + throw new Error( + 'The `min` clause is no longer supported in Sails/Waterline.\n'+ + 'In previous versions, `min` could be provided in a criteria '+ + 'to perform an aggregation query. But as of Sails v1.0/Waterline v0.13, the '+ + 'usage has changed. 
Now, to calculate the minimum value of an attribute '+ + 'across multiple records, use the `.find()` model method.\n'+ + '\n'+ + 'For example:\n'+ + '```\n'+ + '// Get the smallest account balance from amongst all account holders '+'\n'+ + '// between the ages of 35 and 45.'+'\n'+ + 'BankAccount.find(\'balance\').where({'+'\n'+ + ' ownerAge: { \'>=\': 35, \'<=\': 45 }'+'\n'+ + '})'+'\n'+ + '.limit(1)'+'\n'+ + '.select([\'balance\'])'+'\n'+ + '.sort(\'balance ASC\')'+'\n'+ + '}).exec(function (err, relevantAccounts){'+'\n'+ + ' // ...'+'\n'+ + ' var minBalance;'+'\n'+ + ' if (relevantAccounts[0]) {'+'\n'+ + ' minBalance = relevantAccounts[0].balance;'+'\n'+ + ' }'+'\n'+ + ' else {'+'\n'+ + ' minBalance = null;'+'\n'+ + ' }'+'\n'+ + '});'+'\n'+ + '```\n'+ + 'Alternatively, if you are using `min` as a column/attribute name then '+ + 'please be advised that some things won\'t work as expected.\n'+ + '\n'+ + 'For more info, see:\n'+ + 'http://sailsjs.com/docs/reference/waterline-orm/models/find' + ); + }//-• + + if (!_.isUndefined(criteria.max)) { + throw new Error( + 'The `max` clause is no longer supported in Sails/Waterline.\n'+ + 'In previous versions, `max` could be provided in a criteria '+ + 'to perform an aggregation query. But as of Sails v1.0/Waterline v0.13, the '+ + 'usage has changed. 
Now, to calculate the maximum value of an attribute '+ + 'across multiple records, use the `.find()` model method.\n'+ + '\n'+ + 'For example:\n'+ + '```\n'+ + '// Get the largest account balance from amongst all account holders '+'\n'+ + '// between the ages of 35 and 45.'+'\n'+ + 'BankAccount.find(\'balance\').where({'+'\n'+ + ' ownerAge: { \'>=\': 35, \'<=\': 45 }'+'\n'+ + '})'+'\n'+ + '.limit(1)'+'\n'+ + '.select([\'balance\'])'+'\n'+ + '.sort(\'balance DESC\')'+'\n'+ + '}).exec(function (err, relevantAccounts){'+'\n'+ + ' // ...'+'\n'+ + ' var maxBalance;'+'\n'+ + ' if (relevantAccounts[0]) {'+'\n'+ + ' maxBalance = relevantAccounts[0].balance;'+'\n'+ + ' }'+'\n'+ + ' else {'+'\n'+ + ' maxBalance = null;'+'\n'+ + ' }'+'\n'+ + '});'+'\n'+ + '```\n'+ + 'Alternatively, if you are using `max` as a column/attribute name then '+ + 'please be advised that some things won\'t work as expected.\n'+ + '\n'+ + 'For more info, see:\n'+ + 'http://sailsjs.com/docs/reference/waterline-orm/models/find' + ); + }//-• + + + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╦╔╦╗╔═╗╦ ╦╔═╗╦╔╦╗ ╦ ╦╦ ╦╔═╗╦═╗╔═╗ ╔═╗╦ ╔═╗╦ ╦╔═╗╔═╗ + // ├─┤├─┤│││ │││ ├┤ ║║║║╠═╝║ ║║ ║ ║ ║║║╠═╣║╣ ╠╦╝║╣ ║ ║ ╠═╣║ ║╚═╗║╣ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╩╩ ╩╩ ╩═╝╩╚═╝╩ ╩ ╚╩╝╩ ╩╚═╝╩╚═╚═╝ ╚═╝╩═╝╩ ╩╚═╝╚═╝╚═╝ + // + // Now, if the provided criteria dictionary DOES NOT contain the names of ANY + // known criteria clauses (like `where`, `limit`, etc.) as properties, then we + // can safely assume that it is relying on shorthand: i.e. simply specifying what + // would normally be the `where` clause, but at the top level. + var recognizedClauses = _.intersection(_.keys(criteria), NAMES_OF_RECOGNIZED_CLAUSES); + if (recognizedClauses.length === 0) { + + criteria = { + where: criteria + }; + + } + // Otherwise, it DOES contain a recognized clause keyword. + else { + // In which case... well, there's nothing else to do just yet. + // + // > Note: a little ways down, we do a check for any extraneous properties. 
+ // > That check is important, because mixed criterias like `{foo: 'bar', limit: 3}` + // > _were_ supported in previous versions of Waterline, but they are not anymore. + }//>- + + + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // ╔═╗╔═╗╔╦╗╔═╗╔═╗╔╦╗╦╔╗ ╦╦ ╦╔╦╗╦ ╦ (COMPATIBILITY) + // ║ ║ ║║║║╠═╝╠═╣ ║ ║╠╩╗║║ ║ ║ ╚╦╝ + // ╚═╝╚═╝╩ ╩╩ ╩ ╩ ╩ ╩╚═╝╩╩═╝╩ ╩ ╩ + // ┌─ ┌─┐┌─┐┬─┐┬ ┬┌┐ ╔═╗╔═╗╔═╗╦ ╦╦ ╔═╗╔╦╗╔═╗ ┬ ╔═╗╔═╗╔═╗╦ ╦╦ ╔═╗╔╦╗╔═╗╔═╗ ─┐ + // │─── └─┐│ ├┬┘│ │├┴┐ ╠═╝║ ║╠═╝║ ║║ ╠═╣ ║ ║╣ ┌┼─ ╠═╝║ ║╠═╝║ ║║ ╠═╣ ║ ║╣ ╚═╗ ───│ + // └─ └─┘└─┘┴└─└─┘└─┘ ╩ ╚═╝╩ ╚═╝╩═╝╩ ╩ ╩ ╚═╝ └┘ ╩ ╚═╝╩ ╚═╝╩═╝╩ ╩ ╩ ╚═╝╚═╝ ─┘ + // + // - - - - - - - - - - - - - + // NOTE: + // Leaving this stuff commented out, because we should really just break + // backwards-compatibility here. If either of these properties are used, + // they are caught below by the unrecognized property check. + // + // This was not documented, and so hopefully was not widely used. If you've + // got feedback on that, hit up @particlebanana or @mikermcneil on Twitter. + // - - - - - - - - - - - - - + // ``` + // // For compatibility, tolerate the presence of `.populate` or `.populates` on the + // // criteria dictionary (but scrub those suckers off right away). + // delete criteria.populate; + // delete criteria.populates; + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + // ┌─┐┬─┐┌─┐┬ ┬┌─┐┌┐┌┌┬┐ ╔═╗═╗ ╦╔╦╗╦═╗╔═╗╔╗╔╔═╗╔═╗╦ ╦╔═╗ ╔═╗╦═╗╔═╗╔═╗╔═╗╦═╗╔╦╗╦╔═╗╔═╗ + // ├─┘├┬┘├┤ └┐┌┘├┤ │││ │ ║╣ ╔╩╦╝ ║ ╠╦╝╠═╣║║║║╣ ║ ║║ ║╚═╗ ╠═╝╠╦╝║ ║╠═╝║╣ ╠╦╝ ║ ║║╣ ╚═╗ + // ┴ ┴└─└─┘ └┘ └─┘┘└┘ ┴ ╚═╝╩ ╚═ ╩ ╩╚═╩ ╩╝╚╝╚═╝╚═╝╚═╝╚═╝ ╩ ╩╚═╚═╝╩ ╚═╝╩╚═ ╩ ╩╚═╝╚═╝ + // + // Now that we've handled the "implicit `where`" case, make sure all remaining + // top-level keys on the criteria dictionary match up with recognized criteria + // clauses. 
+ _.each(_.keys(criteria), function(clauseName) { + + var clauseDef = criteria[clauseName]; + + // If this is NOT a recognized criteria clause... + var isRecognized = _.contains(NAMES_OF_RECOGNIZED_CLAUSES, clauseName); + if (!isRecognized) { + // Then, check to see if the RHS is `undefined`. + // If so, just strip it out and move on. + if (_.isUndefined(clauseDef)) { + delete criteria[clauseName]; + return; + }//-• + + // Otherwise, this smells like a mistake. + // It's at least highly irregular, that's for sure. + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The provided criteria contains an unrecognized property: '+ + util.inspect(clauseName, {depth:5})+'\n'+ + '* * *\n'+ + 'In previous versions of Sails/Waterline, this criteria _may_ have worked, since '+ + 'keywords like `limit` were allowed to sit alongside attribute names that are '+ + 'really supposed to be wrapped inside of the `where` clause. But starting in '+ + 'Sails v1.0/Waterline 0.13, if a `limit`, `skip`, `sort`, etc is defined, then '+ + 'any vs. pairs should be explicitly contained '+ + 'inside the `where` clause.\n'+ + '* * *' + )); + + }//-• + + // Otherwise, we know this must be a recognized criteria clause, so we're good. + // (We'll check it out more carefully in just a sec below.) + return; + + });// + + + + + + // ██╗ ██╗██╗ ██╗███████╗██████╗ ███████╗ + // ██║ ██║██║ ██║██╔════╝██╔══██╗██╔════╝ + // ██║ █╗ ██║███████║█████╗ ██████╔╝█████╗ + // ██║███╗██║██╔══██║██╔══╝ ██╔══██╗██╔══╝ + // ╚███╔███╔╝██║ ██║███████╗██║ ██║███████╗ + // ╚══╝╚══╝ ╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝╚══════╝ + // + + try { + criteria.where = normalizeWhereClause(criteria.where, modelIdentity, orm, meta); + } catch (e) { + switch (e.code) { + + case 'E_WHERE_CLAUSE_UNUSABLE': + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'Could not use the provided `where` clause. 
'+ e.message
+        ));
+
+      case 'E_WOULD_RESULT_IN_NOTHING':
+        throw e;
+
+      // If no error code (or an unrecognized error code) was specified,
+      // then we assume that this was a spectacular failure due to some
+      // kind of unexpected, internal error on our part.
+      default:
+        throw new Error('Consistency violation: Unexpected error normalizing/validating the `where` clause: '+e.stack);
+    }
+  }//>-•
+
+
+
+  //  ██╗     ██╗███╗   ███╗██╗████████╗
+  //  ██║     ██║████╗ ████║██║╚══██╔══╝
+  //  ██║     ██║██╔████╔██║██║   ██║
+  //  ██║     ██║██║╚██╔╝██║██║   ██║
+  //  ███████╗██║██║ ╚═╝ ██║██║   ██║
+  //  ╚══════╝╚═╝╚═╝     ╚═╝╚═╝   ╚═╝
+  // Validate/normalize `limit` clause.
+
+  //  ╔╦╗╔═╗╔═╗╔═╗╦ ╦╦  ╔╦╗  ┬  ┬┌┬┐┬┌┬┐
+  //   ║║║╣ ╠╣ ╠═╣║ ║║   ║   │  │││││ │
+  //  ═╩╝╚═╝╚  ╩ ╩╚═╝╩═╝╩    ┴─┘┴┴ ┴┴ ┴
+  // If no `limit` clause was provided, give it a default value.
+  if (_.isUndefined(criteria.limit)) {
+    criteria.limit = (Number.MAX_SAFE_INTEGER||9007199254740991);
+  }//>-
+
+
+
+  //  ╔═╗╔═╗╦═╗╔═╗╔═╗  ┌─┐┬─┐┌─┐┌┬┐  ╔═╗╔╦╗╦═╗╦╔╗╔╔═╗
+  //  ╠═╝╠═╣╠╦╝╚═╗║╣   ├┤ ├┬┘│ ││││  ╚═╗ ║ ╠╦╝║║║║║ ╦
+  //  ╩  ╩ ╩╩╚═╚═╝╚═╝  └  ┴└─└─┘┴ ┴  ╚═╝ ╩ ╩╚═╩╝╚╝╚═╝
+  // If the provided `limit` is a string, attempt to parse it into a number.
+  if (_.isString(criteria.limit)) {
+    criteria.limit = +criteria.limit;
+  }//>-•
+
+
+  //  ╔═╗╔═╗╔╦╗╔═╗╔═╗╔╦╗╦╔╗ ╦╦  ╦╔╦╗╦ ╦  (COMPATIBILITY)
+  //  ║  ║ ║║║║╠═╝╠═╣ ║ ║╠╩╗║║  ║ ║  ╚╦╝
+  //  ╚═╝╚═╝╩ ╩╩  ╩ ╩ ╩ ╩╚═╝╩╩═╝╩ ╩   ╩
+  //  ┌─  ┌┐┌┬ ┬┬  ┬  ┬┌┐┌┌─┐┬┌┐┌┬┌┬┐┬ ┬  ┌─┐┌─┐┬─┐┌─┐
+  //  │───  ││││ ││  │  ││││├┤ │││││ │ └┬┘  ┌─┘├┤ ├┬┘│ │
+  //  └─  ┘└┘└─┘┴─┘┴─┘┘ ┴┘└┘└ ┴┘└┘┴ ┴ ┴┘  └─┘└─┘┴└─└─┘┘
+  //  ┬  ┌┐┌┌─┐┌─┐┌─┐┌┬┐┬┬  ┬┌─┐  ┌┐┌┬ ┬┌┬┐┌┐ ┌─┐┬─┐┌─┐  ─┐
+  //  ┌┼─  │││├┤ │ ┬├─┤ │ │└┐┌┘├┤   ││││ ││││├┴┐├┤ ├┬┘└─┐  ───│
+  //  └┘  ┘└┘└─┘└─┘┴ ┴ ┴ ┴ └┘ └─┘  ┘└┘└─┘┴ ┴└─┘└─┘┴└─└─┘  ─┘
+  // For convenience/compatibility, we also tolerate `null` and `Infinity`,
+  // and understand them to mean the same thing.
+ if (_.isNull(criteria.limit) || criteria.limit === Infinity) { + criteria.limit = (Number.MAX_SAFE_INTEGER||9007199254740991); + }//>- + + // If limit is zero, then that means we'll be returning NO results. + if (criteria.limit === 0) { + throw flaverr('E_WOULD_RESULT_IN_NOTHING', new Error('A criteria with `limit: 0` will never actually match any records.')); + }//-• + + // If limit is less than zero, then use the default limit. + // (But log a deprecation message.) + if (criteria.limit < 0) { + console.warn( + 'Deprecated: In previous versions of Waterline, the specified `limit` '+ + '(`'+util.inspect(criteria.limit,{depth:5})+'`) would work the same '+ + 'as if you had omitted the `limit` altogether-- i.e. defaulting to `Number.MAX_SAFE_INTEGER`. '+ + 'If that is what you are intending to happen, then please just omit `limit` instead, which is '+ + 'a more explicit and future-proof way of doing the same thing.\n'+ + '> Warning: This backwards compatibility will be removed\n'+ + '> in a future release of Sails/Waterline. If this usage\n'+ + '> is left unchanged, then queries like this one will eventually \n'+ + '> fail with an error.' + ); + criteria.limit = (Number.MAX_SAFE_INTEGER||9007199254740991); + }//>- + + + // ┬ ┬┌─┐┬─┐┬┌─┐┬ ┬ ┌┬┐┬ ┬┌─┐┌┬┐ ┬ ┬┌┬┐┬┌┬┐ ┬┌─┐ ┌┐┌┌─┐┬ ┬ + // └┐┌┘├┤ ├┬┘│├┤ └┬┘ │ ├─┤├─┤ │ │ │││││ │ │└─┐ ││││ ││││ + // └┘ └─┘┴└─┴└ ┴ ┴ ┴ ┴┴ ┴ ┴ ┴─┘┴┴ ┴┴ ┴ ┴└─┘ ┘└┘└─┘└┴┘ + // ┌─┐ ╔═╗╔═╗╔═╗╔═╗ ╔╗╔╔═╗╔╦╗╦ ╦╦═╗╔═╗╦ ╔╗╔╦ ╦╔╦╗╔╗ ╔═╗╦═╗ + // ├─┤ ╚═╗╠═╣╠╣ ║╣ ║║║╠═╣ ║ ║ ║╠╦╝╠═╣║ ║║║║ ║║║║╠╩╗║╣ ╠╦╝ + // ┴ ┴ ╚═╝╩ ╩╚ ╚═╝┘ ╝╚╝╩ ╩ ╩ ╚═╝╩╚═╩ ╩╩═╝ ╝╚╝╚═╝╩ ╩╚═╝╚═╝╩╚═ + // At this point, the `limit` should be a safe, natural number. + // But if that's not the case, we say that this criteria is highly irregular. + // + // > Remember, if the limit happens to have been provided as `Infinity`, we + // > already handled that special case above, and changed it to be + // > `Number.MAX_SAFE_INTEGER` instead (which is a safe, natural number). 
+ if (!isSafeNaturalNumber(criteria.limit)) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `limit` clause in the provided criteria is invalid. '+ + 'If provided, it should be a safe, natural number. '+ + 'But instead, got: '+ + util.inspect(criteria.limit, {depth:5})+'' + )); + }//-• + + + // ███████╗██╗ ██╗██╗██████╗ + // ██╔════╝██║ ██╔╝██║██╔══██╗ + // ███████╗█████╔╝ ██║██████╔╝ + // ╚════██║██╔═██╗ ██║██╔═══╝ + // ███████║██║ ██╗██║██║ + // ╚══════╝╚═╝ ╚═╝╚═╝╚═╝ + // + // Validate/normalize `skip` clause. + + + // ╔╦╗╔═╗╔═╗╔═╗╦ ╦╦ ╔╦╗ + // ║║║╣ ╠╣ ╠═╣║ ║║ ║ + // ═╩╝╚═╝╚ ╩ ╩╚═╝╩═╝╩ + // If no `skip` clause was provided, give it a default value. + if (_.isUndefined(criteria.skip)) { + criteria.skip = 0; + }//>- + + + // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐┬─┐┌─┐┌┬┐ ╔═╗╔╦╗╦═╗╦╔╗╔╔═╗ + // ╠═╝╠═╣╠╦╝╚═╗║╣ ├┤ ├┬┘│ ││││ ╚═╗ ║ ╠╦╝║║║║║ ╦ + // ╩ ╩ ╩╩╚═╚═╝╚═╝ └ ┴└─└─┘┴ ┴ ╚═╝ ╩ ╩╚═╩╝╚╝╚═╝ + // If the provided `skip` is a string, attempt to parse it into a number. + if (_.isString(criteria.skip)) { + criteria.skip = +criteria.skip; + }//>-• + + + // ┬ ┬┌─┐┬─┐┬┌─┐┬ ┬ ┌┬┐┬ ┬┌─┐┌┬┐ ___ ┬┌─┐ ┌┐┌┌─┐┬ ┬ + // └┐┌┘├┤ ├┬┘│├┤ └┬┘ │ ├─┤├─┤ │ | | │└─┐ ││││ ││││ + // └┘ └─┘┴└─┴└ ┴ ┴ ┴ ┴┴ ┴ ┴ | | ┴└─┘ ┘└┘└─┘└┴┘ + // ┌─┐ ╔═╗╔═╗╔═╗╔═╗ ╔╗╔╔═╗╔╦╗╦ ╦╦═╗╔═╗╦ ╔╗╔╦ ╦╔╦╗╔╗ ╔═╗╦═╗ + // ├─┤ ╚═╗╠═╣╠╣ ║╣ ║║║╠═╣ ║ ║ ║╠╦╝╠═╣║ ║║║║ ║║║║╠╩╗║╣ ╠╦╝ (OR zero) + // ┴ ┴ ╚═╝╩ ╩╚ ╚═╝┘ ╝╚╝╩ ╩ ╩ ╚═╝╩╚═╩ ╩╩═╝ ╝╚╝╚═╝╩ ╩╚═╝╚═╝╩╚═ + // At this point, the `skip` should be either zero or a safe, natural number. + // But if that's not the case, we say that this criteria is highly irregular. + if (criteria.skip === 0) { /* skip: 0 is valid */ } + else if (isSafeNaturalNumber(criteria.skip)) { /* any safe, natural number is a valid `skip` */ } + else { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `skip` clause in the provided criteria is invalid. If provided, it should be either zero (0), or a safe, natural number (e.g. 4). 
But instead, got: '+
+      util.inspect(criteria.skip, {depth:5})+''
+    ));
+  }//-•
+
+
+
+  //  ███████╗ ██████╗ ██████╗ ████████╗
+  //  ██╔════╝██╔═══██╗██╔══██╗╚══██╔══╝
+  //  ███████╗██║   ██║██████╔╝   ██║
+  //  ╚════██║██║   ██║██╔══██╗   ██║
+  //  ███████║╚██████╔╝██║  ██║   ██║
+  //  ╚══════╝ ╚═════╝ ╚═╝  ╚═╝   ╚═╝
+  //
+  // Validate/normalize `sort` clause.
+  try {
+    criteria.sort = normalizeSortClause(criteria.sort, modelIdentity, orm, meta);
+  } catch (e) {
+    switch (e.code) {
+
+      case 'E_SORT_CLAUSE_UNUSABLE':
+        throw flaverr('E_HIGHLY_IRREGULAR', new Error(
+          'Could not use the provided `sort` clause: ' + e.message
+        ));
+
+      // If no error code (or an unrecognized error code) was specified,
+      // then we assume that this was a spectacular failure due to some
+      // kind of unexpected, internal error on our part.
+      default:
+        throw new Error('Consistency violation: Encountered unexpected internal error when attempting to normalize/validate a provided `sort` clause:\n```\n'+util.inspect(criteria.sort, {depth:5})+'```\nHere is the error:\n```'+e.stack+'\n```');
+    }
+  }//>-•
+
+
+  //  ███████╗███████╗██╗     ███████╗ ██████╗████████╗
+  //  ██╔════╝██╔════╝██║     ██╔════╝██╔════╝╚══██╔══╝
+  //  ███████╗█████╗  ██║     █████╗  ██║        ██║
+  //  ╚════██║██╔══╝  ██║     ██╔══╝  ██║        ██║
+  //  ███████║███████╗███████╗███████╗╚██████╗   ██║
+  //  ╚══════╝╚══════╝╚══════╝╚══════╝ ╚═════╝   ╚═╝
+  // Validate/normalize `select` clause.
+
+
+  //  ╔╦╗╔═╗╔═╗╔═╗╦ ╦╦  ╔╦╗
+  //   ║║║╣ ╠╣ ╠═╣║ ║║   ║
+  //  ═╩╝╚═╝╚  ╩ ╩╚═╝╩═╝╩
+  // If no `select` clause was provided, give it a default value.
+  if (_.isUndefined(criteria.select)) {
+    criteria.select = ['*'];
+  }//>-
+
+
+
+  // If specified as a string, wrap it up in an array.
+  if (_.isString(criteria.select)) {
+    criteria.select = [
+      criteria.select
+    ];
+  }//>-
+
+
+  // At this point, we should have an array.
+  // If not, then we'll bail with an error.
+ if (!_.isArray(criteria.select)) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `select` clause in the provided criteria is invalid. If provided, it should be an array of strings. But instead, got: '+ + util.inspect(criteria.select, {depth:5})+'' + )); + }//-• + + + // Special handling of `['*']`. + // + // > In order for special meaning to take effect, array must have exactly one item (`*`). + // > (Also note that `*` is not a valid attribute name, so there's no chance of overlap there.) + if (_.isEqual(criteria.select, ['*'])) { + + // ['*'] is always valid-- it is the default value for the `select` clause. + // So we don't have to do anything here. + + } + // Otherwise, we must investigate further. + else { + + // Ensure the primary key is included in the `select`. + // (If it is not, then add it automatically.) + // + // > Note that compatiblity with the `populates` query key is handled back in forgeStageTwoQuery(). + if (!_.contains(criteria.select, WLModel.primaryKey)) { + criteria.select.push(WLModel.primaryKey); + }//>- + + + // If model is `schema: false`, then prevent using a custom `select` clause. + // (This is because doing so is not yet reliable.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Fix this & then thoroughly test with normal finds and populated finds, + // with the select clause in the main criteria and the subcriteria, using both native + // joins and polypopulates. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (WLModel.hasSchema === false) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The provided criteria contains a custom `select` clause, but since this model (`'+modelIdentity+'`) '+ + 'is `schema: false`, this cannot be relied upon... yet. In the mean time, if you\'d like to use a '+ + 'custom `select`, configure this model to `schema: true`. 
Or, better yet, since this is usually an app-wide setting,'+ + 'configure all of your models to have `schema: true` -- e.g. in `config/models.js`. (Note that this WILL be supported in a '+ + 'future, minor version release of Sails/Waterline. Want to lend a hand? http://sailsjs.com/contribute)' + )); + }//-• + + // Loop through array and check each attribute name. + _.each(criteria.select, function (attrNameToKeep){ + + // Try to look up the attribute def. + var attrDef; + try { + attrDef = getAttribute(attrNameToKeep, modelIdentity, orm); + } catch (e){ + switch (e.code) { + case 'E_ATTR_NOT_REGISTERED': + // If no matching attribute is found, `attrDef` just stays undefined + // and we keep going. + break; + default: throw e; + } + }// + + // If model is `schema: true`... + if (WLModel.hasSchema === true) { + + // Make sure this matched a recognized attribute name. + if (!attrDef) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `select` clause in the provided criteria contains an item (`'+attrNameToKeep+'`) which is '+ + 'not a recognized attribute in this model (`'+modelIdentity+'`).' + )); + }//-• + + } + // Else if model is `schema: false`... + else if (WLModel.hasSchema === false) { + + // Make sure this is at least a valid name for a Waterline attribute. + if (!isValidAttributeName(attrNameToKeep)) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `select` clause in the provided criteria contains an item (`'+attrNameToKeep+'`) which is not '+ + 'a valid name for an attribute in Sails/Waterline.' + )); + }//-• + + } else { throw new Error('Consistency violation: Every instantiated Waterline model should always have a `hasSchema` property as either `true` or `false` (should have been derived from the `schema` model setting when Waterline was being initialized). 
But somehow, this model (`'+modelIdentity+'`) ended up with `hasSchema: '+util.inspect(WLModel.hasSchema, {depth:5})+'`'); } + + + // Ensure that we're not trying to `select` a plural association. + // > That's never allowed, because you can only populate a plural association-- it's a virtual attribute. + // > Note that we also do a related check when we normalize the `populates` query key back in forgeStageTwoQuery(). + if (attrDef && attrDef.collection) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `select` clause in the provided criteria contains an item (`'+attrNameToKeep+'`) which is actually '+ + 'the name of a plural ("collection") association for this model (`'+modelIdentity+'`). But you cannot '+ + 'explicitly select plural association because they\'re virtual attributes (use `.populate()` instead.)' + )); + }//-• + + });// + + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┌─┐┌─┐┬─┐ ╔╦╗╦ ╦╔═╗╦ ╦╔═╗╔═╗╔╦╗╔═╗╔═╗ + // │ ├─┤├┤ │ ├┴┐ ├┤ │ │├┬┘ ║║║ ║╠═╝║ ║║ ╠═╣ ║ ║╣ ╚═╗ + // └─┘┴ ┴└─┘└─┘┴ ┴ └ └─┘┴└─ ═╩╝╚═╝╩ ╩═╝╩╚═╝╩ ╩ ╩ ╚═╝╚═╝ + // Ensure that no two items refer to the same attribute. + criteria.select = _.uniq(criteria.select); + + }//>-• + + + + + + + // ██████╗ ███╗ ███╗██╗████████╗ + // ██╔═══██╗████╗ ████║██║╚══██╔══╝ + // ██║ ██║██╔████╔██║██║ ██║ + // ██║ ██║██║╚██╔╝██║██║ ██║ + // ╚██████╔╝██║ ╚═╝ ██║██║ ██║ + // ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝ + + // ╔╦╗╔═╗╔═╗╔═╗╦ ╦╦ ╔╦╗ + // ║║║╣ ╠╣ ╠═╣║ ║║ ║ + // ═╩╝╚═╝╚ ╩ ╩╚═╝╩═╝╩ + // If no `omit` clause was provided, give it a default value. + if (_.isUndefined(criteria.omit)) { + criteria.omit = []; + }//>- + + + // Verify that this is an array. + if (!_.isArray(criteria.omit)) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `omit` clause in the provided criteria is invalid. If provided, it should be an array of strings. But instead, got: '+ + util.inspect(criteria.omit, {depth:5})+'' + )); + }//-• + + // Loop through array and check each attribute name. 
+ _.remove(criteria.omit, function (attrNameToOmit){ + + // Verify this is a string. + if (!_.isString(attrNameToOmit)) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `omit` clause in the provided criteria is invalid. If provided, it should be an array of strings (attribute names to omit. But one of the items is not a string: '+ + util.inspect(attrNameToOmit, {depth:5})+'' + )); + }//-• + + // If _explicitly_ trying to omit the primary key, + // then we say this is highly irregular. + // + // > Note that compatiblity with the `populates` query key is handled back in forgeStageTwoQuery(). + if (attrNameToOmit === WLModel.primaryKey) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `omit` clause in the provided criteria explicitly attempts to omit the primary key (`'+WLModel.primaryKey+'`). But in the current version of Waterline, this is not possible.' + )); + }//-• + + // Try to look up the attribute def. + var attrDef; + try { + attrDef = getAttribute(attrNameToOmit, modelIdentity, orm); + } catch (e){ + switch (e.code) { + case 'E_ATTR_NOT_REGISTERED': + // If no matching attribute is found, `attrDef` just stays undefined + // and we keep going. + break; + default: throw e; + } + }// + + // If model is `schema: true`... + if (WLModel.hasSchema === true) { + + // Make sure this matched a recognized attribute name. + if (!attrDef) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'The `omit` clause in the provided criteria contains an item (`'+attrNameToOmit+'`) which is not a recognized attribute in this model (`'+modelIdentity+'`).' + )); + }//-• + + } + // Else if model is `schema: false`... + else if (WLModel.hasSchema === false) { + + // In this case, we just give up and throw an E_HIGHLY_IRREGULAR error here + // explaining what's up. + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'Cannot use `omit`, because the referenced model (`'+modelIdentity+'`) does not declare itself `schema: true`.' 
+ )); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: double-check that there's not a reasonable way to do this in a way that + // supports both SQL and noSQL adapters. + // + // Best case, we get it to work for Mongo et al somehow, in which case we'd then + // also want to verify that each item is at least a valid Waterline attribute name here. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + } else { throw new Error('Consistency violation: Every instantiated Waterline model should always have a `hasSchema` property as either `true` or `false` (should have been derived from the `schema` model setting when Waterline was being initialized). But somehow, this model (`'+modelIdentity+'`) ended up with `hasSchema: '+util.inspect(WLModel.hasSchema, {depth:5})+'`'); } + // >-• + + // Ensure that we're not trying to `omit` a plural association. + // If so, just strip it out. + // + // > Note that we also do a related check when we normalize the `populates` query key back in forgeStageTwoQuery(). + if (attrDef && attrDef.collection) { + return true; + }//-• + + // Otherwise, we'll keep this item in the `omit` clause. + return false; + + });// + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┌─┐┌─┐┬─┐ ╔╦╗╦ ╦╔═╗╦ ╦╔═╗╔═╗╔╦╗╔═╗╔═╗ + // │ ├─┤├┤ │ ├┴┐ ├┤ │ │├┬┘ ║║║ ║╠═╝║ ║║ ╠═╣ ║ ║╣ ╚═╗ + // └─┘┴ ┴└─┘└─┘┴ ┴ └ └─┘┴└─ ═╩╝╚═╝╩ ╩═╝╩╚═╝╩ ╩ ╩ ╚═╝╚═╝ + // Ensure that no two items refer to the same attribute. + criteria.omit = _.uniq(criteria.omit); + + + // --• At this point, we know that both `select` AND `omit` are fully valid. So... + + // ┌─┐┌┐┌┌─┐┬ ┬┬─┐┌─┐ ╔═╗╔╦╗╦╔╦╗ ┬ ╔═╗╔═╗╦ ╔═╗╔═╗╔╦╗ ┌┬┐┌─┐ ┌┐┌┌─┐┌┬┐ ┌─┐┬ ┌─┐┌─┐┬ ┬ + // ├┤ │││└─┐│ │├┬┘├┤ ║ ║║║║║ ║ ┌┼─ ╚═╗║╣ ║ ║╣ ║ ║ │││ │ ││││ │ │ │ │ ├─┤└─┐├─┤ + // └─┘┘└┘└─┘└─┘┴└─└─┘ ╚═╝╩ ╩╩ ╩ └┘ ╚═╝╚═╝╩═╝╚═╝╚═╝ ╩ ─┴┘└─┘ ┘└┘└─┘ ┴ └─┘┴─┘┴ ┴└─┘┴ ┴ + // Make sure that `omit` and `select` are not BOTH specified as anything + // other than their default values. 
If so, then fail w/ an E_HIGHLY_IRREGULAR error. + var isNoopSelect = _.isEqual(criteria.select, ['*']); + var isNoopOmit = _.isEqual(criteria.omit, []); + if (!isNoopSelect && !isNoopOmit) { + throw flaverr('E_HIGHLY_IRREGULAR', new Error('Cannot specify both `omit` AND `select`. Please use one or the other.')); + }//-• + + + + + + + // IWMIH and the criteria is somehow no longer a dictionary, then freak out. + // (This is just to help us prevent present & future bugs in this utility itself.) + var isCriteriaNowValidDictionary = _.isObject(criteria) && !_.isArray(criteria) && !_.isFunction(criteria); + if (!isCriteriaNowValidDictionary) { + throw new Error('Consistency violation: At this point, the criteria should have already been normalized into a dictionary! But instead somehow it looks like this: '+util.inspect(criteria, {depth:5})+''); + } + + + + // Return the normalized criteria dictionary. + return criteria; + +}; diff --git a/lib/waterline/utils/query/private/normalize-new-record.js b/lib/waterline/utils/query/private/normalize-new-record.js new file mode 100644 index 000000000..1ff5aca58 --- /dev/null +++ b/lib/waterline/utils/query/private/normalize-new-record.js @@ -0,0 +1,412 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var rttc = require('rttc'); +var getModel = require('../../ontology/get-model'); +var getAttribute = require('../../ontology/get-attribute'); +var normalizeValueToSet = require('./normalize-value-to-set'); + + +/** + * normalizeNewRecord() + * + * Validate and normalize the provided dictionary (`newRecord`), hammering it destructively + * into the standardized format suitable to be part of a "stage 2 query" (see ARCHITECTURE.md). + * This allows us to present it in a normalized fashion to lifecycle callbacks, as well to + * other internal utilities within Waterline. + * + * This function has a return value. 
But realize that this is only to allow for an + * edge case: For convenience, the provided value is allowed to be `undefined`, in which + * case it is automatically converted into a new, empty dictionary (plain JavaScript object). + * But most of the time, the provided value will be irreversibly mutated in-place, AS WELL AS returned. + * + * -- + * + * THIS UTILITY IS NOT CURRENTLY RESPONSIBLE FOR APPLYING HIGH-LEVEL ("anchor") VALIDATION RULES! + * (but note that this could change at some point in the future) + * + * -- + * + * @param {Ref?} newRecord + * The original new record (i.e. from a "stage 1 query"). + * (If provided as `undefined`, it will be understood as `{}`) + * > WARNING: + * > IN SOME CASES (BUT NOT ALL!), THE PROVIDED DICTIONARY WILL + * > UNDERGO DESTRUCTIVE, IN-PLACE CHANGES JUST BY PASSING IT + * > IN TO THIS UTILITY. + * + * @param {String} modelIdentity + * The identity of the model this record is for (e.g. "pet" or "user") + * > Useful for looking up the Waterline model and accessing its attribute definitions. + * + * @param {Ref} orm + * The Waterline ORM instance. + * > Useful for accessing the model definitions. + * + * @param {Number} currentTimestamp + * The current JS timestamp (epoch ms). + * > This is passed in so that it can be exactly the same in the situation where + * > this utility might be running multiple times for a given query. + * + * @param {Dictionary?} meta + * The contents of the `meta` query key, if one was provided. + * > Useful for propagating query options to low-level utilities like this one. + * + * -- + * + * @returns {Dictionary} + * The successfully-normalized new record, ready for use in a stage 2 query. + * + * -- + * + * @throws {Error} If it encounters incompatible usage in the provided `newRecord`, + * | including e.g. the case where an invalid value is specified for + * | an association. 
+ * | @property {String} code + * | - E_HIGHLY_IRREGULAR + * + * + * @throws {Error} If the provided `newRecord` is missing a value for a required attribute, + * | or if it specifies `null` or empty string ("") for it. + * | @property {String} code + * | - E_REQUIRED + * | @property {String} attrName + * + * + * @throws {Error} If it encounters a value with an incompatible data type in the provided + * | `newRecord`. This is only versus the attribute's declared "type" -- + * | failed validation versus associations results in a different error code + * | (see above). + * | @property {String} code + * | - E_TYPE + * | @property {String} attrName + * | @property {String} expectedType + * | - string + * | - number + * | - boolean + * | - json + * | + * | This is only versus the attribute's declared "type", or other similar type safety issues -- + * | certain failed checks for associations result in a different error code (see above). + * | + * | Remember: + * | This is the case where a _completely incorrect type of data_ was passed in. + * | This is NOT a high-level "anchor" validation failure! (see below for that) + * | > Unlike anchor validation errors, this exception should never be negotiated/parsed/used + * | > for delivering error messages to end users of an application-- it is carved out + * | > separately purely to make things easier to follow for the developer. + * + * + * @throws {Error} If it encounters any values within the provided `newRecord` that violate + * | high-level (anchor) validation rules. + * | @property {String} code + * | - E_VIOLATES_RULES + * | @property {String} attrName + * | @property {Array} ruleViolations + * | [ + * | { + * | rule: 'minLength', //(isEmail/isNotEmptyString/max/isNumber/etc) + * | message: 'Too few characters (max 30)' + * | }, + * | ... + * | ] + * + * + * @throws {Error} If anything else unexpected occurs. 
module.exports = function normalizeNewRecord(newRecord, modelIdentity, orm, currentTimestamp, meta) {

  // Tolerate this being left undefined by inferring a reasonable default.
  // Note that we can't bail early, because we need to check for more stuff
  // (there might be required attrs!)
  if (_.isUndefined(newRecord)){
    newRecord = {};
  }//>-

  // Verify that this is now a dictionary (plain object that is neither an
  // array nor a function).
  if (!_.isObject(newRecord) || _.isFunction(newRecord) || _.isArray(newRecord)) {
    throw flaverr('E_HIGHLY_IRREGULAR', new Error(
      'Expecting new record to be provided as a dictionary (plain JavaScript object) but instead, got: '+util.inspect(newRecord,{depth:5})
    ));
  }//-•


  // Look up the Waterline model for this query.
  // > This is so that we can reference the original model definition.
  // > An unregistered model at this point is a Waterline-internal bug, so it
  // > is re-thrown as a consistency violation rather than a user-facing error.
  var WLModel;
  try {
    WLModel = getModel(modelIdentity, orm);
  } catch (e) {
    switch (e.code) {
      case 'E_MODEL_NOT_REGISTERED': throw new Error('Consistency violation: '+e.message);
      default: throw e;
    }
  }//



  //  ╔╗╔╔═╗╦═╗╔╦╗╔═╗╦  ╦╔═╗╔═╗  ┌─┐┬─┐┌─┐┬  ┬┬┌┬┐┌─┐┌┬┐  ┬  ┬┌─┐┬  ┬ ┬┌─┐┌─┐
  //  ║║║║ ║╠╦╝║║║╠═╣║  ║╔═╝║╣   ├─┘├┬┘│ │└┐┌┘│ ││├┤  ││  └┐┌┘├─┤│  │ │├┤ └─┐
  //  ╝╚╝╚═╝╩╚═╩ ╩╩ ╩╩═╝╩╚═╝╚═╝  ┴  ┴└─└─┘ └┘ ┴─┴┘└─┘─┴┘   └┘ ┴ ┴┴─┘└─┘└─┘└─┘
  //
  // Now loop over and check every key specified in this new record.
  _.each(_.keys(newRecord), function (supposedAttrName){

    // Validate & normalize this value.
    // > Note that we explicitly ALLOW values to be provided for plural associations by passing in `true`.
    try {
      newRecord[supposedAttrName] = normalizeValueToSet(newRecord[supposedAttrName], supposedAttrName, modelIdentity, orm, meta);
    } catch (e) {
      switch (e.code) {

        // If its RHS should be ignored (e.g. because it is `undefined`), then delete this key and bail early.
        case 'E_SHOULD_BE_IGNORED':
          delete newRecord[supposedAttrName];
          return;

        // Re-throw with extra context, preserving the error code so callers can negotiate it.
        case 'E_HIGHLY_IRREGULAR':
          throw flaverr('E_HIGHLY_IRREGULAR', new Error(
            'Could not use specified `'+supposedAttrName+'`. '+e.message
          ));

        case 'E_TYPE':
          throw flaverr({
            code: 'E_TYPE',
            attrName: supposedAttrName,
            expectedType: e.expectedType
          }, new Error(
            'New record contains the wrong type of data for property `'+supposedAttrName+'`. '+e.message
          ));

        case 'E_REQUIRED':
          throw flaverr({
            code: 'E_REQUIRED',
            attrName: supposedAttrName
          }, new Error(
            'Could not use specified `'+supposedAttrName+'`. '+e.message
          ));

        case 'E_VIOLATES_RULES':
          // Sanity check: a rules-violation error must always carry a non-empty
          // `ruleViolations` array; anything else is an internal bug.
          if (!_.isArray(e.ruleViolations) || e.ruleViolations.length === 0) {
            throw new Error('Consistency violation: This Error instance should ALWAYS have a non-empty array as its `ruleViolations` property. But instead, its `ruleViolations` property is: '+util.inspect(e.ruleViolations, {depth: 5})+'\nAlso, for completeness/context, here is the error\'s complete stack: '+e.stack);
          }

          throw flaverr({
            code: 'E_VIOLATES_RULES',
            attrName: supposedAttrName,
            ruleViolations: e.ruleViolations
          }, new Error(
            'Could not use specified `'+supposedAttrName+'`. '+e.message
          ));

        default:
          throw e;
      }
    }//

  });//



  //  ┌─┐┬ ┬┌─┐┌─┐┬┌─  ┌─┐┌─┐┬─┐  ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦ ╦  ╦╔═╔═╗╦ ╦
  //  │  ├─┤├┤ │  ├┴┐  ├┤ │ │├┬┘  ╠═╝╠╦╝║║║║╠═╣╠╦╝╚╦╝  ╠╩╗║╣ ╚╦╝
  //  └─┘┴ ┴└─┘└─┘┴ ┴  └  └─┘┴└─  ╩  ╩╚═╩╩ ╩╩ ╩╩╚═ ╩   ╩ ╩╚═╝ ╩
  //
  // There will always be at least one required attribute: the primary key...
  // but, actually, we ALLOW it to be omitted since it might be (and usually is)
  // decided by the underlying database.
  //
  // That said, it must NEVER be `null`.
  if (_.isNull(newRecord[WLModel.primaryKey])) {
    throw flaverr('E_HIGHLY_IRREGULAR', new Error(
      'Could not use specified value (`null`) as the primary key value (`'+WLModel.primaryKey+'`) for a new record. '+
      '(Try omitting it instead.)'
    ));
  }//-•

  // > Note that, if a non-null value WAS provided for the primary key, then it will have already
  // > been validated/normalized (if relevant) by the type safety check above. So we don't need to
  // > worry about addressing any of that here-- doing so would be duplicative.



  //  ╦  ╔═╗╔═╗╔═╗  ╔═╗╦  ╦╔═╗╦═╗  ╔═╗╦  ╦    ╔═╗╔╦╗╔╦╗╦═╗╔═╗
  //  ║  ║ ║║ ║╠═╝  ║ ║╚╗╔╝║╣ ╠╦╝  ╠═╣║  ║    ╠═╣ ║  ║ ╠╦╝╚═╗
  //  ╩═╝╚═╝╚═╝╩    ╚═╝ ╚╝ ╚═╝╩╚═  ╩ ╩╩═╝╩═╝  ╩ ╩ ╩  ╩ ╩╚═╚═╝
  //  (check requiredness, apply defaults, and set timestamps for omissions)
  //
  // Check that any OTHER required attributes are represented as keys, and neither `undefined` nor `null`.
  _.each(WLModel.attributes, function (attrDef, attrName) {

    // Quick sanity check: `defaultsTo` is never legal on an association.
    var isAssociation = attrDef.model || attrDef.collection;
    if (isAssociation && !_.isUndefined(attrDef.defaultsTo)) {
      throw new Error('Consistency violation: `defaultsTo` should never be defined for an association. But `'+attrName+'` declares just such an inappropriate `defaultsTo`: '+util.inspect(attrDef.defaultsTo, {depth:5})+'');
    }

    // If the provided value is `undefined`, then it's considered an omission.
    // Otherwise, this is NOT an omission, so there's no way we'll need to mess
    // w/ any kind of requiredness check, or to apply a default value or a timestamp.
    // (i.e. in that case, we'll simply bail & skip ahead to the next attribute.)
    if (!_.isUndefined(newRecord[attrName])) {
      return;
    }//-•


    // IWMIH, we know the value is undefined, and thus we're dealing with an omission.


    // If this is for a required attribute, an omission is an E_REQUIRED error.
    if (attrDef.required) {

      throw flaverr({
        code: 'E_REQUIRED',
        attrName: attrName
      }, new Error(
        'Missing value for required attribute `'+attrName+'`. '+
        'Expected ' + (function _getExpectedNounPhrase (){
          // For a non-association, describe the attribute's own declared type;
          // for an association, describe the pk type of the OTHER model.
          if (!attrDef.model && !attrDef.collection) {
            return rttc.getNounPhrase(attrDef.type);
          }//-•
          var otherModelIdentity = attrDef.model ? attrDef.model : attrDef.collection;
          var OtherModel = getModel(otherModelIdentity, orm);
          var otherModelPkType = getAttribute(OtherModel.primaryKey, otherModelIdentity, orm).type;
          return rttc.getNounPhrase(otherModelPkType)+' (the '+OtherModel.primaryKey+' of a '+otherModelIdentity+')';
        })()+', '+
        'but instead, got: '+util.inspect(newRecord[attrName], {depth: 5})+''
      ));

    }//-•


    // IWMIH, this is for an optional attribute.
    // Fill in the omission with the appropriate base value:


    // If this is the primary key attribute, then set it to `null`.
    // (https://docs.google.com/spreadsheets/d/1whV739iW6O9SxRZLCIe2lpvuAUqm-ie7j7tn_Pjir3s/edit#gid=1814738146)
    // (This gets dealt with in the adapter later!)
    if (attrName === WLModel.primaryKey) {
      newRecord[attrName] = null;
    }
    // Default singular associations to `null`.
    else if (attrDef.model) {
      newRecord[attrName] = null;
    }
    // Default plural associations to `[]`.
    else if (attrDef.collection) {
      newRecord[attrName] = [];
    }
    // Or apply the default if there is one.
    else if (attrDef.defaultsTo !== undefined) {

      // Deep clone the defaultsTo value.
      //
      // > FUTURE: eliminate the need for this deep clone by ensuring that we never mutate
      // > this value anywhere else in Waterline and in core adapters.
      // > (In the mean time, this behavior should not be relied on in any new code.)
      newRecord[attrName] = _.cloneDeep(attrDef.defaultsTo);

      // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
      // FUTURE: maybe support encryption of the default value here.
      // (See the related note in `waterline.js` for more information.)
      // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

    }
    // Or use the timestamp, if this is `autoCreatedAt` or `autoUpdatedAt`
    // (the latter because we set autoUpdatedAt to the same thing as `autoCreatedAt`
    // when initially creating a record)
    //
    // > Note that this other timestamp is passed in so that all new records share
    // > the exact same timestamp (in a `.createEach()` scenario, for example)
    else if (attrDef.autoCreatedAt || attrDef.autoUpdatedAt) {

      assert(attrDef.type === 'number' || attrDef.type === 'string' || attrDef.type === 'ref', 'If an attribute has `autoCreatedAt: true` or `autoUpdatedAt: true`, then it should always have either `type: \'string\'`, `type: \'number\'` or `type: \'ref\'`.  But the definition for attribute (`'+attrName+'`) has somehow gotten into an impossible state: It has `autoCreatedAt: '+attrDef.autoCreatedAt+'`, `autoUpdatedAt: '+attrDef.autoUpdatedAt+'`, and `type: \''+attrDef.type+'\'`');

      // Set the value equal to the current timestamp, using the appropriate format:
      // JSON timestamp string, Date instance (`ref`), or epoch ms (`number`).
      if (attrDef.type === 'string') {
        newRecord[attrName] = (new Date(currentTimestamp)).toJSON();
      }
      else if (attrDef.type === 'ref') {
        newRecord[attrName] = new Date(currentTimestamp);
      }
      else {
        newRecord[attrName] = currentTimestamp;
      }

    }
    // Or use `null`, if this attribute specially expects/allows that as its base value.
    else if (attrDef.allowNull) {
      newRecord[attrName] = null;
    }
    // Or otherwise, just set it to the appropriate base value for its type.
    else {
      newRecord[attrName] = rttc.coerce(attrDef.type);
    }//>-

  });//


  // Return the normalized dictionary.
  return newRecord;

};
+ * ------------------------------------------------------------------------------------------ + * @throws {Error} if invalid + * @property {String} code (=== "E_INVALID_PK_VALUE") + */ + +module.exports = function normalizePkValueOrValues (pkValueOrPkValues, expectedPkType){ + + // Check usage + if (expectedPkType !== 'string' && expectedPkType !== 'number') { + throw new Error('Consistency violation: The internal normalizePkValueOrValues() utility must always be called with a valid second argument ("string" or "number"). But instead, got: '+util.inspect(expectedPkType, {depth:5})+''); + } + + + // Our normalized result. + var pkValues; + + // If a singular string or number was provided, convert it into an array. + if (_.isString(pkValueOrPkValues) || _.isNumber(pkValueOrPkValues)) { + pkValues = [ pkValueOrPkValues ]; + } + // Otherwise, we'll assume it must already be an array. + // (Don't worry, we'll validate it momentarily.) + else { + pkValues = pkValueOrPkValues; + } + //>- + + + // Now, handle the case where something completely invalid was provided. + if (!_.isArray(pkValues)) { + throw flaverr('E_INVALID_PK_VALUE', new Error('Expecting either an individual primary key value (a '+expectedPkType+') or a homogeneous array of primary key values ('+expectedPkType+'s). But instead got a '+(typeof pkValues)+': '+util.inspect(pkValues,{depth:5})+'')); + }//-• + + + // Now that we most definitely have an array, ensure that it doesn't contain anything + // strange, curious, or malevolent by looping through and calling `normalizePkValue()` + // on each item. + pkValues = _.map(pkValues, function (thisPkValue){ + + // Return this primary key value, which might have been coerced. + try { + return normalizePkValue(thisPkValue, expectedPkType); + } catch (e) { + switch (e.code) { + case 'E_INVALID_PK_VALUE': + throw flaverr('E_INVALID_PK_VALUE', new Error( + ( + _.isArray(pkValueOrPkValues) ? 
+ 'One of the values in the provided array' : + 'The provided value' + )+' is not valid primary key value. '+e.message + )); + default: throw e; + } + } + + });// + + + // Ensure uniqueness. + // (Strip out any duplicate pk values.) + pkValues = _.uniq(pkValues); + + + // Return the normalized array of pk values. + return pkValues; + +}; + diff --git a/lib/waterline/utils/query/private/normalize-pk-value.js b/lib/waterline/utils/query/private/normalize-pk-value.js new file mode 100644 index 000000000..d7e304954 --- /dev/null +++ b/lib/waterline/utils/query/private/normalize-pk-value.js @@ -0,0 +1,163 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var isSafeNaturalNumber = require('./is-safe-natural-number'); + + +/** + * normalizePkValue() + * + * Validate and normalize the provided pk value. + * + * > This ensures the provided pk value is a string or a number. + * > • If a string, it also validates that it is not the empty string (""). + * > • If a number, it also validates that it is a base-10, non-zero, positive integer + * > that is not larger than the maximum safe integer representable by JavaScript. + * > Also, if we are expecting numbers, numeric strings are tolerated, so long as they + * > can be parsed as valid numeric pk values. + * + * ------------------------------------------------------------------------------------------ + * @param {String|Number} pkValue + * @param {String} expectedPkType [either "number" or "string"] + * ------------------------------------------------------------------------------------------ + * @returns {String|Number} + * A valid primary key value, guaranteed to match the specified `expectedPkType`. 
module.exports = function normalizePkValue (pkValue, expectedPkType){

  // Check usage: the expected pk type must be declared as "string" or "number".
  if (expectedPkType !== 'string' && expectedPkType !== 'number') {
    throw new Error('Consistency violation: The internal normalizePkValue() utility must always be called with a valid second argument ("string" or "number").  But instead, got: '+util.inspect(expectedPkType, {depth:5})+'');
  }


  // If explicitly expecting strings...
  if (expectedPkType === 'string') {
    if (!_.isString(pkValue)) {
      // > Note that we DO NOT tolerate non-strings being passed in, even though it
      // > would be possible to cast them into strings automatically.  While this would
      // > be useful for key/value adapters like Redis, or in SQL databases when using
      // > a string primary key, it can lead to bugs when querying against a database
      // > like MongoDB that uses special hex or uuid strings.
      throw flaverr('E_INVALID_PK_VALUE', new Error('Instead of a string (the expected pk type), the provided value is: '+util.inspect(pkValue,{depth:5})+''));
    }//-•

    // Empty string ("") is never a valid primary key value.
    if (pkValue === '') {
      throw flaverr('E_INVALID_PK_VALUE', new Error('Cannot use empty string ('+util.inspect(pkValue,{depth:5})+') as a primary key value.'));
    }//-•

  }//‡
  // Else if explicitly expecting numbers...
  else if (expectedPkType === 'number') {
    if (!_.isNumber(pkValue)) {

      // If this is not even a _string_ either, then reject it.
      // (Note that we handle this case separately in order to support a more helpful error message.)
      if (!_.isString(pkValue)) {
        throw flaverr('E_INVALID_PK_VALUE', new Error(
          'Instead of a number (the expected pk type), got: '+util.inspect(pkValue,{depth:5})+''
        ));
      }//-•



      // Tolerate strings that _look_ like base-10, non-zero, positive integers;
      // and that wouldn't be too big to be a safe JavaScript number.
      // (Cast them into numbers automatically.)

      var GOT_STRING_FOR_NUMERIC_PK_SUFFIX =
      'To resolve this error, pass in a valid base-10, non-zero, positive integer instead.  '+
      '(Or if you must use strings, then change the relevant model\'s pk attribute from '+
      '`type: \'number\'` to `type: \'string\'`.)';

      // Only strings consisting purely of digit characters are coercible.
      var canPrblyCoerceIntoValidNumber = _.isString(pkValue) && pkValue.match(/^[0-9]+$/);
      if (!canPrblyCoerceIntoValidNumber) {
        throw flaverr('E_INVALID_PK_VALUE', new Error(
          'Instead of a number, the provided value (`'+util.inspect(pkValue,{depth:5})+'`) is a string, '+
          'and it cannot be coerced into a valid primary key value automatically (contains characters other '+
          'than numerals 0-9). '+
          GOT_STRING_FOR_NUMERIC_PK_SUFFIX
        ));
      }//-•

      // The `||9007199254740991` fallback guards against environments where
      // Number.MAX_SAFE_INTEGER is not defined (pre-ES6).
      var coercedNumber = +pkValue;
      if (coercedNumber > (Number.MAX_SAFE_INTEGER||9007199254740991)) {
        throw flaverr('E_INVALID_PK_VALUE', new Error(
          'Instead of a valid number, the provided value (`'+util.inspect(pkValue,{depth:5})+'`) is '+
          'a string that looks like a number.  But it cannot be coerced automatically because, despite '+
          'its "numbery" appearance, it\'s just too big! '+
          GOT_STRING_FOR_NUMERIC_PK_SUFFIX
        ));
      }//-•

      pkValue = coercedNumber;

    }//>-•


    //-•
    // IWMIH, then we know that `pkValue` is now a number.
    // (But it might be something like `NaN` or `Infinity`!)
    //
    // `pkValue` should be provided as a safe, positive, non-zero, finite integer.
    //
    // > We do a few explicit checks below for better error messages, and then finally
    // > do one last check as a catchall, at the very end.

    // NaN is never valid as a primary key value.
    if (_.isNaN(pkValue)) {
      throw flaverr('E_INVALID_PK_VALUE', new Error('Cannot use `NaN` as a primary key value.'));
    }//-•

    // Zero is never a valid primary key value.
    if (pkValue === 0) {
      throw flaverr('E_INVALID_PK_VALUE', new Error('Cannot use zero ('+util.inspect(pkValue,{depth:5})+') as a primary key value.'));
    }//-•

    // A negative number is never a valid primary key value.
    if (pkValue < 0) {
      throw flaverr('E_INVALID_PK_VALUE', new Error('Cannot use a negative number ('+util.inspect(pkValue,{depth:5})+') as a primary key value.'));
    }//-•

    // A floating point number is never a valid primary key value.
    if (Math.floor(pkValue) !== pkValue) {
      throw flaverr('E_INVALID_PK_VALUE', new Error('Cannot use a floating point number ('+util.inspect(pkValue,{depth:5})+') as a primary key value.'));
    }//-•

    // Neither Infinity nor -Infinity are ever valid as primary key values.
    if (Infinity === pkValue || -Infinity === pkValue) {
      throw flaverr('E_INVALID_PK_VALUE', new Error('Cannot use `Infinity` or `-Infinity` (`'+util.inspect(pkValue,{depth:5})+'`) as a primary key value.'));
    }//-•

    // Numbers greater than the maximum safe JavaScript integer are never valid as a primary key value.
    // > Note that we check for `Infinity` above FIRST, before we do this comparison.  That's just so that
    // > we can display a tastier error message.
    if (pkValue > (Number.MAX_SAFE_INTEGER||9007199254740991)) {
      throw flaverr('E_INVALID_PK_VALUE', new Error('Cannot use the provided value (`'+util.inspect(pkValue,{depth:5})+'`), because it is too large to safely fit into a JavaScript integer (i.e. `> Number.MAX_SAFE_INTEGER`)'));
    }//-•

    // Now do one last check as a catch-all, w/ a generic error msg.
    if (!isSafeNaturalNumber(pkValue)) {
      throw flaverr('E_INVALID_PK_VALUE', new Error('Cannot use the provided value (`'+util.inspect(pkValue,{depth:5})+'`) as a primary key value -- it is not a "safe", natural number (see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isSafeInteger).'));
    }

  } else { throw new Error('Consistency violation: Should not be possible to make it here in the code!  If you are seeing this error, there\'s a bug in Waterline!'); }
  //>-•

  // Return the normalized pk value.
  return pkValue;

};
+ * + * @param {Dictionary?} meta + * The contents of the `meta` query key, if one was provided. + * > Useful for propagating query options to low-level utilities like this one. + * -- + * + * @returns {Array} + * The successfully-normalized `sort` clause, ready for use in a stage 2 query. + * > Note that the originally provided `sort` clause MAY ALSO HAVE BEEN + * > MUTATED IN PLACE! + * + * + * @throws {Error} If it encounters irrecoverable problems or unsupported usage in + * the provided `sort` clause. + * @property {String} code + * - E_SORT_CLAUSE_UNUSABLE + * + * + * @throws {Error} If anything else unexpected occurs. + */ + +module.exports = function normalizeSortClause(sortClause, modelIdentity, orm, meta) { + + // Look up the Waterline model for this query. + // > This is so that we can reference the original model definition. + var WLModel = getModel(modelIdentity, orm); + + // ╔═╗╔═╗╔╦╗╔═╗╔═╗╔╦╗╦╔╗ ╦╦ ╦╔╦╗╦ ╦ + // ║ ║ ║║║║╠═╝╠═╣ ║ ║╠╩╗║║ ║ ║ ╚╦╝ + // ╚═╝╚═╝╩ ╩╩ ╩ ╩ ╩ ╩╚═╝╩╩═╝╩ ╩ ╩ + // If `sort` was provided as a dictionary... + if (_.isObject(sortClause) && !_.isArray(sortClause) && !_.isFunction(sortClause)) { + + sortClause = _.reduce(_.keys(sortClause), function (memo, sortByKey) { + + var sortDirection = sortClause[sortByKey]; + + // It this appears to be a well-formed comparator directive that was simply mistakenly + // provided at the top level instead of being wrapped in an array, then throw an error + // specifically mentioning that. + if (_.isString(sortDirection) && _.keys(sortClause).length === 1) { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid. If specified, it should be either '+ + 'a string like `\'fullName DESC\'`, or an array like `[ { fullName: \'DESC\' } ]`. 
'+ + 'But it looks like you might need to wrap this in an array, because instead, got: '+ + util.inspect(sortClause, {depth:5})+'' + )); + }//-• + + + // Otherwise, continue attempting to normalize this dictionary into array + // format under the assumption that it was provided as a Mongo-style comparator + // dictionary. (and freaking out if we see anything that makes us uncomfortable) + var newComparatorDirective = {}; + if (sortDirection === 1) { + newComparatorDirective[sortByKey] = 'ASC'; + } + else if (sortDirection === -1) { + newComparatorDirective[sortByKey] = 'DESC'; + } + else { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid. If specified as a '+ + 'dictionary, it should use Mongo-esque semantics, using -1 and 1 for the sort '+ + 'direction (something like `{ fullName: -1, rank: 1 }`). But instead, got: '+ + util.inspect(sortClause, {depth:5})+'' + )); + } + memo.push(newComparatorDirective); + + return memo; + + }, []);// + + // IWMIH, then we know a dictionary was provided that appears to be using valid Mongo-esque + // semantics, or that is at least an empty dictionary. Nonetheless, this usage is not recommended, + // and might be removed in the future. So log a warning: + console.warn('\n'+ + 'Warning: The `sort` clause in the provided criteria is specified as a dictionary (plain JS object),\n'+ + 'meaning that it is presumably using Mongo-esque semantics (something like `{ fullName: -1, rank: 1 }`).\n'+ + 'But as of Sails v1/Waterline 0.13, this is no longer the recommended usage. Instead, please use either\n'+ + 'a string like `\'fullName DESC\'`, or an array like `[ { fullName: \'DESC\' } ]`.\n'+ + '(Since I get what you mean, tolerating & remapping this usage for now...)\n' + ); + + }//>- + + + // Tolerate empty array (`[]`), understanding it to mean the same thing as `undefined`. 
+ if (_.isArray(sortClause) && sortClause.length === 0) { + sortClause = undefined; + // Note that this will be further expanded momentarily. + }//>- + + + + // ╔╦╗╔═╗╔═╗╔═╗╦ ╦╦ ╔╦╗ + // ║║║╣ ╠╣ ╠═╣║ ║║ ║ + // ═╩╝╚═╝╚ ╩ ╩╚═╝╩═╝╩ + // If no `sort` clause was provided, give it a default (empty) value, + // indicating the adapter should use its default sorting strategy + if (_.isUndefined(sortClause)) { + sortClause = []; + }//>- + + // If `sort` was provided as a string, then expand it into an array. + // (We'll continue cleaning it up down below-- this is just to get + // it part of the way there-- e.g. we might end up with something like: + // `[ 'name DESC' ]`) + if (_.isString(sortClause)) { + sortClause = [ + sortClause + ]; + }//>- + + + // If, by this time, `sort` is not an array... + if (!_.isArray(sortClause)) { + // Then the provided `sort` must have been highly irregular indeed. + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid. If specified, it should be either '+ + 'a string like `\'fullName DESC\'`, or an array like `[ { fullName: \'DESC\' } ]`. '+ + 'But instead, got: '+ + util.inspect(sortClause, {depth:5})+'' + )); + }//-• + + + + // Ensure that each item in the array is a structurally-valid comparator directive: + sortClause = _.map(sortClause, function (comparatorDirective){ + + // ┌┬┐┌─┐┬ ┌─┐┬─┐┌─┐┌┬┐┌─┐ ┌─┐┌┬┐┬─┐┬┌┐┌┌─┐ ┬ ┬┌─┐┌─┐┌─┐┌─┐ + // │ │ ││ ├┤ ├┬┘├─┤ │ ├┤ └─┐ │ ├┬┘│││││ ┬ │ │└─┐├─┤│ ┬├┤ + // ┴ └─┘┴─┘└─┘┴└─┴ ┴ ┴ └─┘ └─┘ ┴ ┴└─┴┘└┘└─┘ └─┘└─┘┴ ┴└─┘└─┘ + // ┌─ ┌─┐ ┌─┐ ╔═╗╔╦╗╔═╗╦╦ ╔═╗╔╦╗╔╦╗╦═╗╔═╗╔═╗╔═╗ ╔═╗╔═╗╔═╗ ─┐ + // │ ├┤ │ ┬ ║╣ ║║║╠═╣║║ ╠═╣ ║║ ║║╠╦╝║╣ ╚═╗╚═╗ ╠═╣╚═╗║ │ + // └─ └─┘o└─┘o ╚═╝╩ ╩╩ ╩╩╩═╝╩ ╩═╩╝═╩╝╩╚═╚═╝╚═╝╚═╝ ╩ ╩╚═╝╚═╝ ─┘ + // If this is a string, then morph it into a dictionary. + // + // > This is so that we tolerate syntax like `'name ASC'` + // > at the top level (since we would have expanded it above) + // > AND when provided within the array (e.g. 
`[ 'name ASC' ]`) + if (_.isString(comparatorDirective)) { + + var pieces = comparatorDirective.split(/\s+/); + if (pieces.length === 2) { + // Good, that's what we expect. + } + else if (pieces.length === 1) { + // If there is only the attribute name specified, then assume that we're implying 'ASC'. + // > For example, if we worked together at a pet shelter where there were two dogs (named + // > "Suzy" and "Arnold") and a parrot named "Eleanor", and our boss asked us for a list of + // > all the animals, sorted by name, we'd most likely assume that the list should begin witih + // > Arnold the dog. + pieces.push('ASC'); + } + else { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'Invalid `sort` clause in criteria. If specifying a string, it should look like '+ + 'e.g. `\'emailAddress ASC\'`, where the attribute name ("emailAddress") is separated '+ + 'from the sort direction ("ASC" or "DESC") by whitespace. But instead, got: '+ + util.inspect(comparatorDirective, {depth:5})+'' + )); + }//-• + + // Build a dictionary out of it. + comparatorDirective = {}; + comparatorDirective[pieces[0]] = pieces[1]; + + }//>-• + + + // If this is NOT a dictionary at this point, then freak out. + if (!_.isObject(comparatorDirective) || _.isArray(comparatorDirective) || _.isFunction(comparatorDirective)) { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid, because, although it '+ + 'is an array, one of its items (aka comparator directives) has an unexpected '+ + 'data type. Expected every comparator directive to be a dictionary like `{ fullName: \'DESC\' }`. '+ + 'But instead, this one is: '+ + util.inspect(comparatorDirective, {depth:5})+'' + )); + }//-• + + + // IWMIH, then we know we've got a dictionary. + // + // > This is where we assume it is a well-formed comparator directive + // > and casually/gently/lovingly validate it as such. 
+ + + // ┌─┐┌─┐┬ ┬┌┐┌┌┬┐ ┬┌─┌─┐┬ ┬┌─┐ + // │ │ ││ ││││ │ ├┴┐├┤ └┬┘└─┐ + // └─┘└─┘└─┘┘└┘ ┴ ┴ ┴└─┘ ┴ └─┘ + // Count the keys. + switch (_.keys(comparatorDirective).length) { + + // Must not be an empty dictionary. + case 0: + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid, because, although it '+ + 'is an array, one of its items (aka comparator directives) is `{}`, an empty dictionary '+ + '(aka plain JavaScript object). But comparator directives are supposed to have '+ + '_exactly one_ key (e.g. so that they look something like `{ fullName: \'DESC\' }`.' + )); + + case 1: + // There should always be exactly one key. + // If we're here, then everything is ok. + // Keep going. + break; + + // Must not have more than one key. + default: + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid, because, although it '+ + 'is an array, one of its items (aka comparator directives) is a dictionary (aka '+ + 'plain JavaScript object) with '+(_.keys(comparatorDirective).length)+ ' keys... '+ + 'But, that\'s too many keys. Comparator directives are supposed to have _exactly '+ + 'one_ key (e.g. so that they look something like `{ fullName: \'DESC\' }`. '+ + 'But instead, this one is: '+util.inspect(comparatorDirective, {depth:5})+'' + )); + + }// + + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┌┬┐┬ ┬┌─┐┌┬┐ ┬┌─┌─┐┬ ┬ ┬┌─┐ ┬ ┬┌─┐┬ ┬┌┬┐ ┌─┐┌┬┐┌┬┐┬─┐ + // │ ├─┤├┤ │ ├┴┐ │ ├─┤├─┤ │ ├┴┐├┤ └┬┘ │└─┐ └┐┌┘├─┤│ │ ││ ├─┤ │ │ ├┬┘ + // └─┘┴ ┴└─┘└─┘┴ ┴ ┴ ┴ ┴┴ ┴ ┴ ┴ ┴└─┘ ┴ ┴└─┘ └┘ ┴ ┴┴─┘┴─┴┘ ┴ ┴ ┴ ┴ ┴└─ + // Next, check this comparator directive's key (i.e. 
its "comparator target") + // • if this model is `schema: true`: + // ° the directive's key must be the name of a recognized attribute + // • if this model is `schema: false`: + // ° then the directive's key must be a conceivably-valid attribute name + + var sortByKey = _.keys(comparatorDirective)[0]; + + var attrName; + var isDeepTarget; + var deepTargetHops; + if (_.isString(sortByKey)){ + deepTargetHops = sortByKey.split(/\./); + isDeepTarget = (deepTargetHops.length > 1); + } + + if (isDeepTarget) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Replace this opt-in experimental support with official support for + // deep targets for comparator directives: i.e. dot notation for sorting by nested + // properties of JSON embeds. + // This will require additional tests + docs, as well as a clear way of indicating + // whether a particular adapter supports this feature so that proper error messages + // can be displayed otherwise. + // (See https://github.com/balderdashy/waterline/pull/1519) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (!meta || !meta.enableExperimentalDeepTargets) { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'Cannot use dot notation as the target for a `sort` comparator without enabling experimental '+ + 'support for "deep targets". Please try again with `.meta({enableExperimentalDeepTargets:true})`.' + )); + }//• + + attrName = deepTargetHops[0]; + } + else { + attrName = sortByKey; + } + + // Look up the attribute definition, if possible. + var attrDef; + try { + attrDef = getAttribute(attrName, modelIdentity, orm); + } catch (e){ + switch (e.code) { + case 'E_ATTR_NOT_REGISTERED': + // If no matching attr def exists, then just leave `attrDef` undefined + // and continue... for now anyway. + break; + default: throw e; + } + }// + + + // If model is `schema: true`... 
+ if (WLModel.hasSchema === true) { + + // Make sure this matched a recognized attribute name. + if (!attrDef) { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid, because, although it '+ + 'is an array, one of its items (aka comparator directives) is problematic. '+ + 'It indicates that we should sort by `'+attrName+'`-- but that is not a recognized '+ + 'attribute for this model (`'+modelIdentity+'`). Since the model declares `schema: true`, '+ + 'this is not allowed.' + )); + }//-• + + } + // Else if model is `schema: false`... + else if (WLModel.hasSchema === false) { + + // Make sure this is at least a valid name for a Waterline attribute. + if (!isValidAttributeName(attrName)) { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid, because, although it '+ + 'is an array, one of its items (aka comparator directives) is problematic. '+ + 'It indicates that we should sort by `'+attrName+'`-- but that is not a '+ + 'valid name for an attribute in Waterline.' + )); + }//-• + + } else { throw new Error('Consistency violation: Every instantiated Waterline model should always have a `hasSchema` property as either `true` or `false` (should have been derived from the `schema` model setting when Waterline was being initialized). But somehow, this model (`'+modelIdentity+'`) ended up with `hasSchema: '+util.inspect(WLModel.hasSchema, {depth:5})+'`'); } + + + + // Now, make sure the matching attribute is _actually_ something that can be sorted on. + // In other words: it must NOT be a plural (`collection`) association. + if (attrDef && attrDef.collection) { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'Cannot sort by `'+attrName+'` because it corresponds with an "unsortable" attribute '+ + 'definition for this model (`'+modelIdentity+'`). This attribute is a plural (`collection`) '+ + 'association, so sorting by it is not supported.' 
+ )); + }//-• + + + if (isDeepTarget) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: See the other note above. This is still experimental. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + if (isDeepTarget && attrDef && attrDef.type !== 'json' && attrDef.type !== 'ref') { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'Cannot use dot notation to sort by a nested property of `'+attrName+'` because '+ + 'the corresponding attribute is not capable of holding embedded JSON data such as '+ + 'dictionaries (`{}`) or arrays (`[]`). '+ + (attrDef.model||attrDef.collection? + 'Dot notation is not currently supported for sorting across associations '+ + '(see https://github.com/balderdashy/waterline/pull/1519 for details).' + : + 'Sorting with dot notation is only supported for fields which might potentially '+ + 'contain embedded JSON.' + ) + )); + }//• + }//fi + + + // ┬ ┬┌─┐┬─┐┬┌─┐┬ ┬ ┌─┐┬┌┬┐┬ ┬┌─┐┬─┐ ╔═╗╔═╗╔═╗ ┌─┐┬─┐ ╔╦╗╔═╗╔═╗╔═╗ + // └┐┌┘├┤ ├┬┘│├┤ └┬┘ ├┤ │ │ ├─┤├┤ ├┬┘ ╠═╣╚═╗║ │ │├┬┘ ║║║╣ ╚═╗║ + // └┘ └─┘┴└─┴└ ┴ └─┘┴ ┴ ┴ ┴└─┘┴└─ ╩ ╩╚═╝╚═╝ └─┘┴└─ ═╩╝╚═╝╚═╝╚═╝ + // ┬ ┌─┐┌┐┌┌─┐┬ ┬┬─┐┌─┐ ┌─┐┬─┐┌─┐┌─┐┌─┐┬─┐ ┌─┐┌─┐┌─┐┬┌┬┐┌─┐┬ ┬┌─┐┌─┐┌┬┐┬┌─┐┌┐┌ + // ┌┼─ ├┤ │││└─┐│ │├┬┘├┤ ├─┘├┬┘│ │├─┘├┤ ├┬┘ │ ├─┤├─┘│ │ ├─┤│ │┌─┘├─┤ │ ││ ││││ + // └┘ └─┘┘└┘└─┘└─┘┴└─└─┘ ┴ ┴└─└─┘┴ └─┘┴└─ └─┘┴ ┴┴ ┴ ┴ ┴ ┴┴─┘┴└─┘┴ ┴ ┴ ┴└─┘┘└┘ + // And finally, ensure the corresponding value on the RHS is either 'ASC' or 'DESC'. + // (doing a little capitalization if necessary) + + + // Before doing a careful check, uppercase the sort direction, if safe to do so. + if (_.isString(comparatorDirective[sortByKey])) { + comparatorDirective[sortByKey] = comparatorDirective[sortByKey].toUpperCase(); + }//>- + + // Now verify that it is either ASC or DESC. + switch (comparatorDirective[sortByKey]) { + case 'ASC': + case 'DESC': //ok! 
+ break; + + default: + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'The `sort` clause in the provided criteria is invalid, because, although it '+ + 'is an array, one of its items (aka comparator directives) is problematic. '+ + 'It indicates that we should sort by `'+sortByKey+'`, which is fine. But then '+ + 'it suggests that Waterline should use `'+comparatorDirective[sortByKey]+'` '+ + 'as the sort direction. (Should always be either "ASC" or "DESC".)' + )); + }// + + // Return the modified comparator directive. + return comparatorDirective; + + });// + + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┌─┐┌─┐┬─┐ ╔╦╗╦ ╦╔═╗╦ ╦╔═╗╔═╗╔╦╗╔═╗╔═╗ + // │ ├─┤├┤ │ ├┴┐ ├┤ │ │├┬┘ ║║║ ║╠═╝║ ║║ ╠═╣ ║ ║╣ ╚═╗ + // └─┘┴ ┴└─┘└─┘┴ ┴ └ └─┘┴└─ ═╩╝╚═╝╩ ╩═╝╩╚═╝╩ ╩ ╩ ╚═╝╚═╝ + // Finally, check that no two comparator directives mention the + // same target. (Because you can't sort by the same thing twice.) + var referencedComparatorTargets = []; + _.each(sortClause, function (comparatorDirective){ + + var sortByKey = _.keys(comparatorDirective)[0]; + if (_.contains(referencedComparatorTargets, sortByKey)) { + throw flaverr('E_SORT_CLAUSE_UNUSABLE', new Error( + 'Cannot sort by the same thing (`'+sortByKey+'`) twice!' + )); + }//-• + + referencedComparatorTargets.push(sortByKey); + + });// + + + // --• At this point, we know we are dealing with a properly-formatted + // & semantically valid array of comparator directives. 
+ return sortClause; + + +}; diff --git a/lib/waterline/utils/query/private/normalize-value-to-set.js b/lib/waterline/utils/query/private/normalize-value-to-set.js new file mode 100644 index 000000000..667f8aeca --- /dev/null +++ b/lib/waterline/utils/query/private/normalize-value-to-set.js @@ -0,0 +1,736 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var anchor = require('anchor'); +var flaverr = require('flaverr'); +var rttc = require('rttc'); +// var EA = require('encrypted-attr'); « this is required below for node compat. +var getModel = require('../../ontology/get-model'); +var getAttribute = require('../../ontology/get-attribute'); +var isValidAttributeName = require('./is-valid-attribute-name'); +var normalizePkValue = require('./normalize-pk-value'); +var normalizePkValueOrValues = require('./normalize-pk-value-or-values'); + + +/** + * normalizeValueToSet() + * + * Validate and normalize the provided `value`, hammering it destructively into a format + * that is compatible with the specified attribute. (Also take care of encrypting the `value`, + * if configured to do so by the corresponding attribute definition.) + * + * This function has a return value. But realize that this is only because the provided value + * _might_ be a string, number, or some other primitive that is NOT passed by reference, and thus + * must be replaced, rather than modified. + * + * -- + * + * @param {Ref} value + * The value to set (i.e. from the `valuesToSet` or `newRecord` query keys of a "stage 1 query"). + * (If provided as `undefined`, it will be ignored) + * > WARNING: + * > IN SOME CASES (BUT NOT ALL!), THE PROVIDED VALUE WILL + * > UNDERGO DESTRUCTIVE, IN-PLACE CHANGES JUST BY PASSING IT + * > IN TO THIS UTILITY. + * + * @param {String} supposedAttrName + * The "supposed attribute name"; i.e. the LHS the provided value came from (e.g. 
"id" or "favoriteBrands") + * > Useful for looking up the appropriate attribute definition. + * + * @param {String} modelIdentity + * The identity of the model this value is for (e.g. "pet" or "user") + * > Useful for looking up the Waterline model and accessing its attribute definitions. + * + * @param {Ref} orm + * The Waterline ORM instance. + * > Useful for accessing the model definitions. + * + * @param {Dictionary?} meta + * The contents of the `meta` query key, if one was provided. + * > Useful for propagating query options to low-level utilities like this one. + * + * -- + * + * @returns {Ref} + * The successfully-normalized value, ready for use within the `valuesToSet` or `newRecord` + * query key of a stage 2 query. (May or may not be the original reference.) + * + * -- + * + * @throws {Error} If the value should be ignored/stripped (e.g. because it is `undefined`, or because it + * does not correspond with a recognized attribute, and the model def has `schema: true`) + * @property {String} code + * - E_SHOULD_BE_IGNORED + * + * + * @throws {Error} If it encounters incompatible usage in the provided `value`, + * including e.g. the case where an invalid value is specified for + * an association. + * @property {String} code + * - E_HIGHLY_IRREGULAR + * + * + * @throws {Error} If the provided `value` has an incompatible data type. + * | @property {String} code + * | - E_TYPE + * | @property {String} expectedType + * | - string + * | - number + * | - boolean + * | - json + * | + * | This is only versus the attribute's declared "type", or other similar type safety issues -- + * | certain failed checks for associations result in a different error code (see above). + * | + * | Remember: + * | This is the case where a _completely incorrect type of data_ was passed in. + * | This is NOT a high-level "anchor" validation failure! 
(see below for that) + * | > Unlike anchor validation errors, this exception should never be negotiated/parsed/used + * | > for delivering error messages to end users of an application-- it is carved out + * | > separately purely to make things easier to follow for the developer. + * + * + * @throws {Error} If the provided `value` fails the requiredness guarantee of the corresponding attribute. + * | @property {String} code + * | - E_REQUIRED + * + * + * @throws {Error} If the provided `value` violates one or more of the high-level validation rules + * | configured for the corresponding attribute. + * | @property {String} code + * | - E_VIOLATES_RULES + * | @property {Array} ruleViolations + * | e.g. + * | ``` + * | [ + * | { + * | rule: 'minLength', //(isEmail/isNotEmptyString/max/isNumber/etc) + * | message: 'Too few characters (max 30)' + * | } + * | ] + * | ``` + * + * @throws {Error} If anything else unexpected occurs. + */ +module.exports = function normalizeValueToSet(value, supposedAttrName, modelIdentity, orm, meta) { + + // ================================================================================================ + assert(_.isString(supposedAttrName), '`supposedAttrName` must be a string.'); + // (`modelIdentity` and `orm` will be automatically checked by calling `getModel()` below) + // > Note that this attr name MIGHT be empty string -- although it should never be. 
+ // > (we check that below) + // ================================================================================================ + + + + // ██████╗██╗ ██╗███████╗ ██████╗██╗ ██╗ ███╗ ███╗ ██████╗ ██████╗ ███████╗██╗ + // ██╔════╝██║ ██║██╔════╝██╔════╝██║ ██╔╝ ████╗ ████║██╔═══██╗██╔══██╗██╔════╝██║ + // ██║ ███████║█████╗ ██║ █████╔╝ ██╔████╔██║██║ ██║██║ ██║█████╗ ██║ + // ██║ ██╔══██║██╔══╝ ██║ ██╔═██╗ ██║╚██╔╝██║██║ ██║██║ ██║██╔══╝ ██║ + // ╚██████╗██║ ██║███████╗╚██████╗██║ ██╗ ██║ ╚═╝ ██║╚██████╔╝██████╔╝███████╗███████╗ + // ╚═════╝╚═╝ ╚═╝╚══════╝ ╚═════╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ + // + // █████╗ ███╗ ██╗██████╗ █████╗ ████████╗████████╗██████╗ + // ██╔══██╗████╗ ██║██╔══██╗ ██╔══██╗╚══██╔══╝╚══██╔══╝██╔══██╗ + // ███████║██╔██╗ ██║██║ ██║ ███████║ ██║ ██║ ██████╔╝ + // ██╔══██║██║╚██╗██║██║ ██║ ██╔══██║ ██║ ██║ ██╔══██╗ + // ██║ ██║██║ ╚████║██████╔╝ ██║ ██║ ██║ ██║ ██║ ██║ + // ╚═╝ ╚═╝╚═╝ ╚═══╝╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ + // + + // Look up the Waterline model. + // > This is so that we can reference the original model definition. + var WLModel; + try { + WLModel = getModel(modelIdentity, orm); + } catch (e) { + switch (e.code) { + case 'E_MODEL_NOT_REGISTERED': throw new Error('Consistency violation: '+e.message); + default: throw e; + } + }// + + + // This local variable is used to hold a reference to the attribute def + // that corresponds with this value (if there is one). + var correspondingAttrDef; + try { + correspondingAttrDef = getAttribute(supposedAttrName, modelIdentity, orm); + } catch (e) { + switch (e.code) { + + case 'E_ATTR_NOT_REGISTERED': + // If no matching attr def exists, then just leave `correspondingAttrDef` + // undefined and continue... for now anyway. 
+ break; + + default: + throw e; + + } + }// + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┌─┐┌┬┐┌┬┐┬─┐┬┌┐ ┬ ┬┌┬┐┌─┐ ┌┐┌┌─┐┌┬┐┌─┐ + // │ ├─┤├┤ │ ├┴┐ ├─┤ │ │ ├┬┘│├┴┐│ │ │ ├┤ │││├─┤│││├┤ + // └─┘┴ ┴└─┘└─┘┴ ┴ ┴ ┴ ┴ ┴ ┴└─┴└─┘└─┘ ┴ └─┘ ┘└┘┴ ┴┴ ┴└─┘ + + // If this model declares `schema: true`... + if (WLModel.hasSchema === true) { + + // Check that this key corresponded with a recognized attribute definition. + // + // > If no such attribute exists, then fail gracefully by bailing early, indicating + // > that this value should be ignored (For example, this might cause this value to + // > be stripped out of the `newRecord` or `valuesToSet` query keys.) + if (!correspondingAttrDef) { + throw flaverr('E_SHOULD_BE_IGNORED', new Error( + 'This model declares itself `schema: true`, but this value does not match '+ + 'any recognized attribute (thus it will be ignored).' + )); + }//-• + + }// + // ‡ + // Else if this model declares `schema: false`... + else if (WLModel.hasSchema === false) { + + // Check that this key is a valid Waterline attribute name, at least. + if (!isValidAttributeName(supposedAttrName)) { + if (supposedAttrName === '') { + throw flaverr('E_HIGHLY_IRREGULAR', new Error('Empty string (\'\') is not a valid name for an attribute.')); + } + else { + throw flaverr('E_HIGHLY_IRREGULAR', new Error('This is not a valid name for an attribute.')); + } + }//-• + + } + // ‡ + else { + throw new Error( + 'Consistency violation: Every live Waterline model should always have the `hasSchema` flag '+ + 'as either `true` or `false` (should have been automatically derived from the `schema` model setting '+ + 'shortly after construction. And `schema` should have been verified as existing by waterline-schema). 
'+ + 'But somehow, this model\'s (`'+modelIdentity+'`) `hasSchema` property is as follows: '+ + util.inspect(WLModel.hasSchema, {depth:5})+'' + ); + }// + + + + + + // ██████╗██╗ ██╗███████╗ ██████╗██╗ ██╗ ██╗ ██╗ █████╗ ██╗ ██╗ ██╗███████╗ + // ██╔════╝██║ ██║██╔════╝██╔════╝██║ ██╔╝ ██║ ██║██╔══██╗██║ ██║ ██║██╔════╝ + // ██║ ███████║█████╗ ██║ █████╔╝ ██║ ██║███████║██║ ██║ ██║█████╗ + // ██║ ██╔══██║██╔══╝ ██║ ██╔═██╗ ╚██╗ ██╔╝██╔══██║██║ ██║ ██║██╔══╝ + // ╚██████╗██║ ██║███████╗╚██████╗██║ ██╗ ╚████╔╝ ██║ ██║███████╗╚██████╔╝███████╗ + // ╚═════╝╚═╝ ╚═╝╚══════╝ ╚═════╝╚═╝ ╚═╝ ╚═══╝ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚══════╝ + // + // Validate+lightly coerce this value, both as schema-agnostic data, + // and vs. the corresponding attribute definition's declared `type`, + // `model`, or `collection`. + + // Declare var to flag whether or not an attribute should have validation rules applied. + // This will typically be the case for primary keys and generic attributes under certain conditions. + var doCheckForRuleViolations = false; + + // If this value is `undefined`, then bail early, indicating that it should be ignored. + if (_.isUndefined(value)) { + throw flaverr('E_SHOULD_BE_IGNORED', new Error( + 'This value is `undefined`. Remember: in Sails/Waterline, we always treat keys with '+ + '`undefined` values as if they were never there in the first place.' + )); + }//-• + + // ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬┌─┐┌┬┐ ┬ ┬┌─┐┬ ┬ ┬┌─┐ ┬┌─┐ ┌─┐┌─┐┬─┐ ┌─┐┌┐┌ + // └─┐├─┘├┤ │ │├┤ │├┤ ││ └┐┌┘├─┤│ │ │├┤ │└─┐ ├┤ │ │├┬┘ ├─┤│││ + // └─┘┴ └─┘└─┘┴└ ┴└─┘─┴┘ └┘ ┴ ┴┴─┘└─┘└─┘ ┴└─┘ └ └─┘┴└─ ┴ ┴┘└┘ + // ╦ ╦╔╗╔╦═╗╔═╗╔═╗╔═╗╔═╗╔╗╔╦╔═╗╔═╗╔╦╗ ┌─┐┌┬┐┌┬┐┬─┐┬┌┐ ┬ ┬┌┬┐┌─┐ + // ║ ║║║║╠╦╝║╣ ║ ║ ║║ ╦║║║║╔═╝║╣ ║║ ├─┤ │ │ ├┬┘│├┴┐│ │ │ ├┤ + // ╚═╝╝╚╝╩╚═╚═╝╚═╝╚═╝╚═╝╝╚╝╩╚═╝╚═╝═╩╝ ┴ ┴ ┴ ┴ ┴└─┴└─┘└─┘ ┴ └─┘ + // + // If this value doesn't actually match an attribute definition... + if (!correspondingAttrDef) { + + // IWMIH then we already know this model has `schema: false`. 
+ // So if this value doesn't match a recognized attribute def, + // then we'll validate it as `type: json`. + // + // > This is because we don't want to send a potentially-circular/crazy + // > value down to the adapter unless it corresponds w/ a `type: 'ref'` attribute. + try { + value = rttc.validate('json', value); + } catch (e) { + switch (e.code) { + case 'E_INVALID': throw flaverr({ code: 'E_TYPE', expectedType: 'json' }, new Error( + 'Invalid value for unrecognized attribute (must be JSON-compatible). To explicitly allow '+ + 'non-JSON-compatible values like this, define a `'+supposedAttrName+'` attribute, and specify '+ + '`type: ref`. More info on this error: '+e.message + )); + default: throw e; + } + } + + }//‡ + // ┌─┐┌─┐┬─┐ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦ ╦ ╦╔═╔═╗╦ ╦ ╔═╗╔╦╗╔╦╗╦═╗╦╔╗ ╦ ╦╔╦╗╔═╗ + // ├┤ │ │├┬┘ ╠═╝╠╦╝║║║║╠═╣╠╦╝╚╦╝ ╠╩╗║╣ ╚╦╝ ╠═╣ ║ ║ ╠╦╝║╠╩╗║ ║ ║ ║╣ + // └ └─┘┴└─ ╩ ╩╚═╩╩ ╩╩ ╩╩╚═ ╩ ╩ ╩╚═╝ ╩ ╩ ╩ ╩ ╩ ╩╚═╩╚═╝╚═╝ ╩ ╚═╝ + else if (WLModel.primaryKey === supposedAttrName) { + + // Primary key attributes should have validation rules applied if they have any. + if (!_.isUndefined(correspondingAttrDef.validations)) { + doCheckForRuleViolations = true; + } + + try { + value = normalizePkValue(value, correspondingAttrDef.type); + } catch (e) { + switch (e.code) { + + case 'E_INVALID_PK_VALUE': + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'Invalid primary key value. '+e.message + )); + + default: + throw e; + } + } + + }//‡ + // ┌─┐┌─┐┬─┐ ╔═╗╦ ╦ ╦╦═╗╔═╗╦ ╔═╗╔═╗╔═╗╔═╗╔═╗╦╔═╗╔╦╗╦╔═╗╔╗╔ + // ├┤ │ │├┬┘ ╠═╝║ ║ ║╠╦╝╠═╣║ ╠═╣╚═╗╚═╗║ ║║ ║╠═╣ ║ ║║ ║║║║ + // └ └─┘┴└─ ╩ ╩═╝╚═╝╩╚═╩ ╩╩═╝ ╩ ╩╚═╝╚═╝╚═╝╚═╝╩╩ ╩ ╩ ╩╚═╝╝╚╝ + else if (correspondingAttrDef.collection) { + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // NOTE: For a brief period of time in the past, it was not permitted to call .update() or .validate() + // using an array of ids for a collection. 
But prior to the stable release of Waterline v0.13, this + // decision was reversed. The following commented-out code is left in Waterline to track what this + // was about, for posterity: + // ``` + // // If properties are not allowed for plural ("collection") associations, + // // then throw an error. + // if (!allowCollectionAttrs) { + // throw flaverr('E_HIGHLY_IRREGULAR', new Error( + // 'As a precaution, prevented replacing entire plural ("collection") association (`'+supposedAttrName+'`). '+ + // 'To do this, use `replaceCollection(...,\''+supposedAttrName+'\').members('+util.inspect(value, {depth:5})+')` '+ + // 'instead.' + // )); + // }//-• + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // Ensure that this is an array, and that each item in the array matches + // the expected data type for a pk value of the associated model. + try { + value = normalizePkValueOrValues(value, getAttribute(getModel(correspondingAttrDef.collection, orm).primaryKey, correspondingAttrDef.collection, orm).type); + } catch (e) { + switch (e.code) { + case 'E_INVALID_PK_VALUE': + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'If specified, expected `'+supposedAttrName+'` to be an array of ids '+ + '(representing the records to associate). But instead, got: '+ + util.inspect(value, {depth:5})+'' + // 'If specifying the value for a plural (`collection`) association, you must do so by '+ + // 'providing an array of associated ids representing the associated records. But instead, '+ + // 'for `'+supposedAttrName+'`, got: '+util.inspect(value, {depth:5})+'' + )); + default: throw e; + } + } + + }//‡ + // ┌─┐┌─┐┬─┐ ╔═╗╦╔╗╔╔═╗╦ ╦╦ ╔═╗╦═╗ ╔═╗╔═╗╔═╗╔═╗╔═╗╦╔═╗╔╦╗╦╔═╗╔╗╔ + // ├┤ │ │├┬┘ ╚═╗║║║║║ ╦║ ║║ ╠═╣╠╦╝ ╠═╣╚═╗╚═╗║ ║║ ║╠═╣ ║ ║║ ║║║║ + // └ └─┘┴└─ ╚═╝╩╝╚╝╚═╝╚═╝╩═╝╩ ╩╩╚═ ╩ ╩╚═╝╚═╝╚═╝╚═╝╩╩ ╩ ╩ ╩╚═╝╝╚╝ + else if (correspondingAttrDef.model) { + + // If `null` was specified, then it _might_ be OK. 
+ if (_.isNull(value)) { + + // We allow `null` for singular associations UNLESS they are required. + if (correspondingAttrDef.required) { + throw flaverr('E_REQUIRED', new Error( + 'Cannot set `null` for required association (`'+supposedAttrName+'`).' + )); + }//-• + + }//‡ + // Otherwise, this value is NOT null. + // So ensure that it matches the expected data type for a pk value + // of the associated model (normalizing it, if appropriate/possible.) + else { + + try { + value = normalizePkValue(value, getAttribute(getModel(correspondingAttrDef.model, orm).primaryKey, correspondingAttrDef.model, orm).type); + } catch (e) { + switch (e.code) { + case 'E_INVALID_PK_VALUE': + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'Expecting an id representing the associated record, or `null` to indicate '+ + 'there will be no associated record. But the specified value is not a valid '+ + '`'+supposedAttrName+'`. '+e.message + )); + default: + throw e; + } + }// + + }// + + }//‡ + // ┌─┐┌─┐┬─┐ ╔╦╗╦╔═╗╔═╗╔═╗╦ ╦ ╔═╗╔╗╔╔═╗╔═╗╦ ╦╔═╗ ╔═╗╔╦╗╔╦╗╦═╗╦╔╗ ╦ ╦╔╦╗╔═╗ + // ├┤ │ │├┬┘ ║║║║╚═╗║ ║╣ ║ ║ ╠═╣║║║║╣ ║ ║║ ║╚═╗ ╠═╣ ║ ║ ╠╦╝║╠╩╗║ ║ ║ ║╣ + // └ └─┘┴└─ ╩ ╩╩╚═╝╚═╝╚═╝╩═╝╩═╝╩ ╩╝╚╝╚═╝╚═╝╚═╝╚═╝ ╩ ╩ ╩ ╩ ╩╚═╩╚═╝╚═╝ ╩ ╚═╝ + // Otherwise, the corresponding attr def is just a normal attr--not an association or primary key. + // > We'll use loose validation (& thus also light coercion) on the value and see what happens. + else { + if (!_.isString(correspondingAttrDef.type) || correspondingAttrDef.type === '') { + throw new Error('Consistency violation: There is no way this attribute (`'+supposedAttrName+'`) should have been allowed to be registered with neither a `type`, `model`, nor `collection`! Here is the attr def: '+util.inspect(correspondingAttrDef, {depth:5})+''); + } + + // First, check if this is an auto-*-at timestamp, and if it is... 
+ if (correspondingAttrDef.autoCreatedAt || correspondingAttrDef.autoUpdatedAt) { + + // Ensure we are not trying to set it to empty string + // (this would never make sense.) + if (value === '') { + throw flaverr('E_HIGHLY_IRREGULAR', new Error( + 'If specified, should be a valid '+ + ( + correspondingAttrDef.type === 'number' ? + 'JS timestamp (unix epoch ms)' : + 'JSON timestamp (ISO 8601)' + )+'. '+ + 'But instead, it was empty string ("").' + )); + }//-• + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: If there is significant confusion being caused by allowing `autoUpdatedAt` + // attrs to be set explicitly on .create() and .update() , then we should reevaluate + // adding in the following code: + // ``` + // // And log a warning about how this auto-* timestamp is being set explicitly, + // // whereas the generally expected behavior is to let it be set automatically. + // var autoTSDisplayName; + // if (correspondingAttrDef.autoCreatedAt) { + // autoTSDisplayName = 'autoCreatedAt'; + // } + // else { + // autoTSDisplayName = 'autoUpdatedAt'; + // } + // + // console.warn('\n'+ + // 'Warning: Explicitly overriding `'+supposedAttrName+'`...\n'+ + // '(This attribute of the `'+modelIdentity+'` model is defined as '+ + // '`'+autoTSDisplayName+': true`, meaning it is intended to be set '+ + // 'automatically, except in special cases when debugging or migrating data.)\n' + // ); + // ``` + // + // But for now, leaving it (^^) out. + // + // > See https://github.com/balderdashy/waterline/pull/1440#issuecomment-275943205 + // > for more information. Note that we'd need an extra meta key because of + // > auto-migrations and other higher level tooling built on Waterline. 
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + }//>-• + + + // Handle a special case where we want a more specific error: + // + // > Note: This is just like normal RTTC validation ("loose" mode), with one major exception: + // > We handle `null` as a special case, regardless of the type being validated against; + // > whether or not this attribute is `required: true`. That's because it's so easy to + // > get confused about how `required` works in a given database vs. Waterline vs. JavaScript. + // > (Especially when it comes to null vs. undefined vs. empty string, etc) + // > + // > In RTTC, `null` is only valid vs. `json` and `ref` types, for singular associations, + // > and for completely unrecognized attributes -- and that's still true here. + // > But most schemaful databases also support a configuration where `null` is ALSO allowed + // > as an implicit base value for any type of data. This sorta serves the same purpose as + // > `undefined`, or omission, in JavaScript or MongoDB. BUT that doesn't mean we necessarily + // > allow `null` -- consistency of type safety rules is too important -- it just means that + // > we give it its own special error message. + // > + // > BUT NOTE: if `allowNull` is enabled, we DO allow null. 
+ // > + // > Review the "required"-ness checks in the `normalize-new-record.js` utility for examples + // > of related behavior, and see the more detailed spec for more information: + // > https://docs.google.com/spreadsheets/d/1whV739iW6O9SxRZLCIe2lpvuAUqm-ie7j7tn_Pjir3s/edit#gid=1814738146 + var isProvidingNullForIncompatibleOptionalAttr = ( + _.isNull(value) && + correspondingAttrDef.type !== 'json' && + correspondingAttrDef.type !== 'ref' && + !correspondingAttrDef.allowNull && + !correspondingAttrDef.required + ); + if (isProvidingNullForIncompatibleOptionalAttr) { + throw flaverr({ code: 'E_TYPE', expectedType: correspondingAttrDef.type }, new Error( + 'Specified value (`null`) is not a valid `'+supposedAttrName+'`. '+ + 'Even though this attribute is optional, it still does not allow `null` to '+ + 'be explicitly set, because `null` is not valid vs. the expected '+ + 'type: \''+correspondingAttrDef.type+'\'. Instead, to indicate "voidness", '+ + 'please set the value for this attribute to the base value for its type, '+ + (function _getBaseValuePhrase(){ + switch(correspondingAttrDef.type) { + case 'string': return '`\'\'` (empty string)'; + case 'number': return '`0` (zero)'; + default: return '`'+rttc.coerce(correspondingAttrDef.type)+'`'; + } + })()+'. Or, if you specifically need to save `null`, then change this '+ + 'attribute to either `type: \'json\'` or `type: \'ref\'`. 
'+ + (function _getExtraPhrase(){ + if (_.isUndefined(correspondingAttrDef.defaultsTo)) { + return 'Also note: Since this attribute does not define a `defaultsTo`, '+ + 'the base value will be used as an implicit default if `'+supposedAttrName+'` '+ + 'is omitted when creating a record.'; + } + else { return ''; } + })() + )); + }//-• + + + // ┌─┐┬ ┬┌─┐┬─┐┌─┐┌┐┌┌┬┐┌─┐┌─┐ ╔╦╗╦ ╦╔═╗╔═╗ ╔═╗╔═╗╔═╗╔═╗╔╦╗╦ ╦ + // │ ┬│ │├─┤├┬┘├─┤│││ │ ├┤ ├┤ ║ ╚╦╝╠═╝║╣ ╚═╗╠═╣╠╣ ║╣ ║ ╚╦╝ + // └─┘└─┘┴ ┴┴└─┴ ┴┘└┘ ┴ └─┘└─┘ ╩ ╩ ╩ ╚═╝ ╚═╝╩ ╩╚ ╚═╝ ╩ ╩ + // If the value is `null` and the attribute has allowNull set to true it's ok. + if (correspondingAttrDef.allowNull && _.isNull(value)) { + // Nothing else to validate here. + } + //‡ + // Otherwise, verify that this value matches the expected type, and potentially + // perform loose coercion on it at the same time. This throws an E_INVALID error + // if validation fails. + else { + try { + value = rttc.validate(correspondingAttrDef.type, value); + } catch (e) { + switch (e.code) { + case 'E_INVALID': throw flaverr({ code: 'E_TYPE', expectedType: correspondingAttrDef.type }, new Error( + 'Specified value is not a valid `'+supposedAttrName+'`. '+e.message + )); + default: throw e; + } + } + } + + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ┌─┐┌─┐┌─┐┌─┐┬┌─┐┬ ┌─┐┌─┐┌─┐┌─┐┌─┐ + // ├─┤├─┤│││ │││ ├┤ └─┐├─┘├┤ │ │├─┤│ │ ├─┤└─┐├┤ └─┐ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ └─┘┴ └─┘└─┘┴┴ ┴┴─┘ └─┘┴ ┴└─┘└─┘└─┘ + // ┌─ ┌─┐┌─┐┬─┐ ╦═╗╔═╗╔═╗ ╦ ╦╦╦═╗╔═╗╔╦╗ ─┐ + // │─── ├┤ │ │├┬┘ ╠╦╝║╣ ║═╬╗║ ║║╠╦╝║╣ ║║ ───│ + // └─ └ └─┘┴└─ ╩╚═╚═╝╚═╝╚╚═╝╩╩╚═╚═╝═╩╝ ─┘ + if (correspondingAttrDef.required) { + + // "" (empty string) is never allowed as a value for a required attribute. + if (value === '') { + throw flaverr('E_REQUIRED', new Error( + 'Cannot set "" (empty string) for a required attribute.' + )); + }//>-• + + + // `null` is never allowed as a value for a required attribute. + if (_.isNull(value)) { + throw flaverr('E_REQUIRED', new Error( + 'Cannot set `null` for a required attribute.' 
+ )); + }//-• + + }//>- + + + // Decide whether validation rules should be checked for this attribute. + // + // > • High-level validation rules are ALWAYS skipped for `null`. + // > • If there is no `validations` attribute key, then there's nothing for us to check. + doCheckForRuleViolations = !_.isNull(value) && !_.isUndefined(correspondingAttrDef.validations); + + }// + + // ┌─┐┬ ┬┌─┐┌─┐┬┌─ ┌─┐┌─┐┬─┐ ╦═╗╦ ╦╦ ╔═╗ ╦ ╦╦╔═╗╦ ╔═╗╔╦╗╦╔═╗╔╗╔╔═╗ + // │ ├─┤├┤ │ ├┴┐ ├┤ │ │├┬┘ ╠╦╝║ ║║ ║╣ ╚╗╔╝║║ ║║ ╠═╣ ║ ║║ ║║║║╚═╗ + // └─┘┴ ┴└─┘└─┘┴ ┴ └ └─┘┴└─ ╩╚═╚═╝╩═╝╚═╝ ╚╝ ╩╚═╝╩═╝╩ ╩ ╩ ╩╚═╝╝╚╝╚═╝ + // If appropriate, strictly enforce our (potentially-mildly-coerced) value + // vs. the validation ruleset defined on the corresponding attribute. + // Then, if there are any rule violations, stick them in an Error and throw it. + if (doCheckForRuleViolations) { + var ruleset = correspondingAttrDef.validations; + var isRulesetDictionary = _.isObject(ruleset) && !_.isArray(ruleset) && !_.isFunction(ruleset); + if (!isRulesetDictionary) { + throw new Error('Consistency violation: If set, an attribute\'s validations ruleset (`validations`) should always be a dictionary (plain JavaScript object). But for the `'+modelIdentity+'` model\'s `'+supposedAttrName+'` attribute, it somehow ended up as this instead: '+util.inspect(correspondingAttrDef.validations,{depth:5})+''); + } + + var ruleViolations; + try { + ruleViolations = anchor(value, ruleset); + // e.g. + // [ { rule: 'isEmail', message: 'Value was not a valid email address.' }, ... ] + } catch (e) { + throw new Error( + 'Consistency violation: Unexpected error occurred when attempting to apply '+ + 'high-level validation rules from `'+modelIdentity+'` model\'s `'+supposedAttrName+'` '+ + 'attribute. '+e.stack + ); + }// + + if (ruleViolations.length > 0) { + + // Format rolled-up summary for use in our error message. + // e.g. + // ``` + // • Value was not in the configured whitelist (delinquent, new, paid) + // • Value was an empty string. 
+ // ``` + var summary = _.reduce(ruleViolations, function (memo, violation){ + memo += ' • '+violation.message+'\n'; + return memo; + }, ''); + + throw flaverr({ + code: 'E_VIOLATES_RULES', + ruleViolations: ruleViolations + }, new Error( + 'Violated one or more validation rules:\n'+ + summary + )); + }//-• + + }//>-• + + + // ███████╗███╗ ██╗ ██████╗██████╗ ██╗ ██╗██████╗ ████████╗ ██████╗ █████╗ ████████╗ █████╗ + // ██╔════╝████╗ ██║██╔════╝██╔══██╗╚██╗ ██╔╝██╔══██╗╚══██╔══╝ ██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗ + // █████╗ ██╔██╗ ██║██║ ██████╔╝ ╚████╔╝ ██████╔╝ ██║ ██║ ██║███████║ ██║ ███████║ + // ██╔══╝ ██║╚██╗██║██║ ██╔══██╗ ╚██╔╝ ██╔═══╝ ██║ ██║ ██║██╔══██║ ██║ ██╔══██║ + // ███████╗██║ ╚████║╚██████╗██║ ██║ ██║ ██║ ██║ ██████╔╝██║ ██║ ██║ ██║ ██║ + // ╚══════╝╚═╝ ╚═══╝ ╚═════╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ + // ╦╔═╗ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ + // ║╠╣ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ + // ╩╚ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ooo + + if (correspondingAttrDef && correspondingAttrDef.encrypt) { + + if (correspondingAttrDef.encrypt !== true) { + throw new Error( + 'Consistency violation: `'+modelIdentity+'` model\'s `'+supposedAttrName+'` attribute '+ + 'has a corrupted definition. Should not have been allowed to set `encrypt` to anything '+ + 'other than `true` or `false`.' + ); + }//• + if (correspondingAttrDef.type === 'ref') { + throw new Error( + 'Consistency violation: `'+modelIdentity+'` model\'s `'+supposedAttrName+'` attribute '+ + 'has a corrupted definition. Should not have been allowed to be both `type: \'ref\' '+ + 'AND `encrypt: true`.' + ); + }//• + if (!_.isObject(WLModel.dataEncryptionKeys) || !WLModel.dataEncryptionKeys.default || !_.isString(WLModel.dataEncryptionKeys.default)) { + throw new Error( + 'Consistency violation: `'+modelIdentity+'` model has a corrupted definition. 
Should not '+ + 'have been allowed to declare an attribute with `encrypt: true` without also specifying '+ + 'the `dataEncryptionKeys` model setting as a valid dictionary (including a valid "default" '+ + 'key).' + ); + }//• + + // Figure out what DEK to encrypt with. + var idOfDekToEncryptWith; + if (meta && meta.encryptWith) { + idOfDekToEncryptWith = meta.encryptWith; + } + else { + idOfDekToEncryptWith = 'default'; + } + + if (!WLModel.dataEncryptionKeys[idOfDekToEncryptWith]) { + throw new Error( + 'There is no known data encryption key by that name (`'+idOfDekToEncryptWith+'`). '+ + 'Please make sure a valid DEK (data encryption key) is configured under `dataEncryptionKeys`.' + ); + }//• + + try { + + // Never encrypt `''`(empty string), `0` (zero), `false`, or `null`, since these are possible + // base values. (Note that the current code path only runs when a value is explicitly provided + // for the attribute-- not when it is omitted. Thus these base values can get into the database + // without being encrypted _anyway_.) + if (value === '' || value === 0 || value === false || _.isNull(value)) { + // Don't encrypt. + } + // Never encrypt if the (private/experimental) `skipEncryption` meta key is + // set truthy. PLEASE DO NOT RELY ON THIS IN YOUR OWN CODE- IT COULD CHANGE + // AT ANY TIME AND BREAK YOUR APP OR PLUGIN! + // > (Useful for internal method calls-- e.g. the internal "create()" that + // > Waterline uses to implement `findOrCreate()`. For more info on that, + // > see https://github.com/balderdashy/sails/issues/4302#issuecomment-363883885) + else if (meta && meta.skipEncryption) { + // Don't encrypt. + } + else { + // First, JSON-encode value, to allow for differentiating between strings/numbers/booleans/null. + var jsonEncoded; + try { + jsonEncoded = JSON.stringify(value); + } catch (err) { + // Note: Stringification SHOULD always work, because we just checked all that out above. 
+ // But just in case it doesn't, or if this code gets moved elsewhere in the future, here + // we include a reasonable error here as a backup. + throw flaverr({ + message: 'Before encrypting, Waterline attempted to JSON-stringify this value to ensure it '+ + 'could be accurately decoded into the correct data type later (for example, `2` vs `\'2\'`). '+ + 'But this time, JSON.stringify() failed with the following error: '+err.message + }, err); + } + + + // Encrypt using the appropriate key from the configured DEKs. + + // console.log('•••••encrypting JSON-encoded value: `'+util.inspect(jsonEncoded, {depth:null})+'`'); + + // Require this down here for Node version compat. + var EA = require('encrypted-attr'); + value = EA([supposedAttrName], { + keys: WLModel.dataEncryptionKeys, + keyId: idOfDekToEncryptWith + }) + .encryptAttribute(undefined, jsonEncoded); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // Alternative: (hack for testing) + // ``` + // if (value.match(/^ENCRYPTED:/)){ throw new Error('Unexpected behavior: Can\'t encrypt something already encrypted!!!'); } + // value = 'ENCRYPTED:'+jsonEncoded; + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + }//fi + + } catch (err) { + // console.log('•••••was attempting to encrypt this value: `'+util.inspect(value, {depth:null})+'`'); + throw flaverr({ + message: 'Encryption failed for `'+supposedAttrName+'`\n'+ + 'Details:\n'+ + ' '+err.message + }, _.isError(err) ? err : new Error()); + } + + + }//fi + + + // Return the normalized (and potentially encrypted) value. 
+ return value; + +}; diff --git a/lib/waterline/utils/query/private/normalize-where-clause.js b/lib/waterline/utils/query/private/normalize-where-clause.js new file mode 100644 index 000000000..2e96b981f --- /dev/null +++ b/lib/waterline/utils/query/private/normalize-where-clause.js @@ -0,0 +1,733 @@ +/** + * Module dependencies + */ + +var assert = require('assert'); +var util = require('util'); +var _ = require('@sailshq/lodash'); +var flaverr = require('flaverr'); +var getModel = require('../../ontology/get-model'); +var normalizeConstraint = require('./normalize-constraint'); + + +/** + * Module constants + */ + +// Predicate operators +var PREDICATE_OPERATOR_KINDS = [ + 'or', + 'and' +]; + + +/** + * normalizeWhereClause() + * + * Validate and normalize the `where` clause, rejecting any obviously-unsupported + * usage, and tolerating certain backwards-compatible things. + * + * ------------------------------------------------------------------------------------------ + * @param {Ref} whereClause + * A hypothetically well-formed `where` clause from a Waterline criteria. + * (i.e. in a "stage 1 query") + * > WARNING: + * > IN SOME CASES (BUT NOT ALL!), THE PROVIDED VALUE WILL + * > UNDERGO DESTRUCTIVE, IN-PLACE CHANGES JUST BY PASSING IT + * > IN TO THIS UTILITY. + * + * @param {String} modelIdentity + * The identity of the model this `where` clause is referring to (e.g. "pet" or "user") + * > Useful for looking up the Waterline model and accessing its attribute definitions. + * + * @param {Ref} orm + * The Waterline ORM instance. + * > Useful for accessing the model definitions. + * + * @param {Dictionary?} meta + * The contents of the `meta` query key, if one was provided. + * > Useful for propagating query options to low-level utilities like this one. + * + * ------------------------------------------------------------------------------------------ + * @returns {Dictionary} + * The successfully-normalized `where` clause, ready for use in a stage 2 query. 
+ * > Note that the originally provided `where` clause MAY ALSO HAVE BEEN + * > MUTATED IN PLACE! + * ------------------------------------------------------------------------------------------ + * @throws {Error} If it encounters irrecoverable problems or unsupported usage in + * the provided `where` clause. + * @property {String} code + * - E_WHERE_CLAUSE_UNUSABLE + * + * + * @throws {Error} If the `where` clause indicates that it should never match anything. + * @property {String} code + * - E_WOULD_RESULT_IN_NOTHING + * + * + * @throws {Error} If anything else unexpected occurs. + */ +module.exports = function normalizeWhereClause(whereClause, modelIdentity, orm, meta) { + + // Look up the Waterline model for this query. + // > This is so that we can reference the original model definition. + var WLModel = getModel(modelIdentity, orm); + + + // ┌─┐┬ ┬┌─┐┌─┐┌─┐┬─┐┌┬┐ ╔╦╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ╔═╗╦═╗╔═╗╔═╗ ┌┬┐┌─┐┌┬┐┌─┐ ┬┌─┌─┐┬ ┬ + // └─┐│ │├─┘├─┘│ │├┬┘ │ ║║║║ ║ ║ ╠═╣ ║ ║╣ ╠═╣╠╦╝║ ╦╚═╗ │││├┤ │ ├─┤ ├┴┐├┤ └┬┘ + // └─┘└─┘┴ ┴ └─┘┴└─ ┴ ╩ ╩╚═╝ ╩ ╩ ╩ ╩ ╚═╝ ╩ ╩╩╚═╚═╝╚═╝ ┴ ┴└─┘ ┴ ┴ ┴ ┴ ┴└─┘ ┴ + // Unless the `mutateArgs` meta key is enabled, deep-clone the entire `where` clause. + if (!meta || !meta.mutateArgs) { + whereClause = _.cloneDeep(whereClause); + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Replace this naive implementation with something better. + // (This isn't great because it can mess up things like Buffers... which you + // shouldn't really be using in a `where` clause anyway, but still-- it makes + // it way harder to figure out what's wrong when folks find themselves in that + // situation. It could also affect any weird custom constraints for `type:'ref'` + // attrs. And if the current approach were also used in valuesToSet, newRecord, + // newRecords etc, it would matter even more.) 
+ // + // The full list of query keys that need to be carefully checked: + // • criteria + // • populates + // • newRecord + // • newRecords + // • valuesToSet + // • targetRecordIds + // • associatedIds + // + // The solution will probably mean distributing this deep clone behavior out + // to the various places it's liable to come up. In reality, this will be + // more performant anyway, since we won't be unnecessarily cloning things like + // big JSON values, etc. + // + // The important thing is that this should do shallow clones of deeply-nested + // control structures like top level query key dictionaries, criteria clauses, + // predicates/constraints/modifiers in `where` clauses, etc. + // + // > And remember: Don't deep-clone functions. + // > Note that, weirdly, it's fine to deep-clone dictionaries/arrays + // > that contain nested functions (they just don't get cloned-- they're + // > the same reference). But if you try to deep-clone a function at the + // > top level, it gets messed up. + // > + // > More background on this: https://trello.com/c/VLXPvyN5 + // > (Note that this behavior maintains backwards compatibility with Waterline <=0.12.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + }//fi + + + // ╔╦╗╔═╗╔═╗╔═╗╦ ╦╦ ╔╦╗ + // ║║║╣ ╠╣ ╠═╣║ ║║ ║ + // ═╩╝╚═╝╚ ╩ ╩╚═╝╩═╝╩ + // If no `where` clause was provided, give it a default value. + if (_.isUndefined(whereClause)) { + whereClause = {}; + }//>- + + // ╔═╗╔═╗╔╦╗╔═╗╔═╗╔╦╗╦╔╗ ╦╦ ╦╔╦╗╦ ╦ (COMPATIBILITY) + // ║ ║ ║║║║╠═╝╠═╣ ║ ║╠╩╗║║ ║ ║ ╚╦╝ + // ╚═╝╚═╝╩ ╩╩ ╩ ╩ ╩ ╩╚═╝╩╩═╝╩ ╩ ╩ + // COMPATIBILITY + // If where is `null`, turn it into an empty dictionary. + if (_.isNull(whereClause)) { + + console.warn(); + console.warn( + 'Deprecated: In previous versions of Waterline, the specified `where` '+'\n'+ + 'clause (`null`) would match ALL records in this model (`'+modelIdentity+'`). '+'\n'+ + 'So for compatibility, that\'s what just happened. 
If that is what you intended '+'\n'+ + 'then, in the future, please pass in `{}` instead, or simply omit the `where` '+'\n'+ + 'clause altogether-- both of which are more explicit and future-proof ways of '+'\n'+ + 'doing the same thing.\n'+ + '\n'+ + '> Warning: This backwards compatibility will be removed\n'+ + '> in a future release of Sails/Waterline. If this usage\n'+ + '> is left unchanged, then queries like this one will eventually \n'+ + '> fail with an error.' + ); + console.warn(); + + whereClause = {}; + }//>- + + + + // ┌┐┌┌─┐┬─┐┌┬┐┌─┐┬ ┬┌─┐┌─┐ ╔═╗╦╔═╦ ╦ ┌─┐┬─┐ ╦╔╗╔ ┌─┐┬ ┬┌─┐┬─┐┌┬┐┬ ┬┌─┐┌┐┌┌┬┐ + // ││││ │├┬┘│││├─┤│ │┌─┘├┤ ╠═╝╠╩╗╚╗╔╝ │ │├┬┘ ║║║║ └─┐├─┤│ │├┬┘ │ ├─┤├─┤│││ ││ + // ┘└┘└─┘┴└─┴ ┴┴ ┴┴─┘┴└─┘└─┘ ╩ ╩ ╩ ╚╝ └─┘┴└─ ╩╝╚╝ └─┘┴ ┴└─┘┴└─ ┴ ┴ ┴┴ ┴┘└┘─┴┘ + // ┌─ ┌─┐┌┬┐ ┌┬┐┬ ┬┌─┐ ┌┬┐┌─┐┌─┐ ┬ ┌─┐┬ ┬┌─┐┬ ┌─┐┌─┐ ╦ ╦╦ ╦╔═╗╦═╗╔═╗ ─┐ + // │─── ├─┤ │ │ ├─┤├┤ │ │ │├─┘ │ ├┤ └┐┌┘├┤ │ │ │├┤ ║║║╠═╣║╣ ╠╦╝║╣ ───│ + // └─ ┴ ┴ ┴ ┴ ┴ ┴└─┘ ┴ └─┘┴ ┴─┘└─┘ └┘ └─┘┴─┘ └─┘└ ╚╩╝╩ ╩╚═╝╩╚═╚═╝ ─┘ + // + // If the `where` clause itself is an array, string, or number, then we'll + // be able to understand it as a primary key, or as an array of primary key values. + // + // ``` + // where: [...] + // ``` + // + // ``` + // where: 'bar' + // ``` + // + // ``` + // where: 29 + // ``` + if (_.isArray(whereClause) || _.isNumber(whereClause) || _.isString(whereClause)) { + + var topLvlPkValuesOrPkValueInWhere = whereClause; + + // So expand that into the beginnings of a proper `where` dictionary. + // (This will be further normalized throughout the rest of this file-- + // this is just enough to get us to where we're working with a dictionary.) 
+ whereClause = {}; + whereClause[WLModel.primaryKey] = topLvlPkValuesOrPkValueInWhere; + + }//>- + + + + // ┬ ┬┌─┐┬─┐┬┌─┐┬ ┬ ┌┬┐┬ ┬┌─┐┌┬┐ ┌┬┐┬ ┬┌─┐ ╦ ╦╦ ╦╔═╗╦═╗╔═╗ ┌─┐┬ ┌─┐┬ ┬┌─┐┌─┐ + // └┐┌┘├┤ ├┬┘│├┤ └┬┘ │ ├─┤├─┤ │ │ ├─┤├┤ ║║║╠═╣║╣ ╠╦╝║╣ │ │ ├─┤│ │└─┐├┤ + // └┘ └─┘┴└─┴└ ┴ ┴ ┴ ┴┴ ┴ ┴ ┴ ┴ ┴└─┘ ╚╩╝╩ ╩╚═╝╩╚═╚═╝ └─┘┴─┘┴ ┴└─┘└─┘└─┘ + // ┬┌─┐ ┌┐┌┌─┐┬ ┬ ┌─┐ ╔╦╗╦╔═╗╔╦╗╦╔═╗╔╗╔╔═╗╦═╗╦ ╦ + // │└─┐ ││││ ││││ ├─┤ ║║║║ ║ ║║ ║║║║╠═╣╠╦╝╚╦╝ + // ┴└─┘ ┘└┘└─┘└┴┘ ┴ ┴ ═╩╝╩╚═╝ ╩ ╩╚═╝╝╚╝╩ ╩╩╚═ ╩ + // At this point, the `where` clause should be a dictionary. + if (!_.isObject(whereClause) || _.isArray(whereClause) || _.isFunction(whereClause)) { + throw flaverr('E_WHERE_CLAUSE_UNUSABLE', new Error( + 'If provided, `where` clause should be a dictionary. But instead, got: '+ + util.inspect(whereClause, {depth:5})+'' + )); + }//-• + + + + + // ██╗ ██████╗ ███████╗ ██████╗██╗ ██╗██████╗ ███████╗██╗ ██████╗ ███╗ ██╗ ██╗ + // ██╔╝ ██╔══██╗██╔════╝██╔════╝██║ ██║██╔══██╗██╔════╝██║██╔═══██╗████╗ ██║ ╚██╗ + // ██╔╝ ██████╔╝█████╗ ██║ ██║ ██║██████╔╝███████╗██║██║ ██║██╔██╗ ██║ ╚██╗ + // ╚██╗ ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗╚════██║██║██║ ██║██║╚██╗██║ ██╔╝ + // ╚██╗ ██║ ██║███████╗╚██████╗╚██████╔╝██║ ██║███████║██║╚██████╔╝██║ ╚████║ ██╔╝ + // ╚═╝ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═╝ + // ███████╗███████╗███████╗███████╗███████╗███████╗███████╗███████╗███████╗███████╗ + // ╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝ + // ┌┬┐┌─┐ ┌┬┐┬ ┬┌─┐ ╦═╗╔═╗╔═╗╦ ╦╦═╗╔═╗╦╦ ╦╔═╗ ╔═╗╦═╗╔═╗╦ ╦╦ + // │││ │ │ ├─┤├┤ ╠╦╝║╣ ║ ║ ║╠╦╝╚═╗║╚╗╔╝║╣ ║ ╠╦╝╠═╣║║║║ + // ─┴┘└─┘ ┴ ┴ ┴└─┘ ╩╚═╚═╝╚═╝╚═╝╩╚═╚═╝╩ ╚╝ ╚═╝ ╚═╝╩╚═╩ ╩╚╩╝╩═╝ + // Recursively iterate through the provided `where` clause, starting with the top level. + // + // > Note that we mutate the `where` clause IN PLACE here-- there is no return value + // > from this self-calling recursive function. 
+ // + // + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // EDGE CASES INVOLVING "VOID" AND "UNIVERSAL" + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // In order to provide the simplest possible interface for adapter implementors + // (i.e. fully-normalized stage 2&3 queries, w/ the fewest possible numbers of + // extraneous symbols) we need to handle certain edge cases in a special way. + // + // For example, an empty array of conjuncts/disjuncts is not EXACTLY invalid, per se. + // Instead, what exactly it means depends on the circumstances: + // + // |-------------------------|-------------------|-------------------|-------------------| + // | || Parent branch => | Parent is `and` | Parent is `or` | No parent | + // | \/ This branch | (conjunct, `∩`) | (disjunct, `∪`) | (at top level) | + // |-------------------------|===================|===================|===================| + // | | | | | + // | `{ and: [] }` | Rip out this | Throw to indicate | Replace entire | + // | `{ ??: { nin: [] } }` | conjunct. | parent will match | `where` clause | + // | `{}` | | EVERYTHING. | with `{}`. | + // | | | | | + // | Ξ : universal | x ∩ Ξ = x | x ∪ Ξ = Ξ | Ξ | + // | ("matches everything") | <> | <> | (universal) | + // |-------------------------|-------------------|-------------------|-------------------| + // | | | | | + // | `{ or: [] }` | Throw to indicate | Rip out this | Throw E_WOULD_... | + // | `{ ??: { in: [] } }` | parent will NEVER | disjunct. | RESULT_IN_NOTHING | + // | | match anything. | | error to indicate | + // | | | | that this query | + // | | | | is a no-op. 
| + // | | | | | + // | Ø : void | x ∩ Ø = Ø | x ∪ Ø = x | Ø | + // | ("matches nothing") | <> | <> | (void) | + // |-------------------------|-------------------|-------------------|-------------------| + // + // > For deeper reference, here are the boolean monotone laws: + // > https://en.wikipedia.org/wiki/Boolean_algebra#Monotone_laws + // > + // > See also the "identity" and "domination" laws from fundamental set algebra: + // > (the latter of which is roughly equivalent to the "annihilator" law from boolean algebra) + // > https://en.wikipedia.org/wiki/Algebra_of_sets#Fundamentals + // + // Anyways, as it turns out, this is exactly how it should work for ANY universal/void + // branch in the `where` clause. So as you can see below, we use this strategy to handle + // various edge cases involving `and`, `or`, `nin`, `in`, and `{}`. + // + // **There are some particular bits to notice in the implementation below:** + // • If removing this conjunct/disjunct would cause the parent predicate operator to have + // NO items, then we recursively apply the normalization all the way back up the tree, + // until we hit the root. That's taken care of above (in the place in the code where we + // make the recursive call). + // • If there is no containing conjunct/disjunct (i.e. because we're at the top-level), + // then we'll either throw a E_WOULD_RESULT_IN_NOTHING error (if this is an `or`), + // or revert the criteria to `{}` so it matches everything (if this is an `and`). + // That gets taken care of below. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // + // With that, let's begin. + try { + + // Initially invoke our self-calling, recursive function. + (function _recursiveStep(branch, recursionDepth, parent, indexInParent){ + + var MAX_RECURSION_DEPTH = 25; + if (recursionDepth > MAX_RECURSION_DEPTH) { + throw flaverr('E_WHERE_CLAUSE_UNUSABLE', new Error('This `where` clause seems to have a circular reference. 
Aborted automatically after reaching maximum recursion depth ('+MAX_RECURSION_DEPTH+').')); + }//-• + + //-• IWMIH, we know that `branch` is a dictionary. + // But that's about all we can trust. + // + // > In an already-fully-normalized `where` clause, we'd know that this dictionary + // > would ALWAYS be a valid conjunct/disjunct. But since we're doing the normalizing + // > here, we have to be more forgiving-- both for usability and backwards-compatibility. + + + // ╔═╗╔╦╗╦═╗╦╔═╗ ╦╔═╔═╗╦ ╦╔═╗ ┬ ┬┬┌┬┐┬ ┬ ╦ ╦╔╗╔╔╦╗╔═╗╔═╗╦╔╗╔╔═╗╔╦╗ ┬─┐┬ ┬┌─┐ + // ╚═╗ ║ ╠╦╝║╠═╝ ╠╩╗║╣ ╚╦╝╚═╗ ││││ │ ├─┤ ║ ║║║║ ║║║╣ ╠╣ ║║║║║╣ ║║ ├┬┘├─┤└─┐ + // ╚═╝ ╩ ╩╚═╩╩ ╩ ╩╚═╝ ╩ ╚═╝ └┴┘┴ ┴ ┴ ┴ ╚═╝╝╚╝═╩╝╚═╝╚ ╩╝╚╝╚═╝═╩╝ ┴└─┴ ┴└─┘ + // Strip out any keys with undefined values. + _.each(_.keys(branch), function (key){ + if (_.isUndefined(branch[key])) { + delete branch[key]; + } + }); + + + // Now count the keys. + var origBranchKeys = _.keys(branch); + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╔═╗╔╦╗╔═╗╔╦╗╦ ╦ ┬ ┬┬ ┬┌─┐┬─┐┌─┐ ┌─┐┬ ┌─┐┬ ┬┌─┐┌─┐ + // ├─┤├─┤│││ │││ ├┤ ║╣ ║║║╠═╝ ║ ╚╦╝ │││├─┤├┤ ├┬┘├┤ │ │ ├─┤│ │└─┐├┤ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╚═╝╩ ╩╩ ╩ ╩ └┴┘┴ ┴└─┘┴└─└─┘ └─┘┴─┘┴ ┴└─┘└─┘└─┘ + // If there are 0 keys... + if (origBranchKeys.length === 0) { + + // An empty dictionary means that this branch is universal (Ξ). + // That is, that it would match _everything_. + // + // So we'll throw a special signal indicating that to the previous recursive step. + // (or to our `catch` statement below, if we're at the top level-- i.e. an empty `where` clause.) + // + // > Note that we could just `return` instead of throwing if we're at the top level. + // > That's because it's a no-op and throwing would achieve exactly the same thing. + // > Since this is a hot code path, we might consider doing that as a future optimization. 
+ throw flaverr('E_UNIVERSAL', new Error('`{}` would match everything')); + + }//-• + + + + // ╔═╗╦═╗╔═╗╔═╗╔╦╗╦ ╦╦═╗╔═╗ ┌┐ ┬─┐┌─┐┌┐┌┌─┐┬ ┬ + // ╠╣ ╠╦╝╠═╣║ ║ ║ ║╠╦╝║╣ ├┴┐├┬┘├─┤││││ ├─┤ + // ╚ ╩╚═╩ ╩╚═╝ ╩ ╚═╝╩╚═╚═╝ └─┘┴└─┴ ┴┘└┘└─┘┴ ┴ + // Now we may need to denormalize (or "fracture") this branch. + // This is to normalize it such that it has only one key, with a + // predicate operator on the RHS. + // + // For example: + // ``` + // { + // name: 'foo', + // age: 2323, + // createdAt: 23238828382, + // hobby: { contains: 'ball' } + // } + // ``` + // ==> + // ``` + // { + // and: [ + // { name: 'foo' }, + // { age: 2323 } + // { createdAt: 23238828382 }, + // { hobby: { contains: 'ball' } } + // ] + // } + // ``` + if (origBranchKeys.length > 1) { + + // Loop over each key in the original branch and build an array of conjuncts. + var fracturedConjuncts = []; + _.each(origBranchKeys, function (origKey){ + + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // For now, we don't log this warning. + // It is still convenient to write criteria this way, and it's still + // a bit too soon to determine whether we should be recommending a change. + // + // > NOTE: There are two sides to this, for sure. + // > If you like this usage the way it is, please let @mikermcneil or + // > @particlebanana know. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // // Check if this is a key for a predicate operator. + // // e.g. the `or` in this example: + // // ``` + // // { + // // age: { '>': 28 }, + // // or: [ + // // { name: { 'startsWith': 'Jon' } }, + // // { name: { 'endsWith': 'Snow' } } + // // ] + // // } + // // ``` + // // + // // If so, we'll still automatically map it. + // // But also log a deprecation warning here, since it's more explicit to avoid + // // using predicates within multi-facet shorthand (i.e. could have used an additional + // // `and` predicate instead.) 
+ // // + // if (_.contains(PREDICATE_OPERATOR_KINDS, origKey)) { + // + // // console.warn(); + // // console.warn( + // // 'Deprecated: Within a `where` clause, it tends to be better (and certainly '+'\n'+ + // // 'more explicit) to use an `and` predicate when you need to group together '+'\n'+ + // // 'constraints side by side with additional predicates (like `or`). This was '+'\n'+ + // // 'automatically normalized on your behalf for compatibility\'s sake, but please '+'\n'+ + // // 'consider changing your usage in the future:'+'\n'+ + // // '```'+'\n'+ + // // util.inspect(branch, {depth:5})+'\n'+ + // // '```'+'\n'+ + // // '> Warning: This backwards compatibility may be removed\n'+ + // // '> in a future release of Sails/Waterline. If this usage\n'+ + // // '> is left unchanged, then queries like this one may eventually \n'+ + // // '> fail with an error.' + // // ); + // // console.warn(); + // + // }//>- + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + var conjunct = {}; + conjunct[origKey] = branch[origKey]; + fracturedConjuncts.push(conjunct); + + });// + + + // Change this branch so that it now contains a predicate consisting of + // the conjuncts we built above. + // + // > Note that we change the branch in-place (on its parent) AND update + // > our `branch` variable. If the branch has no parent (i.e. top lvl), + // > then we change the actual variable we're using instead. This will + // > change the return value from this utility. + branch = { + and: fracturedConjuncts + }; + + if (parent) { + parent[indexInParent] = branch; + } + else { + whereClause = branch; + } + + }//>- + + + // --• IWMIH, then we know there is EXACTLY one key. + var soleBranchKey = _.keys(branch)[0]; + + + // ┬ ┬┌─┐┌┐┌┌┬┐┬ ┌─┐ ╔═╗╔═╗╔╗╔╔═╗╔╦╗╦═╗╔═╗╦╔╗╔╔╦╗ + // ├─┤├─┤│││ │││ ├┤ ║ ║ ║║║║╚═╗ ║ ╠╦╝╠═╣║║║║ ║ + // ┴ ┴┴ ┴┘└┘─┴┘┴─┘└─┘ ╚═╝╚═╝╝╚╝╚═╝ ╩ ╩╚═╩ ╩╩╝╚╝ ╩ + // If this key is NOT a predicate (`and`/`or`)... 
+ if (!_.contains(PREDICATE_OPERATOR_KINDS, soleBranchKey)) { + + // ...then we know we're dealing with a constraint. + + // ╔═╗╦═╗╔═╗╔═╗╔╦╗╦ ╦╦═╗╔═╗ ┌─┐┌─┐┌┬┐┌─┐┬ ┌─┐─┐ ┬ ┌─┐┌─┐┌┐┌┌─┐┌┬┐┬─┐┌─┐┬┌┐┌┌┬┐ + // ╠╣ ╠╦╝╠═╣║ ║ ║ ║╠╦╝║╣ │ │ ││││├─┘│ ├┤ ┌┴┬┘ │ │ ││││└─┐ │ ├┬┘├─┤││││ │ + // ╚ ╩╚═╩ ╩╚═╝ ╩ ╚═╝╩╚═╚═╝ └─┘└─┘┴ ┴┴ ┴─┘└─┘┴ └─ └─┘└─┘┘└┘└─┘ ┴ ┴└─┴ ┴┴┘└┘ ┴ + // ┌─ ┬┌─┐ ┬┌┬┐ ┬┌─┐ ┌┬┐┬ ┬┬ ┌┬┐┬ ┬┌─┌─┐┬ ┬ ─┐ + // │ │├┤ │ │ │└─┐ ││││ ││ │ │───├┴┐├┤ └┬┘ │ + // └─ ┴└ ┴ ┴ ┴└─┘ ┴ ┴└─┘┴─┘┴ ┴ ┴ ┴└─┘ ┴ ─┘ + // Before proceeding, we may need to fracture the RHS of this key. + // (if it is a complex constraint w/ multiple keys-- like a "range" constraint) + // + // > This is to normalize it such that every complex constraint ONLY EVER has one key. + // > In order to do this, we may need to reach up to our highest ancestral predicate. + var isComplexConstraint = _.isObject(branch[soleBranchKey]) && !_.isArray(branch[soleBranchKey]) && !_.isFunction(branch[soleBranchKey]); + // If this complex constraint has multiple keys... + if (isComplexConstraint && _.keys(branch[soleBranchKey]).length > 1){ + + // Then fracture it before proceeding. + var complexConstraint = branch[soleBranchKey]; + + // Loop over each modifier in the complex constraint and build an array of conjuncts. + var fracturedModifierConjuncts = []; + _.each(complexConstraint, function (modifier, modifierKind){ + var conjunct = {}; + conjunct[soleBranchKey] = {}; + conjunct[soleBranchKey][modifierKind] = modifier; + fracturedModifierConjuncts.push(conjunct); + });// + + // Change this branch so that it now contains a predicate consisting of + // the new conjuncts we just built for these modifiers. + // + // > Note that we change the branch in-place (on its parent) AND update + // > our `branch` variable. If the branch has no parent (i.e. top lvl), + // > then we change the actual variable we're using instead. This will + // > change the return value from this utility. 
+ // + branch = { + and: fracturedModifierConjuncts + }; + + if (parent) { + parent[indexInParent] = branch; + } + else { + whereClause = branch; + } + + // > Also note that we update the sole branch key variable. + soleBranchKey = _.keys(branch)[0]; + + // Now, since we know our branch is a predicate, we'll continue on. + // (see predicate handling code below) + + } + // Otherwise, we can go ahead and normalize the constraint, then bail. + else { + // ╔╗╔╔═╗╦═╗╔╦╗╔═╗╦ ╦╔═╗╔═╗ ╔═╗╔═╗╔╗╔╔═╗╔╦╗╦═╗╔═╗╦╔╗╔╔╦╗ + // ║║║║ ║╠╦╝║║║╠═╣║ ║╔═╝║╣ ║ ║ ║║║║╚═╗ ║ ╠╦╝╠═╣║║║║ ║ + // ╝╚╝╚═╝╩╚═╩ ╩╩ ╩╩═╝╩╚═╝╚═╝ ╚═╝╚═╝╝╚╝╚═╝ ╩ ╩╚═╩ ╩╩╝╚╝ ╩ + // Normalize the constraint itself. + // (note that this checks the RHS, but it also checks the key aka constraint target -- i.e. the attr name) + try { + branch[soleBranchKey] = normalizeConstraint(branch[soleBranchKey], soleBranchKey, modelIdentity, orm, meta); + } catch (e) { + switch (e.code) { + + case 'E_CONSTRAINT_NOT_USABLE': + throw flaverr('E_WHERE_CLAUSE_UNUSABLE', new Error( + 'Could not filter by `'+soleBranchKey+'`: '+ e.message + )); + + case 'E_CONSTRAINT_WOULD_MATCH_EVERYTHING': + throw flaverr('E_UNIVERSAL', e); + + case 'E_CONSTRAINT_WOULD_MATCH_NOTHING': + throw flaverr('E_VOID', e); + + default: + throw e; + + } + }// + + // Then bail early. + return; + + }// + + }// + + + + // >-• IWMIH, then we know that this branch's sole key is a predicate (`and`/`or`). + // (If it isn't, then our code above has a bug.) 
+ assert(soleBranchKey === 'and' || soleBranchKey === 'or', 'Should never have made it here if the sole branch key is not `and` or `or`!'); + + + + // ██╗ ██╗ █████╗ ███╗ ██╗██████╗ ██╗ ███████╗ + // ██║ ██║██╔══██╗████╗ ██║██╔══██╗██║ ██╔════╝ + // ███████║███████║██╔██╗ ██║██║ ██║██║ █████╗ + // ██╔══██║██╔══██║██║╚██╗██║██║ ██║██║ ██╔══╝ + // ██║ ██║██║ ██║██║ ╚████║██████╔╝███████╗███████╗ + // ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝╚═════╝ ╚══════╝╚══════╝ + // + // ██████╗ ██████╗ ███████╗██████╗ ██╗ ██████╗ █████╗ ████████╗███████╗ + // ██╔══██╗██╔══██╗██╔════╝██╔══██╗██║██╔════╝██╔══██╗╚══██╔══╝██╔════╝ + // ██████╔╝██████╔╝█████╗ ██║ ██║██║██║ ███████║ ██║ █████╗ + // ██╔═══╝ ██╔══██╗██╔══╝ ██║ ██║██║██║ ██╔══██║ ██║ ██╔══╝ + // ██║ ██║ ██║███████╗██████╔╝██║╚██████╗██║ ██║ ██║ ███████╗ + // ╚═╝ ╚═╝ ╚═╝╚══════╝╚═════╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ + // + // + // ``` ``` + // { { + // or: [...] and: [...] + // } } + // ``` ``` + + var conjunctsOrDisjuncts = branch[soleBranchKey]; + + + // RHS of a predicate must always be an array. + if (!_.isArray(conjunctsOrDisjuncts)) { + throw flaverr('E_WHERE_CLAUSE_UNUSABLE', new Error('Expected an array at `'+soleBranchKey+'`, but instead got: '+util.inspect(conjunctsOrDisjuncts,{depth: 5})+'\n(`and`/`or` should always be provided with an array on the right-hand side.)')); + }//-• + + + // Now loop over each conjunct or disjunct within this AND/OR predicate. + // Along the way, track any that will need to be trimmed out. + var indexesToRemove = []; + _.each(conjunctsOrDisjuncts, function (conjunctOrDisjunct, i){ + + // If conjunct/disjunct is `undefined`, trim it out and bail to the next one. + if (conjunctsOrDisjuncts[i] === undefined) { + indexesToRemove.push(i); + return; + }//• + + // Check that each conjunct/disjunct is a plain dictionary, no funny business. 
+ if (!_.isObject(conjunctOrDisjunct) || _.isArray(conjunctOrDisjunct) || _.isFunction(conjunctOrDisjunct)) { + throw flaverr('E_WHERE_CLAUSE_UNUSABLE', new Error('Expected each item within an `and`/`or` predicate\'s array to be a dictionary (plain JavaScript object). But instead, got: `'+util.inspect(conjunctOrDisjunct,{depth: 5})+'`')); + } + + // Recursive call + try { + _recursiveStep(conjunctOrDisjunct, recursionDepth+1, conjunctsOrDisjuncts, i); + } catch (e) { + switch (e.code) { + + // If this conjunct or disjunct is universal (Ξ)... + case 'E_UNIVERSAL': + + // If this item is a disjunct, then annihilate our branch by throwing this error + // on up for the previous recursive step to take care of. + // ``` + // x ∪ Ξ = Ξ + // ``` + if (soleBranchKey === 'or') { + throw e; + }//-• + + // Otherwise, rip it out of the array. + // ``` + // x ∩ Ξ = x + // ``` + indexesToRemove.push(i); + break; + + // If this conjunct or disjunct is void (Ø)... + case 'E_VOID': + + // If this item is a conjunct, then annihilate our branch by throwing this error + // on up for the previous recursive step to take care of. + // ``` + // x ∩ Ø = Ø + // ``` + if (soleBranchKey === 'and') { + throw e; + }//-• + + // Otherwise, rip it out of the array. + // ``` + // x ∪ Ø = x + // ``` + indexesToRemove.push(i); + break; + + default: + throw e; + } + }// + + });// + + + // If any conjuncts/disjuncts were scheduled for removal above, + // go ahead and take care of that now. + if (indexesToRemove.length > 0) { + for (var i = 0; i < indexesToRemove.length; i++) { + var indexToRemove = indexesToRemove[i] - i; + conjunctsOrDisjuncts.splice(indexToRemove, 1); + }// + }//>- + + + // If the array is NOT EMPTY, then this is the normal case, and we can go ahead and bail. + if (conjunctsOrDisjuncts.length > 0) { + return; + }//-• + + + + // Otherwise, the predicate array is empty (e.g. 
`{ or: [] }` / `{ and: [] }`) + // + // For our purposes here, we just need to worry about signaling either "universal" or "void". + // (see table above for more information). + + // If this branch is universal (i.e. matches everything / `{and: []}`) + // ``` + // Ξ + // ``` + if (soleBranchKey === 'and') { + throw flaverr('E_UNIVERSAL', new Error('`{and: []}` with an empty array would match everything.')); + } + // Otherwise, this branch is void (i.e. matches nothing / `{or: []}`) + // ``` + // Ø + // ``` + else { + throw flaverr('E_VOID', new Error('`{or: []}` with an empty array would match nothing.')); + } + + })(whereClause, 0, undefined, undefined); + // + + } catch (e) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // Note: + // This `catch` block exists to handle top-level E_UNIVERSAL and E_VOID exceptions. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + switch (e.code) { + + // If an E_UNIVERSAL exception made it all the way up here, then we know that + // this `where` clause should match EVERYTHING. So we set it to `{}`. + case 'E_UNIVERSAL': + whereClause = {}; + break; + + // If an E_VOID exception made it all the way up here, then we know that + // this `where` clause would match NOTHING. So we throw `E_WOULD_RESULT_IN_NOTHING` + // to pass that message along. 
+ case 'E_VOID': + throw flaverr('E_WOULD_RESULT_IN_NOTHING', new Error('Would match nothing')); + + default: + throw e; + } + }// + + // ███████╗███████╗███████╗███████╗███████╗███████╗███████╗███████╗███████╗███████╗ + // ╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝╚══════╝ + // + // ██╗ ██╗ ██████╗ ███████╗ ██████╗██╗ ██╗██████╗ ███████╗██╗ ██████╗ ███╗ ██╗ ██╗ + // ██╔╝ ██╔╝ ██╔══██╗██╔════╝██╔════╝██║ ██║██╔══██╗██╔════╝██║██╔═══██╗████╗ ██║ ╚██╗ + // ██╔╝ ██╔╝ ██████╔╝█████╗ ██║ ██║ ██║██████╔╝███████╗██║██║ ██║██╔██╗ ██║ ╚██╗ + // ╚██╗ ██╔╝ ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗╚════██║██║██║ ██║██║╚██╗██║ ██╔╝ + // ╚██╗██╔╝ ██║ ██║███████╗╚██████╗╚██████╔╝██║ ██║███████║██║╚██████╔╝██║ ╚████║ ██╔╝ + // ╚═╝╚═╝ ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═╝ + // + + + // Return the modified `where` clause. + return whereClause; + +}; diff --git a/lib/waterline/utils/query/process-all-records.js b/lib/waterline/utils/query/process-all-records.js new file mode 100644 index 000000000..a70e0c1a0 --- /dev/null +++ b/lib/waterline/utils/query/process-all-records.js @@ -0,0 +1,644 @@ +/** + * Module dependencies + */ + +var assert = require('assert'); +var util = require('util'); +var _ = require('@sailshq/lodash'); +// var EA = require('encrypted-attr'); « this is required below for node compat. +var flaverr = require('flaverr'); +var rttc = require('rttc'); +var eachRecordDeep = require('waterline-utils').eachRecordDeep; + +/** + * Module constants + */ + +var WARNING_SUFFIXES = { + + MIGHT_BE_YOUR_FAULT: + '\n'+ + '> You are seeing this warning because there are records in your database that don\'t\n'+ + '> match up with your models. This is often the result of a model definition being\n'+ + '> changed without also migrating leftover data. But it could also be because records\n'+ + '> were added or modified in your database from somewhere outside of Sails/Waterline\n'+ + '> (e.g. phpmyadmin, or another app). 
In either case, to make this warning go away,\n'+
+ '> you have a few options. First of all, you could change your model definition so\n'+
+ '> that it matches the existing records in your database. Or you could update/destroy\n'+
+ '> the old records in your database; either by hand, or using a migration script.\n'+
+ '> \n'+
+ (process.env.NODE_ENV !== 'production' ? '> (For example, to wipe all data, you might just use `migrate: drop`.)\n' : '')+
+ '> \n'+
+ '> More rarely, this warning could mean there is a bug in the adapter itself. If you\n'+
+ '> believe that is the case, then please contact the maintainer of this adapter by opening\n'+
+ '> an issue, or visit http://sailsjs.com/support for help.\n',
+
+ HARD_TO_SEE_HOW_THIS_COULD_BE_YOUR_FAULT:
+ '\n'+
+ '> This is usually caused by a bug in the adapter itself. If you believe that\n'+
+ '> might be the case here, then please contact the maintainer of this adapter by\n'+
+ '> opening an issue, or visit http://sailsjs.com/support for help.\n'
+
+};
+
+
+/**
+ * processAllRecords()
+ *
+ * Process potentially-populated records coming back from the adapter, AFTER they've already had
+ * their keys transformed from column names back to attribute names and had populated data reintegrated.
+ * To reiterate that: this function takes logical records, **NOT physical records**.
+ *
+ * `processAllRecords()` has 3 responsibilities:
+ *
+ * (1) Verify the integrity of the provided records, and any populated child records
+ * (Note: If present, child records only ever go 1 level deep in Waterline currently.)
+ * > At the moment, this serves primarily as a way to check for stale, unmigrated data that
+ * > might exist in the database, as well as any unexpected adapter compatibility problems. 
+ * > For the full specification and expected behavior, see: + * > https://docs.google.com/spreadsheets/d/1whV739iW6O9SxRZLCIe2lpvuAUqm-ie7j7tn_Pjir3s/edit#gid=1927470769 + * + * (2) Attach custom toJSON() functions to records, if the model says to do so. + * + * (3) Decrypt any data that was encrypted at rest. + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Array} records + * An array of records. (These are logical records -- NOT physical records!!) + * (WARNING: This array and its deeply-nested contents might be mutated in-place!!!) + * + * @param {Ref?} meta + * The `meta` query key for the query. + * + * @param {String} modelIdentity + * The identity of the model these records came from (e.g. "pet" or "user") + * > Useful for looking up the Waterline model and accessing its attribute definitions. + * + * @param {Ref} orm + * The Waterline ORM instance. + * > Useful for accessing the model definitions. + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ +module.exports = function processAllRecords(records, meta, modelIdentity, orm) { + // console.time('processAllRecords'); + + + if (!_.isArray(records)) { + throw new Error('Consistency violation: Expected `records` to be an array. But instead, got: '+util.inspect(records,{depth:5})+''); + } + + if (!_.isUndefined(meta) && !_.isObject(meta)) { + throw new Error('Consistency violation: Expected `meta` to be a dictionary, or undefined. But instead, got: '+util.inspect(meta,{depth:5})+''); + } + + if (!_.isString(modelIdentity) || modelIdentity === '') { + throw new Error('Consistency violation: Expected `modelIdentity` to be a non-empty string. But instead, got: '+util.inspect(modelIdentity,{depth:5})+''); + } + + + // Determine whether to skip record verification below. 
+ // (we always do it unless the `skipRecordVerification` meta key is explicitly truthy.)
+ var skippingRecordVerification = meta && meta.skipRecordVerification;
+
+
+ // Iterate over each parent record and any nested arrays/dictionaries that
+ // appear to be populated child records.
+ eachRecordDeep(records, function _eachParentOrChildRecord(record, WLModel){
+
+ // First, check the results to verify compliance with the adapter spec.,
+ // as well as any issues related to stale data that might not have
+ // been migrated to keep up with the logical schema (`type`, etc. in
+ // attribute definitions).
+ if (!skippingRecordVerification) {
+
+
+ // ███╗ ██╗ ██████╗ ███╗ ██╗ █████╗ ████████╗████████╗██████╗ ██╗██████╗ ██╗ ██╗████████╗███████╗
+ // ████╗ ██║██╔═══██╗████╗ ██║ ██╔══██╗╚══██╔══╝╚══██╔══╝██╔══██╗██║██╔══██╗██║ ██║╚══██╔══╝██╔════╝
+ // ██╔██╗ ██║██║ ██║██╔██╗ ██║█████╗███████║ ██║ ██║ ██████╔╝██║██████╔╝██║ ██║ ██║ █████╗
+ // ██║╚██╗██║██║ ██║██║╚██╗██║╚════╝██╔══██║ ██║ ██║ ██╔══██╗██║██╔══██╗██║ ██║ ██║ ██╔══╝
+ // ██║ ╚████║╚██████╔╝██║ ╚████║ ██║ ██║ ██║ ██║ ██║ ██║██║██████╔╝╚██████╔╝ ██║ ███████╗
+ // ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚═════╝ ╚═════╝ ╚═╝ ╚══════╝
+ //
+ // ██╗ ██╗███████╗██╗ ██╗███████╗
+ // ██║ ██╔╝██╔════╝╚██╗ ██╔╝██╔════╝
+ // █████╔╝ █████╗ ╚████╔╝ ███████╗
+ // ██╔═██╗ ██╔══╝ ╚██╔╝ ╚════██║
+ // ██║ ██╗███████╗ ██║ ███████║
+ // ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚══════╝
+ //
+ // If this model is defined as `schema: true`, then check the returned record
+ // for any extraneous keys which do not correspond with declared attributes.
+ // If any are found, then log a warning.
+ if (WLModel.hasSchema) {
+
+ var nonAttrKeys = _.difference(_.keys(record), _.keys(WLModel.attributes));
+ if (nonAttrKeys.length > 0) {
+
+ // Since this is `schema: true`, the adapter method should have
+ // received an explicit `select` clause in the S3Q `criteria`
+ // query key, and thus it should not have sent back any unrecognized
+ // attributes (or in cases where there is no `criteria` query key, e.g.
+ // a create(), the adapter should never send back extraneous properties
+ // anyways, because Waterline core should have stripped any such extra
+ // properties off on the way _in_ to the adapter).
+ //
+ // So if we made it here, we can safely assume that this is due
+ // to an issue in the _adapter_ -- not some problem with unmigrated
+ // data.
+ console.warn('\n'+
+ 'Warning: A record in this result set has extraneous properties ('+nonAttrKeys+')\n'+
+ 'that, after adjusting for any custom columnNames, still do not correspond to\n'+
+ 'any recognized attributes of this model (`'+WLModel.identity+'`).\n'+
+ 'Since this model is defined as `schema: true`, this behavior is unexpected.\n'+
+ // ====================================================================================
+ // Removed this for the sake of brevity-- could bring it back if deemed helpful.
+ // ==================================================================================== + // 'This problem could be the result of an adapter method not properly observing\n'+ + // 'the `select` clause it receives in the incoming criteria (or otherwise sending\n'+ + // 'extra, unexpected properties on records that were left over from old data).\n'+ + // ==================================================================================== + WARNING_SUFFIXES.MIGHT_BE_YOUR_FAULT + ); + + }// + + }// + + + + // ██╗ ██╗███████╗██╗ ██╗███████╗ ██╗ ██╗ ██╗ ██████╗ ██╗ ██╗███████╗ + // ██║ ██╔╝██╔════╝╚██╗ ██╔╝██╔════╝ ██║ ██║ ██╔╝ ██╔══██╗██║ ██║██╔════╝ + // █████╔╝ █████╗ ╚████╔╝ ███████╗ ██║ █╗ ██║ ██╔╝ ██████╔╝███████║███████╗ + // ██╔═██╗ ██╔══╝ ╚██╔╝ ╚════██║ ██║███╗██║ ██╔╝ ██╔══██╗██╔══██║╚════██║ + // ██║ ██╗███████╗ ██║ ███████║ ╚███╔███╔╝██╔╝ ██║ ██║██║ ██║███████║ + // ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚══════╝ ╚══╝╚══╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝ + // + // ██████╗ ███████╗ ██╗ ██╗███╗ ██╗██████╗ ███████╗███████╗██╗███╗ ██╗███████╗██████╗ + // ██╔═══██╗██╔════╝ ██║ ██║████╗ ██║██╔══██╗██╔════╝██╔════╝██║████╗ ██║██╔════╝██╔══██╗ + // ██║ ██║█████╗ ██║ ██║██╔██╗ ██║██║ ██║█████╗ █████╗ ██║██╔██╗ ██║█████╗ ██║ ██║ + // ██║ ██║██╔══╝ ██║ ██║██║╚██╗██║██║ ██║██╔══╝ ██╔══╝ ██║██║╚██╗██║██╔══╝ ██║ ██║ + // ╚██████╔╝██║ ╚██████╔╝██║ ╚████║██████╔╝███████╗██║ ██║██║ ╚████║███████╗██████╔╝ + // ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝╚═════╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═══╝╚══════╝╚═════╝ + // + // Loop over the properties of the record. + _.each(_.keys(record), function (key){ + + // Ensure that the value was not explicitly sent back as `undefined`. + // (but if it was, log a warning. Note that we don't strip it out like + // we would normally, because we're careful not to munge data in this utility.) + if(_.isUndefined(record[key])){ + console.warn('\n'+ + 'Warning: A database adapter should never send back records that have `undefined`\n'+ + 'on the RHS of any property (e.g. `foo: undefined`). 
But after transforming\n'+ + 'columnNames back to attribute names for the model `' + modelIdentity + '`, one\n'+ + 'of the records sent back from this adapter has a property (`'+key+'`) with\n'+ + '`undefined` on the right-hand side.\n' + + WARNING_SUFFIXES.HARD_TO_SEE_HOW_THIS_COULD_BE_YOUR_FAULT + ); + }//>- + + }); + + + + // Now, loop over each attribute in the model. + _.each(WLModel.attributes, function (attrDef, attrName){ + + + // ██████╗ ██████╗ ██╗███╗ ███╗ █████╗ ██████╗ ██╗ ██╗ ██╗ ██╗███████╗██╗ ██╗ + // ██╔══██╗██╔══██╗██║████╗ ████║██╔══██╗██╔══██╗╚██╗ ██╔╝ ██║ ██╔╝██╔════╝╚██╗ ██╔╝ + // ██████╔╝██████╔╝██║██╔████╔██║███████║██████╔╝ ╚████╔╝ █████╔╝ █████╗ ╚████╔╝ + // ██╔═══╝ ██╔══██╗██║██║╚██╔╝██║██╔══██║██╔══██╗ ╚██╔╝ ██╔═██╗ ██╔══╝ ╚██╔╝ + // ██║ ██║ ██║██║██║ ╚═╝ ██║██║ ██║██║ ██║ ██║ ██║ ██╗███████╗ ██║ + // ╚═╝ ╚═╝ ╚═╝╚═╝╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚══════╝ ╚═╝ + // + if (attrName === WLModel.primaryKey) { + + assert(!attrDef.allowNull, 'The primary key attribute should never be defined with `allowNull:true`. (This should have already been caught in wl-schema during ORM initialization! Please report this at http://sailsjs.com/bugs)'); + + // Do quick, incomplete verification that a valid primary key value was sent back. + var isProbablyValidPkValue = ( + record[attrName] !== '' && + record[attrName] !== 0 && + ( + _.isString(record[attrName]) || _.isNumber(record[attrName]) + ) + ); + + if (!isProbablyValidPkValue) { + console.warn('\n'+ + 'Warning: Records sent back from a database adapter should always have a valid property\n'+ + 'that corresponds with the primary key attribute (`'+WLModel.primaryKey+'`). 
But in this result set,\n'+ + 'after transforming columnNames back to attribute names for model `' + modelIdentity + '`,\n'+ + 'there is a record with a missing or invalid `'+WLModel.primaryKey+'`.\n'+ + 'Record:\n'+ + '```\n'+ + util.inspect(record, {depth:5})+'\n'+ + '```\n'+ + WARNING_SUFFIXES.MIGHT_BE_YOUR_FAULT + ); + } + + } + // ███████╗██╗███╗ ██╗ ██████╗ ██╗ ██╗██╗ █████╗ ██████╗ + // ██╔════╝██║████╗ ██║██╔════╝ ██║ ██║██║ ██╔══██╗██╔══██╗ + // ███████╗██║██╔██╗ ██║██║ ███╗██║ ██║██║ ███████║██████╔╝ + // ╚════██║██║██║╚██╗██║██║ ██║██║ ██║██║ ██╔══██║██╔══██╗ + // ███████║██║██║ ╚████║╚██████╔╝╚██████╔╝███████╗██║ ██║██║ ██║ + // ╚══════╝╚═╝╚═╝ ╚═══╝ ╚═════╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ + // + else if (attrDef.model) { + + assert(!attrDef.allowNull, 'Singular ("model") association attributes should never be defined with `allowNull:true` (they always allow null, by nature!). (This should have already been caught in wl-schema during ORM initialization! Please report this at http://sailsjs.com/bugs)'); + + // If record does not define a value for a singular association, that's ok. + // It may have been deliberately excluded by the `select` or `omit` clause. + if (_.isUndefined(record[attrName])) { + } + // If the value for this singular association came back as `null`, then that + // might be ok too-- it could mean that the association is empty. + // (Note that it might also mean that it is set, and that population was attempted, + // but that it failed; presumably because the associated child record no longer exists) + else if (_.isNull(record[attrName])) { + } + // If the value came back as something that looks vaguely like a valid primary key value, + // then that's probably ok-- it could mean that the association was set, but not populated. 
+ else if ((_.isString(record[attrName]) || _.isNumber(record[attrName])) && record[attrName] !== '' && record[attrName] !== 0 && !_.isNaN(record[attrName])) { + } + // If the value came back as a dictionary, then that might be ok-- it could mean + // the association was successfully populated. + else if (_.isObject(record[attrName]) && !_.isArray(record[attrName]) && !_.isFunction(record[attrName])) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: we could check this more carefully in the future by providing more + // information to this utility-- specifically, the `populates` key from the S2Q. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + } + // Otherwise, the value is invalid. + else { + console.warn('\n'+ + 'An association in a result record has an unexpected data type. Since `'+attrName+'` is\n'+ + 'a singular (association), it should come back from Waterline as either:\n'+ + '• `null` (if not populated and set to null explicitly, or populated but orphaned)\n'+ + '• a dictionary (if successfully populated), or\n'+ + '• a valid primary key value for the associated model (if set + not populated)\n'+ + 'But for this record, after converting column names back into attribute names, it\n'+ + 'wasn\'t any of those things.\n'+ + 'Record:\n'+ + '```\n'+ + util.inspect(record, {depth:5})+'\n'+ + '```\n'+ + WARNING_SUFFIXES.MIGHT_BE_YOUR_FAULT + ); + } + + } + // ██████╗ ██╗ ██╗ ██╗██████╗ █████╗ ██╗ + // ██╔══██╗██║ ██║ ██║██╔══██╗██╔══██╗██║ + // ██████╔╝██║ ██║ ██║██████╔╝███████║██║ + // ██╔═══╝ ██║ ██║ ██║██╔══██╗██╔══██║██║ + // ██║ ███████╗╚██████╔╝██║ ██║██║ ██║███████╗ + // ╚═╝ ╚══════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝ + // + else if (attrDef.collection) { + assert(!attrDef.allowNull, 'Plural ("collection") association attributes should never be defined with `allowNull:true`. (This should have already been caught in wl-schema during ORM initialization! 
Please report this at http://sailsjs.com/bugs)'); + + // If record does not define a value for a plural association, that's ok. + // That probably just means it was not populated. + if (_.isUndefined(record[attrName])) { + } + // If the value for this singular association came back as an array, then + // that might be ok too-- it probably means that the association was populated. + else if (_.isArray(record[attrName])) { + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: we could check that it is an array of valid child records, + // instead of just verifying that it is an array of _some kind_. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + } + // Otherwise, the value is invalid. + else { + console.warn('\n'+ + 'An association in a result record has an unexpected data type. Since `'+attrName+'` is\n'+ + 'a plural (association), it should come back from Waterline as either:\n'+ + '• `undefined` (if not populated), or\n'+ + '• an array of child records (if populated)\n'+ + 'But for this record, it wasn\'t any of those things.\n'+ + // Note that this could mean there was something else already there + // (imagine changing your model to use a plural association instead + // of an embedded array from a `type: 'json'` attribute) + 'Record:\n'+ + '```\n'+ + util.inspect(record, {depth:5})+'\n'+ + '```\n'+ + WARNING_SUFFIXES.MIGHT_BE_YOUR_FAULT + ); + } + + } + // ███████╗████████╗ █████╗ ███╗ ███╗██████╗ ███████╗ + // ██╔════╝╚══██╔══╝██╔══██╗████╗ ████║██╔══██╗██╔════╝ + // ███████╗ ██║ ███████║██╔████╔██║██████╔╝███████╗ + // ╚════██║ ██║ ██╔══██║██║╚██╔╝██║██╔═══╝ ╚════██║ + // ███████║ ██║ ██║ ██║██║ ╚═╝ ██║██║ ███████║ + // ╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚══════╝ + // + else if (attrDef.autoCreatedAt || attrDef.autoUpdatedAt) { + + assert(!attrDef.allowNull, 'Timestamp attributes should never be defined with `allowNull:true`. 
(This should have already been caught in wl-schema during ORM initialization! Please report this at http://sailsjs.com/bugs)'); + + // If there is no value defined on the record for this attribute... + if (_.isUndefined(record[attrName])) { + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Log a warning (but note that, to really get this right, we'd need access to + // a clone of the `omit` and `select` clauses from the s2q criteria, plus the `populates` + // query key from the s2q criteria -- probably also a clone of that) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + } + // Otherwise, we know there's SOMETHING there at least. + else { + + // Do quick, very incomplete verification that a valid timestamp was sent back. + var isProbablyValidTimestamp = ( + record[attrName] !== '' && + record[attrName] !== 0 && + ( + _.isString(record[attrName]) || _.isNumber(record[attrName]) || _.isDate(record[attrName]) + ) + ); + + if (!isProbablyValidTimestamp) { + console.warn('\n'+ + 'Warning: After transforming columnNames back to attribute names for model `' + modelIdentity + '`,\n'+ + ' a record in the result has a value with an unexpected data type for property `'+attrName+'`.\n'+ + 'The model\'s `'+attrName+'` attribute declares itself an auto timestamp with\n'+ + '`type: \''+attrDef.type+'\'`, but instead of a valid timestamp, the actual value\n'+ + 'in the record is:\n'+ + '```\n'+ + util.inspect(record[attrName],{depth:5})+'\n'+ + '```\n'+ + WARNING_SUFFIXES.MIGHT_BE_YOUR_FAULT + ); + } + + }// + + } + // ███╗ ███╗██╗███████╗ ██████╗ ██╗████████╗██╗ ██╗██████╗ ███████╗██╗ + // ████╗ ████║██║██╔════╝██╔════╝ ██╔╝╚══██╔══╝╚██╗ ██╔╝██╔══██╗██╔════╝╚██╗ + // ██╔████╔██║██║███████╗██║ ██║ ██║ ╚████╔╝ ██████╔╝█████╗ ██║ + // ██║╚██╔╝██║██║╚════██║██║ ██║ ██║ ╚██╔╝ ██╔═══╝ ██╔══╝ ██║ + // ██║ ╚═╝ ██║██║███████║╚██████╗██╗ ╚██╗ ██║ ██║ ██║ ███████╗██╔╝ + // ╚═╝ 
╚═╝╚═╝╚══════╝ ╚═════╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ + // + else { + + // Sanity check: + if (attrDef.type === 'json' || attrDef.type === 'ref') { + assert(!attrDef.allowNull, '`type:\'json\'` and `type:\'ref\'` attributes should never be defined with `allowNull:true`. (This should have already been caught in wl-schema during ORM initialization! Please report this at http://sailsjs.com/bugs)'); + } + + // If there is no value defined on the record for this attribute... + if (_.isUndefined(record[attrName])) { + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Log a warning (but note that, to really get this right, we'd need access to + // a clone of the `omit` and `select` clauses from the s2q criteria, plus the `populates` + // query key from the s2q criteria -- probably also a clone of that) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + } + // If the value is `null`, and the attribute has `allowNull:true`, then its ok. + else if (_.isNull(record[attrName]) && attrDef.allowNull === true) { + // Nothing to validate here. + } + // Otherwise, we'll need to validate the value. + else { + + // Strictly validate the value vs. the attribute's `type`, and if it is + // obviously incorrect, then log a warning (but don't actually coerce it.) 
+ try { + rttc.validateStrict(attrDef.type, record[attrName]); + } catch (e) { + switch (e.code) { + case 'E_INVALID': + + if (_.isNull(record[attrName])) { + console.warn('\n'+ + 'Warning: After transforming columnNames back to attribute names for model `' + modelIdentity + '`,\n'+ + ' a record in the result has a value of `null` for property `'+attrName+'`.\n'+ + 'Since the `'+attrName+'` attribute declares `type: \''+attrDef.type+'\'`,\n'+ + 'without ALSO declaring `allowNull: true`, this `null` value is unexpected.\n'+ + '(To resolve, either change this attribute to `allowNull: true` or update\n'+ + 'existing records in the database accordingly.)\n'+ + WARNING_SUFFIXES.MIGHT_BE_YOUR_FAULT + ); + } + else { + console.warn('\n'+ + 'Warning: After transforming columnNames back to attribute names for model `' + modelIdentity + '`,\n'+ + ' a record in the result has a value with an unexpected data type for property `'+attrName+'`.\n'+ + 'The corresponding attribute declares `type: \''+attrDef.type+'\'` but instead\n'+ + 'of that, the actual value is:\n'+ + '```\n'+ + util.inspect(record[attrName],{depth:5})+'\n'+ + '```\n'+ + WARNING_SUFFIXES.MIGHT_BE_YOUR_FAULT + ); + } + break; + default: throw e; + } + }//>-• + + } + + } + + + //>- + + // ██████╗██╗ ██╗███████╗ ██████╗██╗ ██╗ + // ██╔════╝██║ ██║██╔════╝██╔════╝██║ ██╔╝ + // ██║ ███████║█████╗ ██║ █████╔╝ + // ██║ ██╔══██║██╔══╝ ██║ ██╔═██╗ + // ╚██████╗██║ ██║███████╗╚██████╗██║ ██╗ + // ╚═════╝╚═╝ ╚═╝╚══════╝ ╚═════╝╚═╝ ╚═╝ + // + // ██████╗ ███████╗ ██████╗ ██╗ ██╗██╗██████╗ ███████╗██████╗ ███╗ ██╗███████╗███████╗███████╗ + // ██╔══██╗██╔════╝██╔═══██╗██║ ██║██║██╔══██╗██╔════╝██╔══██╗████╗ ██║██╔════╝██╔════╝██╔════╝ + // ██████╔╝█████╗ ██║ ██║██║ ██║██║██████╔╝█████╗ ██║ ██║██╔██╗ ██║█████╗ ███████╗███████╗ + // ██╔══██╗██╔══╝ ██║▄▄ ██║██║ ██║██║██╔══██╗██╔══╝ ██║ ██║██║╚██╗██║██╔══╝ ╚════██║╚════██║ + // ██║ ██║███████╗╚██████╔╝╚██████╔╝██║██║ ██║███████╗██████╔╝██║ ╚████║███████╗███████║███████║ + // ╚═╝ 
╚═╝╚══════╝ ╚══▀▀═╝ ╚═════╝ ╚═╝╚═╝ ╚═╝╚══════╝╚═════╝ ╚═╝ ╚═══╝╚══════╝╚══════╝╚══════╝ + // + // If attribute is required, check that the value returned in this record + // is neither `null` nor empty string ('') nor `undefined`. + if (attrDef.required) { + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Log a warning (but note that, to really get this right, we'd need access to + // a clone of the `omit` and `select` clauses from the s2q criteria, plus the `populates` + // query key from the s2q criteria -- probably also a clone of that) + // + // ``` + // if (_.isUndefined(record[attrName]) || _.isNull(record[attrName]) || record[attrName] === '') { + // // (We'd also need to make sure this wasn't deliberately excluded by custom projections + // // before logging this warning.) + // console.warn('\n'+ + // 'Warning: After transforming columnNames back to attribute names for model `' + modelIdentity + '`,\n'+ + // 'a record in the result contains an unexpected value (`'+util.inspect(record[attrName],{depth:1})+'`)`\n'+ + // 'for its `'+attrName+'` property. Since `'+attrName+'` is a required attribute,\n'+ + // 'it should never be returned as `null` or empty string. This usually means there\n'+ + // 'is existing data that was persisted some time before the `'+attrName+'` attribute\n'+ + // 'was set to `required: true`. 
To make this warning go away, either remove\n'+ + // '`required: true` from this attribute, or update the existing, already-stored data\n'+ + // 'so that the `'+attrName+'` of all records is set to some value other than null or\n'+ + // 'empty string.\n'+ + // WARNING_SUFFIXES.MIGHT_BE_YOUR_FAULT + // ); + // } + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + } + + });// + + }//fi (verify records) + + + // █████╗ ████████╗████████╗ █████╗ ██████╗██╗ ██╗ + // ██╔══██╗╚══██╔══╝╚══██╔══╝██╔══██╗██╔════╝██║ ██║ + // ███████║ ██║ ██║ ███████║██║ ███████║ + // ██╔══██║ ██║ ██║ ██╔══██║██║ ██╔══██║ + // ██║ ██║ ██║ ██║ ██║ ██║╚██████╗██║ ██║ + // ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ + // + // ██████╗██╗ ██╗███████╗████████╗ ██████╗ ███╗ ███╗ + // ██╔════╝██║ ██║██╔════╝╚══██╔══╝██╔═══██╗████╗ ████║ + // ██║ ██║ ██║███████╗ ██║ ██║ ██║██╔████╔██║ + // ██║ ██║ ██║╚════██║ ██║ ██║ ██║██║╚██╔╝██║ + // ╚██████╗╚██████╔╝███████║ ██║ ╚██████╔╝██║ ╚═╝ ██║ + // ╚═════╝ ╚═════╝ ╚══════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ + // + // ████████╗ ██████╗ ██╗███████╗ ██████╗ ███╗ ██╗ ██╗██╗ + // ╚══██╔══╝██╔═══██╗ ██║██╔════╝██╔═══██╗████╗ ██║██╔╝╚██╗ + // ██║ ██║ ██║ ██║███████╗██║ ██║██╔██╗ ██║██║ ██║ + // ██║ ██║ ██║██ ██║╚════██║██║ ██║██║╚██╗██║██║ ██║ + // ██╗██║ ╚██████╔╝╚█████╔╝███████║╚██████╔╝██║ ╚████║╚██╗██╔╝ + // ╚═╝╚═╝ ╚═════╝ ╚════╝ ╚══════╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═╝╚═╝ + // ╦╔═╗ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ + // ║╠╣ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ + // ╩╚ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ooo + if (WLModel.customToJSON) { + Object.defineProperty(record, 'toJSON', { + writable: true, + value: WLModel.customToJSON + }); + }//>- + + + // ██████╗ ███████╗ ██████╗██████╗ ██╗ ██╗██████╗ ████████╗ ██████╗ █████╗ ████████╗ █████╗ + // ██╔══██╗██╔════╝██╔════╝██╔══██╗╚██╗ ██╔╝██╔══██╗╚══██╔══╝ ██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗ + // ██║ ██║█████╗ ██║ ██████╔╝ ╚████╔╝ ██████╔╝ ██║ ██║ ██║███████║ ██║ ███████║ + // ██║ ██║██╔══╝ ██║ ██╔══██╗ ╚██╔╝ ██╔═══╝ ██║ ██║ 
██║██╔══██║ ██║ ██╔══██║ + // ██████╔╝███████╗╚██████╗██║ ██║ ██║ ██║ ██║ ██████╔╝██║ ██║ ██║ ██║ ██║ + // ╚═════╝ ╚══════╝ ╚═════╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ + // ╦╔═╗ ┬─┐┌─┐┬ ┌─┐┬ ┬┌─┐┌┐┌┌┬┐ + // ║╠╣ ├┬┘├┤ │ ├┤ └┐┌┘├─┤│││ │ + // ╩╚ ┴└─└─┘┴─┘└─┘ └┘ ┴ ┴┘└┘ ┴ooo + var willDecrypt = meta && meta.decrypt; + if (willDecrypt) { + _.each(WLModel.attributes, function (attrDef, attrName){ + try { + if (attrDef.encrypt) { + + // Never try to decrypt `''`(empty string), `0` (zero), `false`, or `null`, since these are + // possible base values, which might end up in the database. (Note that if this is a required + // attribute, we could probably be more picky-- but it seems unlikely that encrypting these base + // values at rest will ever be a priority, since they don't contain any sensitive information. + // Arguably, there are edge cases where knowing _whether_ a particular field is at its base value + // could be deemed sensitive info, but building around that extreme edge case seems like a bad idea + // that probably isn't worth the extra headache and complexity in core.) + if (record[attrName] === '' || record[attrName] === 0 || record[attrName] === false || _.isNull(record[attrName])) { + // Don't try to decrypt these. + } + else { + + // Decrypt using the appropriate key from the configured DEKs. + var decryptedButStillJsonEncoded; + + // console.log('•••••decrypting: `'+util.inspect(record[attrName], {depth:null})+'`'); + + // Require this down here for Node version compat. 
+ var EA = require('encrypted-attr'); + decryptedButStillJsonEncoded = EA([attrName], { + keys: WLModel.dataEncryptionKeys + }) + .decryptAttribute(undefined, record[attrName]); + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // Alternative: (hack for testing) + // ``` + // if (!record[attrName].match(/^ENCRYPTED:/)){ throw new Error('Unexpected behavior: Can\'t decrypt something already decrypted!!!'); } + // decryptedButStillJsonEncoded = record[attrName].replace(/^ENCRYPTED:/, ''); + // ``` + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + // Finally, JSON-decode the value, to allow for differentiating between strings/numbers/booleans/null. + try { + record[attrName] = JSON.parse(decryptedButStillJsonEncoded); + } catch (err) { + throw flaverr({ + message: 'After initially decrypting the raw data, Waterline attempted to JSON-parse the data '+ + 'to ensure it was accurately decoded into the correct data type (for example, `2` vs `\'2\'`). '+ + 'But this time, JSON.parse() failed with the following error: '+err.message + }, err); + } + + }//fi + + }//fi + } catch (err) { + // console.log('•••••was attempting to decrypt this value: `'+util.inspect(record[attrName], {depth:null})+'`'); + + // Note: Decryption might not work, because there's no way of knowing what could have gotten into + // the database (e.g. from other processes, apps, maybe not even Node.js, etc.) + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // FUTURE: Instead of failing with an error, consider logging a warning and + // sending back the data as-is. (e.g. and attach MIGHT_BE_YOUR_FAULT suffix.) + // But remember: this is potentially sensitive data we're talking about, so being + // a little over-strict seems like the right idea. 
Maybe the right answer is to + // still log the warning, but instead of sending back the potentially-sensitive data, + // log it as part of the warning and send back whatever the appropriate base value is + // instead. + // + // Regardless, for now we use an actual error to be on the safe side. + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + throw flaverr({ + message: 'Decryption failed for `'+attrName+'` (in a `'+WLModel.identity+'` record).\n'+ + 'The actual value in the record that could not be decrypted is:\n'+ + '```\n'+ + util.inspect(record[attrName],{depth:5})+'\n'+ + '```\n'+ + 'Error details:\n'+ + ' '+err.message + }, _.isError(err) ? err : new Error()); + } + });//∞ + }//fi + + + + }, false, modelIdentity, orm);// + + + // + // Records are modified in-place above, so there is no return value. + // + + // console.timeEnd('processAllRecords'); + +}; diff --git a/lib/waterline/utils/query/verify-model-method-context.js b/lib/waterline/utils/query/verify-model-method-context.js new file mode 100644 index 000000000..a136b3bda --- /dev/null +++ b/lib/waterline/utils/query/verify-model-method-context.js @@ -0,0 +1,43 @@ +/** + * Module dependencies + */ + +var flaverr = require('flaverr'); + +/** + * verifyModelMethodContext() + * + * Take a look at the provided reference (presumably the `this` context of a + * model method when it runs) and give it a sniff to make sure it's _probably_ + * a Sails/Waterline model. + * + * If it's definitely NOT a Sails/Waterline model, then throw a usage error + * that explains that the model method seems to have been run from an invalid + * context, and throw out some ideas about what you might do about that. + * + * > This utility is designed exclusively for use by the model methods defined + * > within Waterline core. 
+ * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * + * @param {Ref} context + * The context (`this`) that this Waterline model method was invoked with. + * + * @throws {Error} If the context is not a model. + * @property {String} name :: 'UsageError' + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function verifyModelMethodContext(context) { + + if (!context.waterline) { + throw flaverr({ name: 'UsageError' }, new Error( + 'Model method called from an unexpected context. Expected `this` to refer to a Sails/Waterline '+ + 'model, but it doesn\'t seem to. (This sometimes occurs when passing a model method directly '+ + 'through as the argument for something like `async.eachSeries()` or `.stream().eachRecord()`. '+ + 'If that\'s what happened here, then just use a wrapper function.) For further help, see '+ + 'http://sailsjs.com/support.' + )); + } + +}; + diff --git a/lib/waterline/utils/schema.js b/lib/waterline/utils/schema.js deleted file mode 100644 index 6d77e6074..000000000 --- a/lib/waterline/utils/schema.js +++ /dev/null @@ -1,226 +0,0 @@ -/** - * Dependencies - */ - -var _ = require('lodash'); -var types = require('./types'); -var callbacks = require('./callbacks'); -var hasOwnProperty = require('./helpers').object.hasOwnProperty; - -/** - * Expose schema - */ - -var schema = module.exports = exports; - -/** - * Iterate over `attrs` normalizing string values to the proper - * attribute object. 
- * - * Example: - * { - * name: 'STRING', - * age: { - * type: 'INTEGER' - * } - * } - * - * Returns: - * { - * name: { - * type: 'string' - * }, - * age: { - * type: 'integer' - * } - * } - * - * @param {Object} attrs - * @return {Object} - */ - -schema.normalizeAttributes = function(attrs) { - var attributes = {}; - - Object.keys(attrs).forEach(function(key) { - - // Not concerned with functions - if (typeof attrs[key] === 'function') return; - - // Expand shorthand type - if (typeof attrs[key] === 'string') { - attributes[key] = { type: attrs[key] }; - } else { - attributes[key] = attrs[key]; - } - - // Ensure type is lower case - if (attributes[key].type && typeof attributes[key].type !== 'undefined') { - attributes[key].type = attributes[key].type.toLowerCase(); - } - - // Ensure Collection property is lowercased - if (hasOwnProperty(attrs[key], 'collection')) { - attrs[key].collection = attrs[key].collection.toLowerCase(); - } - - // Ensure Model property is lowercased - if (hasOwnProperty(attrs[key], 'model')) { - attrs[key].model = attrs[key].model.toLowerCase(); - } - }); - - return attributes; -}; - - -/** - * Return all methods in `attrs` that should be provided - * on the model. - * - * Example: - * { - * name: 'string', - * email: 'string', - * doSomething: function() { - * return true; - * } - * } - * - * Returns: - * { - * doSomething: function() { - * return true; - * } - * } - * - * @param {Object} attrs - * @return {Object} - */ - -schema.instanceMethods = function(attrs) { - var methods = {}; - - if (!attrs) return methods; - - Object.keys(attrs).forEach(function(key) { - if (typeof attrs[key] === 'function') { - methods[key] = attrs[key]; - } - }); - - return methods; -}; - - -/** - * Normalize callbacks - * - * Return all callback functions in `context`, allows for string mapping to - * functions located in `context.attributes`. 
- * - * Example: - * { - * attributes: { - * name: 'string', - * email: 'string', - * increment: function increment() { i++; } - * }, - * afterCreate: 'increment', - * beforeCreate: function() { return true; } - * } - * - * Returns: - * { - * afterCreate: [ - * function increment() { i++; } - * ], - * beforeCreate: [ - * function() { return true; } - * ] - * } - * - * @param {Object} context - * @return {Object} - */ - -schema.normalizeCallbacks = function(context) { - var i, _i, len, _len, fn; - var fns = {}; - - function defaultFn(fn) { - return function(values, next) { return next(); }; - } - - for (i = 0, len = callbacks.length; i < len; i = i + 1) { - fn = callbacks[i]; - - // Skip if the model hasn't defined this callback - if (typeof context[fn] === 'undefined') { - fns[fn] = [ defaultFn(fn) ]; - continue; - } - - if (Array.isArray(context[fn])) { - fns[fn] = []; - - // Iterate over all functions - for (_i = 0, _len = context[fn].length; _i < _len; _i = _i + 1) { - if (typeof context[fn][_i] === 'string') { - // Attempt to map string to function - if (typeof context.attributes[context[fn][_i]] === 'function') { - fns[fn][_i] = context.attributes[context[fn][_i]]; - delete context.attributes[context[fn][_i]]; - } else { - throw new Error('Unable to locate callback `' + context[fn][_i] + '`'); - } - } else { - fns[fn][_i] = context[fn][_i]; - } - } - } else if (typeof context[fn] === 'string') { - // Attempt to map string to function - if (typeof context.attributes[context[fn]] === 'function') { - fns[fn] = [ context.attributes[context[fn]] ]; - delete context.attributes[context[fn]]; - } else { - throw new Error('Unable to locate callback `' + context[fn] + '`'); - } - } else { - // Just add a single function - fns[fn] = [ context[fn] ]; - } - } - - return fns; -}; - - -/** - * Replace any Join Criteria references with the defined tableName for a collection. 
- * - * @param {Object} criteria - * @param {Object} collections - * @return {Object} - * @api public - */ - -schema.serializeJoins = function(criteria, collections) { - - if (!criteria.joins) return criteria; - - var joins = _.cloneDeep(criteria.joins); - - joins.forEach(function(join) { - - if (!hasOwnProperty(collections[join.parent], 'tableName')) return; - if (!hasOwnProperty(collections[join.child], 'tableName')) return; - - join.parent = collections[join.parent].tableName; - join.child = collections[join.child].tableName; - - }); - - criteria.joins = joins; - return criteria; -}; diff --git a/lib/waterline/utils/sorter.js b/lib/waterline/utils/sorter.js deleted file mode 100644 index 590b0f499..000000000 --- a/lib/waterline/utils/sorter.js +++ /dev/null @@ -1,57 +0,0 @@ -/** - * Module Dependencies - */ - -var _ = require('lodash'); - -/** - * Sort `data` (tuples) using `sortCriteria` (comparator) - * - * Based on method described here: - * http://stackoverflow.com/a/4760279/909625 - * - * @param { Object[] } data [tuples] - * @param { Object } sortCriteria [mongo-style comparator object] - * @return { Object[] } - */ - -module.exports = function sortData(data, sortCriteria) { - - function dynamicSort(property) { - var sortOrder = 1; - if (property[0] === '-') { - sortOrder = -1; - property = property.substr(1); - } - - return function(a, b) { - var result = (a[property] < b[property]) ? -1 : (a[property] > b[property]) ? 
1 : 0; - return result * sortOrder; - }; - } - - function dynamicSortMultiple() { - var props = arguments; - return function(obj1, obj2) { - var i = 0; - var result = 0; - var numberOfProperties = props.length; - - while (result === 0 && i < numberOfProperties) { - result = dynamicSort(props[i])(obj1, obj2); - i++; - } - return result; - }; - } - - // build sort criteria in the format ['firstName', '-lastName'] - var sortArray = []; - _.each(_.keys(sortCriteria), function(key) { - if (sortCriteria[key] === -1) sortArray.push('-' + key); - else sortArray.push(key); - }); - - data.sort(dynamicSortMultiple.apply(null, sortArray)); - return data; -}; diff --git a/lib/waterline/utils/stream.js b/lib/waterline/utils/stream.js deleted file mode 100644 index 495437f8f..000000000 --- a/lib/waterline/utils/stream.js +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Streams - * - * A Streaming API with support for Transformations - */ - -var util = require('util'); -var Stream = require('stream'); -var Transformations = require('./transformations'); -var _ = require('lodash'); - -var ModelStream = module.exports = function(transformation) { - - // Use specified, or otherwise default, JSON transformation - this.transformation = transformation || Transformations.json; - - // Reset write index - this.index = 0; - - // Make stream writable - this.writable = true; -}; - -util.inherits(ModelStream, Stream); - -/** - * Write to stream - * - * Extracts args to write and emits them as data events - * - * @param {Object} model - * @param {Function} cb - */ - -ModelStream.prototype.write = function(model, cb) { - var self = this; - - // Run transformation on this item - this.transformation.write(model, this.index, function writeToStream(err, transformedModel) { - - // Increment index for next time - self.index++; - - // Write transformed model to stream - self.emit('data', _.clone(transformedModel)); - - // Inform that we're finished - if (cb) return cb(err); - }); - -}; - -/** - * End Stream - 
*/ - -ModelStream.prototype.end = function(err, cb) { - var self = this; - - if (err) { - this.emit('error', err.message); - if (cb) return cb(err); - return; - } - - this.transformation.end(function(err, suffix) { - - if (err) { - self.emit('error', err); - if (cb) return cb(err); - return; - } - - // Emit suffix if specified - if (suffix) self.emit('data', suffix); - self.emit('end'); - if (cb) return cb(); - }); -}; diff --git a/lib/waterline/utils/system/collection-builder.js b/lib/waterline/utils/system/collection-builder.js new file mode 100644 index 000000000..59a633920 --- /dev/null +++ b/lib/waterline/utils/system/collection-builder.js @@ -0,0 +1,71 @@ +var _ = require('@sailshq/lodash'); + + +// ██████╗ ██╗ ██╗██╗██╗ ██████╗ ██╗ ██╗██╗ ██╗███████╗ +// ██╔══██╗██║ ██║██║██║ ██╔══██╗ ██║ ██║██║ ██║██╔════╝ +// ██████╔╝██║ ██║██║██║ ██║ ██║ ██║ ██║██║ ██║█████╗ +// ██╔══██╗██║ ██║██║██║ ██║ ██║ ██║ ██║╚██╗ ██╔╝██╔══╝ +// ██████╔╝╚██████╔╝██║███████╗██████╔╝ ███████╗██║ ╚████╔╝ ███████╗ +// ╚═════╝ ╚═════╝ ╚═╝╚══════╝╚═════╝ ╚══════╝╚═╝ ╚═══╝ ╚══════╝ +// +// ██╗ ██╗██╗ ███╗ ███╗ ██████╗ ██████╗ ███████╗██╗ +// ██║ ██║██║ ████╗ ████║██╔═══██╗██╔══██╗██╔════╝██║ +// ██║ █╗ ██║██║ ██╔████╔██║██║ ██║██║ ██║█████╗ ██║ +// ██║███╗██║██║ ██║╚██╔╝██║██║ ██║██║ ██║██╔══╝ ██║ +// ╚███╔███╔╝███████╗ ██║ ╚═╝ ██║╚██████╔╝██████╔╝███████╗███████╗ +// ╚══╝╚══╝ ╚══════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ +// +// Normalize a Waterline model instance and attaches the correct datastore, returning a "live model". +module.exports = function CollectionBuilder(collection, datastores, context) { + // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ + // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ + // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ + + // Throw Error if no Tablename/Identity is set + if (!_.has(collection.prototype, 'tableName') && !_.has(collection.prototype, 'identity')) { + throw new Error('A tableName or identity property must be set.'); + } + + // Find the datastores used by this collection. 
If none are specified check + // if a default datastore exists. + // if (!_.has(collection.prototype, 'datastore')) { + if (collection.prototype.datastore === undefined) { + + // Check if a default datastore was specified + if (!_.has(datastores, 'default')) { + throw new Error('No `datastore` was specified in the definition for model `' + collection.prototype.identity+'`, and there is no default datastore (i.e. defined as "default") to fall back to. (Usually, if the "default" datastore is missing, it means the ORM is not set up correctly.)'); + } + + // Set the datastore as the default + collection.prototype.datastore = 'default'; + } + + + // ╔═╗╔═╗╔╦╗ ┌─┐┌─┐┌┬┐┬┬ ┬┌─┐ ┌┬┐┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐┌─┐┌─┐ + // ╚═╗║╣ ║ ├─┤│ │ │└┐┌┘├┤ ││├─┤ │ ├─┤└─┐ │ │ │├┬┘├┤ └─┐ + // ╚═╝╚═╝ ╩ ┴ ┴└─┘ ┴ ┴ └┘ └─┘ ─┴┘┴ ┴ ┴ ┴ ┴└─┘ ┴ └─┘┴└─└─┘└─┘ + + // Set the datastore used for the adapter + var datastoreName = collection.prototype.datastore; + + // Ensure the named datastore exists + if (!_.has(datastores, datastoreName)) { + if (datastoreName !== 'default'){ + throw new Error('Unrecognized datastore (`' + datastoreName + '`) specified in the definition for model `' + collection.prototype.identity + '`. Please make sure it exists. (If you\'re unsure, use "default".)'); + } + else { + throw new Error('Unrecognized datastore (`' + datastoreName + '`) specified in the definition for model `' + collection.prototype.identity + '`. 
(Usually, if the "default" datastore is missing, it means the ORM is not set up correctly.)'); + } + } + + // Add the collection to the datastore listing + datastores[datastoreName].collections.push(collection.prototype.identity); + + + // ╦╔╗╔╔═╗╔╦╗╔═╗╔╗╔╔╦╗╦╔═╗╔╦╗╔═╗ + // ║║║║╚═╗ ║ ╠═╣║║║ ║ ║╠═╣ ║ ║╣ + // ╩╝╚╝╚═╝ ╩ ╩ ╩╝╚╝ ╩ ╩╩ ╩ ╩ ╚═╝ + var liveModel = new collection(context, datastores[datastoreName]); + + return liveModel; +}; diff --git a/lib/waterline/utils/system/datastore-builder.js b/lib/waterline/utils/system/datastore-builder.js new file mode 100644 index 000000000..a5d4dc36e --- /dev/null +++ b/lib/waterline/utils/system/datastore-builder.js @@ -0,0 +1,56 @@ +// ██████╗ █████╗ ████████╗ █████╗ ███████╗████████╗ ██████╗ ██████╗ ███████╗ +// ██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗██╔════╝╚══██╔══╝██╔═══██╗██╔══██╗██╔════╝ +// ██║ ██║███████║ ██║ ███████║███████╗ ██║ ██║ ██║██████╔╝█████╗ +// ██║ ██║██╔══██║ ██║ ██╔══██║╚════██║ ██║ ██║ ██║██╔══██╗██╔══╝ +// ██████╔╝██║ ██║ ██║ ██║ ██║███████║ ██║ ╚██████╔╝██║ ██║███████╗ +// ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝ +// +// ██████╗ ██╗ ██╗██╗██╗ ██████╗ ███████╗██████╗ +// ██╔══██╗██║ ██║██║██║ ██╔══██╗██╔════╝██╔══██╗ +// ██████╔╝██║ ██║██║██║ ██║ ██║█████╗ ██████╔╝ +// ██╔══██╗██║ ██║██║██║ ██║ ██║██╔══╝ ██╔══██╗ +// ██████╔╝╚██████╔╝██║███████╗██████╔╝███████╗██║ ██║ +// ╚═════╝ ╚═════╝ ╚═╝╚══════╝╚═════╝ ╚══════╝╚═╝ ╚═╝ +// +// Builds up the set of datastores used by the various Waterline Models. 
+// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +// TODO: verify that last part of the statement (not seeing how this is related to "models") +// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + +var _ = require('@sailshq/lodash'); + +module.exports = function DatastoreBuilder(adapters, datastoreConfigs) { + var datastores = {}; + + // For each datastore config, create a normalized, namespaced, dictionary. + _.each(datastoreConfigs, function(config, datastoreName) { + // Ensure that an `adapter` is specified + if (!_.has(config, 'adapter')) { + throw new Error('The datastore ' + datastoreName + ' is missing a required property (`adapter`). You should indicate the name of one of your adapters.'); + } + + // Ensure that the named adapter is present in the adapters that were passed + // in. + if (!_.has(adapters, config.adapter)) { + // Check that the adapter's name was a string + if (!_.isString(config.adapter)) { + throw new Error('Invalid `adapter` property in datastore ' + datastoreName + '. It should be a string (the name of one of the adapters you passed into `waterline.initialize()`).'); + } + + // Otherwise throw an unknown error + throw new Error('Unknown adapter ' + config.adapter + ' for datastore ' + datastoreName + '. You should double-check that the connection\'s `adapter` property matches the name of one of your adapters. Or perhaps you forgot to include your adapter when you called `waterline.initialize()`.)'); + } + + // Shallow-merge the adapter defaults underneath with the user-defined config. 
+ var datastoreConfig = _.extend({}, adapters[config.adapter].defaults, config); + + // Build the datastore config + datastores[datastoreName] = { + config: datastoreConfig, + adapter: adapters[config.adapter], + collections: []// << TODO: fix naming + }; + }); + + return datastores; +}; diff --git a/lib/waterline/utils/system/has-schema-check.js b/lib/waterline/utils/system/has-schema-check.js new file mode 100644 index 000000000..42446dc5f --- /dev/null +++ b/lib/waterline/utils/system/has-schema-check.js @@ -0,0 +1,50 @@ +// ██╗ ██╗ █████╗ ███████╗ ███████╗ ██████╗██╗ ██╗███████╗███╗ ███╗ █████╗ +// ██║ ██║██╔══██╗██╔════╝ ██╔════╝██╔════╝██║ ██║██╔════╝████╗ ████║██╔══██╗ +// ███████║███████║███████╗ ███████╗██║ ███████║█████╗ ██╔████╔██║███████║ +// ██╔══██║██╔══██║╚════██║ ╚════██║██║ ██╔══██║██╔══╝ ██║╚██╔╝██║██╔══██║ +// ██║ ██║██║ ██║███████║ ███████║╚██████╗██║ ██║███████╗██║ ╚═╝ ██║██║ ██║ +// ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝ ╚══════╝ ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝ +// +// ██████╗██╗ ██╗███████╗ ██████╗██╗ ██╗ +// ██╔════╝██║ ██║██╔════╝██╔════╝██║ ██╔╝ +// ██║ ███████║█████╗ ██║ █████╔╝ +// ██║ ██╔══██║██╔══╝ ██║ ██╔═██╗ +// ╚██████╗██║ ██║███████╗╚██████╗██║ ██╗ +// ╚═════╝╚═╝ ╚═╝╚══════╝ ╚═════╝╚═╝ ╚═╝ +// +// Returns TRUE/FALSE if a collection has it's `hasSchema` flag set. 
+ +var _ = require('@sailshq/lodash'); + +module.exports = function hasSchemaCheck(context) { + // If hasSchema is defined on the collection, return the value + if (_.has(Object.getPrototypeOf(context), 'hasSchema')) { + var proto = Object.getPrototypeOf(context); + if (!_.isUndefined(proto.hasSchema)) { + return Object.getPrototypeOf(context).hasSchema; + } + } + + // Grab the first connection used + if (!context.connection || !_.isArray(context.connection)) { + return true; + } + + var connection = context.connections[_.first(context.connection)]; + + // Check the user defined config + if (_.has(connection, 'config') && _.has(connection.config, 'schema')) { + return connection.config.schema; + } + + // Check the defaults defined in the adapter + if (!_.has(connection, 'adapter')) { + return true; + } + + if (!_.has(connection.adapter, 'schema')) { + return true; + } + + return connection.adapter.schema; +}; diff --git a/lib/waterline/utils/system/lifecycle-callback-builder.js b/lib/waterline/utils/system/lifecycle-callback-builder.js new file mode 100644 index 000000000..a80b5049c --- /dev/null +++ b/lib/waterline/utils/system/lifecycle-callback-builder.js @@ -0,0 +1,56 @@ +// ██████╗ ██╗ ██╗██╗██╗ ██████╗ +// ██╔══██╗██║ ██║██║██║ ██╔══██╗ +// ██████╔╝██║ ██║██║██║ ██║ ██║ +// ██╔══██╗██║ ██║██║██║ ██║ ██║ +// ██████╔╝╚██████╔╝██║███████╗██████╔╝ +// ╚═════╝ ╚═════╝ ╚═╝╚══════╝╚═════╝ +// +// ██╗ ██╗███████╗███████╗ ██████╗██╗ ██╗ ██████╗██╗ ███████╗ +// ██║ ██║██╔════╝██╔════╝██╔════╝╚██╗ ██╔╝██╔════╝██║ ██╔════╝ +// ██║ ██║█████╗ █████╗ ██║ ╚████╔╝ ██║ ██║ █████╗ +// ██║ ██║██╔══╝ ██╔══╝ ██║ ╚██╔╝ ██║ ██║ ██╔══╝ +// ███████╗██║██║ ███████╗╚██████╗ ██║ ╚██████╗███████╗███████╗ +// ╚══════╝╚═╝╚═╝ ╚══════╝ ╚═════╝ ╚═╝ ╚═════╝╚══════╝╚══════╝ +// +// ██████╗ █████╗ ██╗ ██╗ ██████╗ █████╗ ██████╗██╗ ██╗███████╗ +// ██╔════╝██╔══██╗██║ ██║ ██╔══██╗██╔══██╗██╔════╝██║ ██╔╝██╔════╝ +// ██║ ███████║██║ ██║ ██████╔╝███████║██║ █████╔╝ ███████╗ +// ██║ ██╔══██║██║ ██║ 
██╔══██╗██╔══██║██║ ██╔═██╗ ╚════██║ +// ╚██████╗██║ ██║███████╗███████╗██████╔╝██║ ██║╚██████╗██║ ██╗███████║ +// ╚═════╝╚═╝ ╚═╝╚══════╝╚══════╝╚═════╝ ╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝╚══════╝ +// + +var _ = require('@sailshq/lodash'); + +module.exports = function LifecycleCallbackBuilder(context) { + // Build a list of accepted lifecycle callbacks + var validCallbacks = [ + 'beforeValidate', + 'afterValidate', + 'beforeUpdate', + 'afterUpdate', + 'beforeCreate', + 'afterCreate', + 'beforeDestroy', + 'afterDestroy', + 'beforeFind', + 'afterFind', + 'beforeFindOne', + 'afterFindOne' + ]; + + // Hold a mapping of functions to run at various times in the query lifecycle + var callbacks = {}; + + // Look for each type of callback in the collection + _.each(validCallbacks, function(callbackName) { + // If the callback isn't defined on the model there is nothing to do + if (_.isUndefined(context[callbackName])) { + return; + } + + callbacks[callbackName] = context[callbackName]; + }); + + return callbacks; +}; diff --git a/lib/waterline/utils/system/reserved-property-names.js b/lib/waterline/utils/system/reserved-property-names.js new file mode 100644 index 000000000..2c13e1ce9 --- /dev/null +++ b/lib/waterline/utils/system/reserved-property-names.js @@ -0,0 +1,47 @@ +// ██████╗ ███████╗███████╗███████╗██████╗ ██╗ ██╗███████╗██████╗ +// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗██║ ██║██╔════╝██╔══██╗ +// ██████╔╝█████╗ ███████╗█████╗ ██████╔╝██║ ██║█████╗ ██║ ██║ +// ██╔══██╗██╔══╝ ╚════██║██╔══╝ ██╔══██╗╚██╗ ██╔╝██╔══╝ ██║ ██║ +// ██║ ██║███████╗███████║███████╗██║ ██║ ╚████╔╝ ███████╗██████╔╝ +// ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝╚═╝ ╚═╝ ╚═══╝ ╚══════╝╚═════╝ +// +// ██████╗ ██████╗ ██████╗ ██████╗ ███████╗██████╗ ████████╗██╗ ██╗ +// ██╔══██╗██╔══██╗██╔═══██╗██╔══██╗██╔════╝██╔══██╗╚══██╔══╝╚██╗ ██╔╝ +// ██████╔╝██████╔╝██║ ██║██████╔╝█████╗ ██████╔╝ ██║ ╚████╔╝ +// ██╔═══╝ ██╔══██╗██║ ██║██╔═══╝ ██╔══╝ ██╔══██╗ ██║ ╚██╔╝ +// ██║ ██║ ██║╚██████╔╝██║ ███████╗██║ ██║ ██║ ██║ +// ╚═╝ 
╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚══════╝╚═╝ ╚═╝ ╚═╝ ╚═╝ +// +// ███╗ ██╗ █████╗ ███╗ ███╗███████╗███████╗ +// ████╗ ██║██╔══██╗████╗ ████║██╔════╝██╔════╝ +// ██╔██╗ ██║███████║██╔████╔██║█████╗ ███████╗ +// ██║╚██╗██║██╔══██║██║╚██╔╝██║██╔══╝ ╚════██║ +// ██║ ╚████║██║ ██║██║ ╚═╝ ██║███████╗███████║ +// ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚══════╝ +// + +module.exports = [ + 'defaultsTo', + 'primaryKey', + 'autoIncrement', + 'unique', + 'index', + 'collection', + 'dominant', + 'through', + 'columnName', + 'foreignKey', + 'references', + 'on', + 'groupKey', + 'model', + 'via', + 'size', + 'example', + 'validationMessage', + 'validations', + 'populateSettings', + 'onKey', + 'protected', + 'meta' +]; diff --git a/lib/waterline/utils/system/reserved-validation-names.js b/lib/waterline/utils/system/reserved-validation-names.js new file mode 100644 index 000000000..3df7064ea --- /dev/null +++ b/lib/waterline/utils/system/reserved-validation-names.js @@ -0,0 +1,65 @@ +// ██████╗ ███████╗███████╗███████╗██████╗ ██╗ ██╗███████╗██████╗ +// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗██║ ██║██╔════╝██╔══██╗ +// ██████╔╝█████╗ ███████╗█████╗ ██████╔╝██║ ██║█████╗ ██║ ██║ +// ██╔══██╗██╔══╝ ╚════██║██╔══╝ ██╔══██╗╚██╗ ██╔╝██╔══╝ ██║ ██║ +// ██║ ██║███████╗███████║███████╗██║ ██║ ╚████╔╝ ███████╗██████╔╝ +// ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝╚═╝ ╚═╝ ╚═══╝ ╚══════╝╚═════╝ +// +// ██╗ ██╗ █████╗ ██╗ ██╗██████╗ █████╗ ████████╗██╗ ██████╗ ███╗ ██╗ +// ██║ ██║██╔══██╗██║ ██║██╔══██╗██╔══██╗╚══██╔══╝██║██╔═══██╗████╗ ██║ +// ██║ ██║███████║██║ ██║██║ ██║███████║ ██║ ██║██║ ██║██╔██╗ ██║ +// ╚██╗ ██╔╝██╔══██║██║ ██║██║ ██║██╔══██║ ██║ ██║██║ ██║██║╚██╗██║ +// ╚████╔╝ ██║ ██║███████╗██║██████╔╝██║ ██║ ██║ ██║╚██████╔╝██║ ╚████║ +// ╚═══╝ ╚═╝ ╚═╝╚══════╝╚═╝╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ +// +// ███╗ ██╗ █████╗ ███╗ ███╗███████╗███████╗ +// ████╗ ██║██╔══██╗████╗ ████║██╔════╝██╔════╝ +// ██╔██╗ ██║███████║██╔████╔██║█████╗ ███████╗ +// ██║╚██╗██║██╔══██║██║╚██╔╝██║██╔══╝ ╚════██║ +// ██║ ╚████║██║ 
██║██║ ╚═╝ ██║███████╗███████║ + // ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚══════╝ + // + + module.exports = [ + 'after', + 'alpha', + 'alphadashed', + 'alphanumeric', + 'alphanumericdashed', + 'before', + 'contains', + 'creditcard', + 'datetime', + 'decimal', + 'email', + 'finite', + 'float', + 'hexadecimal', + 'hexColor', + 'in', + 'int', + 'integer', + 'ip', + 'ipv4', + 'ipv6', + 'is', + 'lowercase', + 'max', + 'maxLength', + 'min', + 'minLength', + 'notRegex', + 'notContains', + 'notIn', + 'notNull', + 'numeric', + 'required', + 'regex', + 'truthy', + 'uppercase', + 'url', + 'urlish', + 'uuid', + 'uuidv3', + 'uuidv4' +]; diff --git a/lib/waterline/utils/system/transformer-builder.js b/lib/waterline/utils/system/transformer-builder.js new file mode 100644 index 000000000..b988aa309 --- /dev/null +++ b/lib/waterline/utils/system/transformer-builder.js @@ -0,0 +1,230 @@ +/** + * Module dependencies + */ + +var util = require('util'); +var _ = require('@sailshq/lodash'); + + +/** + * Transformation + * + * Allows for a Waterline Collection to have different + * attributes than what actually exist in an adapter's representation. + * + * @param {Object} attributes + * @param {Object} tables + */ + +var Transformation = module.exports = function(attributes) { + + // Hold an internal mapping of keys to transform + this._transformations = {}; + + // Initialize + this.initialize(attributes); + + return this; +}; + +/** + * Initial mapping of transformations. + * + * @param {Object} attributes + * @param {Object} tables + */ + +Transformation.prototype.initialize = function(attributes) { + var self = this; + + _.each(attributes, function(wlsAttrDef, attrName) { + // Make sure the attribute has a columnName set + if (!_.has(wlsAttrDef, 'columnName')) { + return; + } + + // Ensure the columnName is a string + if (!_.isString(wlsAttrDef.columnName)) { + throw new Error('Consistency violation: `columnName` must be a string. 
But for this attribute (`'+attrName+'`) it is not!'); + } + + // Set the column name transformation + self._transformations[attrName] = wlsAttrDef.columnName; + }); +}; + +/** + * Transforms a set of attributes into a representation used + * in an adapter. + * + * @param {Object} attributes to transform + * @return {Object} + */ + +Transformation.prototype.serializeCriteria = function(values) { + var self = this; + + function recursiveParse(obj) { + + // Return if no object + if (!obj) { + return; + } + + _.each(obj, function(propertyValue, propertyName) { + // Recursively parse `OR` or `AND` criteria objects to transform keys + if (_.isArray(propertyValue) && (propertyName === 'or' || propertyName === 'and')) { + return recursiveParse(propertyValue); + } + + // If nested dictionary, then take the recursive step, calling the function again + // and passing the nested dictionary as `obj` + if (!_.isDate(propertyValue) && _.isPlainObject(propertyValue)) { + + // check if object key is in the transformations + if (_.has(self._transformations, propertyName)) { + obj[self._transformations[propertyName]] = propertyValue; + + // Only delete if the names are different + if (self._transformations[propertyName] !== propertyName) { + delete obj[propertyName]; + } + + return recursiveParse(obj[self._transformations[propertyName]]); + } + + return recursiveParse(propertyValue); + } + + // If the property === SELECT check for any transformation keys + if (propertyName === 'select' && _.isArray(propertyValue)) { + // var arr = _.clone(obj[property]); + _.each(propertyValue, function(prop) { + if(_.has(self._transformations, prop)) { + var idx = _.indexOf(propertyValue, prop); + if(idx > -1) { + obj[propertyName][idx] = self._transformations[prop]; + } + } + }); + } + + // If the property === SORT check for any transformation keys + if (propertyName === 'sort' && _.isArray(propertyValue)) { + obj.sort = _.map(obj.sort, function(sortClause) { + var comparatorTarget = 
_.first(_.keys(sortClause)); + var attrName = _.first(comparatorTarget.split(/\./)); + var sortDirection = sortClause[comparatorTarget]; + + var sort = {}; + var columnName = self._transformations[attrName]; + sort[[columnName].concat(comparatorTarget.split(/\./).slice(1)).join('.')] = sortDirection; + return sort; + }); + } + + // Check if property is a transformation key + if (_.has(self._transformations, propertyName)) { + obj[self._transformations[propertyName]] = propertyValue; + if (self._transformations[propertyName] !== propertyName) { + delete obj[propertyName]; + } + } + }); + } + + // Recursively parse attributes to handle nested criteria + recursiveParse(values); + + return values; +}; + + +/** + * Transform a set of values into a representation used + * in an adapter. + * + * > The values are mutated in-place. + * + * @param {Object} values to transform + */ +Transformation.prototype.serializeValues = function(values) { + + // Sanity check + if (!_.isObject(values) || _.isArray(values) || _.isFunction(values)) { + throw new Error('Consistency violation: Must be a dictionary, but instead got: '+util.inspect(values, {depth: 5})); + } + + var self = this; + + _.each(values, function(propertyValue, propertyName) { + if (_.has(self._transformations, propertyName)) { + values[self._transformations[propertyName]] = propertyValue; + + // Only delete if the names are different + if (self._transformations[propertyName] !== propertyName) { + delete values[propertyName]; + } + } + }); + + // We deliberately return undefined here to reiterate that + // this _always_ mutates things in place! + return; +}; + + + +/** + * .unserialize() + * + * Destructively transforms a physical-layer record received + * from an adapter into a logical representation appropriate + * for userland (i.e. 
swapping out column names for attribute + * names) + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @param {Dictionary} pRecord + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @returns {Dictionary} + * This is an unnecessary return -- this method just + * returns the same reference to the original pRecord, + * which has been destructively mutated anyway. + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +Transformation.prototype.unserialize = function(pRecord) { + + // Get the database columns that we'll be transforming into attribute names. + var colsToTransform = _.values(this._transformations); + + // Shallow clone the physical record, so that we don't lose any values in cases + // where one attribute's name conflicts with another attribute's `columnName`. + // (see https://github.com/balderdashy/sails/issues/4079) + var copyOfPhysicalRecord = _.clone(pRecord); + + // Remove the values from the pRecord that are set for the columns we're + // going to transform. This ensures that the `columnName` and the + // attribute name don't both appear as properties in the final record + // (unless there's a conflict as described above). + _.each(_.keys(pRecord), function(key) { + if (_.contains(colsToTransform, key)) { + delete pRecord[key]; + } + }); + + // Loop through the keys to transform of this record and reattach them. + _.each(this._transformations, function(columnName, attrName) { + + // If there's no value set for this column name, continue. + if (!_.has(copyOfPhysicalRecord, columnName)) { + return; + } + + // Otherwise get the value from the cloned record. + pRecord[attrName] = copyOfPhysicalRecord[columnName]; + + }); + + // Return the original, mutated record. 
+ return pRecord; +}; diff --git a/lib/waterline/utils/system/types.js b/lib/waterline/utils/system/types.js new file mode 100644 index 000000000..2b8309aa4 --- /dev/null +++ b/lib/waterline/utils/system/types.js @@ -0,0 +1,11 @@ +/** + * Types Supported By Schemas + */ + +module.exports = [ + 'string', + 'number', + 'boolean', + 'json',// << generic json (`'*'`) + 'ref' // < passed straight through to adapter +]; diff --git a/lib/waterline/utils/system/validate-datastore-connectivity.js b/lib/waterline/utils/system/validate-datastore-connectivity.js new file mode 100644 index 000000000..b7cfbed33 --- /dev/null +++ b/lib/waterline/utils/system/validate-datastore-connectivity.js @@ -0,0 +1,45 @@ +var _ = require('@sailshq/lodash'); + +/** + * validateDatastoreConnectivity() + * + * Validates connectivity to a datastore by trying to acquire and release + * connection. + * + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + * @param {Ref} datastore + * + * @param {Function} done + * @param {Error?} err [if an error occured] + * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + */ + +module.exports = function validateDatastoreConnectivity(datastore, done) { + var adapterDSEntry = _.get(datastore.adapter.datastores, datastore.config.identity); + + // skip validation if `getConnection` and `releaseConnection` methods do not exist. + // aka pretend everything is OK + if (!_.has(adapterDSEntry.driver, 'getConnection') || !_.has(adapterDSEntry.driver, 'releaseConnection')) { + return done(); + } + + // try to acquire connection. + adapterDSEntry.driver.getConnection({ + manager: adapterDSEntry.manager + }, function(err, report) { + if (err) { + return done(err); + } + + // release connection. 
+ adapterDSEntry.driver.releaseConnection({ + connection: report.connection + }, function(err) { + if (err) { + return done(err); + } + + return done(); + });// + });// +}; diff --git a/lib/waterline/utils/transformations.js b/lib/waterline/utils/transformations.js deleted file mode 100644 index ebe2e29a3..000000000 --- a/lib/waterline/utils/transformations.js +++ /dev/null @@ -1,45 +0,0 @@ -/** - * Transformations - */ - -var Transformations = module.exports = {}; - -// Add JSON Transformation methods -Transformations.json = {}; - -/** - * Write Method Transformations - * - * Used to stream back valid JSON from Waterline - */ - -Transformations.json.write = function(model, index, cb) { - var transformedModel; - - if (!model) transformedModel = ''; - - // Transform to JSON - if (model) { - try { - transformedModel = JSON.stringify(model); - } catch (e) { - return cb(e); - } - } - - // Prefix with opening [ - if (index === 0) { transformedModel = '['; } - - // Prefix with comma after first model - if (index > 1) transformedModel = ',' + transformedModel; - - cb(null, transformedModel); -}; - -/** - * Close off JSON Array - */ -Transformations.json.end = function(cb) { - var suffix = ']'; - cb(null, suffix); -}; diff --git a/lib/waterline/utils/types.js b/lib/waterline/utils/types.js deleted file mode 100644 index ee9b9d623..000000000 --- a/lib/waterline/utils/types.js +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Types Supported By Schemas - */ - -module.exports = [ - 'string', - 'text', - 'integer', - 'float', - 'date', - 'time', - 'datetime', - 'boolean', - 'binary', - 'array', - 'json', - 'mediumtext', - 'longtext', - 'objectid' -]; diff --git a/lib/waterline/utils/usageError.js b/lib/waterline/utils/usageError.js deleted file mode 100644 index 0f6b73d68..000000000 --- a/lib/waterline/utils/usageError.js +++ /dev/null @@ -1,9 +0,0 @@ -/** - * Create a nicely formatted usage error - */ - -module.exports = function(err, usage, cb) { - var message = err + 
'\n==============================================\nProper usage :: \n' + usage + '\n==============================================\n'; - if (cb) return cb(message); - throw new Error(message); -}; diff --git a/package.json b/package.json index 1ecd6f0c1..28ec32aa1 100644 --- a/package.json +++ b/package.json @@ -1,46 +1,29 @@ { "name": "waterline", "description": "An ORM for Node.js and the Sails framework", - "version": "0.12.2", - "homepage": "http://github.com/balderdashy/waterline", + "version": "0.15.2", + "homepage": "http://waterlinejs.org", "contributors": [ - { - "name": "particlebanana", - "github": "https://github.com/particlebanana" - }, - { - "name": "mikermcneil", - "github": "https://github.com/mikermcneil" - }, - { - "name": "zolmeister", - "github": "https://github.com/zolmeister" - }, { "name": "seerepo", "github": "https://github.com/balderdashy/waterline/graphs/contributors" } ], "dependencies": { - "anchor": "~0.11.2", - "async": "1.5.2", - "bluebird": "3.2.1", - "deep-diff": "0.3.4", - "lodash": "3.10.1", - "prompt": "1.0.0", - "switchback": "2.0.1", - "waterline-criteria": "1.0.1", - "waterline-schema": "0.2.0" + "@sailshq/lodash": "^3.10.2", + "anchor": "^1.2.0", + "async": "2.6.4", + "encrypted-attr": "1.0.6", + "flaverr": "^1.9.2", + "lodash.issafeinteger": "4.0.4", + "parley": "^3.3.2", + "rttc": "^10.0.0-1", + "waterline-schema": "^1.0.0-20", + "waterline-utils": "^1.3.7" }, "devDependencies": { - "codeclimate-test-reporter": "0.3.2", - "eslint": "2.11.1", - "espree": "3.1.5", - "istanbul": "0.4.3", - "mocha": "2.5.3", - "sails-memory": "balderdashy/sails-memory", - "should": "9.0.0", - "waterline-adapter-tests": "balderdashy/waterline-adapter-tests" + "eslint": "4.11.0", + "mocha": "3.0.2" }, "keywords": [ "mvc", @@ -57,16 +40,16 @@ "repository": "git://github.com/balderdashy/waterline.git", "main": "./lib/waterline", "scripts": { - "test": "make test", - "prepublish": "npm prune", - "browserify": "rm -rf .dist && mkdir .dist && 
browserify lib/waterline.js -s Waterline | uglifyjs > .dist/waterline.min.js", - "coverage": "make coverage" + "test": "nodever=`node -e \"console.log('\\`node -v\\`'[1]);\"` && if [ $nodever != \"0\" ]; then npm run lint; fi && npm run custom-tests", + "custom-tests": "node ./node_modules/mocha/bin/mocha test --recursive", + "lint": "node ./node_modules/eslint/bin/eslint . --max-warnings=0 --ignore-pattern 'test/'", + "browserify": "rm -rf .dist && mkdir .dist && browserify lib/waterline.js -s Waterline | uglifyjs > .dist/waterline.min.js" }, "engines": { - "node": ">=0.10.0 <=5.x.x" + "node": ">=4" }, - "license": "MIT", "bugs": { - "url": "https://github.com/balderdashy/waterline/issues/new" - } + "url": "https://sailsjs.com/bugs" + }, + "license": "MIT" } diff --git a/test/.eslintrc b/test/.eslintrc new file mode 100644 index 000000000..694f9841b --- /dev/null +++ b/test/.eslintrc @@ -0,0 +1,20 @@ +{ + // ╔═╗╔═╗╦ ╦╔╗╔╔╦╗┬─┐┌─┐ ┌─┐┬ ┬┌─┐┬─┐┬─┐┬┌┬┐┌─┐ + // ║╣ ╚═╗║ ║║║║ ║ ├┬┘│ │ │└┐┌┘├┤ ├┬┘├┬┘│ ││├┤ + // o╚═╝╚═╝╩═╝╩╝╚╝ ╩ ┴└─└─┘ └─┘ └┘ └─┘┴└─┴└─┴─┴┘└─┘ + // ┌─ ┌─┐┌─┐┬─┐ ┌─┐┬ ┬┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐ ┌┬┐┌─┐┌─┐┌┬┐┌─┐ ─┐ + // │ ├┤ │ │├┬┘ ├─┤│ │ │ │ ││││├─┤ │ ├┤ ││ │ ├┤ └─┐ │ └─┐ │ + // └─ └ └─┘┴└─ ┴ ┴└─┘ ┴ └─┘┴ ┴┴ ┴ ┴ └─┘─┴┘ ┴ └─┘└─┘ ┴ └─┘ ─┘ + // > An .eslintrc configuration override for use with the tests in this directory. + // + // (See .eslintrc in the root directory of this package for more info.) 
+ + "extends": [ + "../.eslintrc" + ], + + "env": { + "mocha": true + } + +} diff --git a/test/adapter/runner.js b/test/adapter/runner.js deleted file mode 100644 index f3e040b49..000000000 --- a/test/adapter/runner.js +++ /dev/null @@ -1,99 +0,0 @@ -/** - * Test runner dependencies - */ -var util = require('util'); -var mocha = require('mocha'); - -var adapterName = 'sails-memory'; -var TestRunner = require('waterline-adapter-tests'); -var Adapter = require(adapterName); - - - -// Grab targeted interfaces from this adapter's `package.json` file: -var package = {}; -var interfaces = []; -var features = []; -try { - package = require('../../node_modules/' + adapterName + '/package.json'); - interfaces = package['waterlineAdapter'].interfaces; - features = package.waterlineAdapter.features; -} -catch (e) { - throw new Error( - '\n'+ - 'Could not read supported interfaces from "sails-adapter"."interfaces"'+'\n' + - 'in this adapter\'s `package.json` file ::' + '\n' + - util.inspect(e) - ); -} - - - - - -console.info('Testing `' + package.name + '`, a Sails adapter.'); -console.info('Running `waterline-adapter-tests` against ' + interfaces.length + ' interfaces...'); -console.info('( ' + interfaces.join(', ') + ' )'); -console.log(); -console.log('Latest draft of Waterline adapter interface spec:'); -console.info('https://github.com/balderdashy/sails-docs/blob/master/contributing/adapter-specification.md'); -console.log(); - - - - -/** - * Integration Test Runner - * - * Uses the `waterline-adapter-tests` module to - * run mocha tests against the specified interfaces - * of the currently-implemented Waterline adapter API. - */ -new TestRunner({ - - // Load the adapter module. - adapter: Adapter, - - // Default adapter config to use. - config: { - schema: false - }, - - // The set of adapter interfaces to test against. - // (grabbed these from this adapter's package.json file above) - interfaces: interfaces, - - // The set of adapter features to test against. 
- // (grabbed these from this adapter's package.json file above) - features: features, - - // Mocha options - // reference: https://github.com/mochajs/mocha/wiki/Using-mocha-programmatically - mocha: { - reporter: 'spec' - }, - - mochaChainableMethods: {}, - - // Return code 1 if any test failed - failOnError: true - - // Most databases implement 'semantic' and 'queryable'. - // - // As of Sails/Waterline v0.10, the 'associations' interface - // is also available. If you don't implement 'associations', - // it will be polyfilled for you by Waterline core. The core - // implementation will always be used for cross-adapter / cross-connection - // joins. - // - // In future versions of Sails/Waterline, 'queryable' may be also - // be polyfilled by core. - // - // These polyfilled implementations can usually be further optimized at the - // adapter level, since most databases provide optimizations for internal - // operations. - // - // Full interface reference: - // https://github.com/balderdashy/sails-docs/blob/master/contributing/adapter-specification.md -}); \ No newline at end of file diff --git a/test/unit/adapter/strategy.alter.buffers.js b/test/alter-migrations/strategy.alter.buffers.js similarity index 92% rename from test/unit/adapter/strategy.alter.buffers.js rename to test/alter-migrations/strategy.alter.buffers.js index 846849e8e..bcf9c7669 100644 --- a/test/unit/adapter/strategy.alter.buffers.js +++ b/test/alter-migrations/strategy.alter.buffers.js @@ -1,8 +1,8 @@ -var Waterline = require('../../../lib/waterline'); +var Waterline = require('../../lib/waterline'); var assert = require('assert'); -var _ = require('lodash'); +var _ = require('@sailshq/lodash'); -describe('Alter Mode Recovery with buffer attributes', function () { +describe.skip('Alter Mode Recovery with buffer attributes', function () { var waterline; var adapters; @@ -37,7 +37,7 @@ describe('Alter Mode Recovery with buffer attributes', function () { }); var adapter = { - registerConnection: 
function (connection, collections, cb) { + registerDatastore: function (connection, collections, cb) { cb(null, null); }, define: function (connectionName, collectionName, definition, cb) { @@ -82,7 +82,7 @@ describe('Alter Mode Recovery with buffer attributes', function () { PersonModel = { identity: 'Person', tableName: 'person_table', - connection: 'test_alter', + datastore: 'test_alter', migrate: 'alter', adapter: 'fake', attributes: { @@ -112,9 +112,9 @@ describe('Alter Mode Recovery with buffer attributes', function () { it('should recover data', function (done) { - var PersonCollection = Waterline.Collection.extend(PersonModel); - waterline.loadCollection(PersonCollection); - waterline.initialize({adapters: adapters, connections: connections}, function (err, data) { + var PersonCollection = Waterline.Model.extend(PersonModel); + waterline.registerModel(PersonCollection); + waterline.initialize({adapters: adapters, datastores: connections}, function (err, data) { if (err) { return done(err); } diff --git a/test/unit/adapter/strategy.alter.schema.js b/test/alter-migrations/strategy.alter.schema.js similarity index 78% rename from test/unit/adapter/strategy.alter.schema.js rename to test/alter-migrations/strategy.alter.schema.js index 9d29a13ee..dc99fe581 100644 --- a/test/unit/adapter/strategy.alter.schema.js +++ b/test/alter-migrations/strategy.alter.schema.js @@ -1,8 +1,9 @@ -var Waterline = require('../../../lib/waterline'); var assert = require('assert'); -var _ = require('lodash'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../lib/waterline'); +var MigrateHelper = require('../support/migrate.helper'); -describe('Alter Mode Recovery with an enforced schema', function () { +describe.skip('Alter Mode Recovery with an enforced schema', function () { var record; @@ -18,7 +19,7 @@ describe('Alter Mode Recovery with an enforced schema', function () { }]; var adapter = { - registerConnection: function (connection, collections, cb) { + 
registerDatastore: function (connection, collections, cb) { cb(null, null); }, define: function (connectionName, collectionName, definition, cb) { @@ -34,7 +35,6 @@ describe('Alter Mode Recovery with an enforced schema', function () { cb(null, (persistentData.length === 1) ? schema : undefined); }, find: function (connectionName, collectionName, options, cb, connection) { - if(!options.select && !options.where) { return cb(null, persistentData); } @@ -44,8 +44,9 @@ describe('Alter Mode Recovery with an enforced schema', function () { results = persistentData; } else { - results = _.find(persistentData, options.where); + results = _.filter(persistentData, options.where); } + // Psuedo support for select (needed to act like a real adapter) if(options.select && _.isArray(options.select) && options.select.length) { @@ -76,7 +77,7 @@ describe('Alter Mode Recovery with an enforced schema', function () { var PersonModel = { identity: 'Person', tableName: 'person_table', - connection: 'test_alter', + datastore: 'test_alter', migrate: 'alter', adapter: 'fake', schema: true, @@ -98,14 +99,17 @@ describe('Alter Mode Recovery with an enforced schema', function () { var adapters = {fake: adapter}; // Build the collections and find the record - var PersonCollection = Waterline.Collection.extend(PersonModel); - waterline.loadCollection(PersonCollection); - waterline.initialize({adapters: adapters, connections: connections}, function (err, data) { + var PersonCollection = Waterline.Model.extend(PersonModel); + waterline.registerModel(PersonCollection); + waterline.initialize({adapters: adapters, datastores: connections}, function (err, data) { if (err) return done(err); - data.collections.person.findOne({id: 1}, function (err, found) { - if (err) return done(err); - record = found; - done(); + + MigrateHelper(data, function(err) { + data.collections.person.findOne({id: 1}, function (err, found) { + if (err) return done(err); + record = found; + done(); + }); }); }); }); diff --git 
a/test/unit/adapter/strategy.alter.schemaless.js b/test/alter-migrations/strategy.alter.schemaless.js similarity index 75% rename from test/unit/adapter/strategy.alter.schemaless.js rename to test/alter-migrations/strategy.alter.schemaless.js index 4e4eb1ff5..1cc37a595 100644 --- a/test/unit/adapter/strategy.alter.schemaless.js +++ b/test/alter-migrations/strategy.alter.schemaless.js @@ -1,8 +1,9 @@ -var Waterline = require('../../../lib/waterline'); var assert = require('assert'); -var _ = require('lodash'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../lib/waterline'); +var MigrateHelper = require('../support/migrate.helper'); -describe('Alter Mode Recovery with schemaless data', function () { +describe.skip('Alter Mode Recovery with schemaless data', function () { var record; @@ -18,7 +19,7 @@ describe('Alter Mode Recovery with schemaless data', function () { }]; var adapter = { - registerConnection: function (connection, collections, cb) { + registerDatastore: function (connection, collections, cb) { cb(null, null); }, define: function (connectionName, collectionName, definition, cb) { @@ -44,7 +45,7 @@ describe('Alter Mode Recovery with schemaless data', function () { results = persistentData; } else { - results = _.find(persistentData, options.where); + results = _.filter(persistentData, options.where); } // Psuedo support for select (needed to act like a real adapter) if(options.select) { @@ -75,7 +76,7 @@ describe('Alter Mode Recovery with schemaless data', function () { var PersonModel = { identity: 'Person', tableName: 'person_table', - connection: 'test_alter', + datastore: 'test_alter', migrate: 'alter', adapter: 'fake', schema: false, @@ -97,14 +98,17 @@ describe('Alter Mode Recovery with schemaless data', function () { var adapters = {fake: adapter}; // Build the collections and find the record - var PersonCollection = Waterline.Collection.extend(PersonModel); - waterline.loadCollection(PersonCollection); - 
waterline.initialize({adapters: adapters, connections: connections}, function (err, data) { + var PersonCollection = Waterline.Model.extend(PersonModel); + waterline.registerModel(PersonCollection); + waterline.initialize({adapters: adapters, datastores: connections}, function (err, data) { if (err) return done(err); - data.collections.person.findOne({id: 1}, function (err, found) { - if (err) return done(err); - record = found; - done(); + + MigrateHelper(data, function(err) { + data.collections.person.findOne({id: 1}, function (err, found) { + if (err) return done(err); + record = found; + done(); + }); }); }); }); @@ -120,9 +124,8 @@ describe('Alter Mode Recovery with schemaless data', function () { assert.equal(record.age, 50); }); - it('should include the attributes NOT in the schema', function() { + it.skip('should include the attributes NOT in the schema', function() { assert.equal(record.car, 'batmobile'); }); }); - diff --git a/test/integration/Collection.adapter.handlers.js b/test/integration/Collection.adapter.handlers.js deleted file mode 100644 index 2288e8f1b..000000000 --- a/test/integration/Collection.adapter.handlers.js +++ /dev/null @@ -1,169 +0,0 @@ -/** - * Module dependencies - */ -var assert = require('assert'), - should = require('should'), - util = require('util'), - _ = require('lodash'); - - -// Helpers/suites -var bootstrapCollection = require('./helpers/Collection.bootstrap'); -var test = { - adapterMethod: require('./helpers/adapterMethod.helper.js') -}; -var expect = require('./helpers/cb.helper.js'); - -describe('Waterline Collection', function() { - - describe(':: error negotiation & handlers ::', function() { - - // Bootstrap a collection - before(bootstrapCollection({ - adapter: require('./fixtures/adapter.withHandlers.fixture') - })); - - // Vocabulary methods should upgrade callbacks to handlers - - var dummyValues = {}; - _.each({ - find: {}, - create: {}, - update: { - extraArgs: [dummyValues] - }, - destroy: {} - }, - function 
eachMethod(testOpts, methodName) { - - // We simulate different types of cb/handler usage by sneaking a property - // into the first argument. - var SIMULATE = { - CB: { - 'err': [{ - _simulate: 'traditionalError' - }], - '': [{ - _simulate: 'traditionalSuccess' - }] - }, - ERROR: { - 'err': [{ - _simulate: 'error' - }], - '': [{ - _simulate: 'anonError' - }] - }, - INVALID: { - 'err': [{ - _simulate: 'invalid' - }], - '': [{ - _simulate: 'anonInvalid' - }] - }, - SUCCESS: { - 'err': [{ - _simulate: 'success' - }], - '': [{ - _simulate: 'anonSuccess' - }] - } - }; - - function _mixinExtraArgs(firstArg) { - return firstArg.concat(testOpts.extraArgs || []); - } - SIMULATE = _.mapValues(SIMULATE, function(group) { - return _.mapValues(group, _mixinExtraArgs); - }); - - // Test all the different usages on the adapter side: - function testAdapterUsage(style) { - - // Adapter invokes callback - test.adapterMethod(methodName) - .usage.apply(test, SIMULATE.CB['err']) - .expect(style === 'cb' ? expect.cbHasErr : expect.errorHandler) - .callbackStyle(style) - .inspect(); - test.adapterMethod(methodName) - .usage.apply(test, SIMULATE.CB['']) - .expect(style === 'cb' ? expect.cbHasNoErr : expect.successHandler) - .callbackStyle(style) - .inspect(); - - // Adapter invokes error handler - test.adapterMethod(methodName) - .usage.apply(test, SIMULATE.ERROR['err']) - .expect(style === 'cb' ? expect.cbHasErr : expect.errorHandler) - .callbackStyle(style) - .inspect(); - test.adapterMethod(methodName) - .usage.apply(test, SIMULATE.ERROR['']) - .expect(style === 'cb' ? expect.cbHasErr : expect.errorHandler) - .callbackStyle(style) - .inspect(); - - // Adapter invokes invalid handler - test.adapterMethod(methodName) - .usage.apply(test, SIMULATE.INVALID['err']) - .expect(style === 'cb' ? expect.cbHasErr : expect.errorHandler) - .callbackStyle(style) - .inspect(); - test.adapterMethod(methodName) - .usage.apply(test, SIMULATE.INVALID['']) - .expect(style === 'cb' ? 
expect.cbHasErr : expect.errorHandler) - .callbackStyle(style) - .inspect(); - - // Adapter invokes success handler - test.adapterMethod(methodName) - .usage.apply(test, SIMULATE.SUCCESS['err']) - .expect(style === 'cb' ? expect.cbHasNoErr : expect.successHandler) - .callbackStyle(style) - .inspect(); - test.adapterMethod(methodName) - .usage.apply(test, SIMULATE.SUCCESS['']) - .expect(style === 'cb' ? expect.cbHasNoErr : expect.successHandler) - .callbackStyle(style) - .inspect(); - } - - // Test the different usages on the app side: - testAdapterUsage('cb'); - testAdapterUsage('handlers'); - - }); - - - - // Methods of dummy custom adapter methods do exactly what you would expect - // based on their names. Usage signature is: `Foo.bar(options, callback)` - - describe('custom methods', function() { - - // Custom methods should still work - it('should have the expected methods for use in our test', function() { - this.SomeCollection.traditionalError.should.be.a.Function; - this.SomeCollection.traditionalSuccess.should.be.a.Function; - }); - - var dummyOptions = {}; - test.adapterMethod('traditionalError') - .usage(dummyOptions) - .expect(expect.cbHasErr) - .callbackStyle('cb') - .inspect(); - - test.adapterMethod('traditionalSuccess') - .usage(dummyOptions) - .expect(expect.cbHasNoErr) - .callbackStyle('cb') - .inspect(); - }); - }); - -}); diff --git a/test/integration/Collection.adapter.nonCRUD.js b/test/integration/Collection.adapter.nonCRUD.js deleted file mode 100644 index 4650a1b99..000000000 --- a/test/integration/Collection.adapter.nonCRUD.js +++ /dev/null @@ -1,43 +0,0 @@ -var Waterline = require('../../lib/waterline'), - adapter = require('./fixtures/adapter.special.fixture'), - assert = require('assert'); - -describe('Waterline Collection', function() { - var User; - - before(function(done) { - var Model = Waterline.Collection.extend({ - attributes: {}, - connection: 'my_foo', - tableName: 'tests' - }); - - var waterline = new Waterline(); - 
waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapter }, connections: connections }, function(err, colls) { - if(err) return done(err); - User = colls.collections.tests; - done(); - }); - }); - - describe('methods', function() { - - it('should have a foobar method', function(done) { - assert(typeof User.foobar === 'function'); - - User.foobar({}, function(err, result) { - assert(result.status === true); - done(); - }); - }); - - }); -}); diff --git a/test/integration/Collection.attributes.js b/test/integration/Collection.attributes.js deleted file mode 100644 index 6344fb407..000000000 --- a/test/integration/Collection.attributes.js +++ /dev/null @@ -1,131 +0,0 @@ -var Waterline = require('../../lib/waterline'), - assert = require('assert'); - -describe('Waterline Collection', function() { - - describe('basic fixture', function() { - var waterline = new Waterline(), - Model = require('./fixtures/model.fixture'), - User; - - before(function(done) { - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - User = colls.collections.test; - done(); - }); - }); - - describe('schema', function() { - - it('should create an internal schema from the attributes', function() { - assert(typeof User._schema.schema === 'object'); - assert(Object.keys(User._schema.schema).length === 8); // account for auto created keys (pk, timestamps) - }); - - // TO-DO - // Check all schema properties from Sails work - - }); - - describe('validations', function() { - - it('should create an internal validation object from the attributes', function() { - assert(typeof User._validator.validations === 'object'); - assert(Object.keys(User._validator.validations).length === 5); - }); - - // TO-DO - // Validate properties using 
Anchor with the Validator in waterline - - }); - - }); - - describe('custom fixtures', function() { - - describe('lowercase type', function() { - var waterline = new Waterline(), - User; - - before(function(done) { - var Model = Waterline.Collection.extend({ - tableName: 'lowercaseType', - connection: 'my_foo', - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - User = colls.collections.lowercasetype; - done(); - }); - }); - - it('should set the proper schema type', function() { - assert(User._schema.schema.name.type === 'string'); - }); - - it('should set the proper validation type', function() { - assert(User._validator.validations.name.type === 'string'); - }); - }); - - describe('uppercase type', function() { - var waterline = new Waterline(), - User; - - before(function(done) { - var Model = Waterline.Collection.extend({ - tableName: 'uppercaseType', - connection: 'my_foo', - attributes: { - name: 'STRING' - } - }); - - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - User = colls.collections.uppercasetype; - done(); - }); - }); - - it('should set the proper schema', function() { - assert(User._schema.schema.name.type === 'string'); - }); - - it('should set the proper validation type', function() { - assert(User._validator.validations.name.type === 'string'); - }); - }); - - }); - -}); diff --git a/test/integration/Collection.ddl.js b/test/integration/Collection.ddl.js deleted file mode 100644 index 00d6869c7..000000000 --- a/test/integration/Collection.ddl.js +++ /dev/null @@ -1,118 +0,0 @@ - -var should = require('should'); -var bootstrapCollection = 
require('./helpers/Collection.bootstrap'); -var Adapter = require('./fixtures/adapter.withHandlers.fixture'); - - - -describe('calling describe', function() { - - var Collection; - - before(function(done) { - - bootstrapCollection({ - adapter: Adapter, - properties: { - attributes: { - name: 'string', - age: 'integer' - } - } - })(function (err) { - if (err) return done(err); - - this.ocean.should.be.an.Object; // access to all connections + collections - this.ocean.connections.my_foo.should.be.an.Object;// a connection - this.ocean.collections.tests.should.be.an.Object;// a collection called `tests` - this.SomeCollection.should.be.an.Object; // same as `tests`, for convenience - - this.SomeCollection.attributes - .should.be.an.Object; - this.SomeCollection.attributes - .should.have.property('name'); - this.SomeCollection.attributes - .should.have.property('age'); - - Collection = this.SomeCollection; - - done(); - }); - - }); - - it('should work', function (done) { - Collection.describe({ - success: function ( schema ) { - - schema - .should.be.an.Object; - schema - .should.have.property('name'); - schema - .should.have.property('age'); - - done(); - } - }); - }); - -}); - - - - -describe('calling drop', function() { - - var Collection; - - before(function(done) { - - bootstrapCollection({ - adapter: Adapter, - properties: { - identity: 'tests', - attributes: { - name: 'string', - age: 'integer' - } - } - })(function (err) { - if (err) return done(err); - - this.ocean.should.be.an.Object; // access to all connections + collections - this.ocean.connections.my_foo.should.be.an.Object;// a connection - this.ocean.collections.tests.should.be.an.Object;// a collection called `tests` - this.SomeCollection.should.be.an.Object; // same as `tests`, for convenience - - this.SomeCollection.attributes - .should.be.an.Object; - this.SomeCollection.attributes - .should.have.property('name'); - this.SomeCollection.attributes - .should.have.property('age'); - - Collection = 
this.SomeCollection; - - done(); - }); - - }); - - it('should work', function (done) { - Collection.drop(function (err ) { - if (err) return done(err); - - // Verify that the collection is actually gone: - Collection.describe({ - success: function (schema) { - should(schema).not.be.ok; - done(); - } - }); - }); - }); - -}); - - diff --git a/test/integration/Collection.identity.js b/test/integration/Collection.identity.js deleted file mode 100644 index 62563c0b1..000000000 --- a/test/integration/Collection.identity.js +++ /dev/null @@ -1,76 +0,0 @@ -var Waterline = require('../../lib/waterline'), - assert = require('assert'); - -describe('Waterline Collection', function() { - - describe('normalizing tableName to identity', function() { - var waterline = new Waterline(), - User; - - before(function(done) { - var Model = Waterline.Collection.extend({ - tableName: 'foo', - connection: 'my_foo', - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - - if(err) return done(err); - User = colls.collections.foo; - done(); - }); - }); - - it('should have identity set', function() { - assert(User.identity === 'foo'); - }); - }); - - describe('with identity and tableName', function() { - var waterline = new Waterline(), - User; - - before(function(done) { - var Model = Waterline.Collection.extend({ - identity: 'foobar', - tableName: 'foo', - connection: 'my_foo', - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - - if(err) return done(err); - User = colls.collections.foobar; - done(); - }); - }); - - it('should have identity set', function() { - assert(User.identity === 'foobar'); - 
assert(User.tableName === 'foo'); - }); - }); - -}); diff --git a/test/integration/Collection.multipleAdapters.js b/test/integration/Collection.multipleAdapters.js deleted file mode 100644 index 2385f7900..000000000 --- a/test/integration/Collection.multipleAdapters.js +++ /dev/null @@ -1,83 +0,0 @@ -var Waterline = require('../../lib/waterline'); -var assert = require('assert'); -var _ = require('lodash'); - -describe('Waterline Collection', function() { - var User; - var status = 0; - var adapter_1 = { - identity: 'foo', - registerConnection: function(connection, collections, cb) { - status++; - cb(); - }, - baseMethod: function () { - return 'base foo'; - } - }; - - var adapter_2 = { - identity: 'bar', - registerConnection: function(connection, collections, cb) { - status++; - cb(); - }, - baseMethod: function () { - return 'base bar'; - }, - customMethod: function () { - return 'custom bar' - } - }; - var Model = Waterline.Collection.extend({ - attributes: {}, - connection: ['my_foo', 'my_bar'], - tableName: 'tests' - }); - - before(function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foo' - }, - 'my_bar': { - adapter: 'bar' - } - }; - - waterline.initialize({ - adapters: { - 'foo': adapter_1, - 'bar': adapter_2 - }, - connections: connections - }, - function(err, colls) { - if (err) return done(err); - User = colls.collections.tests; - done(); - } - ); - }); - - describe('multiple adapters', function() { - - it('should call registerCollection on all adapters', function() { - assert.equal(status, 2); - }); - - it('should expose an adapter\'s custom methods', function () { - assert(_.isFunction(User.customMethod)); - assert.equal(User.customMethod(), 'custom bar'); - }); - - it('should give precedence to adapters earlier in the list', function () { - assert(_.isFunction(User.baseMethod)); - assert.equal(User.baseMethod(), 'base foo'); - }); - - }); -}); diff --git 
a/test/integration/Collection.transformer.js b/test/integration/Collection.transformer.js deleted file mode 100644 index d4beec816..000000000 --- a/test/integration/Collection.transformer.js +++ /dev/null @@ -1,42 +0,0 @@ -var Waterline = require('../../lib/waterline'), - assert = require('assert'); - -describe('Waterline Collection', function() { - - describe('with custom column name', function() { - var waterline = new Waterline(), - User; - - before(function(done) { - var Model = Waterline.Collection.extend({ - tableName: 'foo', - connection: 'my_foo', - attributes: { - name: { - type: 'string', - columnName: 'full_name' - } - } - }); - - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - User = colls.collections.foo; - done(); - }); - }); - - it('should build a transformer object', function() { - assert(User._transformer._transformations.name === 'full_name'); - }); - }); - -}); diff --git a/test/integration/Collection.validations.js b/test/integration/Collection.validations.js deleted file mode 100644 index cb947f3d2..000000000 --- a/test/integration/Collection.validations.js +++ /dev/null @@ -1,132 +0,0 @@ -var Waterline = require('../../lib/waterline'), - assert = require('assert'); - -describe('Waterline Collection', function() { - - describe('validations', function() { - var waterline = new Waterline(), - User; - - before(function(done) { - - // Extend for testing purposes - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'my_foo', - types: { - password: function(val) { - return val === this.passwordConfirmation; - } - }, - attributes: { - name: { - type: 'string', - required: true - }, - - email: { - type: 'email' - }, - - sex: { - type: 'string', - enum: ['male', 'female'] - }, - - username: { - type: 'string', - contains: function() { - return 
this.name; - } - }, - - password: { - type: 'password' - } - } - }); - - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - User = colls.collections.user; - done(); - }); - }); - - it('should work with valid data', function(done) { - User.create({ name: 'foo bar', email: 'foobar@gmail.com'}, function(err, user) { - assert(!err); - done(); - }); - }); - - it('should error with invalid data', function(done) { - User.create({ name: '', email: 'foobar@gmail.com'}, function(err, user) { - assert(!user); - assert(err.ValidationError); - assert(err.ValidationError.name[0].rule === 'required'); - done(); - }); - }); - - it('should support valid enums on strings', function(done) { - User.create({ name: 'foo', sex: 'male' }, function(err, user) { - assert(!err); - assert(user.sex === 'male'); - done(); - }); - }); - - it('should error with invalid enums on strings', function(done) { - User.create({ name: 'foo', sex: 'other' }, function(err, user) { - assert(!user); - assert(err.ValidationError); - assert(err.ValidationError.sex[0].rule === 'in'); - done(); - }); - }); - - it('should work with valid username', function(done) { - User.create({ name: 'foo', username: 'foozball_dude' }, function(err, user) { - assert(!err); - done(); - }); - }); - - it('should error with invalid username', function(done) { - User.create({ name: 'foo', username: 'baseball_dude' }, function(err, user) { - assert(!user); - assert(err.ValidationError); - assert(err.ValidationError.username[0].rule === 'contains'); - done(); - }); - }); - - it('should support custom type functions with the model\'s context', function(done) { - User.create({ name: 'foo', sex: 'male', password: 'passW0rd', 
passwordConfirmation: 'passW0rd' }, function(err, user) { - assert(!err); - done(); - }); - }); - - it('should error with invalid input for custom type', function(done) { - User.create({ name: 'foo', sex: 'male', password: 'passW0rd' }, function(err, user) { - assert(!user); - assert(err.ValidationError); - assert(err.ValidationError.password[0].rule === 'password'); - done(); - }); - }); - - }); -}); diff --git a/test/integration/_boilerplate.test.js b/test/integration/_boilerplate.test.js deleted file mode 100644 index a4c7f5a17..000000000 --- a/test/integration/_boilerplate.test.js +++ /dev/null @@ -1,21 +0,0 @@ -var bootstrapCollection = require('./helpers/Collection.bootstrap'); -var Adapter = require('./fixtures/adapter.withHandlers.fixture'); - - -describe('something to test', function () { - - before(bootstrapCollection({ - adapter: Adapter - })); - - it('should not throw', function () { - - this.ocean.should.be.an.Object; // access to all connections + collections - this.ocean.connections.my_foo.should.be.an.Object;// a connection - this.ocean.collections.tests.should.be.an.Object;// a collection called `tests` - this.SomeCollection.should.be.an.Object; // same as `tests`, for convenience - }); - - // more tests here - -}); \ No newline at end of file diff --git a/test/integration/fixtures/adapter.special.fixture.js b/test/integration/fixtures/adapter.special.fixture.js deleted file mode 100644 index fa154c9ef..000000000 --- a/test/integration/fixtures/adapter.special.fixture.js +++ /dev/null @@ -1,13 +0,0 @@ -/** - * Test Non-Standard, (Non CRUD) adapter - */ - -module.exports = { - - identity: 'foobar', - - foobar: function(connectionName, collectionName, options, cb) { - return cb(null, { status: true }); - } - -}; diff --git a/test/integration/fixtures/adapter.withHandlers.fixture.js b/test/integration/fixtures/adapter.withHandlers.fixture.js deleted file mode 100644 index 7f9d370c5..000000000 --- 
a/test/integration/fixtures/adapter.withHandlers.fixture.js +++ /dev/null @@ -1,154 +0,0 @@ -/** - * Module dependencies - */ -var _ = require('lodash'); - - - - -// Keeps track of registered collections -var _colls = {}; - - -/** - * Test Adapter Which Uses Handlers - */ -module.exports = { - - // Waterline Vocabulary Methods - // - // (supports automatic switching for handlers since we know the fn signature) - // - // The tests work by passing a `_simulate` option as a property to the first argument, - // which might be `options` or `values`. If `options`, it's a criteria, so we have to - // check the `where` since it's being automatically normalized in Waterline core. - find: function (conn, cid, options, cb) { - // console.log('IN FIND::', require('util').inspect(arguments)); - return _interpretUsageTest(options.where && options.where._simulate, cb); - }, - create: function (conn, cid, values, cb) { - return _interpretUsageTest(values._simulate, cb); - }, - update: function (conn, cid, options, values, cb) { - return _interpretUsageTest(options.where && options.where._simulate, cb); - }, - destroy: function (conn, cid, options, cb) { - return _interpretUsageTest(options.where && options.where._simulate, cb); - }, - - - // DDL Methods - // - describe: function (conn, cid, cb) { - cb(null, _colls[cid]); - }, - - define: function (conn, cid, definition, cb) { - _colls[cid] = definition; - cb(); - }, - - addAttribute: function (conn, cid, attrName, attrDef, cb) { - try { - _colls[cid].definition[attrName] = attrDef; - } - catch (e) { return cb(e); } - - cb(); - }, - - removeAttribute: function (conn, cid, attrName, cb) { - try { - delete _colls[cid].definition[attrName]; - } - catch (e) { return cb(e); } - - cb(); - }, - - drop: function (conn, cid, relations, cb) { - try { - delete _colls[cid]; - } - catch (e) { return cb(e); } - - cb(); - }, - - - // Lifecycle - // - registerConnection: function (con, collections, cb) { - _.extend(_colls, collections); - cb(); - 
}, - - - - // Custom Methods - // - // (automatic switching is not enabled since we don't know the fn signature) - traditionalError: function(conn, cid, options, cb) { - return cb(new Error('oops')); - }, - - traditionalSuccess: function(conn, cid, options, cb) { - return cb(null, [{ someResults: [] }]); - }, - - - // Future: - // convention of (options, cb) would allow us to further normalize usage - // Right now, the commented-out ones wouldn't work out of the box. - - // error: function(cid, options, cb) { - // return cb.error(new Error('oops')); - // }, - - // anonError: function(cid, options, cb) { - // return cb.error(); - // }, - - // invalid: function(cid, options, cb) { - // return cb.invalid(new Error('oops')); - // }, - - // anonInvalid: function(cid, options, cb) { - // return cb.error(); - // }, - - // success: function(cid, options, cb) { - // return cb.success([{ someResults: [] }]); - // }, - - // anonSuccess: function(cid, options, cb) { - // return cb.error(); - // } - - -}; - - - - -/** - * @param {String} usageCode - * @param {Function || Object} cb - */ -function _interpretUsageTest(usageCode, cb) { - switch (usageCode) { - case 'traditionalError': return cb(new Error('oops')); - case 'traditionalSuccess': return cb(null, [{ someResults: [] }]); - - case 'error': return cb.error(new Error('oops')); - case 'anonError': return cb.error(); - - case 'invalid': return cb.invalid(new Error('oops')); - case 'anonInvalid': return cb.invalid(); - - case 'success': return cb.success([{ someResults: [] }]); - case 'anonSuccess': return cb.success(); - - default: return cb(null, [{ someResults: [] }]); - } -} diff --git a/test/integration/fixtures/model.fixture.js b/test/integration/fixtures/model.fixture.js deleted file mode 100644 index 257bae313..000000000 --- a/test/integration/fixtures/model.fixture.js +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Example User Model - * - */ - -var Waterline = require('../../../lib/waterline'); - -module.exports = 
Waterline.Collection.extend({ - - tableName: 'test', - connection: 'my_foo', - - attributes: { - first_name: { - type: 'string', - length: { min: 5 }, - required: true - }, - - last_name: { - type: 'string', - length: { min: 5 }, - required: true - }, - - username: { - type: 'string', - length: { min: 2, max: 20 }, - unique: true, - required: true - }, - - email: { - type: 'email', - unique: true, - required: true - }, - - phone_number: { - type: 'string', - defaultsTo: '555-555-555' - }, - - full_name: function() { - return this.first_name + ' ' + this.last_name; - } - } - -}); diff --git a/test/integration/helpers/Collection.bootstrap.js b/test/integration/helpers/Collection.bootstrap.js deleted file mode 100644 index 49dc4c6dc..000000000 --- a/test/integration/helpers/Collection.bootstrap.js +++ /dev/null @@ -1,52 +0,0 @@ -/** - * Module Dependencies - */ -var Waterline = require('../../../lib/waterline'); -var _ = require('lodash'); - -/** - * @option {Adapter} adapter - * @return {Function} helper method to bootstrap a collection using the specified adapter - */ -module.exports = function (options) { - - /** - * @param {Function} done [description] - */ - return function(done) { - var self = this; - - var adapterIdentity = 'barbaz'; - options.adapter.identity = adapterIdentity; - - var Model = Waterline.Collection.extend( - _.merge({ - attributes: {}, - connection: 'my_foo', - tableName: 'tests', - schema: false - }, options.properties || {}) - ); - - var waterline = new Waterline(); - waterline.loadCollection(Model); - - var connections = { - 'my_foo': { - adapter: adapterIdentity - } - }; - - waterline.initialize({ adapters: { barbaz: options.adapter }, connections: connections }, function(err, ocean) { - if (err) return done(err); - - // Save access to all collections + connections - self.ocean = ocean; - - // expose global? 
- SomeCollection = ocean.collections.tests; - self.SomeCollection = SomeCollection; - done(); - }); - }; -}; diff --git a/test/integration/helpers/adapterMethod.helper.js b/test/integration/helpers/adapterMethod.helper.js deleted file mode 100644 index 358325007..000000000 --- a/test/integration/helpers/adapterMethod.helper.js +++ /dev/null @@ -1,182 +0,0 @@ -/** - * Module dependencies - */ -var assert = require('assert'), - should = require('should'), - util = require('util'), - _ = require('lodash'); - - -/** - * Helper class for more literate asynchronous tests. - * @param {Object} config - */ -var Deferred = function (config) { - - var deferred = this; - - var state = { - expectations: [] - }; - - - - var _run = function ( ) { - - // Generate a better default test message - var prettyUsage = ''; - prettyUsage += '.' +config.nameOfMethod + '('; - prettyUsage += (_.map(state.usage, function (arg){ return util.inspect(arg); })).join(','); - prettyUsage += ')'; - state.testMsg = state.testMsg || prettyUsage; - - describe(state.testMsg, function () { - - // Simulates a call like :: `SomeCollection.nameOfMethod( options, cb )` - before(function (done){ - - var mochaCtx = this; - - // Decide the fn, args, and `this` value (ctx) - var fn = mochaCtx.SomeCollection[config.nameOfMethod]; - var ctx = mochaCtx.SomeCollection; - var args = state.usage || []; - - - if ( !state.useHandlers ) { - - // console.log('Doing: ', config.nameOfMethod, 'with args:',args); - // Add callback as final argument - // fn.apply(ctx,args.concat([function adapterFnCallback () { - // // console.log('result args::',arguments); - // mochaCtx.resultArgs = Array.prototype.slice.call(arguments); - // return done(); - // }])); - // - fn.apply(ctx,args.concat([function adapterFnCallback () { - mochaCtx.resultArgs = Array.prototype.slice.call(arguments); - // console.log('!);'); - return done(); - }])); - - return; - } - - else { - - // console.log('WITH HANDLERS!! 
Doing:', config.nameOfMethod, 'with args:',args); - // console.log('fn::',fn); - - // Or use handlers instead - fn.apply(ctx, args).exec({ - success: function (){ - // console.log('SUCCESS HANDLER'); - mochaCtx.resultArgs = Array.prototype.slice.call(arguments); - mochaCtx.handlerName = 'success'; - return done(); - }, - error: function (){ - // console.log('ERROR HANDLER'); - mochaCtx.resultArgs = Array.prototype.slice.call(arguments); - mochaCtx.handlerName = 'error'; - return done(); - }, - invalid: function (){ - // console.log('INVALID HANDLER'); - mochaCtx.resultArgs = Array.prototype.slice.call(arguments); - mochaCtx.handlerName = 'invalid'; - return done(); - } - }); - - return; - } - - - - }); - - - // Run explicit describe function if specified - if (state.mochaDescribeFn) { - state.mochaDescribeFn(); - } - - // Otherwise check expectations - else { - _.each(state.expectations, function (expectFn) { - expectFn(); - }); - } - }); - - }; - - - /** - * @return {Deferred} [chainable] - */ - this.callbackStyle = function (style) { - state.useHandlers = style !== 'cb'; - return deferred; - }; - - - /** - * @param {String} testMsg [optional override] - * @param {Function} mochaDescribeFn [optional override] - * @return {Deferred} [chainable] - */ - this.inspect = function ( /* [testMsg], mochaDescribeFn */ ) { - - var testMsg = typeof arguments[0] === 'string' ? arguments[0] : ''; - if (testMsg) { - state.testMsg = testMsg; - } - - var mochaDescribeFn = typeof arguments[0] !== 'string' ? arguments[0] : arguments[1]; - if (mochaDescribeFn) { - state.mochaDescribeFn = mochaDescribeFn; - } - - _run(); - return deferred; - }; - - - - /** - * Save specified arguments as the usage of the function we're testing. 
- * @return {Deferred} [chainable] - */ - this.usage = function () { - state.usage = Array.prototype.slice.call(arguments) || []; - return deferred; - }; - - - /** - * @param {Function} fn [function to test] - * @return {Deferred} [chainable] - */ - this.expect = function (fn) { - state.expectations.push(fn); - return deferred; - }; -}; - -// Deferred object allows chained usage, i.e.: -// adapterMethod(foo).inspect(mochaDescribeFn) -function adapterMethod (nameOfMethod) { - return new Deferred({ - nameOfMethod: nameOfMethod - }); -} - - -/** - * Test an adapter method - * @type {Function} - */ -module.exports = adapterMethod; - diff --git a/test/integration/helpers/cb.helper.js b/test/integration/helpers/cb.helper.js deleted file mode 100644 index a0520bf89..000000000 --- a/test/integration/helpers/cb.helper.js +++ /dev/null @@ -1,41 +0,0 @@ -/** - * Module dependencies - */ -var assert = require('assert'), - should = require('should'), - util = require('util'), - _ = require('lodash'); - - -module.exports = { - cbHasErr: function (shouldMsg) { - it(shouldMsg || 'should provide conventional error arg to caller cb', function () { - var err = this.resultArgs[0]; - assert(err, 'Error argument should be present.'); - }); - }, - cbHasNoErr: function (shouldMsg) { - it(shouldMsg || 'should provide NO error arg to caller cb', function () { - var err = this.resultArgs[0]; - assert(!err, 'Error argument should NOT be present- but it was:\n' + util.inspect(err)); - }); - }, - - errorHandler: function (shouldMsg) { - it(shouldMsg || 'should trigger the `error` handler', function () { - should(this.handlerName).equal('error'); - }); - }, - - invalidHandler: function (shouldMsg) { - it(shouldMsg || 'should trigger the `invalid` handler', function () { - should(this.handlerName).equal('invalid'); - }); - }, - - successHandler: function (shouldMsg) { - it(shouldMsg || 'should trigger the `success` handler', function () { - should(this.handlerName).equal('success'); - }); - } -}; 
diff --git a/test/integration/model/association.add.hasMany.id.js b/test/integration/model/association.add.hasMany.id.js deleted file mode 100644 index 6ef88f187..000000000 --- a/test/integration/model/association.add.hasMany.id.js +++ /dev/null @@ -1,100 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('associations hasMany', function() { - describe('.add() with an id', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collections = {}; - var prefValues = []; - - before(function(done) { - var waterline = new Waterline(); - - var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - preferences: { - collection: 'preference', - via: 'user' - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - foo: 'string', - user: { - model: 'person' - } - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [{ id: 1 }, { id: 2 }]; - - var adapterDef = { - find: function(con, col, criteria, cb) { - if(col === 'person') return cb(null, _values); - cb(null, []); - }, - update: function(con, col, criteria, values, cb) { - if(col === 'preference') { - prefValues.push(values); - } - - return cb(null, values); - } - }; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collections = colls.collections; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass foreign key values to update method for each relationship', function(done) { - 
collections.person.find().exec(function(err, models) { - if(err) return done(err); - - var person = models[0]; - - person.preferences.add(1); - person.preferences.add(2); - - person.save(function(err) { - if(err) return done(err); - - assert(prefValues.length === 2); - assert(prefValues[0].user === 1); - assert(prefValues[1].user === 1); - - done(); - }); - }); - }); - - }); - }); -}); diff --git a/test/integration/model/association.add.hasMany.object.js b/test/integration/model/association.add.hasMany.object.js deleted file mode 100644 index 75b3359d2..000000000 --- a/test/integration/model/association.add.hasMany.object.js +++ /dev/null @@ -1,97 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('associations hasMany', function() { - describe('.add() with an object', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collections = {}; - var fooValues = []; - - before(function(done) { - var waterline = new Waterline(); - - var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - preferences: { - collection: 'preference', - via: 'user' - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - foo: 'string', - user: { - model: 'person' - } - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [ - { id: 1, preference: [{ foo: 'bar' }, { foo: 'foobar' }] }, - { id: 2, preference: [{ foo: 'a' }, { foo: 'b' }] }, - ]; - - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, _values); }, - create: function(con, col, values, cb) { - fooValues.push(values.foo); - return cb(null, values); - }, - update: function(con, col, criteria, values, cb) { return cb(null, values); } - }; - - var connections = 
{ - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collections = colls.collections; - done(); - }); - }); - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass model values to create method for each relationship', function(done) { - collections.person.find().exec(function(err, models) { - if(err) return done(err); - - var person = models[0]; - - person.preferences.add({ foo: 'foo' }); - person.preferences.add({ foo: 'bar' }); - - person.save(function(err) { - if(err) return done(err); - - assert(fooValues.length === 2); - assert(fooValues[0] === 'foo'); - assert(fooValues[1] === 'bar'); - - done(); - }); - }); - }); - - }); - }); -}); diff --git a/test/integration/model/association.add.manyToMany.id.js b/test/integration/model/association.add.manyToMany.id.js deleted file mode 100644 index c484887fc..000000000 --- a/test/integration/model/association.add.manyToMany.id.js +++ /dev/null @@ -1,112 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('associations Many To Many', function() { - describe('.add() with an id', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collections = {}; - var prefValues = []; - - before(function(done) { - var waterline = new Waterline(); - - var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - preferences: { - collection: 'preference', - via: 'people', - dominant: true - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - foo: 'string', - people: { - collection: 'person', - via: 'preferences' - } - } - 
}); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [ - { id: 1, preference: [{ foo: 'bar' }, { foo: 'foobar' }] }, - { id: 2, preference: [{ foo: 'a' }, { foo: 'b' }] }, - ]; - - var i = 1; - - var adapterDef = { - find: function(con, col, criteria, cb) { - if(col === 'person_preferences__preference_people') return cb(null, []); - cb(null, _values); - }, - update: function(con, col, criteria, values, cb) { - if(col === 'preference') { - prefValues.push(values); - } - - return cb(null, values); - }, - create: function(con, col, values, cb) { - prefValues.push(values); - return cb(null, values); - }, - }; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collections = colls.collections; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass foreign key values to update method for each relationship', function(done) { - collections.person.find().exec(function(err, models) { - if(err) return done(err); - - var person = models[0]; - - person.preferences.add(1); - person.preferences.add(2); - - person.save(function(err) { - if(err) return done(err); - - assert(prefValues.length === 2); - - assert(prefValues[0].preference_people === 1); - assert(prefValues[1].preference_people === 2); - - done(); - }); - }); - }); - - }); - }); -}); diff --git a/test/integration/model/association.add.manyToMany.object.js b/test/integration/model/association.add.manyToMany.object.js deleted file mode 100644 index 260277aa3..000000000 --- a/test/integration/model/association.add.manyToMany.object.js +++ /dev/null @@ -1,122 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('associations Many To 
Many', function() { - describe('.add() with an object', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collections = {}; - var fooValues = []; - - before(function(done) { - var waterline = new Waterline(); - - var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - preferences: { - collection: 'preference', - via: 'people', - dominant: true - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - foo: 'string', - people: { - collection: 'person', - via: 'preferences' - } - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [ - { id: 1, preferences: [{ id: 1, foo: 'bar' }, { id: 2, foo: 'foobar' }] }, - { id: 2, preferences: [{ id: 3, foo: 'a' }, { id: 4, foo: 'b' }] }, - ]; - - var i = 1; - var added = false; - - var adapterDef = { - find: function(con, col, criteria, cb) { - if(col === 'person_preferences__preference_people') { - if(!added) return cb(); - if(criteria === fooValues[0]) return cb(null, fooValues[0]); - return cb(null, []); - } - - return cb(null, _values); - }, - create: function(con, col, values, cb) { - if(col !== 'person_preferences__preference_people') { - values.id = i; - i++; - return cb(null, values); - } - - added = true; - fooValues.push(values); - return cb(null, values); - }, - update: function(con, col, criteria, values, cb) { return cb(null, values); } - }; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collections = colls.collections; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass 
model values to create method for each relationship', function(done) { - collections.person.find().exec(function(err, models) { - if(err) return done(err); - - var person = models[0]; - - person.preferences.add({ foo: 'foo' }); - person.preferences.add({ foo: 'bar' }); - - person.save(function(err) { - if(err) return done(err); - - assert(fooValues.length === 2); - - assert(fooValues[0].person_preferences === 1); - assert(fooValues[0].preference_people === 1); - - assert(fooValues[1].preference_people === 2); - assert(fooValues[1].person_preferences === 1); - - done(); - }); - }); - }); - - }); - }); -}); diff --git a/test/integration/model/association.destroy.manyToMany.js b/test/integration/model/association.destroy.manyToMany.js deleted file mode 100644 index 5515329b0..000000000 --- a/test/integration/model/association.destroy.manyToMany.js +++ /dev/null @@ -1,112 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - _ = require('lodash'), - assert = require('assert'); - -describe('Model', function() { - describe('associations Many To Many', function() { - describe('.destroy()', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collections = {}; - var prefDestroyCall; - - before(function(done) { - var waterline = new Waterline(); - - var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - id : { - primaryKey : true, - columnName : 'CUSTOM_ID' - }, - preferences: { - collection: 'preference', - via: 'people', - dominant: true - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - id : { - primaryKey : true, - columnName : 'CUSTOM_ID' - }, - foo: 'string', - people: { - collection: 'person', - via: 'preferences' - } - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [ - { id: 1, 
preference: [{ foo: 'bar' }, { foo: 'foobar' }] }, - { id: 2, preference: [{ foo: 'a' }, { foo: 'b' }] }, - ]; - - var i = 1; - - var adapterDef = { - find: function(con, col, criteria, cb) { - if(col === 'person_preference') return cb(null, []); - cb(null, _values); - }, - destroy: function(con, col, criteria, cb) { - if(col === 'person_preferences__preference_people') { - prefDestroyCall = criteria; - } - return cb(null, [{ - 'CUSTOM_ID' : 1, - 'preference' : [ { foo: 'bar' }, { foo: 'foobar' } ] - }]); - }, - update: function(con, col, criteria, values, cb) { - return cb(null, values); - }, - create: function(con, col, values, cb) { - return cb(null, values); - }, - }; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collections = colls.collections; - - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should obey column names in many to many destroy', function(done) { - collections.person.destroy(1).exec(function(err, results) { - var expected = { where: { person_preferences: [ 1 ] } } - assert.deepEqual(prefDestroyCall, expected); - done(); - }); - }); - - }); - }); -}); diff --git a/test/integration/model/association.getter.js b/test/integration/model/association.getter.js deleted file mode 100644 index f2ba36280..000000000 --- a/test/integration/model/association.getter.js +++ /dev/null @@ -1,103 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('association', function() { - describe('getter', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collection; - - before(function(done) { - var waterline = new Waterline(); - 
- var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - preferences: { - collection: 'preference', - via: 'user' - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - user: { - model: 'person' - } - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [ - { preferences: [{ foo: 'bar' }, { foo: 'foobar' }] }, - { preferences: [{ foo: 'a' }, { foo: 'b' }] }, - ]; - - var adapterDef = { - identity: 'foo', - join: function(con, col, criteria, cb) { return cb(null, _values); }, - }; - - adapterDef.find = adapterDef.join; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collection = colls.collections.person; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should have a getter for preferences', function(done) { - collection.find().exec(function(err, data) { - if(err) return done(err); - - assert(Array.isArray(data[0].preferences)); - assert(data[0].preferences.length == 2); - assert(data[0].preferences[0].foo === 'bar'); - - assert(Array.isArray(data[1].preferences)); - assert(data[1].preferences.length == 2); - assert(data[1].preferences[0].foo === 'a'); - - done(); - }); - }); - - it('should have special methods on the preference key', function(done) { - collection.find().exec(function(err, data) { - if(err) return done(err); - - assert(typeof data[0].preferences.add == 'function'); - assert(typeof data[0].preferences.remove == 'function'); - - assert(typeof data[1].preferences.add == 'function'); - assert(typeof data[1].preferences.remove == 'function'); - - done(); - }); - }); - - }); - }); -}); diff --git 
a/test/integration/model/association.remove.hasMany.js b/test/integration/model/association.remove.hasMany.js deleted file mode 100644 index 3ad1b0aa9..000000000 --- a/test/integration/model/association.remove.hasMany.js +++ /dev/null @@ -1,124 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('associations hasMany', function() { - describe('.remove()', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collections = {}; - var prefValues = []; - - before(function(done) { - var waterline = new Waterline(); - - var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - preferences: { - collection: 'preference', - via: 'user' - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - foo: 'string', - user: { - model: 'person' - } - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [ - { id: 1, preference: [{ foo: 'bar' }, { foo: 'foobar' }] }, - { id: 2, preference: [{ foo: 'a' }, { foo: 'b' }] }, - ]; - - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, _values); }, - update: function(con, col, criteria, values, cb) { - if(col === 'preference') { - prefValues.push({ id: criteria.where.id, values: values }); - } - - return cb(null, values); - } - }; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collections = colls.collections; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass foreign key values to update method 
for each relationship', function(done) { - collections.person.find().exec(function(err, models) { - if(err) return done(err); - - var person = models[0]; - - person.preferences.remove(1); - person.preferences.remove(2); - - person.save(function(err) { - if(err) return done(err); - - assert(prefValues.length === 2); - assert(prefValues[0].id === 1); - assert(prefValues[0].values.user === null); - assert(prefValues[1].id === 2); - assert(prefValues[1].values.user === null); - - done(); - }); - }); - }); - - it('should error with a failed transaction when an object is used', function(done) { - collections.person.find().exec(function(err, models) { - if(err) return done(err); - - var person = models[0]; - - person.preferences.remove({ foo: 'foo' }); - person.preferences.remove({ foo: 'bar' }); - - person.save(function(err) { - assert(err); - assert(err.failedTransactions); - assert(Array.isArray(err.failedTransactions)); - assert(err.failedTransactions.length === 2); - assert(err.failedTransactions[0].type === 'remove'); - assert(err.failedTransactions[1].type === 'remove'); - - done(); - }); - }); - }); - - }); - }); -}); diff --git a/test/integration/model/association.remove.manyToMany.js b/test/integration/model/association.remove.manyToMany.js deleted file mode 100644 index 3d5ef6794..000000000 --- a/test/integration/model/association.remove.manyToMany.js +++ /dev/null @@ -1,138 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('associations Many To Many', function() { - describe('.remove()', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collections = {}; - var prefValues = []; - - before(function(done) { - var waterline = new Waterline(); - - var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - preferences: { - collection: 
'preference', - via: 'people', - dominant: true - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - foo: 'string', - people: { - collection: 'person', - via: 'preferences' - } - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [ - { id: 1, preference: [{ foo: 'bar' }, { foo: 'foobar' }] }, - { id: 2, preference: [{ foo: 'a' }, { foo: 'b' }] }, - ]; - - var i = 1; - - var adapterDef = { - find: function(con, col, criteria, cb) { - if(col === 'person_preference') return cb(null, []); - cb(null, _values); - }, - destroy: function(con, col, criteria, cb) { - if(col === 'person_preferences__preference_people') { - prefValues.push(criteria.where); - } - return cb(null, criteria); - }, - update: function(con, col, criteria, values, cb) { - return cb(null, values); - }, - create: function(con, col, values, cb) { - prefValues.push(values); - return cb(null, values); - }, - }; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collections = colls.collections; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass foreign key values to update method for each relationship', function(done) { - collections.person.find().exec(function(err, models) { - if(err) return done(err); - - var person = models[0]; - - person.preferences.remove(1); - person.preferences.remove(2); - - person.save(function(err) { - if(err) return done(err); - - assert(prefValues.length === 2); - - assert(prefValues[0].person_preferences === 1); - assert(prefValues[0].preference_people === 1); - assert(prefValues[1].person_preferences === 1); - assert(prefValues[1].preference_people === 2); - - done(); - }); - 
}); - }); - - it('should error with a failed transaction when an object is used', function(done) { - collections.person.find().exec(function(err, models) { - if(err) return done(err); - - var person = models[0]; - - person.preferences.remove({ foo: 'foo' }); - person.preferences.remove({ foo: 'bar' }); - - person.save(function(err) { - assert(err); - assert(err.failedTransactions); - assert(Array.isArray(err.failedTransactions)); - assert(err.failedTransactions.length === 2); - assert(err.failedTransactions[0].type === 'remove'); - assert(err.failedTransactions[1].type === 'remove'); - - done(); - }); - }); - }); - - }); - }); -}); diff --git a/test/integration/model/association.setter.js b/test/integration/model/association.setter.js deleted file mode 100644 index 6fb36246d..000000000 --- a/test/integration/model/association.setter.js +++ /dev/null @@ -1,92 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('association', function() { - describe('setter', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collection; - - before(function(done) { - var waterline = new Waterline(); - - var User = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - preferences: { - collection: 'preference', - via: 'user' - } - } - }); - - var Preference = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'preference', - attributes: { - user: { - model: 'person' - } - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Preference); - - var _values = [ - { preference: [{ foo: 'bar' }, { foo: 'foobar' }] }, - { preference: [{ foo: 'a' }, { foo: 'b' }] }, - ]; - - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, _values); } - }; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - 
waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collection = colls.collections.person; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should allow new associations to be added using the add function', function(done) { - collection.find().exec(function(err, data) { - if(err) return done(err); - - data[0].preferences.add(1); - assert(data[0].associations.preferences.addModels.length === 1); - - done(); - }); - }); - - it('should allow new associations to be removed using the remove function', function(done) { - collection.find().exec(function(err, data) { - if(err) return done(err); - - data[0].preferences.remove(1); - assert(data[0].associations.preferences.removeModels.length === 1); - - done(); - }); - }); - - }); - }); -}); diff --git a/test/integration/model/destroy.js b/test/integration/model/destroy.js deleted file mode 100644 index 3e82b34d9..000000000 --- a/test/integration/model/destroy.js +++ /dev/null @@ -1,60 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('.destroy()', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collection; - - before(function(done) { - var waterline = new Waterline(); - var Collection = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - first_name: 'string', - last_name: 'string', - full_name: function() { - return this.first_name + ' ' + this.last_name; - } - } - }); - - waterline.loadCollection(Collection); - - var adapterDef = { destroy: function(con, col, options, cb) { return cb(null, true); }}; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { 
foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - collection = colls.collections.person; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass status from the adapter destroy method', function(done) { - var person = new collection._model({ id: 1, first_name: 'foo', last_name: 'bar' }); - - person.destroy(function(err, status) { - assert(!err); - assert(status === true); - done(); - }); - }); - - }); -}); diff --git a/test/integration/model/mixins.js b/test/integration/model/mixins.js deleted file mode 100644 index 52ec36cb6..000000000 --- a/test/integration/model/mixins.js +++ /dev/null @@ -1,56 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('mixins', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collection; - - before(function(done) { - var waterline = new Waterline(); - var Collection = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - first_name: 'string', - last_name: 'string', - full_name: function() { - return this.first_name + ' ' + this.last_name; - } - } - }); - - waterline.loadCollection(Collection); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) done(err); - collection = colls.collections.person; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('instantiated model should have a full_name function', function() { - var person = new collection._model({ first_name: 'foo', last_name: 'bar' }); - var name = 
person.full_name(); - - assert(typeof person.full_name === 'function'); - assert(name === 'foo bar'); - }); - - }); -}); diff --git a/test/integration/model/save.js b/test/integration/model/save.js deleted file mode 100644 index 864ea6815..000000000 --- a/test/integration/model/save.js +++ /dev/null @@ -1,184 +0,0 @@ -var Waterline = require('../../../lib/waterline'); -var _ = require('lodash'); -var assert = require('assert'); - -describe('Model', function() { - describe('.save()', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var personCollection; - var petCollection; - var updatedThroughCollection; - var populates; - var vals; - - before(function(done) { - var waterline = new Waterline(); - var Person = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - first_name: 'string', - last_name: 'string', - error: 'string', - full_name: function() { - return this.first_name + ' ' + this.last_name; - }, - pets: { - collection: 'pet', - via: 'owner' - }, - cars: { - collection: 'car', - via: 'owner' - } - } - }); - - var Pet = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'pet', - attributes: { - type: 'string', - owner: { - model: 'person' - } - } - }); - - var Car = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'car', - attributes: { - type: 'string', - owner: { - model: 'person' - } - } - }); - - waterline.loadCollection(Person); - waterline.loadCollection(Pet); - waterline.loadCollection(Car); - - vals = { - person: { pets: [] , cars: []}, - pet: {}, - car: {} - }; - - var adapterDef = { - find: function(con, col, criteria, cb) { - populates.push(col); - return cb(null, [vals[col]]); - }, - update: function(con, col, criteria, values, cb) { - if(values.error) return cb(new Error('error')); - vals[col] = values; - return cb(null, [values]); - }, - create: function(con, col, values, cb) 
{ - - if (col === 'pet') { - vals.person.pets.push(values); - } - - vals[col] = values; - return cb(null, values); - } - }; - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - - // Setup pet collection - petCollection = colls.collections.pet; - - // Setup person collection - personCollection = colls.collections.person; - - // Setup value catching through personCollection.update - personCollection.update = (function(_super) { - - return function() { - - // Grab this value just for first update on the second test - if (!updatedThroughCollection && arguments[1].id === 2) { - updatedThroughCollection = _.cloneDeep(arguments[1]); - } - - return _super.apply(personCollection, arguments); - }; - - })(personCollection.update); - - done(); - }); - }); - - beforeEach(function(){ - populates = []; - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass model values to adapter update method.', function(done) { - var person = new personCollection._model({ id: 1, first_name: 'foo', last_name: 'bar' }); - - // Update a value - person.last_name = 'foobaz'; - - person.save(function(err) { - assert(!err); - assert.equal(vals.person.last_name, 'foobaz'); - done(); - }); - }); - - it('should not pass *-to-many associations through update.', function(done) { - var person = new personCollection._model({ id: 2, first_name: 'don', last_name: 'moe' }, {showJoins: true}); - - // Update collection - person.pets.push({type: 'dog'}); - person.pets.push({type: 'frog'}); - person.pets.push({type: 'log'}); - - person.save(function(err) { - assert(!err); - - assert(_.isPlainObject(vals.pet)); - assert.equal(_.keys(vals.pet).length, 0); - - assert.equal(typeof updatedThroughCollection, 'object'); - assert.equal(typeof updatedThroughCollection.pets, 
'undefined'); - done(); - }); - }); - - it('should only return one argument to the callback', function(done) { - var person = new personCollection._model({ id: 1, error: 'error' }); - person.save(function() { - assert.equal(arguments.length, 1); - - var person = new personCollection._model({ id: 1 }); - person.save(function() { - assert.equal(arguments.length, 0); - done(); - }); - }); - }); - - }); -}); diff --git a/test/integration/model/toJSON.js b/test/integration/model/toJSON.js deleted file mode 100644 index adde612c5..000000000 --- a/test/integration/model/toJSON.js +++ /dev/null @@ -1,84 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('.toJSON()', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collection, collection2; - - before(function(done) { - var waterline = new Waterline(); - var Collection = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person', - attributes: { - first_name: 'string', - last_name: 'string', - full_name: function() { - return this.first_name + ' ' + this.last_name; - }, - toJSON: function() { - var obj = this.toObject(); - delete obj.last_name; - return obj; - } - } - }); - var Collection2 = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'person2', - attributes: { - first_name: {type: 'string', protected: true}, - last_name: 'string', - full_name: function() { - return this.first_name + ' ' + this.last_name; - } - } - }); - - waterline.loadCollection(Collection); - waterline.loadCollection(Collection2); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) done(err); - collection = colls.collections.person; - collection2 = colls.collections.person2; - done(); - }); - }); - - - 
///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should be overridable', function() { - var person = new collection._model({ first_name: 'foo', last_name: 'bar' }); - var obj = person.toJSON(); - - assert(obj === Object(obj)); - assert(obj.first_name === 'foo'); - assert(!obj.last_name); - }); - - it('should remove any attributes marked as "protected"', function() { - var person = new collection2._model({ first_name: 'foo', last_name: 'bar' }); - var obj = person.toJSON(); - - assert(obj === Object(obj)); - assert(!obj.first_name); - assert(obj.last_name == 'bar'); - }); - - }); -}); diff --git a/test/integration/model/toObject.associations.WithForeighKeyTypeDate.js b/test/integration/model/toObject.associations.WithForeighKeyTypeDate.js deleted file mode 100644 index 7d787f782..000000000 --- a/test/integration/model/toObject.associations.WithForeighKeyTypeDate.js +++ /dev/null @@ -1,101 +0,0 @@ -var Waterline = require('../../../lib/waterline'); -var assert = require('assert'); - -describe('Model', function() { - describe('.toObject() with associations with foreign key typed as datetime', function() { - var waterline; - var Schedule; - - before(function(done) { - waterline = new Waterline(); - var collections = {}; - - collections.trucker = Waterline.Collection.extend({ - identity: 'Trucker', - connection: 'foo', - tableName: 'trucker_table', - attributes: { - truckerName: { - type: 'string' - }, - workdays: { - collection: 'Workday', - via: 'trucker', - through: 'schedule' - } - } - }); - - collections.workday = Waterline.Collection.extend({ - identity: 'Workday', - connection: 'foo', - tableName: 'workday_table', - attributes: { - id: { - type: 'datetime', - primaryKey: true - }, - start: { - type: 'datetime', - defaultsTo: new Date(1970, 0, 1, 12, 0) - }, - end: { - type: 'datetime', - defaultsTo: new Date(1970, 0, 1, 16, 0, 0) - }, - trucker: { - collection: 'Trucker', - via: 
'workday', - through: 'schedule' - } - } - }); - - collections.schedule = Waterline.Collection.extend({ - identity: 'Schedule', - connection: 'foo', - tableName: 'schedule_table', - attributes: { - miles: { - type: 'integer' - }, - trucker: { - model: 'Trucker', - foreignKey: true, - columnName: 'trucker_id' - }, - workday: { - model: 'Workday', - type: 'datetime', - foreignKey: true, - columnName: 'workday_id' - } - } - }); - - waterline.loadCollection(collections.trucker); - waterline.loadCollection(collections.workday); - waterline.loadCollection(collections.schedule); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if (err) { done(err); } - Schedule = colls.collections.schedule; - done(); - }); - }); - - it('should return a valid object with ids for foreign key fields', function() { - var schedule = new Schedule._model({ trucker: 1, workday: new Date(1970, 0, 1, 0, 0), miles: 10 }); - var obj = schedule.toObject(); - assert(obj.trucker === 1); - assert((new Date(obj.workday)).getTime() === (new Date(1970, 0, 1, 0, 0)).getTime()); - assert(obj.miles === 10); - }); - }); -}); diff --git a/test/integration/model/toObject.associations.js b/test/integration/model/toObject.associations.js deleted file mode 100644 index 18d194be9..000000000 --- a/test/integration/model/toObject.associations.js +++ /dev/null @@ -1,106 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - describe('.toObject() with associations', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var collection; - - before(function(done) { - var waterline = new Waterline(); - - var Foo = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'foo', - attributes: { - name: 'string', - bars: { - collection: 
'bar', - via: 'foo' - }, - foobars: { - collection: 'baz', - via: 'foo' - } - } - }); - - var Bar = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'bar', - attributes: { - name: 'string', - foo: { - model: 'foo' - } - } - }); - - var Baz = Waterline.Collection.extend({ - connection: 'my_foo', - tableName: 'baz', - attributes: { - foo: { - model: 'foo' - } - } - }); - - waterline.loadCollection(Foo); - waterline.loadCollection(Bar); - waterline.loadCollection(Baz); - - var connections = { - 'my_foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) done(err); - collection = colls.collections.foo; - done(); - }); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should strip out the association key when no options are passed', function() { - var person = new collection._model({ name: 'foobar' }); - var obj = person.toObject(); - - assert(obj === Object(obj)); - assert(obj.name === 'foobar'); - assert(!obj.bars); - assert(!obj.baz); - }); - - it('should keep the association key when showJoins option is passed', function() { - var person = new collection._model({ name: 'foobar' }, { showJoins: true }); - var obj = person.toObject(); - - assert(obj === Object(obj)); - assert(obj.name === 'foobar'); - assert(obj.bars); - assert(obj.foobars); - }); - - it('should selectively keep the association keys when joins option is passed', function() { - var person = new collection._model({ name: 'foobar' }, { showJoins: true, joins: ['bar'] }); - var obj = person.toObject(); - - assert(obj === Object(obj)); - assert(obj.name === 'foobar'); - assert(obj.bars); - assert(!obj.foobars); - }); - - }); -}); diff --git a/test/structure/waterline/collection.js b/test/structure/waterline/collection.js deleted file mode 100644 index f2e3c6af6..000000000 --- 
a/test/structure/waterline/collection.js +++ /dev/null @@ -1,41 +0,0 @@ -var Collection = require('../../../lib/waterline/collection'), - assert = require('assert'); - -describe('Collection', function() { - - /** - * Test to ensure the basic functionality of the - * Collection prototype works correctly - */ - - it('should allow the prototype to be extended', function() { - var Person = Collection.extend({ identity: 'test', foo: 'bar' }); - var schema = { schema: { test: { attributes: {} }}}; - var person = new Person(schema, { test: {} }); - - assert(person.foo === 'bar'); - }); - - - describe('Core', function() { - var Person; - - // Setup Fixture Model - before(function() { - Person = Collection.extend({ - identity: 'test', - attributes: { - foo: 'string' - } - }); - }); - - it('should have a schema', function() { - var schema = { schema: { test: { attributes: { foo: { type: 'string' }} }}}; - var person = new Person(schema, { test: {} }); - - assert(person._schema.schema.foo.type === 'string'); - }); - - }); -}); diff --git a/test/structure/waterline/initialize.js b/test/structure/waterline/initialize.js deleted file mode 100644 index a53ffdb40..000000000 --- a/test/structure/waterline/initialize.js +++ /dev/null @@ -1,123 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Waterline', function() { - - describe('loader', function() { - var waterline; - - before(function() { - waterline = new Waterline(); - }); - - - it('should keep an internal mapping of collection definitions', function() { - var collection = Waterline.Collection.extend({ foo: 'bar' }); - var collections = waterline.loadCollection(collection); - - assert(Array.isArray(collections)); - assert(collections.length === 1); - }); - }); - - - describe('initialize', function() { - - describe('without junction tables', function() { - var waterline; - - before(function() { - waterline = new Waterline(); - - // Setup Fixture Model - var collection = 
Waterline.Collection.extend({ - tableName: 'foo', - connection: 'my_foo', - attributes: { - foo: 'string' - } - }); - - waterline.loadCollection(collection); - }); - - - it('should return an array of initialized collections', function(done) { - - var connections = { - 'my_foo': { - adapter: 'foo' - } - }; - - waterline.initialize({ adapters: { foo: {} }, connections: connections }, function(err, data) { - if(err) return done(err); - - assert(data.collections); - assert(Object.keys(data.collections).length === 1); - assert(data.collections.foo); - done(); - }); - }); - }); - - - describe('with junction tables', function() { - var waterline; - - before(function() { - waterline = new Waterline(); - - // Setup Fixture Models - var foo = Waterline.Collection.extend({ - tableName: 'foo', - connection: 'my_foo', - attributes: { - bar: { - collection: 'bar', - via: 'foo', - dominant: true - } - } - }); - - var bar = Waterline.Collection.extend({ - tableName: 'bar', - connection: 'my_foo', - attributes: { - foo: { - collection: 'foo', - via: 'bar' - } - } - }); - - waterline.loadCollection(foo); - waterline.loadCollection(bar); - }); - - - it('should add the junction tables to the collection output', function(done) { - - var connections = { - 'my_foo': { - adapter: 'foo' - } - }; - - waterline.initialize({ adapters: { foo: {} }, connections: connections }, function(err, data) { - if(err) return done(err); - - assert(data.collections); - assert(Object.keys(data.collections).length === 3); - assert(data.collections.foo); - assert(data.collections.bar); - assert(data.collections.bar_foo__foo_bar); - - done(); - }); - }); - }); - - }); -}); diff --git a/test/structure/waterline/query.methods.js b/test/structure/waterline/query.methods.js deleted file mode 100644 index 98f18b8ba..000000000 --- a/test/structure/waterline/query.methods.js +++ /dev/null @@ -1,117 +0,0 @@ -var Collection = require('../../../lib/waterline/collection'), - assert = require('assert'); - 
-describe('Collection', function() { - - /** - * Test to ensure API compatibility methods - * are correctly added to the Collection prototype - */ - - describe('Query Methods', function() { - var person; - - // Setup Fixture Model - before(function() { - var collection = Collection.extend({ identity: 'test' }); - var schema = { schema: { test: { attributes: {} }}}; - person = new collection(schema, { test: {} }); - }); - - describe('Basic Finders', function() { - - it('should have .findOne() method', function() { - assert(typeof person.findOne === 'function'); - }); - - it('should have .find() method', function() { - assert(typeof person.find === 'function'); - }); - - it('should have .where() method', function() { - assert(typeof person.where === 'function'); - }); - - it('should have .select() method', function() { - assert(typeof person.select === 'function'); - }); - - it('should have .findOneLike() method', function() { - assert(typeof person.findOneLike === 'function'); - }); - - it('should have .findLike() method', function() { - assert(typeof person.findLike === 'function'); - }); - - it('should have .startsWith() method', function() { - assert(typeof person.startsWith === 'function'); - }); - - it('should have .endsWith() method', function() { - assert(typeof person.endsWith === 'function'); - }); - - it('should have .contains() method', function() { - assert(typeof person.contains === 'function'); - }); - }); - - describe('DDL Functions', function() { - - it('should have .describe() method', function() { - assert(typeof person.describe === 'function'); - }); - - it('should have .alter() method', function() { - assert(typeof person.alter === 'function'); - }); - - it('should have .drop() method', function() { - assert(typeof person.drop === 'function'); - }); - }); - - describe('DQL Functions', function() { - - it('should have .join() method', function() { - assert(typeof person.join === 'function'); - }); - - it('should have .create() method', function() 
{ - assert(typeof person.create === 'function'); - }); - - it('should have .update() method', function() { - assert(typeof person.update === 'function'); - }); - - it('should have .destroy() method', function() { - assert(typeof person.destroy === 'function'); - }); - - it('should have .count() method', function() { - assert(typeof person.count === 'function'); - }); - }); - - describe('Composite Functions', function() { - - it('should have .findOrCreate() method', function() { - assert(typeof person.findOrCreate === 'function'); - }); - }); - - describe('Aggregate Functions', function() { - - it('should have .createEach() method', function() { - assert(typeof person.createEach === 'function'); - }); - - it('should have .findOrCreateEach() method', function() { - assert(typeof person.findOrCreateEach === 'function'); - }); - }); - - }); -}); diff --git a/test/support/fixtures/associations/customer.fixture.js b/test/support/fixtures/associations/customer.fixture.js index 6d4515193..5e4586198 100644 --- a/test/support/fixtures/associations/customer.fixture.js +++ b/test/support/fixtures/associations/customer.fixture.js @@ -1,7 +1,6 @@ -var Collection = require('../../../../lib/waterline/collection'); +var BaseMetaModel = require('../../../../lib/waterline/MetaModel'); -// Extend for testing purposes -var Model = Collection.extend({ +module.exports = BaseMetaModel.extend({ identity: 'user', adapter: 'test', @@ -19,5 +18,3 @@ var Model = Collection.extend({ } }); - -module.exports = Model; diff --git a/test/support/fixtures/associations/payment.fixture.js b/test/support/fixtures/associations/payment.fixture.js index 8b90f5086..840736f9b 100644 --- a/test/support/fixtures/associations/payment.fixture.js +++ b/test/support/fixtures/associations/payment.fixture.js @@ -1,7 +1,6 @@ -var Collection = require('../../../../lib/waterline/collection'); +var BaseMetaModel = require('../../../../lib/waterline/MetaModel'); -// Extend for testing purposes -var Model = 
Collection.extend({ +module.exports = BaseMetaModel.extend({ identity: 'user', adapter: 'test', @@ -21,5 +20,3 @@ var Model = Collection.extend({ } }); - -module.exports = Model; diff --git a/test/support/fixtures/integrator/cache.js b/test/support/fixtures/integrator/cache.js index 78a44d28a..7db4893b8 100644 --- a/test/support/fixtures/integrator/cache.js +++ b/test/support/fixtures/integrator/cache.js @@ -1,25 +1,25 @@ /** * Module dependencies. */ -var _ = require('lodash'); +var _ = require('@sailshq/lodash'); var fixtures = { - tables: require('./tables') + tables: require('./tables') }; /** * Cache - * + * * @type {Object} */ -module.exports = (function () { - var cache = {}; - _.extend(cache, { - user: fixtures.tables.user, - message: fixtures.tables.message, - message_to_user: fixtures.tables.message_to_user, - message_cc_user: fixtures.tables.message_cc_user, - message_bcc_user: fixtures.tables.message_bcc_user - }); - return cache; +module.exports = (function() { + var cache = {}; + _.extend(cache, { + user: fixtures.tables.user, + message: fixtures.tables.message, + message_to_user: fixtures.tables.message_to_user, + message_cc_user: fixtures.tables.message_cc_user, + message_bcc_user: fixtures.tables.message_bcc_user + }); + return cache; })(); diff --git a/test/support/fixtures/integrator/multiple.joins.js b/test/support/fixtures/integrator/multiple.joins.js index 5c302dc4e..8d7b139cb 100644 --- a/test/support/fixtures/integrator/multiple.joins.js +++ b/test/support/fixtures/integrator/multiple.joins.js @@ -1,61 +1,71 @@ /** * Joins - * + * * @type {Array} */ module.exports = [ - // N..M Populate - // (Message has an association "to" which points to a collection of User) - { - parent: 'message', // left table name - parentKey: 'id', // left table key - alias: 'to', // the `alias` -- e.g. name of association - - child: 'message_to_user', // right table name - childKey: 'message_id' // right table key - }, - { - alias: 'to', // the `alias` -- e.g. 
name of association - - parent: 'message_to_user', // left table name - parentKey: 'user_id', // left table key - - child: 'user', // right table name - childKey: 'id', // right table key - select: ['id', 'email'] - }, - - // N..1 Populate - // (Message has an association "from" which points to one User) - { - parent: 'message', // left table name - alias: 'from', // the `alias` -- e.g. name of association - parentKey: 'from', // left table key - - child: 'user', // right table name - childKey: 'id', // right table key - select: ['email', 'id'] - }, - - // N..M Populate - // (Message has an association "cc" which points to a collection of User) - { - parent: 'message', // left table name - parentKey: 'id', // left table key - alias: 'cc', // the `alias` -- e.g. name of association - - child: 'message_cc_user', // right table name - childKey: 'message_id' // right table key - }, - { - alias: 'cc', // the `alias` -- e.g. name of association - - parent: 'message_cc_user', // left table name - parentKey: 'user_id', // left table key - - child: 'user', // right table name - childKey: 'id', // right table key - select: ['id', 'email'] - }, + // N..M Populate + // (Message has an association "to" which points to a collection of User) + { + parent: 'message', // left table name + parentCollectionIdentity: 'message', + parentKey: 'id', // left table key + alias: 'to', // the `alias` -- e.g. name of association + + child: 'message_to_user', // right table name + childKey: 'message_id', // right table key + childCollectionIdentity: 'message_to_user' + }, + { + alias: 'to', // the `alias` -- e.g. 
name of association + + parent: 'message_to_user', // left table name + parentCollectionIdentity: 'message_to_user', + parentKey: 'user_id', // left table key + + child: 'user', // right table name + childKey: 'id', // right table key + select: ['id', 'email'], + childCollectionIdentity: 'user' + }, + + // N..1 Populate + // (Message has an association "from" which points to one User) + { + parent: 'message', // left table name + parentCollectionIdentity: 'message', + alias: 'from', // the `alias` -- e.g. name of association + parentKey: 'from', // left table key + + child: 'user', // right table name + childKey: 'id', // right table key + select: ['email', 'id'], + childCollectionIdentity: 'user' + }, + + // N..M Populate + // (Message has an association "cc" which points to a collection of User) + { + parent: 'message', // left table name + parentCollectionIdentity: 'message', + parentKey: 'id', // left table key + alias: 'cc', // the `alias` -- e.g. name of association + + child: 'message_cc_user', // right table name + childKey: 'message_id', // right table key + childCollectionIdentity: 'message_cc_user' + }, + { + alias: 'cc', // the `alias` -- e.g. name of association + + parent: 'message_cc_user', // left table name + parentCollectionIdentity: 'message_cc_user', + parentKey: 'user_id', // left table key + + child: 'user', // right table name + childKey: 'id', // right table key + select: ['id', 'email'], + childCollectionIdentity: 'user' + } ]; diff --git a/test/support/fixtures/integrator/n..1.joins.js b/test/support/fixtures/integrator/n..1.joins.js index fc275e37d..e70f94571 100644 --- a/test/support/fixtures/integrator/n..1.joins.js +++ b/test/support/fixtures/integrator/n..1.joins.js @@ -1,14 +1,16 @@ /** * Joins - * + * * @type {Array} */ module.exports = [ - { - alias: 'from', // the `alias` -- e.g. 
name of association - parent: 'message', // left table name - parentKey: 'from', // left table key - child: 'user', // right table name - childKey: 'id' // right table key - } + { + alias: 'from', // the `alias` -- e.g. name of association + parent: 'message', // left table name + parentCollectionIdentity: 'message', + parentKey: 'from', // left table key + child: 'user', // right table name + childKey: 'id', // right table key + childCollectionIdentity: 'user' + } ]; diff --git a/test/support/fixtures/integrator/n..m.joins.js b/test/support/fixtures/integrator/n..m.joins.js index 06be279d7..98ef18bbc 100644 --- a/test/support/fixtures/integrator/n..m.joins.js +++ b/test/support/fixtures/integrator/n..m.joins.js @@ -1,21 +1,25 @@ /** * Joins - * + * * @type {Array} */ module.exports = [ - { - alias: 'to', // the `alias` -- e.g. name of association - parent: 'message', // parent/left table name - parentKey: 'id', // parent PK - childKey: 'message_id', // intermediate FK <- parent key - child: 'message_to_user', // intermediate/right table name - }, - { - alias: 'to', - parent: 'message_to_user', // intermediate/left table name - parentKey: 'user_id', // intermediate FK -> child key - childKey: 'id', // child PK - child: 'user' // child/right table name - } + { + alias: 'to', // the `alias` -- e.g. 
name of association + parent: 'message', // parent/left table name + parentCollectionIdentity: 'message', + parentKey: 'id', // parent PK + childKey: 'message_id', // intermediate FK <- parent key + child: 'message_to_user', // intermediate/right table name + childCollectionIdentity: 'message_to_user' + }, + { + alias: 'to', + parent: 'message_to_user', // intermediate/left table name + parentCollectionIdentity: 'message_to_user', + parentKey: 'user_id', // intermediate FK -> child key + childKey: 'id', // child PK + child: 'user', // child/right table name + childCollectionIdentity: 'user' + } ]; diff --git a/test/support/fixtures/model/context.manyToMany.fixture.js b/test/support/fixtures/model/context.manyToMany.fixture.js index 1238db9b7..2e1232c98 100644 --- a/test/support/fixtures/model/context.manyToMany.fixture.js +++ b/test/support/fixtures/model/context.manyToMany.fixture.js @@ -24,7 +24,7 @@ module.exports = function() { var models = { foo: { identity: 'foo', - connection: 'my_foo', + datastore: 'my_foo', attributes: { id: { type: 'integer', @@ -49,7 +49,7 @@ module.exports = function() { }, bar: { identity: 'bar', - connection: 'my_foo', + datastore: 'my_foo', attributes: { id: { type: 'integer', @@ -68,7 +68,7 @@ module.exports = function() { }, baz: { identity: 'baz', - connection: 'my_foo', + datastore: 'my_foo', attributes: { id: { type: 'integer', @@ -83,7 +83,7 @@ module.exports = function() { }, bar_foos__foo_bars: { identity: 'bar_foos__foo_bars', - connection: 'my_foo', + datastore: 'my_foo', tables: ['bar', 'foo'], junctionTable: true, @@ -128,7 +128,7 @@ module.exports = function() { // Build Up Waterline Schema context.waterline.schema.foo = { identity: 'foo', - connection: 'my_foo', + datastore: 'my_foo', attributes: { id: { type: 'integer', @@ -156,7 +156,7 @@ module.exports = function() { context.waterline.schema.bar = { identity: 'bar', - connection: 'my_foo', + datastore: 'my_foo', attributes: { id: { type: 'integer', @@ -177,7 +177,7 
@@ module.exports = function() { context.waterline.schema.baz = { identity: 'baz', - connection: 'my_foo', + datastore: 'my_foo', attributes: { id: { type: 'integer', @@ -197,7 +197,7 @@ module.exports = function() { context.waterline.schema.bar_foos__foo_bars = { identity: 'bar_foos__foo_bars', - connection: 'my_foo', + datastore: 'my_foo', tables: ['bar', 'foo'], junctionTable: true, diff --git a/test/support/migrate.helper.js b/test/support/migrate.helper.js new file mode 100644 index 000000000..553d7c06a --- /dev/null +++ b/test/support/migrate.helper.js @@ -0,0 +1,23 @@ +var _ = require('@sailshq/lodash'); +var async = require('async'); + +module.exports = function(ontology, cb) { + // Run Auto-Migrations + var toBeSynced = _.reduce(ontology.collections, function(resources, collection) { + resources.push(collection); + return resources; + }, []); + + // Run auto-migration strategies on each collection + async.eachSeries(toBeSynced, function(collection, next) { + collection.sync(next); + }, function(err) { + if (err) { + return cb(err); + } + + // Expose Global + // SomeCollection = ocean.collections.tests; + cb(); + }); +}; diff --git a/test/unit/callbacks/afterCreate.create.js b/test/unit/callbacks/afterCreate.create.js index d84d1ef8b..f308b2dc2 100644 --- a/test/unit/callbacks/afterCreate.create.js +++ b/test/unit/callbacks/afterCreate.create.js @@ -1,30 +1,36 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); -describe('.afterCreate()', function() { - - describe('basic function', function() { +describe('After Create Lifecycle Callback ::', function() { + describe('Create ::', function() { var person; before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: 
true, attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, afterCreate: function(values, cb) { values.name = values.name + ' updated'; - cb(); + return cb(); } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { create: function(con, query, cb) { return cb(null, query.newRecord); }}; var connections = { 'foo': { @@ -32,86 +38,24 @@ describe('.afterCreate()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * Create - */ - - describe('.create()', function() { - - it('should run afterCreate and mutate values', function(done) { - person.create({ name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test updated'); - done(); - }); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + person = orm.collections.user; + return done(); }); }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - var connections = { - 'foo': { - adapter: 'foobar' + it('should run afterCreate and mutate values', function(done) { + person.create({ name: 'test', id: 1 }, function(err, user) { + if (err) { + return done(err); } - }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.create({ name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test fn1 fn2'); - done(); + assert.equal(user.name, 'test updated'); + return done(); }); }); }); - }); diff --git a/test/unit/callbacks/afterCreate.createEach.js b/test/unit/callbacks/afterCreate.createEach.js index 5043d12dd..348576df4 100644 --- a/test/unit/callbacks/afterCreate.createEach.js +++ b/test/unit/callbacks/afterCreate.createEach.js @@ -1,30 +1,36 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); -describe('.afterCreate()', function() { - - describe('basic function', function() { +describe('After Create Lifecycle Callback ::', function() { + describe('.createEach ::', function() { var person; before(function(done) { var waterline = new Waterline(); - var Model = 
Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, afterCreate: function(values, cb) { values.name = values.name + ' updated'; - cb(null, values); + return cb(); } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { createEach: function(con, query, cb) { return cb(null, query.newRecords); }}; var connections = { 'foo': { @@ -32,88 +38,25 @@ describe('.afterCreate()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * CreateEach - */ - - describe('.createEach()', function() { - - it('should run afterCreate and mutate values', function(done) { - person.createEach([{ name: 'test' }, { name: 'test2' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - assert(users[1].name === 'test2 updated'); - done(); - }); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + person = orm.collections.user; + return done(); }); }); - }); - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' + it('should run afterCreate and mutate values', function(done) { + person.createEach([{ name: 'test-foo', id: 1 }, { name: 'test-bar', id: 2 }], function(err, users) { + if (err) { + return done(err); } - }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.createEach([{ name: 'test' }, { name: 'test2' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - assert(users[1].name === 'test2 fn1 fn2'); - done(); - }); + assert.equal(users[0].name, 'test-foo updated'); + assert.equal(users[1].name, 'test-bar updated'); + return done(); + }, { fetch: true }); }); }); - }); diff --git a/test/unit/callbacks/afterCreate.findOrCreate.js b/test/unit/callbacks/afterCreate.findOrCreate.js index 61ec3d755..4ba8f94a3 100644 --- a/test/unit/callbacks/afterCreate.findOrCreate.js +++ b/test/unit/callbacks/afterCreate.findOrCreate.js @@ -1,40 +1,41 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +var assert = require('assert'); describe('.afterCreate()', function() { - describe('basic 
function', function() { - - /** - * findOrCreate - */ - describe('.findOrCreate()', function() { - describe('without a record', function() { var person; before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + fetchRecordsOnCreateEach: true, attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, afterCreate: function(values, cb) { values.name = values.name + ' updated'; - cb(); + return cb(); } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } + find: function(con, query, cb) { return cb(null, null); }, + create: function(con, query, cb) { return cb(null, query.newRecord); } }; var connections = { @@ -43,18 +44,26 @@ describe('.afterCreate()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + person = orm.collections.user; + + return done(); }); }); it('should run afterCreate and mutate values on create', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test updated'); - done(); + person.findOrCreate({ name: 'test' }, { name: 'test', id: 1 }, function(err, user) { + if (err) { + return done(err); + } + + assert.equal(user.name, 'test updated'); + + return done(); }); }); }); @@ -64,25 +73,31 @@ describe('.afterCreate()', function() { before(function(done) { var 
waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, afterCreate: function(values, cb) { values.name = values.name + ' updated'; - cb(); + return cb(); } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, [{ name: 'test' }]); }, - create: function(con, col, values, cb) { return cb(null, values); } + find: function(con, query, cb) { return cb(null, [{ name: 'test', id: 1 }]); }, + create: function(con, query, cb) { return cb(null, query.newRecord); } }; var connections = { @@ -91,148 +106,29 @@ describe('.afterCreate()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should not run afterCreate and mutate values on find', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test'); - done(); - }); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - - describe('without a record', function() { - - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); } - ] - }); - - waterline.loadCollection(Model); - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; + person = orm.collections.user; - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order on create', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test fn1 fn2'); - done(); + return done(); + }); }); - }); - }); - - describe('with a record', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); + it('should not run afterCreate and mutate values on find', function(done) { + person.findOrCreate({ name: 'test' }, { name: 'test', 
id: 1 }, function(err, user) { + if (err) { + return done(err); } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, [{ name: 'test' }]); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - var connections = { - 'foo': { - adapter: 'foobar' - } - }; + assert.equal(user.name, 'test'); - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should not run any of the functions on find', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test'); - done(); + return done(); + }); }); }); }); - }); - }); diff --git a/test/unit/callbacks/afterCreate.findOrCreateEach.js b/test/unit/callbacks/afterCreate.findOrCreateEach.js deleted file mode 100644 index 706792ab4..000000000 --- a/test/unit/callbacks/afterCreate.findOrCreateEach.js +++ /dev/null @@ -1,123 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.afterCreate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterCreate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - 
if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * findOrCreateEach - */ - - describe('.findOrCreateEach()', function() { - - it('should run afterCreate and mutate values', function(done) { - person.findOrCreateEach([{ name: 'test' }], [{ name: 'test' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.findOrCreateEach([{ name: 'test' }], [{ name: 'test' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git a/test/unit/callbacks/afterDestroy.destroy.js b/test/unit/callbacks/afterDestroy.destroy.js index 064418924..75c5c57b9 100644 --- a/test/unit/callbacks/afterDestroy.destroy.js +++ b/test/unit/callbacks/afterDestroy.destroy.js @@ -1,35 +1,40 @@ -var Waterline = require('../../../lib/waterline'), - assert = 
require('assert'); +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); -describe('.afterDestroy()', function() { - - describe('basic function', function() { - var person, status; +describe('After Destroy Lifecycle Callback ::', function() { + describe('Destroy ::', function() { + var person; + var status; before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + fetchRecordsOnDestroy: true, attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, - afterDestroy: function(values, cb) { - person.create({ test: 'test' }, function(err, result) { - if(err) return cb(err); - status = result.status; - cb(); - }); + afterDestroy: function(destroyedRecord, cb) { + status = destroyedRecord.status; + cb(); } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - destroy: function(con, col, options, cb) { return cb(null, options); }, - create: function(con, col, options, cb) { return cb(null, { status: true }); } + destroy: function(con, query, cb) { return cb(undefined, [{ status: true, id: 1 }]); }, + create: function(con, query, cb) { return cb(undefined, { status: true, id: 1 }); } }; var connections = { @@ -38,89 +43,24 @@ describe('.afterDestroy()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * Destroy - */ - - describe('.destroy()', function() { - - it('should run afterDestroy', function(done) { - person.destroy({ name: 'test' }, function(err) { - assert(!err); - assert(status === true); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run 
in order. - */ - - describe('array of functions', function() { - var person, status; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterDestroy: [ - // Function 1 - function(values, cb) { - status = 'fn1 '; - cb(); - }, - - // Function 2 - function(values, cb) { - status = status + 'fn2'; - cb(); - } - ] - }); - - // Fixture Adapter Def - var adapterDef = { - destroy: function(con, col, options, cb) { return cb(null, options); }, - create: function(con, col, options, cb) { return cb(null, { status: true }); } - }; - - waterline.loadCollection(Model); - - var connections = { - 'foo': { - adapter: 'foobar' + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); + person = orm.collections.user; + return done(); }); }); - it('should run the functions in order', function(done) { + it('should run afterDestroy', function(done) { person.destroy({ name: 'test' }, function(err) { - assert(!err); - assert(status === 'fn1 fn2'); - done(); + if (err) { + return done(err); + } + + assert.equal(status, true); + return done(); }); }); }); - }); diff --git a/test/unit/callbacks/afterValidation.create.js b/test/unit/callbacks/afterValidation.create.js deleted file mode 100644 index b392b97e2..000000000 --- a/test/unit/callbacks/afterValidation.create.js +++ /dev/null @@ -1,117 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.afterValidate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - 
identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * Create - */ - - describe('.create()', function() { - - it('should run afterValidate and mutate values', function(done) { - person.create({ name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test updated'); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 1 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.create({ name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test fn1 fn2'); - done(); 
- }); - }); - }); - -}); diff --git a/test/unit/callbacks/afterValidation.createEach.js b/test/unit/callbacks/afterValidation.createEach.js deleted file mode 100644 index d0369485d..000000000 --- a/test/unit/callbacks/afterValidation.createEach.js +++ /dev/null @@ -1,119 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.afterValidate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * CreateEach - */ - - describe('.createEach()', function() { - - it('should run afterValidate and mutate values', function(done) { - person.createEach([{ name: 'test' }, { name: 'test2' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - assert(users[1].name === 'test2 updated'); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 1 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.createEach([{ name: 'test' }, { name: 'test2' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - assert(users[1].name === 'test2 fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git a/test/unit/callbacks/afterValidation.findOrCreate.js b/test/unit/callbacks/afterValidation.findOrCreate.js deleted file mode 100644 index c62bc484e..000000000 --- a/test/unit/callbacks/afterValidation.findOrCreate.js +++ /dev/null @@ -1,242 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.afterValidate()', function() { - - describe('basic function', function() { - - /** - * findOrCreate - */ - - describe('.findOrCreate()', function() { - - describe('without a record', function() { - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: function(values, cb) { - values.name = values.name + ' updated'; 
- cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, []); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run afterValidate and mutate values on create', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test updated'); - done(); - }); - }); - }); - - describe('with a record', function() { - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, [criteria.where]); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should not run afterValidate and mutate values on find', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test'); - done(); - }); - }); - }); - - - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - - describe('without a record', function() { - - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 1 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order on create', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test fn1 fn2'); - done(); - }); - }); - }); - - describe('without a record', function() { - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 1 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, [criteria.where]); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - 
} - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should not run any of the functions on find', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test'); - done(); - }); - }); - }); - - }); - -}); diff --git a/test/unit/callbacks/afterValidation.findOrCreateEach.js b/test/unit/callbacks/afterValidation.findOrCreateEach.js deleted file mode 100644 index 3c700494d..000000000 --- a/test/unit/callbacks/afterValidation.findOrCreateEach.js +++ /dev/null @@ -1,123 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.afterValidate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * findOrCreateEach - */ - - describe('.findOrCreateEach()', function() { - - it('should run afterValidate and mutate values', function(done) { - person.findOrCreateEach([{ name: 'test' }], [{ name: 'test' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - done(); - }); - }); - }); - 
}); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 1 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.findOrCreateEach([{ name: 'test' }], [{ name: 'test' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git a/test/unit/callbacks/afterValidation.update.js b/test/unit/callbacks/afterValidation.update.js deleted file mode 100644 index 54bfe3535..000000000 --- a/test/unit/callbacks/afterValidation.update.js +++ /dev/null @@ -1,117 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.afterValidate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - 
waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { update: function(con, col, criteria, values, cb) { return cb(null, [values]); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * Update - */ - - describe('.update()', function() { - - it('should run afterValidate and mutate values', function(done) { - person.update({ name: 'criteria' }, { name: 'test' }, function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - afterValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 1 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { update: function(con, col, criteria, values, cb) { return cb(null, [values]); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.update({ name: 'criteria' }, { name: 'test' }, function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git a/test/unit/callbacks/beforeCreate.create.js 
b/test/unit/callbacks/beforeCreate.create.js index d061325fd..e01bc93fe 100644 --- a/test/unit/callbacks/beforeCreate.create.js +++ b/test/unit/callbacks/beforeCreate.create.js @@ -1,31 +1,36 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); -describe('.beforeCreate()', function() { - - describe('basic function', function() { +describe('Before Create Lifecycle Callback ::', function() { + describe('Create ::', function() { var person; before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, beforeCreate: function(values, cb) { - assert(this.identity === 'user'); values.name = values.name + ' updated'; cb(); } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { create: function(con, query, cb) { return cb(null, query.newRecord); }}; var connections = { 'foo': { @@ -33,86 +38,24 @@ describe('.beforeCreate()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * Create - */ - - describe('.create()', function() { - - it('should run beforeCreate and mutate values', function(done) { - person.create({ name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test updated'); - done(); - }); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + 
person = orm.collections.user; + return done(); }); }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - var connections = { - 'foo': { - adapter: 'foobar' + it('should run beforeCreate and mutate values', function(done) { + person.create({ name: 'test', id: 1 }, function(err, user) { + if (err) { + return done(err); } - }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.create({ name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test fn1 fn2'); - done(); + assert.equal(user.name, 'test updated'); + return done(); }); }); }); - }); diff --git a/test/unit/callbacks/beforeCreate.createEach.js b/test/unit/callbacks/beforeCreate.createEach.js index 6fbeaeb7a..4f1a82b43 100644 --- a/test/unit/callbacks/beforeCreate.createEach.js +++ b/test/unit/callbacks/beforeCreate.createEach.js @@ -1,18 +1,24 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); -describe('.beforeCreate()', function() { - - describe('basic function', function() { +describe('Before Create Lifecycle Callback ::', 
function() { + describe('.createEach() ::', function() { var person; before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, beforeCreate: function(values, cb) { @@ -21,10 +27,10 @@ describe('.beforeCreate()', function() { } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { createEach: function(con, query, cb) { return cb(null, query.newRecords); }}; var connections = { 'foo': { @@ -32,88 +38,25 @@ describe('.beforeCreate()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * CreateEach - */ - - describe('.createEach()', function() { - - it('should run beforeCreate and mutate values', function(done) { - person.createEach([{ name: 'test' }, { name: 'test2' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - assert(users[1].name === 'test2 updated'); - done(); - }); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + person = orm.collections.user; + return done(); }); }); - }); - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' + it('should run beforeCreate and mutate values', function(done) { + person.createEach([{ name: 'test-foo', id: 1 }, { name: 'test-bar', id: 2 }], function(err, users) { + if (err) { + return done(err); } - }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.createEach([{ name: 'test' }, { name: 'test2' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - assert(users[1].name === 'test2 fn1 fn2'); - done(); - }); + assert.equal(users[0].name, 'test-foo updated'); + assert.equal(users[1].name, 'test-bar updated'); + return done(); + }, {fetch: true}); }); }); - }); diff --git a/test/unit/callbacks/beforeCreate.findOrCreate.js b/test/unit/callbacks/beforeCreate.findOrCreate.js index 31ad45e29..b3ba64995 100644 --- a/test/unit/callbacks/beforeCreate.findOrCreate.js +++ b/test/unit/callbacks/beforeCreate.findOrCreate.js @@ -1,40 +1,41 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +var assert = require('assert'); describe('.beforeCreate()', function() { - describe('basic 
function', function() { - - /** - * findOrCreate - */ - describe('.findOrCreate()', function() { - describe('without a record', function() { var person; before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + fetchRecordsOnCreateEach: true, attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, beforeCreate: function(values, cb) { values.name = values.name + ' updated'; - cb(); + return cb(); } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } + find: function(con, query, cb) { return cb(null, null); }, + create: function(con, query, cb) { return cb(null, query.newRecord); } }; var connections = { @@ -43,18 +44,26 @@ describe('.beforeCreate()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + person = orm.collections.user; + + return done(); }); }); it('should run beforeCreate and mutate values on create', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test updated'); - done(); + person.findOrCreate({ name: 'test' }, { name: 'test', id: 1 }, function(err, user) { + if (err) { + return done(err); + } + + assert.equal(user.name, 'test updated'); + + return done(); }); }); }); @@ -64,25 +73,31 @@ describe('.beforeCreate()', function() { before(function(done) { 
var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, beforeCreate: function(values, cb) { values.name = values.name + ' updated'; - cb(); + return cb(); } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, [criteria.where]); }, - create: function(con, col, values, cb) { return cb(null, values); } + find: function(con, query, cb) { return cb(null, [{ name: 'test', id: 1}] ); }, + create: function(con, query, cb) { return cb(null, query.newRecord); } }; var connections = { @@ -91,147 +106,29 @@ describe('.beforeCreate()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should not run beforeCreate and mutate values on find', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test'); - done(); - }); - }); - }); - }); - - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - - describe('without a record', function() { - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); + person = orm.collections.user; - it('should run the functions in order on create', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test fn1 fn2'); - done(); + return done(); + }); }); - }); - }); - - describe('without a record', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); + it('should not run beforeCreate and mutate values on find', function(done) { + person.findOrCreate({ name: 'test' }, { name: 
'test' }, function(err, user) { + if (err) { + return done(err); } - ] - }); - - waterline.loadCollection(Model); - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, [criteria.where]); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); + assert(user.name === 'test'); - it('should now run any of the functions on find', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test'); - done(); + return done(); + }); }); }); }); }); - }); diff --git a/test/unit/callbacks/beforeCreate.findOrCreateEach.js b/test/unit/callbacks/beforeCreate.findOrCreateEach.js deleted file mode 100644 index ffb2b2bde..000000000 --- a/test/unit/callbacks/beforeCreate.findOrCreateEach.js +++ /dev/null @@ -1,123 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.beforeCreate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeCreate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - 
if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * findOrCreateEach - */ - - describe('.findOrCreateEach()', function() { - - it('should run beforeCreate and mutate values', function(done) { - person.findOrCreateEach([{ name: 'test' }], [{ name: 'test' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeCreate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.findOrCreateEach([{ name: 'test' }], [{ name: 'test' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git a/test/unit/callbacks/beforeDestroy.destroy.js b/test/unit/callbacks/beforeDestroy.destroy.js index ccd65316a..f3cff3d61 100644 --- a/test/unit/callbacks/beforeDestroy.destroy.js +++ b/test/unit/callbacks/beforeDestroy.destroy.js @@ -1,18 +1,24 @@ -var Waterline = require('../../../lib/waterline'), - assert 
= require('assert'); +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); -describe('.beforeDestroy()', function() { - - describe('basic function', function() { - var person, status = false; +describe('Before Destroy Lifecycle Callback ::', function() { + describe('Destroy ::', function() { + var person; + var status = false; before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { - name: 'string' + id: { + type: 'number' + }, + name: { + type: 'string' + } }, beforeDestroy: function(criteria, cb) { @@ -21,10 +27,10 @@ describe('.beforeDestroy()', function() { } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { destroy: function(con, col, options, cb) { return cb(null, options); }}; + var adapterDef = { destroy: function(con, query, cb) { return cb(null, query); }}; var connections = { 'foo': { @@ -32,86 +38,25 @@ describe('.beforeDestroy()', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * Destroy - */ - - describe('.destroy()', function() { - - it('should run beforeDestroy', function(done) { - person.destroy({ name: 'test' }, function(err) { - assert(!err); - assert(status === true); - done(); - }); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + person = orm.collections.user; + return done(); }); }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - var person, status; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeDestroy: [ - // Function 1 - function(criteria, cb) { - status = 'fn1 '; - cb(); - }, - // Function 2 - function(criteria, cb) { - status = status + 'fn2'; - cb(); - } - ] - }); - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { destroy: function(con, col, options, cb) { return cb(null, options); }}; - - var connections = { - 'foo': { - adapter: 'foobar' + it('should run beforeDestroy', function(done) { + person.destroy({ name: 'test' }, function(err) { + if (err) { + return done(err); } - }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.destroy({ name: 'test' }, function(err) { - assert(!err); - assert(status === 'fn1 fn2'); - done(); + assert.equal(status, true); + return done(); }); }); }); - }); diff --git a/test/unit/callbacks/beforeValidation.create.js b/test/unit/callbacks/beforeValidation.create.js deleted file mode 100644 index e0255ca34..000000000 --- a/test/unit/callbacks/beforeValidation.create.js +++ /dev/null @@ -1,117 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.beforeValidate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture 
Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * Create - */ - - describe('.create()', function() { - - it('should run beforeValidate and mutate values', function(done) { - person.create({ name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test updated'); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person, status; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.create({ name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git a/test/unit/callbacks/beforeValidation.createEach.js b/test/unit/callbacks/beforeValidation.createEach.js deleted file mode 100644 index 1fa709ade..000000000 --- 
a/test/unit/callbacks/beforeValidation.createEach.js +++ /dev/null @@ -1,119 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.beforeValidate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * CreateEach - */ - - describe('.createEach()', function() { - - it('should run beforeValidate and mutate values', function(done) { - person.createEach([{ name: 'test' }, { name: 'test2' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - assert(users[1].name === 'test2 updated'); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - var person, status; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.createEach([{ name: 'test' }, { name: 'test2' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - assert(users[1].name === 'test2 fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git a/test/unit/callbacks/beforeValidation.findOrCreate.js b/test/unit/callbacks/beforeValidation.findOrCreate.js deleted file mode 100644 index 3f6ce701d..000000000 --- a/test/unit/callbacks/beforeValidation.findOrCreate.js +++ /dev/null @@ -1,241 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.beforeValidate()', function() { - - describe('basic function', function() { - - /** - * findOrCreate - */ - - describe('.findOrCreate()', function() { - - describe('without a record', function() { - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: function(values, cb) { - values.name = values.name 
+ ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run beforeValidate and mutate values on create', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test updated'); - done(); - }); - }); - }); - - describe('with a record', function() { - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, [criteria.where]); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should not run beforeValidate and mutate values on find', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test'); - done(); - }); - }); - }); - - - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. 
- */ - - describe('array of functions', function() { - - describe('without a record', function() { - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 1 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order on create', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test fn1 fn2'); - done(); - }); - }); - }); - - describe('without a record', function() { - var person; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 1 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, [criteria.where]); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - 
} - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should not run any of the functions on find', function(done) { - person.findOrCreate({ name: 'test' }, { name: 'test' }, function(err, user) { - assert(!err); - assert(user.name === 'test'); - done(); - }); - }); - }); - - }); - -}); diff --git a/test/unit/callbacks/beforeValidation.findOrCreateEach.js b/test/unit/callbacks/beforeValidation.findOrCreateEach.js deleted file mode 100644 index b55383f70..000000000 --- a/test/unit/callbacks/beforeValidation.findOrCreateEach.js +++ /dev/null @@ -1,123 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.beforeValidate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * findOrCreateEach - */ - - describe('.findOrCreateEach()', function() { - - it('should run beforeValidate and mutate values', function(done) { - person.findOrCreateEach([{ name: 'test' }], [{ name: 'test' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - done(); - }); - }); - 
}); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person, status; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, null); }, - create: function(con, col, values, cb) { return cb(null, values); } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.findOrCreateEach([{ name: 'test' }], [{ name: 'test' }], function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git a/test/unit/callbacks/beforeValidation.update.js b/test/unit/callbacks/beforeValidation.update.js deleted file mode 100644 index 6c74a3d01..000000000 --- a/test/unit/callbacks/beforeValidation.update.js +++ /dev/null @@ -1,117 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('.beforeValidate()', function() { - - describe('basic function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: function(values, cb) { - values.name = values.name + ' updated'; - cb(); 
- } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { update: function(con, col, criteria, values, cb) { return cb(null, [values]); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - /** - * Update - */ - - describe('.update()', function() { - - it('should run beforeValidate and mutate values', function(done) { - person.update({ name: 'criteria' }, { name: 'test' }, function(err, users) { - assert(!err); - assert(users[0].name === 'test updated'); - done(); - }); - }); - }); - }); - - - /** - * Test Callbacks can be defined as arrays and run in order. - */ - - describe('array of functions', function() { - var person, status; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: [ - // Function 1 - function(values, cb) { - values.name = values.name + ' fn1'; - cb(); - }, - - // Function 2 - function(values, cb) { - values.name = values.name + ' fn2'; - cb(); - } - ] - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { update: function(con, col, criteria, values, cb) { return cb(null, [values]); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - person = colls.collections.user; - done(); - }); - }); - - it('should run the functions in order', function(done) { - person.update({ name: 'criteria' }, { name: 'test' }, function(err, users) { - assert(!err); - assert(users[0].name === 'test fn1 fn2'); - done(); - }); - }); - }); - -}); diff --git 
a/test/unit/core/core.transformations/transformations.initialize.js b/test/unit/collection/transformations/transformations.initialize.js similarity index 73% rename from test/unit/core/core.transformations/transformations.initialize.js rename to test/unit/collection/transformations/transformations.initialize.js index 8c1d239ae..9df3aa669 100644 --- a/test/unit/core/core.transformations/transformations.initialize.js +++ b/test/unit/collection/transformations/transformations.initialize.js @@ -1,10 +1,8 @@ -var Transformer = require('../../../../lib/waterline/core/transformations'), - assert = require('assert'); - -describe('Core Transformations', function() { - - describe('initialize', function() { +var assert = require('assert'); +var Transformer = require('../../../../lib/waterline/utils/system/transformer-builder'); +describe('Collection Transformations ::', function() { + describe('Initialize ::', function() { describe('with string columnName', function() { var transformer; @@ -46,9 +44,8 @@ describe('Core Transformations', function() { return ''; })(); - assert(msg == 'columnName transformation must be a string'); + assert.strictEqual('Consistency violation: `columnName` must be a string. 
But for this attribute (`username`) it is not!', msg); }); }); }); - }); diff --git a/test/unit/core/core.transformations/transformations.serialize.js b/test/unit/collection/transformations/transformations.serialize.js similarity index 55% rename from test/unit/core/core.transformations/transformations.serialize.js rename to test/unit/collection/transformations/transformations.serialize.js index 1989034a5..e4a6f14d2 100644 --- a/test/unit/core/core.transformations/transformations.serialize.js +++ b/test/unit/collection/transformations/transformations.serialize.js @@ -1,12 +1,11 @@ -var Waterline = require('../../../../lib/waterline'), - Schema = require('waterline-schema'), - Transformer = require('../../../../lib/waterline/core/transformations'), - assert = require('assert'); - -describe('Core Transformations', function() { - - describe('serialize', function() { - +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Schema = require('waterline-schema'); +var Waterline = require('../../../../lib/waterline'); +var Transformer = require('../../../../lib/waterline/utils/system/transformer-builder'); + +describe('Collection Transformations ::', function() { + describe('Serialize ::', function() { describe('with normal key/value pairs', function() { var transformer; @@ -22,19 +21,20 @@ describe('Core Transformations', function() { }); it('should change username key to login', function() { - var values = transformer.serialize({ username: 'foo' }); + var values = { username: 'foo' }; + transformer.serializeValues(values); assert(values.login); - assert(values.login === 'foo'); + assert.equal(values.login, 'foo'); }); it('should work recursively', function() { - var values = transformer.serialize({ where: { user: { username: 'foo' }}}); + var values = transformer.serializeCriteria({ where: { user: { username: 'foo' }}}); assert(values.where.user.login); - assert(values.where.user.login === 'foo'); + assert.equal(values.where.user.login, 'foo'); }); 
it('should work on SELECT queries', function() { - var values = transformer.serialize( + var values = transformer.serializeCriteria( { where: { username: 'foo' @@ -44,7 +44,7 @@ describe('Core Transformations', function() { ); assert(values.where.login); - assert.equal(values.select.indexOf('login'), 0); + assert.equal(_.indexOf(values.select, 'login'), 0); }); }); @@ -56,24 +56,27 @@ describe('Core Transformations', function() { */ before(function() { - var collections = [], - waterline = new Waterline(); + var collections = []; - collections.push(Waterline.Collection.extend({ + collections.push(Waterline.Model.extend({ identity: 'customer', tableName: 'customer', + primaryKey: 'uuid', attributes: { uuid: { - type: 'string', - primaryKey: true + type: 'string' } } })); - collections.push(Waterline.Collection.extend({ + collections.push(Waterline.Model.extend({ identity: 'foo', tableName: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, customer: { model: 'customer' } @@ -85,17 +88,17 @@ describe('Core Transformations', function() { }); it('should change customer key to customer_uuid', function() { - var values = transformer.serialize({ customer: 1 }); + var values = { customer: 1 }; + transformer.serializeValues(values); assert(values.customer); - assert(values.customer === 1); + assert.equal(values.customer, 1); }); it('should work recursively', function() { - var values = transformer.serialize({ where: { user: { customer: 1 }}}); + var values = transformer.serializeCriteria({ where: { user: { customer: 1 }}}); assert(values.where.user.customer); - assert(values.where.user.customer === 1); + assert.equal(values.where.user.customer, 1); }); }); }); - }); diff --git a/test/unit/collection/transformations/transformations.unserialize.js b/test/unit/collection/transformations/transformations.unserialize.js new file mode 100644 index 000000000..20b00da2f --- /dev/null +++ b/test/unit/collection/transformations/transformations.unserialize.js @@ 
-0,0 +1,56 @@ +var assert = require('assert'); +var Transformer = require('../../../../lib/waterline/utils/system/transformer-builder'); + +describe('Collection Transformations ::', function() { + describe('Unserialize ::', function() { + describe('with normal key/value pairs', function() { + var transformer; + + before(function() { + var attributes = { + name: 'string', + username: { + columnName: 'login' + } + }; + + transformer = new Transformer(attributes, {}); + }); + + it('should change login key to username', function() { + var values = transformer.unserialize({ login: 'foo' }); + assert(values.username); + assert.equal(values.username, 'foo'); + }); + }); + + describe('with columnNames that conflict with other attribute names', function() { + + var transformer; + + before(function() { + var attributes = { + identity: { + type: 'string', + columnName: 'aid', + }, + ownerId: { + type: 'string', + columnName: 'identity', + } + }; + + transformer = new Transformer(attributes, {}); + }); + + it('should change unserialize both attributes correctly', function() { + var values = transformer.unserialize({ aid: 'foo', identity: 'bar' }); + assert(values.identity); + assert.equal(values.identity, 'foo'); + assert(values.ownerId); + assert.equal(values.ownerId, 'bar'); + }); + + }); + }); +}); diff --git a/test/unit/collection/type-cast/cast.boolean.js b/test/unit/collection/type-cast/cast.boolean.js new file mode 100644 index 000000000..e5b230dde --- /dev/null +++ b/test/unit/collection/type-cast/cast.boolean.js @@ -0,0 +1,103 @@ +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../../lib/waterline'); + +describe.skip('Type Casting ::', function() { + describe('with `type: \'boolean\'` ::', function() { + + var orm; + var Person; + before(function(done) { + orm = new Waterline(); + + orm.registerModel(Waterline.Model.extend({ + identity: 'person', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + 
type: 'number' + }, + activated: { + type: 'boolean' + }, + age: { + type: 'number' + }, + name: { + type: 'string' + }, + organization: { + type: 'json' + }, + avatarBlob: { + type: 'ref' + } + } + })); + + orm.initialize({ + adapters: { + foobar: {} + }, + datastores: { + foo: { adapter: 'foobar' } + } + }, function(err, orm) { + if (err) { return done(err); } + + Person = orm.collections.person; + return done(); + });// + + });// + + + it('should act as no-op when given a boolean', function() { + assert.equal(Person.validate('activated', true), true); + assert.equal(Person.validate('activated', false), false); + }); + + it('should cast string "true" to a boolean', function() { + assert.equal(Person.validate('activated', 'true'), true); + }); + + it('should cast string "false" to a boolean', function() { + // FUTURE: this may change in a future major version release of RTTC + // (this test is here to help catch that when/if it happens) + assert.equal(Person.validate('activated', 'false'), false); + }); + + it('should cast number 0 to a boolean', function() { + // FUTURE: this may change in a future major version release of RTTC + // (this test is here to help catch that when/if it happens) + assert.equal(Person.validate('activated', 0), false); + }); + + it('should cast number 1 to a boolean', function() { + assert.equal(Person.validate('activated', 1), true); + }); + + it('should throw E_VALIDATION error when a value can\'t be cast', function() { + try { + Person.validate('activated', 'not yet'); + } catch (e) { + switch (e.code) { + case 'E_VALIDATION': + // FUTURE: maybe expand test to check more things + return; + + // As of Thu Dec 22, 2016, this test is failing because + // validation is not being completely rolled up yet. + default: throw new Error('The actual error code was "'+e.code+'" - but it should have been "E_VALIDATION": the rolled-up validation error. 
This is so that errors from the public `.validate()` are consistent with errors exposed when creating or updating records (i.e. when multiple values are being set at the same time.) Here is the error that was actually received:\n```\n' +e.stack+'\n```'); + } + } + }); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // For further details on edge case handling, plus thousands more tests, see: + // • http://npmjs.com/package/rttc + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + });// +});// diff --git a/test/unit/collection/type-cast/cast.json.js b/test/unit/collection/type-cast/cast.json.js new file mode 100644 index 000000000..6564e2137 --- /dev/null +++ b/test/unit/collection/type-cast/cast.json.js @@ -0,0 +1,139 @@ +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../../lib/waterline'); + +describe.skip('Type Casting ::', function() { + describe('with `type: \'json\'` ::', function() { + + var orm; + var Person; + before(function(done) { + orm = new Waterline(); + + orm.registerModel(Waterline.Model.extend({ + identity: 'person', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + activated: { + type: 'boolean' + }, + age: { + type: 'number' + }, + name: { + type: 'string' + }, + organization: { + type: 'json' + }, + avatarBlob: { + type: 'ref' + } + } + })); + + orm.initialize({ + adapters: { + foobar: {} + }, + datastores: { + foo: { adapter: 'foobar' } + } + }, function(err, orm) { + if (err) { return done(err); } + + Person = orm.collections.person; + return done(); + });// + + });// + + + it('should leave the null literal as-is', function() { + assert.equal(Person.validate('organization', null), null); + }); + + it('should leave numbers as-is', function() { + assert.equal(Person.validate('organization', 4), 4); + assert.equal(Person.validate('organization', 0), 0); + 
assert.equal(Person.validate('organization', -10000.32852), -10000.32852); + }); + + it('should leave booleans as-is', function() { + assert.equal(Person.validate('organization', true), true); + assert.equal(Person.validate('organization', false), false); + }); + + describe('given a string imposter (i.e. looks like another type)', function() { + + it('should leave "null" imposter string as-is', function (){ + assert.equal(Person.validate('organization', 'null'), 'null'); + }); + it('should leave number imposter strings as-is', function (){ + assert.equal(Person.validate('organization', '4'), '4'); + assert.equal(Person.validate('organization', '0'), '0'); + assert.equal(Person.validate('organization', '-10000.32852'), '-10000.32852'); + }); + it('should leave boolean imposter strings as-is', function (){ + assert.equal(Person.validate('organization', 'true'), 'true'); + assert.equal(Person.validate('organization', 'false'), 'false'); + }); + it('should leave dictionary imposter strings as-is', function (){ + var DICTIONARY_IMPOSTER_STR = '{ name: \'Foo Bar\', location: [-31.0123, 31.0123] }'; + var result = Person.validate('organization', '{ name: \'Foo Bar\', location: [-31.0123, 31.0123] }'); + assert(_.isString(result)); + assert.equal(DICTIONARY_IMPOSTER_STR, result); + }); + + }); + + it('should decycle circular nonsense', function(){ + var cersei = {}; + var jaime = {}; + cersei.brother = jaime; + cersei.lover = jaime; + jaime.sister = cersei; + jaime.lover = cersei; + + var dryJaime = Person.validate('organization', jaime); + assert.deepEqual(dryJaime, { + sister: { brother: '[Circular ~]', lover: '[Circular ~]' }, + lover: { brother: '[Circular ~]', lover: '[Circular ~]' } + }); + + var dryCersei = Person.validate('organization', cersei); + assert.deepEqual(dryCersei, { + brother: { sister: '[Circular ~]', lover: '[Circular ~]' }, + lover: { sister: '[Circular ~]', lover: '[Circular ~]' } + }); + + }); + + it('should reject Readable streams', function(){ + 
try { + Person.validate('organization', new (require('stream').Readable)()); + } catch (e) { + switch (e.code) { + case 'E_VALIDATION': + // FUTURE: maybe expand test to check more things + return; + + // As of Thu Dec 22, 2016, this test is failing because + // validation is not being completely rolled up yet. + default: throw new Error('The actual error code was "'+e.code+'" - but it should have been "E_VALIDATION": the rolled-up validation error. This is so that errors from the public `.validate()` are consistent with errors exposed when creating or updating records (i.e. when multiple values are being set at the same time.) Here is the error that was actually received:\n```\n' +e.stack+'\n```'); + } + } + }); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // For further details on edge case handling, plus thousands more tests, see: + // • http://npmjs.com/package/rttc + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + }); +}); diff --git a/test/unit/collection/type-cast/cast.number.js b/test/unit/collection/type-cast/cast.number.js new file mode 100644 index 000000000..407996552 --- /dev/null +++ b/test/unit/collection/type-cast/cast.number.js @@ -0,0 +1,93 @@ +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../../lib/waterline'); + +describe.skip('Type Casting ::', function() { + describe('with `type: \'number\'` ::', function() { + + var orm; + var Person; + before(function(done) { + orm = new Waterline(); + + orm.registerModel(Waterline.Model.extend({ + identity: 'person', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + activated: { + type: 'boolean' + }, + age: { + type: 'number' + }, + name: { + type: 'string' + }, + organization: { + type: 'json' + }, + avatarBlob: { + type: 'ref' + } + } + })); + + orm.initialize({ + adapters: { + foobar: {} + }, + datastores: { 
+ foo: { adapter: 'foobar' } + } + }, function(err, orm) { + if (err) { return done(err); } + + Person = orm.collections.person; + return done(); + });// + + });// + + it('should cast strings to numbers when integers', function() { + assert.equal(Person.validate('age', '27'), 27); + }); + + it('should cast strings to numbers when floats', function() { + assert.equal(Person.validate('age', '27.01'), 27.01); + }); + + it('should throw when a number can\'t be cast', function() { + var values = { age: 'steve' }; + assert.throws(function() { + person._cast(values); + }); + }); + + it('should not try and do anything fancy with mongo ID\'s, even when it\'s really tempting', function() { + try { + Person.validate('age', '51f88ddc5d7967808b000002'); + } catch (e) { + switch (e.code) { + case 'E_VALIDATION': + // FUTURE: maybe expand test to check more things + return; + + // As of Thu Dec 22, 2016, this test is failing because + // validation is not being completely rolled up yet. + default: throw new Error('The actual error code was "'+e.code+'" - but it should have been "E_VALIDATION": the rolled-up validation error. This is so that errors from the public `.validate()` are consistent with errors exposed when creating or updating records (i.e. when multiple values are being set at the same time.) 
Here is the error that was actually received:\n```\n' +e.stack+'\n```'); + } + } + }); + + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // For further details on edge case handling, plus thousands more tests, see: + // • http://npmjs.com/package/rttc + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + }); +}); diff --git a/test/unit/collection/type-cast/cast.ref.js b/test/unit/collection/type-cast/cast.ref.js new file mode 100644 index 000000000..128197ad8 --- /dev/null +++ b/test/unit/collection/type-cast/cast.ref.js @@ -0,0 +1,89 @@ +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../../lib/waterline'); + +describe.skip('Type Casting ::', function() { + describe('with `type: \'ref\'` ::', function() { + var orm; + var Person; + before(function(done) { + orm = new Waterline(); + + orm.registerModel(Waterline.Model.extend({ + identity: 'person', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + activated: { + type: 'boolean' + }, + age: { + type: 'number' + }, + name: { + type: 'string' + }, + organization: { + type: 'json' + }, + avatarBlob: { + type: 'ref' + } + } + })); + + orm.initialize({ + adapters: { + foobar: {} + }, + datastores: { + foo: { adapter: 'foobar' } + } + }, function(err, orm) { + if (err) { return done(err); } + + Person = orm.collections.person; + return done(); + });// + + });// + + it('should not modify ref types (and should return the original reference)', function() { + + var pretendIncomingBlobStream = new (require('stream').Readable)(); + // Note that Waterline also ensures strict equality: + assert(Person.validate('avatarBlob', pretendIncomingBlobStream) === pretendIncomingBlobStream); + }); + + it('should accept EVEN the wildest nonsense, just like it is, and not change it, not even one little bit', function() { + + var wildNonsense = [ Waterline, 
assert, _ ]; + wildNonsense.__proto__ = Waterline.prototype; + wildNonsense.constructor = assert; + wildNonsense.toJSON = _; + wildNonsense.toString = Waterline; + Object.defineProperty(wildNonsense, 'surprise', { + enumerable: false, + configurable: false, + writable: false, + value: wildNonsense + }); + Object.freeze(wildNonsense); + wildNonsense.temperature = -Infinity; + Object.seal(wildNonsense); + wildNonsense.numSeals = NaN; + wildNonsense.numSeaLions = Infinity; + + assert(Person.validate('avatarBlob', wildNonsense) === wildNonsense); + }); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // For further details on edge case handling, plus thousands more tests, see: + // • http://npmjs.com/package/rttc + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + }); +}); diff --git a/test/unit/collection/type-cast/cast.string.js b/test/unit/collection/type-cast/cast.string.js new file mode 100644 index 000000000..21d5fd32d --- /dev/null +++ b/test/unit/collection/type-cast/cast.string.js @@ -0,0 +1,83 @@ +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../../lib/waterline'); + +describe.skip('Type Casting ::', function() { + describe('with `type: \'string\'` ::', function() { + + var orm; + var Person; + before(function(done) { + orm = new Waterline(); + + orm.registerModel(Waterline.Model.extend({ + identity: 'person', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + activated: { + type: 'boolean' + }, + age: { + type: 'number' + }, + name: { + type: 'string' + }, + organization: { + type: 'json' + }, + avatarBlob: { + type: 'ref' + } + } + })); + + orm.initialize({ + adapters: { + foobar: {} + }, + datastores: { + foo: { adapter: 'foobar' } + } + }, function(err, orm) { + if (err) { return done(err); } + + Person = orm.collections.person; + return done(); + });// + + });// + + + 
it('should cast numbers to strings', function() { + assert.equal(Person.validate('name', 27), '27'); + }); + + + it('should throw E_VALIDATION error when a value can\'t be cast', function() { + try { + Person.validate('name', null); + } catch (e) { + switch (e.code) { + case 'E_VALIDATION': + // FUTURE: maybe expand test to check more things + return; + + // As of Thu Dec 22, 2016, this test is failing because + // validation is not being completely rolled up yet. + default: throw new Error('The actual error code was "'+e.code+'" - but it should have been "E_VALIDATION": the rolled-up validation error. This is so that errors from the public `.validate()` are consistent with errors exposed when creating or updating records (i.e. when multiple values are being set at the same time.) Here is the error that was actually received:\n```\n' +e.stack+'\n```'); + } + } + }); + + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + // For further details on edge case handling, plus thousands more tests, see: + // • http://npmjs.com/package/rttc + // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + }); +}); diff --git a/test/unit/collection/validations.js b/test/unit/collection/validations.js new file mode 100644 index 000000000..5b0a0c6d9 --- /dev/null +++ b/test/unit/collection/validations.js @@ -0,0 +1,164 @@ +var assert = require('assert'); +var util = require('util'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); + +describe('Collection Validator ::', function() { + describe('.validate()', function() { + var person; + var car; + + before(function(done) { + var waterline = new Waterline(); + + var Person = Waterline.Model.extend({ + identity: 'person', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + age: { + type: 'number' + }, + sex: { + type: 'string', + required: true, + validations: { + isIn: ['male', 
'female'] + } + } + } + }); + + var Car = Waterline.Model.extend({ + identity: 'car', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'string', + required: true, + validations: { + minLength: 6 + } + } + } + }); + + waterline.registerModel(Person); + waterline.registerModel(Car); + + var datastores = { + 'foo': { + adapter: 'foobar' + } + }; + + waterline.initialize({ adapters: { foobar: { update: function(con, query, cb) { return cb(); }, create: function(con, query, cb) { return cb(); } } }, datastores: datastores }, function(err, orm) { + if (err) { + return done(err); + } + person = orm.collections.person; + car = orm.collections.car; + done(); + }); + }); + + it('should not return any errors when no validation rules are violated', function(done) { + person.create({ sex: 'male' }).exec(function(err) { + assert(!err); + return done(); + }); + }); + + it('should return an Error with name `UsageError` when a required field is not present in a `create`', function(done) { + person.create({}).exec(function(err) { + assert(err); + assert.equal(err.name, 'UsageError'); + assert(err.message.match(/required/)); + return done(); + }); + }); + + it('should return an Error with name `UsageError` when a required string field is set to empty string in a `create`', function(done) { + person.create({ sex: '' }).exec(function(err) { + assert(err); + assert.equal(err.name, 'UsageError'); + assert(err.message.match(/required/)); + return done(); + }); + }); + + it('should return an Error with name `UsageError` when a field is set to the wrong type in a `create`', function(done) { + person.create({ name: 'foo', age: 'bar' }).exec(function(err) { + assert(err); + assert.equal(err.name, 'UsageError'); + assert(err.message.match(/type/)); + return done(); + }); + }); + + it('should return an Error with name `UsageError` when a field fails a validation rule in a `create`', function(done) { + person.create({ name: 'foo', sex: 'bar' }).exec(function(err) { + 
assert(err); + assert.equal(err.name, 'UsageError'); + assert(err.message.match(/rule/)); + return done(); + }); + }); + + it('should not return an Error when a required field is not present in an `update`', function(done) { + person.update({}, {}).exec(function(err) { + assert(!err); + return done(); + }); + }); + + it('should return an Error with name `UsageError` when a required string field is set to empty string in a `update`', function(done) { + person.update({}, { sex: '' }).exec(function(err) { + assert(err); + assert.equal(err.name, 'UsageError'); + assert(err.message.match(/required/)); + return done(); + }); + }); + + it('should return an Error with name `UsageError` when a field is set to the wrong type in a `update`', function(done) { + person.update({}, { age: 'bar' }).exec(function(err) { + assert(err); + assert.equal(err.name, 'UsageError'); + assert(err.message.match(/type/)); + return done(); + }); + }); + + it('should return an Error with name `UsageError` when a field fails a validation rule in a `update`', function(done) { + person.update({}, { sex: 'bar' }).exec(function(err) { + assert(err); + assert.equal(err.name, 'UsageError'); + assert(err.message.match(/rule/)); + return done(); + }); + }); + + it('should not return any errors when a primary key does not violate any validations.', function(done) { + + car.create({ id: 'foobarbax' }).exec(function(err) { + assert(!err); + return done(); + }); + }); + + it('should return an Error with name `UsageError` when a primary key fails a validation rule in a `create`', function(done) { + car.create({ id: 'foo' }).exec(function(err) { + assert(err); + assert.equal(err.name, 'UsageError'); + assert(err.message.match(/rule/)); + return done(); + }); + }); + + }); +}); diff --git a/test/unit/core/core.callbacks.js b/test/unit/core/core.callbacks.js deleted file mode 100644 index 4cfaaadff..000000000 --- a/test/unit/core/core.callbacks.js +++ /dev/null @@ -1,238 +0,0 @@ -var Waterline = 
require('../../../lib/waterline'), - assert = require('assert'); - -describe('Core Lifecycle Callbacks', function() { - - /** - * Automatically build an internal Callbacks object - * that uses no-op functions. - */ - - describe('default callbacks object', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: {}, - invalidState: function() {} - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should build a callbacks object', function() { - assert(Array.isArray(person._callbacks.beforeValidate)); - assert(typeof person._callbacks.beforeValidate[0] === 'function'); - - assert(Array.isArray(person._callbacks.afterValidate)); - assert(typeof person._callbacks.afterValidate[0] === 'function'); - - assert(Array.isArray(person._callbacks.beforeUpdate)); - assert(typeof person._callbacks.beforeUpdate[0] === 'function'); - - assert(Array.isArray(person._callbacks.afterUpdate)); - assert(typeof person._callbacks.afterUpdate[0] === 'function'); - - assert(Array.isArray(person._callbacks.beforeCreate)); - assert(typeof person._callbacks.beforeCreate[0] === 'function'); - - assert(Array.isArray(person._callbacks.afterCreate)); - assert(typeof person._callbacks.afterCreate[0] === 'function'); - - assert(Array.isArray(person._callbacks.beforeDestroy)); - assert(typeof person._callbacks.beforeDestroy[0] === 'function'); - - assert(Array.isArray(person._callbacks.afterDestroy)); - assert(typeof person._callbacks.afterDestroy[0] === 'function'); - }); - - - it('should ignore invalid lifecycle states', function() { - assert(!person._callbacks.invalidState); - }); - }); - - /** - * Callback 
states should allow an array to be used - * and should be able to mutate state. - */ - - describe('callback as an array', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: 'string', - - changeState_1: function() { - this.name = this.name + ' changed'; - }, - - changeState_2: function() { - this.name = this.name + ' again'; - } - }, - - beforeValidate: ['changeState_1', 'changeState_2'] - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should map functions to internal _callbacks object', function() { - assert(Array.isArray(person._callbacks.beforeValidate)); - assert(typeof person._callbacks.beforeValidate[0] === 'function'); - }); - - it('should mutate values', function() { - var values = { name: 'Foo' }; - person._callbacks.beforeValidate.forEach(function(key) { - key.call(values); - }); - - assert(values.name === 'Foo changed again'); - }); - }); - - /** - * Callback states should allow an string to be used - * and should be able to mutate state. 
- */ - - describe('callback as a string', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: 'string', - - changeState_1: function() { - this.name = this.name + ' changed'; - } - }, - - beforeValidate: 'changeState_1' - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should map functions to internal _callbacks object', function() { - assert(Array.isArray(person._callbacks.beforeValidate)); - assert(typeof person._callbacks.beforeValidate[0] === 'function'); - }); - - it('should mutate values', function() { - var values = { name: 'Foo' }; - person._callbacks.beforeValidate.forEach(function(key) { - key.call(values); - }); - - assert(values.name === 'Foo changed'); - }); - }); - - /** - * Callback states should allow a function to be used - * and should be able to mutate state. 
- */ - - describe('callback as a function', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: 'string' - }, - - beforeValidate: function() { - this.name = this.name + ' changed'; - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should map functions to internal _callbacks object', function() { - assert(Array.isArray(person._callbacks.beforeValidate)); - assert(typeof person._callbacks.beforeValidate[0] === 'function'); - }); - - it('should mutate values', function() { - var values = { name: 'Foo' }; - person._callbacks.beforeValidate.forEach(function(key) { - key.call(values); - }); - - assert(values.name === 'Foo changed'); - }); - }); - -}); diff --git a/test/unit/core/core.cast/cast.array.js b/test/unit/core/core.cast/cast.array.js deleted file mode 100644 index 8e522b3b2..000000000 --- a/test/unit/core/core.cast/cast.array.js +++ /dev/null @@ -1,42 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Type Casting', function() { - describe('.run() with Array type', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: { - type: 'array' - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should 
cast values to an array', function() { - var values = person._cast.run({ name: 'foo' }); - assert(Array.isArray(values.name)); - assert(values.name.length === 1); - }); - - }); -}); diff --git a/test/unit/core/core.cast/cast.boolean.js b/test/unit/core/core.cast/cast.boolean.js deleted file mode 100644 index 5170909ad..000000000 --- a/test/unit/core/core.cast/cast.boolean.js +++ /dev/null @@ -1,61 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Type Casting', function() { - describe('.run() with Boolean type', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: { - type: 'boolean' - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should cast string "true" to a boolean', function() { - var values = person._cast.run({ name: 'true' }); - assert(values.name === true); - }); - - it('should cast string "false" to a boolean', function() { - var values = person._cast.run({ name: 'false' }); - assert(values.name === false); - }); - - it('should not cast bad values', function() { - var values = person._cast.run({ name: 'foo' }); - assert(values.name === 'foo'); - }); - - it('should cast integer 0 to a boolean', function() { - var values = person._cast.run({ name: 0 }); - assert(values.name === false); - }); - - it('should cast integer 1 to a boolean', function() { - var values = person._cast.run({ name: 1 }); - assert(values.name === true); - }); - - }); -}); diff --git a/test/unit/core/core.cast/cast.date.js b/test/unit/core/core.cast/cast.date.js deleted file mode 100644 index 906116483..000000000 --- 
a/test/unit/core/core.cast/cast.date.js +++ /dev/null @@ -1,52 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Type Casting', function() { - describe('.run() with Date type', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: { - type: 'date' - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should cast strings to a date', function() { - var values = person._cast.run({ name: '2013-09-18' }); - assert(values.name.constructor.name === 'Date'); - assert(values.name.toUTCString() === 'Wed, 18 Sep 2013 00:00:00 GMT'); - }); - - it('should objects that implement toDate()', function() { - function Foo() {} - Foo.prototype.toDate = function () { return new Date(1379462400000); }; - var values = person._cast.run({ - name: new Foo() - }); - assert(values.name.constructor.name === 'Date'); - assert(values.name.toUTCString() === 'Wed, 18 Sep 2013 00:00:00 GMT'); - }); - - }); -}); diff --git a/test/unit/core/core.cast/cast.float.js b/test/unit/core/core.cast/cast.float.js deleted file mode 100644 index 2eecabef4..000000000 --- a/test/unit/core/core.cast/cast.float.js +++ /dev/null @@ -1,42 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Type Casting', function() { - describe('.run() with Float type', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: { - type: 'float' - } - } - }); - - 
waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should cast strings to numbers', function() { - var values = person._cast.run({ name: '27.01' }); - assert(typeof values.name === 'number'); - assert(values.name === 27.01); - }); - - }); -}); diff --git a/test/unit/core/core.cast/cast.integer.js b/test/unit/core/core.cast/cast.integer.js deleted file mode 100644 index 05e98f565..000000000 --- a/test/unit/core/core.cast/cast.integer.js +++ /dev/null @@ -1,57 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Type Casting', function() { - describe('.run() with Integer type', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - id: { - type: 'integer' - }, - name: { - type: 'integer' - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should cast strings to numbers', function() { - var values = person._cast.run({ name: '27' }); - assert(typeof values.name === 'number'); - assert(values.name === 27); - }); - - it('should not try and cast mongo ID\'s when an id property is used', function() { - var values = person._cast.run({ id: '51f88ddc5d7967808b000002' }); - assert(typeof values.id === 'string'); - assert(values.id === '51f88ddc5d7967808b000002'); - }); - - it('should not try and cast mongo ID\'s when value matches a mongo string', function() { - var values = 
person._cast.run({ name: '51f88ddc5d7967808b000002' }); - assert(typeof values.name === 'string'); - assert(values.name === '51f88ddc5d7967808b000002'); - }); - - }); -}); diff --git a/test/unit/core/core.cast/cast.string.js b/test/unit/core/core.cast/cast.string.js deleted file mode 100644 index 8e2b3944a..000000000 --- a/test/unit/core/core.cast/cast.string.js +++ /dev/null @@ -1,43 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Type Casting', function() { - describe('.run() with String type', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: { - type: 'string' - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should cast numbers to strings', function() { - var values = person._cast.run({ name: 27 }); - - assert(typeof values.name === 'string'); - assert(values.name === '27'); - }); - - }); -}); diff --git a/test/unit/core/core.schema/schema.autoValues.js b/test/unit/core/core.schema/schema.autoValues.js deleted file mode 100644 index 336a78207..000000000 --- a/test/unit/core/core.schema/schema.autoValues.js +++ /dev/null @@ -1,197 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Schema', function() { - - describe('with custom primary key', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - first_name: { - type: 'string', - primaryKey: true - } - } - }); - - 
waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should pass the primary key down to the adapter', function() { - assert(person._schema.schema.first_name.primaryKey); - assert(person._schema.schema.first_name.unique); - assert(!person._schema.schema.id); - }); - }); - - describe('with autoIncrement key', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - count: { - autoIncrement: true - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should pass the autoIncrement down to the adapter', function() { - assert(person._schema.schema.count.autoIncrement); - }); - - it('should set the type to integer', function() { - assert(person._schema.schema.count.type === 'integer'); - }); - }); - - describe('with uniqueness key', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: { - type: 'string', - unique: true - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should pass the unique key down to the adapter', function() { - 
assert(person._schema.schema.name.unique); - }); - }); - - describe('with index key', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - name: { - type: 'string', - index: true - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should pass the index key down to the adapter', function() { - assert(person._schema.schema.name.index); - }); - }); - - describe('with enum key', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - sex: { - type: 'string', - enum: ['male', 'female'] - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should pass the enum options down to the adapter', function() { - assert(Array.isArray(person._schema.schema.sex.enum)); - assert(person._schema.schema.sex.enum.length === 2); - }); - }); - -}); diff --git a/test/unit/core/core.schema/schema.cleanValues.js b/test/unit/core/core.schema/schema.cleanValues.js deleted file mode 100644 index 4254f2340..000000000 --- a/test/unit/core/core.schema/schema.cleanValues.js +++ /dev/null @@ -1,99 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Schema', function() { - - describe('cleanValues method', function() { - var user; - var 
userschemaless; - - before(function(done) { - var waterline = new Waterline(); - - var UserSchema = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - schema: true, - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - age: { - type: 'integer', - }, - schemalessFriends: { - collection: 'userschemaless', - via: 'schemaFriends' - } - } - }); - - var UserSchemaless = Waterline.Collection.extend({ - identity: 'userschemaless', - connection: 'foo', - schema: false, - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - age: { - type: 'integer', - }, - schemaFriends: { - collection: 'user', - via: 'schemalessFriends' - } - } - }); - - waterline.loadCollection(UserSchema); - waterline.loadCollection(UserSchemaless); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - user = colls.collections.user; - userschemaless = colls.collections.userschemaless; - done(); - }); - }); - - it('when collection is schemaless, should only remove collection attributes.', function() { - - var rawValues = { - name: 'don-moe', - non: 'should be here', - schemaFriends: [] - } - - var cleanValues = userschemaless._schema.cleanValues(rawValues); - - assert.equal(cleanValues.name, 'don-moe'); - assert.equal(cleanValues.non, 'should be here'); - assert.equal(cleanValues.schemaFriends, undefined); - }); - - it('when collection has schema, should clean attributes not in the schema, including collection attributes.', function() { - - var rawValues = { - name: 'don-moe', - non: 'should be here', - schemalessFriends: [] - } - - var cleanValues = user._schema.cleanValues(rawValues); - - assert.equal(cleanValues.name, 'don-moe'); - assert.equal(cleanValues.non, undefined); - assert.equal(cleanValues.schemalessFriends, undefined); - }); - }); - -}); diff --git 
a/test/unit/core/core.schema/schema.instanceMethods.js b/test/unit/core/core.schema/schema.instanceMethods.js deleted file mode 100644 index c073c108e..000000000 --- a/test/unit/core/core.schema/schema.instanceMethods.js +++ /dev/null @@ -1,41 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Schema', function() { - - describe('with instance methods', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - first_name: 'string', - doSomething: function() {} - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should ignore instance methods in the schema', function() { - assert(!person._schema.schema.doSomething); - }); - }); - -}); diff --git a/test/unit/core/core.schema/schema.keyValue.js b/test/unit/core/core.schema/schema.keyValue.js deleted file mode 100644 index 415483132..000000000 --- a/test/unit/core/core.schema/schema.keyValue.js +++ /dev/null @@ -1,46 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Schema', function() { - - describe('with simple key/value attributes', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - first_name: 'STRING', - last_name: 'STRING' - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return 
done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should set internal schema attributes', function() { - assert(person._schema.schema.first_name); - assert(person._schema.schema.last_name); - }); - - it('should lowercase attribute types', function() { - assert(person._schema.schema.first_name.type === 'string'); - }); - }); - -}); diff --git a/test/unit/core/core.schema/schema.object.js b/test/unit/core/core.schema/schema.object.js deleted file mode 100644 index 65b06ad6d..000000000 --- a/test/unit/core/core.schema/schema.object.js +++ /dev/null @@ -1,93 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Schema', function() { - - describe('with object attribute', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - first_name: { type: 'STRING' }, - last_name: { type: 'STRING' }, - phone: { - type: 'STRING', - defaultsTo: '555-555-5555' - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should set internal schema attributes', function() { - assert(person._schema.schema.first_name); - assert(person._schema.schema.last_name); - }); - - it('should lowercase attribute types', function() { - assert(person._schema.schema.first_name.type === 'string'); - }); - - it('should set defaultsTo value', function() { - assert(person._schema.schema.phone.defaultsTo === '555-555-5555'); - }); - }); - - describe('with special key object attribute', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 
'person', - connection: 'foo', - attributes: { - first_name: { type: 'STRING' }, - last_name: { type: 'STRING' }, - type: { - type: 'STRING', - columnName: 'person_type' - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should set type to attributes', function() { - assert(person._schema.schema.first_name.type); - }); - }); - -}); diff --git a/test/unit/core/core.schema/schema.specialTypes.js b/test/unit/core/core.schema/schema.specialTypes.js deleted file mode 100644 index 9b737aa31..000000000 --- a/test/unit/core/core.schema/schema.specialTypes.js +++ /dev/null @@ -1,45 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Schema', function() { - - describe('with special types', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - email: 'email', - age: 'integer' - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should transform unknown types to strings', function() { - assert(person._schema.schema.email.type === 'string'); - }); - - it('should not transform known type', function() { - assert(person._schema.schema.age.type === 'integer'); - }); - }); - -}); diff --git a/test/unit/core/core.schema/schema.validationKeys.js b/test/unit/core/core.schema/schema.validationKeys.js deleted file mode 100644 index 183f81adc..000000000 --- 
a/test/unit/core/core.schema/schema.validationKeys.js +++ /dev/null @@ -1,43 +0,0 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Core Schema', function() { - - describe('with validation properties', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - first_name: { - type: 'STRING', - length: { min: 2, max: 10 } - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - person = colls.collections.person; - done(); - }); - }); - - it('should ignore validation properties in the schema', function() { - assert(!person._schema.schema.first_name.length); - }); - }); - -}); diff --git a/test/unit/core/core.transformations/transformations.unserialize.js b/test/unit/core/core.transformations/transformations.unserialize.js deleted file mode 100644 index 0d376a432..000000000 --- a/test/unit/core/core.transformations/transformations.unserialize.js +++ /dev/null @@ -1,30 +0,0 @@ -var Transformer = require('../../../../lib/waterline/core/transformations'), - assert = require('assert'); - -describe('Core Transformations', function() { - - describe('unserialize', function() { - - describe('with normal key/value pairs', function() { - var transformer; - - before(function() { - var attributes = { - name: 'string', - username: { - columnName: 'login' - } - }; - - transformer = new Transformer(attributes, {}); - }); - - it('should change login key to username', function() { - var values = transformer.unserialize({ login: 'foo' }); - assert(values.username); - assert(values.username === 'foo'); - }); - }); - - }); -}); diff --git a/test/unit/core/core.validations.js b/test/unit/core/core.validations.js 
deleted file mode 100644 index 295763770..000000000 --- a/test/unit/core/core.validations.js +++ /dev/null @@ -1,206 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Core Validator', function() { - - describe('.build() with model attributes', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - first_name: { - type: 'string', - length: { min: 2, max: 5 } - }, - last_name: { - type: 'string', - required: true, - defaultsTo: 'Smith', - meta: { - foo: 'bar' - } - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) { return done(err); } - person = colls.collections.person; - return done(); - }); - }); - - - it('should build a validation object', function() { - var validations = person._validator.validations; - - assert(validations.first_name); - assert(validations.first_name.type === 'string'); - assert(Object.keys(validations.first_name.length).length === 2); - assert(validations.first_name.length.min === 2); - assert(validations.first_name.length.max === 5); - - assert(validations.last_name); - assert(validations.last_name.type === 'string'); - assert(validations.last_name.required === true); - }); - - it('should ignore schema properties', function() { - assert(!person._validator.validations.last_name.defaultsTo); - }); - - it('should ignore the meta key', function() { - assert(!person._validator.validations.last_name.meta); - }); - - }); - - - describe('.validate()', function() { - var person; - - before(function(done) { - var waterline = new Waterline(); - - var Person = Waterline.Collection.extend({ - identity: 'person', - connection: 'foo', - attributes: { - first_name: { - type: 'string', - 
min: 2, - max: 5 - }, - last_name: { - type: 'string', - required: true, - defaultsTo: 'Smith' - }, - city: { - type: 'string', - maxLength: 7 - } - } - }); - - waterline.loadCollection(Person); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) { return done(err); } - person = colls.collections.person; - done(); - }); - }); - - - it('should validate types', function(done) { - person._validator.validate({ first_name: 27, last_name: 32 }, function(err, validationErrors) { - assert(!err, err); - assert(validationErrors); - assert(validationErrors.first_name); - assert(validationErrors.last_name); - assert(validationErrors.first_name[0].rule === 'string'); - assert(validationErrors.last_name[0].rule === 'string'); - done(); - }); - }); - - it('should validate required status', function(done) { - person._validator.validate({ first_name: 'foo' }, function(err, validationErrors) { - assert(!err, err); - assert(validationErrors); - assert(validationErrors); - assert(validationErrors.last_name); - assert(validationErrors.last_name[1].rule === 'required'); - done(); - }); - }); - - it('should validate all fields with presentOnly omitted or set to false', function(done) { - person._validator.validate({ city: 'Washington' }, function(err, validationErrors) { - assert(!err, err); - assert(validationErrors, 'expected validation errors'); - assert(!validationErrors.first_name); - assert(validationErrors.last_name); - assert(validationErrors.last_name[0].rule === 'string'); - assert(validationErrors.city); - assert(validationErrors.city[0].rule === 'maxLength'); - - person._validator.validate({ city: 'Washington' }, false, function(err, validationErrors) { - assert(!err, err); - assert(validationErrors, 'expected validation errors'); - assert(!validationErrors.first_name); - assert(validationErrors.last_name); - assert(validationErrors.last_name[0].rule 
=== 'string'); - assert(validationErrors.city); - assert(validationErrors.city[0].rule === 'maxLength'); - done(); - }); - }); - }); - - it('should, for presentOnly === true, validate present values only, thus not need the required last_name', function(done) { - person._validator.validate({ first_name: 'foo' }, true, function(err, validationErrors) { - assert(!err, err); - assert(!validationErrors, 'expected no validation errors'); - done(); - }); - }); - - it('should validate only the specified value', function(done) { - person._validator.validate({ first_name: 'foo', last_name: 32, city: 'Washington' }, - 'first_name', function(err, validationErrors) { - assert(!err, err); - assert(!validationErrors, 'expected no validation errors'); - - person._validator.validate({ first_name: 'foo', last_name: 32, city: 'Washington' }, - 'last_name', function(err, validationErrors) { - assert(!err, err); - assert(validationErrors); - assert(validationErrors.last_name); - assert(validationErrors.last_name[0].rule === 'string'); - assert(!validationErrors.city); - done(); - }); - }); - }); - - it('should validate only the specified values', function(done) { - person._validator.validate({ first_name: 'foo', last_name: 32, city: 'Atlanta' }, - ['first_name', 'city'], function(err,validationErrors) { - assert(!err, err); - assert(!validationErrors); - - person._validator.validate({ first_name: 'foo', last_name: 32, city: 'Washington' }, - ['first_name', 'city'], function(err,validationErrors) { - assert(validationErrors); - assert(!validationErrors.first_name); - assert(!validationErrors.last_name); - assert(validationErrors.city); - assert(validationErrors.city[0].rule === 'maxLength'); - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/error/WLError.test.js b/test/unit/error/WLError.test.js deleted file mode 100644 index 7c16ae068..000000000 --- a/test/unit/error/WLError.test.js +++ /dev/null @@ -1,73 +0,0 @@ -/** - * Test dependencies - */ - -var errorify = 
require('../../../lib/waterline/error'); -var WLError = require('../../../lib/waterline/error/WLError'); -var assert = require('assert'); - - -describe('lib/error', function () { - - describe('errorify', function () { - - it('(given a string) should return WLError', function () { - var err = errorify('foo'); - assert(err instanceof WLError); - }); - it('(given an object) should return WLError', function () { - var err = errorify({what: 'will this do?'}); - assert(err instanceof WLError); - }); - it('(given an array) should return WLError', function () { - var err = errorify(['foo', 'bar', {baz: true}]); - assert(err instanceof WLError); - }); - it('(given a boolean) should return WLError', function () { - var err = errorify(false); - assert(err instanceof WLError); - }); - it('(given a boolean) should return WLError', function () { - var err = errorify(true); - assert(err instanceof WLError); - }); - it('(given a number) should return WLError', function () { - var err = errorify(2428424.422429); - assert(err instanceof WLError); - }); - it('(given `null`) should return WLError', function () { - var err = errorify(null); - assert(err instanceof WLError); - }); - it('(given `undefined`) should return WLError', function () { - var err = errorify(undefined); - assert(err instanceof WLError); - }); - it('(given no arguments) should return WLError', function () { - var err = errorify(); - assert(err instanceof WLError); - }); - }); - - describe('lib/error/WLError.js', function() { - it('should have a stack property, like Error', function() { - var err = errorify(); - assert(err.stack); - }); - it('should allow changing the stack property', function() { - var err = errorify(); - err.stack = 'new stack'; - assert(err.stack.indexOf('new stack') >= 0, 'err.stack was not set properly'); - }); - it('should have a message property, like Error', function() { - var err = errorify(); - assert(err.message); - }) - it('should allow changing the message property', function() { - var 
err = errorify(); - err.message = 'new message'; - assert.equal(err.message, 'new message'); - }); - }) - -}); diff --git a/test/unit/model/association.add.hasMany.id.js b/test/unit/model/association.add.hasMany.id.js deleted file mode 100644 index d3cf0c5dd..000000000 --- a/test/unit/model/association.add.hasMany.id.js +++ /dev/null @@ -1,99 +0,0 @@ -var _ = require('lodash'), - assert = require('assert'), - belongsToFixture = require('../../support/fixtures/model/context.belongsTo.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('hasMany association add', function() { - - describe('with an id', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - var i = 1; - var container = { update: [], create: [] }; - var foo = _.cloneDeep(container); - var bar = _.cloneDeep(container); - - before(function() { - var fixture = belongsToFixture(); - - var findOneFn = function(container) { - - var obj = function(criteria) { - return this; - }; - - obj.prototype.exec = function(cb) { - cb(null, [new model(container.update[0].values)]); - }; - - obj.prototype.populate = function() { return this; }; - - return function(criteria) { - return new obj(criteria); - }; - }; - - // Mock Collection Update Method - var updateFn = function(container) { - return function(criteria, values, cb) { - var obj = {}; - obj.criteria = criteria; - obj.values = values; - container.update.push(obj); - cb(null, [new model(values)]); - }; - }; - - // Mock Collection Create Method - var createFn = function(container) { - return function(values, cb) { - var obj = { values: values }; - values.id = i; - i++; - container.create.push(obj); - cb(null, new model(values)); - }; - }; - - // Add Collection Methods to all fixture collections - fixture.update = updateFn(foo); - fixture.findOne = findOneFn(foo); - 
fixture.waterline.collections.foo.update = updateFn(foo); - fixture.waterline.collections.bar.update = updateFn(bar); - fixture.waterline.collections.bar.create = createFn(bar); - - model = new Model(fixture, {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass model values to create method for each relationship', function(done) { - var person = new model({ id: 1, name: 'foobar' }); - - person.bars.add(1); - person.bars.add(2); - - person.save(function(err) { - assert(bar.update.length === 2); - assert(bar.update.length === 2); - assert(bar.update[0].criteria.id === 1); - - assert(bar.update[0].values.foo); - assert(bar.update[1].criteria.id === 2); - assert(bar.update[1].values.foo); - - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/model/association.add.hasMany.object.js b/test/unit/model/association.add.hasMany.object.js deleted file mode 100644 index ff97e840f..000000000 --- a/test/unit/model/association.add.hasMany.object.js +++ /dev/null @@ -1,101 +0,0 @@ -var _ = require('lodash'), - assert = require('assert'), - belongsToFixture = require('../../support/fixtures/model/context.belongsTo.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('hasMany association add', function() { - - describe('with an object', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - var i = 1; - var container = { update: [], create: [] }; - var foo = _.cloneDeep(container); - var bar = _.cloneDeep(container); - - before(function() { - var fixture = belongsToFixture(); - - var findOneFn = function(container) { - - var obj = function(criteria) { - return this; - }; - - obj.prototype.exec = function(cb) { - cb(null, [new model(container.update[0].values)]); - }; - - obj.prototype.populate = 
function() { return this; }; - - return function(criteria) { - return new obj(criteria); - }; - }; - - // Mock Collection Update Method - var updateFn = function(container) { - return function(criteria, values, cb) { - var obj = {}; - obj.criteria = criteria; - obj.values = values; - container.update.push(obj); - cb(null, [new model(values)]); - }; - }; - - // Mock Collection Create Method - var createFn = function(container) { - return function(values, cb) { - var obj = { values: values }; - values.id = i; - i++; - container.create.push(obj); - cb(null, new model(values)); - }; - }; - - // Add Collection Methods to all fixture collections - fixture.update = updateFn(foo); - fixture.findOne = findOneFn(foo); - fixture.waterline.collections.foo.update = updateFn(foo); - fixture.waterline.collections.bar.update = updateFn(bar); - fixture.waterline.collections.bar.create = createFn(bar); - - model = new Model(fixture, {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass model values to create method for each relationship', function(done) { - var person = new model({ id: 1, name: 'foobar' }); - - person.bars.add({ name: 'foo' }); - person.bars.add({ name: 'bar' }); - - person.save(function(err) { - assert(bar.create.length === 2); - - assert(bar.create[0].values.foo); - assert(bar.create[0].values.name); - assert(bar.create[1].values.foo); - assert(bar.create[1].values.name); - - assert(bar.create[0].values.name === 'foo'); - assert(bar.create[1].values.name === 'bar'); - - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/model/association.add.manyToMany.id.js b/test/unit/model/association.add.manyToMany.id.js deleted file mode 100644 index 3879ae697..000000000 --- a/test/unit/model/association.add.manyToMany.id.js +++ /dev/null @@ -1,101 +0,0 @@ -var _ = require('lodash'), - assert = require('assert'), - manyToManyFixture = 
require('../../support/fixtures/model/context.manyToMany.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('many to many association add', function() { - - describe('with an id', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - var i = 1; - var results = []; - - before(function() { - var fixture = manyToManyFixture(); - - // Mock Collection Update Method - var updateFn = function(criteria, values, cb) { - var obj = {}; - obj.criteria = criteria; - obj.values = values; - cb(null, [new model(values)]); - }; - - // Mock Collection Create Method - var createFn = function(values, cb) { - var obj = { values: values }; - values.id = i; - i++; - results.push(values); - cb(null, new model(values)); - }; - - // Mock Find One Method - var findOneFn = function(criteria, cb) { - var parentCriteria = criteria; - - if(cb) { - if(criteria.id) return cb(null, criteria); - return cb(); - } - - var obj = function(criteria) { - return this; - }; - - obj.prototype.exec = function(cb) { - cb(null, [parentCriteria]); - }; - - obj.prototype.populate = function() { return this; }; - - return new obj(criteria); - }; - - // Add Collection Methods to all fixture collections - fixture.waterline.connections.my_foo._adapter.update = updateFn; - fixture.waterline.connections.my_foo._adapter.create = createFn; - fixture.waterline.connections.my_foo._adapter.findOne = findOneFn; - - fixture.update = updateFn; - fixture.findOne = findOneFn; - fixture.waterline.collections.foo.findOne = findOneFn; - fixture.waterline.collections.bar_foos__foo_bars.findOne = findOneFn; - fixture.waterline.collections.bar_foos__foo_bars.create = createFn; - - - model = new Model(fixture, {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should 
pass model values to create method for each relationship', function(done) { - var person = new model({ id: 1, name: 'foobar' }); - - person.bars.add(1); - person.bars.add(2); - - person.save(function(err) { - - assert(results.length === 2); - assert(results[0].foo_bars = 1); - assert(results[0].bar_foos = 1); - assert(results[1].foo_bars = 2); - assert(results[1].bar_foos = 1); - - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/model/association.add.manyToMany.object.js b/test/unit/model/association.add.manyToMany.object.js deleted file mode 100644 index acea53fc4..000000000 --- a/test/unit/model/association.add.manyToMany.object.js +++ /dev/null @@ -1,102 +0,0 @@ -var _ = require('lodash'), - assert = require('assert'), - manyToManyFixture = require('../../support/fixtures/model/context.manyToMany.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('many to many association add', function() { - - describe('with an object', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - var i = 1; - var results = []; - - before(function() { - var fixture = manyToManyFixture(); - - // Mock Collection Update Method - var updateFn = function(criteria, values, cb) { - var obj = {}; - obj.criteria = criteria; - obj.values = values; - cb(null, [new model(values)]); - }; - - // Mock Collection Create Method - var createFn = function(values, cb) { - var obj = { values: values }; - values.id = i; - i++; - results.push(values); - cb(null, new model(values)); - }; - - // Mock Find One Method - var findOneFn = function(criteria, cb) { - var parentCriteria = criteria; - - if(cb) { - if(criteria.id) return cb(null, criteria); - return cb(); - } - - var obj = function(criteria) { - return this; - }; - - obj.prototype.exec = function(cb) { - cb(null, [parentCriteria]); - }; - - obj.prototype.populate = 
function() { return this; }; - - return new obj(criteria); - }; - - // Add Collection Methods to all fixture collections - fixture.waterline.connections.my_foo._adapter.update = updateFn; - fixture.waterline.connections.my_foo._adapter.create = createFn; - fixture.waterline.connections.my_foo._adapter.findOne = findOneFn; - - fixture.update = updateFn; - fixture.findOne = findOneFn; - fixture.waterline.collections.foo.findOne = findOneFn; - fixture.waterline.collections.bar.findOne = findOneFn; - fixture.waterline.collections.bar.create = createFn; - fixture.waterline.collections.bar_foos__foo_bars.findOne = findOneFn; - fixture.waterline.collections.bar_foos__foo_bars.create = createFn; - - - model = new Model(fixture, {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass model values to create method for each relationship', function(done) { - var person = new model({ id: 1, name: 'foobar' }); - - person.bars.add({ name: 'foo' }); - person.bars.add({ name: 'bar' }); - - person.save(function(err) { - assert(results.length === 4); - assert(results[0].name === 'foo'); - assert(results[1].foo_bars === 1); - assert(results[2].name === 'bar'); - assert(results[3].foo_bars === 3); - - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/model/association.getters.js b/test/unit/model/association.getters.js deleted file mode 100644 index 35d22484e..000000000 --- a/test/unit/model/association.getters.js +++ /dev/null @@ -1,42 +0,0 @@ -var assert = require('assert'), - manyToManyFixture = require('../../support/fixtures/model/context.manyToMany.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('association getters', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - - before(function() 
{ - model = new Model(manyToManyFixture(), {}); - }); - - it('should have a getter for has many association keys', function() { - var person = new model({ name: 'foobar', bars: [{ id: 1, name: 'bar uno' }] }); - - assert(Array.isArray(person.bars)); - assert(person.bars.length == 1); - assert(person.bars[0].name === 'bar uno'); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should have special methods on the association key', function() { - var person = new model({ name: 'foobar' }); - - assert(typeof person.bars.add == 'function'); - assert(typeof person.bars.remove == 'function'); - - assert(typeof person.foobars.add == 'function'); - assert(typeof person.foobars.remove == 'function'); - }); - - }); -}); diff --git a/test/unit/model/association.remove.hasMany.id.js b/test/unit/model/association.remove.hasMany.id.js deleted file mode 100644 index d1056589a..000000000 --- a/test/unit/model/association.remove.hasMany.id.js +++ /dev/null @@ -1,91 +0,0 @@ -var _ = require('lodash'), - assert = require('assert'), - belongsToFixture = require('../../support/fixtures/model/context.belongsTo.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('hasMany association remove', function() { - - describe('with an id', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - var i = 1; - var container = { update: [], create: [] }; - var foo = _.cloneDeep(container); - var bar = _.cloneDeep(container); - - before(function() { - var fixture = belongsToFixture(); - - // Mock Collection Update Method - var updateFn = function(container) { - return function(criteria, values, cb) { - var obj = {}; - obj.criteria = criteria; - obj.values = values; - container.update.push(obj); - cb(null, [new model(values)]); - }; - }; - - 
// Mock Collection Create Method - var createFn = function(container) { - return function(values, cb) { - var obj = { values: values }; - values.id = i; - i++; - container.create.push(obj); - cb(null, new model(values)); - }; - }; - - // Add Collection Methods to all fixture collections - fixture.update = updateFn(foo); - fixture.waterline.collections.foo.update = updateFn(foo); - fixture.waterline.collections.bar.update = updateFn(bar); - fixture.waterline.collections.bar.create = createFn(bar); - - model = new Model(fixture, {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass model values to create method for each relationship', function(done) { - var person = new model({ id: 1, name: 'foobar' }); - - person.bars.remove(1); - person.bars.remove(2); - - person.save(function(err) { - - assert(bar.update.length === 2); - assert(bar.update[0].criteria.id === 1); - assert(bar.update[0].values.foo_id === null); - assert(bar.update[1].criteria.id === 2); - assert(bar.update[1].values.foo_id === null); - - done(); - }); - }); - - it('should error if passed in an object into the remove function', function(done) { - var person = new model({ id: 1, name: 'foobar' }); - - person.bars.remove({ name: 'foo' }); - - person.save(function(err) { - assert(err); - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/model/association.remove.manyToMany.id.js b/test/unit/model/association.remove.manyToMany.id.js deleted file mode 100644 index e3708c757..000000000 --- a/test/unit/model/association.remove.manyToMany.id.js +++ /dev/null @@ -1,83 +0,0 @@ -var _ = require('lodash'), - assert = require('assert'), - manyToManyFixture = require('../../support/fixtures/model/context.manyToMany.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('many to many association remove', function() { - - describe('with 
an id', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - var i = 1; - var results = []; - - before(function() { - var fixture = manyToManyFixture(); - - // Mock Collection Update Method - var updateFn = function(criteria, values, cb) { - var obj = {}; - obj.criteria = criteria; - obj.values = values; - cb(null, [new model(values)]); - }; - - // Mock Collection Destroy Method - var destroyFn = function(criteria, cb) { - var obj = { criteria: criteria }; - results.push(obj); - cb(null); - }; - - // Add Collection Methods to all fixture collections - fixture.waterline.connections.my_foo._adapter.update = updateFn; - fixture.waterline.connections.my_foo._adapter.destroy = destroyFn; - - fixture.update = updateFn; - fixture.waterline.collections.bar_foos__foo_bars.destroy = destroyFn; - - - model = new Model(fixture, {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass model values to destroy method for each relationship', function(done) { - var person = new model({ id: 1, name: 'foobar' }); - - person.bars.remove(1); - person.bars.remove(2); - - person.save(function(err) { - - assert(results.length === 2); - assert(results[0].criteria.foo_bars === 1); - assert(results[0].criteria.bar_foos === 1); - assert(results[1].criteria.foo_bars === 2); - assert(results[1].criteria.bar_foos === 1); - - done(); - }); - }); - - it('should error if passed in an object into the remove function', function(done) { - var person = new model({ id: 1, name: 'foobar' }); - person.bars.remove({ name: 'foo' }); - - person.save(function(err) { - assert(err); - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/model/association.setters.js b/test/unit/model/association.setters.js deleted file mode 100644 index 42a48bf42..000000000 --- 
a/test/unit/model/association.setters.js +++ /dev/null @@ -1,52 +0,0 @@ -var assert = require('assert'), - manyToManyFixture = require('../../support/fixtures/model/context.manyToMany.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('association setters', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - - before(function() { - model = new Model(manyToManyFixture(), {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should allow new associations to be added using the add function', function() { - var person = new model({ name: 'foobar' }); - - person.bars.add(1); - assert(person.associations.bars.addModels.length === 1); - }); - - it('should allow new associations to be added using the add function and an array', function() { - var person = new model({ name: 'foobar' }); - - person.bars.add( [ 1, 2, 3 ] ); - assert(person.associations.bars.addModels.length === 3); - }); - - - it('should allow new associations to be removed using the remove function', function() { - var person = new model({ name: 'foobar' }); - - person.bars.remove(1); - assert(person.associations.bars.removeModels.length === 1); - }); - - it('should allow new associations to be removed using the remove function and an array', function() { - var person = new model({ name: 'foobar' }); - - person.bars.remove( [ 1, 2, 3 ] ); - assert(person.associations.bars.removeModels.length === 3); - }); - }); -}); diff --git a/test/unit/model/destroy.js b/test/unit/model/destroy.js deleted file mode 100644 index a25ba8ad2..000000000 --- a/test/unit/model/destroy.js +++ /dev/null @@ -1,48 +0,0 @@ -var assert = require('assert'), - belongsToFixture = require('../../support/fixtures/model/context.belongsTo.fixture'), - Model = 
require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('destroy', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - - before(function() { - var fixture = belongsToFixture(); - fixture.destroy = function(criteria, cb) { - return cb(null, criteria); - }; - - model = new Model(fixture, {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass criteria to the context destroy method', function(done) { - var person = new model({ id: 1, name: 'foo' }); - - person.destroy(function(err, status) { - assert(status.id); - assert(status.id === 1); - done(); - }); - }); - - it('should return a promise', function(done) { - var person = new model({ id: 1, name: 'foo' }); - - person.destroy().then(function(status) { - assert(status.id); - assert(status.id === 1); - done(); - }); - }); - }); -}); diff --git a/test/unit/model/model.validate.js b/test/unit/model/model.validate.js deleted file mode 100644 index d4706e145..000000000 --- a/test/unit/model/model.validate.js +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Test Model.validate() instance method - */ - -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Model', function() { - - describe('.validate()', function() { - var collection; - - /** - * Build a test model - */ - - before(function(done) { - var waterline = new Waterline(); - - var Model = Waterline.Collection.extend({ - connection: 'foo', - tableName: 'person', - attributes: { - first_name: { - type: 'string', - required: true - }, - email: { - type: 'email', - required: true - } - } - }); - - waterline.loadCollection(Model); - - var adapterDef = {}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, 
connections: connections }, function(err, colls) { - if(err) done(err); - collection = colls.collections.person; - done(); - }); - }); - - it('should pass model values to validate method', function(done) { - var person = new collection._model({ email: 'none' }); - - // Update a value - person.last_name = 'foobaz'; - - person.validate(function(err) { - assert(err); - done(); - }); - }); - - it('should also work with promises', function(done) { - var person = new collection._model({ email: 'none' }); - - // Update a value - person.last_name = 'foobaz'; - - person.validate() - .catch(function(err) { - assert(err); - done(); - }); - }); - - }); -}); diff --git a/test/unit/model/save.js b/test/unit/model/save.js deleted file mode 100644 index 786d1dc41..000000000 --- a/test/unit/model/save.js +++ /dev/null @@ -1,136 +0,0 @@ -var assert = require('assert'); -var belongsToFixture = require('../../support/fixtures/model/context.belongsTo.fixture'); -var Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('save', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var fixture, model, updateValues; - - before(function() { - fixture = belongsToFixture(); - - fixture.findOne = function(criteria, cb) { - - if(cb) { - if(criteria.id) return cb(null, criteria); - return cb(); - } - - var obj = function() { - return this; - }; - - obj.prototype.exec = function(cb) { - cb(null, updateValues); - }; - - obj.prototype.populate = function() { return this; }; - - return new obj(criteria); - }; - - fixture.update = function(criteria, values, cb) { - updateValues = values; - return cb(null, [new model(values)]); - }; - - model = new Model(fixture, {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should pass new values to the update function', 
function(done) { - var person = new model({ id: 1, name: 'foo' }); - - person.name = 'foobar'; - - person.save(function(err) { - assert(!err); - done(); - }); - }); - - it('should return a promise', function(done) { - var person = new model({ id: 1, name: 'foo' }); - - person.name = 'foobar'; - - person.save().then(function() { - assert(updateValues.name === 'foobar'); - done(); - }).catch(function() { - done(new Error('Promise returned an error')); - }); - }); - - describe('promise with 0 updated rows', function(){ - var originalUpdate; - - before(function(){ - originalUpdate = fixture.update; - fixture.update = function(criteria, values, cb) { - return cb(null, []); - }; - }); - - after(function(){ - fixture.update = originalUpdate; - }); - - it('should reject', function(done){ - var person = new model({ id: 1, name: 'foo' }); - - person.name = 'foobar'; - - person.save().then(function() { - done("promise should be rejected, not resolved"); - }) - .catch(function(err) { - assert(err); - done(); - }); - }); - }); - - describe('promise with object that can\'t be found', function(){ - var originalFind; - - before(function(){ - originalFind = fixture.findOne; - fixture.update = function(criteria, values, cb) { - return cb(null, []); - }; - fixture.findOne = function(criteria, cb) { - return cb(new Error('Forced Error')); - }; - }); - - after(function(){ - fixture.findOne = originalFind; - }); - - it('should reject', function(done){ - var person = new model({ id: 1, name: 'foo' }); - - person.name = 'foobar'; - - person.save().then(function() { - done(new Error("promise should be rejected, not resolved")); - }) - .catch(function(err){ - assert(err); - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/model/toObject.js b/test/unit/model/toObject.js deleted file mode 100644 index 45d1cab56..000000000 --- a/test/unit/model/toObject.js +++ /dev/null @@ -1,116 +0,0 @@ -var assert = require('assert'); -var belongsToFixture = 
require('../../support/fixtures/model/context.belongsTo.fixture'); -var manyToManyFixture = require('../../support/fixtures/model/context.manyToMany.fixture'); -var simpleFixture = require('../../support/fixtures/model/context.simple.fixture'); -var _ = require('lodash'); -var Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('toObject', function() { - - describe('without associations', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - - before(function() { - model = new Model(simpleFixture(), {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should return a POJO', function() { - var person = new model({ name: 'foo' }); - var obj = person.toObject(); - - assert(obj === Object(obj)); - assert(_.isPlainObject(obj)); - assert(obj.name === 'foo'); - assert(!obj.full_name); - }); - - }); - - describe('belongsTo', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - - before(function() { - model = new Model(belongsToFixture(), {}); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should strip out the instance methods', function() { - var person = new model({ name: 'foo' }); - var obj = person.toObject(); - - assert(obj === Object(obj)); - assert(obj.name === 'foo'); - assert(!obj.full_name); - }); - }); - - describe('Many To Many', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - - before(function() { - model = new Model(manyToManyFixture(), {}); - }); - - - 
///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should strip out the association key when no options are passed', function() { - var person = new model({ name: 'foobar' }); - var obj = person.toObject(); - - assert(obj === Object(obj)); - assert(obj.name === 'foobar'); - assert(!obj.bars); - assert(!obj.foobars); - }); - - it('should keep the association key when showJoins option is passed', function() { - var person = new model({ name: 'foobar' }, { showJoins: true }); - var obj = person.toObject(); - - assert(obj === Object(obj)); - assert(obj.name === 'foobar'); - assert(obj.bars); - assert(obj.foobars); - }); - - it('should selectively keep the association keys when joins option is passed', function() { - var person = new model({ name: 'foobar' }, { showJoins: true, joins: ['bar'] }); - var obj = person.toObject(); - - assert(obj === Object(obj)); - assert(obj.name === 'foobar'); - assert(obj.bars); - assert(!obj.foobars); - }); - }); - - }); -}); diff --git a/test/unit/model/userMethods.js b/test/unit/model/userMethods.js deleted file mode 100644 index 801cc418f..000000000 --- a/test/unit/model/userMethods.js +++ /dev/null @@ -1,39 +0,0 @@ -var assert = require('assert'), - belongsToFixture = require('../../support/fixtures/model/context.belongsTo.fixture'), - Model = require('../../../lib/waterline/model'); - -describe('instance methods', function() { - describe('user defined methods', function() { - - ///////////////////////////////////////////////////// - // TEST SETUP - //////////////////////////////////////////////////// - - var model; - - before(function() { - var fixture = belongsToFixture(); - var mixins = { - full_name: function() { - return this.name + ' bar'; - } - }; - - model = new Model(fixture, mixins); - }); - - - ///////////////////////////////////////////////////// - // TEST METHODS - //////////////////////////////////////////////////// - - it('should have 
a full_name function', function() { - var person = new model({ name: 'foo' }); - var name = person.full_name(); - - assert(typeof person.full_name === 'function'); - assert(name === 'foo bar'); - }); - - }); -}); diff --git a/test/unit/query/associations/belongsTo.js b/test/unit/query/associations/belongsTo.js index 5c275a921..367e2156b 100644 --- a/test/unit/query/associations/belongsTo.js +++ b/test/unit/query/associations/belongsTo.js @@ -1,22 +1,24 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); +var util = require('util'); +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../../lib/waterline'); -describe('Collection Query', function() { +describe('Collection Query ::', function() { describe('belongs to association', function() { - var Car, generatedCriteria = {}; + var Car; + var generatedQuery; before(function(done) { - var waterline = new Waterline(); var collections = {}; - collections.user = Waterline.Collection.extend({ + collections.user = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'uuid', attributes: { uuid: { - type: 'string', - primaryKey: true + type: 'string' }, name: { type: 'string', @@ -25,27 +27,34 @@ describe('Collection Query', function() { } }); - collections.car = Waterline.Collection.extend({ + collections.car = Waterline.Model.extend({ identity: 'car', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, driver: { model: 'user' } } }); - waterline.loadCollection(collections.user); - waterline.loadCollection(collections.car); + waterline.registerModel(collections.user); + waterline.registerModel(collections.car); // Fixture Adapter Def var adapterDef = { identity: 'foo', - join: function(con, col, criteria, cb) { - generatedCriteria = criteria; + join: function(con, query, cb) { + generatedQuery = query; return cb(); }, - find: function(con, 
col, criteria, cb) { + find: function(con, query, cb) { + return cb(); + }, + findOne: function(con, query, cb) { return cb(); } }; @@ -56,37 +65,34 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - Car = colls.collections.car; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + Car = orm.collections.car; + return done(); }); }); - it('should build a join query', function(done) { - Car.findOne({ driver: 1 }) + Car.find().limit(1) .populate('driver') - .exec(function(err, values) { - if(err) return done(err); - assert(generatedCriteria.joins[0].parent === 'car'); - assert(generatedCriteria.joins[0].parentKey === 'driver'); - assert(generatedCriteria.joins[0].child === 'user'); - assert(generatedCriteria.joins[0].childKey === 'uuid'); - assert(generatedCriteria.joins[0].removeParentKey === true); - done(); - }); - }); - - - it('should return error if criteria is undefined', function(done) { - Car.findOne() - .populate('driver') - .exec(function(err, values) { - assert(err, 'An Error is expected'); - done(); + .exec(function(err, cars) { + if (err) { + return done(err); + } + + try { + assert(_.isArray(cars), 'expecting array, but instead got:'+util.inspect(cars, {depth:5})); + assert.equal(generatedQuery.joins[0].parent, 'car'); + assert.equal(generatedQuery.joins[0].parentKey, 'driver'); + assert.equal(generatedQuery.joins[0].child, 'user'); + assert.equal(generatedQuery.joins[0].childKey, 'uuid'); + assert.equal(generatedQuery.joins[0].removeParentKey, true); + } catch (e) { return done(e); } + + return done(); }); }); - }); }); diff --git a/test/unit/query/associations/hasMany.js b/test/unit/query/associations/hasMany.js index a327c0b38..186815454 100644 --- a/test/unit/query/associations/hasMany.js +++ 
b/test/unit/query/associations/hasMany.js @@ -1,23 +1,23 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../../lib/waterline'); +describe('Collection Query ::', function() { describe('has many association', function() { - var User, generatedCriteria; + var User; + var generatedQuery; before(function(done) { - var waterline = new Waterline(); var collections = {}; - collections.user = Waterline.Collection.extend({ + collections.user = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'uuid', attributes: { uuid: { - type: 'string', - primaryKey: true + type: 'number' }, cars: { collection: 'car', @@ -26,27 +26,31 @@ describe('Collection Query', function() { } }); - collections.car = Waterline.Collection.extend({ + collections.car = Waterline.Model.extend({ identity: 'car', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, driver: { model: 'user' } } }); - waterline.loadCollection(collections.user); - waterline.loadCollection(collections.car); + waterline.registerModel(collections.user); + waterline.registerModel(collections.car); // Fixture Adapter Def var adapterDef = { identity: 'foo', - join: function(con, col, criteria, cb) { - generatedCriteria = criteria; + join: function(con, query, cb) { + generatedQuery = query; return cb(); }, - find: function(con, col, criteria, cb) { + find: function(con, query, cb) { return cb(); } }; @@ -57,29 +61,32 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - User = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) 
{ + return done(err); + } + User = orm.collections.user; + return done(); }); }); - it('should build a join query', function(done) { User.findOne(1) .populate('cars') - .exec(function(err, values) { - if(err) return done(err); - assert(generatedCriteria.joins[0].parent === 'user'); - assert(generatedCriteria.joins[0].parentKey === 'uuid'); - assert(generatedCriteria.joins[0].child === 'car'); - assert(generatedCriteria.joins[0].childKey === 'driver'); - assert(Array.isArray(generatedCriteria.joins[0].select)); - assert(generatedCriteria.joins[0].removeParentKey === false); + .exec(function(err) { + if(err) { + return done(err); + } - done(); + assert.equal(generatedQuery.joins[0].parent, 'user'); + assert.equal(generatedQuery.joins[0].parentKey, 'uuid'); + assert.equal(generatedQuery.joins[0].child, 'car'); + assert.equal(generatedQuery.joins[0].childKey, 'driver'); + assert(_.isArray(generatedQuery.joins[0].criteria.select)); + assert.equal(generatedQuery.joins[0].removeParentKey, false); + + return done(); }); }); - }); }); diff --git a/test/unit/query/associations/manyToMany.js b/test/unit/query/associations/manyToMany.js index 645d7305d..f4a64c4fe 100644 --- a/test/unit/query/associations/manyToMany.js +++ b/test/unit/query/associations/manyToMany.js @@ -1,20 +1,25 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../../lib/waterline'); +describe('Collection Query ::', function() { describe('many to many association', function() { - var User, generatedCriteria; + var User; + var generatedQuery; before(function(done) { - var waterline = new Waterline(); var collections = {}; - collections.user = Waterline.Collection.extend({ + collections.user = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 
'number', + columnName: 'user_id' + }, cars: { collection: 'car', via: 'drivers' @@ -22,10 +27,19 @@ describe('Collection Query', function() { } }); - collections.car = Waterline.Collection.extend({ + collections.car = Waterline.Model.extend({ identity: 'car', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number', + columnName: 'car_id' + }, + name: { + type: 'string', + columnName: 'car_name' + }, drivers: { collection: 'user', via: 'cars', @@ -34,17 +48,17 @@ describe('Collection Query', function() { } }); - waterline.loadCollection(collections.user); - waterline.loadCollection(collections.car); + waterline.registerModel(collections.user); + waterline.registerModel(collections.car); // Fixture Adapter Def var adapterDef = { identity: 'foo', - join: function(con, col, criteria, cb) { - generatedCriteria = criteria; + join: function(con, query, cb) { + generatedQuery = query; return cb(); }, - find: function(con, col, criteria, cb) { + find: function(con, query, cb) { return cb(); } }; @@ -55,39 +69,44 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - User = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + User = orm.collections.user; + return done(); }); }); - it('should build a join query', function(done) { User.findOne(1) - .populate('cars') - .exec(function(err, values) { - if(err) return done(err); - - assert(generatedCriteria.joins.length === 2); - - assert(generatedCriteria.joins[0].parent === 'user'); - assert(generatedCriteria.joins[0].parentKey === 'id'); - assert(generatedCriteria.joins[0].child === 'car_drivers__user_cars'); - assert(generatedCriteria.joins[0].childKey === 'user_cars'); - assert(generatedCriteria.joins[0].select === false); - 
assert(generatedCriteria.joins[0].removeParentKey === false); - - assert(generatedCriteria.joins[1].parent === 'car_drivers__user_cars'); - assert(generatedCriteria.joins[1].parentKey === 'car_drivers'); - assert(generatedCriteria.joins[1].child === 'car'); - assert(generatedCriteria.joins[1].childKey === 'id'); - assert(Array.isArray(generatedCriteria.joins[1].select)); - assert(generatedCriteria.joins[1].removeParentKey === false); + .populate('cars', { sort: [{'name': 'ASC'}]}) + .exec(function(err) { + if (err) { + return done(err); + } - done(); + assert.equal(generatedQuery.joins.length, 2); + assert.equal(generatedQuery.joins[0].parent, 'user'); + assert.equal(generatedQuery.joins[0].parentKey, 'user_id'); + assert.equal(generatedQuery.joins[0].child, 'car_drivers__user_cars'); + assert.equal(generatedQuery.joins[0].childKey, 'user_cars'); + assert.equal(generatedQuery.joins[0].select, false); + assert.equal(generatedQuery.joins[0].removeParentKey, false); + assert.equal(generatedQuery.joins[1].parent, 'car_drivers__user_cars'); + assert.equal(generatedQuery.joins[1].parentKey, 'car_drivers'); + assert.equal(generatedQuery.joins[1].child, 'car'); + assert.equal(generatedQuery.joins[1].childKey, 'car_id'); + assert(_.isArray(generatedQuery.joins[1].criteria.select)); + assert.equal(generatedQuery.joins[1].criteria.select[0], 'car_id'); + assert.equal(generatedQuery.joins[1].criteria.select[1], 'car_name'); + assert(_.isArray(generatedQuery.joins[1].criteria.sort)); + assert(generatedQuery.joins[1].criteria.sort[0].car_name); + + assert.equal(generatedQuery.joins[1].removeParentKey, false); + + return done(); }); }); - }); }); diff --git a/test/unit/query/associations/manyToManyThrough.js b/test/unit/query/associations/manyToManyThrough.js deleted file mode 100644 index 7d224359e..000000000 --- a/test/unit/query/associations/manyToManyThrough.js +++ /dev/null @@ -1,237 +0,0 @@ -var Waterline = require('../../../../lib/waterline'); -var assert = 
require('assert'); -var async = require('async'); - -describe('Collection Query', function() { - - describe('many to many through association', function() { - var waterline; - var Driver; - var Ride; - var Taxi; - var Payment; - - before(function(done) { - var collections = {}; - waterline = new Waterline(); - - collections.payment = Waterline.Collection.extend({ - identity: 'Payment', - connection: 'foo', - tableName: 'payment_table', - attributes: { - paymentId: { - type: 'integer', - primaryKey: true - }, - amount: { - type: 'integer' - }, - ride: { - collection: 'Ride', - via: 'payment' - } - } - }); - - collections.driver = Waterline.Collection.extend({ - identity: 'Driver', - connection: 'foo', - tableName: 'driver_table', - attributes: { - driverId: { - type: 'integer', - primaryKey: true - }, - driverName: { - type: 'string' - }, - taxis: { - collection: 'Taxi', - via: 'driver', - through: 'ride' - }, - rides: { - collection: 'Ride', - via: 'taxi' - } - } - }); - - collections.taxi = Waterline.Collection.extend({ - identity: 'Taxi', - connection: 'foo', - tableName: 'taxi_table', - attributes: { - taxiId: { - type: 'integer', - primaryKey: true - }, - taxiMatricule: { - type: 'string' - }, - drivers: { - collection: 'Driver', - via: 'taxi', - through: 'ride' - } - } - }); - - collections.ride = Waterline.Collection.extend({ - identity: 'Ride', - connection: 'foo', - tableName: 'ride_table', - attributes: { - rideId: { - type: 'integer', - primaryKey: true - }, - payment: { - model: 'Payment' - }, - taxi: { - model: 'Taxi' - }, - driver: { - model: 'Driver' - } - } - }); - - waterline.loadCollection(collections.payment); - waterline.loadCollection(collections.driver); - waterline.loadCollection(collections.taxi); - waterline.loadCollection(collections.ride); - - var connections = { - 'foo': { - adapter: 'adapter' - } - }; - - waterline.initialize({adapters: {adapter: require('sails-memory')}, connections: connections}, function(err, colls) { - if (err) { - 
done(err); - } - Driver = colls.collections.driver; - Taxi = colls.collections.taxi; - Ride = colls.collections.ride; - Payment = colls.collections.payment; - - var drivers = [ - {driverId: 1, driverName: 'driver 1'}, - {driverId: 2, driverName: 'driver 2'} - ]; - var taxis = [ - {taxiId: 1, taxiMatricule: 'taxi_1'}, - {taxiId: 2, taxiMatricule: 'taxi_2'} - ]; - var rides = [ - {rideId: 1, taxi: 1, driver: 1}, - {rideId: 4, taxi: 2, driver: 2}, - {rideId: 5, taxi: 1, driver: 2} - ]; - var payments = [ - {paymentId: 3, amount: 10, ride: 1}, - {paymentId: 7, amount: 21, ride: 4}, - {paymentId: 15, amount: 7, ride: 5} - ]; - - async.series([ - function(callback) { - Driver.createEach(drivers, callback); - }, - function(callback) { - Taxi.createEach(taxis, callback); - }, - function(callback) { - Ride.createEach(rides, callback); - }, - function(callback) { - Payment.createEach(payments, callback); - } - ], function(err) { - done(err); - }); - }); - }); - - after(function(done) { - waterline.teardown(done); - }); - - it('through table model associations should return a single objet', function(done) { - Ride.findOne(1) - .populate('taxi') - .populate('driver') - .exec(function(err, ride) { - if (err) { - return done(err); - } - assert(!Array.isArray(ride.taxi), 'through table model associations return Array instead of single Objet'); - assert(!Array.isArray(ride.driver), 'through table model associations return Array instead of single Objet'); - assert(ride.taxi.taxiId === 1); - assert(ride.taxi.taxiMatricule === 'taxi_1'); - assert(ride.driver.driverId === 1); - assert(ride.driver.driverName === 'driver 1'); - done(); - }); - }); - - it('shoud return many childreen', function(done) { - Driver.findOne(2).populate('taxis', {sort: {taxiId: 1}}).exec(function(err, driver) { - if (err) { - return done(err); - } - assert(driver.taxis.length === 2); - assert(driver.taxis[0].taxiId === 1); - assert(driver.taxis[0].taxiMatricule === 'taxi_1'); - done(); - }); - }); - - 
it('should associate throughTable as one-to-many',function(done) { - Driver.findOne(2) - .populate('taxis', {sort: {taxiId: 1}}) - .populate('rides', {sort: {rideId: 1}}) - .exec(function(err, driver) { - if (err) { - return done(err); - } - assert(driver.taxis.length === 2); - assert(driver.taxis[0].taxiId === 1); - assert(driver.taxis[0].taxiMatricule === 'taxi_1'); - assert(Array.isArray(driver.rides)); - assert(driver.rides[0].rideId === 4); - assert(driver.rides[0].taxi === 2); - assert(driver.rides[0].driver === 2); - done(); - }); - }); - - it('should add and remove associations', function(done) { - Driver.findOne(1).populate('taxis').exec(function(err, driver) { - if (err) { - return done(err); - } - driver.taxis.add(2); - driver.taxis.remove(1); - driver.save(function(err, driver) { - if (err) { - return done(err); - } - Driver.findOne(1).populate('taxis', {sort: {taxiId: 1}}).exec(function(err, driver) { - if (err) { - return done(err); - } - assert(driver.taxis.length === 1); - assert(driver.taxis[0].taxiId === 2); - done(); - }); - }); - }); - }); - - }); -}); diff --git a/test/unit/query/associations/populateArray.js b/test/unit/query/associations/populateArray.js index 855ac39a1..28a079993 100644 --- a/test/unit/query/associations/populateArray.js +++ b/test/unit/query/associations/populateArray.js @@ -1,21 +1,22 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); +var assert = require('assert'); +var Waterline = require('../../../../lib/waterline'); -describe('Collection Query', function() { - describe('specific populated associations', function() { - var User; +describe('Collection Query ::', function() { + describe('specific populated associations ::', function() { var Car; - var Ticket; before(function(done) { - var waterline = new Waterline(); var collections = {}; - collections.user = Waterline.Collection.extend({ + collections.user = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 
'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, car: { model: 'car' }, @@ -26,10 +27,14 @@ describe('Collection Query', function() { } }); - collections.ticket = Waterline.Collection.extend({ + collections.ticket = Waterline.Model.extend({ identity: 'ticket', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, reason: { columnName: 'reason', type: 'string' @@ -40,32 +45,45 @@ describe('Collection Query', function() { } }); - collections.car = Waterline.Collection.extend({ + collections.car = Waterline.Model.extend({ identity: 'car', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, driver: { model: 'user', columnName: 'foobar' }, tickets: { - collection: 'ticket', - via: 'car' + collection: 'ticket', + via: 'car' } } }); - waterline.loadCollection(collections.user); - waterline.loadCollection(collections.car); - waterline.loadCollection(collections.ticket); + waterline.registerModel(collections.user); + waterline.registerModel(collections.car); + waterline.registerModel(collections.ticket); // Fixture Adapter Def var adapterDef = { identity: 'foo', - find: function(con, col, criteria, cb) { - if(col === 'user') return cb(null, [{ id: 1, car: 1, name: 'John Doe' }]); - if(col === 'car') return cb(null, [{ id: 1, foobar: 1, tickets: [1, 2]}]); - if(col === 'ticket') return cb(null, [{ id: 1, reason: 'red light', car:1}, { id: 2, reason: 'Parking in a disabled space', car: 1 }]); + find: function(con, query, cb) { + if(query.using === 'user') { + return cb(null, [{ id: 1, car: 1, name: 'John Doe' }]); + } + + if(query.using === 'car') { + return cb(null, [{ id: 1, foobar: 1, tickets: [1, 2]}]); + } + + if(query.using === 'ticket') { + return cb(null, [{ id: 1, reason: 'red light', car:1}, { id: 2, reason: 'Parking in a disabled space', car: 1 }]); + } + return cb(); } }; @@ -76,27 +94,34 @@ describe('Collection Query', function() { } }; - 
waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - User = colls.collections.user; - Car = colls.collections.car; - Ticket = colls.collections.ticket; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + Car = orm.collections.car; + + return done(); }); }); it('should populate all related collections', function(done) { - Car.find().populate(['driver','tickets']).exec(function(err, car) { - if(err) return done(err); + Car.find() + .populate('driver') + .populate('tickets') + .exec(function(err, car) { + if (err) { + return done(err); + } + assert(car[0].driver); assert(car[0].driver.name); assert(car[0].tickets); assert(car[0].tickets[0].car); assert(car[0].tickets[1].car); - done(); + return done(); }); }); - }); }); diff --git a/test/unit/query/associations/transformedPopulations.js b/test/unit/query/associations/transformedPopulations.js index 93388ab4d..1a5a54631 100644 --- a/test/unit/query/associations/transformedPopulations.js +++ b/test/unit/query/associations/transformedPopulations.js @@ -1,21 +1,24 @@ -var Waterline = require('../../../../lib/waterline'), - assert = require('assert'); +var assert = require('assert'); +var Waterline = require('../../../../lib/waterline'); -describe('Collection Query', function() { - describe('populated associations', function() { +describe('Collection Query ::', function() { + describe('populated associations ::', function() { var User; var Car; var generatedCriteria = {}; before(function(done) { - var waterline = new Waterline(); var collections = {}; - collections.user = Waterline.Collection.extend({ + collections.user = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, car: { model: 'car' }, @@ -26,10 +29,14 @@ describe('Collection Query', 
function() { } }); - collections.car = Waterline.Collection.extend({ + collections.car = Waterline.Model.extend({ identity: 'car', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, driver: { model: 'user', columnName: 'foobar' @@ -37,16 +44,22 @@ describe('Collection Query', function() { } }); - waterline.loadCollection(collections.user); - waterline.loadCollection(collections.car); + waterline.registerModel(collections.user); + waterline.registerModel(collections.car); // Fixture Adapter Def var adapterDef = { identity: 'foo', - find: function(con, col, criteria, cb) { - generatedCriteria = criteria; - if(col === 'user') return cb(null, [{ id: 1, car: 1 }]); - if(col === 'car') return cb(null, [{ id: 1, foobar: 1 }]); + find: function(con, query, cb) { + generatedCriteria = query.criteria; + if (query.using === 'user') { + return cb(null, [{ id: 1, car: 1 }]); + } + + if (query.using === 'car') { + return cb(null, [{ id: 1, foobar: 1 }]); + } + return cb(); } }; @@ -57,42 +70,28 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - User = colls.collections.user; - Car = colls.collections.car; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + User = orm.collections.user; + Car = orm.collections.car; + return done(); }); }); it('should transform populated values', function(done) { - User.find().populate('car').exec(function(err, user) { - if(err) return done(err); - assert(user[0].car); - assert(user[0].car.driver); - assert(!user[0].car.foobar); - done(); - }); - }); - - it('should modelize populated values', function(done) { - User.find().populate('car').exec(function(err, user) { - if(err) return done(err); - assert(user[0].car); - assert(typeof user[0].car.save === 'function'); 
- done(); - }); - }); + User.find().populate('car').exec(function(err, users) { + if (err) { + return done(err); + } - it('should transform criteria values', function(done) { - Car.find().populate('driver', { name: 'foo' }).exec(function(err, car) { - if(err) return done(err); - assert(generatedCriteria.where.my_name); - assert(!generatedCriteria.where.name); - done(); + assert(users[0].car); + assert(users[0].car.driver); + assert(!users[0].car.foobar); + return done(); }); }); - }); }); diff --git a/test/unit/query/integrator.innerJoin.js b/test/unit/query/integrator.innerJoin.js deleted file mode 100644 index 90235a18a..000000000 --- a/test/unit/query/integrator.innerJoin.js +++ /dev/null @@ -1,107 +0,0 @@ -/** - * Module dependencies - */ -var innerJoin = require('../../../lib/waterline/query/integrator/innerJoin'); -var assert = require('assert'); -var should = require('should'); -var _ = require('lodash'); - - -describe('innerJoin', function() { - - // Clear the require cache - Object.keys(require.cache).forEach(function (modulePath) { - delete require.cache[modulePath]; - }); - - var fixtures = { - cache: require('../../support/fixtures/integrator/cache'), - joinResults: require('../../support/fixtures/integrator/joinResults') - }; - - describe('with invalid input', function() { - - it('should throw if options are invalid', function() { - assert.throws(function() { - innerJoin({ - left: 238523523952358, - right: 'something invalid', - leftKey: { - something: 'invalid' - }, - rightKey: { - wtf: new Date() - }, - }); - }); - - assert.throws(function() { - innerJoin('something completely ridiculous'); - }); - }); - - it('should throw if options are missing', function() { - assert.throws(function() { - innerJoin({ - left: [], - right: [], - leftKey: 'foo' - }); - }); - assert.throws(function() { - innerJoin({ - left: [], - right: [], - rightKey: 'foo' - }); - }); - assert.throws(function() { - innerJoin({ - right: [], - rightKey: 'foo' - }); - }); - }); - }); - 
- - describe('when run with valid input', function() { - - var results; - var expected = { - 'results.length': 2, - properties: [ - 'id', 'subject', 'body', 'from', - // Joined properties WILL always exist since this is an outer join. - 'user_id' - ], - results: fixtures.joinResults.___inner___message___message_to_user - }; - - it('should not throw', function() { - assert.doesNotThrow(function() { - results = innerJoin({ - left: fixtures.cache.message, - right: fixtures.cache.message_to_user, - leftKey: 'id', - rightKey: 'message_id' - }); - }); - }); - - it('output should be an array', function() { - results.should.be.Array; - }); - - it('output should match the expected results', function() { - - // Check that expected # of results exist. - results.should.have.lengthOf(expected['results.length']); - - // Check that results are exactly correct. - results.should.eql(expected.results); - }); - - }); - -}); diff --git a/test/unit/query/integrator.js b/test/unit/query/integrator.js deleted file mode 100644 index 254ea99a5..000000000 --- a/test/unit/query/integrator.js +++ /dev/null @@ -1,269 +0,0 @@ -/** - * Module dependencies - */ -var integrate = require('../../../lib/waterline/query/integrator'); -var assert = require('assert'); -var should = require('should'); -var _ = require('lodash'); - - - - -describe('integrator', function () { - - describe('with no callback', function () { - - it('should throw', function () { - assert.throws(function () { - integrate({}, []); - }); - }); - }); - - - - describe('with otherwise-invalid input', function () { - - it('should trigger cb(err)', function (done) { - assert.doesNotThrow(function () { - integrate('foo', 'bar', 'id', function (err, results) { - assert(err); - done(); - }); - }); - }); - }); - - - - describe('with valid input', function () { - - describe(':: N..M :: ',function () { - - var fixtures = { - joins: _.cloneDeep(require('../../support/fixtures/integrator/n..m.joins.js')), - cache: 
_.cloneDeep(require('../../support/fixtures/integrator/cache')) - }; - var results; - - before(function (done){ - assert.doesNotThrow(function () { - integrate(fixtures.cache, fixtures.joins, 'id', function (err, _results) { - assert(!err); - results = _results; - done(err); - }); - }); - }); - - it('should be an array', function () { - results.should.be.Array; - }); - - it('should have items which have all the properties of the parent table'); - - describe(':: populated aliases', function () { - var aliases = Object.keys(_.groupBy(fixtures.joins, 'alias')); - - it('should exist for every alias specified in `joins` (i.e. every `populate()`)', function () { - - // Each result is an object and contains a valid alias - _.each(results, function (result) { - result - .should.be.Object; - - _.any(aliases, function (alias) { - return result[alias]; - }) - .should.be.true; - }); - - // Double check. - _.each(results, function (result) { - result.should.be.Object; - - _.each(aliases, function (alias) { - result[alias].should.be.ok; - }); - }); - - // All aliases are accounted for in results - _.all(aliases, function (alias) { - return results.length === _.pluck(results, alias).length; - }).should.be.true; - }); - - it('should not include extraneous attributes'); - - - describe('with no matching child records',function () { - - // Empty the child table in the cache - before(function () { - fixtures.cache.message_to_user = []; - }); - - it('should still work in a predictable way (populate an empty array)', function (done) { - assert.doesNotThrow(function () { - integrate(fixtures.cache, fixtures.joins, 'id', function (err, _results) { - assert(!err); - return done(err); - }); - }); - }); - }); - }); - }); - - - - - - - - describe(':: 1..N ::',function () { - - var results; - var fixtures = { - joins: _.cloneDeep(require('../../support/fixtures/integrator/n..1.joins.js')), - cache: _.cloneDeep(require('../../support/fixtures/integrator/cache')) - }; - - before(function (done){ 
- assert.doesNotThrow(function () { - integrate(fixtures.cache, fixtures.joins, 'id', function (err, _results) { - assert(!err); - results = _results; - done(err); - }); - }); - }); - - it('should be an array', function () { - results.should.be.Array; - }); - - describe(':: populated aliases', function () { - var aliases = Object.keys(_.groupBy(fixtures.joins, 'alias')); - - it('should exist for every alias specified in `joins` (i.e. every `populate()`)', function () { - - // Each result is an object and contains a valid alias - _.each(results, function (result) { - result - .should.be.Object; - - _.any(aliases, function (alias) { - return result[alias]; - }) - .should.be.true; - }); - - // Double check. - _.each(results, function (result) { - result.should.be.Object; - - _.each(aliases, function (alias) { - result[alias].should.be.ok; - result[alias].should.be.ok; - }); - }); - - // All aliases are accounted for in results - _.all(aliases, function (alias) { - return results.length === _.pluck(results, alias).length; - }).should.be.true; - }); - - it('should have proper number of users in "from"', function () { - - // console.log('\n\n:: 1..N ::\nresults ::\n', - // require('util').inspect(results, {depth: 4})); - - results[0].should.have.property('from').with.lengthOf(1); - results[1].should.have.property('from').with.lengthOf(1); - results[2].should.have.property('from').with.lengthOf(0); - - }); - }); - - - it('should not include extraneous attributes'); - }); - }); - - - - - - - describe(':: multiple populates ::',function () { - - var results; - var fixtures = { - joins: _.cloneDeep(require('../../support/fixtures/integrator/multiple.joins.js')), - cache: _.cloneDeep(require('../../support/fixtures/integrator/cache')) - }; - - before(function (done){ - assert.doesNotThrow(function () { - integrate(fixtures.cache, fixtures.joins, 'id', function (err, _results) { - assert(!err); - results = _results; - done(err); - }); - }); - }); - - it('should be an array', 
function () { - results.should.be.Array; - }); - - describe(':: populated aliases', function () { - var aliases = Object.keys(_.groupBy(fixtures.joins, 'alias')); - - it('should exist for every alias specified in `joins` (i.e. every `populate()`)', function () { - - // Each result is an object and contains a valid alias - _.each(results, function (result) { - result - .should.be.Object; - - _.any(aliases, function (alias) { - return result[alias]; - }) - .should.be.true; - }); - - // Double check. - _.each(results, function (result) { - result.should.be.Object; - - _.each(aliases, function (alias) { - result[alias].should.be.ok; - result[alias].should.be.ok; - }); - }); - - // All aliases are accounted for in results - _.all(aliases, function (alias) { - return results.length === _.pluck(results, alias).length; - }).should.be.true; - - }); - - it('should contain expected results', function () { - - // console.log('\n\n:: multiple populates ::\nresults ::\n', - // require('util').inspect(results, {depth: 4})); - results[0].should.have.property('from').with.lengthOf(1); - results[1].should.have.property('from').with.lengthOf(1); - results[2].should.have.property('from').with.lengthOf(0); - }); - }); - - - it('should not include extraneous attributes'); - }); - -}); diff --git a/test/unit/query/integrator.leftOuterJoin.js b/test/unit/query/integrator.leftOuterJoin.js deleted file mode 100644 index b0c53eef2..000000000 --- a/test/unit/query/integrator.leftOuterJoin.js +++ /dev/null @@ -1,175 +0,0 @@ -/** - * Module dependencies - */ -var leftOuterJoin = require('../../../lib/waterline/query/integrator/leftOuterJoin'); -var fixtures = { - cache: require('../../support/fixtures/integrator/cache'), - joinResults: require('../../support/fixtures/integrator/joinResults') -}; -var assert = require('assert'); -var should = require('should'); -var _ = require('lodash'); - - -describe('leftOuterJoin', function() { - - describe('with invalid input', function() { - - it('should 
throw if options are invalid', function() { - assert.throws(function() { - leftOuterJoin({ - left: 238523523952358, - right: 'something invalid', - leftKey: { - something: 'invalid' - }, - rightKey: { - wtf: new Date() - }, - }); - }); - - assert.throws(function() { - leftOuterJoin('something completely ridiculous'); - }); - }); - - it('should throw if options are missing', function() { - assert.throws(function() { - leftOuterJoin({ - left: [], - right: [], - leftKey: 'foo' - }); - }); - assert.throws(function() { - leftOuterJoin({ - left: [], - right: [], - rightKey: 'foo' - }); - }); - assert.throws(function() { - leftOuterJoin({ - right: [], - rightKey: 'foo' - }); - }); - }); - }); - - - describe('when run with valid input', function() { - - var results; - var expected = { - 'results.length': 4, - properties: [ - 'id', 'subject', 'body', 'from', - // Joined properties won't always exist since this is an outer join. - /* 'user_id','email' */ - ], - results: fixtures.joinResults.message___message_to_user - }; - - it('should not throw', function() { - assert.doesNotThrow(function() { - results = leftOuterJoin({ - left: fixtures.cache.message, - right: fixtures.cache.message_to_user, - leftKey: 'id', - rightKey: 'message_id' - }); - }); - }); - - it('output should be an array', function() { - results.should.be.Array; - }); - - it('output should match the expected results', function() { - - // Check that expected # of results exist. - results.should.have.lengthOf(expected['results.length']); - - // Check that results are exactly correct. - results.should.eql(expected.results); - }); - - describe('when run again, using previous results as left side', function() { - - var results_2; - var expected = { - 'results_2.length': 4, - properties: [ - // Joined properties (user_id, email) won't always exist since this is an outer join. 
- 'id', 'subject', 'body', 'from', - ], - results: fixtures.joinResults.message___message_to_user___user - }; - - it('should not throw', function() { - assert.doesNotThrow(function() { - results_2 = leftOuterJoin({ - left: results, - right: fixtures.cache.user, - leftKey: '.user_id', - rightKey: 'id', - childNamespace: '..' - }); - }); - }); - - it('output should be an array', function() { - results_2.should.be.Array; - }); - - it('output should match the expected results', function() { - - // Check that it has the correct # of results - results_2.should.have.lengthOf(expected['results_2.length']); - - // Check that it has properties - _.each(results_2, function(result) { - _.each(expected.properties, function(property) { - result.should.have.property(property); - }); - }); - - // Check that results are exactly correct (deep equality). - results_2.should.eql(expected.results); - }); - }); - }); - - - describe('with no matching child rows', function () { - - var results; - - // Empty out the child table in cache - before(function () { - fixtures.cache.message_to_user = []; - }); - - - it('should not throw', function() { - assert.doesNotThrow(function() { - results = leftOuterJoin({ - left: fixtures.cache.message, - right: fixtures.cache.message_to_user, - leftKey: 'id', - rightKey: 'message_id' - }); - }); - }); - - - it('should still return all the items from parent table', function () { - results.should.be.an.Array; - results.should.have.lengthOf(fixtures.cache.message.length); - }); - }); - - -}); \ No newline at end of file diff --git a/test/unit/query/integrator.populate.js b/test/unit/query/integrator.populate.js deleted file mode 100644 index 4ba83a5d9..000000000 --- a/test/unit/query/integrator.populate.js +++ /dev/null @@ -1,110 +0,0 @@ -/** - * Test dependencies - */ -var _ = require('lodash'); -var leftOuterJoin = require('../../../lib/waterline/query/integrator/leftOuterJoin'); -var populate = require('../../../lib/waterline/query/integrator/populate'); 
-var fixtures = { - cache: _.cloneDeep(require('../../support/fixtures/integrator/cache')), - populateResults: _.cloneDeep(require('../../support/fixtures/integrator/populateResults')) -}; -var assert = require('assert'); -var should = require('should'); -var _ = require('lodash'); - -describe('populate', function() { - - - - describe('N..1 ::', function() { - - var results = _.cloneDeep(fixtures.cache.message); - var expected = { - length: 3, - properties: ['to', 'id', 'subject', 'body', 'from'], - results: fixtures.populateResults.message___message_to_user - }; - - it('should not throw', function() { - assert.doesNotThrow(function() { - populate({ - parentRows: results, - alias: 'to', - childRows: leftOuterJoin({ - left: fixtures.cache.message, - right: fixtures.cache.message_to_user, - leftKey: 'id', - rightKey: 'message_id' - }), - parentPK: 'id', - childPK: '.' + 'user_id', - fkToChild: '.' + 'user_id' - }); - }); - }); - - it('output should be an array', function() { - results.should.be.Array; - }); - - it('output should match the expected results', function() { - results.should.have.lengthOf(expected.length); - _.all(results, function (row) { - row.should.have.properties(expected.properties); - }); - results.should.eql(expected.results); - // console.log(require('util').inspect(results, {depth: 3})); - }); - }); - - - - describe('N..M ::', function() { - - var results = _.cloneDeep(fixtures.cache.message); - var expected = { - length: 3, - properties: ['to', 'id', 'subject', 'body', 'from'], - results: _.cloneDeep(fixtures.populateResults.message___message_to_user___user) - }; - - it('should not throw', function() { - assert.doesNotThrow(function() { - populate({ - parentRows: results, - alias: 'to', - childRows: leftOuterJoin({ - left: leftOuterJoin({ - left: fixtures.cache.message, - right: fixtures.cache.message_to_user, - leftKey: 'id', - rightKey: 'message_id' - }), - leftKey: '.user_id', - rightKey: 'id', - right: fixtures.cache.user, - childNamespace: 
'..' - }), - parentPK: 'id', - fkToChild: '.user_id', - childPK: '..id' - }); - }); - }); - - it('output should be an array', function() { - results.should.be.Array; - }); - - it('output should match the expected results', function() { - results.should.have.lengthOf(expected.length); - _.all(results, function (row) { - row.should.have.properties(expected.properties); - }); - results.should.eql(expected.results); - // console.log(require('util').inspect(results, {depth: 3})); - }); - }); - - -}); diff --git a/test/unit/query/query.autocreatedat.js b/test/unit/query/query.autocreatedat.js new file mode 100644 index 000000000..bbe007db5 --- /dev/null +++ b/test/unit/query/query.autocreatedat.js @@ -0,0 +1,62 @@ +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); + +describe('Collection Query ::', function() { + describe('.create()', function() { + describe('with autoCreatedAt', function() { + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + attributes: { + id: { + type: 'number' + }, + stringdate: { + type: 'string', + autoCreatedAt: true + }, + numberdate: { + type: 'number', + autoCreatedAt: true + }, + refdate: { + type: 'ref', + autoCreatedAt: true + }, + } + }; + + it('should use correct types for autoCreatedAt fields based on the attribute `type`', function(done) { + var waterline = new Waterline(); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); + + // Fixture Adapter Def + var adapterDef = { + create: function(con, query, cb) { + assert.equal(typeof query.newRecord.numberdate, 'number'); + assert.equal(typeof query.newRecord.stringdate, 'string'); + assert.equal(typeof query.newRecord.refdate, 'object'); + return cb(null, query.newRecord); + } + }; + + var connections = { + 'foo': { + adapter: 'foobar' + } + }; + + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, 
function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.create({ id: 1 }, done); + }); + }); + + }); + }); +}); diff --git a/test/unit/query/query.autoupdatedat.js b/test/unit/query/query.autoupdatedat.js new file mode 100644 index 000000000..0881af9f5 --- /dev/null +++ b/test/unit/query/query.autoupdatedat.js @@ -0,0 +1,60 @@ +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); + +describe('Collection Query ::', function() { + describe('.update()', function() { + describe('with autoUpdatedAt', function() { + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + attributes: { + id: { + type: 'number' + }, + stringdate: { + type: 'string', + autoUpdatedAt: true + }, + numberdate: { + type: 'number', + autoUpdatedAt: true + }, + refdate: { + type: 'ref', + autoUpdatedAt: true + }, + } + }; + + it('should use correct types for autoUpdatedAt fields based on the attribute `type`', function(done) { + var waterline = new Waterline(); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); + + // Fixture Adapter Def + var adapterDef = { update: function(con, query, cb) { query.valuesToSet.id = 1; return cb(null, [query.valuesToSet]); }}; + + var connections = { + 'foo': { + adapter: 'foobar' + } + }; + + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.update({ id: 1 }, {}, function(err, records) { + assert.equal(typeof records[0].numberdate, 'number'); + assert.equal(typeof records[0].stringdate, 'string'); + assert.equal(typeof records[0].refdate, 'object'); + return done(); + }, { fetch: true }); + }); + }); + + }); + }); +}); diff --git a/test/unit/query/query.average.js b/test/unit/query/query.average.js deleted file mode 100644 index 38e16605f..000000000 --- 
a/test/unit/query/query.average.js +++ /dev/null @@ -1,65 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection average', function () { - - describe('.average()', function () { - var query; - - before(function (done) { - - // Extend for testing purposes - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - age: 'integer', - percent: 'float' - } - }); - - // Fixture Adapter Def - var adapterDef = { - find: function (con, col, criteria, cb) { - return cb(null, [criteria]); - } - }; - - waterline.loadCollection(Model); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if (err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should return criteria with average set', function (done) { - query.find().average('age', 'percent').exec(function (err, obj) { - if(err) return done(err); - - assert(obj[0].average[0] === 'age'); - assert(obj[0].average[1] === 'percent'); - done(); - }); - }); - - it('should accept an array', function (done) { - query.find().average(['age', 'percent']).exec(function (err, obj) { - if(err) return done(err); - - assert(obj[0].average[0] === 'age'); - assert(obj[0].average[1] === 'percent'); - done(); - }); - }); - - }); -}); diff --git a/test/unit/query/query.avg.js b/test/unit/query/query.avg.js new file mode 100644 index 000000000..dcc7a5ce5 --- /dev/null +++ b/test/unit/query/query.avg.js @@ -0,0 +1,70 @@ +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); + +describe('Collection Query ::', function() { + describe('.avg()', function() { + var query; + + before(function(done) { + // Extend for testing purposes + var waterline = new Waterline(); + var Model = Waterline.Model.extend({ + identity: 'user', + datastore: 
'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + age: { + type: 'number' + }, + percent: { + type: 'number' + } + } + }); + + // Fixture Adapter Def + var adapterDef = { + avg: function(con, query, cb) { + return cb(null, query); + } + }; + + waterline.registerModel(Model); + + var connections = { + 'foo': { + adapter: 'foobar' + } + }; + + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); + }); + }); + + it('should return criteria with average set', function(done) { + query.avg('age').exec(function(err, query) { + if(err) { + return done(err); + } + + assert.equal(query.numericAttrName, 'age'); + return done(); + }); + }); + + it('should NOT accept an array', function(done) { + query.avg(['age', 'percent']).exec(function(err) { + assert(err); + return done(); + }); + }); + }); +}); diff --git a/test/unit/query/query.count.js b/test/unit/query/query.count.js index 9b3219309..9bbdf6299 100644 --- a/test/unit/query/query.count.js +++ b/test/unit/query/query.count.js @@ -1,62 +1,74 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.count()', function() { - var query; - - before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - doSomething: function() {} + var orm; + before(function (done) { + Waterline.start({ + adapters: { + 'sails-foobar': { + identity: 'sails-foobar', + count: function(datastoreName, s3q, cb) { + return cb(undefined, 1); + } + } + }, + datastores: { + default: { + adapter: 'sails-foobar' + } + }, + models: { 
+ user: { + identity: 'user', + datastore: 'default', + primaryKey: 'id', + attributes: { + id: { type: 'number' }, + name: { type: 'string' } + } + } } + }, function (err, _orm) { + if (err) { return done(err); } + orm = _orm; + return done(); }); - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { count: function(con, col, criteria, cb) { return cb(null, 1); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; + });// - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); + after(function(done) { + // Note that we don't bother attempting to stop the orm + // if it doesn't even exist (i.e. because `.start()` failed). + if (!orm) { return done(); } + Waterline.stop(orm, done); }); - it('should return a count', function(done) { - query.count({ name: 'foo'}, {}, function(err, count) { - if(err) return done(err); - - assert(count > 0); - done(); + it('should return a number representing the number of things', function(done) { + Waterline.getModel('user', orm) + .count({ name: 'foo' }, function(err, count) { + if(err) { return done(err); } + try { + assert(typeof count === 'number'); + assert(count > 0); + } catch (e) { return done(e); } + return done(); }); - }); + });// it('should allow a query to be built using deferreds', function(done) { - query.count() + Waterline.getModel('user', orm) + .count() .exec(function(err, result) { - if(err) return done(err); - - assert(result); - done(); + if(err) { return done(err); } + try { + assert(result); + } catch (e) { return done(e); } + return done(); }); - }); + });// - }); -}); + });// +});// diff --git a/test/unit/query/query.count.transform.js b/test/unit/query/query.count.transform.js index 1b41514c8..337ce30a5 100644 --- a/test/unit/query/query.count.transform.js +++ b/test/unit/query/query.count.transform.js @@ -1,21 +1,21 @@ -var Waterline = 
require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.count()', function() { - describe('with transformed values', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, name: { type: 'string', columnName: 'login' @@ -23,12 +23,12 @@ describe('Collection Query', function() { } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - count: function(con, col, criteria, cb) { - assert(criteria.where.login); + count: function(con, query, cb) { + assert(query.criteria.where.login); return cb(null, 1); } }; @@ -39,21 +39,24 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should transform values before sending to adapter', function(done) { query.count({ name: 'foo' }, function(err, obj) { - if(err) return done(err); - assert(obj === 1); - done(); + if(err) { + return done(err); + } + assert.equal(obj, 1); + return done(); }); }); }); - }); }); diff --git a/test/unit/query/query.create.js b/test/unit/query/query.create.js index 23312ba39..0006833f7 100644 --- a/test/unit/query/query.create.js +++ b/test/unit/query/query.create.js @@ -1,20 +1,22 @@ -var Waterline = require('../../../lib/waterline'), - 
assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.create()', function() { - describe('with Auto values', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, attributes: { + id: { + type: 'number' + }, first:{ type: 'string', defaultsTo: 'Foo' @@ -23,22 +25,25 @@ describe('Collection Query', function() { type: 'string', defaultsTo: 'Bar' }, - full: { - type: 'string', - defaultsTo: function() { return this.first + ' ' + this.second; } - }, name: { type: 'string', defaultsTo: 'Foo Bar' }, - doSomething: function() {} + createdAt: { + type: 'number', + autoCreatedAt: true + }, + updatedAt: { + type: 'number', + autoUpdatedAt: true + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { create: function(con, query, cb) { query.newRecord.id = 1; return cb(null, query.newRecord); }}; var connections = { 'foo': { @@ -46,102 +51,108 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should set default values', function(done) { - query.create({}, function(err, status) { - assert(status.name === 'Foo Bar'); - done(); - }); - }); + query.create({id: 1}, function(err, 
status) { + if (err) { + return done(err); + } - it('should set default values when function', function(done) { - query.create({}, function(err, status) { - assert(status.full === 'Foo Bar'); - done(); + assert.equal(status.name, 'Foo Bar'); + return done(); }); }); it('should set default values when the value is undefined', function(done) { query.create({ first: undefined }, function(err, status) { - assert(status.first = 'Foo'); - assert(status.full === 'Foo Bar'); - done(); + if (err) { + return done(err); + } + + assert.equal(status.first, 'Foo'); + return done(); }); }); it('should add timestamps', function(done) { query.create({}, function(err, status) { + if (err) { + return done(err); + } + assert(status.createdAt); assert(status.updatedAt); - done(); + return done(); }); }); it('should set values', function(done) { query.create({ name: 'Bob' }, function(err, status) { - assert(status.name === 'Bob'); - done(); + if (err) { + return done(err); + } + + assert.equal(status.name, 'Bob'); + return done(); }); }); it('should strip values that don\'t belong to the schema', function(done) { query.create({ foo: 'bar' }, function(err, values) { - assert(!values.foo); - done(); - }); - }); + if (err) { + return done(err); + } - it('should return an instance of Model', function(done) { - query.create({}, function(err, status) { - assert(typeof status.doSomething === 'function'); - done(); + assert(!values.foo); + return done(); }); }); it('should allow a query to be built using deferreds', function(done) { - query.create() - .set({ name: 'bob' }) + query.create({ name: 'bob' }) .exec(function(err, result) { - assert(!err); + if (err) { + return done(err); + } assert(result); - done(); + return done(); }); }); - }); describe('override and disable auto values', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - - 
autoCreatedAt: false, - autoUpdatedAt: false, - + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' - }, - doSomething: function() {} + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { create: function(con, query, cb) { query.newRecord.id = 1; return cb(null, query.newRecord); }}; var connections = { 'foo': { @@ -149,18 +160,24 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if(err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should NOT add timestamps', function(done) { query.create({}, function(err, status) { + if (err) { + return done(err); + } + assert(!status.createdAt); assert(!status.updatedAt); - done(); + return done(); }); }); }); @@ -169,28 +186,35 @@ describe('Collection Query', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - - autoCreatedAt: "customCreatedAt", - autoUpdatedAt: "customUpdatedAt", - + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' }, - doSomething: function() {} + customCreatedAt: { + type: 'number', + autoCreatedAt: true + }, + customUpdatedAt: { + type: 'number', + autoUpdatedAt: true + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture 
Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { create: function(con, query, cb) { query.newRecord.id = 1; return cb(null, query.newRecord); }}; var connections = { 'foo': { @@ -198,20 +222,26 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should add timestamps with a custom name', function(done) { query.create({}, function(err, status) { + if (err) { + return done(err); + } + assert(!status.createdAt); assert(!status.updatedAt); assert(status.customCreatedAt); assert(status.customUpdatedAt); - done(); + return done(); }); }); }); @@ -220,22 +250,29 @@ describe('Collection Query', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, attributes: { - name: 'string', - age: 'integer' + id: { + type: 'number' + }, + name: { + type: 'string' + }, + age: { + type: 'number' + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { create: function(con, query, cb) { query.newRecord.id = 1; return cb(null, query.newRecord); }}; var connections = { 'foo': { @@ -243,41 +280,53 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) 
return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should cast values before sending to adapter', function(done) { query.create({ name: 'foo', age: '27' }, function(err, values) { - assert(values.name === 'foo'); - assert(values.age === 27); - done(); + if (err) { + return done(err); + } + + assert.equal(values.name, 'foo'); + assert.equal(values.age, 27); + return done(); }); }); }); - describe('with schema set to false', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', schema: false, - - attributes: {} + primaryKey: 'id', + fetchRecordsOnCreate: true, + attributes: { + id: { + type: 'number' + } + } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { + create: function(con, query, cb) { query.newRecord.id = 1; return cb(null, query.newRecord); }, + createEach: function(con, query, cb) { return cb(null); } + }; var connections = { 'foo': { @@ -285,28 +334,37 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should allow arbitratry values to be set', function(done) { query.create({ name: 'foo' }, function(err, record) { - assert(record.name === 
'foo'); - done(); + if (err) { + return done(err); + } + + assert.equal(record.name, 'foo'); + return done(); }); }); it('should not be detructive to passed-in arrays', function(done) { var myPreciousArray = [{ name: 'foo', age: '27' }]; - query.createEach(myPreciousArray, function(err, values) { - assert(myPreciousArray.length === 1); - done(); + query.createEach(myPreciousArray, function(err) { + if (err) { + return done(err); + } + + assert.equal(myPreciousArray.length, 1); + return done(); }); }); }); - }); }); diff --git a/test/unit/query/query.create.nested.js b/test/unit/query/query.create.nested.js deleted file mode 100644 index a3b8c84b6..000000000 --- a/test/unit/query/query.create.nested.js +++ /dev/null @@ -1,159 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { - - describe('.create()', function() { - - describe('with nested model values', function() { - var query; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - nestedModel: { - model: 'nested' - } - } - }); - var Nested = Waterline.Collection.extend({ - identity: 'nested', - connection: 'foo', - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(Model); - waterline.loadCollection(Nested); - - // Fixture Adapter Def - var _id = 1; - var findValues = []; - - var adapterDef = { - create: function(con, col, values, cb) { - values.id = _id; - findValues.push(values); - _id++; - return cb(null, values); - }, - find: function(con, col, criteria, cb) { - cb(null, findValues[_id - 1]); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - 
done(); - }); - }); - - it('should reduce the nested object down to a foreign key', function(done) { - query.create({ name: 'foo', nestedModel: { name: 'joe' }}, function(err, status) { - assert(!err); - assert(status.nestedModel); - assert(status.nestedModel === 1); - done(); - }); - }); - }); - - describe('with nested collection values', function() { - var query, updatedModels = [], findValues = []; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - nestedModels: { - collection: 'nested', - via: 'model' - } - } - }); - var Nested = Waterline.Collection.extend({ - identity: 'nested', - connection: 'foo', - attributes: { - name: 'string', - model: { - model: 'user' - } - } - }); - - waterline.loadCollection(Model); - waterline.loadCollection(Nested); - - var _id = 1; - var adapterDef = { - create: function(con, col, values, cb) { - values.id = _id; - findValues.push(values); - _id++; - return cb(null, values); - }, - find: function(con, col, criteria, cb) { - cb(null, findValues[_id - 1]); - }, - update: function(con, col, criteria, values, cb) { - updatedModels.push(criteria.where); - return cb(null, [values]); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should attempt to update each nested model', function(done) { - - var nestedModels = [ - { name: 'joe', model: 2 }, - { name: 'moe', model: 3 }, - { name: 'flow', model: 4 } - ]; - - query.create({ id: 5, name: 'foo', nestedModels: nestedModels }, function(err, status) { - assert(!err); - assert(status.nestedModels.length === 0); - assert(findValues.length === 4); - done(); - }); - }); - }); - - }); -}); diff --git 
a/test/unit/query/query.create.ref.js b/test/unit/query/query.create.ref.js new file mode 100644 index 000000000..6fc68cbcb --- /dev/null +++ b/test/unit/query/query.create.ref.js @@ -0,0 +1,54 @@ +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); + +describe('Collection Query ::', function() { + describe('.create()', function() { + describe('with ref values', function() { + + it('should maintain object references for `ref` type attributes', function(done) { + + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + attributes: { + id: { + type: 'number' + }, + blob: { + type: 'ref' + } + } + }; + + var myBlob = new Buffer([1,2,3,4,5]); + var waterline = new Waterline(); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); + + // Fixture Adapter Def + var adapterDef = { + create: function(con, query, cb) { + assert(query.newRecord.blob === myBlob); + return cb(null, query.newRecord); + } + }; + + var connections = { + 'foo': { + adapter: 'foobar' + } + }; + + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.create({ blob: myBlob, id: 1 }, done); + }); + });//it + + }); + }); +}); diff --git a/test/unit/query/query.create.transform.js b/test/unit/query/query.create.transform.js index c48742a73..247665e5b 100644 --- a/test/unit/query/query.create.transform.js +++ b/test/unit/query/query.create.transform.js @@ -1,38 +1,35 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.create()', function() { - describe('with transformed values', function() { - var 
Model; - - before(function() { - - Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - - attributes: { - name: { - type: 'string', - columnName: 'login' - } + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + attributes: { + id: { + type: 'number' + }, + name: { + type: 'string', + columnName: 'login' } - }); - }); + } + }; it('should transform values before sending to adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - create: function(con, col, values, cb) { - assert(values.login); - return cb(null, values); + create: function(con, query, cb) { + assert(query.newRecord.login); + return cb(null, query.newRecord); } }; @@ -42,23 +39,23 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.create({ name: 'foo' }, done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.create({ name: 'foo', id: 1 }, done); }); - }); it('should transform values after receiving from adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - create: function(con, col, values, cb) { - assert(values.login); - return cb(null, values); + create: function(con, query, cb) { + assert(query.newRecord.login); + return cb(null, query.newRecord); } }; @@ -68,16 +65,22 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { 
- if(err) return done(err); - colls.collections.user.create({ name: 'foo' }, function(err, values) { + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + orm.collections.user.create({ name: 'foo', id: 1 }, function(err, values) { + if (err) { + return done(err); + } + assert(values.name); assert(!values.login); - done(); + return done(); }); }); }); }); - }); }); diff --git a/test/unit/query/query.createEach.js b/test/unit/query/query.createEach.js index 44dafda58..a099e65ad 100644 --- a/test/unit/query/query.createEach.js +++ b/test/unit/query/query.createEach.js @@ -1,20 +1,23 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.createEach()', function() { - describe('with proper values', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreateEach: true, attributes: { + id: { + type: 'number' + }, first:{ type: 'string', defaultsTo: 'Foo' @@ -23,26 +26,35 @@ describe('Collection Query', function() { type: 'string', defaultsTo: 'Bar' }, - full: { - type: 'string', - defaultsTo: function() { return this.first + ' ' + this.second; } - }, name: { type: 'string', defaultsTo: 'Foo Bar' }, arr: { - type: 'array', + type: 'json', defaultsTo: [] }, - doSomething: function() {} + createdAt: { + type: 'number', + autoCreatedAt: true + }, + updatedAt: { + type: 'number', + autoUpdatedAt: true + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: 
function(con, col, values, cb) { return cb(null, values); }}; + var adapterDef = { + createEach: function(con, query, cb) { + var id = 0; + query.newRecords = _.map(query.newRecords, function(newRecord) { newRecord.id = ++id; return newRecord; }); + return cb(null, query.newRecords); + } + }; var connections = { 'foo': { @@ -50,23 +62,24 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); - it('should require an array of values', function(done) { - query.createEach({}, function(err, values) { + query.createEach({}, function(err) { assert(err); - done(); + return done(); }); }); it('should require a valid set of records', function(done) { - query.createEach([{},'string'], function(err, values) { + query.createEach([{},'string'], function(err) { assert(err); done(); }); @@ -74,59 +87,72 @@ describe('Collection Query', function() { it('should add default values to each record', function(done) { query.createEach([{},{}], function(err, values) { - assert(Array.isArray(values)); - assert(values[0].name === 'Foo Bar'); - assert(values[1].name === 'Foo Bar'); - done(); - }); - }); + if (err) { + return done(err); + } - it('should add default values to each record when function', function(done) { - query.createEach([{},{}], function(err, values) { - assert(Array.isArray(values)); - assert(values[0].full === 'Foo Bar'); - assert(values[1].full === 'Foo Bar'); - done(); + assert(_.isArray(values)); + assert.equal(values[0].name, 'Foo Bar'); + assert.equal(values[1].name, 'Foo Bar'); + return done(); }); }); it('should clone default values for each record', function(done) { query.createEach([{},{}], 
function(err, values) { - assert(Array.isArray(values)); - assert(values[0].arr !== values[1].arr); + if (err) { + return done(err); + } + + assert(_.isArray(values)); + assert.notEqual(values[0].arr, values[1].arr); + + // Add an item to one array values[1].arr.push('another'); - assert(values[0].arr.length === 0); - assert(values[1].arr.length === 1); - done(); + + // Check that the values aren't refs + assert.equal(values[0].arr.length, 0); + assert.equal(values[1].arr.length, 1); + return done(); }); }); it('should strip values that don\'t belong to the schema', function(done) { query.createEach([{ foo: 'bar' }], function(err, values) { + if (err) { + return done(err); + } + assert(!values[0].foo); - done(); + return done(); }); }); it('should add timestamp values to each record', function(done) { query.createEach([{},{}], function(err, values) { + if (err) { + return done(err); + } + assert(values[0].createdAt); assert(values[0].updatedAt); assert(values[0].createdAt); assert(values[1].updatedAt); - done(); + return done(); }); }); it('should allow a query to be built using deferreds', function(done) { - query.createEach() - .set([{ name: 'bob' }, { name: 'foo'}]) + query.createEach([{ name: 'bob' }, { name: 'foo'}]) .exec(function(err, result) { - assert(!err); + if (err) { + return done(err); + } + assert(result); - assert(result[0].name === 'bob'); - assert(result[1].name === 'foo'); - done(); + assert.equal(result[0].name, 'bob'); + assert.equal(result[1].name, 'foo'); + return done(); }); }); }); @@ -135,21 +161,35 @@ describe('Collection Query', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreateEach: true, attributes: { - name: 'string', - age: 'integer' + id: { + type: 'number' + }, + name: { + type: 'string' + }, + age: { + type: 'number' 
+ } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { create: function(con, col, valuesList, cb) { return cb(null, valuesList); }}; + var adapterDef = { + createEach: function(con, query, cb) { + var id = 0; + query.newRecords = _.map(query.newRecords, function(newRecord) { newRecord.id = ++id; return newRecord; }); + return cb(null, query.newRecords); + } + }; var connections = { 'foo': { @@ -157,29 +197,38 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should cast values before sending to adapter', function(done) { query.createEach([{ name: 'foo', age: '27' }], function(err, values) { - assert(values[0].name === 'foo'); - assert(values[0].age === 27); - done(); + if (err) { + return done(err); + } + + assert.equal(values[0].name, 'foo'); + assert.equal(values[0].age, 27); + return done(); }); }); it('should not be detructive to passed-in arrays', function(done) { var myPreciousArray = [{ name: 'foo', age: '27' }]; - query.createEach(myPreciousArray, function(err, values) { - assert(myPreciousArray.length === 1); - done(); + query.createEach(myPreciousArray, function(err) { + if (err) { + return done(err); + } + + assert.equal(myPreciousArray.length, 1); + return done(); }); }); }); - }); }); diff --git a/test/unit/query/query.createEach.transform.js b/test/unit/query/query.createEach.transform.js index 740903897..9a29fd89e 100644 --- a/test/unit/query/query.createEach.transform.js +++ b/test/unit/query/query.createEach.transform.js @@ -1,37 +1,38 @@ -var Waterline = require('../../../lib/waterline'), - assert = 
require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.createEach()', function() { - var Model; - - before(function() { - - Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar', - columnName: 'login' - } + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreateEach: true, + attributes: { + id: { + type: 'number' + }, + name: { + type: 'string', + defaultsTo: 'Foo Bar', + columnName: 'login' } - }); - }); + } + }; it('should transform values before sending to adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - create: function(con, col, values, cb) { - assert(values.login); - return cb(null, values); + createEach: function(con, query, cb) { + assert(_.first(query.newRecords).login); + var id = 0; + query.newRecords = _.map(query.newRecords, function(newRecord) { newRecord.id = ++id; return newRecord; }); + return cb(null, query.newRecords); } }; @@ -41,22 +42,24 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.createEach([{ name: 'foo' }], done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.createEach([{ name: 'foo' }], done); }); }); it('should transform values after receiving from adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + 
waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - create: function(con, col, values, cb) { - assert(values.login); - return cb(null, values); + createEach: function(con, query, cb) { + var id = 0; + query.newRecords = _.map(query.newRecords, function(newRecord) { newRecord.id = ++id; return newRecord; }); + return cb(null, query.newRecords); } }; @@ -66,15 +69,21 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.createEach([{ name: 'foo' }], function(err, values) { + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.createEach([{ name: 'foo' }], function(err, values) { + if (err) { + return done(err); + } + assert(values[0].name); assert(!values[0].login); - done(); + + return done(); }); }); }); - }); }); diff --git a/test/unit/query/query.destroy.js b/test/unit/query/query.destroy.js index 509346d22..36b24704a 100644 --- a/test/unit/query/query.destroy.js +++ b/test/unit/query/query.destroy.js @@ -1,32 +1,33 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.destroy()', function() { describe('with Auto PK', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' - }, - doSomething: function() {} + } } }); - 
waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { destroy: function(con, col, options, cb) { return cb(null); }}; + var adapterDef = { destroy: function(con, query, cb) { return cb(); }}; var connections = { 'foo': { @@ -34,17 +35,22 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should not return an error', function(done) { query.destroy({}, function(err) { - assert(!err); - done(); + if (err) { + return done(err); + } + + return done(); }); }); @@ -52,16 +58,11 @@ describe('Collection Query', function() { query.destroy() .where({}) .exec(function(err) { - assert(!err); - done(); - }); - }); + if (err) { + return done(err); + } - it('should not delete an empty IN array', function(done) { - query.destroy({id: []}, function(err, deleted) { - assert(!err); - assert(deleted.length === 0); - done(); + return done(); }); }); }); @@ -70,32 +71,29 @@ describe('Collection Query', function() { var query; before(function(done) { - var waterline = new Waterline(); // Extend for testing purposes - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - autoPK: false, + datastore: 'foo', + primaryKey: 'myPk', attributes: { name: { type: 'string', defaultsTo: 'Foo Bar' }, myPk: { - type: 'integer', - primaryKey: true, - columnName: 'pkColumn', - defaultsTo: 1 + type: 'number', + columnName: 'pkColumn' } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { destroy: function(con, col, options, cb) { return 
cb(null, options); }}; + var adapterDef = { destroy: function(con, query, cb) { return cb(null, query.criteria); }}; var connections = { 'foo': { @@ -103,22 +101,24 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should use the custom primary key when a single value is passed in', function(done) { - query.destroy(1, function(err, values) { - assert(!err); - assert(values.where.pkColumn === 1); - done(); + query.destroy(1, function(err) { + if (err) { + return done(err); + } + return done(); }); }); }); - }); }); diff --git a/test/unit/query/query.destroy.transform.js b/test/unit/query/query.destroy.transform.js index 6ac4f4b2a..fe63c34ec 100644 --- a/test/unit/query/query.destroy.transform.js +++ b/test/unit/query/query.destroy.transform.js @@ -1,21 +1,21 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.destroy()', function() { - describe('with transformed values', function() { var Model; before(function() { - // Extend for testing purposes - Model = Waterline.Collection.extend({ + Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, name: { type: 'string', columnName: 'login' @@ -25,15 +25,14 @@ describe('Collection Query', function() { }); it('should transform values before sending to adapter', function(done) { - var waterline = new Waterline(); - 
waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - destroy: function(con, col, options, cb) { - assert(options.where.login); - return cb(null); + destroy: function(con, query, cb) { + assert(query.criteria.where.login); + return cb(); } }; @@ -43,12 +42,13 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.destroy({ name: 'foo' }, done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.destroy({ name: 'foo' }, done); }); }); }); - }); }); diff --git a/test/unit/query/query.dynamicFinders.js b/test/unit/query/query.dynamicFinders.js deleted file mode 100644 index e73d6064c..000000000 --- a/test/unit/query/query.dynamicFinders.js +++ /dev/null @@ -1,138 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { - - describe('dynamicFinders', function() { - - describe('configuration', function() { - var collections; - - before(function (done) { - - var waterline = new Waterline(); - var User = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - associationFinders: false, - attributes: { - name: 'string', - group: { - model: 'group' - } - } - }); - - var Group = Waterline.Collection.extend({ - identity: 'group', - connection: 'foo', - dynamicFinders: false, - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Group); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, orm) { - if (err) return done(err); - - collections = orm.collections; - done(); - }); - }); - - it('can disable 
dynamicFinders', function () { - assert(typeof collections.group.findOneByName === 'undefined'); - }); - it('can disable associationFinders', function () { - assert(typeof collections.user.findByName === 'function'); - assert(typeof collections.user.findByGroupIn === 'undefined'); - }); - - }); - - describe('usage', function () { - var query; - - before(function(done) { - - var waterline = new Waterline(); - var User = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string', - group: { - model: 'group' - } - } - }); - - var Group = Waterline.Collection.extend({ - identity: 'group', - connection: 'foo', - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(User); - waterline.loadCollection(Group); - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: {} }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should add dynamic finder functions', function() { - assert(typeof query.findOneByName === 'function'); - assert(typeof query.findOneByNameIn === 'function'); - assert(typeof query.findOneByNameLike === 'function'); - assert(typeof query.findByName === 'function'); - assert(typeof query.findByNameIn === 'function'); - assert(typeof query.findByNameLike === 'function'); - assert(typeof query.countByName === 'function'); - assert(typeof query.countByNameIn === 'function'); - assert(typeof query.countByNameLike === 'function'); - assert(typeof query.nameStartsWith === 'function'); - assert(typeof query.nameEndsWith === 'function'); - assert(typeof query.nameContains === 'function'); - }); - - it('should not create generic dynamic finders for has_one and belongs_to associations', function() { - assert(!query.findOneByGroupIn); - assert(!query.findOneByGroupLike); - assert(!query.findByGroupIn); - assert(!query.findByGroupLike); - 
assert(!query.countByGroup); - assert(!query.countByGroupIn); - assert(!query.countByGroupLike); - assert(!query.groupStartsWith); - assert(!query.groupEndsWith); - assert(!query.groupContains); - }); - - it.skip('should create limited dynamic finders for has_one and belongs_to associations', function() { - assert(typeof query.findByGroup === 'function'); - assert(typeof query.findOneByGroup === 'function'); - }); - - }); - }); -}); diff --git a/test/unit/query/query.exec.js b/test/unit/query/query.exec.js index 91c9846c9..b59a2221f 100644 --- a/test/unit/query/query.exec.js +++ b/test/unit/query/query.exec.js @@ -1,33 +1,34 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'), - async = require('async'); - -describe('Collection Query', function() { +var assert = require('assert'); +var async = require('async'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.exec()', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' - }, - doSomething: function() {} + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { - return cb(null, [criteria]); + find: function(con, query, cb) { + return cb(null, [{id: 1}]); } }; @@ -37,10 +38,13 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if (err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + 
} + + query = orm.collections.user; + return done(); }); }); @@ -48,53 +52,53 @@ describe('Collection Query', function() { // .exec() usage query.find() .exec(function(err, results0) { - assert(!err); + if (err) { + return done(err); + } // callback usage - query.find(function (err, results1) { - assert(!err); - assert(results0.length === results1.length); + query.find({}, {}, function(err, results1) { + if (err) { + return done(err); + } + assert.equal(results0.length, results1.length); + return done(); }); - done(); }); }); - describe('when passed a switchback (object with multiple handlers)', function () { + describe.skip('when passed a switchback (object with multiple handlers)', function() { + var _error; + var _results; before(function getTheQueryResultsForTestsBelow(done) { - var self = this; - async.auto({ - objUsage: function (cb) { + objUsage: function(cb) { query.find() .exec({ - success: function (results) { + success: function(results) { cb(null, results); }, error: cb }); }, - cbUsage: function (cb) { + + cbUsage: function(cb) { query.find().exec(cb); } - }, function asyncComplete (err, async_data) { + + }, function asyncComplete(err, async_data) { // Save results for use below - self._error = err; - self._results = async_data; - done(); + _error = err; + _results = async_data; + return done(); }); - - }); - - it('should not fail', function() { - assert(this._results); - assert(!this._error); }); - it('should work the same as it does with a callback', function() { - assert(this._results.cbUsage.length === this._results.objUsage.length); + it('should not fail, and should work the same as it does w/ a callback', function() { + assert(!_error, _error); + assert.equal(_results.cbUsage.length, _results.objUsage.length); }); }); - }); }); diff --git a/test/unit/query/query.find.js b/test/unit/query/query.find.js index 5c4fa591c..75aa4685b 100644 --- a/test/unit/query/query.find.js +++ b/test/unit/query/query.find.js @@ -1,30 +1,33 @@ -var Waterline = 
require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.find()', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + schema: false, attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' - }, - doSomething: function() {} + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { find: function(con, col, criteria, cb) { return cb(null, [criteria]); }}; + var adapterDef = { find: function(con, query, cb) { return cb(null, [{id: 1, criteria: query.criteria}]); }}; var connections = { 'foo': { @@ -32,31 +35,33 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if(err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should allow options to be optional', function(done) { - query.find({}, function(err, values) { - assert(!err); - done(); + query.find({}, function(err) { + if(err) { + return done(err); + } + + return done(); }); }); it('should return an array', function(done) { query.find({}, {}, function(err, values) { - assert(Array.isArray(values)); - done(); - }); - }); + if (err) { + return done(err); + } - it('should return an instance of Model', function(done) { - query.find({}, {}, function(err, values) { - assert(typeof 
values[0].doSomething === 'function'); - done(); + assert(_.isArray(values)); + return done(); }); }); @@ -66,96 +71,115 @@ describe('Collection Query', function() { .where({ id: { '>': 1 } }) .limit(1) .skip(1) - .sort({ name: 0 }) + .sort([{ name: 'desc' }]) .exec(function(err, results) { - assert(!err); - assert(Array.isArray(results)); + if (err) { + return done(err); + } - assert(Object.keys(results[0].where).length === 2); - assert(results[0].where.name == 'Foo Bar'); - assert(results[0].where.id['>'] == 1); - assert(results[0].limit == 1); - assert(results[0].skip == 1); - assert(results[0].sort.name == -1); + assert(_.isArray(results)); + assert.equal(results[0].criteria.limit, 1); + assert.equal(results[0].criteria.skip, 1); + assert.equal(results[0].criteria.sort[0].name, 'DESC'); - done(); + return done(); }); }); describe('.paginate()', function() { - it('should skip to 0 and limit to 10 by default', function(done) { + it('should skip to 0 and limit to 30 by default', function(done) { query.find() - .paginate() + .paginate(0) .exec(function(err, results) { - assert(!err); - assert(Array.isArray(results)); + if (err) { + return done(err); + } - assert(results[0].skip === 0); - assert(results[0].limit === 10); + assert(_.isArray(results)); + assert.equal(results[0].criteria.skip, 0); + assert.equal(results[0].criteria.limit, 30); - done(); + return done(); }); }); it('should set skip to 0 from page 0', function(done) { query.find() - .paginate({page: 1}) + .paginate(1) .exec(function(err, results) { - assert(results[0].skip === 0); + if (err) { + return done(err); + } - done(); + assert.equal(results[0].criteria.skip, 30); + return done(); }); }); it('should set skip to 0 from page 1', function(done) { query.find() - .paginate({page: 1}) + .paginate(1) .exec(function(err, results) { - assert(results[0].skip === 0); + if (err) { + return done(err); + } - done(); + assert.equal(results[0].criteria.skip, 30); + return done(); }); }); - it('should set skip to 
10', function(done) { + it('should set skip to 30', function(done) { query.find() - .paginate({page: 2}) + .paginate(2) .exec(function(err, results) { - assert(results[0].skip === 10); + if (err) { + return done(err); + } - done(); + assert.equal(results[0].criteria.skip, 60); + return done(); }); }); it('should set limit to 1', function(done) { query.find() - .paginate({limit: 1}) + .paginate(1, 1) .exec(function(err, results) { - assert(results[0].limit === 1); + if (err) { + return done(err); + } - done(); + assert.equal(results[0].criteria.limit, 1); + return done(); }); }); - it('should set skip to 10 and limit to 10', function(done) { + it('should set skip to 20 and limit to 10', function(done) { query.find() - .paginate({page: 2, limit: 10}) + .paginate(2, 10) .exec(function(err, results) { - assert(results[0].skip === 10); - assert(results[0].limit === 10); + if (err) { + return done(err); + } - done(); + assert.equal(results[0].criteria.skip, 20); + assert.equal(results[0].criteria.limit, 10); + return done(); }); }); - it('should set skip to 20 and limit to 10', function(done) { + it('should set skip to 30 and limit to 10', function(done) { query.find() - .paginate({page: 3, limit: 10}) + .paginate(3, 10) .exec(function(err, results) { - assert(results[0].skip === 20); - assert(results[0].limit === 10); + if (err) { + return done(err); + } - done(); + assert.equal(results[0].criteria.skip, 30); + assert.equal(results[0].criteria.limit, 10); + return done(); }); }); }); diff --git a/test/unit/query/query.find.transform.js b/test/unit/query/query.find.transform.js index 3b190a6a4..acd12bc51 100644 --- a/test/unit/query/query.find.transform.js +++ b/test/unit/query/query.find.transform.js @@ -1,39 +1,34 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); 
+describe('Collection Query ::', function() { describe('.find()', function() { - describe('with transformed values', function() { - var Model; - - before(function() { - - // Extend for testing purposes - Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - - attributes: { - name: { - type: 'string', - columnName: 'login' - } + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + name: { + type: 'string', + columnName: 'login' } - }); - }); + } + }; it('should transform criteria before sending to adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { - assert(criteria.where.login); - return cb(null, [{ login: 'foo' }]); + find: function(con, query, cb) { + assert(query.criteria.where.login); + return cb(null, [{ id: 1, login: 'foo' }]); } }; @@ -43,22 +38,23 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.find({ where: { name: 'foo' }}, done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.find({ where: { name: 'foo' }}, done); }); }); it('should transform values after receiving from adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { - assert(criteria.where.login); - return cb(null, [{ login: 'foo' }]); + find: function(con, query, cb) { + assert(query.criteria.where.login); + return 
cb(null, [{ id: 1, login: 'foo' }]); } }; @@ -68,16 +64,126 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.find({ name: 'foo' }, function(err, values) { + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if(err) { + return done(err); + } + + orm.collections.user.find({ name: 'foo' }, function(err, values) { + if (err) { + return done(err); + } + assert(values[0].name); assert(!values[0].login); - done(); + return done(); }); }); - }); - }); + });//it + + it('should include base value for no-op populates', function(done) { + + Waterline.start({ + defaultModelSettings: { + attributes: { + id: { type: 'number' } + }, + primaryKey: 'id', + datastore: 'default' + }, + models: { + user: { + attributes: { + name: { + type: 'string', + columnName: '_userName' + }, + pets: { + collection: 'pet' + } + } + }, + pet: { + attributes: { + name: { + type: 'string', + columnName: '_petName' + } + } + } + }, + adapters: { + fake: { + identity: 'fake', + find: function(datastoreName, query, done) { + if (query.using === 'user') { + assert(!query.criteria.where.name); + return done(undefined, [{ id: 1, _userName: query.criteria.where._userName||'someuser' }]); + } + else if (query.using === 'pet') { + // console.log('query.criteria.where', require('util').inspect(query.criteria.where,{depth:null})); + assert(!query.criteria.where.name); + return done(undefined, [{ id: 1, _petName: query.criteria.where._petName||'somepet' }]); + } + else if (query.using === 'pet_pets_pet__user_pets') { + assert(_.contains(query.criteria.select, 'id')); + assert(_.contains(query.criteria.select, 'user_pets')); + assert(_.contains(query.criteria.select, 'pet_pets_pet')); + assert.equal(query.criteria.where.and[0].user_pets.in[0], 1); + return done(undefined, [{ id: 999, user_pets: 1, 
pet_pets_pet: 1 }]);//eslint-disable-line camelcase + } + else { + return done(new Error('Unexpected result for this test-- what model is this?? (`'+query.using+'`)')); + } + } + } + }, + datastores: { + default: { adapter: 'fake' } + } + }, function(err, orm) { + if(err) { return done(err); } + + // First, just a quick sanity check. + Waterline.getModel('pet', orm).find({ name: 'fluffy' }, function(err, pets) { + if (err){ return done(err); } + + if (pets.length !== 1) { return done(new Error('Expected there to be exactly one record returned!')); } + + // Then, let's test the meat of this. + Waterline.getModel('user', orm).find({ name: 'jorahmormont' }, { + // Use a deliberate no-op populate: + pets: { + or: [ + { + id: { in: [] } + }, + { + and: [ + {}, + { + id: { nin: [] } + }, + { + or: [] + } + ] + } + ] + } + }, function(err, users) { + if (err){ return done(err); } + + if (users.length !== 1) { return done(new Error('Expected there to be exactly one record returned!')); } + if (!_.isArray(users[0].pets) || users[0].pets.length !== 0) { return done(new Error('Expected base value for populated `pets` (i.e. 
empty array)')); } + + return done(); + + });//_∏_ + });//_∏_ + });//_∏_ (Waterline.start()) + });//it + }); }); }); diff --git a/test/unit/query/query.findOne.js b/test/unit/query/query.findOne.js index 66d784ad5..27372c1a0 100644 --- a/test/unit/query/query.findOne.js +++ b/test/unit/query/query.findOne.js @@ -1,32 +1,34 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var util = require('util'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.findOne()', function() { - describe('with autoPK', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' - }, - doSomething: function() {} + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { find: function(con, col, criteria, cb) { return cb(null, [criteria]); }}; + var adapterDef = { find: function(con, query, cb) { return cb(null, [{id: 1, criteria: query.criteria}]); }}; var connections = { 'foo': { @@ -34,48 +36,50 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should return an instance of Model', function(done) { - query.findOne({ name: 'foo' }, function(err, values) { - assert(typeof values.doSomething === 'function'); - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return 
done(err); + } + query = orm.collections.user; + return done(); }); }); it('should allow an integer to be passed in as criteria', function(done) { - query.findOne(1, function(err, values) { - assert(!err); - assert(values.where.id === 1); - done(); + query.findOne(1, function(err, record) { + if (err) { + return done(err); + } + + assert(_.isObject(record.criteria.where), 'Expected `record.where` to be a dictionary, but it is not. Here is `record`:\n```\n'+util.inspect(record,{depth:5})+'\n```\n'); + assert.equal(record.criteria.where.id, 1); + return done(); }); }); it('should allow a query to be built using deferreds', function(done) { query.findOne() - .where({ name: 'Foo Bar' }) - .where({ id: { '>': 1 } }) + .where({ + name: 'Foo Bar', + id: { + '>': 1 + } + }) .exec(function(err, results) { - assert(!err); - assert(!Array.isArray(results)); - - assert(Object.keys(results.where).length === 2); - assert(results.where.name == 'Foo Bar'); - assert(results.where.id['>'] == 1); + if (err) { + return done(err); + } - done(); + assert(!_.isArray(results)); + assert.equal(_.keys(results.criteria.where).length, 1); + assert.equal(results.criteria.where.and[0].name, 'Foo Bar'); + assert.equal(results.criteria.where.and[1].id['>'], 1); + return done(); }); }); - }); describe('with custom PK', function() { - describe('with no columnName set', function() { var query; @@ -84,27 +88,25 @@ describe('Collection Query', function() { var waterline = new Waterline(); // Extend for testing purposes - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - autoPK: false, + datastore: 'foo', + primaryKey: 'myPk', attributes: { name: { type: 'string', defaultsTo: 'Foo Bar' }, myPk: { - type: 'integer', - primaryKey: true, - defaultsTo: 1 + type: 'number' } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { find: function(con, col, criteria, cb) { 
return cb(null, [criteria]); }}; + var adapterDef = { find: function(con, query, cb) { return cb(null, [{myPk: 1, criteria: query.criteria}]); }}; var connections = { 'foo': { @@ -112,19 +114,23 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should use the custom primary key when a single value is passed in', function(done) { query.findOne(1, function(err, values) { - assert(!err); - assert(values.where.myPk === 1); - done(); + if (err) { + return done(err); + } + assert.equal(values.criteria.where.myPk, 1); + return done(); }); }); }); @@ -137,28 +143,26 @@ describe('Collection Query', function() { var waterline = new Waterline(); // Extend for testing purposes - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - autoPK: false, + datastore: 'foo', + primaryKey: 'myPk', attributes: { name: { type: 'string', defaultsTo: 'Foo Bar' }, myPk: { - type: 'integer', - primaryKey: true, - columnName: 'pkColumn', - defaultsTo: 1 + type: 'number', + columnName: 'pkColumn' } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { find: function(con, col, criteria, cb) { return cb(null, [criteria]); }}; + var adapterDef = { find: function(con, query, cb) { return cb(null, [{myPk: 1, criteria: query.criteria}]); }}; var connections = { 'foo': { @@ -166,23 +170,27 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - query = 
colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + query = orm.collections.user; + return done(); }); }); - it('should use the custom primary key when a single value is passed in', function(done) { query.findOne(1, function(err, values) { - assert(!err); - assert(values.where.pkColumn === 1); - done(); + if (err) { + return done(err); + } + + assert.equal(values.criteria.where.pkColumn, 1); + return done(); }); }); }); - }); }); }); diff --git a/test/unit/query/query.findOne.transform.js b/test/unit/query/query.findOne.transform.js index 7a01d4012..5f17b4bf3 100644 --- a/test/unit/query/query.findOne.transform.js +++ b/test/unit/query/query.findOne.transform.js @@ -1,39 +1,34 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.findOne()', function() { - describe('with transformed values', function() { - var Model; - - before(function() { - - // Extend for testing purposes - Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - - attributes: { - name: { - type: 'string', - columnName: 'login' - } + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + name: { + type: 'string', + columnName: 'login' } - }); - }); + } + }; it('should transform criteria before sending to adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { - assert(criteria.where.login); - return cb(null, [criteria]); + find: 
function(con, query, cb) { + assert(query.criteria.where.login); + return cb(null, [{id: 1, criteria: query.criteria}]); } }; @@ -43,22 +38,23 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.findOne({ where: { name: 'foo' }}, done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.findOne({ where: { name: 'foo' }}, done); }); }); it('should transform values after receiving from adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { - assert(criteria.where.login); - return cb(null, [{ login: 'foo' }]); + find: function(con, query, cb) { + assert(query.criteria.where.login); + return cb(null, [{ id: 1, login: 'foo' }]); } }; @@ -68,16 +64,20 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.findOne({ name: 'foo' }, function(err, values) { + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.findOne({ name: 'foo' }, function(err, values) { + if (err) { + return done(err); + } assert(values.name); assert(!values.login); - done(); + return done(); }); }); }); }); - }); }); diff --git a/test/unit/query/query.findOrCreate.js b/test/unit/query/query.findOrCreate.js index 2d4f002e3..c30c39bf2 100644 --- a/test/unit/query/query.findOrCreate.js +++ b/test/unit/query/query.findOrCreate.js @@ -1,34 +1,36 @@ -var Waterline = 
require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.findOrCreate()', function() { - describe('with proper values', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + fetchRecordsOnCreateEach: true, attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' - }, - doSomething: function() {} + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, []); }, - create: function(con, col, values, cb) { return cb(null, values); } + find: function(con, query, cb) { return cb(null, []); }, + create: function(con, query, cb) { query.newRecord.id = 1; return cb(null, query.newRecord); } }; var connections = { @@ -37,73 +39,64 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should set default values', function(done) { - query.findOrCreate({ name: 'Foo Bar' }, {}, function(err, status) { - assert(status.name === 'Foo Bar'); - done(); + query.findOrCreate({ name: 'Foo Bar' }, {}, function(err, status, created) { + if (err) { + return done(err); + } + + assert.equal(status.name, 'Foo Bar'); + assert.equal(created, true); + + 
return done(); }); }); it('should set default values with exec', function(done) { - query.findOrCreate({ name: 'Foo Bar' }).exec(function(err, status) { - assert(status.name === 'Foo Bar'); - done(); - }); - }); + query.findOrCreate({ name: 'Foo Bar' }).exec(function(err, status, created) { + if (err) { + return done(err); + } - it('should work with multiple objects', function(done) { - query.findOrCreate([{ name: 'Foo Bar' }, { name: 'Makis'}]).exec(function(err, status) { - assert(status[0].name === 'Foo Bar'); - assert(status[1].name === 'Makis'); - done(); - }); - }); + assert.equal(status.name, 'Foo Bar'); + assert.equal(created, true); - it('should add timestamps', function(done) { - query.findOrCreate({ name: 'Foo Bar' }, {}, function(err, status) { - assert(status.createdAt); - assert(status.updatedAt); - done(); + return done(); }); }); + + it('should set values', function(done) { - query.findOrCreate({ name: 'Foo Bar' }, { name: 'Bob' }, function(err, status) { - assert(status.name === 'Bob'); - done(); + query.findOrCreate({ name: 'Foo Bar' }, { name: 'Bob' }, function(err, status, created) { + if (err) { + return done(err); + } + + assert.equal(status.name, 'Bob'); + assert.equal(created, true); + + return done(); }); }); it('should strip values that don\'t belong to the schema', function(done) { query.findOrCreate({ name: 'Foo Bar'}, { foo: 'bar' }, function(err, values) { - assert(!values.foo); - done(); - }); - }); - - it('should return an instance of Model', function(done) { - query.findOrCreate({ name: 'Foo Bar' }, {}, function(err, status) { - assert(typeof status.doSomething === 'function'); - done(); - }); - }); + if (err) { + return done(err); + } - it('should allow a query to be built using deferreds', function(done) { - query.findOrCreate() - .where({ name: 'foo' }) - .set({ name: 'bob' }) - .exec(function(err, result) { - assert(!err); - assert(result); - assert(result.name === 'bob'); - done(); + assert(!values.foo); + return done(); }); 
}); }); @@ -112,23 +105,32 @@ describe('Collection Query', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + fetchRecordsOnCreateEach: true, attributes: { - name: 'string', - age: 'integer' + id: { + type: 'number' + }, + name: { + type: 'string' + }, + age: { + type: 'number' + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { return cb(null, []); }, - create: function(con, col, values, cb) { return cb(null, values); } + find: function(con, query, cb) { return cb(null, []); }, + create: function(con, query, cb) { query.newRecord.id = 1; return cb(null, query.newRecord); } }; var connections = { @@ -137,21 +139,27 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should cast values before sending to adapter', function(done) { - query.findOrCreate({ name: 'Foo Bar' }, { name: 'foo', age: '27' }, function(err, values) { - assert(values.name === 'foo'); - assert(values.age === 27); - done(); + query.findOrCreate({ name: 'Foo Bar' }, { name: 'foo', age: '27' }, function(err, values, created) { + if (err) { + return done(err); + } + assert.equal(values.name, 'foo'); + assert.equal(values.age, 27); + assert.equal(created, true); + + return done(); }); }); }); - }); }); diff --git a/test/unit/query/query.findOrCreate.transform.js 
b/test/unit/query/query.findOrCreate.transform.js index 135a6c6e2..05dee8ab2 100644 --- a/test/unit/query/query.findOrCreate.transform.js +++ b/test/unit/query/query.findOrCreate.transform.js @@ -1,43 +1,41 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.findOrCreate()', function() { - describe('with transformed values', function() { - var Model; - - before(function() { - - // Extend for testing purposes - Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - - attributes: { - name: { - type: 'string', - columnName: 'login' - } + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + fetchRecordsOnCreate: true, + fetchRecordsOnCreateEach: true, + attributes: { + id: { + type: 'number' + }, + name: { + type: 'string', + columnName: 'login' } - }); - }); + } + }; it('should transform criteria before sending to adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { - assert(criteria.where.login); + find: function(con, query, cb) { + assert(query.criteria.where.login); return cb(null, []); }, - create: function(con, col, values, cb) { - assert(values.login); - return cb(null, values); + create: function(con, query, cb) { + assert(query.newRecord.login); + query.newRecord.id = 1; + return cb(null, query.newRecord); } }; @@ -47,26 +45,28 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.findOrCreate({ 
where: { name: 'foo' }}, { name: 'foo' }, done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.findOrCreate({ where: { name: 'foo' }}, { name: 'foo' }, done); }); }); it('should transform values before sending to adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { - assert(criteria.where.login); - return cb(null, []); + find: function(con, query, cb) { + assert(query.criteria.where.login); + return cb(undefined, []); }, - create: function(con, col, values, cb) { - assert(values.login); - return cb(null, values); + create: function(con, query, cb) { + assert(query.newRecord.login); + query.newRecord.id = 1; + return cb(undefined, query.newRecord); } }; @@ -76,26 +76,28 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.findOrCreate({ where: { name: 'foo' }}, { name: 'foo' }, done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.findOrCreate({ where: { name: 'foo' }}, { name: 'foo' }, done); }); }); it('should transform values after receiving from adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - find: function(con, col, criteria, cb) { - assert(criteria.where.login); - return cb(null, []); + find: function(con, query, cb) { + assert(query.criteria.where.login); + return cb(undefined, []); }, - create: 
function(con, col, values, cb) { - assert(values.login); - return cb(null, values); + create: function(con, query, cb) { + assert(query.newRecord.login); + query.newRecord.id = 1; + return cb(undefined, query.newRecord); } }; @@ -105,16 +107,22 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.findOrCreate({ where: { name: 'foo' }}, { name: 'foo' }, function(err, values) { + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + orm.collections.user.findOrCreate({ where: { name: 'foo' }}, { name: 'foo' }, function(err, values) { + if (err) { + return done(err); + } + assert(values.name); assert(!values.login); - done(); + return done(); }); }); }); }); - }); }); diff --git a/test/unit/query/query.findOrCreateEach.js b/test/unit/query/query.findOrCreateEach.js deleted file mode 100644 index 3b1012724..000000000 --- a/test/unit/query/query.findOrCreateEach.js +++ /dev/null @@ -1,147 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { - - describe('.findOrCreateEach()', function() { - - describe('with proper values', function() { - var query; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - doSomething: function() {} - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { findOrCreateEach: function(con, col, valuesList, cb) { return cb(null, valuesList); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, 
colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should require an array of criteria', function(done) { - query.findOrCreateEach({}, {}, function(err, values) { - assert(err); - done(); - }); - }); - - it('should require an array of values', function(done) { - query.findOrCreateEach([], {}, function(err, values) { - assert(err); - done(); - }); - }); - - it('should require a valid set of records', function(done) { - query.findOrCreateEach([], [{},'string'], function(err, values) { - assert(err); - done(); - }); - }); - - it('should strip values that don\'t belong to the schema', function(done) { - query.findOrCreateEach([], [{ foo: 'bar' }], function(err, values) { - assert(!values[0].foo); - done(); - }); - }); - - it('should add default values to each record', function(done) { - query.findOrCreateEach([], [{},{}], function(err, values) { - assert(Array.isArray(values)); - assert(values[0].name === 'Foo Bar'); - assert(values[1].name === 'Foo Bar'); - done(); - }); - }); - - it('should add timestamp values to each record', function(done) { - query.findOrCreateEach([], [{},{}], function(err, values) { - assert(values[0].createdAt); - assert(values[0].updatedAt); - assert(values[0].createdAt); - assert(values[1].updatedAt); - done(); - }); - }); - - it('should allow a query to be built using deferreds', function(done) { - query.findOrCreateEach([{ name: 'foo' }]) - .set([{ name: 'bob' }, { name: 'foo'}]) - .exec(function(err, result) { - assert(!err); - assert(result); - assert(result[0].name === 'bob'); - assert(result[1].name === 'foo'); - done(); - }); - }); - }); - - describe('casting values', function() { - var query; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: 'string', - age: 'integer' - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { 
findOrCreateEach: function(con, col, valuesList, cb) { return cb(null, valuesList); }}; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should cast values before sending to adapter', function(done) { - query.findOrCreateEach([], [{ name: 'foo', age: '27' }], function(err, values) { - assert(values[0].name === 'foo'); - assert(values[0].age === 27); - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/query/query.findOrCreateEach.transform.js b/test/unit/query/query.findOrCreateEach.transform.js deleted file mode 100644 index 5c5de7af0..000000000 --- a/test/unit/query/query.findOrCreateEach.transform.js +++ /dev/null @@ -1,83 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { - - describe('.findOrCreateEach()', function() { - - describe('with transformed values', function() { - var Model; - - before(function() { - - // Extend for testing purposes - Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - - attributes: { - name: { - type: 'string', - columnName: 'login' - } - } - }); - }); - - it('should transform values before sending to adapter', function(done) { - - var waterline = new Waterline(); - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - findOrCreateEach: function(con, col, valuesList, cb) { - assert(valuesList[0].login); - return cb(null, valuesList); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.findOrCreateEach([{ where: { name: 'foo' }}], [{ name: 'foo' }], done); - }); - }); - - it('should 
transform values after receiving from adapter', function(done) { - - var waterline = new Waterline(); - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - findOrCreateEach: function(con, col, valuesList, cb) { - assert(valuesList[0].login); - return cb(null, valuesList); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.findOrCreateEach([{}], [{ name: 'foo' }], function(err, values) { - assert(values[0].name); - assert(!values[0].login); - done(); - }); - }); - }); - }); - - }); -}); diff --git a/test/unit/query/query.groupBy.js b/test/unit/query/query.groupBy.js deleted file mode 100644 index 89377b177..000000000 --- a/test/unit/query/query.groupBy.js +++ /dev/null @@ -1,68 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection groupBy', function () { - - describe('.groupBy()', function () { - var query; - - before(function (done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - age: 'integer', - percent: 'float' - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function (con, col, criteria, cb) { - return cb(null, [criteria]); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if (err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should return criteria with group sets', function (done) { - query.find() - .groupBy('age', 'percent') - .exec(function (err, obj) { - if(err) return done(err); - - assert(obj[0].groupBy[0] === 'age'); - assert(obj[0].groupBy[1] === 'percent'); - 
done(); - }); - }); - - it('should accept an array', function (done) { - query.find() - .groupBy(['age', 'percent']) - .exec(function (err, obj) { - if(err) return done(err); - - assert(obj[0].groupBy[0] === 'age'); - assert(obj[0].groupBy[1] === 'percent'); - done(); - }); - }); - - }); -}); diff --git a/test/unit/query/query.max.js b/test/unit/query/query.max.js deleted file mode 100644 index 55db2cfa6..000000000 --- a/test/unit/query/query.max.js +++ /dev/null @@ -1,68 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection sum', function () { - - describe('.min()', function () { - var query; - - before(function (done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - age: 'integer', - percent: 'float' - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function (con, col, criteria, cb) { - return cb(null, [criteria]); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if (err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should return criteria with sum set', function (done) { - query.find() - .sum('age', 'percent') - .exec(function (err, obj) { - if (err) return done(err); - - assert(obj[0].sum[0] === 'age'); - assert(obj[0].sum[1] === 'percent'); - done(); - }); - }); - - it('should accept an array', function (done) { - query.find() - .sum(['age', 'percent']) - .exec(function (err, obj) { - if (err) return done(err); - - assert(obj[0].sum[0] === 'age'); - assert(obj[0].sum[1] === 'percent'); - done(); - }); - }); - - }); -}); diff --git a/test/unit/query/query.min.js b/test/unit/query/query.min.js deleted file mode 100644 index ac306a70d..000000000 --- a/test/unit/query/query.min.js +++ 
/dev/null @@ -1,68 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection sum', function () { - - describe('.max()', function () { - var query; - - before(function (done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - age: 'integer', - percent: 'float' - } - }); - - waterline.loadCollection(Model); - - // Fixture Adapter Def - var adapterDef = { - find: function (con, col, criteria, cb) { - return cb(null, [criteria]); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if (err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should return criteria with sum set', function (done) { - query.find() - .min('age', 'percent') - .exec(function (err, obj) { - if (err) return done(err); - - assert(obj[0].min[0] === 'age'); - assert(obj[0].min[1] === 'percent'); - done(); - }); - }); - - it('should accept an array', function (done) { - query.find() - .min(['age', 'percent']) - .exec(function (err, obj) { - if (err) return done(err); - - assert(obj[0].min[0] === 'age'); - assert(obj[0].min[1] === 'percent'); - done(); - }); - }); - - }); -}); diff --git a/test/unit/query/query.promises.js b/test/unit/query/query.promises.js index a00536686..78bb827dc 100644 --- a/test/unit/query/query.promises.js +++ b/test/unit/query/query.promises.js @@ -1,32 +1,33 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); -describe('Collection Promise', function () { - - describe('.then()', function () { +describe('Collection Promise ::', function() { + describe('.then()', function() { var query; - before(function (done) { - + before(function(done) { var waterline 
= new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' - }, - doSomething: function () {} + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function (con, col, criteria, cb) { - return cb(null, [criteria]); + find: function(con, query, cb) { + return cb(undefined, [{id: 1, criteria: query.criteria}]); } }; @@ -36,42 +37,35 @@ describe('Collection Promise', function () { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if (err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + query = orm.collections.user; + return done(); }); }); - it('should return a promise object', function (done) { - var promise = query.find({}).then(function (obj) { + it('should return a promise object', function(done) { + query.find({}).then(function(obj) { assert(obj); return 'test'; - }).then(function (test) { - assert(test === 'test'); - done(); - }).catch(function (err) { - done(err); + }).then(function(test) { + assert.equal(test, 'test'); + return done(); + }).catch(function(err) { + return done(err); }); }); - it('should reject the promise if the then handler fails', function (done) { - var promise = query.find({}).then(function (obj) { + it('should reject the promise if the then handler fails', function(done) { + query.find({}).then(function() { throw new Error("Error in promise handler"); - }).then(function (unexpected) { - done(new Error("Unexpected success")); - }).catch(function (expected) { - done(); - }); - }); - - it('should reject the promise if the spread handler 
fails', function (done) { - var promise = query.find({}).spread(function (obj) { - throw new Error("Error in promise handler"); - }).then(function (unexpected) { - done(new Error("Unexpected success")); - }).catch(function (expected) { - done(); + }).then(function() { + return done(new Error('Unexpected success')); + }).catch(function() { + return done(); }); }); @@ -79,16 +73,16 @@ describe('Collection Promise', function () { var promise = query.find({}); var prevResult; promise - .then(function(result){ - prevResult = result; - return promise; - }).then(function(result){ - assert.strictEqual(result, prevResult, "Previous and current result should be equal"); - done(); - }) - .catch(function(err){ - done(err); - }); + .then(function(result) { + prevResult = result; + return promise; + }).then(function(result) { + assert.strictEqual(result, prevResult, 'Previous and current result should be equal'); + done(); + }) + .catch(function(err) { + done(err); + }); }); }); }); diff --git a/test/unit/query/query.stream.js b/test/unit/query/query.stream.js index 170137d99..ccbe26904 100644 --- a/test/unit/query/query.stream.js +++ b/test/unit/query/query.stream.js @@ -1,30 +1,43 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.stream()', function() { var query; - before(function(done) { + var records = []; + for (var i = 1; i <= 100; i++) { + records.push({ + id: i, + name: 'user_' + i + }); + } + before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' - }, - doSomething: function() {} + } } }); - 
waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = {}; + var adapterDef = { + find: function(datastore, query, cb) { + return cb(undefined, records.slice(query.criteria.skip, query.criteria.skip + query.criteria.limit)); + } + }; var connections = { 'foo': { @@ -32,23 +45,58 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); - it('should implement a streaming interface', function(done) { + it('should allow streaming a single record at a time', function(done) { + + var sum = 0; + var stream = query.stream({}).eachRecord(function(rec, next) { + sum += rec.id; + return next(); + }).exec(function(err) { + if (err) {return done(err);} + try { + assert.equal(sum, 5050); + } catch (e) {return done(e);} + return done(); + }); + }); - var stream = query.stream({}); + it('should allow streaming a batch of records at a time', function(done) { - // Just test for error now - stream.on('error', function(err) { - assert(err); - done(); + var batch = 0; + var stream = query.stream({}).eachBatch(function(recs, next) { + batch += recs.length; + return next(); + }).exec(function(err) { + if (err) {return done(err);} + try { + assert.equal(batch, 100); + } catch (e) {return done(e);} + return done(); }); + }); + + it('should work correctly with `.skip()` and `.limit()`', function(done) { + var sum = 0; + var stream = query.stream({}).skip(10).limit(50).eachRecord(function(rec, next) { + sum += rec.id; + return next(); + }).exec(function(err) { + if (err) {return done(err);} + try { + assert.equal(sum, 1775); + } catch (e) {return done(e);} + return 
done(); + }); }); }); diff --git a/test/unit/query/query.sum.js b/test/unit/query/query.sum.js index e90339508..e255ea17b 100644 --- a/test/unit/query/query.sum.js +++ b/test/unit/query/query.sum.js @@ -1,29 +1,36 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); -describe('Collection sum', function () { - - describe('.sum()', function () { +describe('Collection Query ::', function() { + describe('.sum()', function() { var query; - before(function (done) { - + before(function(done) { var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { - age: 'integer', - percent: 'float' + id: { + type: 'number' + }, + age: { + type: 'number' + }, + percent: { + type: 'number' + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def var adapterDef = { - find: function (con, col, criteria, cb) { - return cb(null, [criteria]); + sum: function(con, query, cb) { + return cb(undefined, [query]); } }; @@ -33,36 +40,34 @@ describe('Collection sum', function () { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if (err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); - it('should return criteria with sum set', function (done) { - query.find() - .sum('age', 'percent') - .exec(function (err, obj) { - if (err) return done(err); + it('should return criteria with sum set', function(done) { + query.sum('age') + .exec(function(err, obj) { + if (err) { + return 
done(err); + } - assert(obj[0].sum[0] === 'age'); - assert(obj[0].sum[1] === 'percent'); - done(); + assert.equal(_.first(obj).method, 'sum'); + assert.equal(_.first(obj).numericAttrName, 'age'); + return done(); }); }); - it('should accept an array', function (done) { - query.find() - .sum(['age', 'percent']) - .exec(function (err, obj) { - if (err) return done(err); - - assert(obj[0].sum[0] === 'age'); - assert(obj[0].sum[1] === 'percent'); - done(); + it('should NOT accept an array', function(done) { + query.sum(['age', 'percent']) + .exec(function(err) { + assert(err); + return done(); }); }); - }); }); diff --git a/test/unit/query/query.update.js b/test/unit/query/query.update.js index de03e2c82..06db5a040 100644 --- a/test/unit/query/query.update.js +++ b/test/unit/query/query.update.js @@ -1,36 +1,39 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.update()', function() { - describe('with proper values', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { + id: { + type: 'number' + }, name: { type: 'string', defaultsTo: 'Foo Bar' }, age: { - type: 'integer', - required: true + type: 'number' }, - doSomething: function() {} + updatedAt: { + type: 'number', + autoUpdatedAt: true + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { update: function(con, col, criteria, values, cb) { return cb(null, [values]); }}; + var adapterDef = { update: function(con, query, cb) { query.valuesToSet.id = 1; return cb(null, [query.valuesToSet]); }}; var connections = { 'foo': { @@ 
-38,73 +41,93 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + query = orm.collections.user; + return done(); }); }); it('should change the updatedAt timestamp', function(done) { query.update({}, { name: 'foo' }, function(err, status) { + if (err) { + return done(err); + } + assert(status[0].updatedAt); - done(); - }); + return done(); + }, { fetch: true }); }); it('should set values', function(done) { query.update({}, { name: 'foo' }, function(err, status) { - assert(status[0].name === 'foo'); - done(); - }); + if (err) { + return done(err); + } + + assert.equal(status[0].name, 'foo'); + return done(); + }, { fetch: true }); }); it('should strip values that don\'t belong to the schema', function(done) { query.update({}, { foo: 'bar' }, function(err, values) { - assert(!values.foo); - done(); - }); - }); + if (err) { + return done(err); + } - it('should return an instance of Model', function(done) { - query.update({}, { name: 'foo' }, function(err, status) { - assert(typeof status[0].doSomething === 'function'); - done(); - }); + assert(!values.foo); + return done(); + }, { fetch: true }); }); it('should allow a query to be built using deferreds', function(done) { query.update() .where({}) .set({ name: 'foo' }) + .meta({ + fetch: true + }) .exec(function(err, results) { - assert(!err); - assert(results[0].name === 'foo'); - done(); + if (err) { + return done(err); + } + + assert.equal(results[0].name, 'foo'); + return done(); }); }); - }); describe('casting values', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 
'user', - connection: 'foo', + datastore: 'foo', + primaryKey: 'id', attributes: { - name: 'string', - age: 'integer' + id: { + type: 'number' + }, + name: { + type: 'string' + }, + age: { + type: 'number' + } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture Adapter Def - var adapterDef = { update: function(con, col, criteria, values, cb) { return cb(null, [values]); }}; + var adapterDef = { update: function(con, query, cb) { query.valuesToSet.id = 1; return cb(null, [query.valuesToSet]); }}; var connections = { 'foo': { @@ -112,19 +135,25 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + query = orm.collections.user; + return done(); }); }); it('should cast values before sending to adapter', function(done) { query.update({}, { name: 'foo', age: '27' }, function(err, values) { - assert(values[0].name === 'foo'); - assert(values[0].age === 27); - done(); - }); + if (err) { + return done(err); + } + + assert.equal(values[0].name, 'foo'); + assert.equal(values[0].age, 27); + return done(); + }, { fetch: true }); }); }); @@ -132,30 +161,27 @@ describe('Collection Query', function() { var query; before(function(done) { - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ + var Model = Waterline.Model.extend({ identity: 'user', - connection: 'foo', - autoPK: false, + datastore: 'foo', + primaryKey: 'myPk', attributes: { name: { type: 'string', defaultsTo: 'Foo Bar' }, myPk: { - type: 'integer', - primaryKey: true, - columnName: 'pkColumn', - defaultsTo: 1 + type: 'number', + columnName: 'pkColumn' } } }); - waterline.loadCollection(Model); + waterline.registerModel(Model); // Fixture 
Adapter Def - var adapterDef = { update: function(con, col, criteria, values, cb) { return cb(null, [criteria]); }}; + var adapterDef = { update: function(con, query, cb) { return cb(null, [{myPk: 1, criteria: query.criteria}]); }}; var connections = { 'foo': { @@ -163,22 +189,26 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) done(err); - query = colls.collections.user; - done(); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + query = orm.collections.user; + return done(); }); }); - it('should use the custom primary key when a single value is passed in', function(done) { query.update(1, { name: 'foo' }, function(err, values) { - assert(!err); - assert(values[0].where.pkColumn === 1); - done(); - }); + if (err) { + return done(err); + } + + assert.equal(values[0].criteria.where.pkColumn, 1); + return done(); + }, { fetch: true }); }); }); - }); }); diff --git a/test/unit/query/query.update.nested.js b/test/unit/query/query.update.nested.js deleted file mode 100644 index fd30059fb..000000000 --- a/test/unit/query/query.update.nested.js +++ /dev/null @@ -1,333 +0,0 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { - - describe('.update()', function() { - - describe('with nested model values', function() { - var query; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - nestedModel: { - model: 'nested' - } - } - }); - var Nested = Waterline.Collection.extend({ - identity: 'nested', - connection: 'foo', - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(Model); - 
waterline.loadCollection(Nested); - - // Fixture Adapter Def - var _id = 1; - var findValues = []; - - var adapterDef = { - update: function(con, col, criteria, values, cb) { - values.id = _id; - findValues.push(values); - _id++; - return cb(null, values); - }, - find: function(con, col, criteria, cb) { - cb(null, findValues[_id - 1]); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should reduce the nested object down to a foreign key', function(done) { - query.update({}, { name: 'foo', nestedModel: { id: 1337, name: 'joe' }}, function(err, status) { - assert(!err, err); - assert(status[0].nestedModel); - assert(status[0].nestedModel === 1337); - done(); - }); - }); - }); - - describe('with nested model values (create)', function() { - var query; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - nestedModel: { - model: 'nested' - }, - nestedModel2: { - model: 'nested' - } - } - }); - var Nested = Waterline.Collection.extend({ - identity: 'nested', - connection: 'foo', - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(Model); - waterline.loadCollection(Nested); - - // Fixture Adapter Def - var _id = 1; - var findValues = []; - - var adapterDef = { - create: function(con, col, values, cb) { - values.id = _id; - findValues.push(values); - _id++; - return cb(null, values); - }, - update: function(con, col, criteria, values, cb) { - values.id = _id; - findValues.push(values); - _id++; - return cb(null, values); - }, - find: function(con, col, criteria, cb) { - cb(null, findValues[_id - 1]); - } - }; - - var connections = { - 'foo': { - adapter: 
'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should reduce the newly created nested object down to two foreign keys', function(done) { - query.update({}, { name: 'foo', nestedModel: { name: 'joe' }, nestedModel2: { name: 'jane' } }, function(err, status) { - assert(!err, err); - assert(status[0].nestedModel); - assert(status[0].nestedModel === 1); - assert(status[0].nestedModel2); - assert(status[0].nestedModel2 === 2); - done(); - }); - }); - }); - - describe('with nested model values (create, asynchronous adapter)', function() { - var query; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - nestedModel: { - model: 'nested' - }, - nestedModel2: { - model: 'nested' - } - } - }); - var Nested = Waterline.Collection.extend({ - identity: 'nested', - connection: 'foo', - attributes: { - name: 'string' - } - }); - - waterline.loadCollection(Model); - waterline.loadCollection(Nested); - - // Fixture Adapter Def - var _id = 1; - var findValues = []; - - var adapterDef = { - create: function(con, col, values, cb) { - process.nextTick(function() { - values.id = _id; - findValues.push(values); - _id++; - return cb(null, values); - }); - }, - update: function(con, col, criteria, values, cb) { - process.nextTick(function() { - values.id = _id; - findValues.push(values); - _id++; - return cb(null, values); - }); - }, - find: function(con, col, criteria, cb) { - process.nextTick(function() { - cb(null, findValues[_id - 1]); - }); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return 
done(err); - query = colls.collections.user; - done(); - }); - }); - - it('should call back only once and reduce the newly created nested object down to two foreign keys', function(done) { - var count = 0; - query.update({}, { name: 'foo', nestedModel: { name: 'joe' }, nestedModel2: { name: 'jane' } }, function(err, status) { - assert(++count === 1); - assert(!err, err); - assert(status[0].nestedModel); - assert(status[0].nestedModel2); - assert.deepEqual([status[0].nestedModel, status[0].nestedModel2].sort(), [1, 2]); - done(); - }); - }); - }); - - describe('with nested collection values', function() { - var query, updatedModels = []; - - before(function(done) { - - var waterline = new Waterline(); - var Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - attributes: { - name: { - type: 'string', - defaultsTo: 'Foo Bar' - }, - nestedModels: { - collection: 'nested', - via: 'model' - } - } - }); - var Nested = Waterline.Collection.extend({ - identity: 'nested', - connection: 'foo', - attributes: { - name: 'string', - model: { - model: 'user' - } - } - }); - - waterline.loadCollection(Model); - waterline.loadCollection(Nested); - - // Fixture Adapter Def - var _id = 1; - var findValues = []; - - var adapterDef = { - update: function(con, col, criteria, values, cb) { - updatedModels.push(criteria.where); - values.id = _id; - findValues.push(values); - _id++; - return cb(null, [values]); - }, - - find: function(con, col, criteria, cb) { - cb(null, findValues[_id - 1]); - } - }; - - var connections = { - 'foo': { - adapter: 'foobar' - } - }; - - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - query = colls.collections.user; - done(); - }); - }); - - - // - // TO-DO: - // Make this not use a shit load of queries. (currently 10)! 
- // - - it('should attempt to update each nested model', function(done) { - - var nestedModels = [ - { id: 1337, name: 'joe', model: 2 }, - { id: 1338, name: 'moe', model: 3 }, - { id: 1339, name: 'flow', model: 4 } - ]; - - query.update({}, { id: 5, name: 'foo', nestedModels: nestedModels }, function(err, status) { - assert(!err, err); - assert(status[0].nestedModels.length === 0); - assert(updatedModels.length === 10); - done(); - }); - }); - }); - - }); -}); diff --git a/test/unit/query/query.update.transform.js b/test/unit/query/query.update.transform.js index 1cb1e83cf..f10835c56 100644 --- a/test/unit/query/query.update.transform.js +++ b/test/unit/query/query.update.transform.js @@ -1,39 +1,35 @@ -var Waterline = require('../../../lib/waterline'), - assert = require('assert'); - -describe('Collection Query', function() { +var assert = require('assert'); +var _ = require('@sailshq/lodash'); +var Waterline = require('../../../lib/waterline'); +describe('Collection Query ::', function() { describe('.update()', function() { - describe('with transformed values', function() { - var Model; - - before(function() { - - // Extend for testing purposes - Model = Waterline.Collection.extend({ - identity: 'user', - connection: 'foo', - - attributes: { - name: { - type: 'string', - columnName: 'login' - } + var modelDef = { + identity: 'user', + datastore: 'foo', + primaryKey: 'id', + attributes: { + id: { + type: 'number' + }, + name: { + type: 'string', + columnName: 'login' } - }); - }); + } + }; - it('should transform criteria before sending to adapter', function(done) { + it('should transform criteria before sending to adapter', function(done) { var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - update: function(con, col, criteria, values, cb) { - assert(criteria.where.login); - return cb(null, [values]); + update: function(con, 
query, cb) { + assert(query.criteria.where.login); + return cb(undefined); } }; @@ -43,22 +39,23 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.update({ where: { name: 'foo' }}, { name: 'foo' }, done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.update({ where: { name: 'foo' }}, { name: 'foo' }, done); }); }); it('should transform values before sending to adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - update: function(con, col, criteria, values, cb) { - assert(values.login); - return cb(null, [values]); + update: function(con, query, cb) { + assert(query.valuesToSet.login); + return cb(undefined); } }; @@ -68,22 +65,24 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.update({ where: { name: 'foo' }}, { name: 'foo' }, done); + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + orm.collections.user.update({ where: { name: 'foo' }}, { name: 'foo' }, done); }); }); it('should transform values after receiving from adapter', function(done) { - var waterline = new Waterline(); - waterline.loadCollection(Model); + waterline.registerModel(Waterline.Model.extend(_.extend({}, modelDef))); // Fixture Adapter Def var adapterDef = { - update: function(con, col, criteria, values, cb) { - assert(values.login); - return cb(null, [values]); + update: function(con, query, cb) { + 
assert(query.valuesToSet.login); + query.valuesToSet.id = 1; + return cb(undefined, [query.valuesToSet]); } }; @@ -93,16 +92,22 @@ describe('Collection Query', function() { } }; - waterline.initialize({ adapters: { foobar: adapterDef }, connections: connections }, function(err, colls) { - if(err) return done(err); - colls.collections.user.update({}, { name: 'foo' }, function(err, values) { + waterline.initialize({ adapters: { foobar: adapterDef }, datastores: connections }, function(err, orm) { + if (err) { + return done(err); + } + + orm.collections.user.update({}, { name: 'foo' }, function(err, values) { + if (err) { + return done(err); + } + assert(values[0].name); assert(!values[0].login); - done(); - }); + return done(); + }, { fetch: true }); }); }); }); - }); }); diff --git a/test/unit/utils/reduceAssociations.js b/test/unit/utils/reduceAssociations.js deleted file mode 100644 index 7fa484dc8..000000000 --- a/test/unit/utils/reduceAssociations.js +++ /dev/null @@ -1,34 +0,0 @@ -var util = require('util'); -var assert = require('assert'); -var reduceAssociations = require('../../../lib/waterline/utils/nestedOperations/reduceAssociations'); - - -describe('nestedOperations / reduceAsscociations', function () { - - // Identity of model - var model = 'foo'; - - // Our schema (all the models) represented as an object - var schema = { - foo: { - attributes: { - // put nothing in here - } - } - }; - - // Values (an object of properties passed in by a user for a create or update) - var values = { - name: 'Rob Weasley', - age: 45, - email: 'rob@hogwarts.edu' - }; - - it('should not throw when the values reference non-existent attributes', function () { - - assert.doesNotThrow(function () { - var result = reduceAssociations(model, schema, values); - }, util.format('`utils/nestedOperations/reduceAssociations.js` should not throw when `values` specifies an attribute which doesn\'t exist in schema')); - - }); -}); diff --git a/test/unit/utils/utils.acyclicTraversal.js 
b/test/unit/utils/utils.acyclicTraversal.js deleted file mode 100644 index bd1032f9b..000000000 --- a/test/unit/utils/utils.acyclicTraversal.js +++ /dev/null @@ -1,57 +0,0 @@ -var assert = require('assert'), - traverse = require('../../../lib/waterline/utils/acyclicTraversal'); - -describe('utils/acyclicTraversal', function() { - - describe('schema', function() { - - var schema = { - user: { - attributes: { - name: 'string', - age: 'integer', - pets: { - collection: 'pet', - via: 'owner' - }, - formerPets: { - collection: 'user', - via: 'formerOwners' - } - } - }, - pet: { - attributes: { - name: 'string', - breed: 'string', - owner: { - model: 'user' - }, - formerOwners: { - collection: 'user', - via: 'formerPets' - } - } - } - }; - - it('should return a .populate() plan', function() { - var plan = traverse(schema, 'user', 'pets'); - assert(typeof plan === 'object'); - }); - - it('should include distinct associations (i.e. `formerOwners`)', function () { - var plan = traverse(schema, 'user', 'pets'); - assert(typeof plan.formerOwners === 'object'); - }); - it('should NOT include already-traversed back-references (i.e. `owner`)', function () { - var plan = traverse(schema, 'user', 'pets'); - assert(typeof plan.owner === 'undefined'); - }); - it('should NOT include already-traversed associations (i.e. 
`pets`)', function () { - var plan = traverse(schema, 'user', 'pets'); - assert(typeof plan.formerOwners.pets === 'undefined'); - }); - }); - -}); diff --git a/test/unit/utils/utils.normalize.js b/test/unit/utils/utils.normalize.js deleted file mode 100644 index 01f26e99c..000000000 --- a/test/unit/utils/utils.normalize.js +++ /dev/null @@ -1,66 +0,0 @@ -var assert = require('assert'), - normalize = require('../../../lib/waterline/utils/normalize'); - -describe("Normalize utility", function() { - - describe(".criteria()", function() { - - describe("sort", function() { - it("should default to asc", function() { - var criteria = normalize.criteria({ sort: "name" }); - - assert(criteria.sort.name === 1); - }); - - it("should throw error on invalid order", function() { - var error; - - try { - normalize.criteria({ sort: "name up" }); - } catch(e) { - error = e; - } - - assert(typeof error !== 'undefined'); - }); - - it("should properly normalize valid sort", function() { - var criteria = normalize.criteria({ sort: "name desc" }); - - assert(criteria.sort.name === -1); - }); - - it("should properly normalize valid sort with upper case", function() { - var criteria = normalize.criteria({ sort: "name DESC" }); - - assert(criteria.sort.name === -1); - }); - }); - - describe("sort object", function() { - it("should throw error on invalid order", function() { - var error; - - try { - normalize.criteria({ sort: { name: "up" } }); - } catch(e) { - error = e; - } - - assert(typeof error !== 'undefined'); - }); - - it("should properly normalize valid sort", function() { - var criteria = normalize.criteria({ sort: { name: "asc" } }); - assert(criteria.sort.name === 1); - }); - - it("should properly normalize valid sort with upper case", function() { - var criteria = normalize.criteria({ sort: { name: "DESC" } }); - assert(criteria.sort.name === -1); - }); - }); - - }); - -}); \ No newline at end of file diff --git a/test/unit/utils/utils.schema.js 
b/test/unit/utils/utils.schema.js deleted file mode 100644 index b7ac949f8..000000000 --- a/test/unit/utils/utils.schema.js +++ /dev/null @@ -1,223 +0,0 @@ -var utils = require('../../../lib/waterline/utils/schema'), - assert = require('assert'); - -describe('Schema utilities', function() { - - describe('`normalizeAttributes`', function() { - - describe('with shorthand attributes', function() { - var attributes; - - before(function() { - attributes = { - first_name: 'STRING', - last_name: 'STRING' - }; - - attributes = utils.normalizeAttributes(attributes); - }); - - it('should normalize attributes to objects', function() { - assert(typeof attributes.first_name === 'object'); - assert(typeof attributes.last_name === 'object'); - }); - - it('should lowercase attribute types', function() { - assert(attributes.first_name.type === 'string'); - assert(attributes.last_name.type === 'string'); - }); - }); - - describe('with object attributes', function() { - var attributes; - - before(function() { - attributes = { - first_name: { - type: 'STRING', - required: true - }, - last_name: { - type: 'STRING', - required: false - } - }; - - attributes = utils.normalizeAttributes(attributes); - }); - - it('should normalize attributes to objects', function() { - assert(typeof attributes.first_name === 'object'); - assert(typeof attributes.last_name === 'object'); - }); - - it('should retain other properties', function() { - assert(typeof attributes.first_name.required !== 'undefined'); - assert(typeof attributes.last_name.required !== 'undefined'); - }); - - it('should lowercase attribute types', function() { - assert(attributes.first_name.type === 'string'); - assert(attributes.last_name.type === 'string'); - }); - }); - }); - - describe('`instanceMethods`', function() { - var methods; - - before(function() { - var attributes = { - first_name: 'STRING', - last_name: 'string', - age: function() { - return Math.floor(Math.random() + 1 * 10); - }, - full_name: function() { - return 
this.first_name + ' ' + this.last_name; - } - }; - - methods = utils.instanceMethods(attributes); - }); - - it('should return instance methods from attributes', function() { - assert(typeof methods.age === 'function'); - assert(typeof methods.full_name === 'function'); - }); - }); - - describe('`normalizeCallbacks`', function() { - - describe('with callbacks as function', function() { - var callbacks; - - before(function() { - var model = { - attributes: { - first_name: 'STRING', - last_name: 'string' - }, - afterCreate: function() {}, - beforeCreate: function() {} - }; - - callbacks = utils.normalizeCallbacks(model); - }); - - it('should normalize to callback array', function() { - assert(Array.isArray(callbacks.afterCreate)); - assert(Array.isArray(callbacks.beforeCreate)); - }); - }); - - describe('with callbacks as array of functions', function() { - var callbacks; - - before(function() { - var model = { - attributes: { - first_name: 'STRING', - last_name: 'string' - }, - afterCreate: [ - function() {} - ], - beforeCreate: [ - function() {}, - function() {} - ] - }; - - callbacks = utils.normalizeCallbacks(model); - }); - - it('should normalize to callback array', function() { - assert(Array.isArray(callbacks.afterCreate)); - assert(Array.isArray(callbacks.beforeCreate)); - }); - - it('should retain all callback functions', function() { - assert(callbacks.afterCreate.length === 1); - assert(callbacks.beforeCreate.length === 2); - }); - }); - - describe('with callbacks as strings', function() { - var fn_1, fn_2, callbacks; - - before(function() { - var model; - - fn_1 = function() { - this.age = this.age || this.age++; - }; - - fn_2 = function() { - this.first_name = this.first_name.toLowerCase(); - }; - - model = { - attributes: { - first_name: 'STRING', - last_name: 'string', - increment_age: fn_1, - lowerize_first_name: fn_2 - }, - afterCreate: 'lowerize_first_name', - beforeCreate: 'increment_age' - }; - - callbacks = utils.normalizeCallbacks(model); - }); - 
- it('should normalize to callback array', function() { - assert(Array.isArray(callbacks.afterCreate)); - assert(Array.isArray(callbacks.beforeCreate)); - }); - - it('should map all callback functions', function() { - assert(callbacks.afterCreate[0] === fn_2); - assert(callbacks.beforeCreate[0] === fn_1); - }); - }); - - describe('with callbacks as an array of strings', function() { - var fn_1, fn_2, callbacks; - - before(function() { - var model; - - fn_1 = function() { - this.age = this.age || this.age++; - }; - - fn_2 = function() { - this.first_name = this.first_name.toLowerCase(); - }; - - model = { - attributes: { - first_name: 'STRING', - last_name: 'string', - increment_age: fn_1, - lowerize_first_name: fn_2 - }, - afterCreate: ['increment_age', 'lowerize_first_name'] - }; - - callbacks = utils.normalizeCallbacks(model); - }); - - it('should normalize to callback array', function() { - assert(Array.isArray(callbacks.afterCreate)); - }); - - it('should map all callback functions', function() { - assert(callbacks.afterCreate[0] === fn_1); - assert(callbacks.afterCreate[1] === fn_2); - }); - }); - }); - -}); diff --git a/test/unit/validations/validation.enum.js b/test/unit/validations/validation.enum.js deleted file mode 100644 index ce8060e5c..000000000 --- a/test/unit/validations/validation.enum.js +++ /dev/null @@ -1,41 +0,0 @@ -var Validator = require('../../../lib/waterline/core/validations'), - assert = require('assert'); - -describe('validations', function() { - - describe('enum', function() { - var validator; - - before(function() { - - var validations = { - sex: { - type: 'string', - in: ['male', 'female'] - } - }; - - validator = new Validator(); - validator.initialize(validations); - }); - - it('should error if invalid enum is set', function(done) { - validator.validate({ sex: 'other' }, function(err, errors) { - assert(!err, err); - assert(errors); - assert(errors.sex); - assert(errors.sex[0].rule === 'in'); - done(); - }); - }); - - it('should NOT 
error if valid enum is set', function(done) { - validator.validate({ sex: 'male' }, function(err, errors) { - assert(!err, err); - assert(!errors); - done(); - }); - }); - }); - -}); diff --git a/test/unit/validations/validations.function.js b/test/unit/validations/validations.function.js deleted file mode 100644 index 051022255..000000000 --- a/test/unit/validations/validations.function.js +++ /dev/null @@ -1,72 +0,0 @@ -var Validator = require('../../../lib/waterline/core/validations'), - assert = require('assert'); - -describe('validations', function() { - - describe('with a function as the rule value', function() { - var validator; - - before(function() { - - var validations = { - name: { - type: 'string', - }, - username: { - type: 'string', - equals: function() { - return this.name.toLowerCase(); - } - }, - website: { - type: 'string', - contains: function(cb) { - setTimeout(function() { - return cb('http://'); - },1); - } - } - }; - - validator = new Validator(); - validator.initialize(validations); - }); - - it('should error if invalid username is set', function(done) { - validator.validate({ name: 'Bob', username: 'bobby' }, function(err, errors) { - assert(!err); - assert(errors); - assert(errors.username); - assert(errors.username[0].rule === 'equals'); - done(); - }); - }); - - it('should NOT error if valid username is set', function(done) { - validator.validate({ name: 'Bob', username: 'bob' }, function(err, errors) { - assert(!err, err); - assert(!errors); - done(); - }); - }); - - it('should error if invalid website is set', function(done) { - validator.validate({ website: 'www.google.com' }, function(err, errors) { - assert(!err, err); - assert(errors); - assert(errors.website); - assert(errors.website[0].rule === 'contains'); - done(); - }); - }); - - it('should NOT error if valid website is set', function(done) { - validator.validate({ website: 'http://www.google.com' }, function(err, errors) { - assert(!err, err); - assert(!errors); - done(); - 
}); - }); - }); - -}); diff --git a/test/unit/validations/validations.ignoreProperties.js b/test/unit/validations/validations.ignoreProperties.js deleted file mode 100644 index 8cd2ce98d..000000000 --- a/test/unit/validations/validations.ignoreProperties.js +++ /dev/null @@ -1,94 +0,0 @@ -var Validator = require('../../../lib/waterline/core/validations'), - assert = require('assert'); - -describe('validations', function() { - - describe('special types', function() { - var validator; - - before(function() { - - var validations = { - name: { - type: 'string' - }, - email: { - type: 'email', - special: true - }, - cousins: { - collection: 'related', - via: 'property', - async: true - } - }; - - var defaults = { - ignoreProperties: ['async', 'special'] - }; - - validator = new Validator(); - validator.initialize(validations); - - customValidator = new Validator(); - customValidator.initialize(validations, {}, defaults); - }); - - it('custom validator should validate email type', function(done) { - customValidator.validate({ - email: 'foobar@gmail.com' - }, function(err, errors) { - if (err) { - return done(err); - } - assert(!errors); - done(); - }); - }); - - it('custom validator should validate collection type', function(done) { - customValidator.validate({ - cousins: [] - }, function(err, errors) { - if (err) { - return done(err); - } - assert(!errors); - done(); - }); - }); - - it('standard validator should error with unrecognized properties', function(done) { - validator.validate({ - email: 'foobar@gmail.com' - }, function(err, errors) { - if (err) { - if ((err instanceof Error) && /Unknown rule: special/im.test(err)) { - return done(); - } - else { - return done(err); - } - } - return done(new Error('Expected fatal error due to unknown "special" validation rule.')); - }); - });// - - it('standard validator should error with unrecognized properties in an association', function(done) { - validator.validate({ - cousins: [] - }, function(err, errors) { - if (err) { - 
if ((err instanceof Error) && /Unknown rule: async/im.test(err)) { - return done(); - } - else { - return done(err); - } - } - return done(new Error('Expected fatal error due to unknown "async" validation rule.')); - }); - });// - - }); -}); diff --git a/test/unit/validations/validations.length.js b/test/unit/validations/validations.length.js deleted file mode 100644 index e9297779e..000000000 --- a/test/unit/validations/validations.length.js +++ /dev/null @@ -1,79 +0,0 @@ -var Validator = require('../../../lib/waterline/core/validations'), - assert = require('assert'); - -describe('validations', function() { - - describe('lengths', function() { - var validator; - - before(function() { - - var validations = { - firstName: { - type: 'string', - minLength: 2 - }, - lastName: { - type: 'string', - maxLength: 5 - } - }; - - validator = new Validator(); - validator.initialize(validations); - }); - - describe('minLength', function() { - - it('should validate minLength', function (done) { - validator.validate({ firstName: 'foo' }, function (err, validationErrors) { - if (err) { return done(err); } - try { - assert(!validationErrors); - return done(); - } - catch (e) {return done(e);} - }); - }); - - it('should error if length is shorter', function(done) { - validator.validate({ firstName: 'f' }, function (err, validationErrors) { - if (err) { return done(err); } - try { - assert(validationErrors); - assert(validationErrors.firstName); - return done(); - } - catch (e) {return done(e);} - }); - }); - }); - - describe('maxLength', function() { - - it('should validate maxLength', function(done) { - validator.validate({ lastName: 'foo' }, function (err, validationErrors) { - if (err) { return done(err); } - try { - assert(!validationErrors); - return done(); - } - catch (e) {return done(e);} - }); - }); - - it('should error if length is longer', function(done) { - validator.validate({ lastName: 'foobar' }, function (err, validationErrors) { - if (err) { return done(err); } - 
try { - assert(validationErrors); - assert(validationErrors.lastName); - return done(); - } - catch (e) {return done(e);} - }); - }); - }); - - }); -}); diff --git a/test/unit/validations/validations.required.js b/test/unit/validations/validations.required.js deleted file mode 100644 index f0a3cb17e..000000000 --- a/test/unit/validations/validations.required.js +++ /dev/null @@ -1,99 +0,0 @@ -var Validator = require('../../../lib/waterline/core/validations'), - assert = require('assert'); - -describe('validations', function() { - - describe('required', function() { - var validator; - - before(function() { - - var validations = { - name: { - type: 'string', - required: true - }, - employed: { - type: 'boolean', - required: true - }, - age: { type: 'integer' }, - email: { - type: 'email', - required: false - } - }; - - validator = new Validator(); - validator.initialize(validations); - }); - - it('should error if no value is set for required string field', function(done) { - validator.validate({ name: '', employed: true, age: 27 }, function (err, validationErrors) { - if (!err) { return done(err); } - assert(validationErrors); - assert(validationErrors.name); - assert(validationErrors.name[0].rule === 'required'); - done(); - }); - }); - - it('should error if no value is set for required boolean field', function(done) { - validator.validate({ name: 'Frederick P. Frederickson', age: 27 }, function (err, validationErrors) { - if (!err) { return done(err); } - assert(validationErrors); - assert(validationErrors.employed); - assert(validationErrors.employed[0].rule === 'boolean'); - assert(validationErrors.employed[1].rule === 'required'); - done(); - }); - }); - - it('should error if no value is set for required boolean field', function(done) { - validator.validate({ name: 'Frederick P. 
Frederickson', age: 27 }, function (err, validationErrors) { - if (!err) { return done(err); } - assert(validationErrors); - assert(validationErrors.employed); - assert(validationErrors.employed[0].rule === 'boolean'); - assert(validationErrors.employed[1].rule === 'required'); - done(); - }); - }); - - it('should NOT error if all required values are set', function(done) { - validator.validate({ name: 'Foo Bar', employed: true, age: 27 }, function (err, validationErrors) { - if (!err) { return done(err); } - assert(!validationErrors); - done(); - }); - }); - - it('should NOT error if required is false and values are valid', function(done) { - validator.validate({ name: 'Foo Bar', employed: true, email: 'email@example.com' }, function (err, validationErrors) { - if (!err) { return done(err); } - assert(!validationErrors); - done(); - }); - }); - - it('should NOT error if required is false and value is not present', function(done) { - validator.validate({ name: 'Foo Bar', employed: true }, function (err, validationErrors) { - if (!err) { return done(err); } - assert(!validationErrors); - done(); - }); - }); - - it('should error if required is false and value is invalid', function(done) { - validator.validate({ name: 'Frederick P. 
Frederickson', employed: true, email: 'not email' }, function (err, validationErrors) { - if (!err) { return done(err); } - assert(validationErrors); - assert(validationErrors.email); - assert.equal(validationErrors.email[0].rule, 'email'); - done(); - }); - }); - - }); - -}); diff --git a/test/unit/validations/validations.specialTypes.js b/test/unit/validations/validations.specialTypes.js deleted file mode 100644 index f414eac93..000000000 --- a/test/unit/validations/validations.specialTypes.js +++ /dev/null @@ -1,40 +0,0 @@ -var Validator = require('../../../lib/waterline/core/validations'), - assert = require('assert'); - -describe('validations', function() { - - describe('special types', function() { - var validator; - - before(function() { - - var validations = { - name: { type: 'string' }, - age: { type: 'integer' }, - email: { type: 'email' } - }; - - validator = new Validator(); - validator.initialize(validations); - }); - - it('should validate email type', function(done) { - validator.validate({ email: 'foobar@gmail.com' }, function (err, validationErrors) { - if (err) { return done(err); } - assert(!validationErrors); - done(); - }); - }); - - it('should error if incorrect email is passed', function(done) { - validator.validate({ email: 'foobar' }, function (err, validationErrors) { - if (err) { return done(err); } - assert(validationErrors); - assert(validationErrors.email); - done(); - }); - }); - - }); - -}); diff --git a/test/unit/validations/validations.type.js b/test/unit/validations/validations.type.js deleted file mode 100644 index 201c7211b..000000000 --- a/test/unit/validations/validations.type.js +++ /dev/null @@ -1,47 +0,0 @@ -var Validator = require('../../../lib/waterline/core/validations'), - assert = require('assert'); - -describe('validations', function() { - - describe('types', function() { - var validator; - - before(function() { - - var validations = { - name: { type: 'string' }, - age: { type: 'integer' } - }; - - validator = new 
Validator(); - validator.initialize(validations); - }); - - it('should validate string type', function(done) { - validator.validate({ name: 'foo bar' }, function (err, validationErrors) { - if (err) { return done(err); } - assert(!validationErrors); - done(); - }); - }); - - it('should validate integer type', function(done) { - validator.validate({ age: 27 }, function (err, validationErrors) { - if (err) { return done(err); } - assert(!validationErrors); - done(); - }); - }); - - it('should error if string passed to integer type', function(done) { - validator.validate({ age: 'foo bar' }, function (err, validationErrors) { - if (err) { return done(err); } - assert(validationErrors); - assert(validationErrors.age); - done(); - }); - }); - - }); - -});