diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..c2658d7d
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/.editorconfig b/.editorconfig
index 0f099897..98a4353f 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -1,4 +1,12 @@
-# editorconfig.org
+# ╔═╗╔╦╗╦╔╦╗╔═╗╦═╗┌─┐┌─┐┌┐┌┌─┐┬┌─┐
+# ║╣ ║║║ ║ ║ ║╠╦╝│ │ ││││├┤ ││ ┬
+# o╚═╝═╩╝╩ ╩ ╚═╝╩╚═└─┘└─┘┘└┘└ ┴└─┘
+#
+# This file (`.editorconfig`) exists to help maintain consistent formatting
+# throughout this package, the Sails framework, and the Node-Machine project.
+#
+# To review what each of these options means, see:
+# http://editorconfig.org/
root = true
[*]
diff --git a/.eslintrc b/.eslintrc
new file mode 100644
index 00000000..1eeec9cc
--- /dev/null
+++ b/.eslintrc
@@ -0,0 +1,71 @@
+{
+ // ╔═╗╔═╗╦ ╦╔╗╔╔╦╗┬─┐┌─┐
+ // ║╣ ╚═╗║ ║║║║ ║ ├┬┘│
+ // o╚═╝╚═╝╩═╝╩╝╚╝ ╩ ┴└─└─┘
+ // A set of basic conventions (similar to .jshintrc) for use within any
+ // arbitrary JavaScript / Node.js package -- inside or outside Sails.js.
+ // For the master copy of this file, see the `.eslintrc` template file in
+ // the `sails-generate` package (https://www.npmjs.com/package/sails-generate).
+ // Designed for ESLint v4.
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+ // For more information about any of the rules below, check out the relevant
+ // reference page on eslint.org. For example, to get details on "no-sequences",
+ // you would visit `http://eslint.org/docs/rules/no-sequences`. If you're unsure
+ // or could use some advice, come by https://sailsjs.com/support.
+ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
+
+ "env": {
+ "node": true
+ },
+
+ "parserOptions": {
+ "ecmaVersion": 5
+ // ^^This can be changed to `8` if this package doesn't need to support <= Node v6.
+ },
+
+ "globals": {
+ "Promise": true
+ // ^^Available since Node v4
+ },
+
+ "rules": {
+ "callback-return": ["error", ["done", "proceed", "next", "onwards", "callback", "cb"]],
+ "camelcase": ["warn", {"properties": "always"}],
+ "comma-style": ["warn", "last"],
+ "curly": ["error"],
+ "eqeqeq": ["error", "always"],
+ "eol-last": ["warn"],
+ "handle-callback-err": ["error"],
+ "indent": ["warn", 2, {
+ "SwitchCase": 1,
+ "MemberExpression": "off",
+ "FunctionDeclaration": {"body":1, "parameters": "off"},
+ "FunctionExpression": {"body":1, "parameters": "off"},
+ "CallExpression": {"arguments":"off"},
+ "ArrayExpression": 1,
+ "ObjectExpression": 1,
+ "ignoredNodes": ["ConditionalExpression"]
+ }],
+ "linebreak-style": ["error", "unix"],
+ "no-dupe-keys": ["error"],
+ "no-duplicate-case": ["error"],
+ "no-extra-semi": ["warn"],
+ "no-labels": ["error"],
+ "no-mixed-spaces-and-tabs": ["error", "smart-tabs"],
+ "no-redeclare": ["warn"],
+ "no-return-assign": ["error", "always"],
+ "no-sequences": ["error"],
+ "no-trailing-spaces": ["warn"],
+ "no-undef": ["error"],
+ "no-unexpected-multiline": ["warn"],
+ "no-unreachable": ["warn"],
+ "no-unused-vars": ["warn", {"caughtErrors":"all", "caughtErrorsIgnorePattern": "^unused($|[A-Z].*$)"}],
+ "no-use-before-define": ["error", {"functions":false}],
+ "one-var": ["warn", "never"],
+ "quotes": ["warn", "single", {"avoidEscape":false, "allowTemplateLiterals":true}],
+ "semi": ["error", "always"],
+ "semi-spacing": ["warn", {"before":false, "after":true}],
+ "semi-style": ["warn", "last"]
+ }
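+  // For instance (an illustrative sketch, not an exhaustive description of these
+  // rules): with `callback-return` configured as above, `if (err) { done(err); }`
+  // gets flagged, while `if (err) { return done(err); }` passes; and with the
+  // `no-unused-vars` pattern above, a caught-but-ignored error can be named
+  // something like `unusedErr` to avoid a warning.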
+
+}
diff --git a/.gitignore b/.gitignore
old mode 100755
new mode 100644
index 1ed2e6f5..08e266ea
--- a/.gitignore
+++ b/.gitignore
@@ -1,11 +1,47 @@
-.\#*
-*#
+# ┌─┐┬┌┬┐╦╔═╗╔╗╔╔═╗╦═╗╔═╗
+# │ ┬│ │ ║║ ╦║║║║ ║╠╦╝║╣
+# o└─┘┴ ┴ ╩╚═╝╝╚╝╚═╝╩╚═╚═╝
+#
+# This file (`.gitignore`) exists to signify to `git` that certain files
+# and/or directories should be ignored for the purposes of version control.
+#
+# This is primarily useful for excluding temporary files of all sorts; stuff
+# generated by IDEs, build scripts, automated tests, package managers, or even
+# end-users (e.g. file uploads). `.gitignore` files like this also do a nice job
+# of keeping sensitive credentials and personal data out of version control systems.
+#
+
+############################
+# sails / node.js / npm
+############################
node_modules
-ssl
+npm-debug.log
+.node_history
+package-lock.json
+
+############################
+# editor & OS files
+############################
+*.swo
+*.swp
+*.swn
+*.swm
+*.seed
+*.log
+*.out
+*.pid
+lib-cov
.DS_STORE
+*#
+*\#
+.\#*
*~
.idea
+.netbeans
nbproject
-.waterline
-npm-debug.log
-.c9
\ No newline at end of file
+
+############################
+# misc
+############################
+.tmp
+dump.rdb
diff --git a/.jshintrc b/.jshintrc
new file mode 100644
index 00000000..f485e2e0
--- /dev/null
+++ b/.jshintrc
@@ -0,0 +1,130 @@
+{
+ // ┬┌─┐╦ ╦╦╔╗╔╔╦╗┬─┐┌─┐
+ // │└─┐╠═╣║║║║ ║ ├┬┘│
+ // o└┘└─┘╩ ╩╩╝╚╝ ╩ ┴└─└─┘
+ //
+ // This file (`.jshintrc`) exists to help with consistency of code
+ // throughout this package, and throughout Sails and the Node-Machine project.
+ //
+ // To review what each of these options means, see:
+ // http://jshint.com/docs/options
+ //
+ // (or: https://github.com/jshint/jshint/blob/master/examples/.jshintrc)
+
+
+
+ //////////////////////////////////////////////////////////////////////
+ // NOT SUPPORTED IN SOME JSHINT VERSIONS SO LEAVING COMMENTED OUT:
+ //////////////////////////////////////////////////////////////////////
+ // Prevent overwriting prototypes of native classes like `Array`.
+ // (doing this is _never_ ok in any of our packages that are intended
+ // to be used as dependencies of other developers' modules and apps)
+ // "freeze": true,
+ //////////////////////////////////////////////////////////////////////
+
+
+ //////////////////////////////////////////////////////////////////////
+ // EVERYTHING ELSE:
+ //////////////////////////////////////////////////////////////////////
+
+ // Allow the use of `eval` and `new Function()`
+ // (we sometimes actually need to use these things)
+ "evil": true,
+
+ // Tolerate funny-looking dashes in RegExp literals.
+ // (see https://github.com/jshint/jshint/issues/159#issue-903547)
+ "regexdash": true,
+
+ // The potential runtime "Environments" (as defined by jshint)
+ // that the _style_ of code written in this package should be
+ // compatible with (not the code itself, of course).
+ "browser": true,
+ "node": true,
+ "wsh": true,
+
+ // Tolerate the use of `[]` notation when dot notation would be possible.
+ // (this is sometimes preferable for readability)
+ "sub": true,
+
+ // Do NOT suppress warnings about mixed tabs and spaces
+ // (two spaces always, please; see `.editorconfig`)
+ "smarttabs": false,
+
+ // Suppress warnings about trailing whitespace
+ // (this is already enforced by the .editorconfig, so no need to warn as well)
+ "trailing": false,
+
+ // Suppress warnings about the use of expressions where fn calls or assignments
+ // are expected, and about using assignments where conditionals are expected.
+ // (while generally a good idea, without this setting, JSHint needlessly lights up warnings
+ // in existing, working code that really shouldn't be tampered with. Pandora's box and all.)
+ "expr": true,
+ "boss": true,
+
+ // Do NOT suppress warnings about using functions inside loops
+ // (in the general case, we should be using iteratee functions with `_.each()`
+ // or `Array.prototype.forEach()` instead of `for` or `while` statements
+ // anyway. This warning serves as a helpful reminder.)
+ "loopfunc": false,
+
+ // Suppress warnings about "weird constructions"
+ // i.e. allow code like:
+ // ```
+ // (new (function OneTimeUsePrototype () { } ))
+ // ```
+ //
+ // (sometimes order of operations in JavaScript can be scary. There is
+ //  nothing wrong with using an extra set of parentheses when the mood
+ // strikes or you get "that special feeling".)
+ "supernew": true,
+
+ // Do NOT allow backwards, node-dependency-style commas.
+ // (while this code style choice was used by the project in the past,
+ // we have since standardized these practices to make code easier to
+ // read, albeit a bit less exciting)
+ "laxcomma": false,
+
+ // Do NOT allow avant garde use of commas in conditional statements.
+ // (this prevents accidentally writing code like:
+ // ```
+ // if (!_.contains(['+ci', '-ci', '∆ci', '+ce', '-ce', '∆ce']), change.verb) {...}
+ // ```
+ // See the problem in that code? Neither did we-- that's the problem!)
+ "nocomma": true,
+
+ // Strictly enforce the consistent use of single quotes.
+ // (this is a convention that was established primarily to make it easier
+ // to grep [or FIND+REPLACE in Sublime] particular string literals in
+ // JavaScript [.js] files. Note that JSON [.json] files are, of course,
+ // still written exclusively using double quotes around key names and
+ // around string literals.)
+ "quotmark": "single",
+
+ // Do NOT suppress warnings about the use of `==null` comparisons.
+ // (please be explicit-- use Lodash or `require('util')` and call
+ // either `.isNull()` or `.isUndefined()`)
+ "eqnull": false,
+
+ // Strictly enforce the use of curly braces with `if`, `else`, and `switch`
+ // as well as, much less commonly, `for` and `while` statements.
+ // (this is just so that all of our code is consistent, and to avoid bugs)
+ "curly": true,
+
+ // Strictly enforce the use of `===` and `!==`.
+ // (this is always a good idea. Check out "Truth, Equality, and JavaScript"
+ //  by Angus Croll [the author of "If Hemingway Wrote JavaScript"] for more
+ // explanation as to why.)
+ "eqeqeq": true,
+
+ // Allow initializing variables to `undefined`.
+ // For more information, see:
+ // • https://jslinterrors.com/it-is-not-necessary-to-initialize-a-to-undefined
+ // • https://github.com/jshint/jshint/issues/1484
+ //
+ // (it is often very helpful to explicitly clarify the initial value of
+ // a local variable-- especially for folks new to more advanced JavaScript
+ //  and who might not recognize the subtle, yet critically important
+ //  differences between `null` and `undefined`, and the impact on `typeof` checks)
+ "-W080": true
+
+}
diff --git a/.npmignore b/.npmignore
index bde75a02..7f802e75 100644
--- a/.npmignore
+++ b/.npmignore
@@ -1,18 +1,34 @@
-*#
+.git
+./.gitignore
+./.jshintrc
+./.editorconfig
+./.travis.yml
+./appveyor.yml
+./example
+./examples
+./test
+./tests
+./.github
+
node_modules
-ssl
+npm-debug.log
+.node_history
+*.swo
+*.swp
+*.swn
+*.swm
+*.seed
+*.log
+*.out
+*.pid
+lib-cov
.DS_STORE
+*#
+*\#
+.\#*
*~
.idea
+.netbeans
nbproject
-test
-CONTRIBUTING.md
-.git
-.gitignore
.tmp
-*.swo
-*.swp
-*.swn
-*.swm
-.jshintrc
-.editorconfig
+dump.rdb
diff --git a/.travis.yml b/.travis.yml
index b8d5747f..b516bd91 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,10 +1,35 @@
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+# ╔╦╗╦═╗╔═╗╦ ╦╦╔═╗ ┬ ┬┌┬┐┬ #
+# ║ ╠╦╝╠═╣╚╗╔╝║╚═╗ └┬┘││││ #
+# o ╩ ╩╚═╩ ╩ ╚╝ ╩╚═╝o ┴ ┴ ┴┴─┘ #
+# #
+# This file configures Travis CI. #
+# (i.e. how we run the tests... mainly) #
+# #
+# https://docs.travis-ci.com/user/customizing-the-build #
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+
language: node_js
+
node_js:
- - "0.12"
- - "0.10"
+ - "12"
+ - "14"
+ - "16"
+
+
+branches:
+ only:
+ - master
+
services: mysql
+sudo: false
before_script:
- - mysql -e 'create database sails_mysql;'
+ - mysql -e 'create database adapter_tests;'
+
+before_install:
+ - npm i -g npm@8.11.0
+env:
+ - WATERLINE_ADAPTER_TESTS_HOST=127.0.0.1 WATERLINE_ADAPTER_TESTS_USER=root WATERLINE_ADAPTER_TESTS_PASSWORD='' WATERLINE_ADAPTER_TESTS_DATABASE=adapter_tests
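+  # (These variables are read by the adapter test runner; for a local run you can
+  #  export the same names -- or a single WATERLINE_ADAPTER_TESTS_URL, as shown in
+  #  the README -- before running `npm test`.)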
notifications:
email:
- - particlebanana@gmail.com
+ - ci@sailsjs.com
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..2e157c7c
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,54 @@
+# Sails MySQL Changelog
+
+## master
+
+* [NODE] Upgrades the underlying felixge/mysql dependency to work with Node v6. See [#321](https://github.com/balderdashy/sails-mysql/issues/321) (Thanks [@matuck](http://github.com/matuck)!)
+
+## ~0.12
+
+### 0.12.2
+
+* [ENHANCEMENT] Adds support for case-insensitive queries using the `wlNext.caseSensitive` flag. See [#304](https://github.com/balderdashy/sails-mysql/pull/304) for more details. Thanks [@wulfsolter](https://github.com/wulfsolter) for the patch!
+
+* [ENHANCEMENT] Adds MariaDB to the automated test suite. See [#276](https://github.com/balderdashy/sails-mysql/pull/276) for more details. Thanks to [@grooverdan](https://github.com/grooverdan) for the patch.
+
+* [ENHANCEMENT] Updates the dependencies to the latest versions which should remove any warning messages when installing.
+
+* [BUG] Fixes issues with backwards compatibility to Waterline `0.11.x` and older.
+
+### 0.12.1
+
+* [BUG] Fixes issue with populates due to changes in projections queries coming from Waterline-Sequel. Updated the `waterline-sequel` dependency to `0.6.2` to fix. See [#297](https://github.com/balderdashy/sails-mysql/issues/297) for more details. Thanks [@wulfsolter](https://github.com/wulfsolter) and [@aradnom](https://github.com/aradnom) for helping debug and test.
+
+### 0.12.0
+
+* [ENHANCEMENT] Upgrades the version of Waterline-Sequel being used to support using projections in join queries. See [#294](https://github.com/balderdashy/sails-mysql/pull/294) for more details.
+
+* [ENHANCEMENT] Adds JSHint and tweaks code style slightly to better support community additions. See [#295](https://github.com/balderdashy/sails-mysql/pull/295) for more details.
+
+## ~0.11
+
+
+### 0.11.5
+
+* [BUG] Updates [Waterline-Sequel](https://github.com/balderdashy/waterline-sequel) dependency to actually fix the previous dates bug.
+
+* [ENHANCEMENT] Changes the database url parsing to strip out query string values. See [#280](https://github.com/balderdashy/sails-mysql/pull/280) for more details. Thanks [@Bazze](https://github.com/Bazze)!
+
+### 0.11.4
+
+* [BUG] Updates [Waterline-Sequel](https://github.com/balderdashy/waterline-sequel) dependency to gain support for querying dates when they are represented as a string in the criteria.
+
+* [ENHANCEMENT] Normalizes the adapter errors somewhat to be more in line with the Postgres driver. Now returns the `originalError` key as specified in [Waterline-Adapter-Tests](https://github.com/balderdashy/waterline-adapter-tests/pull/89).
+
+### 0.11.3
+
+* [BUG] Fixes issue with an outdated `.stream()` interface. See [#264](https://github.com/balderdashy/sails-mysql/pull/264) for more details. Thanks [@github1337](https://github.com/github1337) for the patch!
+
+* [ENHANCEMENT] Better error message in the case of a foreign key constraint violation. See [#268](https://github.com/balderdashy/sails-mysql/pull/268) for more details. Thanks [@trheming](https://github.com/trheming) for the patch!
+
+* [ENHANCEMENT] Locked the dependency versions down to known working versions. Also added a `shrinkwrap.json` file. See [#272](https://github.com/balderdashy/sails-mysql/pull/272) for more details.
+
+* [ENHANCEMENT] Updated the Travis config to run tests on Node 4.0 and 5.0. See [#273](https://github.com/balderdashy/sails-mysql/pull/273) for more details.
+
+* [PERFORMANCE] And the best for last, merged [#274](https://github.com/balderdashy/sails-mysql/pull/274) which increases performance on populates ~15x. Thanks a million to [@jianpingw](https://github.com/jianpingw) for spending the time to track this down!
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000..ac194748
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,7 @@
+FROM nodesource/node:4.2
+
+ADD package.json package.json
+RUN npm install
+ADD . .
+
+CMD ["npm","test"]
diff --git a/README.md b/README.md
index c1871c75..7d301d8f 100755
--- a/README.md
+++ b/README.md
@@ -1,7 +1,4 @@
# Sails-MySQL Adapter
-[](https://travis-ci.org/balderdashy/sails-mysql)
-[](http://badge.fury.io/js/sails-mysql)
-[](https://david-dm.org/balderdashy/sails-mysql)
MySQL adapter for the Sails framework and Waterline ORM. Allows you to use MySQL via your models to store and retrieve data. Also provides a `query()` method for a direct interface to execute raw SQL commands.
@@ -16,88 +13,34 @@ Install from NPM.
$ npm install sails-mysql
```
-## Sails Configuration
-
-Add the mysql config to the config/connections.js file. Basic options:
-
-```javascript
-module.exports.connections = {
- mysql: {
- module : 'sails-mysql',
- host : 'localhost',
- port : 3306,
- user : 'username',
- password : 'password',
- database : 'MySQL Database Name'
-
- // OR (explicit sets take precedence)
- module : 'sails-mysql',
- url : 'mysql2://USER:PASSWORD@HOST:PORT/DATABASENAME'
-
- // Optional
- charset : 'utf8',
- collation : 'utf8_swedish_ci'
- }
-};
-```
-
-And then change default model configuration to the config/models.js:
-
-```javascript
-module.exports.models = {
- connection: 'mysql'
-};
-```
+## Help
-## Run tests
+If you have further questions or are having trouble, click [here](http://sailsjs.com/support).
-You can set environment variables to override the default database config for the tests, e.g.:
-```sh
-$ WATERLINE_ADAPTER_TESTS_PASSWORD=yourpass npm test
-```
+## Bugs [](http://npmjs.com/package/sails-mysql)
+To report a bug, [click here](http://sailsjs.com/bugs).
-Default settings are:
-```javascript
-{
- host: process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost',
- port: process.env.WATERLINE_ADAPTER_TESTS_PORT || 3306,
- user: process.env.WATERLINE_ADAPTER_TESTS_USER || 'root',
- password: process.env.WATERLINE_ADAPTER_TESTS_PASSWORD || '',
- database: process.env.WATERLINE_ADAPTER_TESTS_DATABASE || 'sails_mysql',
- pool: true,
- connectionLimit: 10,
- waitForConnections: true
-}
-```
+## Contributing
+Please observe the guidelines and conventions laid out in the [Sails project contribution guide](http://sailsjs.com/documentation/contributing) when opening issues or submitting pull requests.
+[](http://npmjs.com/package/sails-mysql)
-#### More Resources
-- [Stackoverflow](http://stackoverflow.com/questions/tagged/sails.js)
-- [#sailsjs on Freenode](http://webchat.freenode.net/) (IRC channel)
-- [Twitter](https://twitter.com/sailsjs)
-- [Professional/enterprise](https://github.com/balderdashy/sails-docs/blob/master/FAQ.md#are-there-professional-support-options)
-- [Tutorials](https://github.com/balderdashy/sails-docs/blob/master/FAQ.md#where-do-i-get-help)
-- [Waterline (ORM)](http://github.com/balderdashy/waterline)
--
+#### Running the tests
+To run the tests, point this adapter at your database by specifying a [connection URL](http://sailsjs.com/documentation/reference/configuration/sails-config-datastores#?the-connection-url) and run `npm test`:
+```
+WATERLINE_ADAPTER_TESTS_URL=mysql://root:myc00lP4ssw0rD@localhost/adapter_tests npm test
+```
-#### License
-
-**[MIT](./LICENSE)**
-© 2014
-[Mike McNeil](http://michaelmcneil.com), [Balderdash](http://balderdash.co) & contributors
-
-[Sails](http://sailsjs.org) is free and open-source under the [MIT License](http://sails.mit-license.org/).
-
-See the [MySQL Logo Usage Guidelines](http://www.mysql.com/about/legal/trademark.html) for more information on our use of the MySQL logo.
+> For more info, see [**Reference > Configuration > sails.config.datastores > The connection URL**](http://sailsjs.com/documentation/reference/configuration/sails-config-datastores#?the-connection-url), or [ask for help](http://sailsjs.com/support).
-
+## License
+This adapter, like the [Sails framework](http://sailsjs.com), is free and open-source under the [MIT License](http://sailsjs.com/license).
-[](http://githalytics.com/mikermcneil/sails-mysql)
diff --git a/appveyor.yml b/appveyor.yml
new file mode 100644
index 00000000..4abb76b1
--- /dev/null
+++ b/appveyor.yml
@@ -0,0 +1,51 @@
+# # # # # # # # # # # # # # # # # # # # # # # # # #
+# ╔═╗╔═╗╔═╗╦ ╦╔═╗╦ ╦╔═╗╦═╗ ┬ ┬┌┬┐┬ #
+# ╠═╣╠═╝╠═╝╚╗╔╝║╣ ╚╦╝║ ║╠╦╝ └┬┘││││ #
+# ╩ ╩╩ ╩ ╚╝ ╚═╝ ╩ ╚═╝╩╚═o ┴ ┴ ┴┴─┘ #
+# #
+# This file configures Appveyor CI. #
+# (i.e. how we run the tests on Windows) #
+# #
+# https://www.appveyor.com/docs/lang/nodejs-iojs/ #
+# # # # # # # # # # # # # # # # # # # # # # # # # #
+
+
+# Test against these versions of Node.js.
+environment:
+ matrix:
+ - nodejs_version: "10"
+ - nodejs_version: "12"
+ - nodejs_version: "14"
+
+# Install scripts. (runs after repo cloning)
+install:
+ # Get the latest stable version of Node.js
+ # (Not sure what this is for, it's just in Appveyor's example.)
+ - ps: Install-Product node $env:nodejs_version
+ # Install declared dependencies
+ - npm install
+
+
+# Post-install test scripts.
+test_script:
+ # Setup database
+ - SET MYSQL_PWD=Password12!
+ - '"C:\Program Files\MySQL\MySQL Server 5.7\bin\mysql" -u root -p"Password12!" -e "CREATE DATABASE IF NOT EXISTS adapter_tests"'
+
+ # Output Node and NPM version info.
+ # (Presumably just in case Appveyor decides to try any funny business?
+ # But seriously, always good to audit this kind of stuff for debugging.)
+ - node --version
+ - npm --version
+ # Run the actual tests.
+ - npm run fasttest
+
+# Setup MySQL Database
+services:
+ - mysql
+
+# Don't actually build.
+# (Not sure what this is for, it's just in Appveyor's example.
+# I'm not sure what we're not building... but I'm OK with not
+# building it. I guess.)
+build: off
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 00000000..80ca1e79
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,14 @@
+adapter:
+ build: .
+ volumes:
+ - .:/usr/src/app
+ links:
+ - mysql
+
+mysql:
+ image: mysql
+ environment:
+ - MYSQL_DATABASE=sails_mysql
+ - MYSQL_USER=sails
+ - MYSQL_PASSWORD=sails
+ - MYSQL_ROOT_PASSWORD=sails
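+
+# (Sketch of intent: running the `adapter` service builds the image from the
+# Dockerfile above, mounts this repo at /usr/src/app, and runs `npm test`
+# alongside the linked `mysql` service.)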
diff --git a/helpers/avg.js b/helpers/avg.js
new file mode 100644
index 00000000..5d9c668c
--- /dev/null
+++ b/helpers/avg.js
@@ -0,0 +1,154 @@
+// █████╗ ██╗ ██╗ ██████╗ █████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔══██╗██║ ██║██╔════╝ ██╔══██╗██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ███████║██║ ██║██║ ███╗ ███████║██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ██╔══██║╚██╗ ██╔╝██║ ██║ ██╔══██║██║ ██║ ██║██║ ██║██║╚██╗██║
+// ██║ ██║ ╚████╔╝ ╚██████╔╝ ██║ ██║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚═╝ ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'AVG',
+
+
+ description: 'Return the Average of the records matched by the query.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A valid stage three Waterline query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The results of the avg query.',
+ outputType: 'ref'
+ },
+
+ invalidDatastore: {
+ description: 'The datastore used is invalid. It is missing key pieces.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function avg(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var Converter = require('waterline-utils').query.converter;
+ var Helpers = require('./private');
+
+
+ // Store the Query input for easier access
+ var query = inputs.query;
+ query.meta = query.meta || {};
+
+
+ // Find the model definition
+ var model = inputs.models[query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(query.meta, 'leasedConnection');
+
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌┬┐┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ │ │ │ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ └─┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the Waterline criteria into a Waterline Query Statement. This
+ // turns it into something that is declarative and can be easily used to
+ // build a SQL query.
+ // See: https://github.com/treelinehq/waterline-query-docs for more info
+ // on Waterline Query Statements.
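+    // For example (illustrative values only), a stage-three `avg` query might look
+    // roughly like:
+    //   { using: 'users', criteria: { where: { age: { '>': 21 } } }, numericAttrName: 'age', meta: {} }
+    // and the converter below turns it into a statement object that can then be
+    // compiled down to SQL.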
+ var statement;
+ try {
+ statement = Converter({
+ model: query.using,
+ method: 'avg',
+ criteria: query.criteria,
+ values: query.numericAttrName
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // Compile the original Waterline Query
+ var compiledQuery;
+ try {
+ compiledQuery = Helpers.query.compileStatement(statement);
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, query.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ var queryType = 'avg';
+
+ Helpers.query.runQuery({
+ connection: connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ queryType: queryType,
+ disconnectOnError: leased ? false : true
+ },
+
+ function runQueryCb(err, report) {
+ // The runQuery helper will automatically release the connection on error
+ // if needed.
+ if (err) {
+ return exits.error(err);
+ }
+
+ // Always release the connection unless a leased connection from outside
+ // the adapter was used.
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.success(report.result);
+ }); // releaseConnection >
+ }); // runQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/count.js b/helpers/count.js
new file mode 100644
index 00000000..65685dcc
--- /dev/null
+++ b/helpers/count.js
@@ -0,0 +1,153 @@
+// ██████╗ ██████╗ ██╗ ██╗███╗ ██╗████████╗ █████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔════╝██╔═══██╗██║ ██║████╗ ██║╚══██╔══╝ ██╔══██╗██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ██║ ██║ ██║██║ ██║██╔██╗ ██║ ██║ ███████║██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ██║ ██║ ██║██║ ██║██║╚██╗██║ ██║ ██╔══██║██║ ██║ ██║██║ ██║██║╚██╗██║
+// ╚██████╗╚██████╔╝╚██████╔╝██║ ╚████║ ██║ ██║ ██║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚═════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Count',
+
+
+ description: 'Return the count of the records matched by the query.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A valid stage three Waterline query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The results of the count query.',
+ outputExample: '==='
+ },
+
+ invalidDatastore: {
+ description: 'The datastore used is invalid. It is missing key pieces.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function count(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var Converter = require('waterline-utils').query.converter;
+ var Helpers = require('./private');
+
+
+ // Store the Query input for easier access
+ var query = inputs.query;
+ query.meta = query.meta || {};
+
+
+ // Find the model definition
+ var model = inputs.models[query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(query.meta, 'leasedConnection');
+
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌┬┐┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ │ │ │ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ └─┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the Waterline criteria into a Waterline Query Statement. This
+ // turns it into something that is declarative and can be easily used to
+ // build a SQL query.
+ // See: https://github.com/treelinehq/waterline-query-docs for more info
+ // on Waterline Query Statements.
+ var statement;
+ try {
+ statement = Converter({
+ model: query.using,
+ method: 'count',
+ criteria: query.criteria
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // Compile the original Waterline Query
+ var compiledQuery;
+ try {
+ compiledQuery = Helpers.query.compileStatement(statement);
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, query.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ var queryType = 'count';
+
+ Helpers.query.runQuery({
+ connection: connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ queryType: queryType,
+ disconnectOnError: leased ? false : true
+ },
+
+ function runQueryCb(err, report) {
+ // The runQuery helper will automatically release the connection on error
+ // if needed.
+ if (err) {
+ return exits.error(err);
+ }
+
+ // Always release the connection unless a leased connection from outside
+ // the adapter was used.
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.success(report.result);
+ }); // releaseConnection >
+ }); // runQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/create-each.js b/helpers/create-each.js
new file mode 100644
index 00000000..a12bf16d
--- /dev/null
+++ b/helpers/create-each.js
@@ -0,0 +1,219 @@
+// ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗ ███████╗ █████╗ ██████╗██╗ ██╗
+// ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝ ██╔════╝██╔══██╗██╔════╝██║ ██║
+// ██║ ██████╔╝█████╗ ███████║ ██║ █████╗ █████╗ ███████║██║ ███████║
+// ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝ ██╔══╝ ██╔══██║██║ ██╔══██║
+// ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗ ███████╗██║ ██║╚██████╗██║ ██║
+// ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝
+//
+// █████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔══██╗██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ███████║██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ██╔══██║██║ ██║ ██║██║ ██║██║╚██╗██║
+// ██║ ██║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Create Each',
+
+
+ description: 'Insert multiple records into a table in the database.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A valid stage three Waterline query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The record was successfully inserted.',
+ outputVariableName: 'record',
+ outputExample: '==='
+ },
+
+ invalidDatastore: {
+ description: 'The datastore used is invalid. It is missing key pieces.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ },
+
+ notUnique: {
+ friendlyName: 'Not Unique',
+ outputExample: '==='
+ }
+
+ },
+
+
+ fn: function create(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var utils = require('waterline-utils');
+ var Helpers = require('./private');
+
+ // Store the Query input for easier access
+ var query = inputs.query;
+ query.meta = query.meta || {};
+
+
+ // Find the model definition
+ var model = inputs.models[query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(query.meta, 'leasedConnection');
+
+
+ // Set a flag to determine if records are being returned
+ var fetchRecords = false;
+
+
+ // Build a faux ORM for use in processEachRecords
+ var fauxOrm = {
+ collections: inputs.models
+ };
+
+
+ // ╔═╗╦═╗╔═╗ ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐
+ // ╠═╝╠╦╝║╣───╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ ├┬┘├┤ │ │ │├┬┘ ││└─┐
+ // ╩ ╩╚═╚═╝ ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘
+ // Process each record to normalize output
+ try {
+ Helpers.query.preProcessRecord({
+ records: query.newRecords,
+ identity: model.identity,
+ orm: fauxOrm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌┬┐┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ │ │ │ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ └─┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the Waterline criteria into a Waterline Query Statement. This
+ // turns it into something that is declarative and can be easily used to
+ // build a SQL query.
+ // See: https://github.com/treelinehq/waterline-query-docs for more info
+ // on Waterline Query Statements.
+ var statement;
+ try {
+ statement = utils.query.converter({
+ model: query.using,
+ method: 'createEach',
+ values: query.newRecords
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+
+ // ╔╦╗╔═╗╔╦╗╔═╗╦═╗╔╦╗╦╔╗╔╔═╗ ┬ ┬┬ ┬┬┌─┐┬ ┬ ┬ ┬┌─┐┬ ┬ ┬┌─┐┌─┐
+ // ║║║╣ ║ ║╣ ╠╦╝║║║║║║║║╣ │││├─┤││ ├─┤ └┐┌┘├─┤│ │ │├┤ └─┐
+ // ═╩╝╚═╝ ╩ ╚═╝╩╚═╩ ╩╩╝╚╝╚═╝ └┴┘┴ ┴┴└─┘┴ ┴ └┘ ┴ ┴┴─┘└─┘└─┘└─┘
+ // ┌┬┐┌─┐ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌
+ // │ │ │ ├┬┘├┤ │ │ │├┬┘│││
+ // ┴ └─┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘
+ if (_.has(query.meta, 'fetch') && query.meta.fetch) {
+ fetchRecords = true;
+ }
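+    // (For example, a createEach query sent with `meta: { fetch: true }` will have
+    // its newly inserted records handed back through `exits.success()` below;
+    // without it, success is called with no payload.)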
+
+ // Find the Primary Key
+ var primaryKeyField = model.primaryKey;
+ var primaryKeyColumnName = model.definition[primaryKeyField].columnName;
+
+ // Remove primary key if the value is NULL
+ _.each(statement.insert, function removeNullPrimaryKey(record) {
+ if (_.isNull(record[primaryKeyColumnName])) {
+ delete record[primaryKeyColumnName];
+ }
+ });
+
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, query.meta, function spawnOrLeaseConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+
+ // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌─┐┬ ┬
+ // ║ ╠╦╝║╣ ╠═╣ ║ ║╣ ├┤ ├─┤│ ├─┤
+ // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴└─┘┴ ┴
+ // Run the Create Each util
+ Helpers.query.createEach({
+ connection: connection,
+ statement: statement,
+ fetch: fetchRecords,
+ primaryKey: primaryKeyColumnName
+ },
+
+ function createEachCb(err, insertedRecords) {
+ // Release the connection if needed.
+ Helpers.connection.releaseConnection(connection, leased, function releaseCb() {
+ // If there was an error return it.
+ if (err) {
+ if (err.footprint && err.footprint.identity === 'notUnique') {
+ return exits.notUnique(err);
+ }
+
+ return exits.error(err);
+ }
+
+ if (fetchRecords) {
+ // Process each record to normalize output
+ try {
+ Helpers.query.processEachRecord({
+ records: insertedRecords,
+ identity: model.identity,
+ orm: fauxOrm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ return exits.success({ records: insertedRecords });
+ }
+
+ return exits.success();
+ }); // .releaseConnection(); >
+ }); // .insertRecord(); >
+ }); // .spawnOrLeaseConnection(); >
+ }
+});
diff --git a/helpers/create.js b/helpers/create.js
new file mode 100644
index 00000000..5793d27a
--- /dev/null
+++ b/helpers/create.js
@@ -0,0 +1,212 @@
+// ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗ █████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝ ██╔══██╗██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ██║ ██████╔╝█████╗ ███████║ ██║ █████╗ ███████║██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝ ██╔══██║██║ ██║ ██║██║ ██║██║╚██╗██║
+// ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗ ██║ ██║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Create',
+
+
+ description: 'Insert a record into a table in the database.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A valid stage three Waterline query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The record was successfully inserted.',
+ outputVariableName: 'record',
+ outputType: 'ref'
+ },
+
+ invalidDatastore: {
+ description: 'The datastore used is invalid. It is missing key pieces.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ },
+
+ notUnique: {
+ friendlyName: 'Not Unique',
+ outputType: 'ref'
+ }
+
+ },
+
+
+ fn: function create(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var utils = require('waterline-utils');
+ var Helpers = require('./private');
+
+
+ // Store the Query input for easier access
+ var query = inputs.query;
+ query.meta = query.meta || {};
+
+ // Find the model definition
+ var model = inputs.models[query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(query.meta, 'leasedConnection');
+
+ // Set a flag to determine if records are being returned
+ var fetchRecords = false;
+
+
+ // Build a faux ORM for use in processEachRecords
+ var fauxOrm = {
+ collections: inputs.models
+ };
+
+ // ╔═╗╦═╗╔═╗ ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐
+ // ╠═╝╠╦╝║╣───╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ ├┬┘├┤ │ │ │├┬┘ ││└─┐
+ // ╩ ╩╚═╚═╝ ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘
+ // Process each record to normalize output
+ try {
+ Helpers.query.preProcessRecord({
+ records: [query.newRecord],
+ identity: model.identity,
+ orm: fauxOrm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌┬┐┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ │ │ │ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ └─┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the Waterline criteria into a Waterline Query Statement. This
+ // turns it into something that is declarative and can be easily used to
+ // build a SQL query.
+ // See: https://github.com/treelinehq/waterline-query-docs for more info
+ // on Waterline Query Statements.
+ var statement;
+ try {
+ statement = utils.query.converter({
+ model: query.using,
+ method: 'create',
+ values: query.newRecord
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+
+ // ╔╦╗╔═╗╔╦╗╔═╗╦═╗╔╦╗╦╔╗╔╔═╗ ┬ ┬┬ ┬┬┌─┐┬ ┬ ┬ ┬┌─┐┬ ┬ ┬┌─┐┌─┐
+ // ║║║╣ ║ ║╣ ╠╦╝║║║║║║║║╣ │││├─┤││ ├─┤ └┐┌┘├─┤│ │ │├┤ └─┐
+ // ═╩╝╚═╝ ╩ ╚═╝╩╚═╩ ╩╩╝╚╝╚═╝ └┴┘┴ ┴┴└─┘┴ ┴ └┘ ┴ ┴┴─┘└─┘└─┘└─┘
+ // ┌┬┐┌─┐ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌
+ // │ │ │ ├┬┘├┤ │ │ │├┬┘│││
+ // ┴ └─┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘
+ if (_.has(query.meta, 'fetch') && query.meta.fetch) {
+ fetchRecords = true;
+ }
+
+
+ // Find the Primary Key
+ var primaryKeyField = model.primaryKey;
+ var primaryKeyColumnName = model.definition[primaryKeyField].columnName;
+
+ // Remove primary key if the value is NULL. This allows the auto-increment
+ // to work properly if set.
+ if (_.isNull(statement.insert[primaryKeyColumnName])) {
+ delete statement.insert[primaryKeyColumnName];
+ }
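+    // (e.g. an insert of `{ id: null, name: 'alice' }` becomes `{ name: 'alice' }`,
+    // letting MySQL assign the auto-incremented `id` itself -- illustrative column
+    // names only.)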
+
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, query.meta, function spawnOrLeaseConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+ // ╦╔╗╔╔═╗╔═╗╦═╗╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐
+ // ║║║║╚═╗║╣ ╠╦╝ ║ ├┬┘├┤ │ │ │├┬┘ ││
+ // ╩╝╚╝╚═╝╚═╝╩╚═ ╩ ┴└─└─┘└─┘└─┘┴└──┴┘
+ // Insert the record and return the new values
+ Helpers.query.create({
+ connection: connection,
+ statement: statement,
+ fetch: fetchRecords,
+ primaryKey: primaryKeyColumnName
+ },
+
+ function createRecordCb(err, insertedRecords) {
+ // Release the connection if needed.
+ Helpers.connection.releaseConnection(connection, leased, function releaseCb() {
+ // If there was an error return it.
+ if (err) {
+ if (err.footprint && err.footprint.identity === 'notUnique') {
+ return exits.notUnique(err);
+ }
+
+ return exits.error(err);
+ }
+
+ if (fetchRecords) {
+ // Process each record to normalize output
+ try {
+ Helpers.query.processEachRecord({
+ records: insertedRecords,
+ identity: model.identity,
+ orm: fauxOrm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // Only return the first record (there should only ever be one)
+ var insertedRecord = _.first(insertedRecords);
+ return exits.success({ record: insertedRecord });
+ }
+
+ return exits.success();
+ }); // .releaseConnection(); >
+ }); // .insertRecord(); >
+ }); // .spawnOrLeaseConnection(); >
+ }
+});
diff --git a/helpers/define.js b/helpers/define.js
new file mode 100644
index 00000000..166cae24
--- /dev/null
+++ b/helpers/define.js
@@ -0,0 +1,155 @@
+// ██████╗ ███████╗███████╗██╗███╗ ██╗███████╗
+// ██╔══██╗██╔════╝██╔════╝██║████╗ ██║██╔════╝
+// ██║ ██║█████╗ █████╗ ██║██╔██╗ ██║█████╗
+// ██║ ██║██╔══╝ ██╔══╝ ██║██║╚██╗██║██╔══╝
+// ██████╔╝███████╗██║ ██║██║ ╚████║███████╗
+// ╚═════╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═══╝╚══════╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Define',
+
+
+ description: 'Create a new table in the database based on a given schema.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ example: '==='
+ },
+
+ tableName: {
+      description: 'The name of the table to create.',
+ required: true,
+ example: 'users'
+ },
+
+ definition: {
+ description: 'The definition of the schema to build.',
+ required: true,
+ example: {}
+ },
+
+ meta: {
+ friendlyName: 'Meta (custom)',
+ description: 'Additional stuff to pass to the driver.',
+ extendedDescription: 'This is reserved for custom driver-specific extensions.',
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The table was created successfully.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function define(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var Helpers = require('./private');
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(inputs.meta, 'leasedConnection');
+
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, inputs.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+
+ // Escape Table Name
+ var tableName;
+ try {
+ tableName = Helpers.schema.escapeTableName(inputs.tableName);
+ } catch (e) {
+ // If there was an issue, release the connection
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.error(e);
+ });
+ return;
+ }
+
+
+ // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ ┌─┐┌┬┐┬─┐┬┌┐┌┌─┐
+ // ╠╩╗║ ║║║ ║║ │─┼┐│ │├┤ ├┬┘└┬┘ └─┐ │ ├┬┘│││││ ┬
+ // ╚═╝╚═╝╩╩═╝═╩╝ └─┘└└─┘└─┘┴└─ ┴ └─┘ ┴ ┴└─┴┘└┘└─┘
+
+ // Iterate through each attribute, building a query string
+ var schema;
+ try {
+ schema = Helpers.schema.buildSchema(inputs.definition);
+ } catch (e) {
+ // If there was an issue, release the connection
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.error(e);
+ });
+ return;
+ }
+
+ // Build Query
+ var query = 'CREATE TABLE IF NOT EXISTS ' + tableName + ' (' + schema + ')';
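+      // (e.g. this produces something roughly like
+      //   CREATE TABLE IF NOT EXISTS `users` ( ...column definitions from buildSchema()... )
+      // -- table name escaped above; shown for illustration only.)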
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐┬─┐┌─┐┌─┐┌┬┐┌─┐ ┌┬┐┌─┐┌┐ ┬ ┌─┐
+ // ╠╦╝║ ║║║║ │ ├┬┘├┤ ├─┤ │ ├┤ │ ├─┤├┴┐│ ├┤
+ // ╩╚═╚═╝╝╚╝ └─┘┴└─└─┘┴ ┴ ┴ └─┘ ┴ ┴ ┴└─┘┴─┘└─┘
+ // ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // │─┼┐│ │├┤ ├┬┘└┬┘
+ // └─┘└└─┘└─┘┴└─ ┴
+ Helpers.query.runNativeQuery(connection, query, [], undefined, function runNativeQueryCb(err) {
+ if (err) {
+ // If there was an issue, release the connection
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.error(err);
+ });
+ return;
+ }
+
+
+ // ╔╗ ╦ ╦╦╦ ╔╦╗ ┬┌┐┌┌┬┐┌─┐─┐ ┬┌─┐┌─┐
+ // ╠╩╗║ ║║║ ║║ ││││ ││├┤ ┌┴┬┘├┤ └─┐
+ // ╚═╝╚═╝╩╩═╝═╩╝ ┴┘└┘─┴┘└─┘┴ └─└─┘└─┘
+ // Build any indexes
+ Helpers.schema.buildIndexes({
+ connection: connection,
+ definition: inputs.definition,
+ tableName: inputs.tableName
+ },
+
+ function buildIndexesCb(err) {
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ if (err) {
+ return exits.error(err);
+ }
+
+ return exits.success();
+ });
+ return;
+ }); // buildIndexes() >
+ }); // runNativeQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/describe.js b/helpers/describe.js
new file mode 100644
index 00000000..17e2201b
--- /dev/null
+++ b/helpers/describe.js
@@ -0,0 +1,177 @@
+// ██████╗ ███████╗███████╗ ██████╗██████╗ ██╗██████╗ ███████╗
+// ██╔══██╗██╔════╝██╔════╝██╔════╝██╔══██╗██║██╔══██╗██╔════╝
+// ██║ ██║█████╗ ███████╗██║ ██████╔╝██║██████╔╝█████╗
+// ██║ ██║██╔══╝ ╚════██║██║ ██╔══██╗██║██╔══██╗██╔══╝
+// ██████╔╝███████╗███████║╚██████╗██║ ██║██║██████╔╝███████╗
+// ╚═════╝ ╚══════╝╚══════╝ ╚═════╝╚═╝ ╚═╝╚═╝╚═════╝ ╚══════╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Describe',
+
+
+ description: 'Describe a table in the related data store.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ example: '==='
+ },
+
+ tableName: {
+ description: 'The name of the table to describe.',
+ required: true,
+ example: 'users'
+ },
+
+ meta: {
+ friendlyName: 'Meta (custom)',
+ description: 'Additional stuff to pass to the driver.',
+ extendedDescription: 'This is reserved for custom driver-specific extensions.',
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The results of the describe query.',
+ outputVariableName: 'records',
+ outputType: 'ref'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function describe(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var Helpers = require('./private');
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(inputs.meta, 'leasedConnection');
+
+
+ // ██████╗ ██╗ ██╗███████╗██████╗ ██╗███████╗███████╗
+ // ██╔═══██╗██║ ██║██╔════╝██╔══██╗██║██╔════╝██╔════╝
+ // ██║ ██║██║ ██║█████╗ ██████╔╝██║█████╗ ███████╗
+ // ██║▄▄ ██║██║ ██║██╔══╝ ██╔══██╗██║██╔══╝ ╚════██║
+ // ╚██████╔╝╚██████╔╝███████╗██║ ██║██║███████╗███████║
+ // ╚══▀▀═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝╚═╝╚══════╝╚══════╝
+ //
+ // These native queries are responsible for describing a single table and the
+    // various attributes that make it up.
+
+ var describeQuery = 'DESCRIBE ' + inputs.tableName;
+ var autoIncrementQuery = 'SHOW INDEX FROM ' + inputs.tableName;
+
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection to run the queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, inputs.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌┬┐┌─┐┌─┐┌─┐┬─┐┬┌┐ ┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ ││├┤ └─┐│ ├┬┘│├┴┐├┤ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ ─┴┘└─┘└─┘└─┘┴└─┴└─┘└─┘ └─┘└└─┘└─┘┴└─ ┴
+ Helpers.query.runNativeQuery(connection, describeQuery, [], undefined, function runDescribeQueryCb(err, describeResults) {
+ if (err) {
+ // Release the connection on error
+ Helpers.connection.releaseConnection(connection, leased, function cb() {
+ // If the table doesn't exist, return an empty object
+ if (err.code === 'ER_NO_SUCH_TABLE') {
+ return exits.success({ schema: {} });
+ }
+
+ return exits.error(err);
+ });
+ return;
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐┬ ┬┌┬┐┌─┐ ┬┌┐┌┌─┐┬─┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ╠╦╝║ ║║║║ ├─┤│ │ │ │ │───│││││ ├┬┘├┤ │││├┤ │││ │
+ // ╩╚═╚═╝╝╚╝ ┴ ┴└─┘ ┴ └─┘ ┴┘└┘└─┘┴└─└─┘┴ ┴└─┘┘└┘ ┴
+ // ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // │─┼┐│ │├┤ ├┬┘└┬┘
+ // └─┘└└─┘└─┘┴└─ ┴
+ Helpers.query.runNativeQuery(connection, autoIncrementQuery, [], undefined, function runAutoIncrementQueryCb(err, incrementResults) {
+ if (err) {
+ // Release the connection on error
+ Helpers.connection.releaseConnection(connection, leased, function cb() {
+ return exits.error(err);
+ });
+ return;
+ }
+
+
+ // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐┌─┐
+ // ├┬┘├┤ └─┐│ ││ │ └─┐
+ // ┴└─└─┘└─┘└─┘┴─┘┴ └─┘
+
+ // Normalize Schema
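+          // (A row coming back from `DESCRIBE` typically looks something like
+          //   { Field: 'id', Type: 'int(11)', Null: 'NO', Key: 'PRI', Default: null, Extra: 'auto_increment' }
+          // -- illustrative values -- and gets folded into the schema shape below.)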
+ var schema = {};
+ _.each(describeResults, function normalize(column) {
+ // Set Type
+ schema[column.Field] = {
+ // Remove (n) column-size indicators
+ type: column.Type.replace(/\([0-9]+\)$/, '')
+ };
+
+ // Check for primary key
+ if (column.Key === 'PRI') {
+ schema[column.Field].primaryKey = true;
+ }
+
+ // Check for uniqueness
+ if (column.Key === 'UNI') {
+ schema[column.Field].unique = true;
+ }
+
+            // If the column is also an integer, set the auto-increment attribute
+ if (column.Type === 'int(11)') {
+ schema[column.Field].autoIncrement = true;
+ }
+
+ // Loop Through Indexes and Add Properties
+ _.each(incrementResults, function processIndexes(result) {
+ _.each(schema, function loopThroughSchema(attr) {
+ if (attr.Field !== result.Column_name) {
+ return;
+ }
+
+ attr.indexed = true;
+ });
+ });
+ });
+
+ Helpers.connection.releaseConnection(connection, leased, function cb() {
+ // Return the model schema
+ return exits.success({ schema: schema });
+ }); // releaseConnection >
+ }); // runAutoIncrementQuery >
+ }); // runDescribeQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/destroy.js b/helpers/destroy.js
new file mode 100644
index 00000000..14ccc1ee
--- /dev/null
+++ b/helpers/destroy.js
@@ -0,0 +1,180 @@
+// ██████╗ ███████╗███████╗████████╗██████╗ ██████╗ ██╗ ██╗ █████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔══██╗██╔════╝██╔════╝╚══██╔══╝██╔══██╗██╔═══██╗╚██╗ ██╔╝ ██╔══██╗██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ██║ ██║█████╗ ███████╗ ██║ ██████╔╝██║ ██║ ╚████╔╝ ███████║██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ██║ ██║██╔══╝ ╚════██║ ██║ ██╔══██╗██║ ██║ ╚██╔╝ ██╔══██║██║ ██║ ██║██║ ██║██║╚██╗██║
+// ██████╔╝███████╗███████║ ██║ ██║ ██║╚██████╔╝ ██║ ██║ ██║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Destroy',
+
+
+ description: 'Destroy record(s) in the database matching a query criteria.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A valid stage three Waterline query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The results of the destroy query.',
+ outputType: 'ref'
+ },
+
+ invalidDatastore: {
+ description: 'The datastore used is invalid. It is missing key pieces.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function destroy(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var WLUtils = require('waterline-utils');
+ var Helpers = require('./private');
+ var Converter = WLUtils.query.converter;
+
+
+ // Store the Query input for easier access
+ var query = inputs.query;
+ query.meta = query.meta || {};
+
+ // Find the model definition
+ var model = inputs.models[query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(query.meta, 'leasedConnection');
+
+ // Set a flag to determine if records are being returned
+ var fetchRecords = false;
+
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌┬┐┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ │ │ │ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ └─┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the Waterline criteria into a Waterline Query Statement. This
+ // turns it into something that is declarative and can be easily used to
+ // build a SQL query.
+ // See: https://github.com/treelinehq/waterline-query-docs for more info
+ // on Waterline Query Statements.
+ var statement;
+ try {
+ statement = Converter({
+ model: query.using,
+ method: 'destroy',
+ criteria: query.criteria
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+
+ // ╔╦╗╔═╗╔╦╗╔═╗╦═╗╔╦╗╦╔╗╔╔═╗ ┬ ┬┬ ┬┬┌─┐┬ ┬ ┬ ┬┌─┐┬ ┬ ┬┌─┐┌─┐
+ // ║║║╣ ║ ║╣ ╠╦╝║║║║║║║║╣ │││├─┤││ ├─┤ └┐┌┘├─┤│ │ │├┤ └─┐
+ // ═╩╝╚═╝ ╩ ╚═╝╩╚═╩ ╩╩╝╚╝╚═╝ └┴┘┴ ┴┴└─┘┴ ┴ └┘ ┴ ┴┴─┘└─┘└─┘└─┘
+ // ┌┬┐┌─┐ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌
+ // │ │ │ ├┬┘├┤ │ │ │├┬┘│││
+ // ┴ └─┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘
+ if (_.has(query.meta, 'fetch') && query.meta.fetch) {
+ fetchRecords = true;
+ }
+
+
+ // Find the Primary Key
+ var primaryKeyField = model.primaryKey;
+ var primaryKeyColumnName = model.definition[primaryKeyField].columnName;
+
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, query.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ ││├┤ └─┐ │ ├┬┘│ │└┬┘ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ └─┘└└─┘└─┘┴└─ ┴
+ Helpers.query.destroy({
+ connection: connection,
+ statement: statement,
+ fetch: fetchRecords,
+ primaryKey: primaryKeyColumnName
+ },
+
+ function destroyRecordCb(err, destroyedRecords) {
+ // Always release the connection unless a leased connection from outside
+ // the adapter was used.
+ Helpers.connection.releaseConnection(connection, leased, function cb() {
+ // If there was an error return it.
+ if (err) {
+ return exits.error(err);
+ }
+
+ if (fetchRecords) {
+ var orm = {
+ collections: inputs.models
+ };
+
+ // Process each record to normalize output
+ try {
+ Helpers.query.processEachRecord({
+ records: destroyedRecords,
+ identity: model.identity,
+ orm: orm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ return exits.success({ records: destroyedRecords });
+ }
+
+ return exits.success();
+ }); // releaseConnection >
+ }); // runQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/drop.js b/helpers/drop.js
new file mode 100644
index 00000000..ff34877c
--- /dev/null
+++ b/helpers/drop.js
@@ -0,0 +1,110 @@
+// ██████╗ ██████╗ ██████╗ ██████╗
+// ██╔══██╗██╔══██╗██╔═══██╗██╔══██╗
+// ██║ ██║██████╔╝██║ ██║██████╔╝
+// ██║ ██║██╔══██╗██║ ██║██╔═══╝
+// ██████╔╝██║ ██║╚██████╔╝██║
+// ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Drop',
+
+
+ description: 'Remove a table from the database.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ example: '==='
+ },
+
+ tableName: {
+ description: 'The name of the table to destroy.',
+ required: true,
+ example: 'users'
+ },
+
+ meta: {
+ friendlyName: 'Meta (custom)',
+ description: 'Additional stuff to pass to the driver.',
+ extendedDescription: 'This is reserved for custom driver-specific extensions.',
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The table was destroyed successfully.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function drop(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var Helpers = require('./private');
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(inputs.meta, 'leasedConnection');
+
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection to run the queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, inputs.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+
+ // ╔═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌┬┐┌─┐┌┐ ┬ ┌─┐ ┌┐┌┌─┐┌┬┐┌─┐
+ // ║╣ ╚═╗║ ╠═╣╠═╝║╣ │ ├─┤├┴┐│ ├┤ │││├─┤│││├┤
+ // ╚═╝╚═╝╚═╝╩ ╩╩ ╚═╝ ┴ ┴ ┴└─┘┴─┘└─┘ ┘└┘┴ ┴┴ ┴└─┘
+ var tableName;
+ try {
+ tableName = Helpers.schema.escapeTableName(inputs.tableName);
+ } catch (e) {
+ // Release the connection on error
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.error(e);
+ });
+ return;
+ }
+
+ // Build native query
+ var query = 'DROP TABLE IF EXISTS ' + tableName + ';';
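+ // (With `tableName` escaped as an identifier, this likely ends up as something
+ // like: DROP TABLE IF EXISTS `users`; -- illustrative only.)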
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌┬┐┬─┐┌─┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ ││├┬┘│ │├─┘ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ ─┴┘┴└─└─┘┴ └─┘└└─┘└─┘┴└─ ┴
+ Helpers.query.runNativeQuery(connection, query, [], undefined, function runNativeQueryCb(err) {
+ // Always release the connection back to the pool
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ if (err) {
+ return exits.error(err);
+ }
+
+ return exits.success();
+ }); // releaseConnection >
+ }); // runNativeQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/index.js b/helpers/index.js
new file mode 100644
index 00000000..e6fd084e
--- /dev/null
+++ b/helpers/index.js
@@ -0,0 +1,17 @@
+module.exports = {
+ avg: require('./avg'),
+ count: require('./count'),
+ create: require('./create'),
+ createEach: require('./create-each'),
+ define: require('./define'),
+ describe: require('./describe'),
+ destroy: require('./destroy'),
+ drop: require('./drop'),
+ join: require('./join'),
+ registerDataStore: require('./register-data-store'),
+ select: require('./select'),
+ setSequence: require('./set-sequence'),
+ sum: require('./sum'),
+ teardown: require('./teardown'),
+ update: require('./update')
+};
diff --git a/helpers/join.js b/helpers/join.js
new file mode 100644
index 00000000..5fd06718
--- /dev/null
+++ b/helpers/join.js
@@ -0,0 +1,400 @@
+// ██╗ ██████╗ ██╗███╗ ██╗
+// ██║██╔═══██╗██║████╗ ██║
+// ██║██║ ██║██║██╔██╗ ██║
+// ██ ██║██║ ██║██║██║╚██╗██║
+// ╚█████╔╝╚██████╔╝██║██║ ╚████║
+// ╚════╝ ╚═════╝ ╚═╝╚═╝ ╚═══╝
+//
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Join',
+
+
+ description: 'Support native joins on the database.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A normalized Waterline Stage Three Query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The query was run successfully.',
+ outputType: 'ref'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function join(inputs, exits) {
+ var _ = require('@sailshq/lodash');
+ var async = require('async');
+ var WLUtils = require('waterline-utils');
+ var Helpers = require('./private');
+
+ var meta = _.has(inputs.query, 'meta') ? inputs.query.meta : {};
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(meta, 'leasedConnection');
+
+
+ // ╔═╗╦╔╗╔╔╦╗ ┌┬┐┌─┐┌┐ ┬ ┌─┐ ┌─┐┬─┐┬┌┬┐┌─┐┬─┐┬ ┬ ┬┌─┌─┐┬ ┬
+ // ╠╣ ║║║║ ║║ │ ├─┤├┴┐│ ├┤ ├─┘├┬┘││││├─┤├┬┘└┬┘ ├┴┐├┤ └┬┘
+ // ╚ ╩╝╚╝═╩╝ ┴ ┴ ┴└─┘┴─┘└─┘ ┴ ┴└─┴┴ ┴┴ ┴┴└─ ┴ ┴ ┴└─┘ ┴
+ // Find the model definition
+ var model = inputs.models[inputs.query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+ // Grab the primary key attribute for the main table name
+ var primaryKeyAttr = model.primaryKey;
+ var primaryKeyColumnName = model.definition[primaryKeyAttr].columnName || primaryKeyAttr;
+
+ // Build a fake ORM and process the records.
+ var orm = {
+ collections: inputs.models
+ };
+
+ // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐┌─┐
+ // ╠╩╗║ ║║║ ║║ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │ └─┐
+ // ╚═╝╚═╝╩╩═╝═╩╝ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴ └─┘
+ // Attempt to build up the statements necessary for the query.
+ var statements;
+ try {
+ statements = WLUtils.joins.convertJoinCriteria({
+ query: inputs.query,
+ getPk: function getPk(tableName) {
+ var model = inputs.models[tableName];
+ if (!model) {
+ throw new Error('Invalid parent table name used when caching query results. Perhaps the join criteria is invalid?');
+ }
+
+ var pkAttrName = model.primaryKey;
+ var pkColumnName = model.definition[pkAttrName].columnName || pkAttrName;
+
+ return pkColumnName;
+ }
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
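+ // (For context: `statements` is expected to contain a `parentStatement`, zero
+ // or more `childStatements`, and the join `instructions`, all of which are
+ // used below.)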
+
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌─┐┌─┐┬─┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ ├─┘├─┤├┬┘├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ ┴ ┴┴└─└─┘┘└┘ ┴
+ // ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the parent statement into a native query. If the join can be
+ // fulfilled in a single query then this will be the only query that runs.
+ var compiledQuery;
+ try {
+ compiledQuery = Helpers.query.compileStatement(statements.parentStatement);
+ } catch (e) {
+ return exits.error(e);
+ }
+
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, meta, function spawnCb(err, connection) {
+ if (err) {
+ return exits.error(err);
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌┬┐┬ ┬┌─┐ ┌┐┌┌─┐┌┬┐┬┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │ ├─┤├┤ │││├─┤ │ │└┐┌┘├┤ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ ┴ ┴ ┴└─┘ ┘└┘┴ ┴ ┴ ┴ └┘ └─┘ └─┘└└─┘└─┘┴└─ ┴
+ Helpers.query.runNativeQuery(connection, compiledQuery.nativeQuery, compiledQuery.valuesToEscape, compiledQuery.meta, function parentQueryCb(err, parentResults) {
+ if (err) {
+ // Release the connection on error
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.error(err);
+ });
+ return;
+ }
+
+ // If there weren't any joins being performed or no parent records were
+ // returned, release the connection and return the results.
+ if (!_.has(inputs.query, 'joins') || !parentResults.length) {
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb(err) {
+ if (err) {
+ return exits.error(err);
+ }
+
+ return exits.success(parentResults);
+ });
+ return;
+ }
+
+
+ // ╔═╗╦╔╗╔╔╦╗ ┌─┐┬ ┬┬┬ ┌┬┐┬─┐┌─┐┌┐┌ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐
+ // ╠╣ ║║║║ ║║ │ ├─┤││ ││├┬┘├┤ │││ ├┬┘├┤ │ │ │├┬┘ ││└─┐
+ // ╚ ╩╝╚╝═╩╝ └─┘┴ ┴┴┴─┘─┴┘┴└─└─┘┘└┘ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘
+ // If there was a join that was either performed or still needs to be
+ // performed, look into the results for any child records that may
+ // have been joined and split them out from the parent.
+ var sortedResults;
+ try {
+ sortedResults = WLUtils.joins.detectChildrenRecords(primaryKeyColumnName, parentResults);
+ } catch (e) {
+ // Release the connection if there was an error.
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.error(e);
+ });
+ return;
+ }
+
+
+ // ╦╔╗╔╦╔╦╗╦╔═╗╦ ╦╔═╗╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ ┌─┐┌─┐┌─┐┬ ┬┌─┐
+ // ║║║║║ ║ ║╠═╣║ ║╔═╝║╣ │─┼┐│ │├┤ ├┬┘└┬┘ │ ├─┤│ ├─┤├┤
+ // ╩╝╚╝╩ ╩ ╩╩ ╩╩═╝╩╚═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴ └─┘┴ ┴└─┘┴ ┴└─┘
+ var queryCache;
+ try {
+ queryCache = Helpers.query.initializeQueryCache({
+ instructions: statements.instructions,
+ models: inputs.models,
+ sortedResults: sortedResults
+ });
+ } catch (e) {
+ // Release the connection if there was an error.
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.error(e);
+ });
+ return;
+ }
+
+
+ // ╔═╗╔╦╗╔═╗╦═╗╔═╗ ┌─┐┌─┐┬─┐┌─┐┌┐┌┌┬┐┌─┐
+ // ╚═╗ ║ ║ ║╠╦╝║╣ ├─┘├─┤├┬┘├┤ │││ │ └─┐
+ // ╚═╝ ╩ ╚═╝╩╚═╚═╝ ┴ ┴ ┴┴└─└─┘┘└┘ ┴ └─┘
+ try {
+ queryCache.setParents(sortedResults.parents);
+ } catch (e) {
+ // Release the connection if there was an error.
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.error(e);
+ });
+ return;
+ }
+
+
+ // ╔═╗╦ ╦╔═╗╔═╗╦╔═ ┌─┐┌─┐┬─┐ ┌─┐┬ ┬┬┬ ┌┬┐┬─┐┌─┐┌┐┌
+ // ║ ╠═╣║╣ ║ ╠╩╗ ├┤ │ │├┬┘ │ ├─┤││ ││├┬┘├┤ │││
+ // ╚═╝╩ ╩╚═╝╚═╝╩ ╩ └ └─┘┴└─ └─┘┴ ┴┴┴─┘─┴┘┴└─└─┘┘└┘
+ // ┌─┐ ┬ ┬┌─┐┬─┐┬┌─┐┌─┐
+ // │─┼┐│ │├┤ ├┬┘│├┤ └─┐
+ // └─┘└└─┘└─┘┴└─┴└─┘└─┘
+ // Now that all the parents are found, check if there are any child
+ // statements that need to be processed. If not, release the connection and
+ // return the combined results.
+ if (!statements.childStatements || !statements.childStatements.length) {
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb(err) {
+ if (err) {
+ return exits.error(err);
+ }
+
+ // Combine records in the cache to form nested results
+ var combinedResults;
+ try {
+ combinedResults = queryCache.combineRecords();
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // Process each record to normalize output
+ try {
+ Helpers.query.processEachRecord({
+ records: combinedResults,
+ identity: model.identity,
+ orm: orm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // Return the combined results
+ exits.success(combinedResults);
+ });
+ return;
+ }
+
+
+ // ╔═╗╔═╗╦ ╦ ╔═╗╔═╗╔╦╗ ┌─┐┌─┐┬─┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║ ║ ║╣ ║ ║ ├─┘├─┤├┬┘├┤ │││ │
+ // ╚═╝╚═╝╩═╝╩═╝╚═╝╚═╝ ╩ ┴ ┴ ┴┴└─└─┘┘└┘ ┴
+ // ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐
+ // ├┬┘├┤ │ │ │├┬┘ ││└─┐
+ // ┴└─└─┘└─┘└─┘┴└──┴┘└─┘
+ // There is more work to be done now. Go through the parent records and
+ // build up an array of the primary keys.
+ var parentKeys = _.map(queryCache.getParents(), function pluckPk(record) {
+ return record[primaryKeyColumnName];
+ });
+
+
+ // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌─┐┬ ┬┬┬ ┌┬┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐┌─┐
+ // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │ ├─┤││ ││ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │ └─┐
+ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ └─┘┴ ┴┴┴─┘─┴┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴ └─┘
+ // For each child statement, figure out how to turn the statement into
+ // a native query and then run it. Add the results to the query cache.
+ async.each(statements.childStatements, function processChildStatements(template, next) {
+ // ╦═╗╔═╗╔╗╔╔╦╗╔═╗╦═╗ ┬┌┐┌ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║╣ ║║║ ║║║╣ ╠╦╝ ││││ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝═╩╝╚═╝╩╚═ ┴┘└┘ └─┘└└─┘└─┘┴└─ ┴
+ // ┌┬┐┌─┐┌┬┐┌─┐┬ ┌─┐┌┬┐┌─┐
+ // │ ├┤ │││├─┘│ ├─┤ │ ├┤
+ // ┴ └─┘┴ ┴┴ ┴─┘┴ ┴ ┴ └─┘
+ // If the statement is an IN query, replace the values with the parent
+ // keys.
+ if (template.queryType === 'in') {
+ // Pull the last AND clause out - it's the one we added
+ var inClause = _.pullAt(template.statement.where.and, template.statement.where.and.length - 1);
+
+ // Grab the object inside the array that comes back
+ inClause = _.first(inClause);
+
+ // Modify the inClause using the actual parent key values
+ _.each(inClause, function modifyInClause(val) {
+ val.in = parentKeys;
+ });
+
+ // Reset the statement
+ template.statement.where.and.push(inClause);
+ }
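+ // (Illustrative result: the final AND clause's `in` values end up as the
+ // parent primary keys, e.g. `{ childFk: { in: [1, 2, 3] } }` -- the key name
+ // here is hypothetical.)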
+
+
+ // ╦═╗╔═╗╔╗╔╔╦╗╔═╗╦═╗ ┬ ┬┌┐┌┬┌─┐┌┐┌ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║╣ ║║║ ║║║╣ ╠╦╝ │ ││││││ ││││ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝═╩╝╚═╝╩╚═ └─┘┘└┘┴└─┘┘└┘ └─┘└└─┘└─┘┴└─ ┴
+ // ┌┬┐┌─┐┌┬┐┌─┐┬ ┌─┐┌┬┐┌─┐
+ // │ ├┤ │││├─┘│ ├─┤ │ ├┤
+ // ┴ └─┘┴ ┴┴ ┴─┘┴ ┴ ┴ └─┘
+ // If the statement is a UNION type, loop through each parent key and
+ // build up a proper query.
+ if (template.queryType === 'union') {
+ var unionStatements = [];
+
+ // Build up an array of generated statements
+ _.each(parentKeys, function buildUnion(parentPk) {
+ var unionStatement = _.merge({}, template.statement);
+
+ // Replace the placeholder `?` values with the primary key of the
+ // parent record.
+ var andClause = _.pullAt(unionStatement.where.and, unionStatement.where.and.length - 1);
+ _.each(_.first(andClause), function replaceValue(val, key) {
+ _.first(andClause)[key] = parentPk;
+ });
+
+ // Add the UNION statement to the array of other statements
+ unionStatement.where.and.push(_.first(andClause));
+ unionStatements.push(unionStatement);
+ });
+
+ // Replace the final statement with the UNION ALL clause
+ if (unionStatements.length) {
+ template.statement = { unionAll: unionStatements };
+ }
+ }
+
+ // If there isn't a statement to be run, then just return
+ if (!template.statement) {
+ return next();
+ }
+
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╠═╝║║ ║╣ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Attempt to convert the statement into a native query
+ var compiledQuery;
+ try {
+ compiledQuery = Helpers.query.compileStatement(template.statement);
+ } catch (e) {
+ return next(e);
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐┬ ┬┬┬ ┌┬┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │ ├─┤││ ││ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘┴ ┴┴┴─┘─┴┘ └─┘└└─┘└─┘┴└─ ┴
+ // Run the native query
+ Helpers.query.runNativeQuery(connection, compiledQuery.nativeQuery, compiledQuery.valuesToEscape, compiledQuery.meta, function parentQueryCb(err, queryResults) {
+ if (err) {
+ return next(err);
+ }
+
+ // Extend the values in the cache to include the values from the
+ // child query.
+ queryCache.extend(queryResults, template.instructions);
+
+ return next();
+ });
+ },
+
+ function asyncEachCb(err) {
+ // Always release the connection unless a leased connection from outside
+ // the adapter was used.
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ if (err) {
+ return exits.error(err);
+ }
+
+ // Combine records in the cache to form nested results
+ var combinedResults = queryCache.combineRecords();
+
+ // Process each record to normalize output
+ try {
+ Helpers.query.processEachRecord({
+ records: combinedResults,
+ identity: model.identity,
+ orm: orm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // Return the combined results
+ return exits.success(combinedResults);
+ }); // releaseConnection >
+ }); // asyncEachCb >
+ }); // runNativeQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/private/connection/create-manager.js b/helpers/private/connection/create-manager.js
new file mode 100644
index 00000000..a5497973
--- /dev/null
+++ b/helpers/private/connection/create-manager.js
@@ -0,0 +1,26 @@
+// ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗
+// ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝
+// ██║ ██████╔╝█████╗ ███████║ ██║ █████╗
+// ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝
+// ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗
+// ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝
+//
+// ███╗ ███╗ █████╗ ███╗ ██╗ █████╗ ██████╗ ███████╗██████╗
+// ████╗ ████║██╔══██╗████╗ ██║██╔══██╗██╔════╝ ██╔════╝██╔══██╗
+// ██╔████╔██║███████║██╔██╗ ██║███████║██║ ███╗█████╗ ██████╔╝
+// ██║╚██╔╝██║██╔══██║██║╚██╗██║██╔══██║██║ ██║██╔══╝ ██╔══██╗
+// ██║ ╚═╝ ██║██║ ██║██║ ╚████║██║ ██║╚██████╔╝███████╗██║ ██║
+// ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝
+//
+// Create a new connection manager to use.
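+//
+// A hypothetical usage sketch (connection string and config values are illustrative):
+//
+//   var createManager = require('./create-manager');
+//   var report = createManager('mysql://user:pass@localhost:3306/mydb', { connectTimeout: 10000 });
+//   // `report` is whatever machinepack-mysql returns synchronously from
+//   // createManager (typically including the `manager` handed to getConnection later).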
+
+var MySQL = require('machinepack-mysql');
+
+module.exports = function createManager(url, config) {
+ var report = MySQL.createManager({
+ connectionString: url,
+ meta: config
+ }).execSync();
+
+ return report;
+};
diff --git a/helpers/private/connection/destroy-manager.js b/helpers/private/connection/destroy-manager.js
new file mode 100644
index 00000000..4f497c6c
--- /dev/null
+++ b/helpers/private/connection/destroy-manager.js
@@ -0,0 +1,30 @@
+// ██████╗ ███████╗███████╗████████╗██████╗ ██████╗ ██╗ ██╗
+// ██╔══██╗██╔════╝██╔════╝╚══██╔══╝██╔══██╗██╔═══██╗╚██╗ ██╔╝
+// ██║ ██║█████╗ ███████╗ ██║ ██████╔╝██║ ██║ ╚████╔╝
+// ██║ ██║██╔══╝ ╚════██║ ██║ ██╔══██╗██║ ██║ ╚██╔╝
+// ██████╔╝███████╗███████║ ██║ ██║ ██║╚██████╔╝ ██║
+// ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝
+//
+// ███╗ ███╗ █████╗ ███╗ ██╗ █████╗ ██████╗ ███████╗██████╗
+// ████╗ ████║██╔══██╗████╗ ██║██╔══██╗██╔════╝ ██╔════╝██╔══██╗
+// ██╔████╔██║███████║██╔██╗ ██║███████║██║ ███╗█████╗ ██████╔╝
+// ██║╚██╔╝██║██╔══██║██║╚██╗██║██╔══██║██║ ██║██╔══╝ ██╔══██╗
+// ██║ ╚═╝ ██║██║ ██║██║ ╚████║██║ ██║╚██████╔╝███████╗██║ ██║
+// ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝
+//
+// Destroys a connection manager.
+
+var MySQL = require('machinepack-mysql');
+
+module.exports = function destroyManager(manager, cb) {
+ MySQL.destroyManager({
+ manager: manager
+ })
+ .exec(function destroyManagerCb(err) {
+ if (err) {
+ return cb(new Error('There was an error destroying the connection manager.\n\n' + err.stack));
+ }
+
+ return cb();
+ });
+};
diff --git a/helpers/private/connection/release-connection.js b/helpers/private/connection/release-connection.js
new file mode 100644
index 00000000..f091a68d
--- /dev/null
+++ b/helpers/private/connection/release-connection.js
@@ -0,0 +1,40 @@
+// ██████╗ ███████╗██╗ ███████╗ █████╗ ███████╗███████╗
+// ██╔══██╗██╔════╝██║ ██╔════╝██╔══██╗██╔════╝██╔════╝
+// ██████╔╝█████╗ ██║ █████╗ ███████║███████╗█████╗
+// ██╔══██╗██╔══╝ ██║ ██╔══╝ ██╔══██║╚════██║██╔══╝
+// ██║ ██║███████╗███████╗███████╗██║ ██║███████║███████╗
+// ╚═╝ ╚═╝╚══════╝╚══════╝╚══════╝╚═╝ ╚═╝╚══════╝╚══════╝
+//
+// ██████╗ ██████╗ ███╗ ██╗███╗ ██╗███████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔════╝██╔═══██╗████╗ ██║████╗ ██║██╔════╝██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ██║ ██║ ██║██╔██╗ ██║██╔██╗ ██║█████╗ ██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ██║ ██║ ██║██║╚██╗██║██║╚██╗██║██╔══╝ ██║ ██║ ██║██║ ██║██║╚██╗██║
+// ╚██████╗╚██████╔╝██║ ╚████║██║ ╚████║███████╗╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═══╝╚══════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+// Release an open database connection.
+
+var MySQL = require('machinepack-mysql');
+
+module.exports = function releaseConnection(connection, leased, cb) {
+ // If this connection was leased outside of the Adapter, don't release it.
+ if (leased) {
+ return setImmediate(function ensureAsync() {
+ return cb();
+ });
+ }
+
+ MySQL.releaseConnection({
+ connection: connection
+ }).switch({
+ error: function error(err) {
+ return cb(new Error('There was an error releasing the connection back into the pool.\n\n' + err.stack));
+ },
+ badConnection: function badConnection() {
+ return cb(new Error('Bad connection when trying to release an active connection.'));
+ },
+ success: function success() {
+ return cb();
+ }
+ });
+};
diff --git a/helpers/private/connection/spawn-connection.js b/helpers/private/connection/spawn-connection.js
new file mode 100644
index 00000000..6fb2900b
--- /dev/null
+++ b/helpers/private/connection/spawn-connection.js
@@ -0,0 +1,40 @@
+// ███████╗██████╗ █████╗ ██╗ ██╗███╗ ██╗
+// ██╔════╝██╔══██╗██╔══██╗██║ ██║████╗ ██║
+// ███████╗██████╔╝███████║██║ █╗ ██║██╔██╗ ██║
+// ╚════██║██╔═══╝ ██╔══██║██║███╗██║██║╚██╗██║
+// ███████║██║ ██║ ██║╚███╔███╔╝██║ ╚████║
+// ╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══╝╚══╝ ╚═╝ ╚═══╝
+//
+// ██████╗ ██████╗ ███╗ ██╗███╗ ██╗███████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔════╝██╔═══██╗████╗ ██║████╗ ██║██╔════╝██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ██║ ██║ ██║██╔██╗ ██║██╔██╗ ██║█████╗ ██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ██║ ██║ ██║██║╚██╗██║██║╚██╗██║██╔══╝ ██║ ██║ ██║██║ ██║██║╚██╗██║
+// ╚██████╗╚██████╔╝██║ ╚████║██║ ╚████║███████╗╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚═════╝ ╚═════╝ ╚═╝ ╚═══╝╚═╝ ╚═══╝╚══════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+// Instantiate a new connection from the connection manager.
+
+var MySQL = require('machinepack-mysql');
+
+module.exports = function spawnConnection(datastore, cb) {
+ // Validate datastore
+ if (!datastore || !datastore.manager || !datastore.config) {
+ return cb(new Error('Spawn Connection requires a valid datastore.'));
+ }
+
+ MySQL.getConnection({
+ manager: datastore.manager,
+ meta: datastore.config
+ })
+ .switch({
+ error: function error(err) {
+ return cb(err);
+ },
+ failed: function failedToConnect(err) {
+ return cb(err);
+ },
+ success: function success(connection) {
+ return cb(null, connection.connection);
+ }
+ });
+};
diff --git a/helpers/private/connection/spawn-or-lease-connection.js b/helpers/private/connection/spawn-or-lease-connection.js
new file mode 100644
index 00000000..dd6c24b1
--- /dev/null
+++ b/helpers/private/connection/spawn-or-lease-connection.js
@@ -0,0 +1,24 @@
+// ███████╗██████╗ █████╗ ██╗ ██╗███╗ ██╗ ██████╗ ██████╗ ██╗ ███████╗ █████╗ ███████╗███████╗
+// ██╔════╝██╔══██╗██╔══██╗██║ ██║████╗ ██║ ██╔═══██╗██╔══██╗ ██║ ██╔════╝██╔══██╗██╔════╝██╔════╝
+// ███████╗██████╔╝███████║██║ █╗ ██║██╔██╗ ██║ ██║ ██║██████╔╝ ██║ █████╗ ███████║███████╗█████╗
+// ╚════██║██╔═══╝ ██╔══██║██║███╗██║██║╚██╗██║ ██║ ██║██╔══██╗ ██║ ██╔══╝ ██╔══██║╚════██║██╔══╝
+// ███████║██║ ██║ ██║╚███╔███╔╝██║ ╚████║ ╚██████╔╝██║ ██║ ███████╗███████╗██║ ██║███████║███████╗
+// ╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══╝╚══╝ ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═╝ ╚══════╝╚══════╝╚═╝ ╚═╝╚══════╝╚══════╝
+//
+// Returns either the leased connection that was passed in to the meta input of
+// a helper or spawns a new connection. This is a normalized helper so the actual
+// helper methods don't need to deal with the branching logic.
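+//
+// For example (illustrative only): a caller that already holds an open
+// connection -- say, for a transaction -- might pass
+// `meta = { leasedConnection: someOpenConnection }`, in which case that
+// connection is handed back here instead of spawning a new one.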
+
+var _ = require('@sailshq/lodash');
+var spawnConnection = require('./spawn-connection');
+
+module.exports = function spawnOrLeaseConnection(datastore, meta, cb) {
+ if (!_.isUndefined(meta) && _.has(meta, 'leasedConnection')) {
+ return setImmediate(function ensureAsync() {
+ cb(null, meta.leasedConnection);
+ });
+ }
+
+ // Otherwise spawn the connection
+ spawnConnection(datastore, cb);
+};
diff --git a/helpers/private/index.js b/helpers/private/index.js
new file mode 100644
index 00000000..2b71db51
--- /dev/null
+++ b/helpers/private/index.js
@@ -0,0 +1,31 @@
+module.exports = {
+ // Helpers for handling connections
+ connection: {
+ createManager: require('./connection/create-manager'),
+ destroyManager: require('./connection/destroy-manager'),
+ releaseConnection: require('./connection/release-connection'),
+ spawnConnection: require('./connection/spawn-connection'),
+ spawnOrLeaseConnection: require('./connection/spawn-or-lease-connection')
+ },
+
+ // Helpers for handling query logic
+ query: {
+ create: require('./query/create'),
+ createEach: require('./query/create-each'),
+ compileStatement: require('./query/compile-statement'),
+ destroy: require('./query/destroy'),
+ initializeQueryCache: require('./query/initialize-query-cache'),
+ processEachRecord: require('./query/process-each-record'),
+ preProcessRecord: require('./query/pre-process-record'),
+ runNativeQuery: require('./query/run-native-query'),
+ runQuery: require('./query/run-query'),
+ update: require('./query/update')
+ },
+
+ // Helpers for dealing with underlying database schema
+ schema: {
+ buildIndexes: require('./schema/build-indexes'),
+ buildSchema: require('./schema/build-schema'),
+ escapeTableName: require('./schema/escape-table-name')
+ }
+};
diff --git a/helpers/private/query/compile-statement.js b/helpers/private/query/compile-statement.js
new file mode 100644
index 00000000..c9af15e2
--- /dev/null
+++ b/helpers/private/query/compile-statement.js
@@ -0,0 +1,26 @@
+// ██████╗ ██████╗ ███╗ ███╗██████╗ ██╗██╗ ███████╗
+// ██╔════╝██╔═══██╗████╗ ████║██╔══██╗██║██║ ██╔════╝
+// ██║ ██║ ██║██╔████╔██║██████╔╝██║██║ █████╗
+// ██║ ██║ ██║██║╚██╔╝██║██╔═══╝ ██║██║ ██╔══╝
+// ╚██████╗╚██████╔╝██║ ╚═╝ ██║██║ ██║███████╗███████╗
+// ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚══════╝
+//
+// ███████╗████████╗ █████╗ ████████╗███████╗███╗ ███╗███████╗███╗ ██╗████████╗
+// ██╔════╝╚══██╔══╝██╔══██╗╚══██╔══╝██╔════╝████╗ ████║██╔════╝████╗ ██║╚══██╔══╝
+// ███████╗ ██║ ███████║ ██║ █████╗ ██╔████╔██║█████╗ ██╔██╗ ██║ ██║
+// ╚════██║ ██║ ██╔══██║ ██║ ██╔══╝ ██║╚██╔╝██║██╔══╝ ██║╚██╗██║ ██║
+// ███████║ ██║ ██║ ██║ ██║ ███████╗██║ ╚═╝ ██║███████╗██║ ╚████║ ██║
+// ╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═══╝ ╚═╝
+//
+// Transform a Waterline Query Statement into a SQL query.
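+//
+// Rough sketch of the transformation (values are hypothetical):
+//
+//   compileStatement({ select: '*', from: 'users', where: { id: 42 } })
+//   // => roughly { nativeQuery: 'SELECT * FROM `users` WHERE `id` = ?',
+//   //              valuesToEscape: [42],
+//   //              meta: undefined }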
+
+var MySQL = require('machinepack-mysql');
+
+module.exports = function compileStatement(statement, meta) {
+ var report = MySQL.compileStatement({
+ statement: statement,
+ meta: meta
+ }).execSync();
+
+ return report;
+};
diff --git a/helpers/private/query/create-each.js b/helpers/private/query/create-each.js
new file mode 100644
index 00000000..69d639ff
--- /dev/null
+++ b/helpers/private/query/create-each.js
@@ -0,0 +1,219 @@
+// ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗ ███████╗ █████╗ ██████╗██╗ ██╗
+// ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝ ██╔════╝██╔══██╗██╔════╝██║ ██║
+// ██║ ██████╔╝█████╗ ███████║ ██║ █████╗ █████╗ ███████║██║ ███████║
+// ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝ ██╔══╝ ██╔══██║██║ ██╔══██║
+// ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗ ███████╗██║ ██║╚██████╗██║ ██║
+// ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝
+//
+// Run creates in order and return the records. This is needed because MySQL
+// lacks the ability to return multiple insert IDs from a bulk insert.
+//
+// So when a createEach call from Waterline is made with the `fetch: true` flag
+// turned on, the records must be inserted one by one in order to return the
+// correct primary keys.
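+//
+// In other words (hypothetical flow): given `fetch: true` and three new records,
+// this helper runs three INSERTs in series, remembers each insert id, and then
+// issues a single SELECT ... WHERE pk IN (...) ORDER BY pk ASC to return the
+// created records in a stable order.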
+
+var _ = require('@sailshq/lodash');
+var async = require('async');
+var compileStatement = require('./compile-statement');
+var runQuery = require('./run-query');
+
+module.exports = function createEach(options, cb) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: connection, statement, fetch, and primaryKey.');
+ }
+
+ if (!_.has(options, 'connection') || !_.isObject(options.connection)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid connection.');
+ }
+
+ if (!_.has(options, 'statement') || !_.isPlainObject(options.statement)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid statement.');
+ }
+
+ if (!_.has(options, 'fetch') || !_.isBoolean(options.fetch)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid fetch flag.');
+ }
+
+ if (!_.has(options, 'primaryKey') || !_.isString(options.primaryKey)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid primaryKey flag.');
+ }
+
+
+ // ███╗ ██╗ ██████╗ ███╗ ██╗ ███████╗███████╗████████╗ ██████╗██╗ ██╗
+ // ████╗ ██║██╔═══██╗████╗ ██║ ██╔════╝██╔════╝╚══██╔══╝██╔════╝██║ ██║
+ // ██╔██╗ ██║██║ ██║██╔██╗ ██║█████╗█████╗ █████╗ ██║ ██║ ███████║
+ // ██║╚██╗██║██║ ██║██║╚██╗██║╚════╝██╔══╝ ██╔══╝ ██║ ██║ ██╔══██║
+ // ██║ ╚████║╚██████╔╝██║ ╚████║ ██║ ███████╗ ██║ ╚██████╗██║ ██║
+ // ╚═╝ ╚═══╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═╝ ╚══════╝ ╚═╝ ╚═════╝╚═╝ ╚═╝
+ //
+ // ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗
+ // ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝
+ // ██║ ██████╔╝█████╗ ███████║ ██║ █████╗
+ // ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝
+ // ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗
+ // ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝
+ //
+ // If the fetch flag was used, then the statement will need to be broken up
+ // into a series of async queries. Otherwise just run a bulk insert.
+ if (!options.fetch) {
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the statement into a native query.
+ var compiledQuery;
+ try {
+ compiledQuery = compileStatement(options.statement, options.meta);
+ } catch (e) {
+ // If the statement could not be compiled, return an error.
+ return cb(e);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the initial query (bulk insert)
+ runQuery({
+ connection: options.connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ disconnectOnError: false,
+ queryType: 'insert'
+ },
+
+ function runQueryCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ return cb(undefined, report.result);
+ });
+
+ // Return early
+ return;
+ }
+
+
+ // ███████╗███████╗████████╗ ██████╗██╗ ██╗ ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗
+ // ██╔════╝██╔════╝╚══██╔══╝██╔════╝██║ ██║ ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝
+ // █████╗ █████╗ ██║ ██║ ███████║ ██║ ██████╔╝█████╗ ███████║ ██║ █████╗
+ // ██╔══╝ ██╔══╝ ██║ ██║ ██╔══██║ ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝
+ // ██║ ███████╗ ██║ ╚██████╗██║ ██║ ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗
+ // ╚═╝ ╚══════╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝
+ //
+ // Break apart the statement's insert records and create a single create query
+ // for each one. Collect the resulting insert IDs so they can be returned.
+ var newRecords = options.statement.insert;
+ var insertIds = [];
+
+ // Be sure to run these in series so that the insert order is maintained.
+ async.eachSeries(newRecords, function runCreateQuery(record, nextRecord) {
+ // Build up a statement to use.
+ var statement = {
+ insert: record,
+ into: options.statement.into
+ };
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the statement into a native query.
+ var compiledQuery;
+ try {
+ compiledQuery = compileStatement(statement);
+ } catch (e) {
+ // If the statement could not be compiled, return an error.
+ return nextRecord(e);
+ }
+
+ var insertOptions = {
+ connection: options.connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ disconnectOnError: false,
+ queryType: 'insert'
+ };
+
+ // Determine if a custom primary key value was used. If so pass it down so that
+ // the report can be used correctly. MySQL doesn't return these values.
+ if (statement.insert[options.primaryKey]) {
+ insertOptions.customPrimaryKey = statement.insert[options.primaryKey];
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the insert query for this single record
+ runQuery(insertOptions, function runQueryCb(err, report) {
+ if (err) {
+ return nextRecord(err);
+ }
+
+ // Add the insert id to the array
+ insertIds.push(report.result.inserted);
+
+ return nextRecord(undefined, report.result);
+ });
+ },
+
+ function fetchCreateCb(err) {
+ if (err) {
+ return cb(err);
+ }
+
+
+ // ╔═╗╔═╗╦═╗╔═╗╔═╗╦═╗╔╦╗ ┌┬┐┬ ┬┌─┐ ┌─┐┌─┐┌┬┐┌─┐┬ ┬
+ // ╠═╝║╣ ╠╦╝╠╣ ║ ║╠╦╝║║║ │ ├─┤├┤ ├┤ ├┤ │ │ ├─┤
+ // ╩ ╚═╝╩╚═╚ ╚═╝╩╚═╩ ╩ ┴ ┴ ┴└─┘ └ └─┘ ┴ └─┘┴ ┴
+ var fetchStatement = {
+ select: '*',
+ from: options.statement.into,
+ where: {},
+ orderBy: [{}]
+ };
+
+ // Sort the records by primary key
+ fetchStatement.orderBy[0][options.primaryKey] = 'ASC';
+
+ // Build up the WHERE clause for the statement to get the newly inserted
+ // records.
+ fetchStatement.where[options.primaryKey] = { 'in': insertIds };
+
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the statement into a native query.
+ var compiledQuery;
+ try {
+ compiledQuery = compileStatement(fetchStatement);
+ } catch (err) {
+ // If the statement could not be compiled, return an error.
+ return cb(err);
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the fetch query.
+ runQuery({
+ connection: options.connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ disconnectOnError: false,
+ queryType: 'select'
+ }, function runQueryCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ return cb(undefined, report.result);
+ });
+ });
+};
diff --git a/helpers/private/query/create.js b/helpers/private/query/create.js
new file mode 100644
index 00000000..3afaf970
--- /dev/null
+++ b/helpers/private/query/create.js
@@ -0,0 +1,129 @@
+// ██████╗██████╗ ███████╗ █████╗ ████████╗███████╗
+// ██╔════╝██╔══██╗██╔════╝██╔══██╗╚══██╔══╝██╔════╝
+// ██║ ██████╔╝█████╗ ███████║ ██║ █████╗
+// ██║ ██╔══██╗██╔══╝ ██╔══██║ ██║ ██╔══╝
+// ╚██████╗██║ ██║███████╗██║ ██║ ██║ ███████╗
+// ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ ╚═╝ ╚══════╝
+//
+// Perform a create query and fetch the record if needed.
+
+var _ = require('@sailshq/lodash');
+var compileStatement = require('./compile-statement');
+var runQuery = require('./run-query');
+
+module.exports = function create(options, cb) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: connection, statement, fetch, and primaryKey.');
+ }
+
+ if (!_.has(options, 'connection') || !_.isObject(options.connection)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid connection.');
+ }
+
+ if (!_.has(options, 'statement') || !_.isPlainObject(options.statement)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid statement.');
+ }
+
+ if (!_.has(options, 'fetch') || !_.isBoolean(options.fetch)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid fetch flag.');
+ }
+
+ if (!_.has(options, 'primaryKey') || !_.isString(options.primaryKey)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid primaryKey flag.');
+ }
+
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the statement into a native query.
+ var compiledQuery;
+ try {
+ compiledQuery = compileStatement(options.statement);
+ } catch (e) {
+ // If the statement could not be compiled, return an error.
+ return cb(e);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the initial insert query
+
+ var insertOptions = {
+ connection: options.connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ disconnectOnError: false,
+ queryType: 'insert'
+ };
+
+ // Determine if a custom primary key value was used. If so pass it down so that
+ // the report can be used correctly. MySQL doesn't return these values.
+ if (options.statement.insert[options.primaryKey]) {
+ insertOptions.customPrimaryKey = options.statement.insert[options.primaryKey];
+ }
+
+
+ runQuery(insertOptions, function runQueryCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ // If no fetch was used, then nothing else needs to be done.
+ if (!options.fetch) {
+ return cb(undefined, report.result);
+ }
+
+ // ╔═╗╔═╗╦═╗╔═╗╔═╗╦═╗╔╦╗ ┌┬┐┬ ┬┌─┐ ┌─┐┌─┐┌┬┐┌─┐┬ ┬
+ // ╠═╝║╣ ╠╦╝╠╣ ║ ║╠╦╝║║║ │ ├─┤├┤ ├┤ ├┤ │ │ ├─┤
+ // ╩ ╚═╝╩╚═╚ ╚═╝╩╚═╩ ╩ ┴ ┴ ┴└─┘ └ └─┘ ┴ └─┘┴ ┴
+ // Otherwise, fetch the newly inserted record
+ var fetchStatement = {
+ select: '*',
+ from: options.statement.into,
+ where: {}
+ };
+
+ // Build up the WHERE clause for the statement to get the newly inserted
+ // records.
+ fetchStatement.where[options.primaryKey] = report.result.inserted;
+
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the statement into a native query.
+ var compiledQuery;
+ try {
+ compiledQuery = compileStatement(fetchStatement);
+ } catch (err) {
+ // If the statement could not be compiled, return an error.
+ return cb(err);
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the fetch query.
+ runQuery({
+ connection: options.connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ disconnectOnError: false,
+ queryType: 'select'
+ }, function runQueryCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ return cb(undefined, report.result);
+ });
+ });
+};
diff --git a/helpers/private/query/destroy.js b/helpers/private/query/destroy.js
new file mode 100644
index 00000000..223f75d0
--- /dev/null
+++ b/helpers/private/query/destroy.js
@@ -0,0 +1,136 @@
+// ██████╗ ███████╗███████╗████████╗██████╗ ██████╗ ██╗ ██╗
+// ██╔══██╗██╔════╝██╔════╝╚══██╔══╝██╔══██╗██╔═══██╗╚██╗ ██╔╝
+// ██║ ██║█████╗ ███████╗ ██║ ██████╔╝██║ ██║ ╚████╔╝
+// ██║ ██║██╔══╝ ╚════██║ ██║ ██╔══██╗██║ ██║ ╚██╔╝
+// ██████╔╝███████╗███████║ ██║ ██║ ██║╚██████╔╝ ██║
+// ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝
+//
+// Destroy the record(s) and return the values that were destroyed if needed.
+// If a fetch was requested, the records must first be looked up (with the
+// primary key selected) so their values can be returned after the destroy.
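+//
+// A sketch of the expected `options` (values are illustrative; the statement
+// shape is an assumption based on the Waterline statement language):
+//
+//   {
+//     connection: <an active MySQL connection>,
+//     statement: { del: true, from: 'users', where: { ... } },
+//     fetch: true,
+//     primaryKey: 'id'
+//   }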
+
+var _ = require('@sailshq/lodash');
+var runQuery = require('./run-query');
+var compileStatement = require('./compile-statement');
+
+
+module.exports = function destroyRecords(options, cb) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: connection, statement, fetch, and primaryKey.');
+ }
+
+ if (!_.has(options, 'connection') || !_.isObject(options.connection)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid connection.');
+ }
+
+ if (!_.has(options, 'statement') || !_.isPlainObject(options.statement)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid statement.');
+ }
+
+ if (!_.has(options, 'primaryKey') || !_.isString(options.primaryKey)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid primaryKey.');
+ }
+
+ if (!_.has(options, 'fetch') || !_.isBoolean(options.fetch)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid fetch flag.');
+ }
+
+
+ // ╔═╗╔═╗╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐ ┌┐ ┌─┐┬┌┐┌┌─┐ ┌┬┐┌─┐┌─┐┌┬┐┬─┐┌─┐┬ ┬┌─┐┌┬┐
+ // ║ ╦║╣ ║ ├┬┘├┤ │ │ │├┬┘ ││└─┐ ├┴┐├┤ │││││ ┬ ││├┤ └─┐ │ ├┬┘│ │└┬┘├┤ ││
+ // ╚═╝╚═╝ ╩ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘ └─┘└─┘┴┘└┘└─┘ ─┴┘└─┘└─┘ ┴ ┴└─└─┘ ┴ └─┘─┴┘
+ // If a fetch is used, the records that will be destroyed need to be found first.
+ // This is because MySQL's DELETE doesn't return the affected rows, so to
+ // (semi-)accurately report what was destroyed the records are found first,
+ // then destroyed.
+ (function getRecordsToDestroy(proceed) {
+ // Only look up the records if fetch was used
+ if (!options.fetch) {
+ return proceed();
+ }
+
+ // Otherwise build up a select query
+ var fetchStatement = {
+ from: options.statement.from,
+ where: options.statement.where
+ };
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the statement into a native query.
+ var compiledFetchQuery;
+ try {
+ compiledFetchQuery = compileStatement(fetchStatement);
+ } catch (e) {
+ // If the statement could not be compiled, return an error.
+ return proceed(e);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the initial find query
+ runQuery({
+ connection: options.connection,
+ nativeQuery: compiledFetchQuery.nativeQuery,
+ valuesToEscape: compiledFetchQuery.valuesToEscape,
+ meta: compiledFetchQuery.meta,
+ disconnectOnError: false,
+ queryType: 'select'
+ },
+
+ function runQueryCb(err, report) {
+ if (err) {
+ return proceed(err);
+ }
+
+ return proceed(undefined, report);
+ });
+ })(function afterInitialFetchCb(err, selectReport) {
+ if (err) {
+ return cb(err);
+ }
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the destroy statement into a native query.
+ var compiledDestroyQuery;
+ try {
+ compiledDestroyQuery = compileStatement(options.statement);
+ } catch (e) {
+ // If the statement could not be compiled, return an error.
+ return cb(e);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the destroy query
+ runQuery({
+ connection: options.connection,
+ nativeQuery: compiledDestroyQuery.nativeQuery,
+ valuesToEscape: compiledDestroyQuery.valuesToEscape,
+ meta: compiledDestroyQuery.meta,
+ disconnectOnError: false,
+ queryType: 'destroy'
+ },
+
+ function runQueryCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ // If no fetch was used, then nothing else needs to be done.
+ if (!options.fetch) {
+ return cb(undefined, report.result);
+ }
+
+ // Otherwise, return the selected records
+ return cb(undefined, selectReport.result);
+ });
+ });
+};
diff --git a/helpers/private/query/initialize-query-cache.js b/helpers/private/query/initialize-query-cache.js
new file mode 100644
index 00000000..bd327443
--- /dev/null
+++ b/helpers/private/query/initialize-query-cache.js
@@ -0,0 +1,142 @@
+// ██╗███╗ ██╗██╗████████╗██╗ █████╗ ██╗ ██╗███████╗███████╗
+// ██║████╗ ██║██║╚══██╔══╝██║██╔══██╗██║ ██║╚══███╔╝██╔════╝
+// ██║██╔██╗ ██║██║ ██║ ██║███████║██║ ██║ ███╔╝ █████╗
+// ██║██║╚██╗██║██║ ██║ ██║██╔══██║██║ ██║ ███╔╝ ██╔══╝
+// ██║██║ ╚████║██║ ██║ ██║██║ ██║███████╗██║███████╗███████╗
+// ╚═╝╚═╝ ╚═══╝╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝╚═╝╚══════╝╚══════╝
+//
+// ██████╗ ██╗ ██╗███████╗██████╗ ██╗ ██╗ ██████╗ █████╗ ██████╗██╗ ██╗███████╗
+// ██╔═══██╗██║ ██║██╔════╝██╔══██╗╚██╗ ██╔╝ ██╔════╝██╔══██╗██╔════╝██║ ██║██╔════╝
+// ██║ ██║██║ ██║█████╗ ██████╔╝ ╚████╔╝ ██║ ███████║██║ ███████║█████╗
+// ██║▄▄ ██║██║ ██║██╔══╝ ██╔══██╗ ╚██╔╝ ██║ ██╔══██║██║ ██╔══██║██╔══╝
+// ╚██████╔╝╚██████╔╝███████╗██║ ██║ ██║ ╚██████╗██║ ██║╚██████╗██║ ██║███████╗
+// ╚══▀▀═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝╚══════╝
+//
+// Builds up a query cache for use when native joins are performed. This is
+// needed because in some cases a join can't be fulfilled in a single query.
+// The Query Cache is responsible for holding intermediate values until all of
+// the operations are completed. The records can then be nested together and
+// returned as a single array of nested values.
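+//
+// The incoming `sortedResults` are expected to look roughly like the following
+// (keys and values are purely illustrative):
+//
+//   {
+//     parents: [ { id: 1, ... }, { id: 2, ... } ],
+//     children: { pets: [ { owner: 1, ... } ] }
+//   }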
+
+var _ = require('@sailshq/lodash');
+var utils = require('waterline-utils');
+
+module.exports = function initializeQueryCache(options) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: connection, query, model, schemaName, and tableName.');
+ }
+
+ if (!_.has(options, 'instructions') || !_.isPlainObject(options.instructions)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid instructions.');
+ }
+
+ if (!_.has(options, 'models') || !_.isPlainObject(options.models)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid models.');
+ }
+
+ if (!_.has(options, 'sortedResults') || !_.isPlainObject(options.sortedResults)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid sortedResults.');
+ }
+
+
+ // ╔╗ ╦ ╦╦╦ ╔╦╗ ┌┐┌┌─┐┬ ┬ ┌─┐┌─┐┌─┐┬ ┬┌─┐
+ // ╠╩╗║ ║║║ ║║ │││├┤ │││ │ ├─┤│ ├─┤├┤
+ // ╚═╝╚═╝╩╩═╝═╩╝ ┘└┘└─┘└┴┘ └─┘┴ ┴└─┘┴ ┴└─┘
+ // Build up a new cache to use to hold query results
+ var queryCache = utils.joins.queryCache();
+
+
+ // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌─┐┌─┐┌─┐┬ ┬┌─┐ ┬ ┬┌─┐┬ ┬ ┬┌─┐┌─┐
+ // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │ ├─┤│ ├─┤├┤ └┐┌┘├─┤│ │ │├┤ └─┐
+ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ └─┘┴ ┴└─┘┴ ┴└─┘ └┘ ┴ ┴┴─┘└─┘└─┘└─┘
+ _.each(options.instructions, function processInstruction(val, key) {
+ // Grab the instructions that define a particular join set
+ var popInstructions = val.instructions;
+
+ // Grab the strategy used for the join
+ var strategy = val.strategy.strategy;
+
+ // Find the Primary Key of the parent used in the join
+ var model = options.models[_.first(popInstructions).parent];
+ if (!model) {
+ throw new Error('Invalid parent table name used when caching query results. Perhaps the join criteria is invalid?');
+ }
+
+ var pkAttr = model.primaryKey;
+ var pkColumnName = model.definition[pkAttr].columnName || pkAttr;
+
+ // Build an alias to use for the association. The alias is the name of the
+ // association defined by the user. It's created whenever a `model` or
+ // `collection` association attribute is defined on a model.
+ var alias;
+
+ // Hold an optional keyName to use in strategy 1. This represents the
+ // foreign key value on the parent that will be replaced by the populated
+ // value.
+ var keyName;
+
+ // If the join strategy is a hasFk strategy this means the parent contains
+ // the value being populated - i.e. populating a model record. Therefore
+ // the keyName is the name of the attribute on the parent record.
+ if (val.strategy && val.strategy.strategy === 1) {
+ alias = _.first(popInstructions).alias;
+ keyName = _.first(popInstructions).parentKey;
+
+ // Otherwise this must be a collection populating so just grab the alias
+ // directly off the instructions.
+ } else {
+ alias = _.first(popInstructions).alias;
+ }
+
+
+ // Process each of the parents and build up a local cache containing
+ // values for the populated children.
+ _.each(options.sortedResults.parents, function buildAliasCache(parentRecord) {
+ var cache = {
+ attrName: key,
+ parentPkAttr: pkColumnName,
+ belongsToPkValue: parentRecord[pkColumnName],
+ keyName: keyName || alias,
+ type: strategy
+ };
+
+ // Grab the join keys used in the query
+ var childKey = _.first(popInstructions).childKey;
+ var parentKey = _.first(popInstructions).parentKey;
+
+ // Find any records in the children that match up to the join keys
+ var records = _.filter(options.sortedResults.children[alias], function findChildren(child) {
+ // If this is a VIA_JUNCTOR join, use the foreign key we built up,
+ // otherwise check equality between child and parent join keys.
+ if (strategy === 3) {
+ return child._parent_fk === parentRecord[parentKey];
+ }
+
+ return child[childKey] === parentRecord[parentKey];
+ });
+
+ // If this is a many-to-many strategy, be sure to clear the foreign
+ // key value that was added as part of the join process. The end user
+ // doesn't care about that.
+ if (strategy === 3) {
+ _.each(records, function cleanRecords(record) {
+ delete record._parent_fk;
+ });
+ }
+
+ // Store the child on the cache
+ if (records.length) {
+ cache.records = records;
+ }
+
+ // Store the local cache value in the query cache
+ queryCache.set(cache);
+ }); // buildAliasCache >
+ }); // processInstructions >
+
+ // Return the QueryCache
+ return queryCache;
+};
diff --git a/helpers/private/query/pre-process-record.js b/helpers/private/query/pre-process-record.js
new file mode 100644
index 00000000..7cd3a6de
--- /dev/null
+++ b/helpers/private/query/pre-process-record.js
@@ -0,0 +1,87 @@
+// ██████╗ ██████╗ ███████╗ ██████╗ ██████╗ ██████╗ ██████╗███████╗███████╗███████╗
+// ██╔══██╗██╔══██╗██╔════╝ ██╔══██╗██╔══██╗██╔═══██╗██╔════╝██╔════╝██╔════╝██╔════╝
+// ██████╔╝██████╔╝█████╗█████╗██████╔╝██████╔╝██║ ██║██║ █████╗ ███████╗███████╗
+// ██╔═══╝ ██╔══██╗██╔══╝╚════╝██╔═══╝ ██╔══██╗██║ ██║██║ ██╔══╝ ╚════██║╚════██║
+// ██║ ██║ ██║███████╗ ██║ ██║ ██║╚██████╔╝╚██████╗███████╗███████║███████║
+// ╚═╝ ╚═╝ ╚═╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝╚══════╝╚══════╝╚══════╝
+//
+// ██████╗ ███████╗ ██████╗ ██████╗ ██████╗ ██████╗ SSSSSS
+// ██╔══██╗██╔════╝██╔════╝██╔═══██╗██╔══██╗██╔══██╗ S
+// ██████╔╝█████╗ ██║ ██║ ██║██████╔╝██║ ██║ SSSSSS
+// ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗██║ ██║ S
+// ██║ ██║███████╗╚██████╗╚██████╔╝██║ ██║██████╔╝ S
+// ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═════╝ SSSSSSS
+//
+
+var _ = require('@sailshq/lodash');
+var utils = require('waterline-utils');
+var eachRecordDeep = utils.eachRecordDeep;
+
+
+
+/**
+ * [exports description]
+ *
+ * TODO: Document this utility
+ *
+ * TODO: change the name of this utility to reflect the fact that its job is
+ * to pre-process new incoming records (plural)
+ *
+ * @param {[type]} options [description]
+ * @return {[type]} [description]
+ */
+module.exports = function preProcessRecord(options) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: records, identity, and orm.');
+ }
+
+ if (!_.has(options, 'records') || !_.isArray(options.records)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid records.');
+ }
+
+ if (!_.has(options, 'identity') || !_.isString(options.identity)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid identity.');
+ }
+
+ if (!_.has(options, 'orm') || !_.isPlainObject(options.orm)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid orm.');
+ }
+
+ // Key the collections by identity instead of column name
+ var collections = _.reduce(options.orm.collections, function(memo, val) {
+ memo[val.identity] = val;
+ return memo;
+ }, {});
+
+ options.orm.collections = collections;
+
+ // Run all the new, incoming records through the iterator so that they can be normalized
+ // with anything adapter-specific before getting written to the database.
+ // > (This should *never* go more than one level deep!)
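+ // For instance (hypothetical): `{ prefs: { theme: 'dark' } }` for a
+ // `type: 'json'` attribute becomes `{ prefs: '{"theme":"dark"}' }`, while a
+ // literal `null` is left untouched.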
+ eachRecordDeep(options.records, function iterator(record, WLModel, depth) {
+ if (depth !== 1) {
+ throw new Error('Consistency violation: Incoming new records in a s3q should never necessitate deep iteration! If you are seeing this error, it is probably because of a bug in this adapter, or in Waterline core.');
+ }
+
+ _.each(WLModel.definition, function checkAttributes(attrDef) {
+ var columnName = attrDef.columnName;
+
+ // JSON stringify the values provided for any `type: 'json'` attributes
+ // because MySQL can't store JSON.
+ if (attrDef.type === 'json' && _.has(record, columnName)) {
+
+ // Special case: If this is the `null` literal, leave it alone.
+ // But otherwise, stringify it into a JSON string.
+ // (even if it's already a string!)
+ if (!_.isNull(record[columnName])) {
+ record[columnName] = JSON.stringify(record[columnName]);
+ }
+
+ }//>-
+
+ });
+ }, true, options.identity, options.orm);
+};
diff --git a/helpers/private/query/process-each-record.js b/helpers/private/query/process-each-record.js
new file mode 100644
index 00000000..1b8661ba
--- /dev/null
+++ b/helpers/private/query/process-each-record.js
@@ -0,0 +1,82 @@
+// ██████╗ ██████╗ ██████╗ ██████╗███████╗███████╗███████╗ ███████╗ █████╗ ██████╗██╗ ██╗
+// ██╔══██╗██╔══██╗██╔═══██╗██╔════╝██╔════╝██╔════╝██╔════╝ ██╔════╝██╔══██╗██╔════╝██║ ██║
+// ██████╔╝██████╔╝██║ ██║██║ █████╗ ███████╗███████╗ █████╗ ███████║██║ ███████║
+// ██╔═══╝ ██╔══██╗██║ ██║██║ ██╔══╝ ╚════██║╚════██║ ██╔══╝ ██╔══██║██║ ██╔══██║
+// ██║ ██║ ██║╚██████╔╝╚██████╗███████╗███████║███████║ ███████╗██║ ██║╚██████╗██║ ██║
+// ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝╚══════╝╚══════╝╚══════╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝
+//
+// ██████╗ ███████╗ ██████╗ ██████╗ ██████╗ ██████╗
+// ██╔══██╗██╔════╝██╔════╝██╔═══██╗██╔══██╗██╔══██╗
+// ██████╔╝█████╗ ██║ ██║ ██║██████╔╝██║ ██║
+// ██╔══██╗██╔══╝ ██║ ██║ ██║██╔══██╗██║ ██║
+// ██║ ██║███████╗╚██████╗╚██████╔╝██║ ██║██████╔╝
+// ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═════╝
+//
+
+var _ = require('@sailshq/lodash');
+var utils = require('waterline-utils');
+var eachRecordDeep = utils.eachRecordDeep;
+
+module.exports = function processEachRecord(options) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: records, identity, and orm.');
+ }
+
+ if (!_.has(options, 'records') || !_.isArray(options.records)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid records.');
+ }
+
+ if (!_.has(options, 'identity') || !_.isString(options.identity)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid identity.');
+ }
+
+ if (!_.has(options, 'orm') || !_.isPlainObject(options.orm)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid orm.');
+ }
+
+ // Key the collections by identity instead of column name
+ var collections = _.reduce(options.orm.collections, function(memo, val) {
+ memo[val.identity] = val;
+ return memo;
+ }, {});
+
+ options.orm.collections = collections;
+
+ // Run all the records through the iterator so that they can be normalized.
+ eachRecordDeep(options.records, function iterator(record, WLModel) {
+ // Check if the record and the model contain any boolean types.
+ // Because MySQL returns these as binary (0, 1) they must be
+ // transformed into true/false values.
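+ // For instance (hypothetical): a raw row like `{ isAdmin: 1, prefs: '{"a":1}' }`
+ // would come back as `{ isAdmin: true, prefs: { a: 1 } }` after this pass.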
+ _.each(WLModel.definition, function checkAttributes(attrDef) {
+ var columnName = attrDef.columnName;
+
+ if (attrDef.type === 'boolean' && _.has(record, columnName)) {
+ if (!_.isBoolean(record[columnName])) {
+ if (record[columnName] === 0) {
+ record[columnName] = false;
+ }
+
+ if (record[columnName] === 1) {
+ record[columnName] = true;
+ }
+ }
+ }
+
+ // JSON parse any type of JSON column type
+ if (attrDef.type === 'json' && _.has(record, columnName)) {
+
+ // Special case: If it came back as the `null` literal, leave it alone
+ if (_.isNull(record[columnName])) {
+ return;
+ }
+
+ // But otherwise, assume it's a JSON string and try to parse it
+ record[columnName] = JSON.parse(record[columnName]);
+ }
+
+ });
+ }, true, options.identity, options.orm);
+};
diff --git a/helpers/private/query/run-native-query.js b/helpers/private/query/run-native-query.js
new file mode 100644
index 00000000..04c7af2a
--- /dev/null
+++ b/helpers/private/query/run-native-query.js
@@ -0,0 +1,67 @@
+// ██████╗ ██╗ ██╗███╗ ██╗ ███╗ ██╗ █████╗ ████████╗██╗██╗ ██╗███████╗
+// ██╔══██╗██║ ██║████╗ ██║ ████╗ ██║██╔══██╗╚══██╔══╝██║██║ ██║██╔════╝
+// ██████╔╝██║ ██║██╔██╗ ██║ ██╔██╗ ██║███████║ ██║ ██║██║ ██║█████╗
+// ██╔══██╗██║ ██║██║╚██╗██║ ██║╚██╗██║██╔══██║ ██║ ██║╚██╗ ██╔╝██╔══╝
+// ██║ ██║╚██████╔╝██║ ╚████║ ██║ ╚████║██║ ██║ ██║ ██║ ╚████╔╝ ███████╗
+// ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═╝ ╚═══╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═══╝ ╚══════╝
+//
+// ██████╗ ██╗ ██╗███████╗██████╗ ██╗ ██╗
+// ██╔═══██╗██║ ██║██╔════╝██╔══██╗╚██╗ ██╔╝
+// ██║ ██║██║ ██║█████╗ ██████╔╝ ╚████╔╝
+// ██║▄▄ ██║██║ ██║██╔══╝ ██╔══██╗ ╚██╔╝
+// ╚██████╔╝╚██████╔╝███████╗██║ ██║ ██║
+// ╚══▀▀═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═╝
+//
+// Run a native SQL query on an open connection and return the raw results.
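+// Usage sketch (the query string below is purely illustrative):
+//
+//   runNativeQuery(connection, 'SELECT 1 AS answer;', [], undefined, function(err, rows) {
+//     if (err) { /* handle error (it may have a `footprint` attached) */ return; }
+//     // `rows` is the raw result set, e.g. [ { answer: 1 } ]
+//   });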
+
+var _ = require('@sailshq/lodash');
+var MySQL = require('machinepack-mysql');
+
+module.exports = function runNativeQuery(connection, query, valuesToEscape, meta, cb) {
+ MySQL.sendNativeQuery({
+ connection: connection,
+ nativeQuery: query,
+ valuesToEscape: valuesToEscape,
+ meta: meta
+ })
+ .switch({
+ error: function error(err) {
+ return cb(err);
+ },
+
+ // If the query failed, try to parse the error into a normalized format.
+ queryFailed: function queryFailed(report) {
+ // Parse the native query error into a normalized format
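+ // (For reference: the normalized footprint has an `identity` property, e.g.
+ // `{ identity: 'notUnique', ... }` for duplicate-key errors, or
+ // `{ identity: 'catchall' }` when the error wasn't recognized.)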
+ var parsedError;
+ try {
+ parsedError = MySQL.parseNativeQueryError({
+ nativeQueryError: report.error
+ }).execSync();
+ } catch (e) {
+ return cb(e);
+ }
+
+ // If the catch-all error was used, return the raw error instance instead of
+ // the footprint.
+ var catchAllError = false;
+
+ if (parsedError.footprint.identity === 'catchall') {
+ catchAllError = true;
+ }
+
+ if (catchAllError) {
+ return cb(report.error);
+ }
+
+ // Attach parsed error as footprint on the native query error
+ if (!_.has(report.error, 'footprint')) {
+ report.error.footprint = parsedError;
+ }
+
+ return cb(report.error);
+ },
+ success: function success(report) {
+ return cb(null, report.result.rows);
+ }
+ });
+};
diff --git a/helpers/private/query/run-query.js b/helpers/private/query/run-query.js
new file mode 100644
index 00000000..fc418000
--- /dev/null
+++ b/helpers/private/query/run-query.js
@@ -0,0 +1,130 @@
+// ██████╗ ██╗ ██╗███╗ ██╗ ██████╗ ██╗ ██╗███████╗██████╗ ██╗ ██╗
+// ██╔══██╗██║ ██║████╗ ██║ ██╔═══██╗██║ ██║██╔════╝██╔══██╗╚██╗ ██╔╝
+// ██████╔╝██║ ██║██╔██╗ ██║ ██║ ██║██║ ██║█████╗ ██████╔╝ ╚████╔╝
+// ██╔══██╗██║ ██║██║╚██╗██║ ██║▄▄ ██║██║ ██║██╔══╝ ██╔══██╗ ╚██╔╝
+// ██║ ██║╚██████╔╝██║ ╚████║ ╚██████╔╝╚██████╔╝███████╗██║ ██║ ██║
+// ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚══▀▀═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═╝
+//
+// Send a Native Query to the datastore and gracefully handle errors.
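+// Usage sketch (option values shown here are illustrative):
+//
+//   runQuery({
+//     connection: connection,
+//     nativeQuery: compiledQuery.nativeQuery,
+//     valuesToEscape: compiledQuery.valuesToEscape,
+//     meta: compiledQuery.meta,
+//     disconnectOnError: true,
+//     queryType: 'select'
+//   }, function(err, parsedResult) { /* ... */ });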
+
+var _ = require('@sailshq/lodash');
+var MySQL = require('machinepack-mysql');
+var releaseConnection = require('../connection/release-connection');
+
+module.exports = function runQuery(options, cb) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: connection, nativeQuery, and leased.');
+ }
+
+ if (!_.has(options, 'connection') || !_.isObject(options.connection)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid connection.');
+ }
+
+ if (!_.has(options, 'nativeQuery')) {
+ throw new Error('Invalid option used in options argument. Missing or invalid nativeQuery.');
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌┐┌┌─┐┌┬┐┬┬ ┬┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │││├─┤ │ │└┐┌┘├┤ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ ┘└┘┴ ┴ ┴ ┴ └┘ └─┘ └─┘└└─┘└─┘┴└─ ┴
+ MySQL.sendNativeQuery({
+ connection: options.connection,
+ nativeQuery: options.nativeQuery,
+ valuesToEscape: options.valuesToEscape,
+ meta: options.meta
+ })
+ .switch({
+ // If there was an error, check if the connection should be
+ // released back into the pool automatically.
+ error: function error(err) {
+ if (!options.disconnectOnError) {
+ return cb(err);
+ }
+
+ releaseConnection(options.connection, options.leased, function releaseConnectionCb(err) {
+ return cb(err);
+ });
+ },
+ // If the query failed, try to parse the error into a normalized format and
+ // release the connection if needed.
+ queryFailed: function queryFailed(report) {
+ // Parse the native query error into a normalized format
+ var parsedError;
+ try {
+ parsedError = MySQL.parseNativeQueryError({
+ nativeQueryError: report.error
+ }).execSync();
+ } catch (e) {
+ if (!options.disconnectOnError) {
+ return cb(e);
+ }
+
+ releaseConnection(options.connection, false, function releaseConnectionCb() {
+ return cb(e);
+ });
+ return;
+ }
+
+ // If the catch-all error was used, return the raw error instance instead of
+ // the footprint.
+ var catchAllError = false;
+
+ if (parsedError.footprint.identity === 'catchall') {
+ catchAllError = true;
+ }
+
+ // If this shouldn't disconnect the connection, just return the normalized
+ // error with the footprint.
+ if (!options.disconnectOnError) {
+ if (catchAllError) {
+ return cb(report.error);
+ }
+
+ return cb(parsedError);
+ }
+
+ releaseConnection(options.connection, false, function releaseConnectionCb() {
+ if (catchAllError) {
+ return cb(report.error);
+ }
+
+ return cb(parsedError);
+ });
+ },
+ success: function success(report) {
+ // If a custom primary key was used and the record has an `insert` query
+ // type, build a manual insert report, because MySQL won't return an
+ // auto-generated id for a manually supplied primary key value.
+ if (options.customPrimaryKey) {
+ return cb(null, {
+ result: {
+ inserted: options.customPrimaryKey
+ }
+ });
+ }
+
+
+ // ╔═╗╔═╗╦═╗╔═╗╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬ ┬─┐┌─┐┌─┐┬ ┬┬ ┌┬┐┌─┐
+ // ╠═╝╠═╣╠╦╝╚═╗║╣ │─┼┐│ │├┤ ├┬┘└┬┘ ├┬┘├┤ └─┐│ ││ │ └─┐
+ // ╩ ╩ ╩╩╚═╚═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴ ┴└─└─┘└─┘└─┘┴─┘┴ └─┘
+ // If there was a query type given, parse the results.
+ var queryResults = report.result;
+ if (options.queryType) {
+ try {
+ queryResults = MySQL.parseNativeQueryResult({
+ queryType: options.queryType,
+ nativeQueryResult: report.result
+ }).execSync();
+ } catch (e) {
+ return cb(e);
+ }
+ }
+
+ return cb(null, queryResults);
+ }
+ });
+};
diff --git a/helpers/private/query/update.js b/helpers/private/query/update.js
new file mode 100644
index 00000000..84dc0868
--- /dev/null
+++ b/helpers/private/query/update.js
@@ -0,0 +1,206 @@
+// ██╗ ██╗██████╗ ██████╗ █████╗ ████████╗███████╗
+// ██║ ██║██╔══██╗██╔══██╗██╔══██╗╚══██╔══╝██╔════╝
+// ██║ ██║██████╔╝██║ ██║███████║ ██║ █████╗
+// ██║ ██║██╔═══╝ ██║ ██║██╔══██║ ██║ ██╔══╝
+// ╚██████╔╝██║ ██████╔╝██║ ██║ ██║ ███████╗
+// ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝
+//
+// Modify the record(s) and, if a fetch was requested, return the values that
+// were modified. When fetching, the records to be updated must first be looked
+// up so they can be selected again by primary key after the update.
+
+var _ = require('@sailshq/lodash');
+var runQuery = require('./run-query');
+var compileStatement = require('./compile-statement');
+
+
+module.exports = function updateRecords(options, cb) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: connection, statement, fetch, and primaryKey.');
+ }
+
+ if (!_.has(options, 'connection') || !_.isObject(options.connection)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid connection.');
+ }
+
+ if (!_.has(options, 'statement') || !_.isPlainObject(options.statement)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid statement.');
+ }
+
+ if (!_.has(options, 'primaryKey') || !_.isString(options.primaryKey)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid primaryKey.');
+ }
+
+ if (!_.has(options, 'fetch') || !_.isBoolean(options.fetch)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid fetch flag.');
+ }
+
+
+ // ╔═╗╔═╗╔╦╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐ ┌┐ ┌─┐┬┌┐┌┌─┐ ┬ ┬┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐
+ // ║ ╦║╣ ║ ├┬┘├┤ │ │ │├┬┘ ││└─┐ ├┴┐├┤ │││││ ┬ │ │├─┘ ││├─┤ │ ├┤ ││
+ // ╚═╝╚═╝ ╩ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘ └─┘└─┘┴┘└┘└─┘ └─┘┴ ─┴┘┴ ┴ ┴ └─┘─┴┘
+ // If a fetch is used, the records that will be updated need to be found first.
+ // This is because in order to (semi) accurately return the records that were
+ // updated in MySQL first they need to be found, then updated, then found again.
+ // Why? Because with a criteria such as "update name to foo where name = bar",
+ // once the records have been updated there is no way to find them again. So first
+ // select the primary keys of the records to update, then update the records, and
+ // finally search for those records by primary key.
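+ // As a rough sketch, for "update name to foo where name = bar" on a `user` table
+ // with fetch enabled, the initial lookup statement would look something like:
+ //   { select: ['id'], from: 'user', where: { name: 'bar' } }
+ // (assuming `id` is the primary key column).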
+ (function getRecordsToUpdate(proceed) {
+ // Only look up the records if fetch was used
+ if (!options.fetch) {
+ return proceed();
+ }
+
+ // Otherwise build up a select query
+ var fetchStatement = {
+ select: [options.primaryKey],
+ from: options.statement.using,
+ where: options.statement.where
+ };
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the statement into a native query.
+ var compiledFetchQuery;
+ try {
+ compiledFetchQuery = compileStatement(fetchStatement);
+ } catch (e) {
+ // If the statement could not be compiled, return an error.
+ return proceed(e);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the initial find query
+ runQuery({
+ connection: options.connection,
+ nativeQuery: compiledFetchQuery.nativeQuery,
+ valuesToEscape: compiledFetchQuery.valuesToEscape,
+ meta: compiledFetchQuery.meta,
+ disconnectOnError: false,
+ queryType: 'select'
+ },
+
+ function runQueryCb(err, report) {
+ if (err) {
+ return proceed(err);
+ }
+
+ return proceed(undefined, report);
+ });
+ })(function afterInitialFetchCb(err, selectReport) {
+ if (err) {
+ return cb(err);
+ }
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the update statement into a native query.
+ var compiledUpdateQuery;
+ try {
+ compiledUpdateQuery = compileStatement(options.statement);
+ } catch (e) {
+ // If the statement could not be compiled, return an error.
+ return cb(e);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the update query
+ runQuery({
+ connection: options.connection,
+ nativeQuery: compiledUpdateQuery.nativeQuery,
+ valuesToEscape: compiledUpdateQuery.valuesToEscape,
+ meta: compiledUpdateQuery.meta,
+ disconnectOnError: false,
+ queryType: 'update'
+ },
+
+ function runQueryCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ // If no fetch was used, then nothing else needs to be done.
+ if (!options.fetch) {
+ return cb(undefined, report.result);
+ }
+
+ // ╔═╗╔═╗╦═╗╔═╗╔═╗╦═╗╔╦╗ ┌┬┐┬ ┬┌─┐ ┌─┐┌─┐┌┬┐┌─┐┬ ┬
+ // ╠═╝║╣ ╠╦╝╠╣ ║ ║╠╦╝║║║ │ ├─┤├┤ ├┤ ├┤ │ │ ├─┤
+ // ╩ ╚═╝╩╚═╚ ╚═╝╩╚═╩ ╩ ┴ ┴ ┴└─┘ └ └─┘ ┴ └─┘┴ ┴
+ // Otherwise, fetch the newly updated records
+ var fetchStatement = {
+ select: '*',
+ from: options.statement.using,
+ where: {}
+ };
+
+ // Build the fetch statement where clause
+ var selectPks = _.map(selectReport.result, function mapPks(record) {
+ return record[options.primaryKey];
+ });
+
+ fetchStatement.where[options.primaryKey] = {
+ in: selectPks
+ };
+
+
+ // Handle case where pk value was changed:
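+ // (e.g. an update that sets `id` from 1 to 2: the old value (1) is removed from
+ // the `in` list and the new value (2) is pushed on, so the fetch finds the record
+ // under its new primary key.)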
+ if (!_.isUndefined(options.statement.update[options.primaryKey])) {
+ // There should only ever be zero or one record found in the earlier lookup.
+ if (selectPks.length === 0) { /* do nothing */ }
+ else if (selectPks.length === 1) {
+ var oldPkValue = selectPks[0];
+ _.remove(fetchStatement.where[options.primaryKey].in, oldPkValue);
+ var newPkValue = options.statement.update[options.primaryKey];
+ fetchStatement.where[options.primaryKey].in.push(newPkValue);
+ }
+ else {
+ return cb(new Error('Consistency violation: Updated multiple records to have the same primary key value. (PK values should be unique!)'));
+ }
+ }
+
+
+ // ╔═╗╔═╗╔╦╗╔═╗╦╦ ╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║║║╠═╝║║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩╩ ╩╩═╝╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compile the statement into a native query.
+ var compiledFetchQuery;
+ try {
+ compiledFetchQuery = compileStatement(fetchStatement);
+ } catch (err) {
+ // If the statement could not be compiled, return an error.
+ return cb(err);
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ // Run the fetch query.
+ runQuery({
+ connection: options.connection,
+ nativeQuery: compiledFetchQuery.nativeQuery,
+ valuesToEscape: compiledFetchQuery.valuesToEscape,
+ meta: compiledFetchQuery.meta,
+ disconnectOnError: false,
+ queryType: 'select'
+ }, function runQueryCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ return cb(undefined, report.result);
+ });
+ });
+ });
+};
diff --git a/helpers/private/schema/build-indexes.js b/helpers/private/schema/build-indexes.js
new file mode 100644
index 00000000..7597441c
--- /dev/null
+++ b/helpers/private/schema/build-indexes.js
@@ -0,0 +1,63 @@
+// ██████╗ ██╗ ██╗██╗██╗ ██████╗ ██╗███╗ ██╗██████╗ ███████╗██╗ ██╗███████╗███████╗
+// ██╔══██╗██║ ██║██║██║ ██╔══██╗ ██║████╗ ██║██╔══██╗██╔════╝╚██╗██╔╝██╔════╝██╔════╝
+// ██████╔╝██║ ██║██║██║ ██║ ██║ ██║██╔██╗ ██║██║ ██║█████╗ ╚███╔╝ █████╗ ███████╗
+// ██╔══██╗██║ ██║██║██║ ██║ ██║ ██║██║╚██╗██║██║ ██║██╔══╝ ██╔██╗ ██╔══╝ ╚════██║
+// ██████╔╝╚██████╔╝██║███████╗██████╔╝ ██║██║ ╚████║██████╔╝███████╗██╔╝ ██╗███████╗███████║
+// ╚═════╝ ╚═════╝ ╚═╝╚══════╝╚═════╝ ╚═╝╚═╝ ╚═══╝╚═════╝ ╚══════╝╚═╝ ╚═╝╚══════╝╚══════╝
+//
+// Build database indexes as needed.
+
+var _ = require('@sailshq/lodash');
+var async = require('async');
+var escapeTableName = require('./escape-table-name');
+var runNativeQuery = require('../query/run-native-query');
+
+
+module.exports = function buildIndexes(options, cb) {
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┬┐┬┌─┐┌┐┌┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │├─┘ │ ││ ││││└─┐
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘┴ ┴ ┴└─┘┘└┘└─┘
+ if (_.isUndefined(options) || !_.isPlainObject(options)) {
+ throw new Error('Invalid options argument. Options must contain: connection, definition, and tableName.');
+ }
+
+ if (!_.has(options, 'connection') || !_.isObject(options.connection)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid connection.');
+ }
+
+ if (!_.has(options, 'definition') || !_.isPlainObject(options.definition)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid definition.');
+ }
+
+ if (!_.has(options, 'tableName') || !_.isString(options.tableName)) {
+ throw new Error('Invalid option used in options argument. Missing or invalid tableName.');
+ }
+
+
+ // ╔═╗╦╔╗╔╔╦╗ ┌─┐┌┐┌┬ ┬ ┬┌┐┌┌┬┐┌─┐─┐ ┬┌─┐┌─┐
+ // ╠╣ ║║║║ ║║ ├─┤│││└┬┘ ││││ ││├┤ ┌┴┬┘├┤ └─┐
+ // ╚ ╩╝╚╝═╩╝ ┴ ┴┘└┘ ┴ ┴┘└┘─┴┘└─┘┴ └─└─┘└─┘
+ var indexes = _.reduce(options.definition, function reduce(meta, val, key) {
+ if (_.has(val, 'index')) {
+ meta.push(key);
+ }
+
+ return meta;
+ }, []);
+
+
+ // ╔╗ ╦ ╦╦╦ ╔╦╗ ┬┌┐┌┌┬┐┌─┐─┐ ┬┌─┐┌─┐
+ // ╠╩╗║ ║║║ ║║ ││││ ││├┤ ┌┴┬┘├┤ └─┐
+ // ╚═╝╚═╝╩╩═╝═╩╝ ┴┘└┘─┴┘└─┘┴ └─└─┘└─┘
+ // Build indexes in series
+ async.eachSeries(indexes, function build(name, nextIndex) {
+ // Strip any quote characters from the table name; it's used to namespace the index
+ var cleanTable = options.tableName.replace(/['"]/g, '');
+
+ // Build a query to create a namespaced index tableName_key
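+ // e.g. for table `user` and indexed attribute `email`, this produces (roughly):
+ //   CREATE INDEX `user_email` on user (`email`);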
+ var query = 'CREATE INDEX ' + escapeTableName(cleanTable + '_' + name) + ' on ' + options.tableName + ' (' + escapeTableName(name) + ');';
+
+ // Run the native query
+ runNativeQuery(options.connection, query, [], undefined, nextIndex);
+ }, cb);
+};
diff --git a/helpers/private/schema/build-schema.js b/helpers/private/schema/build-schema.js
new file mode 100644
index 00000000..c62bbb52
--- /dev/null
+++ b/helpers/private/schema/build-schema.js
@@ -0,0 +1,83 @@
+// ██████╗ ██╗ ██╗██╗██╗ ██████╗ ███████╗ ██████╗██╗ ██╗███████╗███╗ ███╗ █████╗
+// ██╔══██╗██║ ██║██║██║ ██╔══██╗ ██╔════╝██╔════╝██║ ██║██╔════╝████╗ ████║██╔══██╗
+// ██████╔╝██║ ██║██║██║ ██║ ██║ ███████╗██║ ███████║█████╗ ██╔████╔██║███████║
+// ██╔══██╗██║ ██║██║██║ ██║ ██║ ╚════██║██║ ██╔══██║██╔══╝ ██║╚██╔╝██║██╔══██║
+// ██████╔╝╚██████╔╝██║███████╗██████╔╝ ███████║╚██████╗██║ ██║███████╗██║ ╚═╝ ██║██║ ██║
+// ╚═════╝ ╚═════╝ ╚═╝╚══════╝╚═════╝ ╚══════╝ ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝
+//
+// Build a schema object that is suitable for using in a Create Table query.
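+// For example, a definition along the lines of:
+//   { id: { columnType: '_numberkey', primaryKey: true, autoIncrement: true },
+//     name: { columnType: '_string' } }
+// would produce (roughly): `id` INTEGER AUTO_INCREMENT,`name` VARCHAR(255), PRIMARY KEY (id)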
+
+var _ = require('@sailshq/lodash');
+
+module.exports = function buildSchema(definition) {
+ if (!definition) {
+ throw new Error('Build Schema requires a valid definition.');
+ }
+
+ // ╔╗╔╔═╗╦═╗╔╦╗╔═╗╦ ╦╔═╗╔═╗ ┌┬┐┬ ┬┌─┐┌─┐
+ // ║║║║ ║╠╦╝║║║╠═╣║ ║╔═╝║╣ │ └┬┘├─┘├┤
+ // ╝╚╝╚═╝╩╚═╩ ╩╩ ╩╩═╝╩╚═╝╚═╝ ┴ ┴ ┴ └─┘
+ // TODO: move this code inline to eliminate unnecessary function declaration
+ var normalizeType = function normalizeType(type) {
+ switch (type.toLowerCase()) {
+
+ // Default types from sails-hook-orm (for automigrations)
+ case '_number':
+ return 'REAL';
+ case '_numberkey':
+ return 'INTEGER';
+ case '_numbertimestamp':
+ return 'BIGINT';
+ case '_string':
+ return 'VARCHAR(255)';
+ case '_stringkey':
+ return 'VARCHAR(255)';
+ case '_stringtimestamp':
+ return 'VARCHAR(255)';
+ case '_boolean':
+ return 'BOOLEAN';
+ case '_json':
+ return 'LONGTEXT';
+ case '_ref':
+ return 'LONGTEXT';
+
+ // Sensible MySQL-specific defaults for common things folks might try to use.
+ // (FUTURE: log warnings suggesting proper usage when any of these synonyms are invoked)
+ case 'varchar':
+ return 'VARCHAR(255)';
+
+ default:
+ return type;
+ }
+ };
+
+ // Build up a string of column attributes
+ var columns = _.map(definition, function map(attribute, name) {
+ if (_.isString(attribute)) {
+ var val = attribute;
+ attribute = {};
+ attribute.columnType = val;
+ }
+
+ var type = normalizeType(attribute.columnType);
+ var nullable = attribute.notNull && 'NOT NULL';
+ var unique = attribute.unique && 'UNIQUE';
+ var autoIncrement = attribute.autoIncrement && 'AUTO_INCREMENT';
+
+ return _.compact(['`' + name + '`', type, nullable, unique, autoIncrement]).join(' ');
+ }).join(',');
+
+ // Grab the Primary Key
+ var primaryKeys = _.keys(_.pick(definition, function findPK(attribute) {
+ return attribute.primaryKey;
+ }));
+
+ // Add the Primary Key to the definition
+ var constraints = _.compact([
+ primaryKeys.length && 'PRIMARY KEY (' + primaryKeys.join(',') + ')'
+ ]).join(', ');
+
+ var schema = _.compact([columns, constraints]).join(', ');
+
+ return schema;
+};
diff --git a/helpers/private/schema/escape-table-name.js b/helpers/private/schema/escape-table-name.js
new file mode 100644
index 00000000..fb3162db
--- /dev/null
+++ b/helpers/private/schema/escape-table-name.js
@@ -0,0 +1,21 @@
+// ███████╗███████╗ ██████╗ █████╗ ██████╗ ███████╗ ████████╗ █████╗ ██████╗ ██╗ ███████╗
+// ██╔════╝██╔════╝██╔════╝██╔══██╗██╔══██╗██╔════╝ ╚══██╔══╝██╔══██╗██╔══██╗██║ ██╔════╝
+// █████╗ ███████╗██║ ███████║██████╔╝█████╗ ██║ ███████║██████╔╝██║ █████╗
+// ██╔══╝ ╚════██║██║ ██╔══██║██╔═══╝ ██╔══╝ ██║ ██╔══██║██╔══██╗██║ ██╔══╝
+// ███████╗███████║╚██████╗██║ ██║██║ ███████╗ ██║ ██║ ██║██████╔╝███████╗███████╗
+// ╚══════╝╚══════╝ ╚═════╝╚═╝ ╚═╝╚═╝ ╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═════╝ ╚══════╝╚══════╝
+//
+// ███╗ ██╗ █████╗ ███╗ ███╗███████╗
+// ████╗ ██║██╔══██╗████╗ ████║██╔════╝
+// ██╔██╗ ██║███████║██╔████╔██║█████╗
+// ██║╚██╗██║██╔══██║██║╚██╔╝██║██╔══╝
+// ██║ ╚████║██║ ██║██║ ╚═╝ ██║███████╗
+// ╚═╝ ╚═══╝╚═╝ ╚═╝╚═╝ ╚═╝╚══════╝
+//
+// Given a table name, escape it with backticks so it can be safely used in a
+// MySQL query.
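+// e.g. escapeTableName('user') => '`user`'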
+
+module.exports = function escapeTableName(name) {
+ name = '`' + name + '`';
+ return name;
+};
diff --git a/helpers/register-data-store.js b/helpers/register-data-store.js
new file mode 100644
index 00000000..930bb9f5
--- /dev/null
+++ b/helpers/register-data-store.js
@@ -0,0 +1,195 @@
+// ██████╗ ███████╗ ██████╗ ██╗███████╗████████╗███████╗██████╗
+// ██╔══██╗██╔════╝██╔════╝ ██║██╔════╝╚══██╔══╝██╔════╝██╔══██╗
+// ██████╔╝█████╗ ██║ ███╗██║███████╗ ██║ █████╗ ██████╔╝
+// ██╔══██╗██╔══╝ ██║ ██║██║╚════██║ ██║ ██╔══╝ ██╔══██╗
+// ██║ ██║███████╗╚██████╔╝██║███████║ ██║ ███████╗██║ ██║
+// ╚═╝ ╚═╝╚══════╝ ╚═════╝ ╚═╝╚══════╝ ╚═╝ ╚══════╝╚═╝ ╚═╝
+//
+// ██████╗ █████╗ ████████╗ █████╗ ███████╗████████╗ ██████╗ ██████╗ ███████╗
+// ██╔══██╗██╔══██╗╚══██╔══╝██╔══██╗ ██╔════╝╚══██╔══╝██╔═══██╗██╔══██╗██╔════╝
+// ██║ ██║███████║ ██║ ███████║ ███████╗ ██║ ██║ ██║██████╔╝█████╗
+// ██║ ██║██╔══██║ ██║ ██╔══██║ ╚════██║ ██║ ██║ ██║██╔══██╗██╔══╝
+// ██████╔╝██║ ██║ ██║ ██║ ██║ ███████║ ██║ ╚██████╔╝██║ ██║███████╗
+// ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝╚══════╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Register Data Store',
+
+
+ description: 'Register a new datastore for making connections.',
+
+
+ sync: true,
+
+
+ inputs: {
+
+ identity: {
+ description: 'A unique identifier for the connection.',
+ example: 'localMySQL',
+ required: true
+ },
+
+ config: {
+ description: 'The configuration to use for the data store.',
+ required: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'The Waterline models that will be used with this data store.',
+ required: true,
+ example: '==='
+ },
+
+ datastores: {
+ description: 'An object containing all of the data stores that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ modelDefinitions: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The data store was initialized successfully.'
+ },
+
+ badConfiguration: {
+ description: 'The configuration was invalid.',
+ outputType: 'ref'
+ }
+
+ },
+
+
+ fn: function registerDataStore(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var MySQL = require('machinepack-mysql');
+ var Helpers = require('./private');
+
+ // Validate that the datastore isn't already initialized
+ if (inputs.datastores[inputs.identity]) {
+ return exits.badConfiguration(new Error('Datastore `' + inputs.identity + '` is already registered.'));
+ }
+
+ // ╦ ╦╔═╗╦ ╦╔╦╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┐┌┌─┐┬┌─┐
+ // ╚╗╔╝╠═╣║ ║ ║║╠═╣ ║ ║╣ │ │ ││││├┤ ││ ┬
+ // ╚╝ ╩ ╩╩═╝╩═╩╝╩ ╩ ╩ ╚═╝ └─┘└─┘┘└┘└ ┴└─┘
+ // If a URL config value was not given, ensure that all the various pieces
+ // needed to create one exist.
+ var hasURL = _.has(inputs.config, 'url');
+
+ // Validate that the connection has a host and database property
+ if (!hasURL && !inputs.config.host) {
+ return exits.badConfiguration(new Error('Datastore `' + inputs.identity + '` config is missing a host value.'));
+ }
+
+ if (!hasURL && !inputs.config.database) {
+ return exits.badConfiguration(new Error('Datastore `' + inputs.identity + '` config is missing a value for the database name.'));
+ }
+
+ // Loop through every model assigned to the datastore we're registering,
+ // and ensure that each one's primary key is either required or auto-incrementing.
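+ // (e.g. `id: { type: 'number', autoMigrations: { autoIncrement: true } }` or
+ // `id: { type: 'string', required: true }` would both pass this check.)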
+ try {
+ _.each(inputs.models, function checkPrimaryKey(modelDef, modelIdentity) {
+ var primaryKeyAttr = modelDef.definition[modelDef.primaryKey];
+
+ // Ensure that the model's primary key has either `autoIncrement` or `required`
+ if (primaryKeyAttr.required !== true && (!primaryKeyAttr.autoMigrations || primaryKeyAttr.autoMigrations.autoIncrement !== true)) {
+ throw new Error('In model `' + modelIdentity + '`, primary key `' + modelDef.primaryKey + '` must have either `required` or `autoIncrement` set.');
+ }
+ });
+ } catch (e) {
+ return exits.badConfiguration(e);
+ }
+
+ // ╔═╗╔═╗╔╗╔╔═╗╦═╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ║ ╦║╣ ║║║║╣ ╠╦╝╠═╣ ║ ║╣ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╚═╝╝╚╝╚═╝╩╚═╩ ╩ ╩ ╚═╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┌┬┐┬─┐┬┌┐┌┌─┐ ┬ ┬┬─┐┬
+ // └─┐ │ ├┬┘│││││ ┬ │ │├┬┘│
+ // └─┘ ┴ ┴└─┴┘└┘└─┘ └─┘┴└─┴─┘
+ // If the connection details were not supplied as a URL, make them into one.
+ // This is required for the underlying driver in use.
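+ // e.g. { user: 'root', password: 'squ1ddy', host: 'localhost', port: 3306, database: 'my_app' }
+ // becomes: mysql://root:squ1ddy@localhost:3306/my_app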
+ if (!_.has(inputs.config, 'url')) {
+ var url = 'mysql://';
+ var port = inputs.config.port || '3306';
+
+ // If authentication is used, add it to the connection string
+ if (inputs.config.user && inputs.config.password) {
+ url += inputs.config.user + ':' + inputs.config.password + '@';
+ }
+
+ url += inputs.config.host + ':' + port + '/' + inputs.config.database;
+ inputs.config.url = url;
+ }
+
+
+ // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗ ┌┬┐┌─┐┌┐┌┌─┐┌─┐┌─┐┬─┐
+ // ║ ╠╦╝║╣ ╠═╣ ║ ║╣ │││├─┤│││├─┤│ ┬├┤ ├┬┘
+ // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝ ┴ ┴┴ ┴┘└┘┴ ┴└─┘└─┘┴└─
+ // Create a manager to handle the datastore connection config
+ var report;
+ try {
+ report = Helpers.connection.createManager(inputs.config.url, inputs.config);
+ } catch (e) {
+ if (!e.code || e.code === 'error') {
+ return exits.error(new Error('There was an error creating a new manager for the connection with a url of: ' + inputs.config.url + '\n\n' + e.stack));
+ }
+
+ if (e.code === 'failed') {
+ return exits.badConfiguration(new Error('There was an error creating a new manager for the connection with a url of: ' + inputs.config.url + '\n\n' + e.stack));
+ }
+
+ if (e.code === 'malformed') {
+ return exits.badConfiguration(new Error('There was an error creating a new manager for the connection with a url of: ' + inputs.config.url + '\n\n' + e.stack));
+ }
+
+ return exits.error(new Error('There was an error creating a new manager for the connection with a url of: ' + inputs.config.url + '\n\n' + e.stack));
+ }
+
+
+ // Build up a database schema for this connection that can be used
+ // throughout the adapter
+ var dbSchema = {};
+
+ _.each(inputs.models, function buildSchema(val) {
+ var identity = val.identity;
+ var tableName = val.tableName;
+ var definition = val.definition;
+
+ dbSchema[tableName] = {
+ identity: identity,
+ tableName: tableName,
+ definition: definition,
+ attributes: definition,
+ primaryKey: val.primaryKey
+ };
+ });
+
+ // Store the connection
+ inputs.datastores[inputs.identity] = {
+ manager: report.manager,
+ config: inputs.config,
+ driver: MySQL
+ };
+
+ // Store the db schema for the connection
+ inputs.modelDefinitions[inputs.identity] = dbSchema;
+
+ return exits.success();
+ }
+});
diff --git a/helpers/select.js b/helpers/select.js
new file mode 100644
index 00000000..c3597eba
--- /dev/null
+++ b/helpers/select.js
@@ -0,0 +1,172 @@
+// ███████╗███████╗██╗ ███████╗ ██████╗████████╗ █████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔════╝██╔════╝██║ ██╔════╝██╔════╝╚══██╔══╝ ██╔══██╗██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ███████╗█████╗ ██║ █████╗ ██║ ██║ ███████║██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ╚════██║██╔══╝ ██║ ██╔══╝ ██║ ██║ ██╔══██║██║ ██║ ██║██║ ██║██║╚██╗██║
+// ███████║███████╗███████╗███████╗╚██████╗ ██║ ██║ ██║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚══════╝╚══════╝╚══════╝╚══════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Select',
+
+
+ description: 'Find record(s) in the database.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A valid stage three Waterline query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The results of the select query.',
+ outputVariableName: 'records',
+ outputType: 'ref'
+ },
+
+ invalidDatastore: {
+ description: 'The datastore used is invalid. It is missing key pieces.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function select(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var WLUtils = require('waterline-utils');
+ var Converter = WLUtils.query.converter;
+ var Helpers = require('./private');
+
+
+ // Store the Query input for easier access
+ var query = inputs.query;
+ query.meta = query.meta || {};
+
+
+ // Find the model definition
+ var model = inputs.models[query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(query.meta, 'leasedConnection');
+
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌┬┐┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ │ │ │ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ └─┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the Waterline criteria into a Waterline Query Statement. This
+ // turns it into something that is declarative and can be easily used to
+ // build a SQL query.
+ // See: https://github.com/treelinehq/waterline-query-docs for more info
+ // on Waterline Query Statements.
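+ // As a rough sketch, a criteria like
+ //   { where: { name: 'finn' }, limit: 1 }
+ // on the `user` model would be converted into a statement along the lines of:
+ //   { select: ['*'], from: 'user', where: { name: 'finn' }, limit: 1 }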
+ var statement;
+ try {
+ statement = Converter({
+ model: query.using,
+ method: 'find',
+ criteria: query.criteria
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+
+ // Compile the original Waterline Query
+ var compiledQuery;
+ try {
+ compiledQuery = Helpers.query.compileStatement(statement);
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, query.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐┌─┐┬ ┌─┐┌─┐┌┬┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ └─┐├┤ │ ├┤ │ │ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└─┘┴─┘└─┘└─┘ ┴ └─┘└└─┘└─┘┴└─ ┴
+ var queryType = 'select';
+
+ Helpers.query.runQuery({
+ connection: connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ queryType: queryType,
+ disconnectOnError: leased ? false : true
+ },
+
+ function runQueryCb(err, report) {
+ // The runQuery helper will automatically release the connection on error
+ // if needed.
+ if (err) {
+ return exits.error(err);
+ }
+
+ // Always release the connection unless a leased connection from outside
+ // the adapter was used.
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ var selectRecords = report.result;
+ var orm = {
+ collections: inputs.models
+ };
+
+ // Process each record to normalize output
+ try {
+ Helpers.query.processEachRecord({
+ records: selectRecords,
+ identity: model.identity,
+ orm: orm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ return exits.success({ records: selectRecords });
+ }); // releaseConnection >
+ }); // runQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/set-sequence.js b/helpers/set-sequence.js
new file mode 100644
index 00000000..eb2aa138
--- /dev/null
+++ b/helpers/set-sequence.js
@@ -0,0 +1,70 @@
+// ███████╗███████╗████████╗ ███████╗███████╗ ██████╗ ██╗ ██╗███████╗███╗ ██╗ ██████╗███████╗
+// ██╔════╝██╔════╝╚══██╔══╝ ██╔════╝██╔════╝██╔═══██╗██║ ██║██╔════╝████╗ ██║██╔════╝██╔════╝
+// ███████╗█████╗ ██║ ███████╗█████╗ ██║ ██║██║ ██║█████╗ ██╔██╗ ██║██║ █████╗
+// ╚════██║██╔══╝ ██║ ╚════██║██╔══╝ ██║▄▄ ██║██║ ██║██╔══╝ ██║╚██╗██║██║ ██╔══╝
+// ███████║███████╗ ██║ ███████║███████╗╚██████╔╝╚██████╔╝███████╗██║ ╚████║╚██████╗███████╗
+// ╚══════╝╚══════╝ ╚═╝ ╚══════╝╚══════╝ ╚══▀▀═╝ ╚═════╝ ╚══════╝╚═╝ ╚═══╝ ╚═════╝╚══════╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Set Sequence',
+
+
+ description: 'Sets the current value of a sequence from a migration.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ sequenceName: {
+ description: 'The name of the sequence to set the value for.',
+ required: true,
+ example: 'user_id_seq'
+ },
+
+ sequenceValue: {
+ description: 'The value to set the sequence to.',
+ required: true,
+ example: 123
+ },
+
+ meta: {
+ friendlyName: 'Meta (custom)',
+ description: 'Additional stuff to pass to the driver.',
+ extendedDescription: 'This is reserved for custom driver-specific extensions.',
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The sequence was set successfully.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function setSequence(inputs, exits) {
+ // Sequences are not used in MySQL (auto-increment handles this natively), so this is a no-op.
+ setImmediate(function ensureAsync() {
+ return exits.success();
+ });
+ }
+});
diff --git a/helpers/sum.js b/helpers/sum.js
new file mode 100644
index 00000000..680366cd
--- /dev/null
+++ b/helpers/sum.js
@@ -0,0 +1,154 @@
+// ███████╗██╗ ██╗███╗ ███╗ █████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██╔════╝██║ ██║████╗ ████║ ██╔══██╗██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ███████╗██║ ██║██╔████╔██║ ███████║██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ╚════██║██║ ██║██║╚██╔╝██║ ██╔══██║██║ ██║ ██║██║ ██║██║╚██╗██║
+// ███████║╚██████╔╝██║ ╚═╝ ██║ ██║ ██║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚══════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'SUM',
+
+
+ description: 'Return the SUM of the records matched by the query.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A valid stage three Waterline query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The results of the sum query.',
+ outputType: 'ref'
+ },
+
+ invalidDatastore: {
+ description: 'The datastore used is invalid. It is missing key pieces.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ }
+
+ },
+
+
+ fn: function sum(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var Converter = require('waterline-utils').query.converter;
+ var Helpers = require('./private');
+
+
+ // Store the Query input for easier access
+ var query = inputs.query;
+ query.meta = query.meta || {};
+
+
+ // Find the model definition
+ var model = inputs.models[query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(query.meta, 'leasedConnection');
+
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌┬┐┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ │ │ │ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ └─┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the Waterline criteria into a Waterline Query Statement. This
+ // turns it into something that is declarative and can be easily used to
+ // build a SQL query.
+ // See: https://github.com/treelinehq/waterline-query-docs for more info
+ // on Waterline Query Statements.
+ var statement;
+ try {
+ statement = Converter({
+ model: query.using,
+ method: 'sum',
+ criteria: query.criteria,
+ values: query.numericAttrName
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // Compile the original Waterline Query
+ var compiledQuery;
+ try {
+ compiledQuery = Helpers.query.compileStatement(statement);
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, query.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+ // ╦═╗╦ ╦╔╗╔ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘└└─┘└─┘┴└─ ┴
+ var queryType = 'sum';
+
+ Helpers.query.runQuery({
+ connection: connection,
+ nativeQuery: compiledQuery.nativeQuery,
+ valuesToEscape: compiledQuery.valuesToEscape,
+ meta: compiledQuery.meta,
+ queryType: queryType,
+ disconnectOnError: leased ? false : true
+ },
+
+ function runQueryCb(err, report) {
+ // The runQuery helper will automatically release the connection on error
+ // if needed.
+ if (err) {
+ return exits.error(err);
+ }
+
+ // Always release the connection unless a leased connection from outside
+ // the adapter was used.
+ Helpers.connection.releaseConnection(connection, leased, function releaseConnectionCb() {
+ return exits.success(report.result);
+ }); // releaseConnection >
+ }); // runQuery >
+ }); // spawnConnection >
+ }
+});
diff --git a/helpers/teardown.js b/helpers/teardown.js
new file mode 100644
index 00000000..11095bd1
--- /dev/null
+++ b/helpers/teardown.js
@@ -0,0 +1,87 @@
+// ████████╗███████╗ █████╗ ██████╗ ██████╗ ██████╗ ██╗ ██╗███╗ ██╗
+// ╚══██╔══╝██╔════╝██╔══██╗██╔══██╗██╔══██╗██╔═══██╗██║ ██║████╗ ██║
+// ██║ █████╗ ███████║██████╔╝██║ ██║██║ ██║██║ █╗ ██║██╔██╗ ██║
+// ██║ ██╔══╝ ██╔══██║██╔══██╗██║ ██║██║ ██║██║███╗██║██║╚██╗██║
+// ██║ ███████╗██║ ██║██║ ██║██████╔╝╚██████╔╝╚███╔███╔╝██║ ╚████║
+// ╚═╝ ╚══════╝╚═╝ ╚═╝╚═╝ ╚═╝╚═════╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Teardown',
+
+
+ description: 'Destroys a connection manager so that a server can be shut down cleanly.',
+
+
+ inputs: {
+
+ identity: {
+ description: 'The datastore identity to teardown.',
+ required: true,
+ example: '==='
+ },
+
+ datastores: {
+ description: 'An object containing all of the data stores that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ modelDefinitions: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The data store was initialized successfully.'
+ },
+
+ badConfiguration: {
+ description: 'The configuration was invalid.'
+ }
+
+ },
+
+
+ fn: function teardown(inputs, exits) {
+ // Dependencies
+ var Helpers = require('./private');
+
+ var datastore = inputs.datastores[inputs.identity];
+ if (!datastore) {
+ return exits.error(new Error('Invalid data store identity. No data store exists with that identity.'));
+ }
+
+
+ // ╔╦╗╔═╗╔═╗╔╦╗╦═╗╔═╗╦ ╦ ┌┬┐┌─┐┌┐┌┌─┐┌─┐┌─┐┬─┐
+ // ║║║╣ ╚═╗ ║ ╠╦╝║ ║╚╦╝ │││├─┤│││├─┤│ ┬├┤ ├┬┘
+ // ═╩╝╚═╝╚═╝ ╩ ╩╚═╚═╝ ╩ ┴ ┴┴ ┴┘└┘┴ ┴└─┘└─┘┴└─
+ var manager = datastore.manager;
+ if (!manager) {
+ return exits.error(new Error('Missing manager for this data store. The data store may be in the process of being destroyed.'));
+ }
+
+
+ Helpers.connection.destroyManager(manager, function destroyManagerCb(err) {
+ if (err) {
+ return exits.error(err);
+ }
+
+ // Delete the rest of the data from the data store
+ delete inputs.datastores[inputs.identity];
+
+ // Delete the model definitions
+ delete inputs.modelDefinitions[inputs.identity];
+
+ return exits.success();
+ });
+ }
+});
diff --git a/helpers/update.js b/helpers/update.js
new file mode 100644
index 00000000..98630925
--- /dev/null
+++ b/helpers/update.js
@@ -0,0 +1,209 @@
+// ██╗ ██╗██████╗ ██████╗ █████╗ ████████╗███████╗ █████╗ ██████╗████████╗██╗ ██████╗ ███╗ ██╗
+// ██║ ██║██╔══██╗██╔══██╗██╔══██╗╚══██╔══╝██╔════╝ ██╔══██╗██╔════╝╚══██╔══╝██║██╔═══██╗████╗ ██║
+// ██║ ██║██████╔╝██║ ██║███████║ ██║ █████╗ ███████║██║ ██║ ██║██║ ██║██╔██╗ ██║
+// ██║ ██║██╔═══╝ ██║ ██║██╔══██║ ██║ ██╔══╝ ██╔══██║██║ ██║ ██║██║ ██║██║╚██╗██║
+// ╚██████╔╝██║ ██████╔╝██║ ██║ ██║ ███████╗ ██║ ██║╚██████╗ ██║ ██║╚██████╔╝██║ ╚████║
+// ╚═════╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝
+//
+
+module.exports = require('machine').build({
+
+
+ friendlyName: 'Update',
+
+
+ description: 'Update record(s) in the database based on a query criteria.',
+
+
+ inputs: {
+
+ datastore: {
+ description: 'The datastore to use for connections.',
+ extendedDescription: 'Datastores represent the config and manager required to obtain an active database connection.',
+ required: true,
+ readOnly: true,
+ example: '==='
+ },
+
+ models: {
+ description: 'An object containing all of the model definitions that have been registered.',
+ required: true,
+ example: '==='
+ },
+
+ query: {
+ description: 'A valid stage three Waterline query.',
+ required: true,
+ example: '==='
+ }
+
+ },
+
+
+ exits: {
+
+ success: {
+ description: 'The records were successfully updated.',
+ outputVariableName: 'records',
+ outputType: 'ref'
+ },
+
+ invalidDatastore: {
+ description: 'The datastore used is invalid. It is missing key pieces.'
+ },
+
+ badConnection: {
+ friendlyName: 'Bad connection',
+ description: 'A connection either could not be obtained or there was an error using the connection.'
+ },
+
+ notUnique: {
+ friendlyName: 'Not Unique',
+ outputType: 'ref'
+ }
+
+ },
+
+
+ fn: function update(inputs, exits) {
+ // Dependencies
+ var _ = require('@sailshq/lodash');
+ var WLUtils = require('waterline-utils');
+ var Helpers = require('./private');
+ var Converter = WLUtils.query.converter;
+
+
+ // Store the Query input for easier access
+ var query = inputs.query;
+ query.meta = query.meta || {};
+
+
+ // Find the model definition
+ var model = inputs.models[query.using];
+ if (!model) {
+ return exits.invalidDatastore();
+ }
+
+
+ // Set a flag if a leased connection from outside the adapter was used or not.
+ var leased = _.has(query.meta, 'leasedConnection');
+
+ // Set a flag to determine if records are being returned
+ var fetchRecords = false;
+
+
+ // Build a faux ORM for use in preProcessRecord and processEachRecord
+ var fauxOrm = {
+ collections: inputs.models
+ };
+
+
+ // ╔═╗╦═╗╔═╗ ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┐
+ // ╠═╝╠╦╝║╣───╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ ├┬┘├┤ │ │ │├┬┘ ││└─┐
+ // ╩ ╩╚═╚═╝ ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ ┴└─└─┘└─┘└─┘┴└──┴┘└─┘
+ // Process each record to normalize output
+ try {
+ Helpers.query.preProcessRecord({
+ records: [query.valuesToSet],
+ identity: model.identity,
+ orm: fauxOrm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+
+ // ╔═╗╔═╗╔╗╔╦ ╦╔═╗╦═╗╔╦╗ ┌┬┐┌─┐ ┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┬┐┌─┐┌┐┌┌┬┐
+ // ║ ║ ║║║║╚╗╔╝║╣ ╠╦╝ ║ │ │ │ └─┐ │ ├─┤ │ ├┤ │││├┤ │││ │
+ // ╚═╝╚═╝╝╚╝ ╚╝ ╚═╝╩╚═ ╩ ┴ └─┘ └─┘ ┴ ┴ ┴ ┴ └─┘┴ ┴└─┘┘└┘ ┴
+ // Convert the Waterline criteria into a Waterline Query Statement. This
+ // turns it into something that is declarative and can be easily used to
+ // build a SQL query.
+ // See: https://github.com/treelinehq/waterline-query-docs for more info
+ // on Waterline Query Statements.
+ var statement;
+ try {
+ statement = Converter({
+ model: query.using,
+ method: 'update',
+ criteria: query.criteria,
+ values: query.valuesToSet
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+
+ // ╔╦╗╔═╗╔╦╗╔═╗╦═╗╔╦╗╦╔╗╔╔═╗ ┬ ┬┬ ┬┬┌─┐┬ ┬ ┬ ┬┌─┐┬ ┬ ┬┌─┐┌─┐
+ // ║║║╣ ║ ║╣ ╠╦╝║║║║║║║║╣ │││├─┤││ ├─┤ └┐┌┘├─┤│ │ │├┤ └─┐
+ // ═╩╝╚═╝ ╩ ╚═╝╩╚═╩ ╩╩╝╚╝╚═╝ └┴┘┴ ┴┴└─┘┴ ┴ └┘ ┴ ┴┴─┘└─┘└─┘└─┘
+ // ┌┬┐┌─┐ ┬─┐┌─┐┌┬┐┬ ┬┬─┐┌┐┌
+ // │ │ │ ├┬┘├┤ │ │ │├┬┘│││
+ // ┴ └─┘ ┴└─└─┘ ┴ └─┘┴└─┘└┘
+ if (_.has(query.meta, 'fetch') && query.meta.fetch) {
+ fetchRecords = true;
+ }
+
+
+ // Find the Primary Key
+ var primaryKeyField = model.primaryKey;
+ var primaryKeyColumnName = model.definition[primaryKeyField].columnName;
+
+
+ // ╔═╗╔═╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ╚═╗╠═╝╠═╣║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╚═╝╩ ╩ ╩╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // ┌─┐┬─┐ ┬ ┬┌─┐┌─┐ ┬ ┌─┐┌─┐┌─┐┌─┐┌┬┐ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // │ │├┬┘ │ │└─┐├┤ │ ├┤ ├─┤└─┐├┤ ││ │ │ │││││││├┤ │ │ ││ ││││
+ // └─┘┴└─ └─┘└─┘└─┘ ┴─┘└─┘┴ ┴└─┘└─┘─┴┘ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Spawn a new connection for running queries on.
+ Helpers.connection.spawnOrLeaseConnection(inputs.datastore, query.meta, function spawnConnectionCb(err, connection) {
+ if (err) {
+ return exits.badConnection(err);
+ }
+
+
+ // ╦═╗╦ ╦╔╗╔ ┬ ┬┌─┐┌┬┐┌─┐┌┬┐┌─┐ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠╦╝║ ║║║║ │ │├─┘ ││├─┤ │ ├┤ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩╚═╚═╝╝╚╝ └─┘┴ ─┴┘┴ ┴ ┴ └─┘ └─┘└└─┘└─┘┴└─ ┴
+ Helpers.query.update({
+ connection: connection,
+ statement: statement,
+ fetch: fetchRecords,
+ primaryKey: primaryKeyColumnName
+ },
+
+ function updateRecordCb(err, updatedRecords) {
+ // Always release the connection unless a leased connection from outside
+ // the adapter was used.
+ Helpers.connection.releaseConnection(connection, leased, function cb() {
+ // If there was an error return it.
+ if (err) {
+ if (err.footprint && err.footprint.identity === 'notUnique') {
+ return exits.notUnique(err);
+ }
+
+ return exits.error(err);
+ }
+
+ if (fetchRecords) {
+ // Process each record to normalize output
+ try {
+ Helpers.query.processEachRecord({
+ records: updatedRecords,
+ identity: model.identity,
+ orm: fauxOrm
+ });
+ } catch (e) {
+ return exits.error(e);
+ }
+
+ return exits.success({ records: updatedRecords });
+ }
+
+ return exits.success();
+ }); // releaseConnection >
+ }); // update >
+ }); // spawnConnection >
+ }
+});
diff --git a/lib/adapter.js b/lib/adapter.js
index 84839a54..a63a71e6 100644
--- a/lib/adapter.js
+++ b/lib/adapter.js
@@ -1,1233 +1,473 @@
-/*---------------------------------------------------------------
- :: sails-mysql
- -> adapter
----------------------------------------------------------------*/
-
-// Dependencies
+// ███████╗ █████╗ ██╗██╗ ███████╗ ███╗ ███╗██╗ ██╗███████╗ ██████╗ ██╗
+// ██╔════╝██╔══██╗██║██║ ██╔════╝ ████╗ ████║╚██╗ ██╔╝██╔════╝██╔═══██╗██║
+// ███████╗███████║██║██║ ███████╗ ██╔████╔██║ ╚████╔╝ ███████╗██║ ██║██║
+// ╚════██║██╔══██║██║██║ ╚════██║ ██║╚██╔╝██║ ╚██╔╝ ╚════██║██║▄▄ ██║██║
+// ███████║██║ ██║██║███████╗███████║ ██║ ╚═╝ ██║ ██║ ███████║╚██████╔╝███████╗
+// ╚══════╝╚═╝ ╚═╝╚═╝╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝ ╚══▀▀═╝ ╚══════╝
+//
+// An adapter for MySQL and Waterline
+
+var _ = require('@sailshq/lodash');
var async = require('async');
-var _ = require('lodash');
-var util = require('util');
-var mysql = require('mysql');
-
-var Errors = require('waterline-errors').adapter;
-var Sequel = require('waterline-sequel');
-var Cursor = require('waterline-cursor');
-
-var utils = require('./utils');
-var _teardownConnection = require('./connections/teardown');
-var _spawnConnection = require('./connections/spawn');
-var _registerConnection = require('./connections/register');
-
-var sql = require('./sql.js');
-
-var hop = utils.object.hasOwnProperty;
-
-var STRINGFILE = {
- noCallbackError: 'An error occurred in the MySQL adapter, but no callback was specified to the spawnConnection function to handle it.'
-};
+var redactPasswords = require('./private/redact-passwords');
+var Helpers = require('../helpers');
-// Hack for development - in future versions, allow
-// logger to be injected (see wl2
-// or tweet @mikermcneil for status of this feature or
-// to help out)
-var log = (process.env.LOG_QUERIES === 'true') ? console.log : function () {};
+module.exports = (function sailsMySQL() {
+ // Keep track of all the datastores used by the app
+ var datastores = {};
-module.exports = (function() {
-
- // Keep track of all the connections
- var connections = {};
-
- var sqlOptions = {
- parameterized: false,
- caseSensitive: false,
- escapeCharacter: '`',
- casting: false,
- canReturnValues: false,
- escapeInserts: true
- };
+ // Keep track of all the connection model definitions
+ var modelDefinitions = {};
var adapter = {
+ identity: 'sails-mysql',
- //
- // TODO: make the exported thing an EventEmitter for when there's no callback.
- //
- emit: function (evName, data) {
-
- // temporary hack- should only be used for cases that would crash anyways
- // (see todo above- we still shouldn't throw, emit instead, hence this stub)
- if (evName === 'error') { throw data; }
- },
-
- // Which type of primary key is used by default
- pkFormat: 'integer',
-
- // Whether this adapter is syncable (yes)
- syncable: true,
+ // Waterline Adapter API Version
+ adapterApiVersion: 1,
defaults: {
- pool: true,
- connectionLimit: 5,
- waitForConnections: true
+ host: 'localhost',
+ port: 3306,
+ schema: true
},
- escape: function(val) {
- return mysql.escape(val);
- },
-
- escapeId: function(name) {
- return mysql.escapeId(name);
- },
-
-
- registerConnection: _registerConnection.configure(connections),
- teardown: _teardownConnection.configure(connections),
-
-
- // Direct access to query
- query: function(connectionName, collectionName, query, data, cb, connection) {
-
- if (_.isFunction(data)) {
- cb = data;
- data = null;
+ // ╔═╗═╗ ╦╔═╗╔═╗╔═╗╔═╗ ┌─┐┬─┐┬┬ ┬┌─┐┌┬┐┌─┐
+ // ║╣ ╔╩╦╝╠═╝║ ║╚═╗║╣ ├─┘├┬┘│└┐┌┘├─┤ │ ├┤
+ // ╚═╝╩ ╚═╩ ╚═╝╚═╝╚═╝ ┴ ┴└─┴ └┘ ┴ ┴ ┴ └─┘
+ // ┌┬┐┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐┌─┐┌─┐
+ // ││├─┤ │ ├─┤└─┐ │ │ │├┬┘├┤ └─┐
+ // ─┴┘┴ ┴ ┴ ┴ ┴└─┘ ┴ └─┘┴└─└─┘└─┘
+ // This allows outside access to the connection manager.
+ datastores: datastores,
+
+
+ // ╦═╗╔═╗╔═╗╦╔═╗╔╦╗╔═╗╦═╗ ┌┬┐┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐┌─┐
+ // ╠╦╝║╣ ║ ╦║╚═╗ ║ ║╣ ╠╦╝ ││├─┤ │ ├─┤└─┐ │ │ │├┬┘├┤
+ // ╩╚═╚═╝╚═╝╩╚═╝ ╩ ╚═╝╩╚═ ─┴┘┴ ┴ ┴ ┴ ┴└─┘ ┴ └─┘┴└─└─┘
+ // Register a datastore config and generate a connection manager for it.
+ registerDatastore: function registerDatastore(datastoreConfig, models, cb) {
+ var identity = datastoreConfig.identity;
+ if (!identity) {
+ return cb(new Error('Invalid datastore config. A datastore should contain a unique identity property.'));
}
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __QUERY__, cb);
- } else {
- __QUERY__(connection, cb);
+ try {
+ Helpers.registerDataStore({
+ identity: identity,
+ config: datastoreConfig,
+ models: models,
+ datastores: datastores,
+ modelDefinitions: modelDefinitions
+ }).execSync();
+ } catch (e) {
+ setImmediate(function done() {
+ return cb(redactPasswords(e));
+ });
+ return;
}
- function __QUERY__(connection, cb) {
-
- // Run query
- log('MySQL.query: ', query);
-
- if (data) connection.query(query, data, cb);
- else connection.query(query, cb);
-
- }
+ setImmediate(function done() {
+ return cb();
+ });
},
- // Fetch the schema for a collection
- // (contains attributes and autoIncrement value)
- describe: function(connectionName, collectionName, cb, connection) {
+ // ╔╦╗╔═╗╔═╗╦═╗╔╦╗╔═╗╦ ╦╔╗╔ ┌─┐┌─┐┌┐┌┌┐┌┌─┐┌─┐┌┬┐┬┌─┐┌┐┌
+ // ║ ║╣ ╠═╣╠╦╝ ║║║ ║║║║║║║ │ │ │││││││├┤ │ │ ││ ││││
+ // ╩ ╚═╝╩ ╩╩╚══╩╝╚═╝╚╩╝╝╚╝ └─┘└─┘┘└┘┘└┘└─┘└─┘ ┴ ┴└─┘┘└┘
+ // Destroy a manager and close any connections in its pool.
+ teardown: function teardown(identity, cb) {
+ var datastoreIdentities = [];
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __DESCRIBE__, cb);
+ // If no specific identity was sent, teardown all the datastores
+ if (!identity || identity === null) {
+ datastoreIdentities = datastoreIdentities.concat(_.keys(datastores));
} else {
- __DESCRIBE__(connection, cb);
+ datastoreIdentities.push(identity);
}
- function __DESCRIBE__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
- if (!collection) {
- return cb(util.format('Unknown collection `%s` in connection `%s`', collectionName, connectionName));
- }
- var tableName = mysql.escapeId(collectionName);
-
- var query = 'DESCRIBE ' + tableName;
- var pkQuery = 'SHOW INDEX FROM ' + tableName;
-
- // Run query
- log('MySQL.describe: ', query);
- log('MySQL.describe(pk): ', pkQuery);
-
- connection.query(query, function __DESCRIBE__(err, schema) {
- if (err) {
- if (err.code === 'ER_NO_SUCH_TABLE') {
- return cb();
- } else return cb(err);
+ // Teardown each datastore identity manager
+ async.eachSeries(datastoreIdentities, function teardownDatastore(datastoreIdentity, next) {
+ Helpers.teardown({
+ identity: datastoreIdentity,
+ datastores: datastores,
+ modelDefinitions: modelDefinitions
+ }).switch({
+ error: function error(err) {
+ return next(redactPasswords(err));
+ },
+ success: function success() {
+ return next();
}
-
- connection.query(pkQuery, function(err, pkResult) {
- if(err) return cb(err);
-
- // Loop through Schema and attach extra attributes
- schema.forEach(function(attr) {
-
- // Set Primary Key Attribute
- if(attr.Key === 'PRI') {
- attr.primaryKey = true;
-
- // If also an integer set auto increment attribute
- if(attr.Type === 'int(11)') {
- attr.autoIncrement = true;
- }
- }
-
- // Set Unique Attribute
- if(attr.Key === 'UNI') {
- attr.unique = true;
- }
- });
-
- // Loop Through Indexes and Add Properties
- pkResult.forEach(function(result) {
- schema.forEach(function(attr) {
- if(attr.Field !== result.Column_name) return;
- attr.indexed = true;
- });
- });
-
- // Convert mysql format to standard javascript object
- var normalizedSchema = sql.normalizeSchema(schema);
-
- // Set Internal Schema Mapping
- collection.schema = normalizedSchema;
-
- // TODO: check that what was returned actually matches the cache
- cb(null, normalizedSchema);
- });
-
});
- }
+ }, function asyncCb(err) {
+ cb(redactPasswords(err));
+ });
},
- // Create a new collection
- define: function(connectionName, collectionName, definition, cb, connection) {
- var self = this;
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __DEFINE__, cb);
- } else {
- __DEFINE__(connection, cb);
- }
-
- function __DEFINE__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
- if (!collection) {
- return cb(util.format('Unknown collection `%s` in connection `%s`', collectionName, connectionName));
- }
- var tableName = mysql.escapeId(collectionName);
-
- // Iterate through each attribute, building a query string
- var schema = sql.schema(tableName, definition);
-
- // Build query
- var query = 'CREATE TABLE ' + tableName + ' (' + schema + ')';
- if(connectionObject.config.charset) {
- query += ' DEFAULT CHARSET ' + connectionObject.config.charset;
- }
-
- if(connectionObject.config.collation) {
- if(!connectionObject.config.charset) query += ' DEFAULT ';
- query += ' COLLATE ' + connectionObject.config.collation;
+ // ██████╗ ██████╗ ██╗
+ // ██╔══██╗██╔═══██╗██║
+ // ██║ ██║██║ ██║██║
+ // ██║ ██║██║▄▄ ██║██║
+ // ██████╔╝╚██████╔╝███████╗
+ // ╚═════╝ ╚══▀▀═╝ ╚══════╝
+ //
+ // Methods related to manipulating data stored in the database.
+
+
+ // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐
+ // ║ ╠╦╝║╣ ╠═╣ ║ ║╣ ├┬┘├┤ │ │ │├┬┘ ││
+ // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝ ┴└─└─┘└─┘└─┘┴└──┴┘
+ // Add a new row to the table
+ create: function create(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.create({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ notUnique: function error(errInfo) {
+ var e = new Error(errInfo.message);
+ e.footprint = errInfo.footprint;
+ return cb(redactPasswords(e));
+ },
+ success: function success(report) {
+ var record = report && report.record || undefined;
+ return cb(undefined, record);
}
-
-
- // Run query
- log('MYSQL.define: ', query);
-
- connection.query(query, function __DEFINE__(err, result) {
- if (err) return cb(err);
-
- //
- // TODO:
- // Determine if this can safely be changed to the `adapter` closure var
- // (i.e. this is the last remaining usage of the "this" context in the MySQLAdapter)
- //
-
- self.describe(connectionName, collectionName, function(err) {
- cb(err, result);
- });
- });
-
- }
+ });
},
- // Drop an existing collection
- drop: function(connectionName, collectionName, relations, cb, connection) {
- if(typeof relations === 'function') {
- cb = relations;
- relations = [];
- }
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __DROP__, cb);
- } else {
- __DROP__(connection, cb);
- }
-
- function __DROP__(connection, cb) {
-
- var connectionObject = connections[connectionName];
-
-
- // Drop any relations
- function dropTable(item, next) {
-
- var collection = connectionObject.collections[item];
- var tableName = mysql.escapeId(collectionName);
-
- // Build query
- var query = 'DROP TABLE ' + tableName;
-
- // Run query
- log('MYSQL.drop: ', query);
-
- connection.query(query, function __DROP__(err, result) {
- if (err) {
- if (err.code !== 'ER_BAD_TABLE_ERROR' && err.code !== 'ER_NO_SUCH_TABLE') return next(err);
- result = null;
- }
-
- next(null, result);
- });
+ // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗ ╔═╗╔═╗╔═╗╦ ╦ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐
+ // ║ ╠╦╝║╣ ╠═╣ ║ ║╣ ║╣ ╠═╣║ ╠═╣ ├┬┘├┤ │ │ │├┬┘ ││
+ // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝ ╚═╝╩ ╩╚═╝╩ ╩ ┴└─└─┘└─┘└─┘┴└──┴┘
+ // Add multiple new rows to the table
+ createEach: function createEach(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.createEach({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ notUnique: function notUnique(errInfo) {
+ var e = new Error(errInfo.message);
+ e.footprint = errInfo.footprint;
+ return cb(redactPasswords(e));
+ },
+ success: function success(report) {
+ var records = report && report.records || undefined;
+ return cb(undefined, records);
}
-
- async.eachSeries(relations, dropTable, function(err) {
- if(err) return cb(err);
- dropTable(collectionName, cb);
- });
-
- }
- },
-
- //
- addAttribute: function (connectionName, collectionName, attrName, attrDef, cb, connection) {
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __ADD_ATTRIBUTE__, cb);
- } else {
- __ADD_ATTRIBUTE__(connection, cb);
- }
-
- function __ADD_ATTRIBUTE__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
- var tableName = collectionName;
-
- var query = sql.addColumn(tableName, attrName, attrDef);
-
- // Run query
- log('MYSQL.addAttribute: ', query);
-
- connection.query(query, function(err, result) {
- if (err) return cb(err);
-
- // TODO: marshal response to waterline interface
- cb(err);
- });
-
- }
+ });
},
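+ // Usage sketch (illustrative only -- query keys are assumptions): same shape
+ // as `create` above, but with an array of new records:
+ //
+ //   adapter.createEach('default', {
+ //     using: 'user',
+ //     newRecords: [{ name: 'Finn' }, { name: 'Jake' }]
+ //   }, function (err, records) {
+ //     if (err) { return console.error(err); }
+ //     console.log(records);
+ //   });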
- //
- removeAttribute: function (connectionName, collectionName, attrName, cb, connection) {
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __REMOVE_ATTRIBUTE__, cb);
- } else {
- __REMOVE_ATTRIBUTE__(connection, cb);
- }
-
- function __REMOVE_ATTRIBUTE__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
- var tableName = collectionName;
-
- var query = sql.removeColumn(tableName, attrName);
-
- // Run query
- log('MYSQL.removeAttribute: ', query);
-
- connection.query(query, function(err, result) {
- if (err) return cb(err);
-
- // TODO: marshal response to waterline interface
- cb(err);
- });
-
- }
- },
-
- // No custom alter necessary-- alter can be performed by using the other methods (addAttribute, removeAttribute)
- // you probably want to use the default in waterline core since this can get complex
- // (that is unless you want some enhanced functionality-- then please be my guest!)
-
- // Create one or more new models in the collection
- create: function(connectionName, collectionName, data, cb, connection) {
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __CREATE__, cb);
- } else {
- __CREATE__(connection, cb);
- }
-
- function __CREATE__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
- var tableName = collectionName;
-
- var _insertData = _.cloneDeep(data);
-
- // Prepare values
- Object.keys(data).forEach(function(value) {
- data[value] = utils.prepareValue(data[value]);
- });
-
- var schema = connectionObject.schema;
- var _query;
-
- var sequel = new Sequel(schema, sqlOptions);
-
- // Build a query for the specific query strategy
- try {
- _query = sequel.create(collectionName, data);
- } catch(e) {
- return cb(e);
+ // ╔═╗╔═╗╦ ╔═╗╔═╗╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╚═╗║╣ ║ ║╣ ║ ║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩═╝╚═╝╚═╝ ╩ └─┘└└─┘└─┘┴└─ ┴
+ // Find one or more matching records in the table
+ find: function find(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.select({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success(report) {
+ return cb(undefined, report.records);
}
-
- // Run query
- log('MySQL.create: ', _query.query);
-
- connection.query(_query.query, function(err, result) {
- if (err) return cb( handleQueryError(err) );
-
- // Build model to return
- var autoInc = null;
-
- Object.keys(collection.definition).forEach(function(key) {
- if(!collection.definition[key].hasOwnProperty('autoIncrement')) return;
- autoInc = key;
- });
-
- var autoIncData = {};
-
- if (autoInc) {
- autoIncData[autoInc] = result.insertId;
- }
-
- var values = _.extend({}, _insertData, autoIncData);
- cb(err, values);
- });
- }
+ });
},
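+ // Usage sketch (illustrative only -- the criteria shape shown is an
+ // assumption about how Waterline hands queries to this adapter):
+ //
+ //   adapter.find('default', {
+ //     using: 'user',
+ //     criteria: { where: { name: 'Finn' }, limit: 10 }
+ //   }, function (err, records) {
+ //     if (err) { return console.error(err); }
+ //     console.log(records);
+ //   });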
- // Override of createEach to share a single connection
- // instead of using a separate connection for each request
- createEach: function (connectionName, collectionName, valuesList, cb, connection) {
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __CREATE_EACH__, cb);
- } else {
- __CREATE_EACH__(connection, cb);
- }
-
-
- function __CREATE_EACH__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
- var tableName = collectionName;
-
- var records = [];
-
- async.eachSeries(valuesList, function (data, cb) {
-
- // Prepare values
- Object.keys(data).forEach(function(value) {
- data[value] = utils.prepareValue(data[value]);
- });
- var schema = connectionObject.schema;
- var _query;
-
- var sequel = new Sequel(schema, sqlOptions);
-
- // Build a query for the specific query strategy
- try {
- _query = sequel.create(collectionName, data);
- } catch(e) {
- return cb(e);
- }
-
- // Run query
- log('MySQL.createEach: ', _query.query);
-
- connection.query(_query.query, function(err, results) {
- if (err) return cb( handleQueryError(err) );
- records.push(results.insertId);
- cb();
- });
- }, function(err) {
- if(err) return cb(err);
-
- var pk = 'id';
-
- Object.keys(collection.definition).forEach(function(key) {
- if(!collection.definition[key].hasOwnProperty('primaryKey')) return;
- pk = key;
- });
-
- // If there are no records (`!records.length`)
- // then skip the query altogether- we don't need to look anything up
- if (!records.length){
- return cb(null, []);
+ // ╦ ╦╔═╗╔╦╗╔═╗╔╦╗╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║╠═╝ ║║╠═╣ ║ ║╣ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╩ ═╩╝╩ ╩ ╩ ╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Update one or more records in the table
+ update: function update(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.update({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ notUnique: function notUnique(errInfo) {
+ var e = new Error(errInfo.message);
+ e.footprint = errInfo.footprint;
+ return cb(redactPasswords(e));
+ },
+ success: function success(report) {
+ if (report) {
+ return cb(undefined, report.records);
}
- // Build a Query to get newly inserted records
- var query = 'SELECT * FROM ' + mysql.escapeId(tableName) + ' WHERE ' + mysql.escapeId(pk) + ' IN (' + records + ');';
-
- // Run Query returing results
- log('MYSQL.createEach: ', query);
-
- connection.query(query, function(err, results) {
- if(err) return cb(err);
- cb(null, results);
- });
- });
-
- }
+ return cb();
+ }
+ });
},
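+ // Note that the callback only receives updated records when the helper
+ // returns a report. Usage sketch (illustrative only -- the query keys and
+ // the `fetch` meta flag are assumptions about how to request that report):
+ //
+ //   adapter.update('default', {
+ //     using: 'user',
+ //     criteria: { where: { name: 'Finn' } },
+ //     valuesToSet: { name: 'Jake' },
+ //     meta: { fetch: true }
+ //   }, function (err, updatedRecords) { /* ... */ });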
- /**
- * [join description]
- * @param {[type]} conn [description]
- * @param {[type]} coll [description]
- * @param {[type]} criteria [description]
- * @param {[type]} cb [description]
- * @return {[type]} [description]
- */
- join: function (connectionName, collectionName, options, cb, connection) {
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __JOIN__, cb);
- } else {
- __JOIN__(connection, cb);
- }
-
- function __JOIN__(client, done) {
-
- // Populate associated records for each parent result
- // (or do them all at once as an optimization, if possible)
- Cursor({
-
- instructions: options,
- nativeJoins: true,
-
- /**
- * Find some records directly (using only this adapter)
- * from the specified collection.
- *
- * @param {String} collectionIdentity
- * @param {Object} criteria
- * @param {Function} _cb
- */
- $find: function (collectionName, criteria, _cb) {
- return adapter.find(connectionName, collectionName, criteria, _cb, client);
- },
-
- /**
- * Look up the name of the primary key field
- * for the collection with the specified identity.
- *
- * @param {String} collectionIdentity
- * @return {String}
- */
- $getPK: function (collectionName) {
- if (!collectionName) return;
- return _getPK(connectionName, collectionName);
- },
-
- /**
- * Given a strategy type, build up and execute a SQL query for it.
- *
- * @param {}
- */
-
- $populateBuffers: function populateBuffers(options, next) {
-
- var buffers = options.buffers;
- var instructions = options.instructions;
-
- // Grab the collection by looking into the connection
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
-
- var parentRecords = [];
- var cachedChildren = {};
-
- // Grab Connection Schema
- var schema = {};
-
- Object.keys(connectionObject.collections).forEach(function(coll) {
- schema[coll] = connectionObject.collections[coll].schema;
- });
-
- // Build Query
- var _schema = connectionObject.schema;
-
- var sequel = new Sequel(_schema, sqlOptions);
- var _query;
-
- // Build a query for the specific query strategy
- try {
- _query = sequel.find(collectionName, instructions);
- } catch(e) {
- return next(e);
- }
-
- async.auto({
-
- processParent: function(next) {
- log('MySQL.populateBuffers: ', _query.query[0]);
-
- client.query(_query.query[0], function __FIND__(err, result) {
- if(err) return next(err);
-
- parentRecords = result;
-
- var splitChildren = function(parent, next) {
- var cache = {};
-
- _.keys(parent).forEach(function(key) {
-
- // Check if we can split this on our special alias identifier '___' and if
- // so put the result in the cache
- var split = key.split('___');
- if(split.length < 2) return;
-
- if(!hop(cache, split[0])) cache[split[0]] = {};
- cache[split[0]][split[1]] = parent[key];
- delete parent[key];
- });
-
- // Combine the local cache into the cachedChildren
- if(_.keys(cache).length > 0) {
- _.keys(cache).forEach(function(pop) {
- if(!hop(cachedChildren, pop)) cachedChildren[pop] = [];
- cachedChildren[pop] = cachedChildren[pop].concat(cache[pop]);
- });
- }
-
- next();
- };
-
-
- // Pull out any aliased child records that have come from a hasFK association
- async.eachSeries(parentRecords, splitChildren, function(err) {
- if(err) return next(err);
- buffers.parents = parentRecords;
- next();
- });
- });
- },
-
- // Build child buffers.
- // For each instruction, loop through the parent records and build up a
- // buffer for the record.
- buildChildBuffers: ['processParent', function(next, results) {
- async.each(_.keys(instructions.instructions), function(population, nextPop) {
-
- var populationObject = instructions.instructions[population];
- var popInstructions = populationObject.instructions;
- var pk = _getPK(connectionName, popInstructions[0].parent);
-
- var alias = populationObject.strategy.strategy === 1 ? popInstructions[0].parentKey : popInstructions[0].alias;
-
- // Use eachSeries here to keep ordering
- async.eachSeries(parentRecords, function(parent, nextParent) {
- var buffer = {
- attrName: population,
- parentPK: parent[pk],
- pkAttr: pk,
- keyName: alias
- };
-
- var records = [];
-
- // Check for any cached parent records
- if(hop(cachedChildren, alias)) {
- cachedChildren[alias].forEach(function(cachedChild) {
- var childVal = popInstructions[0].childKey;
- var parentVal = popInstructions[0].parentKey;
-
- if(cachedChild[childVal] !== parent[parentVal]) {
- return;
- }
-
- // If null value for the parentVal, ignore it
- if(parent[parentVal] === null) return;
-
- records.push(cachedChild);
- });
- }
-
- if(records.length > 0) {
- buffer.records = records;
- }
-
- buffers.add(buffer);
- nextParent();
- }, nextPop);
- }, next);
- }],
-
-
- processChildren: ['buildChildBuffers', function(next, results) {
-
- // Remove the parent query
- _query.query.shift();
-
- async.each(_query.query, function(q, next) {
-
- var qs = '';
- var pk;
-
- if(!Array.isArray(q.instructions)) {
- pk = _getPK(connectionName, q.instructions.parent);
- }
- else if(q.instructions.length > 1) {
- pk = _getPK(connectionName, q.instructions[0].parent);
- }
-
- parentRecords.forEach(function(parent) {
- if(_.isNumber(parent[pk])) {
- qs += q.qs.replace('^?^', parent[pk]) + ' UNION ';
- } else {
- qs += q.qs.replace('^?^', '"' + parent[pk] + '"') + ' UNION ';
- }
- });
-
- // Remove the last UNION
- qs = qs.slice(0, -7);
-
- // Add a final sort to the Union clause for integration
- if(parentRecords.length > 1) {
- var addedOrder = false;
-
- function addSort(sortKey, sorts) {
- if (!sortKey.match(/^[0-9,a-z,A-Z$_]+$/)) {
- return;
- }
- if (!addedOrder) {
- addedOrder = true;
- qs += ' ORDER BY ';
- }
-
- var direction = sorts[sortKey] === 1 ? 'ASC' : 'DESC';
- qs += sortKey + ' ' + direction;
- }
-
- if(!Array.isArray(q.instructions)) {
- _.keys(q.instructions.criteria.sort).forEach(function(sortKey) {
- addSort(sortKey, q.instructions.criteria.sort);
- });
- }
- else if(q.instructions.length === 2) {
- _.keys(q.instructions[1].criteria.sort).forEach(function(sortKey) {
- addSort(sortKey, q.instructions[1].criteria.sort);
- });
- }
- }
-
- log('MySQL.processChildren: ', qs);
-
- client.query(qs, function __FIND__(err, result) {
- if(err) return next(err);
-
- var groupedRecords = {};
-
- result.forEach(function(row) {
-
- if(!Array.isArray(q.instructions)) {
- if(!hop(groupedRecords, row[q.instructions.childKey])) {
- groupedRecords[row[q.instructions.childKey]] = [];
- }
-
- groupedRecords[row[q.instructions.childKey]].push(row);
- }
- else {
-
- // Grab the special "foreign key" we attach and make sure to remove it
- var fk = '___' + q.instructions[0].childKey;
-
- if(!hop(groupedRecords, row[fk])) {
- groupedRecords[row[fk]] = [];
- }
-
- var data = _.cloneDeep(row);
- delete data[fk];
- groupedRecords[row[fk]].push(data);
- }
- });
-
- buffers.store.forEach(function(buffer) {
- if(buffer.attrName !== q.attrName) return;
- var records = groupedRecords[buffer.belongsToPKValue];
- if(!records) return;
- if(!buffer.records) buffer.records = [];
- buffer.records = buffer.records.concat(records);
- });
-
- next();
- });
- }, function(err) {
- next();
- });
-
- }]
-
- },
- function(err) {
- if(err) return next(err);
- next();
- });
+ // ╔╦╗╔═╗╔═╗╔╦╗╦═╗╔═╗╦ ╦ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║║║╣ ╚═╗ ║ ╠╦╝║ ║╚╦╝ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ═╩╝╚═╝╚═╝ ╩ ╩╚═╚═╝ ╩ └─┘└└─┘└─┘┴└─ ┴
+ // Delete one or more records in a table
+ destroy: function destroy(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.destroy({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success(report) {
+ if (report) {
+ return cb(undefined, report.records);
}
- }, done);
- }
+ return cb();
+ }
+ });
},
- // Find one or more models from the collection
- // using where, limit, skip, and order
- // In where: handle `or`, `and`, and `like` queries
- find: function(connectionName, collectionName, options, cb, connection) {
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __FIND__, cb);
- } else {
- __FIND__(connection, cb);
- }
-
- function __FIND__(connection, cb) {
-
- // Check if this is an aggregate query and that there is something to return
- if(options.groupBy || options.sum || options.average || options.min || options.max) {
- if(!options.sum && !options.average && !options.min && !options.max) {
- return cb(Errors.InvalidGroupBy);
- }
+ // ╔╗╔╔═╗╔╦╗╦╦ ╦╔═╗ ┬┌─┐┬┌┐┌ ┌─┐┬ ┬┌─┐┌─┐┌─┐┬─┐┌┬┐
+ // ║║║╠═╣ ║ ║╚╗╔╝║╣ ││ │││││ └─┐│ │├─┘├─┘│ │├┬┘ │
+ // ╝╚╝╩ ╩ ╩ ╩ ╚╝ ╚═╝ └┘└─┘┴┘└┘ └─┘└─┘┴ ┴ └─┘┴└─ ┴
+ // Build up native joins to run on the adapter.
+ join: function join(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.join({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success(report) {
+ return cb(undefined, report);
}
+ });
+ },
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
-
- // Build find query
- var schema = connectionObject.schema;
- var _query;
-
- var sequel = new Sequel(schema, sqlOptions);
- // Build a query for the specific query strategy
- try {
- _query = sequel.find(collectionName, options);
- } catch(e) {
- return cb(e);
+ // ╔═╗╦ ╦╔═╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╠═╣╚╗╔╝║ ╦ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╩ ╩ ╚╝ ╚═╝ └─┘└└─┘└─┘┴└─ ┴
+ // Compute the average of a particular attribute over the matching records.
+ avg: function avg(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.avg({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success(report) {
+ return cb(undefined, report);
}
-
- // Run query
- log('MYSQL.find: ', _query.query[0]);
-
- connection.query(_query.query[0], function(err, result) {
- if(err) return cb(err);
- cb(null, result);
- });
-
- }
+ });
},
- // Count one model from the collection
- // using where, limit, skip, and order
- // In where: handle `or`, `and`, and `like` queries
- count: function(connectionName, collectionName, options, cb, connection) {
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __COUNT__, cb);
- } else {
- __COUNT__(connection, cb);
- }
-
- function __COUNT__(connection, cb) {
- // Check if this is an aggregate query and that there is something to return
- if(options.groupBy || options.sum || options.average || options.min || options.max) {
- if(!options.sum && !options.average && !options.min && !options.max) {
- return cb(Errors.InvalidGroupBy);
- }
+ // ╔═╗╦ ╦╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ╚═╗║ ║║║║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╩ ╩ └─┘└└─┘└─┘┴└─ ┴
+ // Compute the sum of a particular attribute over the matching records.
+ sum: function sum(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.sum({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success(report) {
+ return cb(undefined, report);
}
+ });
+ },
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
-
- // Build find query
- var schema = connectionObject.schema;
- var _query;
-
- var sequel = new Sequel(schema, sqlOptions);
- // Build a count query
- try {
- _query = sequel.count(collectionName, options);
- } catch(e) {
- return cb(e);
+ // ╔═╗╔═╗╦ ╦╔╗╔╔╦╗ ┌─┐ ┬ ┬┌─┐┬─┐┬ ┬
+ // ║ ║ ║║ ║║║║ ║ │─┼┐│ │├┤ ├┬┘└┬┘
+ // ╚═╝╚═╝╚═╝╝╚╝ ╩ └─┘└└─┘└─┘┴└─ ┴
+ // Return the number of matching records.
+ count: function count(datastoreName, query, cb) {
+ var datastore = datastores[datastoreName];
+ var models = modelDefinitions[datastoreName];
+ Helpers.count({
+ datastore: datastore,
+ models: models,
+ query: query
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success(report) {
+ return cb(undefined, report);
}
-
- // Run query
- log('MYSQL.count: ', _query.query[0]);
-
- connection.query(_query.query[0], function(err, result) {
- if(err) return cb(err);
- // Return the count from the simplified query
- cb(null, result[0].count);
- });
- }
+ });
},
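+ // Usage sketch (illustrative only -- query keys are assumptions):
+ //
+ //   adapter.count('default', {
+ //     using: 'user',
+ //     criteria: { where: { name: 'Finn' } }
+ //   }, function (err, numRecords) {
+ //     if (err) { return console.error(err); }
+ //     console.log(numRecords);
+ //   });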
- // Stream one or more models from the collection
- // using where, limit, skip, and order
- // In where: handle `or`, `and`, and `like` queries
- stream: function(connectionName, collectionName, options, stream, connection) {
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __STREAM__);
- } else {
- __STREAM__(connection);
- }
-
- function __STREAM__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
- var tableName = collectionName;
-
- // Build find query
- var query = sql.selectQuery(tableName, options);
- // Run query
- log('MySQL.stream: ', query);
-
- var dbStream = connection.query(query);
-
- // Handle error, an 'end' event will be emitted after this as well
- dbStream.on('error', function(err) {
- stream.end(err); // End stream
- cb(err); // Close connection
- });
-
- // the field packets for the rows to follow
- dbStream.on('fields', function(fields) {});
-
- // Pausing the connnection is useful if your processing involves I/O
- dbStream.on('result', function(row) {
- connection.pause();
- stream.write(row, function() {
- connection.resume();
- });
- });
+ // ██████╗ ██████╗ ██╗
+ // ██╔══██╗██╔══██╗██║
+ // ██║ ██║██║ ██║██║
+ // ██║ ██║██║ ██║██║
+ // ██████╔╝██████╔╝███████╗
+ // ╚═════╝ ╚═════╝ ╚══════╝
+ //
+ // Methods related to modifying the underlying data structure of the
+ // database.
+
+
+ // ╔╦╗╔═╗╔═╗╔═╗╦═╗╦╔╗ ╔═╗ ┌┬┐┌─┐┌┐ ┬ ┌─┐
+ // ║║║╣ ╚═╗║ ╠╦╝║╠╩╗║╣ │ ├─┤├┴┐│ ├┤
+ // ═╩╝╚═╝╚═╝╚═╝╩╚═╩╚═╝╚═╝ ┴ ┴ ┴└─┘┴─┘└─┘
+ // Describe a table and get back a normalized model schema format.
+ // (This is used to allow Sails to do auto-migrations)
+ describe: function describe(datastoreName, tableName, cb, meta) {
+ var datastore = datastores[datastoreName];
+ Helpers.describe({
+ datastore: datastore,
+ tableName: tableName,
+ meta: meta
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success(report) {
+ // Waterline expects the result to be undefined if the table doesn't
+ // exist.
+ if (_.keys(report.schema).length) {
+ return cb(undefined, report.schema);
+ }
- // all rows have been received
- dbStream.on('end', function() {
- stream.end(); // End stream
- cb(); // Close connection
- });
- }
+ return cb();
+ }
+ });
},
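+ // Usage sketch (illustrative only -- the datastore and table names are
+ // placeholders):
+ //
+ //   adapter.describe('default', 'user', function (err, schema) {
+ //     if (err) { return console.error(err); }
+ //     // `schema` is undefined when the table does not exist yet.
+ //     console.log(schema);
+ //   });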
- // Update one or more models in the collection
- update: function(connectionName, collectionName, options, values, cb, connection) {
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __UPDATE__, cb);
- } else {
- __UPDATE__(connection, cb);
- }
-
- function __UPDATE__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
-
- // Build find query
- var schema = connectionObject.schema;
- var _query;
-
- var sequel = new Sequel(schema, sqlOptions);
-
- // Build a query for the specific query strategy
- try {
- _query = sequel.find(collectionName, _.cloneDeep(options));
- } catch(e) {
- return cb(e);
+ // ╔╦╗╔═╗╔═╗╦╔╗╔╔═╗ ┌┬┐┌─┐┌┐ ┬ ┌─┐
+ // ║║║╣ ╠╣ ║║║║║╣ │ ├─┤├┴┐│ ├┤
+ // ═╩╝╚═╝╚ ╩╝╚╝╚═╝ ┴ ┴ ┴└─┘┴─┘└─┘
+ // Build a new table in the database.
+ // (This is used to allow Sails to do auto-migrations)
+ define: function define(datastoreName, tableName, definition, cb, meta) {
+ var datastore = datastores[datastoreName];
+ Helpers.define({
+ datastore: datastore,
+ tableName: tableName,
+ definition: definition,
+ meta: meta
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success() {
+ return cb();
}
-
- log('MySQL.update(before): ', _query.query[0]);
-
- connection.query(_query.query[0], function(err, results) {
- if(err) return cb(err);
-
- var ids = [];
-
- var pk = 'id';
- Object.keys(collection.definition).forEach(function(key) {
- if(!collection.definition[key].hasOwnProperty('primaryKey')) return;
- pk = key;
- });
-
- // update statement will affect 0 rows
- if (results.length === 0) {
- return cb(null, []);
- }
-
- results.forEach(function(result) {
- ids.push(result[pk]);
- });
-
- // Prepare values
- Object.keys(values).forEach(function(value) {
- values[value] = utils.prepareValue(values[value]);
- });
-
- // Build query
- try {
- _query = sequel.update(collectionName, options, values);
- } catch(e) {
- return cb(e);
- }
-
- // Run query
- log('MySQL.update: ', _query.query);
-
- connection.query(_query.query, function(err, result) {
- if (err) return cb( handleQueryError(err) );
-
- var criteria;
- if(ids.length === 1) {
- criteria = { where: {}, limit: 1 };
- criteria.where[pk] = ids[0];
- } else {
- criteria = { where: {} };
- criteria.where[pk] = ids;
- }
-
- // Build a query for the specific query strategy
- try {
- _query = sequel.find(collectionName, criteria);
- } catch(e) {
- return cb(e);
- }
-
- // Run query
- log('MySQL.update(after): ', _query.query[0]);
-
- connection.query(_query.query[0], function(err, result) {
- if(err) return cb(err);
- cb(null, result);
- });
- });
-
- });
- }
+ });
},
- // Delete one or more models from the collection
- destroy: function(connectionName, collectionName, options, cb, connection) {
-
- if(_.isUndefined(connection)) {
- return spawnConnection(connectionName, __DESTROY__, cb);
- } else {
- __DESTROY__(connection, cb);
- }
-
- function __DESTROY__(connection, cb) {
-
- var connectionObject = connections[connectionName];
- var collection = connectionObject.collections[collectionName];
- var tableName = collectionName;
-
- // Build query
- var schema = connectionObject.schema;
- var _query;
-
- var sequel = new Sequel(schema, sqlOptions);
-
- // Build a query for the specific query strategy
- try {
- _query = sequel.destroy(collectionName, options);
- } catch(e) {
- return cb(e);
+ // ╔═╗╦═╗╔═╗╔═╗╔╦╗╔═╗ ┌─┐┌─┐┬ ┬┌─┐┌┬┐┌─┐
+ // ║ ╠╦╝║╣ ╠═╣ ║ ║╣ └─┐│ ├─┤├┤ │││├─┤
+ // ╚═╝╩╚═╚═╝╩ ╩ ╩ ╚═╝ └─┘└─┘┴ ┴└─┘┴ ┴┴ ┴
+ // Create a new schema (namespace) in the database.
+ // (In MySQL, a schema is just another name for a database.)
+ createSchema: function createSchema(datastoreName, schemaName, cb, meta) {
+ var datastore = datastores[datastoreName];
+ Helpers.createSchema({
+ datastore: datastore,
+ schemaName: schemaName,
+ meta: meta
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success() {
+ return cb();
}
+ });
+ },
- async.auto({
- findRecords: function(next) {
- adapter.find(connectionName, collectionName, options, next, connection);
- },
+ // ╔╦╗╦═╗╔═╗╔═╗ ┌┬┐┌─┐┌┐ ┬ ┌─┐
+ // ║║╠╦╝║ ║╠═╝ │ ├─┤├┴┐│ ├┤
+ // ═╩╝╩╚═╚═╝╩ ┴ ┴ ┴└─┘┴─┘└─┘
+ // Remove a table from the database.
+ drop: function drop(datastoreName, tableName, relations, cb, meta) {
+ var datastore = datastores[datastoreName];
+ Helpers.drop({
+ datastore: datastore,
+ tableName: tableName,
+ meta: meta
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
+ },
+ badConnection: function badConnection(err) {
+ return cb(redactPasswords(err));
+ },
+ success: function success() {
+ return cb();
+ }
+ });
+ },
- destroyRecords: ['findRecords', function(next) {
- log('MySQL.destroy: ', _query.query);
- connection.query(_query.query, next);
- }]
+ // ╔═╗╔═╗╔╦╗ ┌─┐┌─┐┌─┐ ┬ ┬┌─┐┌┐┌┌─┐┌─┐
+ // ╚═╗║╣ ║ └─┐├┤ │─┼┐│ │├┤ ││││ ├┤
+ // ╚═╝╚═╝ ╩ └─┘└─┘└─┘└└─┘└─┘┘└┘└─┘└─┘
+ // Set the sequence (i.e. auto-increment counter) of an auto-incrementing primary key to a known value.
+ setSequence: function setSequence(datastoreName, sequenceName, sequenceValue, cb, meta) {
+ var datastore = datastores[datastoreName];
+ Helpers.setSequence({
+ datastore: datastore,
+ sequenceName: sequenceName,
+ sequenceValue: sequenceValue,
+ meta: meta
+ }).switch({
+ error: function error(err) {
+ return cb(redactPasswords(err));
},
- function(err, results) {
- if(err) return cb(err);
- cb(null, results.findRecords);
- });
-
- }
+ success: function success() {
+ return cb();
+ }
+ });
},
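+ // Usage sketch (illustrative only -- the sequence name format expected by
+ // the helper is an assumption; in MySQL this presumably amounts to adjusting
+ // a table's AUTO_INCREMENT counter):
+ //
+ //   adapter.setSequence('default', 'user_id_seq', 1000, function (err) {
+ //     if (err) { return console.error(err); }
+ //   });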
-
- // Identity is here to facilitate unit testing
- // (this is optional and normally automatically populated based on filename)
- identity: 'sails-mysql'
};
-
-
return adapter;
-
-
-
- /**
- * Wrap a function in the logic necessary to provision a connection.
- * (either grab a free connection from the pool or create a new one)
- *
- * cb is optional (you might be streaming), but... come to think of it...
- * TODO:
- * if streaming, pass in the stream instead of the callback--
- * then any relevant `error` events can be emitted on the stream.
- *
- * @param {[type]} connectionName
- * @param {Function} fn
- * @param {[type]} cb
- */
- function spawnConnection(connectionName, fn, cb) {
- _spawnConnection(
- getConnectionObject(connectionName),
- fn,
- wrapCallback(cb)
- );
- }
-
-
-
-
- ////// NOTE /////////////////////////////////////////////////////////////
- //
- // both of these things should be done in WL core, imo:
- //
- // i.e.
- // getConnectionObject(connectionName)
- // wrapCallback(cb)
- //
- /////////////////////////////////////////////////////////////////////////
-
-
-
- /**
- * wrapCallback
- *
- * cb is optional (you might be streaming), but... come to think of it...
- * TODO:
- * if streaming, pass in the stream instead of the callback--
- * then any relevant `error` events can be emitted on the stream.
- *
- * @param {Function} cb [description]
- * @return {[type]} [description]
- */
- function wrapCallback (cb) {
-
- // Handle missing callback:
- if (!cb) {
- // Emit errors on adapter itself when no callback is present.
- cb = function (err) {
- try {
- adapter.emit(STRINGFILE.noCallbackError+'\n'+err.toString());
- }
- catch (e) { adapter.emit(err); }
- };
- }
- return cb;
- }
-
-
- /**
- * Lookup the primary key for the given collection
- * @param {[type]} collectionIdentity [description]
- * @return {[type]} [description]
- * @api private
- */
- function _getPK (connectionIdentity, collectionIdentity) {
-
- var collectionDefinition;
- try {
- collectionDefinition = connections[connectionIdentity].collections[collectionIdentity].definition;
-
- return _.find(Object.keys(collectionDefinition), function _findPK (key) {
- var attrDef = collectionDefinition[key];
- if( attrDef && attrDef.primaryKey ) return key;
- else return false;
- }) || 'id';
- }
- catch (e) {
- throw new Error('Unable to determine primary key for collection `'+collectionIdentity+'` because '+
- 'an error was encountered acquiring the collection definition:\n'+ require('util').inspect(e,false,null));
- }
- }
-
-
- /**
- *
- * @param {String} connectionName
- * @return {Object} connectionObject
- */
- function getConnectionObject ( connectionName ) {
-
- var connectionObject = connections[connectionName];
- if(!connectionObject) {
-
- // this should never happen unless the adapter is being called directly
- // (i.e. outside of a CONNection OR a COLLection.)
- adapter.emit('error', Errors.InvalidConnection);
- }
- return connectionObject;
- }
-
- /**
- *
- * @param {[type]} err [description]
- * @return {[type]} [description]
- * @api private
- */
- function handleQueryError (err) {
-
- var formattedErr;
-
- // Check for uniqueness constraint violations:
- if (err.code === 'ER_DUP_ENTRY') {
-
- // Manually parse the MySQL error response and extract the relevant bits,
- // then build the formatted properties that will be passed directly to
- // WLValidationError in Waterline core.
- var matches = err.message.match(/Duplicate entry '(.*)' for key '(.*?)'$/);
- if (matches && matches.length) {
- formattedErr = {};
- formattedErr.code = 'E_UNIQUE';
- formattedErr.invalidAttributes = {};
- formattedErr.invalidAttributes[matches[2]] = [{
- value: matches[1],
- rule: 'unique'
- }];
- }
- }
-
- return formattedErr || err;
- }
-
})();
-
diff --git a/lib/connections/register.js b/lib/connections/register.js
deleted file mode 100644
index ad45de54..00000000
--- a/lib/connections/register.js
+++ /dev/null
@@ -1,89 +0,0 @@
-/**
- * Module dependencies
- */
-
-var mysql = require('mysql');
-var _releaseConnection = require('./release');
-var Errors = require('waterline-errors').adapter;
-var _ = require('lodash');
-var utils = require('../utils');
-
-
-module.exports = {};
-
-
-module.exports.configure = function ( connections ) {
-
- /**
- * Register a connection (and the collections assigned to it) with the MySQL adapter.
- *
- * @param {Connection} connection
- * @param {Object} collections
- * @param {Function} cb
- */
-
- return function registerConnection (connection, collections, cb) {
-
- // Validate arguments
- if(!connection.identity) return cb(Errors.IdentityMissing);
- if(connections[connection.identity]) return cb(Errors.IdentityDuplicate);
-
- // Build up a schema for this connection that can be used throughout the adapter
- var schema = {};
-
- _.each(_.keys(collections), function(coll) {
- var collection = collections[coll];
- if(!collection) return;
-
- var _schema = collection.waterline && collection.waterline.schema && collection.waterline.schema[collection.identity];
- if(!_schema) return;
-
- // Set defaults to ensure values are set
- if(!_schema.attributes) _schema.attributes = {};
- if(!_schema.tableName) _schema.tableName = coll;
-
- // If the connection names are't the same we don't need it in the schema
- if(!_.includes(collections[coll].connection, connection.identity)) {
- return;
- }
-
- // If the tableName is different from the identity, store the tableName in the schema
- var schemaKey = coll;
- if(_schema.tableName != coll) {
- schemaKey = _schema.tableName;
- }
-
- schema[schemaKey] = _schema;
- });
-
- if('url' in connection) {
- utils.parseUrl(connection);
- }
-
- // Store the connection
- connections[connection.identity] = {
- config: connection,
- collections: collections,
- connection: {},
- schema: schema
- };
-
- var activeConnection = connections[connection.identity];
-
- // Create a connection pool if configured to do so.
- // (and set up the necessary `releaseConnection` functionality to drain it.)
- if (activeConnection.config.pool) {
- activeConnection.connection.pool = mysql.createPool(activeConnection.config);
- activeConnection.connection.releaseConnection = _releaseConnection.poolfully;
- }
- // Otherwise, assign some default releaseConnection functionality.
- else {
- activeConnection.connection.releaseConnection = _releaseConnection.poollessly;
- }
-
- // Done! The WLConnection (and all of it's collections) have been loaded.
- return cb();
- };
-
-
-};
diff --git a/lib/connections/release.js b/lib/connections/release.js
deleted file mode 100644
index cff1875c..00000000
--- a/lib/connections/release.js
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Module dependencies
- */
-
-var Errors = require('waterline-errors').adapter;
-
-/**
- * Functions for freeing/terminating a MySQL connection when a query is complete.
- *
- * @type {Object}
- */
-module.exports = {
-
- /**
- * Frees the MySQL connection back into the pool.
- *
- * @param {MySQLConnection} conn
- * @param {Function} cb [description]
- */
- poolfully: function(conn, cb) {
- if (!conn || typeof conn.release !== 'function') {
- return cb(Errors.ConnectionRelease);
- }
-
- // Don't wait for connection release to trigger this callback.
- // (TODO: evaluate whether this should be the case)
- conn.release();
- return cb();
- },
-
-
- /**
- * Terminates the MySQL connection.
- *
- * @param {MySQLConnection} conn
- * @param {Function} cb
- */
- poollessly: function(conn, cb) {
- if (!conn || typeof conn.end !== 'function') {
- return cb(Errors.ConnectionRelease);
- }
-
- // Wait for the connection to be ended, then trigger the callback.
- conn.end(cb);
- }
-};
diff --git a/lib/connections/spawn.js b/lib/connections/spawn.js
deleted file mode 100644
index 2632e434..00000000
--- a/lib/connections/spawn.js
+++ /dev/null
@@ -1,119 +0,0 @@
-// Dependencies
-var mysql = require('mysql');
-
-var STRINGFILE = {
- noCallbackError: 'An error occurred in the MySQL adapter, but no callback was specified to the spawnConnection function to handle it.'
-};
-
-
-
-/**
- * Wrap a function in the logic necessary to provision a connection.
- * (either grab a free connection from the pool or create a new one)
- *
- * cb is optional (you might be streaming), but... TODO:
- * if streaming, pass in the stream instead of the callback--
- * then any relevant `error` events can be emitted on the stream.
- *
- * @param {Object} connectionObject
- * @param {Function} fn
- * @param {[type]} cb__spawnConnection
- */
-
-module.exports = function spawnConnection (connectionObject, fn, cb__spawnConnection) {
-
- //
- // TODO:
- //
- // Add app-side "soft" connection timeout if necessary.
- //
- // See mike's comment in node-mysql:
- // + https://github.com/felixge/node-mysql/pull/717#issuecomment-33877721
- // Also see the issue on pool conncetion timeouts:
- // + https://github.com/felixge/node-mysql/issues/424
- //
-
- // If pooling is used, grab a connection from the pool and run the
- // logic for the query.
- if (connectionObject.connection.pool) {
- connectionObject.connection.pool.getConnection(function (err, conn) {
- afterwards(err, conn);
- });
- return;
- }
-
- // Use a new connection each time
- var conn = mysql.createConnection(connectionObject.config);
- conn.connect(function (err) {
- afterwards(err, conn);
- });
- return;
-
-
-
- /**
- * Run the actual query logic (`fn`) now that we have our live connection,
- * and release/close the connection when finished.
- *
- * @param {[type]} err [description]
- * @param {[type]} liveConnection [description]
- * @return {[type]} [description]
- */
- function afterwards(err, liveConnection) {
-
- // Handle connection errors
- if (err) {
-
- //
- // TODO:
- // Cast the error using `waterline-errors`
- // ("could not connect")
- //
- err = new Error( 'Could not connect to MySQL:\n' + err.toString());
-
- // Try to release the connection, if it exists:
- connectionObject.connection.releaseConnection(liveConnection, function dontWaitForThis(){ });
-
- // But trigger the callback immediately (don't wait for the connection to be released)
- return cb__spawnConnection(err);
- }
-
- // Now that we have the live connection, run our adapter logic.
- // i.e. run `fn`, a function which, amongst other things, should do something
- // with the live MySQL connection (probably send a query).
- fn(liveConnection, function(err, result) {
-
- // Handle errors passed back from our adapter logic.
- if (err) {
-
- // Release the connection, then pass control back to Waterline core.
- connectionObject.connection.releaseConnection(liveConnection, function sendBackError ( /* thisErrDoesntMatter */ ) {
- cb__spawnConnection(err);
- });
- return;
- }
-
-
- // If we made it here, our adapter logic came back without an error,
- // so we release the connection and trigger our callback.
- connectionObject.connection.releaseConnection(liveConnection, function (err) {
-
- // If an error occurred releasing the connection handle it here:
- // (note that this is unlikely, and would indicate unexpected behavior)
- if (err) {
-
- //
- // TODO:
- // Cast the error using `waterline-errors`
- // ("could not release connection")
- //
- return cb__spawnConnection(err);
- }
-
- // Success (done.)
- return cb__spawnConnection(null, result);
- });
- });
-
- }
-};
diff --git a/lib/connections/teardown.js b/lib/connections/teardown.js
deleted file mode 100644
index 3582b2b0..00000000
--- a/lib/connections/teardown.js
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Module dependencies
- */
-
-
-
-module.exports = {};
-
-
-module.exports.configure = function ( connections ) {
-
- /**
- * Teardown a MySQL connection.
- * (if the Waterline "connection" is using a pool, also `.end()` it.)
- *
- * @param {String} connectionName [name of the Waterline "connection"]
- * @param {Function} cb
- */
- return function teardown (connectionName, cb) {
-
- function closeConnection(name) {
- // Drain the MySQL connection pool for this Waterline Connection
- // (if it's in use.)
-
- if ( connections[name] && connections[name].connection && connections[name].connection.pool ) {
- // console.log('Ending pool for ' + connectionName);
- connections[name].connection.pool.end();
- }
-
- // Make sure memory is freed by removing this stuff from our
- // global set of WL Connections.
- delete connections[name];
- }
-
- // If no connection name was given, teardown all the connections
- if(!connectionName) {
- Object.keys(connections).forEach(function(conn) {
- closeConnection(conn);
- });
- }
-
- // Else only teardown a single connection
- else {
- closeConnection(connectionName);
- }
-
- return cb();
-
- };
-
-};
diff --git a/lib/private/redact-passwords.js b/lib/private/redact-passwords.js
new file mode 100644
index 00000000..b66797f4
--- /dev/null
+++ b/lib/private/redact-passwords.js
@@ -0,0 +1,34 @@
+// ██████╗ ███████╗██████╗ █████╗ ██████╗████████╗
+// ██╔══██╗██╔════╝██╔══██╗██╔══██╗██╔════╝╚══██╔══╝
+// ██████╔╝█████╗ ██║ ██║███████║██║ ██║
+// ██╔══██╗██╔══╝ ██║ ██║██╔══██║██║ ██║
+// ██║ ██║███████╗██████╔╝██║ ██║╚██████╗ ██║
+// ╚═╝ ╚═╝╚══════╝╚═════╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝
+//
+// ██████╗ █████╗ ███████╗███████╗██╗ ██╗ ██████╗ ██████╗ ██████╗ ███████╗
+// ██╔══██╗██╔══██╗██╔════╝██╔════╝██║ ██║██╔═══██╗██╔══██╗██╔══██╗██╔════╝
+// ██████╔╝███████║███████╗███████╗██║ █╗ ██║██║ ██║██████╔╝██║ ██║███████╗
+// ██╔═══╝ ██╔══██║╚════██║╚════██║██║███╗██║██║ ██║██╔══██╗██║ ██║╚════██║
+// ██║ ██║ ██║███████║███████║╚███╔███╔╝╚██████╔╝██║ ██║██████╔╝███████║
+// ╚═╝ ╚═╝ ╚═╝╚══════╝╚══════╝ ╚══╝╚══╝ ╚═════╝ ╚═╝ ╚═╝╚═════╝ ╚══════╝
+//
+// Remove database passwords from the error instance.
+
+module.exports = function redactPasswords(err) {
+ var REDACT_REPLACEMENT = '$1:****@';
+ var REDACT_REGEX_SINGLE = /^(mysql:\/\/[^:\s]*):[^@\s]*@/;
+ var REDACT_REGEX_MULTI = /(mysql:\/\/[^:\s]*):[^@\s]*@/g;
+
+ if(err) {
+ if(err.meta && typeof err.meta === 'object' && err.meta.password && typeof err.meta.password === 'string'){
+ err.meta.password = '****';
+ }
+ if(err.meta && typeof err.meta === 'object' && err.meta.url && typeof err.meta.url === 'string') {
+ err.meta.url = err.meta.url.replace(REDACT_REGEX_SINGLE, REDACT_REPLACEMENT);
+ }
+ if(err.message && typeof err.message === 'string') {
+ err.message = err.message.replace(REDACT_REGEX_MULTI, REDACT_REPLACEMENT);
+ }
+ }
+ return err;
+};
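+// Example of the effect (follows directly from the patterns above):
+//
+//   redactPasswords(new Error('Could not connect to mysql://root:s3cret@localhost:3306/app'))
+//   // => error whose `.message` reads 'Could not connect to mysql://root:****@localhost:3306/app'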
diff --git a/lib/sql.js b/lib/sql.js
deleted file mode 100644
index ce77ca78..00000000
--- a/lib/sql.js
+++ /dev/null
@@ -1,462 +0,0 @@
-/**
- * Module Dependencies
- */
-
-var mysql = require('mysql');
-var _ = require('lodash');
-var utils = require('./utils');
-var hop = utils.object.hasOwnProperty;
-
-var sql = module.exports = {
-
- // Convert mysql format to standard javascript object
- normalizeSchema: function (schema) {
- return _.reduce(schema, function(memo, field) {
-
- // Marshal mysql DESCRIBE to waterline collection semantics
- var attrName = field.Field;
- var type = field.Type;
-
- // Remove (n) column-size indicators
- type = type.replace(/\([0-9]+\)$/,'');
-
- memo[attrName] = {
- type: type,
- defaultsTo: field.Default,
- autoIncrement: field.Extra === 'auto_increment'
- };
-
- if(field.primaryKey) {
- memo[attrName].primaryKey = field.primaryKey;
- }
-
- if(field.unique) {
- memo[attrName].unique = field.unique;
- }
-
- if(field.indexed) {
- memo[attrName].indexed = field.indexed;
- }
-
- return memo;
- }, {});
- },
-
- // @returns ALTER query for adding a column
- addColumn: function (collectionName, attrName, attrDef) {
- // Escape table name and attribute name
- var tableName = mysql.escapeId(collectionName);
-
- // sails.log.verbose("ADDING ",attrName, "with",attrDef);
-
- // Build column definition
- var columnDefinition = sql._schema(collectionName, attrDef, attrName);
-
- return 'ALTER TABLE ' + tableName + ' ADD ' + columnDefinition;
- },
-
- // @returns ALTER query for dropping a column
- removeColumn: function (collectionName, attrName) {
- // Escape table name and attribute name
- var tableName = mysql.escapeId(collectionName);
- attrName = mysql.escapeId(attrName);
-
- return 'ALTER TABLE ' + tableName + ' DROP COLUMN ' + attrName;
- },
-
- countQuery: function(collectionName, options, tableDefs){
- var query = 'SELECT count(*) as count from `' + collectionName + '`';
- return query += sql.serializeOptions(collectionName, options, tableDefs);
- },
-
- // Create a schema csv for a DDL query
- schema: function(collectionName, attributes) {
- return sql.build(collectionName, attributes, sql._schema);
- },
-
- _schema: function(collectionName, attribute, attrName) {
- attrName = mysql.escapeId(attrName);
- var type = sqlTypeCast(attribute);
-
- // Process PK field
- if(attribute.primaryKey) {
-
- var columnDefinition = attrName + ' ' + type;
-
- // If type is an integer, set auto increment
- if(type === 'TINYINT' || type === 'SMALLINT' || type === 'INT' || type === 'BIGINT') {
- return columnDefinition + ' UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY';
- }
-
- // Just set NOT NULL on other types
- return columnDefinition + ' NOT NULL PRIMARY KEY';
- }
-
- // Process NOT NULL field.
- // if notNull is true, set NOT NULL constraint
- var nullPart = '';
- if (attribute.notNull) {
- nullPart = ' NOT NULL ';
- }
-
- // Process UNIQUE field
- if(attribute.unique) {
- return attrName + ' ' + type + nullPart + ' UNIQUE KEY';
- }
-
- // Process INDEX field (NON-UNIQUE KEY)
- if(attribute.index) {
- return attrName + ' ' + type + nullPart + ', INDEX(' + attrName + ')';
- }
-
- return attrName + ' ' + type + ' ' + nullPart;
- },
-
- // Create an attribute csv for a DQL query
- attributes: function(collectionName, attributes) {
- return sql.build(collectionName, attributes, sql.prepareAttribute);
- },
-
- // Create a value csv for a DQL query
- // key => optional, overrides the keys in the dictionary
- values: function(collectionName, values, key) {
- return sql.build(collectionName, values, sql.prepareValue, ', ', key);
- },
-
- prepareCriterion: function(collectionName, value, key, parentKey) {
- // Special sub-attr case
- if (validSubAttrCriteria(value)) {
- return sql.where(collectionName, value, null, key);
-
- }
-
- // Build escaped attr and value strings using either the key,
- // or if one exists, the parent key
- var attrStr, valueStr;
-
-
- // Special comparator case
- if (parentKey) {
-
- attrStr = sql.prepareAttribute(collectionName, value, parentKey);
- valueStr = sql.prepareValue(collectionName, value, parentKey);
-
- // Why don't we strip you out of those bothersome apostrophes?
- var nakedButClean = String(valueStr).replace(new RegExp('^\'+|\'+$', 'g'), '');
-
- if (key === '<' || key === 'lessThan') return attrStr + '<' + valueStr;
- else if (key === '<=' || key === 'lessThanOrEqual') return attrStr + '<=' + valueStr;
- else if (key === '>' || key === 'greaterThan') return attrStr + '>' + valueStr;
- else if (key === '>=' || key === 'greaterThanOrEqual') return attrStr + '>=' + valueStr;
- else if (key === '!' || key === 'not') {
- if (value === null) return attrStr + ' IS NOT NULL';
- else if (_.isArray(value)) return attrStr + ' NOT IN(' + valueStr + ')';
- else return attrStr + '<>' + valueStr;
- }
- else if (key === 'like') return attrStr + ' LIKE \'' + nakedButClean + '\'';
- else if (key === 'contains') return attrStr + ' LIKE \'%' + nakedButClean + '%\'';
- else if (key === 'startsWith') return attrStr + ' LIKE \'' + nakedButClean + '%\'';
- else if (key === 'endsWith') return attrStr + ' LIKE \'%' + nakedButClean + '\'';
- else throw new Error('Unknown comparator: ' + key);
- } else {
- attrStr = sql.prepareAttribute(collectionName, value, key);
- valueStr = sql.prepareValue(collectionName, value, key);
-
- // Special IS NULL case
- if (_.isNull(value)) {
- return attrStr + " IS NULL";
- } else return attrStr + "=" + valueStr;
- }
- },
-
- prepareValue: function(collectionName, value, attrName) {
-
- // Cast dates to SQL
- if (_.isDate(value)) {
- value = toSqlDate(value);
- }
-
- // Cast functions to strings
- if (_.isFunction(value)) {
- value = value.toString();
- }
-
- // Escape (also wraps in quotes)
- return mysql.escape(value);
- },
-
- prepareAttribute: function(collectionName, value, attrName) {
- return mysql.escapeId(collectionName) + '.' + mysql.escapeId(attrName);
- },
-
- // // Starting point for predicate evaluation
- // // parentKey => if set, look for comparators and apply them to the parent key
- where: function(collectionName, where, key, parentKey) {
- return sql.build(collectionName, where, sql.predicate, ' AND ', undefined, parentKey);
- },
-
- // Recursively parse a predicate calculus and build a SQL query
- predicate: function(collectionName, criterion, key, parentKey) {
- var queryPart = '';
-
-
- if (parentKey) {
- return sql.prepareCriterion(collectionName, criterion, key, parentKey);
- }
-
- // OR
- if (key.toLowerCase() === 'or') {
- queryPart = sql.build(collectionName, criterion, sql.where, ' OR ');
- return ' ( ' + queryPart + ' ) ';
- }
-
- // AND
- else if (key.toLowerCase() === 'and') {
- queryPart = sql.build(collectionName, criterion, sql.where, ' AND ');
- return ' ( ' + queryPart + ' ) ';
- }
-
- // IN
- else if (_.isArray(criterion)) {
- queryPart = sql.prepareAttribute(collectionName, null, key) + " IN (" + sql.values(collectionName, criterion, key) + ")";
- return queryPart;
- }
-
- // LIKE
- else if (key.toLowerCase() === 'like') {
- return sql.build(collectionName, criterion, function(collectionName, value, attrName) {
- var attrStr = sql.prepareAttribute(collectionName, value, attrName);
-
-
- // TODO: Handle regexp criterias
- if (_.isRegExp(value)) {
- throw new Error('RegExp LIKE criterias not supported by the MySQLAdapter yet. Please contribute @ http://github.com/balderdashy/sails-mysql');
- }
-
- var valueStr = sql.prepareValue(collectionName, value, attrName);
-
- // Handle escaped percent (%) signs [encoded as %%%]
- valueStr = valueStr.replace(/%%%/g, '\\%');
-
- return attrStr + " LIKE " + valueStr;
- }, ' AND ');
- }
-
- // NOT
- else if (key.toLowerCase() === 'not') {
- throw new Error('NOT not supported yet!');
- }
-
- // Basic criteria item
- else {
- return sql.prepareCriterion(collectionName, criterion, key);
- }
-
- },
-
- serializeOptions: function(collectionName, options, tableDefs) {
-
- // Join clause
- // allow the key to be named with join or joins
- var joins = options.join || options.joins || [];
-
- if (joins.length > 0) {
- return this.buildJoinQuery(collectionName, joins, options, tableDefs);
- }
-
- return this.buildSingleQuery(collectionName, options, tableDefs);
- },
-
- /**
- * Build Up a Select Statement Without Joins
- */
-
- buildSingleQuery: function(collectionName, options, tableDefs) {
- var queryPart = '';
-
- if(options.where) {
- queryPart += 'WHERE ' + sql.where(collectionName, options.where) + ' ';
- }
-
- if (options.groupBy) {
- queryPart += 'GROUP BY ';
-
- // Normalize to array
- if(!Array.isArray(options.groupBy)) options.groupBy = [options.groupBy];
-
- options.groupBy.forEach(function(key) {
- queryPart += key + ', ';
- });
-
- // Remove trailing comma
- queryPart = queryPart.slice(0, -2) + ' ';
- }
-
- if (options.sort) {
- queryPart += 'ORDER BY ';
-
- // Sort through each sort attribute criteria
- _.each(options.sort, function(direction, attrName) {
-
- queryPart += sql.prepareAttribute(collectionName, null, attrName) + ' ';
-
- // Basic MongoDB-style numeric sort direction
- if (direction === 1) {
- queryPart += 'ASC, ';
- } else {
- queryPart += 'DESC, ';
- }
- });
-
- // Remove trailing comma
- if(queryPart.slice(-2) === ', ') {
- queryPart = queryPart.slice(0, -2) + ' ';
- }
- }
-
- if (hop(options, 'limit') && (options.limit !== null && options.limit !== undefined)) {
- queryPart += 'LIMIT ' + options.limit + ' ';
- }
-
- if (hop(options, 'skip') && (options.skip !== null && options.skip !== undefined)) {
- // Some MySQL hackery here. For details, see:
- // http://stackoverflow.com/questions/255517/mysql-offset-infinite-rows
- if (!options.limit) {
- queryPart += 'LIMIT 18446744073709551610 ';
- }
- queryPart += 'OFFSET ' + options.skip + ' ';
- }
-
- return queryPart;
- },
-
- // Put together the CSV aggregation
- // separator => optional, defaults to ', '
- // keyOverride => optional, overrides the keys in the dictionary
- // (used for generating value lists in IN queries)
- // parentKey => key of the parent to this object
- build: function(collectionName, collection, fn, separator, keyOverride, parentKey) {
- separator = separator || ', ';
- var $sql = '';
- _.each(collection, function(value, key) {
- $sql += fn(collectionName, value, keyOverride || key, parentKey);
-
- // (always append separator)
- $sql += separator;
- });
-
- // (then remove final one)
- return String($sql).replace(new RegExp(separator + '+$'), '');
- }
-};
-
-// Cast waterline types into SQL data types
-function sqlTypeCast(attr) {
- var type;
- if(_.isObject(attr) && _.has(attr, 'type')) {
- type = attr.type;
- } else {
- type = attr;
- }
-
- type = type && type.toLowerCase();
-
- switch (type) {
- case 'string': {
- var size = 255; // By default.
-
- // If attr.size is positive integer, use it as size of varchar.
- if(!Number.isNaN(attr.size) && (parseInt(attr.size) == parseFloat(attr.size)) && (parseInt(attr.size) > 0))
- size = attr.size;
-
- return 'VARCHAR(' + size + ')';
- }
-
- case 'text':
- case 'array':
- case 'json':
- return 'LONGTEXT';
-
- case 'mediumtext':
- return 'mediumtext';
-
- case 'longtext':
- return 'longtext';
-
- case 'boolean':
- return 'BOOL';
-
- case 'int':
- case 'integer': {
- var size = 32; // By default
-
- if(!Number.isNaN(attr.size) && (parseInt(attr.size) == parseFloat(attr.size)) && (parseInt(size) > 0)) {
- size = parseInt(attr.size);
- }
-
- // MEDIUMINT gets internally promoted to INT so there is no real benefit
- // using it.
-
- switch (size) {
- case 8:
- return 'TINYINT';
- case 16:
- return 'SMALLINT';
- case 32:
- return 'INT';
- case 64:
- return 'BIGINT'
- default:
- return 'INT';
- }
- }
-
- case 'float':
- case 'double':
- return 'FLOAT';
-
- case 'decimal':
- return 'DECIMAL';
-
- case 'date':
- return 'DATE';
-
- case 'datetime':
- return 'DATETIME';
-
- case 'time':
- return 'TIME';
-
- case 'binary':
- return 'BLOB';
-
- default:
- console.error('Unregistered type given: ' + type);
- return 'LONGTEXT';
- }
-}
-
-function wrapInQuotes(val) {
- return '"' + val + '"';
-}
-
-function toSqlDate(date) {
-
- date = date.getFullYear() + '-' +
- ('00' + (date.getMonth()+1)).slice(-2) + '-' +
- ('00' + date.getDate()).slice(-2) + ' ' +
- ('00' + date.getHours()).slice(-2) + ':' +
- ('00' + date.getMinutes()).slice(-2) + ':' +
- ('00' + date.getSeconds()).slice(-2);
-
- return date;
-}
-
-// Return whether this criteria is valid as an object inside of an attribute
-function validSubAttrCriteria(c) {
- return _.isObject(c) && (
- !_.isUndefined(c.not) || !_.isUndefined(c.greaterThan) || !_.isUndefined(c.lessThan) ||
- !_.isUndefined(c.greaterThanOrEqual) || !_.isUndefined(c.lessThanOrEqual) || !_.isUndefined(c['<']) ||
- !_.isUndefined(c['<=']) || !_.isUndefined(c['!']) || !_.isUndefined(c['>']) || !_.isUndefined(c['>=']) ||
- !_.isUndefined(c.startsWith) || !_.isUndefined(c.endsWith) || !_.isUndefined(c.contains) || !_.isUndefined(c.like));
-}
diff --git a/lib/utils.js b/lib/utils.js
deleted file mode 100644
index 7a127a6a..00000000
--- a/lib/utils.js
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Utility Functions
- */
-
-// Dependencies
-var mysql = require('mysql');
-var _ = require('lodash');
-var url = require('url');
-
-// Module Exports
-
-var utils = module.exports = {};
-
-/**
- * Parse URL string from config
- *
- * Parse URL string into connection config parameters
- */
-
-utils.parseUrl = function (config) {
- if(!_.isString(config.url)) return config;
-
- var obj = url.parse(config.url);
-
- config.host = obj.hostname || config.host;
- config.port = obj.port || config.port;
-
- if(_.isString(obj.path)) {
- config.database = obj.path.split("/")[1] || config.database;
- }
-
- if(_.isString(obj.auth)) {
- config.user = obj.auth.split(":")[0] || config.user;
- config.password = obj.auth.split(":")[1] || config.password;
- }
- return config;
-};
-
-/**
- * Prepare values
- *
- * Transform a JS date to SQL date and functions
- * to strings.
- */
-
-utils.prepareValue = function(value) {
-
- if(_.isUndefined(value) || value === null) return value;
-
- // Cast functions to strings
- if (_.isFunction(value)) {
- value = value.toString();
- }
-
- // Store Arrays and Objects as strings
- if (Array.isArray(value) || value.constructor && value.constructor.name === 'Object') {
- try {
- value = JSON.stringify(value);
- } catch (e) {
- // just keep the value and let the db handle an error
- value = value;
- }
- }
-
- // Cast dates to SQL
- if (_.isDate(value)) {
- value = utils.toSqlDate(value);
- }
-
- return mysql.escape(value);
-};
-
-/**
- * Builds a Select statement determining if Aggeregate options are needed.
- */
-
-utils.buildSelectStatement = function(criteria, table, schemaDefs) {
-
- var query = '';
-
- if(criteria.groupBy || criteria.sum || criteria.average || criteria.min || criteria.max) {
- query = 'SELECT ';
-
- // Append groupBy columns to select statement
- if(criteria.groupBy) {
- if(criteria.groupBy instanceof Array) {
- criteria.groupBy.forEach(function(opt){
- query += opt + ', ';
- });
-
- } else {
- query += criteria.groupBy + ', ';
- }
- }
-
- // Handle SUM
- if (criteria.sum) {
- if(criteria.sum instanceof Array) {
- criteria.sum.forEach(function(opt){
- query += 'SUM(' + opt + ') AS ' + opt + ', ';
- });
-
- } else {
- query += 'SUM(' + criteria.sum + ') AS ' + criteria.sum + ', ';
- }
- }
-
- // Handle AVG (casting to float to fix percision with trailing zeros)
- if (criteria.average) {
- if(criteria.average instanceof Array) {
- criteria.average.forEach(function(opt){
- query += 'AVG(' + opt + ') AS ' + opt + ', ';
- });
-
- } else {
- query += 'AVG(' + criteria.average + ') AS ' + criteria.average + ', ';
- }
- }
-
- // Handle MAX
- if (criteria.max) {
- if(criteria.max instanceof Array) {
- criteria.max.forEach(function(opt){
- query += 'MAX(' + opt + ') AS ' + opt + ', ';
- });
-
- } else {
- query += 'MAX(' + criteria.max + ') AS ' + criteria.max + ', ';
- }
- }
-
- // Handle MIN
- if (criteria.min) {
- if(criteria.min instanceof Array) {
- criteria.min.forEach(function(opt){
- query += 'MIN(' + opt + ') AS ' + opt + ', ';
- });
-
- } else {
- query += 'MIN(' + criteria.min + ') AS ' + criteria.min + ', ';
- }
- }
-
- // trim trailing comma
- query = query.slice(0, -2) + ' ';
-
- // Add FROM clause
- return query += 'FROM `' + table + '` ';
- }
-
- /**
- * If no aggregate options lets just build a normal query
- */
-
-
- // Add all keys to the select statement for this table
- query += 'SELECT ';
-
- var selectKeys = [],
- joinSelectKeys = [];
-
- if ( !schemaDefs[table] ) throw new Error('Schema definition missing for table: `'+table+'`');
-
- _( schemaDefs[table] ).forEach(function(schemaDef, key) {
- selectKeys.push({ table: table, key: key });
- });
-
- // Check for joins
- if(criteria.joins || criteria.join) {
-
- var joins = criteria.joins || criteria.join;
-
- joins.forEach(function(join) {
- if(!join.select) return;
-
- Object.keys(schemaDefs[join.child.toLowerCase()]).forEach(function(key) {
- var _join = _.cloneDeep(join);
- _join.key = key;
- joinSelectKeys.push(_join);
- });
-
- // Remove the foreign key for this join from the selectKeys array
- selectKeys = selectKeys.filter(function(select) {
- var keep = true;
- if(select.key === join.parentKey && join.removeParentKey) keep = false;
- return keep;
- });
- });
- }
-
- // Add all the columns to be selected that are not joins
- selectKeys.forEach(function(select) {
- query += '`' + select.table + '`.`' + select.key + '`, ';
- });
-
- // Add all the columns from the joined tables
- joinSelectKeys.forEach(function(select) {
-
- // Create an alias by prepending the child table with the alias of the join
- var alias = select.alias.toLowerCase() + '_' + select.child.toLowerCase();
-
- // If this is a belongs_to relationship, keep the foreign key name from the AS part
- // of the query. This will result in a selected column like: "user"."id" AS "user_id__id"
- if(select.model) {
- return query += mysql.escapeId(alias) + '.' + mysql.escapeId(select.key) + ' AS ' +
- mysql.escapeId(select.parentKey + '__' + select.key) + ', ';
- }
-
- // If a junctionTable is used, the child value should be used in the AS part of the
- // select query.
- if(select.junctionTable) {
- return query += mysql.escapeId(alias) + '.' + mysql.escapeId(select.key) + ' AS ' +
- mysql.escapeId(select.alias + '__' + select.key) + ', ';
- }
-
- // Else if a hasMany attribute is being selected, use the alias plus the child
- return query += mysql.escapeId(alias) + '.' + mysql.escapeId(select.key) + ' AS ' +
- mysql.escapeId(select.alias + '__' + select.key) + ', ';
- });
-
- // Remove the last comma
- query = query.slice(0, -2) + ' FROM `' + table + '` ';
-
- return query;
-};
-
-
-/**
- * ignore
- */
-
-utils.object = {};
-
-/**
- * Safer helper for hasOwnProperty checks
- *
- * @param {Object} obj
- * @param {String} prop
- * @return {Boolean}
- * @api public
- */
-
-var hop = Object.prototype.hasOwnProperty;
-utils.object.hasOwnProperty = function(obj, prop) {
- return hop.call(obj, prop);
-};
-
-
-utils.toSqlDate = function toSqlDate(date) {
-
- date = date.getFullYear() + '-' +
- ('00' + (date.getMonth()+1)).slice(-2) + '-' +
- ('00' + date.getDate()).slice(-2) + ' ' +
- ('00' + date.getHours()).slice(-2) + ':' +
- ('00' + date.getMinutes()).slice(-2) + ':' +
- ('00' + date.getSeconds()).slice(-2);
-
- return date;
-};
diff --git a/package.json b/package.json
index 4bcfaf5e..a9c80fd1 100644
--- a/package.json
+++ b/package.json
@@ -1,10 +1,15 @@
{
"name": "sails-mysql",
- "version": "0.11.1",
+ "version": "3.0.1",
"description": "MySQL adapter for Sails.js",
"main": "lib/adapter.js",
"scripts": {
- "test": "make test"
+ "test": "node ./node_modules/mocha/bin/mocha test/adapter/unit --timeout 10000 --recursive && node test/adapter/integration/runner",
+ "fasttest": "node ./node_modules/mocha/bin/mocha test/adapter/unit --timeout 10000 --recursive && node test/adapter/integration/runner",
+ "pretest": "nodever=`node -e \"console.log('\\`node -v\\`'[1]);\"` && if [ $nodever != \"0\" ]; then npm run lint; fi",
+ "lint": "node ./node_modules/eslint/bin/eslint . --max-warnings=0 --ignore-pattern 'test/'",
+ "docker": "docker-compose run adapter bash",
+ "benchmark": "node ./node_modules/mocha/bin/mocha test/benchmarks --recursive"
},
"repository": {
"type": "git",
@@ -20,21 +25,20 @@
"license": "MIT",
"readmeFilename": "README.md",
"dependencies": {
- "async": "~1.3.0",
- "lodash": "~3.10.0",
- "mysql": "~2.8.0",
- "waterline-errors": "~0.10.0",
- "waterline-sequel": "~0.5.0",
- "waterline-cursor": "~0.0.5"
+ "@sailshq/lodash": "^3.10.2",
+ "async": "2.6.4",
+ "machine": "^15.0.0-21",
+ "machinepack-mysql": "^5.0.0",
+ "waterline-utils": "^1.3.10"
},
"devDependencies": {
- "should": "*",
- "mocha": "~1.13.0",
- "waterline-adapter-tests": "~0.10.7",
- "captains-log": "~0.11.5"
+ "benchmark": "2.1.1",
+ "eslint": "4.11.0",
+ "mocha": "3.0.2",
+ "waterline-adapter-tests": "^1.0.0-6"
},
"waterlineAdapter": {
- "waterlineVersion": "~0.10.0",
+ "waterlineVersion": "^0.13.0",
"interfaces": [
"semantic",
"queryable",
diff --git a/test/adapter/integration/runner.js b/test/adapter/integration/runner.js
new file mode 100644
index 00000000..82aeb8cd
--- /dev/null
+++ b/test/adapter/integration/runner.js
@@ -0,0 +1,119 @@
+/**
+ * Run integration tests
+ *
+ * Uses the `waterline-adapter-tests` module to
+ * run mocha tests against the appropriate version
+ * of Waterline. Only the interfaces explicitly
+ * declared in this adapter's `package.json` file
+ * are tested. (e.g. `queryable`, `semantic`, etc.)
+ */
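+
+// For reference, the `waterlineAdapter` section of this adapter's `package.json`
+// looks roughly like the following (see the `package.json` changes in this diff
+// for the exact values):
+//
+//   "waterlineAdapter": {
+//     "waterlineVersion": "^0.13.0",
+//     "interfaces": ["semantic", "queryable", ...],
+//     ...
+//   }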
+
+
+/**
+ * Module dependencies
+ */
+
+var util = require('util');
+var TestRunner = require('waterline-adapter-tests');
+var Adapter = require('../../../lib/adapter');
+
+
+// Grab targeted interfaces from this adapter's `package.json` file:
+var package = {};
+var interfaces = [];
+var features = [];
+
+try {
+ package = require('../../../package.json');
+ interfaces = package.waterlineAdapter.interfaces;
+ features = package.waterlineAdapter.features;
+} catch (e) {
+ throw new Error(
+ '\n' +
+ 'Could not read supported interfaces from `waterlineAdapter.interfaces`' + '\n' +
+ 'in this adapter\'s `package.json` file ::' + '\n' +
+ util.inspect(e)
+ );
+}
+
+
+console.log('Testing `' + package.name + '`, a Sails/Waterline adapter.');
+console.log('Running `waterline-adapter-tests` against ' + interfaces.length + ' interfaces...');
+console.log('( ' + interfaces.join(', ') + ' )');
+console.log();
+console.log('Latest draft of Waterline adapter interface spec:');
+console.log('http://links.sailsjs.org/docs/plugins/adapters/interfaces');
+console.log();
+
+
+// //////////////////////////////////////////////////////////////////////
+// Integration Test Runner
+//
+// Uses the `waterline-adapter-tests` module to
+// run mocha tests against the specified interfaces
+// of the currently-implemented Waterline adapter API.
+// //////////////////////////////////////////////////////////////////////
+new TestRunner({
+
+ // Mocha opts
+ mocha: {
+ bail: false,
+ timeout: 20000
+ },
+
+ // Load the adapter module.
+ adapter: Adapter,
+
+ // Default connection config to use.
+ config: (function(){
+ var config = {
+ schema: true,
+ };
+
+    // Try to build up a Waterline Adapter Tests URL if one isn't already set.
+    // (Not all automated test runners can be configured to set these automatically;
+    // Docker, for instance, exposes environment variables that can be used to build one up.)
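+    // With the defaults below, the constructed URL ends up looking something like:
+    //   mysql://root:@localhost:3306/adapter_tests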
+ if (process.env.WATERLINE_ADAPTER_TESTS_URL) {
+ config.url = process.env.WATERLINE_ADAPTER_TESTS_URL;
+ return config;
+ }
+ else {
+ var host = process.env.MYSQL_PORT_3306_TCP_ADDR || process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost';
+ var port = process.env.WATERLINE_ADAPTER_TESTS_PORT || 3306;
+ var user = process.env.MYSQL_ENV_MYSQL_USER || process.env.WATERLINE_ADAPTER_TESTS_USER || 'root';
+ var password = process.env.MYSQL_ENV_MYSQL_PASSWORD || process.env.WATERLINE_ADAPTER_TESTS_PASSWORD || process.env.MYSQL_PWD || '';
+ var database = process.env.MYSQL_ENV_MYSQL_DATABASE || process.env.WATERLINE_ADAPTER_TESTS_DATABASE || 'adapter_tests';
+
+ config.url = 'mysql://' + user + ':' + password + '@' + host + ':' + port + '/' + database;
+ return config;
+ }
+
+ })(),
+
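+  // Exit with a non-zero code if any test fails.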
+ failOnError: true,
+ // The set of adapter interfaces to test against.
+ // (grabbed these from this adapter's package.json file above)
+ interfaces: interfaces,
+
+ // The set of adapter features to test against.
+ // (grabbed these from this adapter's package.json file above)
+ features: features,
+
+ // Most databases implement 'semantic' and 'queryable'.
+ //
+ // As of Sails/Waterline v0.10, the 'associations' interface
+ // is also available. If you don't implement 'associations',
+ // it will be polyfilled for you by Waterline core. The core
+ // implementation will always be used for cross-adapter / cross-connection
+ // joins.
+ //
+ // In future versions of Sails/Waterline, 'queryable' may be also
+ // be polyfilled by core.
+ //
+ // These polyfilled implementations can usually be further optimized at the
+ // adapter level, since most databases provide optimizations for internal
+ // operations.
+ //
+ // Full interface reference:
+ // https://github.com/balderdashy/sails-docs/blob/master/adapter-specification.md
+});
diff --git a/test/adapter/unit/create.js b/test/adapter/unit/create.js
new file mode 100644
index 00000000..9a884aaf
--- /dev/null
+++ b/test/adapter/unit/create.js
@@ -0,0 +1,161 @@
+var assert = require('assert');
+var _ = require('@sailshq/lodash');
+var Adapter = require('../../../lib/adapter');
+var Support = require('../../support/bootstrap');
+
+describe('Unit Tests ::', function() {
+ describe('Create', function() {
+ // Test Setup
+ before(function(done) {
+ Support.Setup('test_create', done);
+ });
+
+ after(function(done) {
+ Support.Teardown('test_create', done);
+ });
+
+ // Attributes for the test table
+ var attributes = {
+ fieldA: 'foo',
+ fieldB: 'bar'
+ };
+
+    it('should insert a record into the database and return its fields', function(done) {
+ var query = {
+ using: 'test_create',
+ newRecord: attributes,
+ meta: {
+ fetch: true
+ }
+ };
+
+ Adapter.create('test', query, function(err, result) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isObject(result));
+ assert(!_.isFunction(result));
+ assert(!_.isArray(result));
+ assert.equal(result.fieldA, 'foo');
+ assert.equal(result.fieldB, 'bar');
+ assert(result.id);
+
+ return done();
+ });
+ });
+
+ // Create Auto-Incremented ID
+ it('should create an auto-incremented id field', function(done) {
+ var query = {
+ using: 'test_create',
+ newRecord: attributes,
+ meta: {
+ fetch: true
+ }
+ };
+
+ Adapter.create('test', query, function(err, result) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isObject(result));
+ assert(!_.isFunction(result));
+ assert(!_.isArray(result));
+ assert(result.id);
+
+ return done();
+ });
+ });
+
+ it('should keep case', function(done) {
+ var query = {
+ using: 'test_create',
+ newRecord: {
+ fieldA: 'Foo',
+ fieldB: 'bAr'
+ },
+ meta: {
+ fetch: true
+ }
+ };
+
+ Adapter.create('test', query, function(err, result) {
+ if (err) {
+ return done(err);
+ }
+
+ assert.equal(result.fieldA, 'Foo');
+ assert.equal(result.fieldB, 'bAr');
+
+ return done();
+ });
+ });
+
+ it('should pass through buffers for `ref` type attributes', function(done) {
+ var query = {
+ using: 'test_create',
+ newRecord: {
+ fieldC: new Buffer([1,2,3])
+ },
+ meta: {
+ fetch: true
+ }
+ };
+
+ Adapter.create('test', query, function(err, record) {
+ if (err) { return done(err); }
+ assert(record.fieldC instanceof Buffer);
+ assert.equal(record.fieldC.length, 3);
+ return done();
+ });
+ });
+
+ it('should pass through date objects for `ref` type attributes', function(done) {
+ var query = {
+ using: 'test_create',
+ newRecord: {
+ fieldD: new Date('2001-06-15 12:00:00')
+ },
+ meta: {
+ fetch: true
+ }
+ };
+
+ Adapter.create('test', query, function(err, record) {
+ if (err) { return done(err); }
+ assert(record.fieldD instanceof Date);
+ assert.equal(record.fieldD.getFullYear(), '2001');
+ return done();
+ });
+ });
+
+
+    // Look into the bowels of the MySQL driver and ensure the Create function
+    // handles its connections properly.
+    it('should release its connection when completed', function(done) {
+ var manager = Adapter.datastores.test.manager;
+ var preConnectionsAvailable = manager.pool._allConnections.length;
+
+ var query = {
+ using: 'test_create',
+ newRecord: attributes,
+ meta: {
+ fetch: true
+ }
+ };
+
+ Adapter.create('test', query, function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ var postConnectionsAvailable = manager.pool._allConnections.length;
+ assert.equal(preConnectionsAvailable, postConnectionsAvailable);
+
+ return done();
+ });
+ });
+ });
+});
diff --git a/test/adapter/unit/define.js b/test/adapter/unit/define.js
new file mode 100644
index 00000000..3f2b0adf
--- /dev/null
+++ b/test/adapter/unit/define.js
@@ -0,0 +1,75 @@
+var assert = require('assert');
+var _ = require('@sailshq/lodash');
+var Adapter = require('../../../lib/adapter');
+var Support = require('../../support/bootstrap');
+
+describe('Unit Tests ::', function() {
+ describe('Define', function() {
+ // Test Setup
+ before(function(done) {
+ Support.registerConnection(['test_define'], done);
+ });
+
+ after(function(done) {
+ Support.Teardown('test_define', done);
+ });
+
+ // Attributes for the test table
+ var definition = {
+ id: {
+ columnType: 'serial',
+ autoIncrement: true
+ },
+ name: {
+ columnType: 'text',
+ notNull: true
+ },
+ email: {
+ columnType: 'text'
+ },
+ title: {
+ columnType: 'text'
+ },
+ phone: {
+ columnType: 'text'
+ },
+ type: {
+ columnType: 'text'
+ },
+ favoriteFruit: {
+ columnType: 'text'
+ },
+ age: {
+ columnType: 'integer'
+ }
+ };
+
+ it('should create a table in the database', function(done) {
+ Adapter.define('test', 'test_define', definition, function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ Adapter.describe('test', 'test_define', function(err, result) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isPlainObject(result));
+
+ assert.equal(_.keys(result).length, 8);
+ assert(result.id);
+ assert(result.name);
+ assert(result.email);
+ assert(result.title);
+ assert(result.phone);
+ assert(result.type);
+ assert(result.favoriteFruit);
+ assert(result.age);
+
+ return done();
+ });
+ });
+ });
+ });
+});
diff --git a/test/adapter/unit/describe.js b/test/adapter/unit/describe.js
new file mode 100644
index 00000000..aa01b77a
--- /dev/null
+++ b/test/adapter/unit/describe.js
@@ -0,0 +1,39 @@
+var assert = require('assert');
+var _ = require('@sailshq/lodash');
+var Adapter = require('../../../lib/adapter');
+var Support = require('../../support/bootstrap');
+
+describe('Unit Tests ::', function() {
+ describe('Describe', function() {
+ // Test Setup
+ before(function(done) {
+ Support.Setup('test_describe', done);
+ });
+
+ after(function(done) {
+ Support.Teardown('test_describe', done);
+ });
+
+ it('should return information on a table', function(done) {
+ Adapter.describe('test', 'test_describe', function(err, result) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isPlainObject(result));
+
+ assert(result.fieldA);
+ assert(result.fieldB);
+ assert(result.id);
+
+ assert.equal(result.fieldA.type, 'text');
+ assert.equal(result.fieldB.type, 'text');
+ assert.equal(result.id.type, 'int');
+ assert(result.id.primaryKey);
+ assert(result.id.autoIncrement);
+
+ return done();
+ });
+ });
+ });
+});
diff --git a/test/adapter/unit/destroy.js b/test/adapter/unit/destroy.js
new file mode 100644
index 00000000..056d32d8
--- /dev/null
+++ b/test/adapter/unit/destroy.js
@@ -0,0 +1,73 @@
+var assert = require('assert');
+var Adapter = require('../../../lib/adapter');
+var Support = require('../../support/bootstrap');
+
+describe('Unit Tests ::', function() {
+ describe('Destroy', function() {
+ // Test Setup
+ before(function(done) {
+ Support.Setup('test_destroy', function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ // Seed the database with two simple records.
+ Support.Seed('test_destroy', done);
+ });
+ });
+
+ after(function(done) {
+ Support.Teardown('test_destroy', done);
+ });
+
+ it('should ensure the record is actually deleted', function(done) {
+ var query = {
+ using: 'test_destroy',
+ criteria: {
+ where: {
+ fieldA: 'foo_2'
+ }
+ }
+ };
+
+ Adapter.destroy('test', query, function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ Adapter.find('test', query, function(err, results) {
+ if (err) {
+ return done(err);
+ }
+
+ assert.equal(results.length, 0);
+
+ return done();
+ });
+ });
+ });
+
+    // Look into the bowels of the MySQL driver and ensure the Destroy function
+    // handles its connections properly.
+    it('should release its connection when completed', function(done) {
+ var manager = Adapter.datastores.test.manager;
+ var preConnectionsAvailable = manager.pool._allConnections.length;
+
+ var query = {
+ using: 'test_destroy',
+ criteria: {}
+ };
+
+ Adapter.destroy('test', query, function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ var postConnectionsAvailable = manager.pool._allConnections.length;
+ assert.equal(preConnectionsAvailable, postConnectionsAvailable);
+
+ return done();
+ });
+ });
+ });
+});
diff --git a/test/adapter/unit/drop.js b/test/adapter/unit/drop.js
new file mode 100644
index 00000000..d058203f
--- /dev/null
+++ b/test/adapter/unit/drop.js
@@ -0,0 +1,36 @@
+var assert = require('assert');
+var _ = require('@sailshq/lodash');
+var Adapter = require('../../../lib/adapter');
+var Support = require('../../support/bootstrap');
+
+describe('Unit Tests ::', function() {
+ describe('Drop', function() {
+ // Test Setup
+ before(function(done) {
+ Support.Setup('test_drop', done);
+ });
+
+ after(function(done) {
+ Support.Teardown('test_drop', done);
+ });
+
+
+ it('should remove a table from the database', function(done) {
+ Adapter.drop('test', 'test_drop', [], function dropCb(err) {
+ if (err) {
+ return done(err);
+ }
+
+ Adapter.describe('test', 'test_drop', function describeCb(err, result) {
+ if (err) {
+ return done(err);
+ }
+
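+        // Once the table has been dropped, `describe` should report no columns.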
+        assert.equal(_.keys(result).length, 0);
+
+ return done();
+ });
+ });
+ });
+ });
+});
diff --git a/test/adapter/unit/find.js b/test/adapter/unit/find.js
new file mode 100644
index 00000000..6a84feab
--- /dev/null
+++ b/test/adapter/unit/find.js
@@ -0,0 +1,138 @@
+var assert = require('assert');
+var _ = require('@sailshq/lodash');
+var Adapter = require('../../../lib/adapter');
+var Support = require('../../support/bootstrap');
+
+describe('Unit Tests ::', function() {
+ describe('Find', function() {
+ // Test Setup
+ before(function(done) {
+ Support.Setup('test_find', function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ // Seed the database with two simple records.
+ Support.Seed('test_find', done);
+ });
+ });
+
+ after(function(done) {
+ Support.Teardown('test_find', done);
+ });
+
+
+ it('should select the correct record', function(done) {
+ var query = {
+ using: 'test_find',
+ criteria: {
+ where: {
+ fieldA: 'foo'
+ }
+ }
+ };
+
+ Adapter.find('test', query, function(err, results) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isArray(results));
+ assert.equal(results.length, 1);
+ assert.equal(_.first(results).fieldA, 'foo');
+ assert.equal(_.first(results).fieldB, 'bar');
+
+ return done();
+ });
+ });
+
+ it('should return all the records', function(done) {
+ var query = {
+ using: 'test_find',
+ criteria: {}
+ };
+
+ Adapter.find('test', query, function(err, results) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isArray(results));
+ assert.equal(results.length, 2);
+
+ return done();
+ });
+ });
+
+ it('should be case sensitive', function(done) {
+ var query = {
+ using: 'test_find',
+ criteria: {
+ where: {
+ fieldB: 'bAr_2'
+ }
+ }
+ };
+
+ Adapter.find('test', query, function(err, results) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isArray(results));
+ assert.equal(results.length, 1);
+ assert.equal(_.first(results).fieldA, 'foo_2');
+ assert.equal(_.first(results).fieldB, 'bAr_2');
+
+ return done();
+ });
+ });
+
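+    // (The Buffer and Date checked below are seeded by `Support.Seed` in
+    // `test/support/bootstrap.js`.)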
+ it('should return `ref` type attributes unchanged', function(done) {
+ var query = {
+ using: 'test_find',
+ criteria: {
+ where: {
+ fieldB: 'bAr_2'
+ }
+ }
+ };
+
+ Adapter.find('test', query, function(err, results) {
+ if (err) {
+ return done(err);
+ }
+ var record = results[0];
+ assert(record.fieldC instanceof Buffer, 'fieldC was not a Buffer!');
+ assert(record.fieldD instanceof Date, 'fieldD was not a Date!');
+ assert.equal(record.fieldC.length, 3, 'fieldC was a Buffer, but not the right Buffer! (contained: ' + require('util').inspect(record.fieldC) + ')');
+ assert.equal(record.fieldD.getFullYear(), '2001', 'fieldD was a Date, but not the right Date! (contained: ' + require('util').inspect(record.fieldD) + ')');
+
+ return done();
+ });
+ });
+
+    // Look into the bowels of the MySQL driver and ensure the Find function
+    // handles its connections properly.
+    it('should release its connection when completed', function(done) {
+ var manager = Adapter.datastores.test.manager;
+ var preConnectionsAvailable = manager.pool._allConnections.length;
+
+ var query = {
+ using: 'test_find',
+ criteria: {}
+ };
+
+ Adapter.find('test', query, function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ var postConnectionsAvailable = manager.pool._allConnections.length;
+ assert.equal(preConnectionsAvailable, postConnectionsAvailable);
+
+ return done();
+ });
+ });
+ });
+});
diff --git a/test/adapter/unit/update.js b/test/adapter/unit/update.js
new file mode 100644
index 00000000..c08ad27d
--- /dev/null
+++ b/test/adapter/unit/update.js
@@ -0,0 +1,108 @@
+var assert = require('assert');
+var _ = require('@sailshq/lodash');
+var Adapter = require('../../../lib/adapter');
+var Support = require('../../support/bootstrap');
+
+describe('Unit Tests ::', function() {
+ describe('Update', function() {
+ // Test Setup
+ before(function(done) {
+ Support.Setup('test_update', function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ // Seed the database with two simple records.
+ Support.Seed('test_update', done);
+ });
+ });
+
+ after(function(done) {
+ Support.Teardown('test_update', done);
+ });
+
+ it('should update the correct record', function(done) {
+ var query = {
+ using: 'test_update',
+ criteria: {
+ where: {
+ fieldA: 'foo'
+ }
+ },
+ valuesToSet: {
+ fieldA: 'foobar'
+ },
+ meta: {
+ fetch: true
+ }
+ };
+
+ Adapter.update('test', query, function(err, results) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isArray(results));
+ assert.equal(results.length, 1);
+ assert.equal(_.first(results).fieldA, 'foobar');
+ assert.equal(_.first(results).fieldB, 'bar');
+
+ return done();
+ });
+ });
+
+    it('should be case-insensitive', function(done) {
+ var query = {
+ using: 'test_update',
+ criteria: {
+ where: {
+ fieldB: 'bAr_2'
+ }
+ },
+ valuesToSet: {
+ fieldA: 'FooBar'
+ },
+ meta: {
+ fetch: true
+ }
+ };
+
+ Adapter.update('test', query, function(err, results) {
+ if (err) {
+ return done(err);
+ }
+
+ assert(_.isArray(results));
+ assert.equal(results.length, 1);
+ assert.equal(_.first(results).fieldA, 'FooBar');
+ assert.equal(_.first(results).fieldB, 'bAr_2');
+
+ return done();
+ });
+ });
+
+    // Look into the bowels of the MySQL driver and ensure the Update function
+    // handles its connections properly.
+    it('should release its connection when completed', function(done) {
+ var manager = Adapter.datastores.test.manager;
+ var preConnectionsAvailable = manager.pool._allConnections.length;
+
+ var query = {
+ using: 'test_update',
+ criteria: {},
+ valuesToSet: {}
+ };
+
+ Adapter.update('test', query, function(err) {
+ if (err) {
+ return done(err);
+ }
+
+ var postConnectionsAvailable = manager.pool._allConnections.length;
+ assert.equal(preConnectionsAvailable, postConnectionsAvailable);
+
+ return done();
+ });
+ });
+ });
+});
diff --git a/test/integration/runner.js b/test/integration/runner.js
deleted file mode 100644
index 48275543..00000000
--- a/test/integration/runner.js
+++ /dev/null
@@ -1,111 +0,0 @@
-/**
- * Run integration tests
- *
- * Uses the `waterline-adapter-tests` module to
- * run mocha tests against the appropriate version
- * of Waterline. Only the interfaces explicitly
- * declared in this adapter's `package.json` file
- * are tested. (e.g. `queryable`, `semantic`, etc.)
- */
-
-
-/**
- * Module dependencies
- */
-
-var util = require('util');
-var mocha = require('mocha');
-var log = require('captains-log')();
-var TestRunner = require('waterline-adapter-tests');
-var Adapter = require('../../lib/adapter');
-
-
-
-// Grab targeted interfaces from this adapter's `package.json` file:
-var package = {},
- interfaces = [],
- features = [];
-try {
- package = require('../../package.json');
- interfaces = package.waterlineAdapter.interfaces;
- features = package.waterlineAdapter.features;
-} catch (e) {
- throw new Error(
- '\n' +
- 'Could not read supported interfaces from `waterlineAdapter.interfaces`' + '\n' +
- 'in this adapter\'s `package.json` file ::' + '\n' +
- util.inspect(e)
- );
-}
-
-
-
-log.info('Testing `' + package.name + '`, a Sails/Waterline adapter.');
-log.info('Running `waterline-adapter-tests` against ' + interfaces.length + ' interfaces...');
-log.info('( ' + interfaces.join(', ') + ' )');
-console.log();
-log('Latest draft of Waterline adapter interface spec:');
-log('http://links.sailsjs.org/docs/plugins/adapters/interfaces');
-console.log();
-
-
-
-/**
- * Integration Test Runner
- *
- * Uses the `waterline-adapter-tests` module to
- * run mocha tests against the specified interfaces
- * of the currently-implemented Waterline adapter API.
- */
-new TestRunner({
-
- // Mocha opts
- mocha: {
- bail: true
- },
-
- // Load the adapter module.
- adapter: Adapter,
-
- // Default connection config to use.
- config: {
- host: process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost',
- port: process.env.WATERLINE_ADAPTER_TESTS_PORT || 3306,
- user: process.env.WATERLINE_ADAPTER_TESTS_USER || 'root',
- password: process.env.WATERLINE_ADAPTER_TESTS_PASSWORD || '',
- database: process.env.WATERLINE_ADAPTER_TESTS_DATABASE || 'sails_mysql',
- pool: true,
- connectionLimit: 10,
- queueLimit: 0,
- waitForConnections: true
- },
-
- // The set of adapter interfaces to test against.
- // (grabbed these from this adapter's package.json file above)
- interfaces: interfaces,
-
- // The set of adapter features to test against.
- // (grabbed these from this adapter's package.json file above)
- features: features,
-
- // Return code non zero if any test fails
- failOnError: true
-
- // Most databases implement 'semantic' and 'queryable'.
- //
- // As of Sails/Waterline v0.10, the 'associations' interface
- // is also available. If you don't implement 'associations',
- // it will be polyfilled for you by Waterline core. The core
- // implementation will always be used for cross-adapter / cross-connection
- // joins.
- //
- // In future versions of Sails/Waterline, 'queryable' may be also
- // be polyfilled by core.
- //
- // These polyfilled implementations can usually be further optimized at the
- // adapter level, since most databases provide optimizations for internal
- // operations.
- //
- // Full interface reference:
- // https://github.com/balderdashy/sails-docs/blob/master/adapter-specification.md
-});
\ No newline at end of file
diff --git a/test/load/loadTest.js b/test/load/loadTest.js
deleted file mode 100644
index 7ed78829..00000000
--- a/test/load/loadTest.js
+++ /dev/null
@@ -1,50 +0,0 @@
-var Adapter = require('../../lib/adapter'),
- Config = require('./support/config'),
- Fixture = require('./support/fixture'),
- assert = require('assert'),
- async = require('async');
-
-var CONNECTIONS = 10000;
-
-describe('Load Testing', function() {
- this.timeout(60000);
-
- before(function(done) {
- var Schema;
-
- // Register The Collection
- Adapter.registerConnection({ identity: 'loadTest', config: Config }, {'Person': Fixture}, function(err) {
- if(err) done(err);
-
- // Define The Collection
- Adapter.define('loadTest', Fixture, function(err, schema) {
- if(err) return done(err);
- Schema = schema;
- done();
- });
- });
- });
-
- describe('create with x connection', function() {
-
- it('should not error', function(done) {
-
- // generate x users
- async.times(CONNECTIONS, function(n, next){
-
- var data = {
- first_name: Math.floor((Math.random()*100000)+1),
- last_name: Math.floor((Math.random()*100000)+1),
- email: Math.floor((Math.random()*100000)+1)
- };
-
- Adapter.create('loadTest', data, next);
- }, function(err, users) {
- assert(!err);
- assert(users.length === CONNECTIONS);
- done();
- });
- });
- });
-
-});
diff --git a/test/load/support/bootstrap.js b/test/load/support/bootstrap.js
deleted file mode 100644
index e0c4d836..00000000
--- a/test/load/support/bootstrap.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Wipe Database before tests are run and
- * after tests are run to ensure a clean test environment.
- */
-
-var Adapter = require('../../../lib/adapter'),
- config = require('./config');
-
-// Global Before Helper
-before(function(done) {
- dropTable(done);
-});
-
-// Global After Helper
-after(function(done) {
- dropTable(done);
-});
-
-function dropTable(cb) {
- Adapter.registerConnection({ identity: 'loadTest', config: config }, function(err) {
- if(err) cb(err);
- Adapter.drop('loadTest', cb);
- });
-}
diff --git a/test/load/support/config.js b/test/load/support/config.js
deleted file mode 100644
index baa71475..00000000
--- a/test/load/support/config.js
+++ /dev/null
@@ -1,9 +0,0 @@
-module.exports = {
- host: 'localhost',
- user: 'root',
- password: '',
- database: 'sails_loadTest',
- pool: true,
- connectionLimit: 10,
- waitForConnections: true
-};
diff --git a/test/load/support/fixture.js b/test/load/support/fixture.js
deleted file mode 100644
index 7358fe5c..00000000
--- a/test/load/support/fixture.js
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Fixture Schema To Pass To Define
- */
-
-module.exports = {
- first_name: { type: 'string' },
- last_name: { type: 'string' },
- email: { type: 'string' },
- id:{
- type: 'integer',
- autoIncrement: true,
- size: 64,
- defaultsTo: 'AUTO_INCREMENT',
- primaryKey: true
- },
- createdAt: { type: 'DATE', default: 'NOW' },
- updatedAt: { type: 'DATE', default: 'NOW' }
-};
diff --git a/test/support/bootstrap.js b/test/support/bootstrap.js
new file mode 100644
index 00000000..65135a9b
--- /dev/null
+++ b/test/support/bootstrap.js
@@ -0,0 +1,211 @@
+/**
+ * Support functions for helping with MySQL tests
+ */
+
+var _ = require('@sailshq/lodash');
+var MySQL = require('machinepack-mysql');
+var adapter = require('../../lib/adapter');
+
+var Support = module.exports = {};
+
+// Determine config (using env vars).
+Support.Config = (function(){
+ var config = {
+ schema: true,
+ };
+
+  // Try to build up a Waterline Adapter Tests URL if one isn't already set.
+  // (Not all automated test runners can be configured to set these automatically;
+  // Docker, for instance, exposes environment variables that can be used to build one up.)
+ if (process.env.WATERLINE_ADAPTER_TESTS_URL) {
+ config.url = process.env.WATERLINE_ADAPTER_TESTS_URL;
+ return config;
+ }
+ else {
+ var host = process.env.MYSQL_PORT_3306_TCP_ADDR || process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost';
+ var port = process.env.WATERLINE_ADAPTER_TESTS_PORT || 3306;
+ var user = process.env.MYSQL_ENV_MYSQL_USER || process.env.WATERLINE_ADAPTER_TESTS_USER || 'root';
+ var password = process.env.MYSQL_ENV_MYSQL_PASSWORD || process.env.WATERLINE_ADAPTER_TESTS_PASSWORD || process.env.MYSQL_PWD || '';
+ var database = process.env.MYSQL_ENV_MYSQL_DATABASE || process.env.WATERLINE_ADAPTER_TESTS_DATABASE || 'adapter_tests';
+
+ config.url = 'mysql://' + user + ':' + password + '@' + host + ':' + port + '/' + database;
+ return config;
+ }
+
+})();
+
+// Fixture Model Def
+Support.Model = function model(name, def) {
+ return {
+ identity: name,
+ tableName: name,
+ datastore: 'test',
+ primaryKey: 'id',
+ definition: def || Support.Definition
+ };
+};
+
+// Fixture Table Definition
+Support.Definition = {
+ id: {
+ type: 'number',
+ columnName: 'id',
+ autoMigrations: {
+ columnType: 'integer',
+ autoIncrement: true,
+ unique: true
+ }
+ },
+ fieldA: {
+ type: 'string',
+ columnName: 'fieldA',
+ autoMigrations: {
+ columnType: 'text'
+ }
+ },
+ fieldB: {
+ type: 'string',
+ columnName: 'fieldB',
+ autoMigrations: {
+ columnType: 'text'
+ }
+ },
+ fieldC: {
+ type: 'ref',
+ columnName: 'fieldC',
+ autoMigrations: {
+ columnType: 'mediumblob'
+ }
+ },
+ fieldD: {
+ type: 'ref',
+ columnName: 'fieldD',
+ autoMigrations: {
+ columnType: 'datetime'
+ }
+ }
+};
+
+// Register and Define a Collection
+Support.Setup = function setup(tableName, cb) {
+ var collection = Support.Model(tableName);
+ var collections = {};
+ collections[tableName] = collection;
+
+ var connection = _.cloneDeep(Support.Config);
+ connection.identity = 'test';
+
+ // Setup a primaryKey for migrations
+ collection.definition = _.cloneDeep(Support.Definition);
+
+ // Build a schema to represent the underlying physical database structure
+ var schema = {};
+ _.each(collection.definition, function parseAttribute(attributeVal, attributeName) {
+ var columnName = attributeVal.columnName || attributeName;
+
+ // If the attribute doesn't have an `autoMigrations` key on it, ignore it.
+ if (!_.has(attributeVal, 'autoMigrations')) {
+ return;
+ }
+
+ schema[columnName] = attributeVal.autoMigrations;
+ });
+
+ // Set Primary Key flag on the primary key attribute
+ var primaryKeyAttrName = collection.primaryKey;
+ var primaryKey = collection.definition[primaryKeyAttrName];
+ if (primaryKey) {
+ var pkColumnName = primaryKey.columnName || primaryKeyAttrName;
+ schema[pkColumnName].primaryKey = true;
+ }
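+
+  // At this point, for the fixture definition above, `schema` looks roughly like:
+  //   {
+  //     id:     { columnType: 'integer', autoIncrement: true, unique: true, primaryKey: true },
+  //     fieldA: { columnType: 'text' },
+  //     fieldB: { columnType: 'text' },
+  //     fieldC: { columnType: 'mediumblob' },
+  //     fieldD: { columnType: 'datetime' }
+  //   }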
+
+
+ adapter.registerDatastore(connection, collections, function registerCb(err) {
+ if (err) {
+ return cb(err);
+ }
+
+ adapter.define('test', tableName, schema, cb);
+ });
+};
+
+// Just register a connection
+Support.registerConnection = function registerConnection(tableNames, cb) {
+ var collections = {};
+
+ _.each(tableNames, function processTable(name) {
+ var collection = Support.Model(name);
+ collections[name] = collection;
+ });
+
+ var connection = _.cloneDeep(Support.Config);
+ connection.identity = 'test';
+
+ adapter.registerDatastore(connection, collections, cb);
+};
+
+// Remove a table and destroy the manager
+Support.Teardown = function teardown(tableName, cb) {
+ var manager = adapter.datastores[_.first(_.keys(adapter.datastores))].manager;
+ MySQL.getConnection({
+ manager: manager,
+ meta: Support.Config
+ }).exec(function getConnectionCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ var query = 'DROP TABLE IF EXISTS `' + tableName + '`;';
+ MySQL.sendNativeQuery({
+ connection: report.connection,
+ nativeQuery: query
+ }).exec(function dropTableCb(err) {
+ if (err) {
+ return cb(err);
+ }
+
+ MySQL.releaseConnection({
+ connection: report.connection
+ }).exec(function releaseConnectionCb(err) {
+ if (err) {
+ return cb(err);
+ }
+
+ delete adapter.datastores[_.first(_.keys(adapter.datastores))];
+ return cb();
+ });
+ });
+ });
+};
+
+// Seed a record to use for testing
+Support.Seed = function seed(tableName, cb) {
+ var manager = adapter.datastores[_.first(_.keys(adapter.datastores))].manager;
+ MySQL.getConnection({
+ manager: manager,
+ meta: Support.Config
+ }).exec(function getConnectionCb(err, report) {
+ if (err) {
+ return cb(err);
+ }
+
+ var query = [
+ 'INSERT INTO `' + tableName + '` (`fieldA`, `fieldB`, `fieldC`, `fieldD`) ',
+ 'values (\'foo\', \'bar\', null, null), (\'foo_2\', \'bAr_2\', $1, $2);'
+ ].join('');
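+
+  // The `$1`/`$2` placeholders above are filled in from `valuesToEscape` when the
+  // query is sent below, so the Buffer and Date values are escaped by the driver.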
+
+ MySQL.sendNativeQuery({
+ connection: report.connection,
+ nativeQuery: query,
+ valuesToEscape: [new Buffer([1,2,3]), new Date('2001-06-15 12:00:00')]
+ }).exec(function seedCb(err) {
+ if (err) {
+ return cb(err);
+ }
+
+ MySQL.releaseConnection({
+ connection: report.connection
+ }).exec(cb);
+ });
+ });
+};
diff --git a/test/unit/adapter.addAttribute.js b/test/unit/adapter.addAttribute.js
deleted file mode 100644
index 7cd07bcc..00000000
--- a/test/unit/adapter.addAttribute.js
+++ /dev/null
@@ -1,46 +0,0 @@
-var adapter = require('../../lib/adapter'),
- _ = require('lodash'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_addAttribute', done);
- });
-
- after(function(done) {
- support.Teardown('test_addAttribute', done);
- });
-
- /**
- * ADD ATTRIBUTE
- *
- * Adds a column to a Table
- */
-
- describe('.addAttribute()', function() {
-
- // Add a column to a table
- it('should add column color to the table', function(done) {
-
- adapter.addAttribute('test', 'test_addAttribute', 'color', 'string', function(err, result) {
- adapter.describe('test', 'test_addAttribute', function(err, result) {
-
- // Test Row length
- Object.keys(result).length.should.eql(4);
-
- // Test the name of the last column
- should.exist(result.color);
-
- done();
- });
- });
-
- });
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.avg.js b/test/unit/adapter.avg.js
deleted file mode 100644
index b20dfaad..00000000
--- a/test/unit/adapter.avg.js
+++ /dev/null
@@ -1,58 +0,0 @@
-var Sequel = require('waterline-sequel'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * AVG
- *
- * Adds a AVG select parameter to a sql statement
- */
-
- describe('.avg()', function() {
-
- describe('with array', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- average: ['age']
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the AVG aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT CAST( AVG("test"."age") AS float) AS age FROM "test" AS "test" WHERE ' +
- 'LOWER("test"."name") = $1 ';
-
- query.query[0].should.eql(sql);
- });
- });
-
- describe('with string', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- average: 'age'
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the AVG aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT CAST( AVG("test"."age") AS float) AS age FROM "test" AS "test" WHERE ' +
- 'LOWER("test"."name") = $1 ';
-
- query.query[0].should.eql(sql);
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.create.js b/test/unit/adapter.create.js
deleted file mode 100644
index 3c040ae7..00000000
--- a/test/unit/adapter.create.js
+++ /dev/null
@@ -1,82 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_create', done);
- });
-
- after(function(done) {
- support.Teardown('test_create', done);
- });
-
- // Attributes for the test table
- var attributes = {
- field_1: 'foo',
- field_2: 'bar'
- };
-
- /**
- * CREATE
- *
- * Insert a row into a table
- */
-
- describe('.create()', function() {
-
- // Insert a record
- it('should insert a single record', function(done) {
- adapter.create('test', 'test_create', attributes, function(err, result) {
-
- // Check record was actually inserted
- support.Client(function(err, client, close) {
- client.query('SELECT * FROM test_create', function(err, rows) {
- if (err) {
- return done(err);
- }
- // Test 1 row is returned
- rows.length.should.eql(1);
-
- // close client
- client.end();
-
- done();
- });
- });
- });
- });
-
- // Create Auto-Incremented ID
- it('should create an auto-incremented ID field', function(done) {
- adapter.create('test', 'test_create', attributes, function(err, result) {
-
- // Should have an ID of 2
- result.id.should.eql(2);
-
- done();
- });
- });
-
- it('should keep case', function(done) {
- var attributes = {
- field_1: 'Foo',
- field_2: 'bAr'
- };
-
- adapter.create('test', 'test_create', attributes, function(err, result) {
-
- result.field_1.should.eql('Foo');
- result.field_2.should.eql('bAr');
-
- done();
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.createEach.js b/test/unit/adapter.createEach.js
deleted file mode 100644
index 13f945f3..00000000
--- a/test/unit/adapter.createEach.js
+++ /dev/null
@@ -1,54 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_createEach', done);
- });
-
- after(function(done) {
- support.Teardown('test_createEach', done);
- });
-
- // Attributes for the test table
- var attributes = {
- field_1: 'foo',
- field_2: 'bar'
- };
-
- /**
- * CREATE EACH
- *
- * Insert an array of rows into a table
- */
-
- describe('.createEach()', function() {
-
- // Insert multiple records
- it('should insert multiple records', function(done) {
- adapter.createEach('test', 'test_createEach', [attributes, attributes], function(err, result) {
-
- // Check records were actually inserted
- support.Client(function(err, client) {
- client.query('SELECT * FROM test_createEach', function(err, rows) {
-
- // Test 2 rows are returned
- rows.length.should.eql(2);
-
- // close client
- client.end();
-
- done();
- });
- });
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.define.js b/test/unit/adapter.define.js
deleted file mode 100644
index cde31852..00000000
--- a/test/unit/adapter.define.js
+++ /dev/null
@@ -1,126 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.registerConnection(['test_define', 'user'], done);
- });
-
- after(function(done) {
- support.Teardown('test_define', done);
- });
-
- // Attributes for the test table
- var definition = {
- id : {
- type: 'integer',
- size: 64,
- autoIncrement: true
- },
- name : {
- type: 'string',
- notNull: true
- },
- email : 'string',
- title : 'string',
- phone : 'string',
- type : 'string',
- favoriteFruit : {
- defaultsTo: 'blueberry',
- type: 'string'
- },
- age : 'integer'
- };
-
- /**
- * DEFINE
- *
- * Create a new table with a defined set of attributes
- */
-
- describe('.define()', function() {
-
- describe('basic usage', function() {
-
- // Build Table from attributes
- it('should build the table', function(done) {
-
- adapter.define('test', 'test_define', definition, function(err) {
- adapter.describe('test', 'test_define', function(err, result) {
- Object.keys(result).length.should.eql(8);
- done();
- });
- });
-
- });
-
- // notNull constraint
- it('should create a bigint primary key', function(done) {
- adapter.define('test', 'test_define', definition, function(err) {
- support.Client(function(err, client) {
- var query = "SELECT COLUMN_TYPE from information_schema.COLUMNS "+
- "WHERE TABLE_SCHEMA = '" + support.Config.database + "' AND TABLE_NAME = 'test_define' AND COLUMN_NAME = 'id'";
-
- client.query(query, function(err, rows) {
- rows[0].COLUMN_TYPE.should.eql("bigint(20)");
- client.end();
- done();
- });
- });
- });
- });
-
- });
-
- it('should add a notNull constraint', function(done) {
- adapter.define('test', 'test_define', definition, function(err) {
- support.Client(function(err, client) {
- var query = "SELECT IS_NULLABLE from information_schema.COLUMNS "+
- "WHERE TABLE_SCHEMA = '" + support.Config.database + "' AND TABLE_NAME = 'test_define' AND COLUMN_NAME = 'name'";
-
- client.query(query, function(err, rows) {
- rows[0].IS_NULLABLE.should.eql("NO");
- client.end();
- done();
- });
- });
- });
- });
-
- describe('reserved words', function() {
-
- after(function(done) {
- support.Client(function(err, client) {
- var query = 'DROP TABLE user;';
- client.query(query, function(err) {
-
- // close client
- client.end();
-
- done();
- });
- });
- });
-
- // Build Table from attributes
- it('should escape reserved words', function(done) {
-
- adapter.define('test', 'user', definition, function(err) {
- adapter.describe('test', 'user', function(err, result) {
- Object.keys(result).length.should.eql(8);
- done();
- });
- });
-
- });
-
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.describe.js b/test/unit/adapter.describe.js
deleted file mode 100644
index e230ec61..00000000
--- a/test/unit/adapter.describe.js
+++ /dev/null
@@ -1,42 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_describe', done);
- });
-
- after(function(done) {
- support.Teardown('test_describe', done);
- });
-
- /**
- * DESCRIBE
- *
- * Similar to MySQL's Describe method this should list the
- * properties of a table.
- */
-
- describe('.describe()', function() {
-
- // Output Column Names
- it('should output the column names', function(done) {
- adapter.describe('test', 'test_describe', function(err, results) {
- Object.keys(results).length.should.eql(3);
-
- should.exist(results.id);
- should.exist(results.field_1);
- should.exist(results.field_2);
-
- done();
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.destroy.js b/test/unit/adapter.destroy.js
deleted file mode 100644
index a79d46f4..00000000
--- a/test/unit/adapter.destroy.js
+++ /dev/null
@@ -1,55 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_destroy', done);
- });
-
- after(function(done) {
- support.Teardown('test_destroy', done);
- });
-
- /**
- * DESTROY
- *
- * Remove a row from a table
- */
-
- describe('.destroy()', function() {
-
- describe('with options', function() {
-
- before(function(done) {
- support.Seed('test_destroy', done);
- });
-
- it('should destroy the record', function(done) {
- adapter.destroy('test', 'test_destroy', { where: { id: 1 }}, function(err, result) {
-
- // Check record was actually removed
- support.Client(function(err, client) {
- client.query('SELECT * FROM test_destroy', function(err, rows) {
-
- // Test no rows are returned
- rows.length.should.eql(0);
-
- // close client
- client.end();
-
- done();
- });
- });
-
- });
- });
-
- });
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.drop.js b/test/unit/adapter.drop.js
deleted file mode 100644
index c1dcc57f..00000000
--- a/test/unit/adapter.drop.js
+++ /dev/null
@@ -1,39 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_drop', done);
- });
-
- after(function(done) {
- adapter.teardown('test', done);
- });
-
- /**
- * DROP
- *
- * Drop a table and all it's records.
- */
-
- describe('.drop()', function() {
-
- // Drop the Test table
- it('should drop the table', function(done) {
-
- adapter.drop('test', 'test_drop', function(err, result) {
- adapter.describe('test', 'test_drop', function(err, result) {
- should.not.exist(result);
- done();
- });
- });
-
- });
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.find.js b/test/unit/adapter.find.js
deleted file mode 100644
index 1f384054..00000000
--- a/test/unit/adapter.find.js
+++ /dev/null
@@ -1,87 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_find', done);
- });
-
- after(function(done) {
- support.Teardown('test_find', done);
- });
-
- /**
- * FIND
- *
- * Returns an array of records from a SELECT query
- */
-
- describe('.find()', function() {
-
- describe('WHERE clause', function() {
-
- before(function(done) {
- support.Seed('test_find', done);
- });
-
- describe('key/value attributes', function() {
-
- it('should return the record set', function(done) {
- adapter.find('test', 'test_find', { where: { field_1: 'foo' } }, function(err, results) {
- results.length.should.eql(1);
- results[0].id.should.eql(1);
- done();
- });
- });
-
- });
-
- describe('comparators', function() {
-
- // Insert a unique record to test with
- before(function(done) {
- var query = [
- 'INSERT INTO test_find (field_1, field_2)',
- "values ('foobar', 'AR)H$daxx');"
- ].join('');
-
- support.Client(function(err, client) {
- client.query(query, function() {
-
- // close client
- client.end();
-
- done();
- });
- });
- });
-
- it('should support endsWith', function(done) {
-
- var criteria = {
- where: {
- field_2: {
- endsWith: 'AR)H$daxx'
- }
- }
- };
-
- adapter.find('test', 'test_find', criteria, function(err, results) {
- results.length.should.eql(1);
- results[0].field_2.should.eql('AR)H$daxx');
- done();
- });
- });
-
- });
-
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.groupBy.js b/test/unit/adapter.groupBy.js
deleted file mode 100644
index 39e7e780..00000000
--- a/test/unit/adapter.groupBy.js
+++ /dev/null
@@ -1,60 +0,0 @@
-var Sequel = require('waterline-sequel'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * groupBy
- *
- * Adds a Group By statement to a sql statement
- */
-
- describe('.groupBy()', function() {
-
- describe('with array', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- groupBy: ['name'],
- average: ['age']
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should append a Group By clause to the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT "test"."name", CAST( AVG("test"."age") AS float) AS age ' +
- 'FROM "test" AS "test" WHERE LOWER("test"."name") = $1 GROUP BY "test"."name"';
-
- query.query[0].should.eql(sql);
- });
- });
-
- describe('with string', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- groupBy: 'name',
- average: 'age'
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the MAX aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT "test"."name", CAST( AVG("test"."age") AS float) AS age ' +
- 'FROM "test" AS "test" WHERE LOWER("test"."name") = $1 GROUP BY "test"."name"'
-
- query.query[0].should.eql(sql);
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.index.js b/test/unit/adapter.index.js
deleted file mode 100644
index 247b6bdf..00000000
--- a/test/unit/adapter.index.js
+++ /dev/null
@@ -1,52 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Teardown
- */
-
- before(function(done) {
- support.registerConnection(['test_index'], done);
- });
-
- after(function(done) {
- support.Teardown('test_index', done);
- });
-
- // Attributes for the test table
- var definition = {
- id: {
- type: 'integer',
- autoIncrement: true
- },
- name: {
- type: 'string',
- index: true
- }
- };
-
- /**
- * Indexes
- *
- * Ensure Indexes get created correctly
- */
-
- describe('Index Attributes', function() {
-
- // Build Indicies from definition
- it('should add indicies', function(done) {
-
- adapter.define('test', 'test_index', definition, function(err) {
- adapter.describe('test', 'test_index', function(err, result) {
- result.name.indexed.should.eql(true);
- done();
- });
- });
-
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.joins.js b/test/unit/adapter.joins.js
deleted file mode 100644
index c78225e3..00000000
--- a/test/unit/adapter.joins.js
+++ /dev/null
@@ -1,97 +0,0 @@
-var Sequel = require('waterline-sequel'),
- _ = require('lodash'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * Joins
- *
- * Build up SQL queries using joins and subqueries.
- */
-
- describe('.joins()', function() {
-
- var petSchema = {
- name: 'string',
- id: {
- type: 'integer',
- autoIncrement: true,
- primaryKey: true,
- unique: true
- },
- createdAt: { type: 'datetime', default: 'NOW' },
- updatedAt: { type: 'datetime', default: 'NOW' },
- owner: {
- columnName: 'owner_id',
- type: 'integer',
- foreignKey: true,
- references: 'user',
- on: 'id',
- onKey: 'id'
- }
- };
-
- var userSchema = {
- name: 'string',
- id: {
- type: 'integer',
- autoIncrement: true,
- primaryKey: true,
- unique: true
- },
- createdAt: { type: 'datetime', default: 'NOW' },
- updatedAt: { type: 'datetime', default: 'NOW' },
- pets: {
- collection: 'pet',
- via: 'user',
- references: 'pet',
- on: 'owner_id',
- onKey: 'user'
- }
- };
-
- // Simple populate criteria, ex: .populate('pets')
- describe('populates', function() {
-
- // Lookup criteria
- var criteria = {
- instructions: {
- pet: {
- strategy: {strategy: 1, meta: { parentFK: 'id' }},
- instructions: [
- { parent: 'user',
- parentKey: 'id',
- child: 'pet',
- childKey: 'owner',
- select: [ 'name', 'id', 'createdAt', 'updatedAt', 'owner' ],
- alias: 'pet',
- removeParentKey: true,
- model: true,
- collection: false,
- criteria: {}
- }
- ]
- }
- },
- where: null,
- limit: 30,
- skip: 0
- };
-
- var schemaDef = {'user': Support.Schema('user', userSchema), 'pet': Support.Schema('pet', petSchema)};
-
- it('should build a query using inner joins', function() {
- var query = new Sequel(schemaDef, Support.SqlOptions).find('user', criteria);
- var sql = 'SELECT "user"."name", "user"."id", "user"."createdAt", "user"."updatedAt", '+
- '"__pet"."name" AS "id___name", "__pet"."id" AS "id___id", "__pet"."createdAt" ' +
- 'AS "id___createdAt", "__pet"."updatedAt" AS "id___updatedAt", "__pet"."owner_id" ' +
- 'AS "id___owner_id" FROM "user" AS "user" LEFT OUTER JOIN "pet" AS "__pet" ON ' +
- '"user".\"id\" = \"__pet\".\"owner\" LIMIT 30 OFFSET 0';
- query.query[0].should.eql(sql);
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.max.js b/test/unit/adapter.max.js
deleted file mode 100644
index eb1b4358..00000000
--- a/test/unit/adapter.max.js
+++ /dev/null
@@ -1,54 +0,0 @@
-var Sequel = require('waterline-sequel'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * MAX
- *
- * Adds a MAX select parameter to a sql statement
- */
-
- describe('.max()', function() {
-
- describe('with array', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- max: ['age']
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the max aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT MAX("test"."age") AS age FROM "test" AS "test" WHERE LOWER("test"."name") = $1 ';
- query.query[0].should.eql(sql);
- });
- });
-
- describe('with string', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- max: 'age'
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the MAX aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT MAX("test"."age") AS age FROM "test" AS "test" WHERE LOWER("test"."name") = $1 ';
- query.query[0].should.eql(sql);
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.min.js b/test/unit/adapter.min.js
deleted file mode 100644
index 57579041..00000000
--- a/test/unit/adapter.min.js
+++ /dev/null
@@ -1,54 +0,0 @@
-var Sequel = require('waterline-sequel'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * MIN
- *
- * Adds a MIN select parameter to a sql statement
- */
-
- describe('.min()', function() {
-
- describe('with array', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- min: ['age']
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the min aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT MIN("test"."age") AS age FROM "test" AS "test" WHERE LOWER("test"."name") = $1 ';
- query.query[0].should.eql(sql);
- });
- });
-
- describe('with string', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- min: 'age'
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the MIN aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT MIN("test"."age") AS age FROM "test" AS "test" WHERE LOWER("test"."name") = $1 ';
- query.query[0].should.eql(sql);
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.removeAttribute.js b/test/unit/adapter.removeAttribute.js
deleted file mode 100644
index 488d9b89..00000000
--- a/test/unit/adapter.removeAttribute.js
+++ /dev/null
@@ -1,45 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_removeAttribute', done);
- });
-
- after(function(done) {
- support.Teardown('test_removeAttribute', done);
- });
-
- /**
- * REMOVE ATTRIBUTE
- *
- * Drops a Column from a Table
- */
-
- describe('.removeAttribute()', function() {
-
-    // Remove a column from a table
- it('should remove column field_2 from the table', function(done) {
-
- adapter.removeAttribute('test', 'test_removeAttribute', 'field_2', function(err) {
- adapter.describe('test', 'test_removeAttribute', function(err, result) {
-
- // Test Row length
- Object.keys(result).length.should.eql(2);
-
-          // Ensure the dropped column no longer exists
- should.not.exist(result.field_2);
-
- done();
- });
- });
-
- });
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.sum.js b/test/unit/adapter.sum.js
deleted file mode 100644
index 59bb9883..00000000
--- a/test/unit/adapter.sum.js
+++ /dev/null
@@ -1,58 +0,0 @@
-var Sequel = require('waterline-sequel'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * SUM
- *
- * Adds a SUM select parameter to a sql statement
- */
-
- describe('.sum()', function() {
-
- describe('with array', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- sum: ['age']
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the SUM aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT CAST(SUM("test"."age") AS float) AS age FROM "test" AS "test" WHERE ' +
- 'LOWER("test"."name") = $1 ';
-
- query.query[0].should.eql(sql);
- });
- });
-
- describe('with string', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- sum: 'age'
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should use the SUM aggregate option in the select statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT CAST(SUM("test"."age") AS float) AS age FROM "test" AS "test" WHERE ' +
- 'LOWER("test"."name") = $1 ';
-
- query.query[0].should.eql(sql);
- });
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/adapter.update.js b/test/unit/adapter.update.js
deleted file mode 100644
index 3d5f57dd..00000000
--- a/test/unit/adapter.update.js
+++ /dev/null
@@ -1,53 +0,0 @@
-var adapter = require('../../lib/adapter'),
- should = require('should'),
- support = require('./support/bootstrap');
-
-describe('adapter', function() {
-
- /**
- * Setup and Teardown
- */
-
- before(function(done) {
- support.Setup('test_update', done);
- });
-
- after(function(done) {
- support.Teardown('test_update', done);
- });
-
- /**
- * UPDATE
- *
- * Update a row in a table
- */
-
- describe('.update()', function() {
-
- describe('with options', function() {
-
- before(function(done) {
- support.Seed('test_update', done);
- });
-
- it('should update the record', function(done) {
-
- adapter.update('test', 'test_update', { where: { id: 1 }}, { field_1: 'foobar' }, function(err, result) {
- result[0].field_1.should.eql('foobar');
- done();
- });
-
- });
-
- it('should keep case', function(done) {
-
- adapter.update('test', 'test_update', { where: { id: 1 }}, { field_1: 'FooBar' }, function(err, result) {
- result[0].field_1.should.eql('FooBar');
- done();
- });
-
- });
-
- });
- });
-});
\ No newline at end of file
diff --git a/test/unit/query.skip.js b/test/unit/query.skip.js
deleted file mode 100644
index c268a957..00000000
--- a/test/unit/query.skip.js
+++ /dev/null
@@ -1,32 +0,0 @@
-var Sequel = require('waterline-sequel'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * SKIP
- *
- * Adds an OFFSET parameter to a sql statement
- */
-
- describe('.skip()', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- skip: 1
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' } })};
-
- it('should append the SKIP clause to the query', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT "test"."name" FROM "test" AS "test" WHERE LOWER("test"."name") = $1 LIMIT 184467440737095516 OFFSET 1';
- query.query[0].should.eql(sql);
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/query.sort.js b/test/unit/query.sort.js
deleted file mode 100644
index e9a3af38..00000000
--- a/test/unit/query.sort.js
+++ /dev/null
@@ -1,81 +0,0 @@
-var Sequel = require('waterline-sequel'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * SORT
- *
- * Adds an ORDER BY parameter to a sql statement
- */
-
- describe('.sort()', function() {
-
- it('should append the ORDER BY clause to the query', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- sort: {
- name: 1
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' } })};
-
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT "test"."name" FROM "test" AS "test" WHERE LOWER("test"."name") = $1 ' +
- 'ORDER BY "test"."name" ASC';
-
- query.query[0].should.eql(sql);
- });
-
- it('should sort by multiple columns', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- sort: {
- name: 1,
- age: 1
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT "test"."name", "test"."age" FROM "test" AS "test" WHERE LOWER("test"."name") = $1 ' +
- 'ORDER BY "test"."name" ASC, "test"."age" ASC';
-
- query.query[0].should.eql(sql);
- });
-
- it('should allow desc and asc ordering', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo'
- },
- sort: {
- name: 1,
- age: -1
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT "test"."name", "test"."age" FROM "test" AS "test" WHERE LOWER("test"."name") = $1 ' +
- 'ORDER BY "test"."name" ASC, "test"."age" DESC';
-
- query.query[0].should.eql(sql);
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/query.where.js b/test/unit/query.where.js
deleted file mode 100644
index 03a746bb..00000000
--- a/test/unit/query.where.js
+++ /dev/null
@@ -1,209 +0,0 @@
-var Sequel = require('waterline-sequel'),
- should = require('should'),
- Support = require('./support/bootstrap');
-
-describe('query', function() {
-
- /**
- * WHERE
- *
-   * Build the WHERE part of a sql statement from a js object
- */
-
- describe('.where()', function() {
-
- describe('`AND` criteria', function() {
-
- describe('case insensitivity', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'Foo',
- age: 1
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should build a SELECT statement using LOWER() on strings', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
-
- var sql = 'SELECT "test"."name", "test"."age" FROM "test" AS "test" ' +
- 'WHERE LOWER("test"."name") = $1 AND "test"."age" = $2 ';
-
- query.query[0].should.eql(sql);
- query.values[0][0].should.eql('foo');
- query.values[0][1].should.eql(1);
- });
- });
-
- describe('criteria is simple key value lookups', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo',
- age: 27
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should build a simple SELECT statement', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
-
- var sql = 'SELECT "test"."name", "test"."age" FROM "test" AS "test" ' +
- 'WHERE LOWER("test"."name") = $1 AND "test"."age" = $2 ';
-
- query.query[0].should.eql(sql);
- query.values[0].length.should.eql(2);
- });
-
- });
-
- describe('has multiple comparators', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: 'foo',
- age: {
- '>' : 27,
- '<' : 30
- }
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
- it('should build a SELECT statement with comparators', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
-
- var sql = 'SELECT "test"."name", "test"."age" FROM "test" AS "test" ' +
- 'WHERE LOWER("test"."name") = $1 AND "test"."age" > $2 AND "test"."age" < $3 ';
-
- query.query[0].should.eql(sql);
- query.values[0].length.should.eql(3);
- });
-
- });
- });
-
- describe('`LIKE` criteria', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- like: {
- type: '%foo%',
- name: 'bar%'
- }
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, type: { type: 'text' } })};
-
- it('should build a SELECT statement with ILIKE', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
-
- var sql = 'SELECT "test"."name", "test"."type" FROM "test" AS "test" WHERE LOWER("test"."type") ILIKE $1 ' +
- 'AND LOWER("test"."name") ILIKE $2 ';
-
- query.query[0].should.eql(sql);
- query.values[0].length.should.eql(2);
- });
-
- });
-
- describe('`OR` criteria', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- or: [
- { like: { foo: '%foo%' } },
- { like: { bar: '%bar%' } }
- ]
- }
- };
-
- var schema = {'test': Support.Schema('test', { foo: { type: 'text' }, bar: { type: 'text'} })};
-
- it('should build a SELECT statement with multiple like statements', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
-
- var sql = 'SELECT "test"."foo", "test"."bar" FROM "test" AS "test" WHERE ((LOWER("test"."foo") ILIKE $1) ' +
- 'OR (LOWER("test"."bar") ILIKE $2)) ';
-
- query.query[0].should.eql(sql);
- query.values[0].length.should.eql(2);
- });
- });
-
- describe('`IN` criteria', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- name: [
- 'foo',
- 'bar',
- 'baz'
- ]
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, myId: { type: 'integer'} })};
-
- var camelCaseCriteria = {
- where: {
- myId: [
- 1,
- 2,
- 3
- ]
- }
- };
-
- it('should build a SELECT statement with an IN array', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
- var sql = 'SELECT "test"."name", "test"."myId" FROM "test" AS "test" WHERE LOWER("test"."name") IN ($1,$2,$3) ';
-
- query.query[0].should.eql(sql);
- query.values[0].length.should.eql(3);
- });
-
-    it('should build a SELECT statement with an IN array and camel case column', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', camelCaseCriteria);
-
- query.query[0].should.eql('SELECT "test"."name", "test"."myId" FROM "test" AS "test" WHERE "test"."myId" IN ($1,$2,$3) ');
- query.values[0].length.should.eql(3);
- });
-
- });
-
- describe('`NOT` criteria', function() {
-
- // Lookup criteria
- var criteria = {
- where: {
- age: {
- not: 40
- }
- }
- };
-
- var schema = {'test': Support.Schema('test', { name: { type: 'text' }, age: { type: 'integer'} })};
-
-    it('should build a SELECT statement with a NOT clause', function() {
- var query = new Sequel(schema, Support.SqlOptions).find('test', criteria);
-
- query.query[0].should.eql('SELECT "test"."name", "test"."age" FROM "test" AS "test" WHERE "test"."age" <> $1 ');
- query.values[0].length.should.eql(1);
- });
-
- });
-
- });
-});
\ No newline at end of file
diff --git a/test/unit/support/bootstrap.js b/test/unit/support/bootstrap.js
deleted file mode 100644
index fc410a0d..00000000
--- a/test/unit/support/bootstrap.js
+++ /dev/null
@@ -1,146 +0,0 @@
-var mysql = require('mysql'),
- _ = require('lodash'),
- adapter = require('../../../lib/adapter');
-
-var Support = module.exports = {};
-
-Support.SqlOptions = {
- parameterized: true,
- caseSensitive: true,
- escapeCharacter: '"',
- casting: true,
- canReturnValues: true,
- escapeInserts: true,
- declareDeleteAlias: false
-};
-
-Support.Config = {
- host: process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost',
- port: process.env.WATERLINE_ADAPTER_TESTS_PORT || 3306,
- user: process.env.WATERLINE_ADAPTER_TESTS_USER || 'root',
- password: process.env.WATERLINE_ADAPTER_TESTS_PASSWORD || '',
-  database: process.env.WATERLINE_ADAPTER_TESTS_DATABASE || 'sails_mysql'
-};
-
-// Fixture Collection Def
-Support.Collection = function(name, def) {
- var schemaDef = {};
- schemaDef[name] = Support.Schema(name, def);
- return {
- identity: name,
- tableName: name,
- connection: 'test',
- definition: def || Support.Definition,
- waterline: { schema: schemaDef }
- };
-};
-
-// Fixture Table Definition
-Support.Definition = {
- field_1: { type: 'string' },
- field_2: { type: 'string' },
- id: {
- type: 'integer',
- autoIncrement: true,
- size: 64,
- primaryKey: true
- }
-};
-
-Support.Schema = function(name, def) {
- return {
- connection: 'test',
- identity: name,
- tableName: name,
- attributes: def || Support.Definition
- };
-};
-
-// Register and Define a Collection
-Support.Setup = function(tableName, cb) {
-
- var collection = Support.Collection(tableName);
-
- var collections = {};
- collections[tableName] = collection;
-
- var connection = _.cloneDeep(Support.Config);
- connection.identity = 'test';
-
- adapter.registerConnection(connection, collections, function(err) {
- if(err) return cb(err);
- adapter.define('test', tableName, Support.Definition, function(err) {
- if(err) return cb(err);
- cb();
- });
- });
-};
-
-// Just register a connection
-Support.registerConnection = function(tableNames, cb) {
- var collections = {};
-
- tableNames.forEach(function(name) {
- var collection = Support.Collection(name);
- collections[name] = collection;
- });
-
- var connection = _.cloneDeep(Support.Config);
- connection.identity = 'test';
-
- adapter.registerConnection(connection, collections, cb);
-};
-
-// Remove a table
-Support.Teardown = function(tableName, cb) {
- var client = mysql.createConnection(this.Config);
-
- dropTable(tableName, client, function(err) {
- if(err) {
- return cb(err);
- }
-
- adapter.teardown('test', function(err) {
- cb();
- });
-
- });
-};
-
-// Return a client used for testing
-Support.Client = function(cb) {
- var connection = mysql.createConnection(this.Config);
- connection.connect(function(err) {
-    if(err) { return cb(err); }
- cb(null, connection);
- });
-};
-
-// Seed a record to use for testing
-Support.Seed = function(tableName, cb) {
- this.Client(function(err, client) {
- createRecord(tableName, client, function(err) {
- if(err) {
- return cb(err);
- }
- cb();
- });
- });
-};
-
-function dropTable(table, client, cb) {
- client.connect();
-
- var query = "DROP TABLE " + table + ';';
- client.query(query, cb);
-}
-
-function createRecord(table, client, cb) {
- var query = [
- "INSERT INTO " + table + " (field_1, field_2) " +
- "values ('foo', 'bar');"
- ].join('');
-
- client.query(query, cb);
-}
\ No newline at end of file