// knex/test/integration/builder/additional.js
/*global describe, expect, it*/
/*eslint no-var:0, max-len:0 */
'use strict';
var Knex = require('../../../knex');
var _ = require('lodash');
var Promise = require('bluebird');
module.exports = function(knex) {
describe('Additional', function() {
// Verifies the `postProcessResponse` client hook: it must run exactly once
// per query and receive the query context set via `.queryContext()`, for
// plain queries, raw queries, and queries inside transactions.
describe('Custom response processing', () => {
  before('setup custom response handler', () => {
    // Tag every response so tests can assert the hook fired exactly once
    // and saw the expected context.
    knex.client.config.postProcessResponse = (response, queryContext) => {
      response.callCount = response.callCount ? response.callCount + 1 : 1;
      response.queryContext = queryContext;
      return response;
    };
  });

  after('restore client configuration', () => {
    knex.client.config.postProcessResponse = null;
  });

  it('should process normal response', () => {
    return knex('accounts')
      .limit(1)
      .then((res) => {
        expect(res.callCount).to.equal(1);
      });
  });

  it('should pass query context to the custom handler', () => {
    return knex('accounts')
      .queryContext('the context')
      .limit(1)
      .then((res) => {
        expect(res.queryContext).to.equal('the context');
      });
  });

  it('should process raw response', () => {
    return knex.raw('select * from ??', ['accounts']).then((res) => {
      expect(res.callCount).to.equal(1);
    });
  });

  it('should pass query context for raw responses', () => {
    return knex
      .raw('select * from ??', ['accounts'])
      .queryContext('the context')
      .then((res) => {
        expect(res.queryContext).to.equal('the context');
      });
  });

  it('should process response done in transaction', () => {
    return knex
      .transaction((trx) => {
        return trx('accounts')
          .limit(1)
          .then((res) => {
            expect(res.callCount).to.equal(1);
            return res;
          });
      })
      .then((res) => {
        // The processed response must survive the transaction boundary.
        expect(res.callCount).to.equal(1);
      });
  });

  it('should pass query context for responses from transactions', () => {
    return knex
      .transaction((trx) => {
        return trx('accounts')
          .queryContext('the context')
          .limit(1)
          .then((res) => {
            expect(res.queryContext).to.equal('the context');
            return res;
          });
      })
      .then((res) => {
        expect(res.queryContext).to.equal('the context');
      });
  });
});
// Verifies that `columnInfo()` honors the identifier-mapping hooks:
// outgoing identifiers are snake_cased by `wrapIdentifier`, and result
// keys are camelCased by `postProcessResponse`, so both naming styles work.
describe('columnInfo with wrapIdentifier and postProcessResponse', () => {
  before('setup hooks', () => {
    knex.client.config.postProcessResponse = (response) =>
      _.mapKeys(response, (value, columnName) => _.camelCase(columnName));
    knex.client.config.wrapIdentifier = (identifier, origImpl) =>
      origImpl(_.snakeCase(identifier));
  });

  after('restore client configuration', () => {
    knex.client.config.postProcessResponse = null;
    knex.client.config.wrapIdentifier = null;
  });

  // Column names as mapped back to camelCase by the hook above.
  const expectedColumns = ['id', 'accountId', 'details', 'status', 'jsonData'];

  it('should work using camelCased table name', () => {
    return knex('testTableTwo')
      .columnInfo()
      .then((columnInfo) => {
        expect(Object.keys(columnInfo)).to.have.all.members(expectedColumns);
      });
  });

  it('should work using snake_cased table name', () => {
    return knex('test_table_two')
      .columnInfo()
      .then((columnInfo) => {
        expect(Object.keys(columnInfo)).to.have.all.members(expectedColumns);
      });
  });
});
// TODO: This doesn't work on oracle yet.
if (['pg', 'mssql'].includes(knex.client.driverName)) {
  // Verifies that `.returning()` round-trips correctly through the
  // identifier hooks: `_foo` is appended to keys on the way out and
  // stripped from identifiers on the way in.
  describe('returning with wrapIdentifier and postProcessResponse`', () => {
    let origHooks = {};

    before('setup custom hooks', () => {
      // Save whatever hooks an outer suite may have installed.
      origHooks.postProcessResponse = knex.client.config.postProcessResponse;
      origHooks.wrapIdentifier = knex.client.config.wrapIdentifier;

      // Add `_foo` to each identifier.
      knex.client.config.postProcessResponse = (res) => {
        if (Array.isArray(res)) {
          return res.map((it) => {
            if (typeof it === 'object') {
              return _.mapKeys(it, (value, key) => {
                return key + '_foo';
              });
            } else {
              return it;
            }
          });
        } else {
          return res;
        }
      };

      // Remove `_foo` from the end of each identifier.
      knex.client.config.wrapIdentifier = (id) => {
        return id.substring(0, id.length - 4);
      };
    });

    after('restore hooks', () => {
      knex.client.config.postProcessResponse = origHooks.postProcessResponse;
      knex.client.config.wrapIdentifier = origHooks.wrapIdentifier;
    });

    it('should return the correct column when a single property is given to returning', () => {
      return knex('accounts_foo')
        .insert({ balance_foo: 123 })
        .returning('balance_foo')
        .then((res) => {
          expect(res).to.eql([123]);
        });
    });

    it('should return the correct columns when multiple properties are given to returning', () => {
      return knex('accounts_foo')
        .insert({ balance_foo: 123, email_foo: 'foo@bar.com' })
        .returning(['balance_foo', 'email_foo'])
        .then((res) => {
          expect(res).to.eql([
            { balance_foo: 123, email_foo: 'foo@bar.com' },
          ]);
        });
    });
  });
}
it('should forward the .get() function from bluebird', function() {
  return knex('accounts')
    .select()
    .limit(1)
    .then(function(accounts) {
      var firstAccount = accounts[0];
      // Re-run the same query and pluck index 0 via bluebird's .get().
      return knex('accounts')
        .select()
        .limit(1)
        .get(0)
        .then(function(account) {
          // Bug fix: `expect(a == b)` asserted nothing — Chai requires a
          // matcher chain to actually perform an assertion.
          expect(account.id).to.equal(firstAccount.id);
        });
    });
});
it('should forward the .mapSeries() function from bluebird', function() {
  // Resolves an incrementing counter after a random delay; if .mapSeries()
  // runs serially, values arrive in counter order despite the jitter.
  var asyncTask = function() {
    return new Promise(function(resolve, reject) {
      var output = asyncTask.num++;
      setTimeout(function() {
        resolve(output);
      }, Math.random() * 200);
    });
  };
  asyncTask.num = 1;

  var returnedValues = [];
  return knex('accounts')
    .select()
    .limit(3)
    .mapSeries(function(account) {
      return asyncTask().then(function(number) {
        returnedValues.push(number);
      });
    })
    .then(function() {
      // Bug fix: the original `expect(a == b)` calls asserted nothing.
      expect(returnedValues[0]).to.equal(1);
      expect(returnedValues[1]).to.equal(2);
      expect(returnedValues[2]).to.equal(3);
    });
});
it('should forward the .delay() function from bluebird', function() {
  var startTime = new Date().valueOf();
  return knex('accounts')
    .select()
    .limit(1)
    .delay(300)
    .then(function(accounts) {
      // Bug fix: `expect(expr > 300)` asserted nothing. Allow 1ms of
      // timer granularity slack when checking the elapsed time.
      expect(new Date().valueOf() - startTime).to.be.greaterThan(299);
    });
});
it('should truncate a table with truncate', function() {
  return knex('test_table_two')
    .truncate()
    .testSql(function(tester) {
      tester('mysql', 'truncate `test_table_two`');
      tester('pg', 'truncate "test_table_two" restart identity');
      tester('pg-redshift', 'truncate "test_table_two"');
      tester('sqlite3', 'delete from `test_table_two`');
      tester('oracledb', 'truncate table "test_table_two"');
      tester('mssql', 'truncate table [test_table_two]');
    })
    .then(() => {
      // Table must be empty after truncation.
      return knex('test_table_two')
        .select('*')
        .then((resp) => {
          expect(resp).to.have.length(0);
        });
    })
    .then(() => {
      // Insert new data after truncate and make sure ids restart at 1.
      // This doesn't currently work on oracle, where the created sequence
      // needs to be manually reset.
      // On redshift, one would need to create an entirely new table and do
      // `insert into ... (select ...); alter table rename...`
      if (
        /oracle/i.test(knex.client.driverName) ||
        /redshift/i.test(knex.client.driverName)
      ) {
        return;
      }
      return knex('test_table_two')
        .insert({ status: 1 })
        .then((res) => {
          return knex('test_table_two')
            .select('id')
            .first()
            .then((res) => {
              expect(res).to.be.an('object');
              expect(res.id).to.equal(1);
            });
        });
    });
});
it('should allow raw queries directly with `knex.raw`', function() {
  // Per-dialect statement that lists the database's tables.
  var tables = {
    mysql: 'SHOW TABLES',
    mysql2: 'SHOW TABLES',
    pg:
      "SELECT table_name FROM information_schema.tables WHERE table_schema='public'",
    'pg-redshift':
      "SELECT table_name FROM information_schema.tables WHERE table_schema='public'",
    sqlite3: "SELECT name FROM sqlite_master WHERE type='table';",
    oracledb: 'select TABLE_NAME from USER_TABLES',
    mssql:
      "SELECT table_name FROM information_schema.tables WHERE table_schema='dbo'",
  };
  return knex.raw(tables[knex.client.driverName]).testSql(function(tester) {
    tester(knex.client.driverName, tables[knex.client.driverName]);
  });
});
it('should allow using the primary table as a raw statement', function() {
  // A raw expression used as the table name is embedded verbatim in the SQL.
  var sql = knex(knex.raw('raw_table_name')).toQuery();
  expect(sql).to.equal('select * from raw_table_name');
});
it('should allow using .fn-methods to create raw statements', function() {
  // `knex.fn.now()` should build the same kind of object as `knex.raw()`.
  // The original line `expect(a === b);` had no matcher and asserted nothing;
  // compare the prototypes explicitly instead.
  expect(Object.getPrototypeOf(knex.fn.now())).to.equal(
    Object.getPrototypeOf(knex.raw())
  );
  expect(knex.fn.now().toQuery()).to.equal('CURRENT_TIMESTAMP');
  // Optional fractional-seconds precision argument.
  expect(knex.fn.now(6).toQuery()).to.equal('CURRENT_TIMESTAMP(6)');
});
it('gets the columnInfo', function() {
  // Each dialect reports column metadata with its own native type names and
  // length semantics, so the expected object is asserted per dialect.
  return knex('datatype_test')
    .columnInfo()
    .testSql(function(tester) {
      tester(
        'mysql',
        'select * from information_schema.columns where table_name = ? and table_schema = ?',
        null,
        {
          enum_value: {
            defaultValue: null,
            maxLength: 1,
            nullable: true,
            type: 'enum',
          },
          uuid: {
            defaultValue: null,
            maxLength: 36,
            nullable: false,
            type: 'char',
          },
        }
      );
      tester(
        'pg',
        'select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = current_schema()',
        null,
        {
          enum_value: {
            defaultValue: null,
            maxLength: null,
            nullable: true,
            type: 'text',
          },
          uuid: {
            defaultValue: null,
            maxLength: null,
            nullable: false,
            type: 'uuid',
          },
        }
      );
      tester(
        'pg-redshift',
        'select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = current_schema()',
        null,
        {
          enum_value: {
            defaultValue: null,
            maxLength: 255,
            nullable: true,
            type: 'character varying',
          },
          uuid: {
            defaultValue: null,
            maxLength: 36,
            nullable: false,
            type: 'character',
          },
        }
      );
      // Note: sqlite3 reports maxLength as the string '36', not a number.
      tester('sqlite3', 'PRAGMA table_info(`datatype_test`)', [], {
        enum_value: {
          defaultValue: null,
          maxLength: null,
          nullable: true,
          type: 'text',
        },
        uuid: {
          defaultValue: null,
          maxLength: '36',
          nullable: false,
          type: 'char',
        },
      });
      tester(
        'oracledb',
        "select * from xmltable( '/ROWSET/ROW'\n passing dbms_xmlgen.getXMLType('\n select char_col_decl_length, column_name, data_type, data_default, nullable\n from user_tab_columns where table_name = ''datatype_test'' ')\n columns\n CHAR_COL_DECL_LENGTH number, COLUMN_NAME varchar2(200), DATA_TYPE varchar2(106),\n DATA_DEFAULT clob, NULLABLE varchar2(1))",
        [],
        {
          enum_value: {
            defaultValue: null,
            nullable: true,
            maxLength: 1,
            type: 'VARCHAR2',
          },
          uuid: {
            defaultValue: null,
            nullable: false,
            maxLength: 36,
            type: 'CHAR',
          },
        }
      );
      tester(
        'mssql',
        "select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = 'dbo'",
        ['datatype_test', 'knex_test'],
        {
          enum_value: {
            defaultValue: null,
            maxLength: 100,
            nullable: true,
            type: 'nvarchar',
          },
          uuid: {
            defaultValue: null,
            maxLength: null,
            nullable: false,
            type: 'uniqueidentifier',
          },
        }
      );
    });
});
it('gets the columnInfo with columntype', function() {
  // Same as the full columnInfo test, but asking for a single column:
  // `columnInfo('uuid')` should return only that column's metadata object.
  return knex('datatype_test')
    .columnInfo('uuid')
    .testSql(function(tester) {
      tester(
        'mysql',
        'select * from information_schema.columns where table_name = ? and table_schema = ?',
        null,
        {
          defaultValue: null,
          maxLength: 36,
          nullable: false,
          type: 'char',
        }
      );
      tester(
        'pg',
        'select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = current_schema()',
        null,
        {
          defaultValue: null,
          maxLength: null,
          nullable: false,
          type: 'uuid',
        }
      );
      tester(
        'pg-redshift',
        'select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = current_schema()',
        null,
        {
          defaultValue: null,
          maxLength: 36,
          nullable: false,
          type: 'character',
        }
      );
      // Note: sqlite3 reports maxLength as the string '36', not a number.
      tester('sqlite3', 'PRAGMA table_info(`datatype_test`)', [], {
        defaultValue: null,
        maxLength: '36',
        nullable: false,
        type: 'char',
      });
      tester(
        'oracledb',
        "select * from xmltable( '/ROWSET/ROW'\n passing dbms_xmlgen.getXMLType('\n select char_col_decl_length, column_name, data_type, data_default, nullable\n from user_tab_columns where table_name = ''datatype_test'' ')\n columns\n CHAR_COL_DECL_LENGTH number, COLUMN_NAME varchar2(200), DATA_TYPE varchar2(106),\n DATA_DEFAULT clob, NULLABLE varchar2(1))",
        [],
        {
          defaultValue: null,
          maxLength: 36,
          nullable: false,
          type: 'CHAR',
        }
      );
      tester(
        'mssql',
        "select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = 'dbo'",
        null,
        {
          defaultValue: null,
          maxLength: null,
          nullable: false,
          type: 'uniqueidentifier',
        }
      );
    });
});
it('#2184 - should properly escape table name for SQLite columnInfo', function() {
  // SQLite-only regression test: `group` is a reserved word, so columnInfo
  // must quote the table name when issuing PRAGMA table_info.
  if (knex.client.driverName !== 'sqlite3') {
    return;
  }
  return knex.schema
    .dropTableIfExists('group')
    .then(function() {
      return knex.schema.createTable('group', function(table) {
        table.integer('foo');
      });
    })
    .then(function() {
      return knex('group').columnInfo();
    })
    .then(function(columnInfo) {
      expect(columnInfo).to.deep.equal({
        foo: {
          type: 'integer',
          maxLength: null,
          nullable: true,
          defaultValue: null,
        },
      });
    });
});
it('should allow renaming a column', function() {
  // Each dialect labels the count(*) result column differently
  // (mssql returns it unnamed, i.e. the empty-string key).
  var countColumn;
  switch (knex.client.driverName) {
    case 'oracledb':
      countColumn = 'COUNT(*)';
      break;
    case 'mssql':
      countColumn = '';
      break;
    default:
      countColumn = 'count(*)';
      break;
  }
  var count,
    inserts = [];
  // Seed 40 rows so we can verify no data is lost by the rename.
  _.times(40, function(i) {
    inserts.push({
      email: 'email' + i,
      first_name: 'Test',
      last_name: 'Data',
    });
  });
  return knex('accounts')
    .insert(inserts)
    .then(function() {
      return knex.count('*').from('accounts');
    })
    .then(function(resp) {
      // Remember the row count before renaming `about` -> `about_col`.
      count = resp[0][countColumn];
      return knex.schema
        .table('accounts', function(t) {
          t.renameColumn('about', 'about_col');
        })
        .testSql(function(tester) {
          tester('mysql', ['show fields from `accounts` where field = ?']);
          tester('pg', [
            'alter table "accounts" rename "about" to "about_col"',
          ]);
          tester('pg-redshift', [
            'alter table "accounts" rename "about" to "about_col"',
          ]);
          tester('sqlite3', ['PRAGMA table_info(`accounts`)']);
          // Oracle needs a PL/SQL block: it must also recreate the
          // auto-increment trigger if the renamed column was the PK.
          tester('oracledb', [
            'DECLARE PK_NAME VARCHAR(200); IS_AUTOINC NUMBER := 0; BEGIN EXECUTE IMMEDIATE (\'ALTER TABLE "accounts" RENAME COLUMN "about" TO "about_col"\'); SELECT COUNT(*) INTO IS_AUTOINC from "USER_TRIGGERS" where trigger_name = \'accounts_autoinc_trg\'; IF (IS_AUTOINC > 0) THEN SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = \'P\' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = \'accounts\'; IF (\'about_col\' = PK_NAME) THEN EXECUTE IMMEDIATE (\'DROP TRIGGER "accounts_autoinc_trg"\'); EXECUTE IMMEDIATE (\'create or replace trigger "accounts_autoinc_trg" BEFORE INSERT on "accounts" for each row declare checking number := 1; begin if (:new."about_col" is null) then while checking >= 1 loop select "accounts_seq".nextval into :new."about_col" from dual; select count("about_col") into checking from "accounts" where "about_col" = :new."about_col"; end loop; end if; end;\'); end if; end if;END;',
          ]);
          tester('mssql', ["exec sp_rename ?, ?, 'COLUMN'"]);
        });
    })
    .then(function() {
      return knex.count('*').from('accounts');
    })
    .then(function(resp) {
      // Row count must be unchanged after the rename.
      expect(resp[0][countColumn]).to.equal(count);
    })
    .then(function() {
      // Selecting the new column name must succeed.
      return knex('accounts').select('about_col');
    })
    .then(function() {
      // Rename back so later tests see the original schema.
      return knex.schema.table('accounts', function(t) {
        t.renameColumn('about_col', 'about');
      });
    })
    .then(function() {
      return knex.count('*').from('accounts');
    })
    .then(function(resp) {
      expect(resp[0][countColumn]).to.equal(count);
    });
});
it('should allow dropping a column', function() {
  // Each dialect labels the count(*) result column differently
  // (mssql returns it unnamed, i.e. the empty-string key).
  var countColumn;
  switch (knex.client.driverName) {
    case 'oracledb':
      countColumn = 'COUNT(*)';
      break;
    case 'mssql':
      countColumn = '';
      break;
    default:
      countColumn = 'count(*)';
      break;
  }
  var count;
  return knex
    .count('*')
    .from('accounts')
    .then(function(resp) {
      // Remember the row count before dropping the column.
      count = resp[0][countColumn];
    })
    .then(function() {
      return knex.schema
        .table('accounts', function(t) {
          t.dropColumn('first_name');
        })
        .testSql(function(tester) {
          tester('mysql', ['alter table `accounts` drop `first_name`']);
          tester('pg', ['alter table "accounts" drop column "first_name"']);
          tester('pg-redshift', [
            'alter table "accounts" drop column "first_name"',
          ]);
          tester('sqlite3', ['PRAGMA table_info(`accounts`)']);
          tester('oracledb', [
            'alter table "accounts" drop ("first_name")',
          ]);
          tester('mssql', [
            'ALTER TABLE [accounts] DROP COLUMN [first_name]',
          ]);
        });
    })
    .then(function() {
      return knex
        .select('*')
        .from('accounts')
        .first();
    })
    .then(function(resp) {
      // `first_name` must be gone; every other column must survive.
      expect(_.keys(resp).sort()).to.eql([
        'about',
        'balance',
        'created_at',
        'email',
        'id',
        'last_name',
        'logins',
        'phone',
        'updated_at',
      ]);
    })
    .then(function() {
      return knex.count('*').from('accounts');
    })
    .then(function(resp) {
      // Dropping a column must not change the number of rows.
      expect(resp[0][countColumn]).to.equal(count);
    });
});
it('.timeout() should throw TimeoutError', function() {
  var driverName = knex.client.driverName;
  if (driverName === 'sqlite3') {
    return;
  } //TODO -- No built-in support for sleeps
  // Redshift has no sleep primitive either.
  if (/redshift/.test(driverName)) {
    return;
  }
  // One-second sleep per dialect; the 200ms timeout must fire first.
  var testQueries = {
    pg: function() {
      return knex.raw('SELECT pg_sleep(1)');
    },
    mysql: function() {
      return knex.raw('SELECT SLEEP(1)');
    },
    mysql2: function() {
      return knex.raw('SELECT SLEEP(1)');
    },
    mssql: function() {
      return knex.raw("WAITFOR DELAY '00:00:01'");
    },
    oracledb: function() {
      return knex.raw('begin dbms_lock.sleep(1); end;');
    },
  };
  if (!testQueries.hasOwnProperty(driverName)) {
    throw new Error('Missing test query for driver: ' + driverName);
  }
  var query = testQueries[driverName]();
  return query
    .timeout(200)
    .then(function() {
      // Resolving means the timeout never fired -- fail loudly.
      expect(true).to.equal(false);
    })
    .catch(function(error) {
      expect(_.pick(error, 'timeout', 'name', 'message')).to.deep.equal({
        timeout: 200,
        name: 'TimeoutError',
        message:
          'Defined query timeout of 200ms exceeded when running query.',
      });
    });
});
it('.timeout(ms, {cancel: true}) should throw TimeoutError and cancel slow query', function() {
  var driverName = knex.client.driverName;
  if (driverName === 'sqlite3') {
    return;
  } //TODO -- No built-in support for sleeps
  if (/redshift/.test(driverName)) {
    return;
  }

  // There's unexpected behavior caused by knex releasing a connection back
  // to the pool because of a timeout when a long query is still running.
  // A subsequent query will acquire the connection (still in-use) and hang
  // until the first query finishes. Setting a sleep time longer than the
  // mocha timeout exposes this behavior.
  var testQueries = {
    pg: function() {
      return knex.raw('SELECT pg_sleep(10)');
    },
    mysql: function() {
      return knex.raw('SELECT SLEEP(10)');
    },
    mysql2: function() {
      return knex.raw('SELECT SLEEP(10)');
    },
    mssql: function() {
      return knex.raw("WAITFOR DELAY '00:00:10'");
    },
    oracledb: function() {
      return knex.raw('begin dbms_lock.sleep(10); end;');
    },
  };
  if (!testQueries.hasOwnProperty(driverName)) {
    throw new Error('Missing test query for driverName: ' + driverName);
  }
  var query = testQueries[driverName]();
  function addTimeout() {
    return query.timeout(200, { cancel: true });
  }
  // Only mysql query cancelling supported for now
  if (!_.startsWith(driverName, 'mysql')) {
    // Other dialects must refuse the cancel option synchronously.
    expect(addTimeout).to.throw(
      'Query cancelling not supported for this dialect'
    );
    return;
  }
  return addTimeout()
    .then(function() {
      // Resolving means the timeout never fired -- fail loudly.
      expect(true).to.equal(false);
    })
    .catch(function(error) {
      expect(_.pick(error, 'timeout', 'name', 'message')).to.deep.equal({
        timeout: 200,
        name: 'TimeoutError',
        message:
          'Defined query timeout of 200ms exceeded when running query.',
      });
      // Ensure sleep command is removed.
      // This query will hang if a connection gets released back to the pool
      // too early.
      // 50ms delay since killing query doesn't seem to have immediate effect to the process listing
      return Promise.delay(50)
        .then(function() {
          return knex.raw('SHOW PROCESSLIST');
        })
        .then(function(results) {
          var processes = results[0];
          var sleepProcess = _.find(processes, {
            Info: 'SELECT SLEEP(10)',
          });
          expect(sleepProcess).to.equal(undefined);
        });
    });
});
it('.timeout(ms, {cancel: true}) should throw error if cancellation cannot acquire connection', function() {
  // Only mysql query cancelling supported for now
  var driverName = knex.client.config.driverName;
  if (!_.startsWith(driverName, 'mysql')) {
    return;
  }
  //To make this test easier, I'm changing the pool settings to max 1.
  // NOTE: `_.clone` is shallow, so the pool object must be copied before
  // overriding min/max -- otherwise the suite-wide knex's config.pool
  // would be mutated as well.
  var knexConfig = _.clone(knex.client.config);
  knexConfig.pool = _.assign({}, knexConfig.pool, { min: 0, max: 1 });
  var knexDb = new Knex(knexConfig);
  // With a single-connection pool held by the sleeping query, the
  // cancellation cannot acquire a connection and must report failure.
  return knexDb
    .raw('SELECT SLEEP(1)')
    .timeout(1, { cancel: true })
    .then(
      function() {
        throw new Error("Shouldn't have gotten here.");
      },
      function(error) {
        expect(_.pick(error, 'timeout', 'name', 'message')).to.deep.equal({
          timeout: 1,
          name: 'TimeoutError',
          message:
            'After query timeout of 1ms exceeded, cancelling of query failed.',
        });
      }
    )
    .finally(() => knexDb.destroy());
});
it('Event: query-response', function() {
  var queryCount = 0;
  var onQueryResponse = function(response, obj, builder) {
    queryCount++;
    expect(response).to.be.an('array');
    expect(obj).to.be.an('object');
    expect(obj.__knexUid).to.be.a('string');
    expect(obj.__knexQueryUid).to.be.a('string');
    expect(builder).to.be.an('object');
  };
  // Listener is registered both on the knex instance and on each builder,
  // so every query below increments the counter twice (2 queries => 4).
  knex.on('query-response', onQueryResponse);
  return knex('accounts')
    .select()
    .on('query-response', onQueryResponse)
    .then(function() {
      return knex.transaction(function(tr) {
        return tr('accounts')
          .select()
          .on('query-response', onQueryResponse); //Transactions should emit the event as well
      });
    })
    .then(function() {
      knex.removeListener('query-response', onQueryResponse);
      expect(queryCount).to.equal(4);
    });
});
it('Event: query-error', function() {
  // The listener is attached both to the knex instance and to the raw
  // builder, so the failing query must trigger it exactly twice.
  var errorEventCount = 0;
  function onQueryError(error, obj) {
    errorEventCount++;
    expect(obj).to.be.an('object');
    expect(obj.__knexUid).to.be.a('string');
    expect(obj.__knexQueryUid).to.be.a('string');
    expect(error).to.be.an('object');
  }
  knex.on('query-error', onQueryError);
  var query = knex.raw('Broken query').on('query-error', onQueryError);
  return query
    .then(function() {
      //Should not be resolved
      expect(true).to.equal(false);
    })
    .catch(function() {
      knex.removeListener('query-error', onQueryError);
      expect(errorEventCount).to.equal(2);
    });
});
it('Event: start', function() {
  // The 'start' event lets a listener alter the builder before compilation.
  return knex('accounts')
    .insert({ last_name: 'Start event test' })
    .then(function() {
      var qb = knex('accounts').select();
      qb.on('start', function(builder) {
        // Narrow the query to the single seeded row before it compiles.
        builder.where('last_name', 'Start event test').first();
      });
      return qb;
    })
    .then(function(row) {
      expect(row).to.exist;
      expect(row.last_name).to.equal('Start event test');
    });
});
it("Event 'query' should not emit native sql string", function() {
  var builder = knex('accounts')
    .where('id', 1)
    .select();
  builder.on('query', function(obj) {
    var nativeSql = builder.toSQL().toNative().sql;
    var genericSql = builder.toSQL().sql;
    //Only assert if they diff to begin with.
    //IE Maria does not diff
    if (nativeSql !== genericSql) {
      expect(obj.sql).to.equal(genericSql);
      expect(obj.sql).to.not.equal(nativeSql);
    }
  });
  return builder;
});
describe('async stack traces', function() {
  // Enable the option only for this group; `after` restores the default.
  before(() => {
    knex.client.config.asyncStackTraces = true;
  });
  after(() => {
    delete knex.client.config.asyncStackTraces;
  });
  it('should capture stack trace on query builder instantiation', () => {
    return knex('some_nonexisten_table')
      .select()
      .catch((err) => {
        expect(err.stack.split('\n')[1]).to.match(
          /at Function\.queryBuilder \(/
        ); // the index 1 might need adjustment if the code is refactored
        expect(typeof err.originalStack).to.equal('string');
      });
  });
  it('should capture stack trace on raw query', () => {
    return knex.raw('select * from some_nonexisten_table').catch((err) => {
      expect(err.stack.split('\n')[2]).to.match(/at Function\.raw \(/); // the index 2 might need adjustment if the code is refactored
      expect(typeof err.originalStack).to.equal('string');
    });
  });
  it('should capture stack trace on schema builder', () => {
    return knex.schema
      .renameTable('some_nonexisten_table', 'whatever')
      .catch((err) => {
        expect(err.stack.split('\n')[1]).to.match(/client\.schemaBuilder/); // the index 1 might need adjustment if the code is refactored
        expect(typeof err.originalStack).to.equal('string');
      });
  });
});
it('Overwrite knex.logger functions using config', () => {
  var knexConfig = _.clone(knex.client.config);
  var callCount = 0;
  // Every overridden logger function must receive the expected message.
  var assertCall = function(expectedMessage, message) {
    expect(message).to.equal(expectedMessage);
    callCount++;
  };
  knexConfig.log = {
    warn: assertCall.bind(null, 'test'),
    error: assertCall.bind(null, 'test'),
    debug: assertCall.bind(null, 'test'),
    deprecate: assertCall.bind(
      null,
      'test is deprecated, please use test2'
    ),
  };
  //Sqlite warning message
  knexConfig.useNullAsDefault = true;
  var knexDb = new Knex(knexConfig);
  knexDb.client.logger.warn('test');
  knexDb.client.logger.error('test');
  knexDb.client.logger.debug('test');
  knexDb.client.logger.deprecate('test', 'test2');
  expect(callCount).to.equal(4);
  // Tear down the extra knex instance so its connection pool doesn't leak
  // (the original test never destroyed it).
  return knexDb.destroy();
});
});
};