Various fixes to mssql dialect (#2653)

* Fixed float type of mssql to be float

* Fixed many tests where the postgres test was not actually run at all

* Migrations to be mssql compatible

The MSSQL driver doesn't handle multiple queries being sent to the same transaction concurrently.

* Prevented mssql failing when invalid schema builder was executed by accident

Instead of trying to generate SQL from broken schema calls, just let the exception propagate before query compiling is started

* Fixed mssql trx rollback to always throw an error

Also modified some connection test query to be mssql compatible

* Fixed various bugs from MSSQL driver to make tests run

* Fixed mssql unique index to be compatible with other dialect implementations

* Enable running mssql tests on CI

* Test for #2588

* Updated tests to not be dependent on tables left over from previous test runs

* Trying to make mssql server work on travis
This commit is contained in:
Mikael Lepistö 2018-06-29 10:47:06 +03:00 committed by GitHub
parent ec8550249f
commit b349ac4a8c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
27 changed files with 381 additions and 192 deletions

View File

@ -10,13 +10,13 @@ cache:
matrix:
include:
- node_js: "10"
env: TEST_ORACLEDB=true DB="maria mysql mysql2 postgres sqlite3 oracledb" CXX=g++-4.8 KNEX_TEST_TIMEOUT=60000 ORACLE_HOME=/u01/app/oracle/product/11.2.0/xe ORACLE_SID=XE OCI_LIB_DIR=/u01/app/oracle/product/11.2.0/xe/lib LD_LIBRARY_PATH=/u01/app/oracle/product/11.2.0/xe/lib
env: TEST_ORACLEDB=true DB="mssql mysql mysql2 postgres sqlite3 oracledb" CXX=g++-4.8 KNEX_TEST_TIMEOUT=60000 ORACLE_HOME=/u01/app/oracle/product/11.2.0/xe ORACLE_SID=XE OCI_LIB_DIR=/u01/app/oracle/product/11.2.0/xe/lib LD_LIBRARY_PATH=/u01/app/oracle/product/11.2.0/xe/lib
- node_js: "8"
env: TEST_ORACLEDB=true DB="maria mysql mysql2 postgres sqlite3 oracledb" CXX=g++-4.8 KNEX_TEST_TIMEOUT=60000 ORACLE_HOME=/u01/app/oracle/product/11.2.0/xe ORACLE_SID=XE OCI_LIB_DIR=/u01/app/oracle/product/11.2.0/xe/lib LD_LIBRARY_PATH=/u01/app/oracle/product/11.2.0/xe/lib
env: TEST_ORACLEDB=true DB="mssql mysql mysql2 postgres sqlite3 oracledb" CXX=g++-4.8 KNEX_TEST_TIMEOUT=60000 ORACLE_HOME=/u01/app/oracle/product/11.2.0/xe ORACLE_SID=XE OCI_LIB_DIR=/u01/app/oracle/product/11.2.0/xe/lib LD_LIBRARY_PATH=/u01/app/oracle/product/11.2.0/xe/lib
- node_js: "6"
env: TEST_ORACLEDB=true DB="maria mysql mysql2 postgres sqlite3 oracledb" CXX=g++-4.8 KNEX_TEST_TIMEOUT=60000 ORACLE_HOME=/u01/app/oracle/product/11.2.0/xe ORACLE_SID=XE OCI_LIB_DIR=/u01/app/oracle/product/11.2.0/xe/lib LD_LIBRARY_PATH=/u01/app/oracle/product/11.2.0/xe/lib
env: TEST_ORACLEDB=true DB="mssql mysql mysql2 postgres sqlite3 oracledb" CXX=g++-4.8 KNEX_TEST_TIMEOUT=60000 ORACLE_HOME=/u01/app/oracle/product/11.2.0/xe ORACLE_SID=XE OCI_LIB_DIR=/u01/app/oracle/product/11.2.0/xe/lib LD_LIBRARY_PATH=/u01/app/oracle/product/11.2.0/xe/lib
- node_js: "7"
env: DB="maria mysql mysql2 postgres sqlite3" CXX=g++-4.8 KNEX_TEST_TIMEOUT=60000
env: DB="mssql mysql mysql2 postgres sqlite3" CXX=g++-4.8 KNEX_TEST_TIMEOUT=60000
install: npm i
@ -28,6 +28,9 @@ before_install:
before_script:
- psql -c 'create database knex_test;' -U postgres
- mysql -e 'create database knex_test;'
- npm run mssql:init
- docker ps -a
- netstat -tulpn
after_script:
- npm run-script coveralls

View File

@ -81,6 +81,7 @@
"test": "npm run pre_test && istanbul --config=test/.istanbul.yml cover node_modules/mocha/bin/_mocha -- --check-leaks -t 10000 -b -R spec test/index.js && npm run tape",
"oracledb:test": "docker rmi -f --no-prune knex-test-oracledb && docker build -f scripts/oracle-tests-Dockerfile --tag knex-test-oracledb . && docker run -i -t knex-test-oracledb",
"mssql:init": "docker-compose -f scripts/mssql-docker-compose.yml up --no-start && docker-compose -f scripts/mssql-docker-compose.yml start",
"postmssql:init": "node scripts/wait-for-mssql-server.js",
"mssql:test": "DB=mssql npm test",
"mssql:destroy": "docker-compose -f scripts/mssql-docker-compose.yml stop",
"stress:init": "docker-compose -f scripts/stress-test/docker-compose.yml up --no-start && docker-compose -f scripts/stress-test/docker-compose.yml start",

View File

@ -37,7 +37,7 @@ const mssql = Knex({
});
/* TODO: figure out how to nicely install oracledb node driver on osx
const mysql = Knex({
const oracledb = Knex({
client: 'oracledb',
connection: {
user : "travis",

View File

@ -0,0 +1,51 @@
// Polls the MSSQL server until a connection to the knex_test database
// succeeds, retrying every two seconds and giving up after 50 attempts.
// Run as the "postmssql:init" step so tests don't start before the
// dockerized server is actually accepting connections.
const Connection = require('tedious').Connection;
const config = {
  userName: "sa",
  password: "S0meVeryHardPassword",
  server: "localhost",
  options: {
    database: "knex_test",
  }
};
// Set once a connect succeeds, so the 'end' handler stops rescheduling.
let didConnect = false;
let tryCount = 0;
function tryToConnect() {
  tryCount++;
  if (tryCount > 50) {
    // Exit 0 on purpose: an unreachable server should not fail this init
    // step — the test run itself will report the failure.
    console.log("Giving up... it fails if it fails");
    process.exit(0);
  }
  console.log("Connecting... to mssql");
  const connection = new Connection(config);
  connection.on('end', () => {
    if (!didConnect) {
      console.log("Couldn't connect yet... try again in two secs...");
      setTimeout(tryToConnect, 2000);
    }
  });
  connection.on('error', () => {
    // prevent leaking errors.. driver seems to sometimes emit error event,
    // sometimes connect event with error
    // and sometimes just closes connection without error / connect events
    // (debug event says that socket was ended and that's it...)
  });
  connection.on('connect', (err) => {
    if (!err) {
      console.log("Connecting mssql server was a great success!");
      didConnect = true;
    } else {
      console.log("Error was passed to connect event.");
    }
    // Closing triggers 'end', which reschedules if we never connected.
    connection.close();
  });
}
tryToConnect();

View File

@ -29,10 +29,10 @@ function Client_MSSQL(config = {}) {
}
// mssql always creates pool :( lets try to unpool it as much as possible
config.pool = {
this.mssqlPoolSettings = {
min: 1,
max: 1,
idleTimeoutMillis: Number.MAX_SAFE_INTEGER,
idleTimeoutMillis: Number.MAX_SAFE_INTEGER,
evictionRunIntervalMillis: 0
};
@ -199,7 +199,10 @@ assign(Client_MSSQL.prototype, {
// connection needs to be added to the pool.
acquireRawConnection() {
return new Promise((resolver, rejecter) => {
const connection = new this.driver.ConnectionPool(this.connectionSettings);
const settings = Object.assign({}, this.connectionSettings);
settings.pool = this.mssqlPoolSettings;
const connection = new this.driver.ConnectionPool(settings);
connection.connect((err) => {
if (err) {
return rejecter(err)

View File

@ -165,11 +165,14 @@ assign(QueryCompiler_MSSQL.prototype, {
},
forUpdate() {
return 'with (READCOMMITTEDLOCK)';
// this doesn't work exactly as it should, one should also mention index while locking
// https://stackoverflow.com/a/9818448/360060
return 'with (UPDLOCK)';
},
forShare() {
return 'with (NOLOCK)';
// http://www.sqlteam.com/article/introduction-to-locking-in-sql-server
return 'with (HOLDLOCK)';
},
// Compiles a `columnInfo` query.

View File

@ -24,13 +24,12 @@ assign(ColumnCompiler_MSSQL.prototype, {
bigint: 'bigint',
double(precision, scale) {
if (!precision) return 'decimal'
return `decimal(${this._num(precision, 8)}, ${this._num(scale, 2)})`
return 'float';
},
floating(precision, scale) {
if (!precision) return 'decimal'
return `decimal(${this._num(precision, 8)}, ${this._num(scale, 2)})`
// ignore precision / scale which is mysql specific stuff
return `float`;
},
integer(length) {

View File

@ -19,7 +19,7 @@ inherits(TableCompiler_MSSQL, TableCompiler);
assign(TableCompiler_MSSQL.prototype, {
createAlterTableMethods: ['foreign', 'primary', 'unique'],
createAlterTableMethods: ['foreign', 'primary'],
createQuery (columns, ifNot) {
const createStatement = ifNot ? `if object_id('${this.tableName()}', 'U') is null CREATE TABLE ` : 'CREATE TABLE ';
const sql = createStatement + this.tableName() + (this._formatting ? ' (\n ' : ' (') + columns.sql.join(this._formatting ? ',\n ' : ', ') + ')';
@ -118,11 +118,16 @@ assign(TableCompiler_MSSQL.prototype, {
unique (columns, indexName) {
indexName = indexName ? this.formatter.wrap(indexName) : this._indexCommand('unique', this.tableNameRaw, columns);
if (!this.forCreate) {
this.pushQuery(`CREATE UNIQUE INDEX ${indexName} ON ${this.tableName()} (${this.formatter.columnize(columns)})`);
} else {
this.pushQuery(`CONSTRAINT ${indexName} UNIQUE (${this.formatter.columnize(columns)})`);
if (!Array.isArray(columns)) {
columns = [columns];
}
const whereAllTheColumnsAreNotNull = columns.map(column => this.formatter.columnize(column) + ' IS NOT NULL').join(' AND ');
// make unique constraint that allows null https://stackoverflow.com/a/767702/360060
// to be more or less compatible with other DBs (if any of the columns is NULL then "duplicates" are allowed)
this.pushQuery(`CREATE UNIQUE INDEX ${indexName} ON ${this.tableName()} (${this.formatter.columnize(columns)}) WHERE ${whereAllTheColumnsAreNotNull}`);
},
// Compile a drop index command.
@ -146,7 +151,7 @@ assign(TableCompiler_MSSQL.prototype, {
// Compile a drop unique key command.
dropUnique (column, indexName) {
indexName = indexName ? this.formatter.wrap(indexName) : this._indexCommand('unique', this.tableNameRaw, column);
this.pushQuery(`ALTER TABLE ${this.tableName()} DROP CONSTRAINT ${indexName}`);
this.pushQuery(`DROP INDEX ${indexName} ON ${this.tableName()}`);
}
})

View File

@ -1,5 +1,6 @@
import Promise from 'bluebird';
import Transaction from '../../transaction';
import { isUndefined } from 'lodash'
const debug = require('debug')('knex:tx')
export default class Transaction_MSSQL extends Transaction {
@ -32,7 +33,13 @@ export default class Transaction_MSSQL extends Transaction {
debug('%s: rolling back', this.txid)
return conn.tx_.rollback()
.then(
() => this._rejecter(error),
() => {
let err = error;
if(isUndefined(error)) {
err = new Error(`Transaction rejected with non-error: ${error}`)
}
this._rejecter(err)
},
err => {
if (error) err.originalError = error;
return this._rejecter(err);
@ -58,6 +65,7 @@ export default class Transaction_MSSQL extends Transaction {
configConnection ||
t.client.acquireConnection();
}).tap(function(conn) {
conn.__knexTxId = t.txid;
if (!t.outerTx) {
t.conn = conn
conn.tx_ = conn.transaction()

View File

@ -198,7 +198,6 @@ export default class Migrator {
_getLock(trx) {
const transact = trx ? fn => fn(trx) : fn => this.knex.transaction(fn);
return transact(trx => {
return this._isLocked(trx)
.then(isLocked => {

View File

@ -213,7 +213,7 @@ module.exports = function(knex) {
.truncate()
.testSql(function(tester) {
tester('mysql', 'truncate `test_table_two`');
tester('postgresql', 'truncate "test_table_two" restart identity');
tester('pg', 'truncate "test_table_two" restart identity');
tester('pg-redshift', 'truncate "test_table_two"');
tester('sqlite3', "delete from `test_table_two`");
tester('oracle', "truncate table \"test_table_two\"");
@ -289,7 +289,7 @@ module.exports = function(knex) {
"type": "char"
}
});
tester('postgresql', 'select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = current_schema',
tester('pg', 'select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = current_schema()',
null, {
"enum_value": {
"defaultValue": null,
@ -369,7 +369,7 @@ module.exports = function(knex) {
});
});
it('gets the columnInfo', function() {
it('gets the columnInfo with columntype', function() {
return knex('datatype_test').columnInfo('uuid').testSql(function(tester) {
tester('mysql',
'select * from information_schema.columns where table_name = ? and table_schema = ?',
@ -379,7 +379,7 @@ module.exports = function(knex) {
"nullable": false,
"type": "char"
});
tester('postgresql', 'select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = current_schema',
tester('pg', 'select * from information_schema.columns where table_name = ? and table_catalog = ? and table_schema = current_schema()',
null, {
"defaultValue": null,
"maxLength": null,
@ -465,7 +465,7 @@ module.exports = function(knex) {
t.renameColumn('about', 'about_col');
}).testSql(function(tester) {
tester('mysql', ["show fields from `accounts` where field = ?"]);
tester('postgresql', ["alter table \"accounts\" rename \"about\" to \"about_col\""]);
tester('pg', ["alter table \"accounts\" rename \"about\" to \"about_col\""]);
tester('pg-redshift', ["alter table \"accounts\" rename \"about\" to \"about_col\""]);
tester('sqlite3', ["PRAGMA table_info(`accounts`)"]);
tester('oracle', ["alter table \"accounts\" rename column \"about\" to \"about_col\""]);
@ -503,7 +503,7 @@ module.exports = function(knex) {
t.dropColumn('first_name');
}).testSql(function(tester) {
tester('mysql', ["alter table `accounts` drop `first_name`"]);
tester('postgresql', ['alter table "accounts" drop column "first_name"']);
tester('pg', ['alter table "accounts" drop column "first_name"']);
tester('pg-redshift', ['alter table "accounts" drop column "first_name"']);
tester('sqlite3', ["PRAGMA table_info(`accounts`)"]);
tester('oracle', ['alter table "accounts" drop ("first_name")']);
@ -720,10 +720,8 @@ module.exports = function(knex) {
});
it('Event: start', function() {
// On redshift, cannot set an identity column to a value
if (/redshift/i.test(knex.client.dialect)) { return; }
return knex('accounts')
.insert({id: '999', last_name: 'Start'})
.insert({last_name: 'Start event test'})
.then(function() {
var queryBuilder = knex('accounts').select();
@ -731,7 +729,7 @@ module.exports = function(knex) {
//Alter builder prior to compilation
//Select only one row
builder
.where('id', '999')
.where('last_name', 'Start event test')
.first();
});
@ -739,8 +737,7 @@ module.exports = function(knex) {
})
.then(function(row) {
expect(row).to.exist;
expect(String(row.id)).to.equal('999');
expect(row.last_name).to.equal('Start');
expect(row.last_name).to.equal('Start event test');
});
});

View File

@ -26,7 +26,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select sum("logins") from "accounts"',
[],
[{
@ -84,7 +84,7 @@ module.exports = function(knex) {
// sqlite: 1.6666666666666667
tester('sqlite3', 'select avg(`logins`) from `accounts`', [], checkResRange.bind(null, 'avg(`logins`)'));
// postgres: '1.6666666666666667'
tester('postgresql', 'select avg("logins") from "accounts"', [], checkResRange.bind(null, 'avg'));
tester('pg', 'select avg("logins") from "accounts"', [], checkResRange.bind(null, 'avg'));
// postgres: '1.6666666666666667'
tester('pg-redshift', 'select avg("logins") from "accounts"', [], checkResRangeMssql.bind(null, 'avg'));
// oracle: 1.66666666666667
@ -107,7 +107,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select count("id") from "accounts"',
[],
[{
@ -164,7 +164,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select count("id"), max("logins"), min("logins") from "accounts"',
[],
[{
@ -229,13 +229,13 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select count(distinct "id"), sum(distinct "logins"), avg(distinct "logins") from "accounts"',
[],
[{
count: '6',
sum: 3,
avg: 1.5
sum: "3",
avg: "1.5000000000000000"
}]
);
tester(
@ -298,7 +298,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select count(distinct("id", "logins")) from "accounts"',
[],
[{
@ -325,7 +325,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select count(distinct("id", "logins")) as "count" from "accounts"',
[],
[{
@ -350,7 +350,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select count("id") from "accounts" group by "logins" order by "logins" asc',
[],
[{
@ -411,7 +411,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select count("id") from "accounts" group by "first_name"',
[],
[{

View File

@ -18,7 +18,7 @@ module.exports = function(knex) {
1
);
tester(
'postgresql',
'pg',
'delete from "accounts" where "id" = ?',
[1],
1
@ -50,7 +50,7 @@ module.exports = function(knex) {
});
});
it('should allow returning for deletes in postgresql', function() {
it('should allow returning for deletes in postgresql and mssql', function() {
return knex('accounts')
.where('id', 2)
.del('*')
@ -62,7 +62,7 @@ module.exports = function(knex) {
1
);
tester(
'postgresql',
'pg',
'delete from "accounts" where "id" = ? returning *',
[2],
[{
@ -71,6 +71,7 @@ module.exports = function(knex) {
last_name: 'User',
email: 'test2@example.com',
logins: 1,
balance: 0,
about: 'Lorem ipsum Dolore labore incididunt enim.',
created_at: d,
updated_at: d,
@ -105,6 +106,7 @@ module.exports = function(knex) {
last_name: 'User',
email: 'test2@example.com',
logins: 1,
balance: 0,
about: 'Lorem ipsum Dolore labore incididunt enim.',
created_at: d,
updated_at: d,

View File

@ -28,7 +28,7 @@ module.exports = function(knex) {
[1]
);
tester(
'postgresql',
'pg',
'insert into "accounts" ("about", "created_at", "email", "first_name", "last_name", "logins", "updated_at") values (?, ?, ?, ?, ?, ?, ?) returning "id"',
['Lorem ipsum Dolore labore incididunt enim.', d,'test@example.com','Test','User', 1, d],
['1']
@ -87,7 +87,7 @@ module.exports = function(knex) {
[2]
);
tester(
'postgresql',
'pg',
'insert into "accounts" ("about", "created_at", "email", "first_name", "last_name", "logins", "updated_at") values (?, ?, ?, ?, ?, ?, ?), (?, ?, ?, ?, ?, ?, ?) returning "id"',
['Lorem ipsum Dolore labore incididunt enim.', d,'test2@example.com','Test','User',1, d,'Lorem ipsum Dolore labore incididunt enim.', d,'test3@example.com','Test','User',2, d],
['2','3']
@ -189,7 +189,7 @@ module.exports = function(knex) {
[4]
);
tester(
'postgresql',
'pg',
'insert into "accounts" ("about", "created_at", "email", "first_name", "last_name", "logins", "updated_at") values (?, ?, ?, ?, ?, ?, ?), (?, ?, ?, ?, ?, ?, ?) returning "id"',
['Lorem ipsum Dolore labore incididunt enim.', d,'test4@example.com','Test','User',2, d,'Lorem ipsum Dolore labore incididunt enim.', d,'test5@example.com','Test','User',2, d],
['4','5']
@ -260,7 +260,7 @@ module.exports = function(knex) {
['Lorem ipsum Dolore labore incididunt enim.', d, 'test5@example.com','Test','User', 2, d]
);
tester(
'postgresql',
'pg',
'insert into "accounts" ("about", "created_at", "email", "first_name", "last_name", "logins", "updated_at") values (?, ?, ?, ?, ?, ?, ?) returning "id"',
['Lorem ipsum Dolore labore incididunt enim.', d, 'test5@example.com','Test','User', 2, d]
);
@ -307,7 +307,7 @@ module.exports = function(knex) {
[7]
);
tester(
'postgresql',
'pg',
'insert into "accounts" ("about", "created_at", "email", "first_name", "last_name", "logins", "updated_at") values (?, ?, ?, ?, ?, ?, ?) returning "id"',
['Lorem ipsum Dolore labore incididunt enim.', d, 'test6@example.com','Test','User',2, d],
['7']
@ -351,7 +351,7 @@ module.exports = function(knex) {
['d']
);
tester(
'postgresql',
'pg',
'insert into "datatype_test" ("enum_value") values (?)',
['d']
);
@ -435,7 +435,7 @@ module.exports = function(knex) {
[1]
);
tester(
'postgresql',
'pg',
'insert into "test_default_table" default values returning "id"',
[],
[1]
@ -486,7 +486,7 @@ module.exports = function(knex) {
[1]
);
tester(
'postgresql',
'pg',
'insert into "test_default_table2" default values returning "id"',
[],
[1]
@ -541,7 +541,7 @@ module.exports = function(knex) {
// [1]
// );
// tester(
// 'postgresql',
// 'pg',
// 'insert into "test_default_table3" ("id") values (default), (default) returning "id"',
// [],
// [1, 2]
@ -586,7 +586,7 @@ module.exports = function(knex) {
[4]
);
tester(
'postgresql',
'pg',
'insert into "test_table_two" ("account_id", "details", "status") values (?, ?, ?) returning "account_id", "details"',
[10,'Lorem ipsum Minim nostrud Excepteur consectetur enim ut qui sint in veniam in nulla anim do cillum sunt voluptate Duis non incididunt.',0],
[{account_id: 10, details: 'Lorem ipsum Minim nostrud Excepteur consectetur enim ut qui sint in veniam in nulla anim do cillum sunt voluptate Duis non incididunt.'}]
@ -641,7 +641,7 @@ module.exports = function(knex) {
var returningColumn = '*';
return knex('test_table_two').insert(insertData, returningColumn).testSql(function(tester) {
tester(
'postgresql',
'pg',
'insert into "test_table_two" ("account_id", "details", "status") values (?, ?, ?) returning *',
[10,'Lorem ipsum Minim nostrud Excepteur consectetur enim ut qui sint in veniam in nulla anim do cillum sunt voluptate Duis non incididunt.',0],
[{

View File

@ -52,7 +52,7 @@ module.exports = function(knex) {
details: ''
}]
);
tester('postgresql',
tester('pg',
'select "accounts".*, "test_table_two"."details" from "accounts" inner join "test_table_two" on "accounts"."id" = "test_table_two"."account_id" order by "accounts"."id" asc',
[],
[
@ -348,7 +348,7 @@ module.exports = function(knex) {
details: null
}]
);
tester('postgresql',
tester('pg',
'select "accounts".*, "test_table_two"."details" from "accounts" left join "test_table_two" on "accounts"."id" = "test_table_two"."account_id" order by "accounts"."id" asc',
[],
[
@ -849,7 +849,7 @@ module.exports = function(knex) {
json_data: null
}]
);
tester('postgresql',
tester('pg',
'select * from "accounts" left join "test_table_two" on "accounts"."id" = "test_table_two"."account_id" or "accounts"."email" = "test_table_two"."details" order by "accounts"."id" asc',
[],
[
@ -1274,7 +1274,7 @@ module.exports = function(knex) {
e2: 'test2@example.com'
}]
);
tester('postgresql',
tester('pg',
'select "accounts"."email" as "e1", "a2"."email" as "e2" from "accounts" inner join "accounts" as "a2" on "a2"."email" <> "accounts"."email" where "a2"."email" = ? order by "e1" asc limit ?',
['test2@example.com', 5],
[
@ -1425,7 +1425,7 @@ module.exports = function(knex) {
e2: 'test2@example.com'
}]
);
tester('postgresql',
tester('pg',
'select "accounts"."email" as "e1", "a2"."email" as "e2" from "accounts" inner join "accounts" as "a2" on "accounts"."email" <> "a2"."email" or "accounts"."id" = 2 where "a2"."email" = ? order by "e1" asc limit ?',
['test2@example.com', 5],
[{
@ -1543,7 +1543,7 @@ module.exports = function(knex) {
}
);
tester(
'postgresql',
'pg',
'select "account_id" from "accounts" cross join "test_table_two" order by "account_id" asc',
[],
function (res) {

View File

@ -26,7 +26,7 @@ module.exports = function(knex) {
[1, 2, 3, 4, 5, 7]
);
tester(
'postgresql',
'pg',
'select "id" from "accounts" order by "id" asc',
[],
['1', '2', '3', '4', '5', '7']
@ -68,7 +68,7 @@ module.exports = function(knex) {
[1, 2, 3, 4, 5, 7]
);
tester(
'postgresql',
'pg',
'select "accounts"."id" from "accounts" order by "accounts"."id" asc',
[],
['1', '2', '3', '4', '5', '7']
@ -110,7 +110,7 @@ module.exports = function(knex) {
[3, 4, 5, 7]
);
tester(
'postgresql',
'pg',
'select "id" from "accounts" order by "id" asc offset ?',
[2],
['3', '4', '5', '7']
@ -152,7 +152,7 @@ module.exports = function(knex) {
{ id: 1, first_name: 'Test' }
);
tester(
'postgresql',
'pg',
'select "id", "first_name" from "accounts" order by "id" asc limit ?',
[1],
{ id: '1', first_name: 'Test' }
@ -350,7 +350,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select "first_name", "last_name" from "accounts" where "id" = ?',
[1],
[{
@ -413,7 +413,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select "first_name", "last_name" from "accounts" where "id" = ?',
[1],
[{
@ -472,7 +472,7 @@ module.exports = function(knex) {
[1]
);
tester(
'postgresql',
'pg',
'select "email", "logins" from "accounts" where "id" > ?',
[1]
);
@ -523,7 +523,7 @@ module.exports = function(knex) {
}]
);
tester(
'postgresql',
'pg',
'select * from "accounts" where "id" = ?',
[1],
[{
@ -623,7 +623,7 @@ module.exports = function(knex) {
[]
);
tester(
'postgresql',
'pg',
'select "first_name", "email" from "accounts" where "id" is null',
[],
[]
@ -669,7 +669,7 @@ module.exports = function(knex) {
[]
);
tester(
'postgresql',
'pg',
'select * from "accounts" where "id" = ?',
[0],
[]
@ -761,8 +761,8 @@ module.exports = function(knex) {
details: 'One, Two, Zero',
status: 0
}]);
tester('postgresql',
'select * from "composite_key_test" where ("column_a", "column_b") in ((?, ?),(?, ?)) order by "status" desc',
tester('pg',
'select * from "composite_key_test" where ("column_a", "column_b") in ((?, ?), (?, ?)) order by "status" desc',
[1,1,1,2],
[{
column_a: 1,
@ -826,8 +826,8 @@ module.exports = function(knex) {
details: 'One, One, One',
status: 1
}]);
tester('postgresql',
'select * from "composite_key_test" where "status" = ? and ("column_a", "column_b") in ((?, ?),(?, ?))',
tester('pg',
'select * from "composite_key_test" where "status" = ? and ("column_a", "column_b") in ((?, ?), (?, ?))',
[1,1,1,1,2],
[{
column_a: 1,
@ -956,6 +956,55 @@ module.exports = function(knex) {
})
});
});
it('select for update locks selected row', function() {
if (knex.client.dialect === 'sqlite3') {
return;
}
return knex('test_default_table').insert({ string: 'making sure there is a row to lock'})
.then(() => {
return knex.transaction(trx => {
// select all from test table and lock
return trx('test_default_table').forUpdate().then((res) => {
// try to select stuff from table in other connection should just hang...
return knex('test_default_table').forUpdate().timeout(100);
});
}).then(res => {
expect("Second query should have timed out").to.be.false;
}).catch(err => {
expect(err.message).to.be.contain('Defined query timeout of 100ms exceeded when running query');
});
});
});
it('select for share prevents updating in other transaction', function() {
if (knex.client.dialect === 'sqlite3' || knex.client.dialect === 'oracle') {
return;
}
return knex('test_default_table').insert({ string: 'making sure there is a row to lock'})
.then(() => {
return knex.transaction(trx => {
// select all from test table and lock
return trx('test_default_table').forShare().then((res) => {
// try to update row that was selected for share should just hang...
return knex.transaction(trx2 => {
return trx2('test_default_table').update({ string: 'foo' }).timeout(100);
});
});
}).then(res => {
expect("Second query should have timed out").to.be.false;
}).catch(err => {
// mssql fails because it tries to rollback at the same time the update query is running
// hopefully for share really works though...
if (knex.client.dialect == 'mssql') {
expect(err.message).to.be.contain("Can't rollback transaction. There is a request in progress");
} else {
expect(err.message).to.be.contain('Defined query timeout of 100ms exceeded when running query');
}
});
});
});
});
};

View File

@ -340,10 +340,11 @@ module.exports = function(knex) {
//Create a transaction that will occupy the only available connection, and avoid trx.commit.
return knexDb.transaction(function(trx) {
var sql = 'SELECT 1 = 1';
var sql = 'SELECT 1';
if (knex.client.dialect === 'oracle') {
sql = 'SELECT 1 FROM DUAL';
}
trx.raw(sql).then(function () {
//No connection is available, so try issuing a query without transaction.
//Since there is no available connection, it should throw a timeout error based on `aquireConnectionTimeout` from the knex config.

View File

@ -21,7 +21,7 @@ module.exports = function(knex) {
1
);
tester(
'postgresql',
'pg',
'update "accounts" set "first_name" = ?, "last_name" = ?, "email" = ? where "id" = ?',
['User','Test','test100@example.com',1],
1
@ -137,7 +137,6 @@ module.exports = function(knex) {
});
it('should allow returning for updates in postgresql', function() {
return knex('accounts').where('id', 1).update({
email:'test100@example.com',
first_name: 'UpdatedUser',
@ -150,7 +149,7 @@ module.exports = function(knex) {
1
);
tester(
'postgresql',
'pg',
'update "accounts" set "email" = ?, "first_name" = ?, "last_name" = ? where "id" = ? returning *',
['test100@example.com','UpdatedUser','UpdatedTest',1],
[{
@ -159,6 +158,7 @@ module.exports = function(knex) {
last_name: 'UpdatedTest',
email: 'test100@example.com',
logins: 1,
balance: 12.24,
about: 'Lorem ipsum Dolore labore incididunt enim.',
created_at: d,
updated_at: d,
@ -193,6 +193,7 @@ module.exports = function(knex) {
last_name: 'UpdatedTest',
email: 'test100@example.com',
logins: 1,
balance: 12.240000000000002,
about: 'Lorem ipsum Dolore labore incididunt enim.',
created_at: d,
updated_at: d,
@ -200,7 +201,6 @@ module.exports = function(knex) {
}]
);
});
});
});

View File

@ -262,7 +262,7 @@ module.exports = function(knex) {
// But the other should succeed:
return knex('knex_migrations').select('*').then(function(data) {
expect(data.length).to.equal(2);
})
});
});
});
});

View File

@ -2,20 +2,18 @@
exports.up = function(knex, promise) {
return promise.all([
knex.schema
return knex.schema
.createTable('migration_test_2', function(t) {
t.increments();
t.string('name');
}),
knex.schema
.createTable('migration_test_2_1', function(t) {
t.increments();
t.string('name');
})
]);
})
.then(() => knex.schema.createTable('migration_test_2_1', function(t) {
t.increments();
t.string('name');
}));
};
exports.down = function(knex, promise) {
return promise.all([knex.schema.dropTable('migration_test_2'), knex.schema.dropTable('migration_test_2_1')]);
return knex.schema.dropTable('migration_test_2')
.then(() => knex.schema.dropTable('migration_test_2_1'));
};

View File

@ -2,20 +2,20 @@
exports.up = function(knex, promise) {
return promise.all([
knex.schema
.createTable('migration_test_2', function(t) {
return knex.schema
.createTable('migration_test_2', function(t) {
t.increments();
t.string('name');
})
.then(() => knex.schema
.createTable('migration_test_2_1', function(t) {
t.increments();
t.string('name');
}),
knex.schema
.createTable('migration_test_2_1', function(t) {
t.increments();
t.string('name');
})
]);
})
);
};
exports.down = function(knex, promise) {
return promise.all([knex.schema.dropTable('migration_test_2'), knex.schema.dropTable('migration_test_2_1')]);
return knex.schema.dropTable('migration_test_2')
.then(() => knex.schema.dropTable('migration_test_2_1'));
};

View File

@ -273,8 +273,10 @@ module.exports = function(knex) {
'create index "NkZo/dGRI9O73/NE2fHo+35d4jk" on "test_table_one" ("first_name")',
'alter table "test_table_one" add constraint "test_table_one_email_unique" unique ("email")',
'create index "test_table_one_logins_index" on "test_table_one" ("logins")']);
tester('mssql', ['CREATE TABLE [test_table_one] ([id] bigint identity(1,1) not null primary key, [first_name] nvarchar(255), [last_name] nvarchar(255), [email] nvarchar(255) null, [logins] int default \'1\', [balance] decimal default \'0\', [about] nvarchar(max), [created_at] datetime, [updated_at] datetime, CONSTRAINT [test_table_one_email_unique] UNIQUE ([email]))',
tester('mssql', [
'CREATE TABLE [test_table_one] ([id] bigint identity(1,1) not null primary key, [first_name] nvarchar(255), [last_name] nvarchar(255), [email] nvarchar(255) null, [logins] int default \'1\', [balance] float default \'0\', [about] nvarchar(max), [created_at] datetime, [updated_at] datetime)',
'CREATE INDEX [test_table_one_first_name_index] ON [test_table_one] ([first_name])',
'CREATE UNIQUE INDEX [test_table_one_email_unique] ON [test_table_one] ([email]) WHERE [email] IS NOT NULL',
'CREATE INDEX [test_table_one_logins_index] ON [test_table_one] ([logins])']);
});
});
@ -388,7 +390,7 @@ module.exports = function(knex) {
});
it('rejects setting foreign key where tableName is not typeof === string', function() {
return knex.schema.createTable('invalid_inTable_param_test', function(table) {
let builder = knex.schema.createTable('invalid_inTable_param_test', function(table) {
const createInvalidUndefinedInTableSchema = function() {
table.increments('id').references('id').inTable()
};
@ -397,7 +399,11 @@ module.exports = function(knex) {
};
expect(createInvalidUndefinedInTableSchema).to.throw(TypeError);
expect(createInvalidObjectInTableSchema).to.throw(TypeError);
})
table.integer('yet_another_id').references('id').inTable({tableName: 'this_should_fail_too'})
});
expect(() => builder.toSQL()).to.throw(TypeError);
});
@ -415,7 +421,10 @@ module.exports = function(knex) {
tester('pg-redshift', ['create table "composite_key_test" ("column_a" integer, "column_b" integer, "details" varchar(max), "status" smallint)','alter table "composite_key_test" add constraint "composite_key_test_column_a_column_b_unique" unique ("column_a", "column_b")']);
tester('sqlite3', ['create table `composite_key_test` (`column_a` integer, `column_b` integer, `details` text, `status` tinyint)','create unique index `composite_key_test_column_a_column_b_unique` on `composite_key_test` (`column_a`, `column_b`)']);
tester('oracle', ['create table "composite_key_test" ("column_a" integer, "column_b" integer, "details" clob, "status" smallint)','alter table "composite_key_test" add constraint "zYmMt0VQwlLZ20XnrMicXZ0ufZk" unique ("column_a", "column_b")']);
tester('mssql', ['CREATE TABLE [composite_key_test] ([column_a] int, [column_b] int, [details] nvarchar(max), [status] tinyint, CONSTRAINT [composite_key_test_column_a_column_b_unique] UNIQUE ([column_a], [column_b]))']);
tester('mssql', [
'CREATE TABLE [composite_key_test] ([column_a] int, [column_b] int, [details] nvarchar(max), [status] tinyint)',
'CREATE UNIQUE INDEX [composite_key_test_column_a_column_b_unique] ON [composite_key_test] ([column_a], [column_b]) WHERE [column_a] IS NOT NULL AND [column_b] IS NOT NULL'
]);
}).then(function() {
return knex('composite_key_test').insert([{
column_a: 1,
@ -556,9 +565,10 @@ module.exports = function(knex) {
});
it('allows alter column syntax', function () {
if (knex.client.dialect.match('sqlite') !== null ||
knex.client.dialect.match('redshift') !== null ||
knex.client.dialect.match('oracle') !== null) {
if (knex.client.dialect.match('sqlite') ||
knex.client.dialect.match('redshift') ||
knex.client.dialect.match('mssql') ||
knex.client.dialect.match('oracle')) {
return;
}

View File

@ -0,0 +1,90 @@
'use strict';
const tape = require('tape')
/**
* Collection of tests for making sure that certain features are cross database compatible
*/
module.exports = function(knex) {
const dialect = knex.client.dialect;
if (dialect === 'oracle') {
// TODO: FIX ORACLE TO WORK THE SAME WAY WITH OTHER DIALECTS IF POSSIBLE
return;
}
tape(dialect + ' - crossdb compatibility: setup test table', function(t) {
knex.schema.dropTableIfExists('test_table')
.createTable('test_table', function(t) {
t.integer('id');
t.string('first');
t.string('second');
t.string('third').unique();
t.unique(['first', 'second']);
})
.finally(function() {
t.end();
});
});
tape(dialect + ' - crossdb compatibility: table may have multiple nulls in unique constrainted column', function (t) {
t.plan(3);
knex('test_table').insert([
{ third: 'foo' },
{ third: 'foo' }
]).catch(err => {
t.assert(true, 'unique constraint prevents adding rows');
return knex('test_table').insert([
{ first: 'foo2', second: 'bar2' },
{ first: 'foo2', second: 'bar2' }
]);
}).catch(err => {
t.assert(true, 'two column unique constraint prevents adding rows');
// even one null makes index to not match, thus allows adding the row
return knex('test_table').insert([
{ first: 'fo', second: null, third: null },
{ first: 'fo', second: null, third: null },
{ first: null, second: 'fo', third: null },
{ first: null, second: 'fo', third: null },
{ first: null, second: null, third: null },
]);
}).then(() => {
return knex('test_table');
}).then(res => {
t.assert(res.length == 5, 'multiple rows with nulls could be added despite of unique constraints')
}).finally(() => {
t.end();
});
});
tape(dialect + ' - create and drop index works in different cases', t => {
t.plan(1);
knex.schema.dropTableIfExists('test_table_drop_unique')
.createTable('test_table_drop_unique', t => {
t.integer('id');
t.string('first');
t.string('second');
t.string('third').unique();
t.string('fourth');
t.unique(['first', 'second']);
t.unique('fourth');
}).alterTable('test_table_drop_unique', t => {
t.dropUnique('third');
t.dropUnique('fourth');
t.dropUnique(['first', 'second']);
}).alterTable('test_table_drop_unique', t => {
t.unique(['first', 'second']);
t.unique('third');
t.unique('fourth');
})
.then(() => {
t.assert(true, 'Creating / dropping / creating unique constraint was a success');
})
.finally(() => {
t.end();
})
});
}

View File

@ -20,6 +20,7 @@ Object.keys(knexfile).forEach(function(key) {
require('./transactions')(knex)
require('./stream')(knex)
require('./crossdb-compatibility')(knex)
// Tear down the knex connection
tape(knex.client.driverName + ' - transactions: after', function(t) {

View File

@ -9,7 +9,8 @@ module.exports = (knexfile) => {
Object.keys(knexfile).forEach((key) => {
const dialect = knexfile[key].dialect || knexfile[key].client;
if (dialect !== 'sqlite3' && dialect !== 'oracledb') {
// TODO: FIX ORACLE AND MSSQL TO WORK THE SAME WAY WITH OTHER DIALECTS IF POSSIBLE
if (dialect !== 'sqlite3' && dialect !== 'oracledb' && dialect !== 'mssql') {
const knexConf = _.cloneDeep(knexfile[key]);
knexConf.connection.database = knexConf.connection.db = 'i-refuse-to-exist';
knexConf.acquireConnectionTimeout = 4000;
@ -19,7 +20,7 @@ module.exports = (knexfile) => {
t.plan(1);
t.timeoutAfter(1000);
knex('accounts').select(1)
.then(res => {
.then(res => {
t.fail(`Query should have failed, got: ${JSON.stringify(res)}`);
})
.catch(Bluebird.TimeoutError, e => {

View File

@ -4335,71 +4335,43 @@ describe("QueryBuilder", function() {
});
});
// it("lock for update", function (){
// testsql(tb().select('*').from('foo').where('bar', '=', 'baz').forUpdate(), {
// mysql: {
// sql: 'select * from `foo` where `bar` = ? for update',
// bindings: ['baz']
// },
// postgres: {
// sql: 'select * from "foo" where "bar" = ? for update',
// bindings: ['baz']
// },
// redshift: {
// sql: 'select * from "foo" where "bar" = ? for update',
// bindings: ['baz']
// },
// oracle: {
// sql: 'select * from "foo" where "bar" = ? for update',
// bindings: ['baz']
// },
// mssql: {
// sql: 'select * from [foo] where [bar] = ? with (READCOMMITTEDLOCK)',
// bindings: ['baz']
// },
// oracledb: {
// sql: 'select * from "foo" where "bar" = ? for update',
// bindings: ['baz']
// },
// postgres: {
// sql: 'select * from "foo" where "bar" = ?',
// bindings: ['baz']
// },
// redshift: {
// sql: 'select * from "foo" where "bar" = ?',
// bindings: ['baz']
// },
// });
// });
it("lock for update", function (){
testsql(qb().select('*').from('foo').where('bar', '=', 'baz').forUpdate(), {
mysql: {
sql: 'select * from `foo` where `bar` = ? for update',
bindings: ['baz']
},
postgres: {
sql: 'select * from "foo" where "bar" = ? for update',
bindings: ['baz']
},
mssql: {
sql: 'select * from [foo] with (UPDLOCK) where [bar] = ?',
bindings: ['baz']
},
oracledb: {
sql: 'select * from "foo" where "bar" = ? for update',
bindings: ['baz']
}
});
});
// it("lock in share mode", function() {
// testsql(qb().transacting({}).select('*').from('foo').where('bar', '=', 'baz').forShare(), {
// mysql: {
// sql: 'select * from `foo` where `bar` = ? lock in share mode',
// bindings: ['baz']
// },
// postgres: {
// sql: "select * from \"foo\" where \"bar\" = ? for share",
// bindings: ['baz']
// },
// redshift: {
// sql: "select * from \"foo\" where \"bar\" = ? for share",
// bindings: ['baz']
// },
// mssql: {
// sql: 'select * from [foo] where [bar] = ? with (NOLOCK)',
// bindings: ['baz']
// },
// postgres: {
// sql: 'select * from "foo" where "bar" = ?',
// bindings: ['baz']
// },
// redshift: {
// sql: 'select * from "foo" where "bar" = ?',
// bindings: ['baz']
// },
// });
// });
it("lock in share mode", function() {
testsql(qb().select('*').from('foo').where('bar', '=', 'baz').forShare(), {
mysql: {
sql: 'select * from `foo` where `bar` = ? lock in share mode',
bindings: ['baz']
},
postgres: {
sql: "select * from \"foo\" where \"bar\" = ? for share",
bindings: ['baz']
},
mssql: {
sql: 'select * from [foo] with (HOLDLOCK) where [bar] = ?',
bindings: ['baz']
},
});
});
it("should allow lock (such as forUpdate) outside of a transaction", function() {
testsql(qb().select('*').from('foo').where('bar', '=', 'baz').forUpdate(), {
@ -4408,17 +4380,13 @@ describe("QueryBuilder", function() {
bindings: ['baz']
},
mssql: {
sql: 'select * from [foo] with (READCOMMITTEDLOCK) where [bar] = ?',
sql: 'select * from [foo] with (UPDLOCK) where [bar] = ?',
bindings: ['baz']
},
postgres: {
sql: 'select * from "foo" where "bar" = ? for update',
bindings: ['baz']
},
redshift: {
sql: 'select * from "foo" where "bar" = ?',
bindings: ['baz']
},
});
});

View File

@ -90,7 +90,7 @@ describe("MSSQL SchemaBuilder", function() {
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('ALTER TABLE [users] DROP CONSTRAINT [users_foo_unique]');
expect(tableSql[0].sql).to.equal('DROP INDEX [users_foo_unique] ON [users]');
});
it('should alter columns with the alter flag', function() {
@ -110,7 +110,7 @@ describe("MSSQL SchemaBuilder", function() {
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('ALTER TABLE [users] DROP CONSTRAINT [foo]');
expect(tableSql[0].sql).to.equal('DROP INDEX [foo] ON [users]');
});
it('test drop index', function() {
@ -182,7 +182,7 @@ describe("MSSQL SchemaBuilder", function() {
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('CREATE UNIQUE INDEX [bar] ON [users] ([foo])');
expect(tableSql[0].sql).to.equal('CREATE UNIQUE INDEX [bar] ON [users] ([foo]) WHERE [foo] IS NOT NULL');
});
it('test adding index', function() {
@ -368,7 +368,7 @@ describe("MSSQL SchemaBuilder", function() {
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('ALTER TABLE [users] ADD [foo] decimal(5, 2)');
expect(tableSql[0].sql).to.equal('ALTER TABLE [users] ADD [foo] float');
});
it('test adding double', function() {
@ -377,7 +377,7 @@ describe("MSSQL SchemaBuilder", function() {
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('ALTER TABLE [users] ADD [foo] decimal');
expect(tableSql[0].sql).to.equal('ALTER TABLE [users] ADD [foo] float');
});
it('test adding double specifying precision', function() {
@ -386,7 +386,7 @@ describe("MSSQL SchemaBuilder", function() {
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('ALTER TABLE [users] ADD [foo] decimal(15, 8)');
expect(tableSql[0].sql).to.equal('ALTER TABLE [users] ADD [foo] float');
});
it('test adding decimal', function() {
@ -530,7 +530,7 @@ describe("MSSQL SchemaBuilder", function() {
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('ALTER TABLE [composite_key_test] DROP CONSTRAINT [composite_key_test_column_a_column_b_unique]');
expect(tableSql[0].sql).to.equal('DROP INDEX [composite_key_test_column_a_column_b_unique] ON [composite_key_test]');
});
it('allows default as alias for defaultTo', function() {