Fix #2545, bring redshift wrapIdentifier custom hook up to date with postgres (#2551)

* Add a Redshift dialect that inherits from Postgres.

* Turn .index() and .dropIndex() into no-ops with warnings in the Redshift dialect.

* Update the Redshift dialect to be compatible with master.

* Update package.json

* Disable liftoff cli

* Remove the CLI

* Add lib to the repo

* Allow the escaping of named bindings.

* Update dist

* Update the Redshift dialect’s instantiation of the query and column compilers.

* Update the distribution

* Fix a merge conflict

* Take lib back out

* Trying to bring back in line with tgreisser/knex

* Add npm 5 package-lock

* Bring cli.js back in line

* Bring cli.js back in line

* Progress commit on redshift integration tests

* Revert "Progress commit on redshift integration tests"

This reverts commit 207e31635c638853dec54ce0580d34559ba5a54c.

* Progress commit

* Working not null on primary columns in createTable

* Working redshift unit tests

* Working unit and integration tests, still need to fix migration tests

* Brought datatypes more in line with what redshift actually supports

* Added query compiler unit tests

* Add a hacky returning clause for redshift ugh

* Working migration integration tests

* Working insert integration tests

* Allow multiple insert returning values

* Working select integration tests

* Working join integration tests

* Working aggregate integration tests

* All integration suite tests working

* Put docker index for reconnect tests back

* Redshift does not support insert...returning; there does not seem to be a way around that, so accept it and test accordingly

* Leave redshift integration tests in place, but do not run them by default

* Fix mysql order by test

* Fix more tests

* Change test db name to knex_test for consistency

* Address PR comments

* Sublime Text gitignore

* Redshift does not support adding more than one column in alter table

* Fix integration tests for redshift

* Linter

* Combine dialect test skip if clause
This commit is contained in:
Adam Cofer 2018-04-04 18:43:39 -04:00 committed by Mikael Lepistö
parent 843a16799d
commit ee8cc35ecb
8 changed files with 88 additions and 35 deletions

2
.gitignore vendored
View File

@ -4,6 +4,8 @@ raw
.idea
.DS_Store
.vagrant
*.sublime-project
*.sublime-workspace
node_modules
package-lock.json
test.sqlite3

View File

@ -7,7 +7,7 @@ import QueryCompiler from '../../../query/compiler';
import QueryCompiler_PG from '../../postgres/query/compiler';
import * as helpers from '../../../helpers';
import { assign, reduce } from 'lodash';
import { assign, reduce, identity } from 'lodash';
function QueryCompiler_Redshift(client, builder) {
QueryCompiler_PG.call(this, client, builder);
@ -68,13 +68,23 @@ assign(QueryCompiler_Redshift.prototype, {
// Compiles a columnInfo query
columnInfo() {
const column = this.single.columnInfo;
let schema = this.single.schema;
// The user may have specified a custom wrapIdentifier function in the config. We
// need to run the identifiers through that function, but not format them as
// identifiers otherwise.
const table = this.client.customWrapIdentifier(this.single.table, identity);
if (schema) {
schema = this.client.customWrapIdentifier(schema, identity);
}
let sql = 'select * from information_schema.columns where table_name = ? and table_catalog = ?';
const bindings = [this.single.table.toLowerCase(), this.client.database().toLowerCase()];
const bindings = [table.toLowerCase(), this.client.database().toLowerCase()];
if (this.single.schema) {
if (schema) {
sql += ' and table_schema = ?';
bindings.push(this.single.schema);
bindings.push(schema);
} else {
sql += ' and table_schema = current_schema()';
}

View File

@ -70,4 +70,23 @@ TableCompiler_Redshift.prototype.primary = function(columns, constraintName) {
return self.pushQuery(`alter table ${self.tableName()} add constraint ${constraintName} primary key (${self.formatter.columnize(columns)})`);
};
// Compiles column add. Redshift can only add one column per ALTER TABLE
// statement, so the core addColumns (which comma-joins every column into a
// single statement) doesn't work. #2545
//
// @param {string[]} columns - column definitions being added (passed through
//   to the Postgres implementation for the alter-column path).
// @param {string} [prefix] - statement prefix; defaults to this.addColumnsPrefix.
//   When it equals this.alterColumnsPrefix, delegates to Postgres unchanged.
// @param {Object[]} [colCompilers] - column compilers; defaults to this.getColumns().
TableCompiler_Redshift.prototype.addColumns = function (columns, prefix, colCompilers) {
  if (prefix === this.alterColumnsPrefix) {
    // Column *alterations* are not affected by the one-column-per-statement
    // limitation, so reuse the Postgres implementation as-is.
    TableCompiler_PG.prototype.addColumns.call(this, columns, prefix, colCompilers);
    return;
  }
  // Avoid mutating the caller-supplied parameters; bind defaults locally.
  const statementPrefix = prefix || this.addColumnsPrefix;
  const compilers = colCompilers || this.getColumns();
  // The quoted table name is loop-invariant; compute it once.
  const quotedTableName = this.tableName();
  for (const col of compilers) {
    // Emit one "alter table ... add column ..." statement per column.
    this.pushQuery({
      sql: `alter table ${quotedTableName} ${statementPrefix}${col.compileColumn()}`,
      bindings: []
    });
  }
};
export default TableCompiler_Redshift;

View File

@ -100,13 +100,13 @@ module.exports = function(knex) {
it('should work using camelCased table name', () => {
return knex('testTableTwo').columnInfo().then(res => {
expect(Object.keys(res)).to.eql(['id', 'accountId', 'details', 'status', 'jsonData']);
expect(Object.keys(res)).to.have.all.members(['id', 'accountId', 'details', 'status', 'jsonData']);
});
});
it('should work using snake_cased table name', () => {
return knex('test_table_two').columnInfo().then(res => {
expect(Object.keys(res)).to.eql(['id', 'accountId', 'details', 'status', 'jsonData']);
expect(Object.keys(res)).to.have.all.members(['id', 'accountId', 'details', 'status', 'jsonData']);
});
});
@ -175,7 +175,9 @@ module.exports = function(knex) {
// Insert new data after truncate and make sure ids restart at 1.
// This doesn't currently work on oracle, where the created sequence
// needs to be manually reset.
if (knex.client.dialect !== 'oracle') {
// On redshift, one would need to create an entirely new table and do
// `insert into ... (select ...); alter table rename...`
if (/oracle/i.test(knex.client.dialect) || /redshift/i.test(knex.client.dialect)) { return; }
return knex('test_table_two').insert({ status: 1 })
.then(res => {
return knex('test_table_two')
@ -186,7 +188,6 @@ module.exports = function(knex) {
expect(res.id).to.equal(1);
});
});
}
});
});
@ -664,6 +665,8 @@ module.exports = function(knex) {
});
it('Event: start', function() {
// On redshift, cannot set an identity column to a value
if (/redshift/i.test(knex.client.dialect)) { return; }
return knex('accounts')
.insert({id: '999', last_name: 'Start'})
.then(function() {

View File

@ -1,4 +1,5 @@
'use strict';
/* eslint no-var: 0 */
var assert = require('assert')
var testConfig = process.env.KNEX_TEST && require(process.env.KNEX_TEST) || {};

View File

@ -1743,6 +1743,10 @@ describe("QueryBuilder", function() {
sql: 'select * from "users" group by id, email',
bindings: []
},
redshift: {
sql: 'select * from "users" group by id, email',
bindings: []
},
});
});
@ -3936,6 +3940,10 @@ describe("QueryBuilder", function() {
sql: 'update "users" set "email" = ? where "id" = ?',
bindings: ['foo', 1]
},
redshift: {
sql: 'update "users" set "email" = ? where "id" = ?',
bindings: ['foo', 1]
},
});
});
@ -4296,6 +4304,10 @@ describe("QueryBuilder", function() {
sql: 'select * from "foo" where "bar" = ? for update',
bindings: ['baz']
},
redshift: {
sql: 'select * from "foo" where "bar" = ?',
bindings: ['baz']
},
});
});

View File

@ -1,8 +1,9 @@
/*global describe, expect, it*/
/* eslint max-len:0 */
'use strict';
var tableSql;
let tableSql;
const Redshift_Client = require('../../../lib/dialects/redshift');
const client = new Redshift_Client({})
@ -26,8 +27,9 @@ describe("Redshift SchemaBuilder", function() {
table.increments('id');
table.string('email');
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('alter table "users" add column "id" integer identity(1,1) primary key not null, add column "email" varchar(255)');
equal(2, tableSql.length);
expect(tableSql[0].sql).to.equal('alter table "users" add column "id" integer identity(1,1) primary key not null');
expect(tableSql[1].sql).to.equal('alter table "users" add column "email" varchar(255)');
});
it("alter table with schema", function() {
@ -386,16 +388,18 @@ describe("Redshift SchemaBuilder", function() {
tableSql = client.schemaBuilder().table('users', function(table) {
table.timestamps();
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('alter table "users" add column "created_at" timestamptz, add column "updated_at" timestamptz');
equal(2, tableSql.length);
expect(tableSql[0].sql).to.equal('alter table "users" add column "created_at" timestamptz');
expect(tableSql[1].sql).to.equal('alter table "users" add column "updated_at" timestamptz');
});
it("adding timestamps with defaults", function() {
tableSql = client.schemaBuilder().table('users', function(table) {
table.timestamps(false, true);
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('alter table "users" add column "created_at" timestamptz not null default CURRENT_TIMESTAMP, add column "updated_at" timestamptz not null default CURRENT_TIMESTAMP');
equal(2, tableSql.length);
expect(tableSql[0].sql).to.equal('alter table "users" add column "created_at" timestamptz not null default CURRENT_TIMESTAMP');
expect(tableSql[1].sql).to.equal('alter table "users" add column "updated_at" timestamptz not null default CURRENT_TIMESTAMP');
});
it("adding binary", function() {
@ -428,22 +432,22 @@ describe("Redshift SchemaBuilder", function() {
});
it('allows creating an extension', function() {
var sql = client.schemaBuilder().createExtension('test').toSQL();
const sql = client.schemaBuilder().createExtension('test').toSQL();
expect(sql[0].sql).to.equal('create extension "test"');
});
it('allows dropping an extension', function() {
var sql = client.schemaBuilder().dropExtension('test').toSQL();
const sql = client.schemaBuilder().dropExtension('test').toSQL();
expect(sql[0].sql).to.equal('drop extension "test"');
});
it('allows creating an extension only if it doesn\'t exist', function() {
var sql = client.schemaBuilder().createExtensionIfNotExists('test').toSQL();
const sql = client.schemaBuilder().createExtensionIfNotExists('test').toSQL();
expect(sql[0].sql).to.equal('create extension if not exists "test"');
});
it('allows dropping an extension only if it exists', function() {
var sql = client.schemaBuilder().dropExtensionIfExists('test').toSQL();
const sql = client.schemaBuilder().dropExtensionIfExists('test').toSQL();
expect(sql[0].sql).to.equal('drop extension if exists "test"');
});
@ -471,8 +475,9 @@ describe("Redshift SchemaBuilder", function() {
t.primary(['test1', 'test2'], 'testconstraintname');
}).toSQL();
equal(1, tableSql.length);
expect(tableSql[0].sql).to.equal('alter table "users" add column "test1" varchar(255), add column "test2" varchar(255) not null');
equal(2, tableSql.length);
expect(tableSql[0].sql).to.equal('alter table "users" add column "test1" varchar(255)');
expect(tableSql[1].sql).to.equal('alter table "users" add column "test2" varchar(255) not null');
tableSql = client.schemaBuilder().table('users', function(t) {
t.string('test1').notNullable();
@ -480,8 +485,9 @@ describe("Redshift SchemaBuilder", function() {
t.primary(['test1', 'test2'], 'testconstraintname');
}).toSQL();
expect(tableSql[0].sql).to.equal('alter table "users" add column "test1" varchar(255) not null, add column "test2" varchar(255) not null');
expect(tableSql[1].sql).to.equal('alter table "users" add constraint "testconstraintname" primary key ("test1", "test2")');
expect(tableSql[0].sql).to.equal('alter table "users" add column "test1" varchar(255) not null');
expect(tableSql[1].sql).to.equal('alter table "users" add column "test2" varchar(255) not null');
expect(tableSql[2].sql).to.equal('alter table "users" add constraint "testconstraintname" primary key ("test1", "test2")');
tableSql = client.schemaBuilder().createTable('users', function(t) {
t.string('test').primary('testconstraintname');