/*global describe, it, expect, testPromise, before, after*/
'use strict';
const Promise = testPromise;
module.exports = function(knex) {
describe('Schema', function() {
// Verifies that dropTableIfExists compiles to the correct SQL per dialect,
// and drops every table the rest of this schema suite may have created.
describe('dropTable', function() {
  it('has a dropTableIfExists method', function() {
    this.timeout(process.env.KNEX_TEST_TIMEOUT || 30000);
    return Promise.all([
      knex.schema
        .dropTableIfExists('test_foreign_table_two')
        .testSql(function(tester) {
          tester(['pg'], ['drop table if exists "test_foreign_table_two"']);
          tester(
            ['pg-redshift'],
            ['drop table if exists "test_foreign_table_two"']
          );
          tester(
            ['sqlite3', 'mysql'],
            ['drop table if exists `test_foreign_table_two`']
          );
          tester('oracledb', [
            'begin execute immediate \'drop table "test_foreign_table_two"\'; exception when others then if sqlcode != -942 then raise; end if; end;',
            'begin execute immediate \'drop sequence "test_foreign_table_two_seq"\'; exception when others then if sqlcode != -2289 then raise; end if; end;',
          ]);
          tester('mssql', [
            "if object_id('[test_foreign_table_two]', 'U') is not null DROP TABLE [test_foreign_table_two]",
          ]);
        }),
      // Best-effort cleanup of every table used by the schema suite.
      knex.schema
        .dropTableIfExists('test_table_one')
        .dropTableIfExists('catch_test')
        .dropTableIfExists('test_table_two')
        .dropTableIfExists('test_table_three')
        .dropTableIfExists('test_table_four')
        .dropTableIfExists('datatype_test')
        .dropTableIfExists('composite_key_test')
        .dropTableIfExists('charset_collate_test')
        .dropTableIfExists('accounts')
        .dropTableIfExists('migration_test_1')
        .dropTableIfExists('migration_test_2')
        .dropTableIfExists('migration_test_2_1')
        .dropTableIfExists('test_default_table')
        .dropTableIfExists('test_default_table2')
        .dropTableIfExists('test_default_table3')
        .dropTableIfExists('knex_migrations')
        .dropTableIfExists('knex_migrations_lock')
        .dropTableIfExists('bool_test')
        .dropTableIfExists('10_test_table')
        .dropTableIfExists('rename_column_foreign_test')
        .dropTableIfExists('rename_column_test')
        .dropTableIfExists('renamecoltest')
        .dropTableIfExists('should_not_be_run')
        .dropTableIfExists('invalid_inTable_param_test')
        .dropTableIfExists('primarytest')
        .dropTableIfExists('increments_columns_1_test')
        .dropTableIfExists('increments_columns_2_test'),
    ]);
  });
});
describe('createTable', function() {
describe('increments types - postgres', function() {
  // These checks only make sense against the postgres driver; bail out
  // early for every other dialect.
  if (!knex || !knex.client || !/pg/i.test(knex.client.driverName)) {
    return Promise.resolve();
  }
  before(function() {
    return Promise.all([
      knex.schema.createTable('increments_columns_1_test', function(
        table
      ) {
        table.increments().comment('comment_1');
      }),
      knex.schema.createTable('increments_columns_2_test', function(
        table
      ) {
        table.increments('named_2').comment('comment_2');
      }),
    ]);
  });
  after(function() {
    return Promise.all([
      knex.schema.dropTable('increments_columns_1_test'),
      knex.schema.dropTable('increments_columns_2_test'),
    ]);
  });
  // Looks up the first column of `tableName` in the pg catalog and asserts
  // both its name and the comment attached to it.
  function assertIncrementsColumn(tableName, wantedColumn, wantedComment) {
    return knex
      .raw('SELECT c.oid FROM pg_catalog.pg_class c WHERE c.relname = ?', [
        tableName,
      ])
      .then(function(oidResult) {
        const tableOid = oidResult.rows[0].oid;
        return knex.raw('SELECT pg_catalog.col_description(?,?);', [
          tableOid,
          '1',
        ]);
      })
      .then(function(commentResult) {
        const actualComment = commentResult.rows[0].col_description;
        return knex
          .raw(
            'select column_name from INFORMATION_SCHEMA.COLUMNS where table_name = ?;',
            tableName
          )
          .then(function(columnResult) {
            expect(columnResult.rows[0].column_name).to.equal(wantedColumn);
            expect(actualComment).to.equal(wantedComment);
          });
      });
  }
  it('#2210 - creates an incrementing column with a comment', function() {
    return assertIncrementsColumn(
      'increments_columns_1_test',
      'id',
      'comment_1'
    );
  });
  it('#2210 - creates an incrementing column with a specified name and comment', function() {
    return assertIncrementsColumn(
      'increments_columns_2_test',
      'named_2',
      'comment_2'
    );
  });
});
describe('increments types - mysql', function() {
  // These checks only make sense against the mysql driver; bail out early
  // for every other dialect.
  if (!knex || !knex.client || !/mysql/i.test(knex.client.driverName)) {
    return Promise.resolve();
  }
  // Information-schema lookup for a column's comment, hoisted so the two
  // tests below do not each carry their own copy of the same SQL.
  const columnCommentQuery = `
    SELECT
      COLUMN_COMMENT
    FROM
      INFORMATION_SCHEMA.COLUMNS
    WHERE
      TABLE_NAME = ? AND
      COLUMN_NAME = ?
  `;
  before(function() {
    return Promise.all([
      knex.schema.createTable('increments_columns_1_test', function(
        table
      ) {
        table.increments().comment('comment_1');
      }),
      knex.schema.createTable('increments_columns_2_test', function(
        table
      ) {
        table.increments('named_2').comment('comment_2');
      }),
    ]);
  });
  after(function() {
    return Promise.all([
      knex.schema.dropTable('increments_columns_1_test'),
      knex.schema.dropTable('increments_columns_2_test'),
    ]);
  });
  it('#2210 - creates an incrementing column with a comment', function() {
    const table_name = 'increments_columns_1_test';
    const expected_column = 'id';
    const expected_comment = 'comment_1';
    return knex
      .raw(columnCommentQuery, [table_name, expected_column])
      .then(function(res) {
        const comment = res[0][0].COLUMN_COMMENT;
        expect(comment).to.equal(expected_comment);
      });
  });
  it('#2210 - creates an incrementing column with a specified name and comment', function() {
    const table_name = 'increments_columns_2_test';
    const expected_column = 'named_2';
    const expected_comment = 'comment_2';
    return knex
      .raw(columnCommentQuery, [table_name, expected_column])
      .then(function(res) {
        const comment = res[0][0].COLUMN_COMMENT;
        expect(comment).to.equal(expected_comment);
      });
  });
});
describe('enum - postgres', function() {
  // Native enum types are postgres-only; bail out for other dialects.
  if (!knex || !knex.client || !/pg/i.test(knex.client.driverName)) {
    return Promise.resolve();
  }
  afterEach(function() {
    // Use IF EXISTS so that cleanup cannot throw (and mask the real
    // failure) when the test failed before the type was actually created.
    return knex.schema
      .dropTableIfExists('native_enum_test')
      .raw('DROP TYPE IF EXISTS "foo_type"');
  });
  it('uses native type when useNative is specified', function() {
    return knex.schema
      .createTable('native_enum_test', function(table) {
        table
          .enum('foo_column', ['a', 'b', 'c'], {
            useNative: true,
            enumName: 'foo_type',
          })
          .notNull();
        table.uuid('id').notNull();
      })
      .testSql(function(tester) {
        tester('pg', [
          "create type \"foo_type\" as enum ('a', 'b', 'c')",
          'create table "native_enum_test" ("foo_column" "foo_type" not null, "id" uuid not null)',
        ]);
      });
  });
});
it('Callback function must be supplied', function() {
  // Building a table without a schema callback must throw; supplying even
  // an empty callback must not.
  const buildWithoutCallback = function() {
    knex.schema.createTable('callback_must_be_supplied').toString();
  };
  const buildWithCallback = function() {
    knex.schema
      .createTable('callback_must_be_supplied', function() {})
      .toString();
  };
  expect(buildWithoutCallback).to.throw(TypeError);
  expect(buildWithCallback).to.not.throw(TypeError);
});
it('is possible to chain .catch', function() {
  return knex.schema
    .createTable('catch_test', function(t) {
      t.increments();
    })
    .catch(function(e) {
      // Rethrow so an unexpected failure still fails the test; the point
      // here is only that .catch is chainable on the schema builder.
      throw e;
    });
});
// Exercises a broad createTable: engine, table/column comments, indexes,
// unique constraints, defaults, and timestamps across all dialects.
it('accepts the table name, and a "container" function', function() {
  return knex.schema
    .createTable('test_table_one', function(table) {
      table.engine('InnoDB');
      table.comment('A table comment.');
      table.bigIncrements('id');
      table.string('first_name').index();
      table.string('last_name');
      table
        .string('email')
        .unique()
        .nullable();
      table
        .integer('logins')
        .defaultTo(1)
        .index()
        .comment();
      table.float('balance').defaultTo(0);
      if (knex.client.driverName === 'oracledb') {
        // use string instead to force varchar2 to avoid later problems with join and union
        table.string('about', 4000).comment('A comment.');
      } else {
        table.text('about').comment('A comment.');
      }
      table.timestamps();
    })
    .testSql(function(tester) {
      tester('mysql', [
        "create table `test_table_one` (`id` bigint unsigned not null auto_increment primary key, `first_name` varchar(255), `last_name` varchar(255), `email` varchar(255) null, `logins` int default '1', `balance` float(8, 2) default '0', `about` text comment 'A comment.', `created_at` datetime, `updated_at` datetime) default character set utf8 engine = InnoDB comment = 'A table comment.'",
        'alter table `test_table_one` add index `test_table_one_first_name_index`(`first_name`)',
        'alter table `test_table_one` add unique `test_table_one_email_unique`(`email`)',
        'alter table `test_table_one` add index `test_table_one_logins_index`(`logins`)',
      ]);
      tester('pg', [
        'create table "test_table_one" ("id" bigserial primary key, "first_name" varchar(255), "last_name" varchar(255), "email" varchar(255) null, "logins" integer default \'1\', "balance" real default \'0\', "about" text, "created_at" timestamptz, "updated_at" timestamptz)',
        'comment on table "test_table_one" is \'A table comment.\'',
        'comment on column "test_table_one"."logins" is NULL',
        'comment on column "test_table_one"."about" is \'A comment.\'',
        'create index "test_table_one_first_name_index" on "test_table_one" ("first_name")',
        'alter table "test_table_one" add constraint "test_table_one_email_unique" unique ("email")',
        'create index "test_table_one_logins_index" on "test_table_one" ("logins")',
      ]);
      tester('pg-redshift', [
        'create table "test_table_one" ("id" bigint identity(1,1) primary key not null, "first_name" varchar(255), "last_name" varchar(255), "email" varchar(255) null, "logins" integer default \'1\', "balance" real default \'0\', "about" varchar(max), "created_at" timestamptz, "updated_at" timestamptz)',
        'comment on table "test_table_one" is \'A table comment.\'',
        'comment on column "test_table_one"."logins" is NULL',
        'comment on column "test_table_one"."about" is \'A comment.\'',
        'alter table "test_table_one" add constraint "test_table_one_email_unique" unique ("email")',
      ]);
      tester('sqlite3', [
        "create table `test_table_one` (`id` integer not null primary key autoincrement, `first_name` varchar(255), `last_name` varchar(255), `email` varchar(255) null, `logins` integer default '1', `balance` float default '0', `about` text, `created_at` datetime, `updated_at` datetime)",
        'create index `test_table_one_first_name_index` on `test_table_one` (`first_name`)',
        'create unique index `test_table_one_email_unique` on `test_table_one` (`email`)',
        'create index `test_table_one_logins_index` on `test_table_one` (`logins`)',
      ]);
      tester('oracledb', [
        `create table \"test_table_one\" (\"id\" number(20, 0) not null primary key, \"first_name\" varchar2(255), \"last_name\" varchar2(255), \"email\" varchar2(255) null, \"logins\" integer default '1', \"balance\" float default '0', \"about\" varchar2(4000), \"created_at\" timestamp with local time zone, \"updated_at\" timestamp with local time zone)`,
        'comment on table "test_table_one" is \'A table comment.\'',
        `DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE \"test_table_one_seq\"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'test_table_one'; execute immediate ('create or replace trigger \"test_table_one_autoinc_trg\" BEFORE INSERT on \"test_table_one\" for each row declare checking number := 1; begin if (:new.\"' || PK_NAME || '\" is null) then while checking >= 1 loop select \"test_table_one_seq\".nextval into :new.\"' || PK_NAME || '\" from dual; select count(\"' || PK_NAME || '\") into checking from \"test_table_one\" where \"' || PK_NAME || '\" = :new.\"' || PK_NAME || '\"; end loop; end if; end;'); END;`,
        'comment on column "test_table_one"."logins" is \'\'',
        'comment on column "test_table_one"."about" is \'A comment.\'',
        'create index "NkZo/dGRI9O73/NE2fHo+35d4jk" on "test_table_one" ("first_name")',
        'alter table "test_table_one" add constraint "test_table_one_email_unique" unique ("email")',
        'create index "test_table_one_logins_index" on "test_table_one" ("logins")',
      ]);
      tester('mssql', [
        "CREATE TABLE [test_table_one] ([id] bigint identity(1,1) not null primary key, [first_name] nvarchar(255), [last_name] nvarchar(255), [email] nvarchar(255) null, [logins] int default '1', [balance] float default '0', [about] nvarchar(max), [created_at] datetime2, [updated_at] datetime2)",
        'CREATE INDEX [test_table_one_first_name_index] ON [test_table_one] ([first_name])',
        'CREATE UNIQUE INDEX [test_table_one_email_unique] ON [test_table_one] ([email]) WHERE [email] IS NOT NULL',
        'CREATE INDEX [test_table_one_logins_index] ON [test_table_one] ([logins])',
      ]);
    });
});
it('is possible to set the db engine with the table.engine', function() {
  return knex.schema
    .createTable('test_table_two', function(table) {
      table.engine('InnoDB');
      table.increments();
      table.integer('account_id');
      if (knex.client.driverName === 'oracledb') {
        // use string instead to force varchar2 to avoid later problems with join and union
        // e.g. where email (varchar2) = details (clob) does not work
        table.string('details', 4000);
      } else {
        table.text('details');
      }
      table.tinyint('status');
    })
    .testSql(function(tester) {
      tester('mysql', [
        'create table `test_table_two` (`id` int unsigned not null auto_increment primary key, `account_id` int, `details` text, `status` tinyint) default character set utf8 engine = InnoDB',
      ]);
      tester('mssql', [
        'CREATE TABLE [test_table_two] ([id] int identity(1,1) not null primary key, [account_id] int, [details] nvarchar(max), [status] tinyint)',
      ]);
    });
});
it('sets default values with defaultTo', function() {
  return knex.schema
    .createTable('test_table_three', function(table) {
      table.engine('InnoDB');
      table
        .integer('main')
        .notNullable()
        .primary();
      table.text('paragraph').defaultTo('Lorem ipsum Qui quis qui in.');
    })
    .testSql(function(tester) {
      tester('mysql', [
        'create table `test_table_three` (`main` int not null, `paragraph` text) default character set utf8 engine = InnoDB',
        'alter table `test_table_three` add primary key `test_table_three_pkey`(`main`)',
      ]);
      tester('pg', [
        'create table "test_table_three" ("main" integer not null, "paragraph" text default \'Lorem ipsum Qui quis qui in.\')',
        'alter table "test_table_three" add constraint "test_table_three_pkey" primary key ("main")',
      ]);
      tester('pg-redshift', [
        'create table "test_table_three" ("main" integer not null, "paragraph" varchar(max) default \'Lorem ipsum Qui quis qui in.\')',
        'alter table "test_table_three" add constraint "test_table_three_pkey" primary key ("main")',
      ]);
      tester('sqlite3', [
        "create table `test_table_three` (`main` integer not null, `paragraph` text default 'Lorem ipsum Qui quis qui in.', primary key (`main`))",
      ]);
      tester('oracledb', [
        'create table "test_table_three" ("main" integer not null, "paragraph" clob default \'Lorem ipsum Qui quis qui in.\')',
        'alter table "test_table_three" add constraint "test_table_three_pkey" primary key ("main")',
      ]);
      tester('mssql', [
        'CREATE TABLE [test_table_three] ([main] int not null, [paragraph] nvarchar(max), CONSTRAINT [test_table_three_pkey] PRIMARY KEY ([main]))',
      ]);
    });
});
it('supports the enum and uuid columns', function() {
  // NB: redshift does not...
  return knex.schema
    .createTable('datatype_test', function(table) {
      table.enum('enum_value', ['a', 'b', 'c']);
      table.uuid('uuid').notNull();
    })
    .testSql(function(tester) {
      tester('mysql', [
        "create table `datatype_test` (`enum_value` enum('a', 'b', 'c'), `uuid` char(36) not null) default character set utf8",
      ]);
      tester('pg', [
        'create table "datatype_test" ("enum_value" text check ("enum_value" in (\'a\', \'b\', \'c\')), "uuid" uuid not null)',
      ]);
      tester('sqlite3', [
        "create table `datatype_test` (`enum_value` text check (`enum_value` in ('a', 'b', 'c')), `uuid` char(36) not null)",
      ]);
      tester('oracledb', [
        'create table "datatype_test" ("enum_value" varchar2(1) check ("enum_value" in (\'a\', \'b\', \'c\')), "uuid" char(36) not null)',
      ]);
      tester('mssql', [
        'CREATE TABLE [datatype_test] ([enum_value] nvarchar(100), [uuid] uniqueidentifier not null)',
      ]);
    });
});
// Covers the three ways of declaring a foreign key: implicit name via
// .references().inTable(), explicit name via .withKeyName(), and explicit
// name via table.foreign().
it('allows for setting foreign keys on schema creation', function() {
  return knex.schema
    .createTable('test_foreign_table_two', function(t) {
      t.increments();
      // Implicitly named constraint.
      const fkeyTwo = t.integer('fkey_two').unsigned();
      fkeyTwo.references('id').inTable('test_table_two');
      // Explicitly named via withKeyName.
      const fkeyThree = t.integer('fkey_three').unsigned();
      fkeyThree
        .references('id')
        .inTable('test_table_two')
        .withKeyName('fk_fkey_three');
      // Explicitly named via table.foreign().
      t.integer('fkey_four').unsigned();
      t.foreign('fkey_four', 'fk_fkey_four').references('test_table_two.id');
    })
    .testSql(function(tester) {
      tester('mysql', [
        'create table `test_foreign_table_two` (`id` int unsigned not null auto_increment primary key, `fkey_two` int unsigned, `fkey_three` int unsigned, `fkey_four` int unsigned) default character set utf8',
        'alter table `test_foreign_table_two` add constraint `test_foreign_table_two_fkey_two_foreign` foreign key (`fkey_two`) references `test_table_two` (`id`)',
        'alter table `test_foreign_table_two` add constraint `fk_fkey_three` foreign key (`fkey_three`) references `test_table_two` (`id`)',
        'alter table `test_foreign_table_two` add constraint `fk_fkey_four` foreign key (`fkey_four`) references `test_table_two` (`id`)',
      ]);
      tester('pg', [
        'create table "test_foreign_table_two" ("id" serial primary key, "fkey_two" integer, "fkey_three" integer, "fkey_four" integer)',
        'alter table "test_foreign_table_two" add constraint "test_foreign_table_two_fkey_two_foreign" foreign key ("fkey_two") references "test_table_two" ("id")',
        'alter table "test_foreign_table_two" add constraint "fk_fkey_three" foreign key ("fkey_three") references "test_table_two" ("id")',
        'alter table "test_foreign_table_two" add constraint "fk_fkey_four" foreign key ("fkey_four") references "test_table_two" ("id")',
      ]);
      tester('pg-redshift', [
        'create table "test_foreign_table_two" ("id" integer identity(1,1) primary key not null, "fkey_two" integer, "fkey_three" integer, "fkey_four" integer)',
        'alter table "test_foreign_table_two" add constraint "test_foreign_table_two_fkey_two_foreign" foreign key ("fkey_two") references "test_table_two" ("id")',
        'alter table "test_foreign_table_two" add constraint "fk_fkey_three" foreign key ("fkey_three") references "test_table_two" ("id")',
        'alter table "test_foreign_table_two" add constraint "fk_fkey_four" foreign key ("fkey_four") references "test_table_two" ("id")',
      ]);
      tester('sqlite3', [
        'create table `test_foreign_table_two` (`id` integer not null primary key autoincrement, `fkey_two` integer, `fkey_three` integer, `fkey_four` integer, ' +
          'foreign key(`fkey_two`) references `test_table_two`(`id`), ' +
          'foreign key(`fkey_three`) references `test_table_two`(`id`), ' +
          'foreign key(`fkey_four`) references `test_table_two`(`id`))',
      ]);
      tester('oracledb', [
        'create table "test_foreign_table_two" ("id" integer not null primary key, "fkey_two" integer, "fkey_three" integer, "fkey_four" integer)',
        'DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE (\'CREATE SEQUENCE "test_foreign_table_two_seq"\'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = \'P\' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = \'test_foreign_table_two\'; execute immediate (\'create or replace trigger "m6uvAnbUQqcHvfWTN5IAjip1/vk" BEFORE INSERT on "test_foreign_table_two" for each row declare checking number := 1; begin if (:new."\' || PK_NAME || \'" is null) then while checking >= 1 loop select "test_foreign_table_two_seq".nextval into :new."\' || PK_NAME || \'" from dual; select count("\' || PK_NAME || \'") into checking from "test_foreign_table_two" where "\' || PK_NAME || \'" = :new."\' || PK_NAME || \'"; end loop; end if; end;\'); END;',
        'alter table "test_foreign_table_two" add constraint "q7TfvbIx3HUQbh+l+e5N+J+Guag" foreign key ("fkey_two") references "test_table_two" ("id")',
        'alter table "test_foreign_table_two" add constraint "fk_fkey_three" foreign key ("fkey_three") references "test_table_two" ("id")',
        'alter table "test_foreign_table_two" add constraint "fk_fkey_four" foreign key ("fkey_four") references "test_table_two" ("id")',
      ]);
      tester('mssql', [
        'CREATE TABLE [test_foreign_table_two] ([id] int identity(1,1) not null primary key, [fkey_two] int, [fkey_three] int, [fkey_four] int, ' +
          'CONSTRAINT [test_foreign_table_two_fkey_two_foreign] FOREIGN KEY ([fkey_two]) REFERENCES [test_table_two] ([id]), ' +
          'CONSTRAINT [fk_fkey_three] FOREIGN KEY ([fkey_three]) REFERENCES [test_table_two] ([id]), ' +
          'CONSTRAINT [fk_fkey_four] FOREIGN KEY ([fkey_four]) REFERENCES [test_table_two] ([id]))',
      ]);
    });
});
it('rejects setting foreign key where tableName is not typeof === string', function() {
  // `builder` is never reassigned, so declare it const.
  const builder = knex.schema.createTable(
    'invalid_inTable_param_test',
    function(table) {
      // Both of these must throw synchronously while the table is defined.
      const createInvalidUndefinedInTableSchema = function() {
        table
          .increments('id')
          .references('id')
          .inTable();
      };
      const createInvalidObjectInTableSchema = function() {
        table
          .integer('another_id')
          .references('id')
          .inTable({ tableName: 'this_should_fail' });
      };
      expect(createInvalidUndefinedInTableSchema).to.throw(TypeError);
      expect(createInvalidObjectInTableSchema).to.throw(TypeError);
      // This invalid reference is left in place so that compiling the
      // schema below also throws.
      table
        .integer('yet_another_id')
        .references('id')
        .inTable({ tableName: 'this_should_fail_too' });
    }
  );
  expect(() => builder.toSQL()).to.throw(TypeError);
});
2013-10-10 11:21:32 -04:00
// table.unique([...]) over two columns must emit a single composite unique
// constraint/index per dialect, and rows that differ in only one of the two
// columns must then be insertable.
it('allows for composite keys', function() {
2013-12-27 14:44:21 -05:00
  return knex.schema
    .createTable('composite_key_test', function(table) {
      table.integer('column_a');
      table.integer('column_b');
2014-04-16 02:50:19 -04:00
      table.text('details');
      table.tinyint('status');
2013-12-27 14:44:21 -05:00
      // The composite unique constraint under test.
      table.unique(['column_a', 'column_b']);
    })
    .testSql(function(tester) {
      tester('mysql', [
        'create table `composite_key_test` (`column_a` int, `column_b` int, `details` text, `status` tinyint) default character set utf8',
        'alter table `composite_key_test` add unique `composite_key_test_column_a_column_b_unique`(`column_a`, `column_b`)',
      ]);
      tester('pg', [
        'create table "composite_key_test" ("column_a" integer, "column_b" integer, "details" text, "status" smallint)',
        'alter table "composite_key_test" add constraint "composite_key_test_column_a_column_b_unique" unique ("column_a", "column_b")',
      ]);
      tester('pg-redshift', [
        'create table "composite_key_test" ("column_a" integer, "column_b" integer, "details" varchar(max), "status" smallint)',
        'alter table "composite_key_test" add constraint "composite_key_test_column_a_column_b_unique" unique ("column_a", "column_b")',
      ]);
      tester('sqlite3', [
        'create table `composite_key_test` (`column_a` integer, `column_b` integer, `details` text, `status` tinyint)',
        'create unique index `composite_key_test_column_a_column_b_unique` on `composite_key_test` (`column_a`, `column_b`)',
      ]);
      tester('oracledb', [
        'create table "composite_key_test" ("column_a" integer, "column_b" integer, "details" clob, "status" smallint)',
        // Oracle constraint names are hashed to fit the 30-char identifier limit.
        'alter table "composite_key_test" add constraint "zYmMt0VQwlLZ20XnrMicXZ0ufZk" unique ("column_a", "column_b")',
      ]);
      tester('mssql', [
        'CREATE TABLE [composite_key_test] ([column_a] int, [column_b] int, [details] nvarchar(max), [status] tinyint)',
        'CREATE UNIQUE INDEX [composite_key_test_column_a_column_b_unique] ON [composite_key_test] ([column_a], [column_b]) WHERE [column_a] IS NOT NULL AND [column_b] IS NOT NULL',
      ]);
    })
    .then(function() {
      // All three rows share column_a = 1; the insert only succeeds because
      // uniqueness is enforced on the (column_a, column_b) pair, not on
      // either column alone.
      return knex('composite_key_test').insert([
        {
          column_a: 1,
          column_b: 1,
          details: 'One, One, One',
          status: 1,
        },
        {
          column_a: 1,
          column_b: 2,
          details: 'One, Two, Zero',
          status: 0,
        },
        {
          column_a: 1,
          column_b: 3,
          details: 'One, Three, Zero',
          status: 0,
        },
      ]);
2013-12-27 14:44:21 -05:00
    });
2013-10-10 11:21:32 -04:00
});
// charset()/collate()/engine() are MySQL-specific table options: they only
// appear in the mysql DDL below; every other dialect ignores them and emits
// a plain CREATE TABLE.
it('is possible to set the table collation with table.charset and table.collate', function() {
2013-12-27 14:44:21 -05:00
  return knex.schema
    .createTable('charset_collate_test', function(table) {
      table.charset('latin1');
      table.collate('latin1_general_ci');
      table.engine('InnoDB');
      table.increments();
      table.integer('account_id');
      table.text('details');
      table.tinyint('status');
    })
    .testSql(function(tester) {
      tester('mysql', [
        'create table `charset_collate_test` (`id` int unsigned not null auto_increment primary key, `account_id` int, `details` text, `status` tinyint) default character set latin1 collate latin1_general_ci engine = InnoDB',
      ]);
      tester('pg', [
        'create table "charset_collate_test" ("id" serial primary key, "account_id" integer, "details" text, "status" smallint)',
      ]);
      tester('pg-redshift', [
        'create table "charset_collate_test" ("id" integer identity(1,1) primary key not null, "account_id" integer, "details" varchar(max), "status" smallint)',
      ]);
      tester('sqlite3', [
        'create table `charset_collate_test` (`id` integer not null primary key autoincrement, `account_id` integer, `details` text, `status` tinyint)',
      ]);
      tester('oracledb', [
        'create table "charset_collate_test" ("id" integer not null primary key, "account_id" integer, "details" clob, "status" smallint)',
        // Oracle has no auto-increment: knex emits a sequence plus a
        // BEFORE INSERT trigger that fills the primary key from it.
        'DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE (\'CREATE SEQUENCE "charset_collate_test_seq"\'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = \'P\' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = \'charset_collate_test\'; execute immediate (\'create or replace trigger "x9C3VzXH9urIKnTjm32JM7OvYYQ" BEFORE INSERT on "charset_collate_test" for each row declare checking number := 1; begin if (:new."\' || PK_NAME || \'" is null) then while checking >= 1 loop select "charset_collate_test_seq".nextval into :new."\' || PK_NAME || \'" from dual; select count("\' || PK_NAME || \'") into checking from "charset_collate_test" where "\' || PK_NAME || \'" = :new."\' || PK_NAME || \'"; end loop; end if; end;\'); END;',
      ]);
      tester('mssql', [
        'CREATE TABLE [charset_collate_test] ([id] int identity(1,1) not null primary key, [account_id] int, [details] nvarchar(max), [status] tinyint)',
2014-08-11 12:25:39 +02:00
      ]);
2013-12-27 14:44:21 -05:00
    });
});
it('sets booleans & defaults correctly', function() {
  // bool() columns: knex normalises JS booleans and the strings
  // 'true'/'false' to each dialect's '1'/'0' default literal, then a row
  // with a boolean value is inserted to prove the column accepts it.
  return knex.schema
    .createTable('bool_test', (table) => {
      table.bool('one');
      table.bool('two').defaultTo(false);
      table.bool('three').defaultTo(true);
      table.bool('four').defaultTo('true');
      table.bool('five').defaultTo('false');
    })
    .testSql((tester) => {
      tester('mysql', [
        "create table `bool_test` (`one` boolean, `two` boolean default '0', `three` boolean default '1', `four` boolean default '1', `five` boolean default '0') default character set utf8",
      ]);
      tester('pg', [
        'create table "bool_test" ("one" boolean, "two" boolean default \'0\', "three" boolean default \'1\', "four" boolean default \'1\', "five" boolean default \'0\')',
      ]);
      tester('pg-redshift', [
        'create table "bool_test" ("one" boolean, "two" boolean default \'0\', "three" boolean default \'1\', "four" boolean default \'1\', "five" boolean default \'0\')',
      ]);
      tester('sqlite3', [
        "create table `bool_test` (`one` boolean, `two` boolean default '0', `three` boolean default '1', `four` boolean default '1', `five` boolean default '0')",
      ]);
      tester('oracledb', [
        "create table \"bool_test\" (\"one\" number(1, 0) check (\"one\" in ('0', '1')), \"two\" number(1, 0) default '0' check (\"two\" in ('0', '1')), \"three\" number(1, 0) default '1' check (\"three\" in ('0', '1')), \"four\" number(1, 0) default '1' check (\"four\" in ('0', '1')), \"five\" number(1, 0) default '0' check (\"five\" in ('0', '1')))",
      ]);
      tester('mssql', [
        "CREATE TABLE [bool_test] ([one] bit, [two] bit default '0', [three] bit default '1', [four] bit default '1', [five] bit default '0')",
      ]);
    })
    .then(() => knex.insert({ one: false }).into('bool_test'));
});
2015-07-15 23:16:30 -03:00
// Table names beginning with a digit must be quoted correctly in every
// emitted statement: the CREATE TABLE itself, index/unique DDL, column
// comments, and Oracle's sequence/trigger auto-increment boilerplate.
it('accepts table names starting with numeric values', function() {
  return knex.schema
    .createTable('10_test_table', function(table) {
      table.bigIncrements('id');
      table.string('first_name').index();
      table.string('last_name');
      table
        .string('email')
        .unique()
        .nullable();
      table
        .integer('logins')
        .defaultTo(1)
        .index()
        // .comment() with no argument still emits a comment statement
        // (NULL on pg, empty string on oracledb) where supported.
        .comment();
    })
    .testSql(function(tester) {
2015-07-15 23:16:30 -03:00
      tester('mysql', [
        "create table `10_test_table` (`id` bigint unsigned not null auto_increment primary key, `first_name` varchar(255), `last_name` varchar(255), `email` varchar(255) null, `logins` int default '1') default character set utf8",
2015-07-15 23:16:30 -03:00
        'alter table `10_test_table` add index `10_test_table_first_name_index`(`first_name`)',
        'alter table `10_test_table` add unique `10_test_table_email_unique`(`email`)',
        'alter table `10_test_table` add index `10_test_table_logins_index`(`logins`)',
2015-07-15 23:16:30 -03:00
      ]);
      tester('pg', [
        'create table "10_test_table" ("id" bigserial primary key, "first_name" varchar(255), "last_name" varchar(255), "email" varchar(255) null, "logins" integer default \'1\')',
        'comment on column "10_test_table"."logins" is NULL',
2015-07-15 23:16:30 -03:00
        'create index "10_test_table_first_name_index" on "10_test_table" ("first_name")',
        'alter table "10_test_table" add constraint "10_test_table_email_unique" unique ("email")',
        'create index "10_test_table_logins_index" on "10_test_table" ("logins")',
2015-07-15 23:16:30 -03:00
      ]);
      tester('sqlite3', [
        "create table `10_test_table` (`id` integer not null primary key autoincrement, `first_name` varchar(255), `last_name` varchar(255), `email` varchar(255) null, `logins` integer default '1')",
        'create index `10_test_table_first_name_index` on `10_test_table` (`first_name`)',
        'create unique index `10_test_table_email_unique` on `10_test_table` (`email`)',
        'create index `10_test_table_logins_index` on `10_test_table` (`logins`)',
2015-07-15 23:16:30 -03:00
      ]);
      tester('oracledb', [
2015-07-15 23:16:30 -03:00
        'create table "10_test_table" ("id" number(20, 0) not null primary key, "first_name" varchar2(255), "last_name" varchar2(255), "email" varchar2(255) null, "logins" integer default \'1\')',
        'DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE (\'CREATE SEQUENCE "10_test_table_seq"\'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = \'P\' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = \'10_test_table\'; execute immediate (\'create or replace trigger "10_test_table_autoinc_trg" BEFORE INSERT on "10_test_table" for each row declare checking number := 1; begin if (:new."\' || PK_NAME || \'" is null) then while checking >= 1 loop select "10_test_table_seq".nextval into :new."\' || PK_NAME || \'" from dual; select count("\' || PK_NAME || \'") into checking from "10_test_table" where "\' || PK_NAME || \'" = :new."\' || PK_NAME || \'"; end loop; end if; end;\'); END;',
        'comment on column "10_test_table"."logins" is \'\'',
        'create index "10_test_table_first_name_index" on "10_test_table" ("first_name")',
2015-07-15 23:16:30 -03:00
        'alter table "10_test_table" add constraint "10_test_table_email_unique" unique ("email")',
        'create index "10_test_table_logins_index" on "10_test_table" ("logins")',
2015-07-15 23:16:30 -03:00
      ]);
    });
});
2013-09-12 13:30:47 -04:00
});
describe('table', function() {
  // Adding, altering and dropping columns/indexes on existing tables.
  it('Callback function must be supplied', function() {
    const withoutCallback = () =>
      knex.schema.createTable('callback_must_be_supplied').toString();
    const withCallback = () =>
      knex.schema
        .createTable('callback_must_be_supplied', () => {})
        .toString();
    expect(withoutCallback).to.throw(TypeError);
    expect(withCallback).to.not.throw(TypeError);
  });

  it('allows adding a field', function() {
    return knex.schema.table('test_table_two', (table) => {
      table.json('json_data', true);
    });
  });

  it('allows adding multiple columns at once', function() {
    // Skipped on Redshift, which cannot add several columns in one go.
    if (/redshift/i.test(knex.client.driverName)) {
      return;
    }
    return knex.schema
      .table('test_table_two', (table) => {
        table.string('one');
        table.string('two');
        table.string('three');
      })
      .then(() =>
        knex.schema.table('test_table_two', (table) => {
          table.dropColumn('one');
          table.dropColumn('two');
          table.dropColumn('three');
        })
      );
  });

  it('allows alter column syntax', function() {
    // .alter() is exercised only on the drivers that support it here.
    if (/sqlite3|pg-redshift|mssql|oracledb/.test(knex.client.driverName)) {
      return;
    }
    return knex.schema
      .table('test_table_two', (table) => {
        table
          .integer('remove_not_null')
          .notNull()
          .defaultTo(1);
        table
          .string('remove_default')
          .notNull()
          .defaultTo(1);
        table
          .dateTime('datetime_to_date')
          .notNull()
          .defaultTo(knex.fn.now());
      })
      .then(() =>
        knex.schema.table('test_table_two', (table) => {
          // Drop the NOT NULL but keep the default...
          table
            .integer('remove_not_null')
            .defaultTo(1)
            .alter();
          // ...drop the default but keep NOT NULL...
          table
            .integer('remove_default')
            .notNull()
            .alter();
          // ...and change the column's type.
          table.date('datetime_to_date').alter();
        })
      )
      .then(() => knex('test_table_two').columnInfo())
      .then((info) => {
        expect(info.remove_not_null.nullable).to.equal(true);
        expect(info.remove_not_null.defaultValue).to.not.equal(null);
        expect(info.remove_default.nullable).to.equal(false);
        expect(info.remove_default.defaultValue).to.equal(null);
        expect(info.remove_default.type).to.contains('int');
        // Clean up the columns added by this test.
        return knex.schema.table('test_table_two', (table) => {
          table.dropColumn('remove_default');
          table.dropColumn('remove_not_null');
          table.dropColumn('datetime_to_date');
        });
      });
  });

  it('allows adding a field with custom collation after another field', function() {
    return knex.schema
      .table('test_table_two', (table) => {
        table.string('ref_column').after('json_data');
      })
      .then(() =>
        knex.schema.table('test_table_two', (table) => {
          table
            .string('after_column')
            .after('ref_column')
            .collate('utf8_bin');
        })
      )
      .then(() =>
        knex.schema.table('test_table_two', (table) => {
          table.dropColumn('ref_column');
          table.dropColumn('after_column');
        })
      );
  });

  it('allows adding a field with custom collation first', function() {
    return knex.schema
      .table('test_table_two', (table) => {
        table
          .string('first_column')
          .first()
          .collate('utf8_bin');
      })
      .then(() =>
        knex.schema.table('test_table_two', (table) => {
          table.dropColumn('first_column');
        })
      );
  });

  it('allows changing a field', function() {
    return knex.schema.table('test_table_one', (table) => {
      table.string('phone').nullable();
    });
  });

  it('allows dropping a unique index', function() {
    return knex.schema.table('composite_key_test', (table) => {
      table.dropUnique(['column_a', 'column_b']);
    });
  });

  it('allows dropping a index', function() {
    return knex.schema.table('test_table_one', (table) => {
      table.dropIndex('first_name');
    });
  });
});
describe('hasTable', function() {
  // hasTable resolves with a boolean and never rejects for missing tables.
  it('checks whether a table exists', function() {
    return knex.schema.hasTable('test_table_two').then((exists) => {
      expect(exists).to.equal(true);
    });
  });

  it('should be false if a table does not exists', function() {
    return knex.schema.hasTable('this_table_is_fake').then((exists) => {
      expect(exists).to.equal(false);
    });
  });

  it('should be false whether a parameter is not specified', function() {
    // An empty table name simply resolves false instead of erroring.
    return knex.schema.hasTable('').then((exists) => {
      expect(exists).to.equal(false);
    });
  });
});
describe('renameTable', function() {
  // Later tests (e.g. hasColumn) refer to the renamed 'accounts' table.
  it('renames the table from one to another', function() {
    return knex.schema.renameTable('test_table_one', 'accounts');
  });
});
describe('dropTable', function() {
  it('should drop a table', function() {
    return knex.schema.dropTable('test_table_three').then(() => {
      // Also drop test_foreign_table_two here so its foreign key
      // constraints into test_table_two don't linger.
      return knex.schema.dropTable('test_foreign_table_two');
    });
  });
});
2013-09-12 13:30:47 -04:00
describe('hasColumn', function() {
  // 'accounts' is test_table_one after the renameTable test above ran.
  it('checks whether a column exists, resolving with a boolean', function() {
    return knex.schema
      .hasColumn('accounts', 'first_name')
      .then((exists) => {
        expect(exists).to.equal(true);
      });
  });
});
describe('addColumn', function() {
  // Column ordering (FIRST/AFTER) and column comments are MySQL features.
  describe('mysql only', function() {
    // Skip registering these tests entirely on non-MySQL drivers.
    if (!knex || !knex.client || !/mysql/i.test(knex.client.driverName)) {
      return Promise.resolve();
    }
    before(function() {
      // Create a two-column table, then alter it: comment the existing
      // columns in place and add new columns at explicit positions.
      return knex.schema
        .createTable('add_column_test_mysql', function(tbl) {
          tbl.integer('field_foo');
          tbl.integer('field_bar');
        })
        .then(function() {
          return knex.schema.alterTable('add_column_test_mysql', function(
            tbl
          ) {
            tbl
              .integer('field_foo')
              .comment('foo')
              .alter();
            tbl
              .integer('field_bar')
              .comment('bar')
              .alter();
            // New column placed before all others.
            tbl
              .integer('field_first')
              .first()
              .comment('First');
            // New column placed immediately after field_foo.
            tbl
              .integer('field_after_foo')
              .after('field_foo')
              .comment('After');
            tbl
              .increments('field_nondefault_increments')
              .comment('Comment on increment col');
          });
        });
    });
    after(function() {
      return knex.schema.dropTable('add_column_test_mysql');
    });
    it('should columns order be correctly with after and first', function() {
      // Parse SHOW CREATE TABLE output directly, because:
      return knex
        .raw('SHOW CREATE TABLE `add_column_test_mysql`')
        .then(function(schema) {
          // .columnInfo() keys does not guaranteed fields order.
          const fields = schema[0][0]['Create Table']
            .split('\n')
            .filter(function(e) {
              // Keep only the `field_...` column definition lines.
              return e.trim().indexOf('`field_') === 0;
            })
            .map(function(e) {
              return e.trim();
            })
            .map(function(e) {
              // Extract the backtick-quoted column name.
              return e.slice(1, e.slice(1).indexOf('`') + 1);
            });
          // Fields order
          expect(fields[0]).to.equal('field_first');
          expect(fields[1]).to.equal('field_foo');
          expect(fields[2]).to.equal('field_after_foo');
          expect(fields[3]).to.equal('field_bar');
          expect(fields[4]).to.equal('field_nondefault_increments');
          // .columnInfo() does not included fields comment.
          const comments = schema[0][0]['Create Table']
            .split('\n')
            .filter(function(e) {
              return e.trim().indexOf('`field_') === 0;
            })
            .map(function(e) {
              // Extract the single-quoted COMMENT text from each line.
              return e.slice(e.indexOf("'")).trim();
            })
            .map(function(e) {
              return e.slice(1, e.slice(1).indexOf("'") + 1);
            });
          // Fields comment
          expect(comments[0]).to.equal('First');
          expect(comments[1]).to.equal('foo');
          expect(comments[2]).to.equal('After');
          expect(comments[3]).to.equal('bar');
          expect(comments[4]).to.equal('Comment on increment col');
        });
    });
  });
});
describe('renameColumn', function() {
  before(function() {
    // Fixtures: a self-referencing table, a table with an FK into it, and a
    // table with defaults/not-null used by the #933 regression test.
    return knex.schema
      .createTable('rename_column_test', (tbl) => {
        tbl
          .increments('id_test')
          .unsigned()
          .primary();
        tbl
          .integer('parent_id_test')
          .unsigned()
          .references('id_test')
          .inTable('rename_column_test');
      })
      .createTable('rename_column_foreign_test', (tbl) => {
        tbl
          .increments('id')
          .unsigned()
          .primary();
        tbl
          .integer('foreign_id_test')
          .unsigned()
          .references('id_test')
          .inTable('rename_column_test');
      })
      .createTable('rename_col_test', (tbl) => {
        tbl.integer('colnameint').defaultTo(1);
        tbl
          .string('colnamestring')
          .defaultTo('knex')
          .notNullable();
      })
      .then(() => {
        // without data, the column isn't found??
        return knex
          .insert({ parent_id_test: 1 })
          .into('rename_column_test');
      });
  });

  after(function() {
    return knex.schema
      .dropTable('rename_column_foreign_test')
      .dropTable('rename_column_test')
      .dropTable('rename_col_test');
  });

  it('renames the column', function() {
    return knex.schema
      .table('rename_column_test', (tbl) =>
        tbl.renameColumn('id_test', 'id')
      )
      .then(() => knex.schema.hasColumn('rename_column_test', 'id'))
      .then((exists) => {
        expect(exists).to.equal(true);
      });
  });

  it('successfully renames a column referenced in a foreign key', function() {
    return knex.schema.table('rename_column_test', (tbl) => {
      tbl.renameColumn('parent_id_test', 'parent_id');
    });
  });

  it('successfully renames a column referenced by another table', function() {
    return knex.schema.table('rename_column_test', (tbl) => {
      tbl.renameColumn('id', 'id_new');
    });
  });

  it('#933 - .renameColumn should not drop null or default value', function() {
    const tableName = 'rename_col_test';
    return knex.transaction(function(tr) {
      const getColInfo = () => tr(tableName).columnInfo();
      return getColInfo()
        .then((colInfo) => {
          expect(String(colInfo.colnameint.defaultValue)).to.contain('1');
          // Using contain because of different response per dialect.
          // IE mysql 'knex', postgres 'knex::character varying'
          expect(colInfo.colnamestring.defaultValue).to.contain('knex');
          expect(colInfo.colnamestring.nullable).to.equal(false);
          return tr.schema.table(tableName, (table) => {
            table.renameColumn('colnameint', 'colnameintchanged');
            table.renameColumn('colnamestring', 'colnamestringchanged');
          });
        })
        .then(getColInfo)
        .then((columnInfo) => {
          // Defaults and not-null must survive the rename.
          expect(
            String(columnInfo.colnameintchanged.defaultValue)
          ).to.contain('1');
          expect(columnInfo.colnamestringchanged.defaultValue).to.contain(
            'knex'
          );
          expect(columnInfo.colnamestringchanged.nullable).to.equal(false);
        });
    });
  });
});
describe('withSchema', function() {
  describe('mssql only', function() {
    // Register these tests only when running against SQL Server.
    if (!knex || !knex.client || !/mssql/i.test(knex.client.dialect)) {
      return Promise.resolve();
    }

    const columnName = 'test';

    // Resolves after asserting whether `tableName` exists in `schema`.
    function checkTable(schema, tableName, expected) {
      return knex.schema
        .withSchema(schema)
        .hasTable(tableName)
        .then(function(exists) {
          return expect(exists).to.equal(expected);
        });
    }

    // Creates `tableName` in `schema` with a single string column.
    function createTable(schema, tableName) {
      return knex.schema
        .withSchema(schema)
        .createTable(tableName, (table) => {
          table.string(columnName);
        });
    }

    // Asserts the string column is visible through the given schema.
    function checkColumn(schema, tableName) {
      return knex.schema
        .withSchema(schema)
        .hasColumn(tableName, columnName)
        .then((exists) => {
          return expect(exists).to.equal(true);
        });
    }

    function renameTable(schema, from, to) {
      return knex.schema.withSchema(schema).renameTable(from, to);
    }

    function createSchema(schema) {
      return knex.schema.raw('CREATE SCHEMA ' + schema);
    }

    // SQL Server's default schema is `dbo`. The previous value here,
    // 'public', is the PostgreSQL default — checking a non-existent
    // 'public' schema made the default-schema assertions vacuously true.
    const defaultSchemaName = 'dbo';
    const testSchemaName = 'test';

    before(function() {
      return createSchema(testSchemaName);
    });

    after(function() {
      return knex.schema.raw('DROP SCHEMA ' + testSchemaName);
    });

    it('should not find non-existent tables', function() {
      return checkTable(testSchemaName, 'test', false).then(() =>
        checkTable(defaultSchemaName, 'test', false)
      );
    });

    it('should create and drop tables', function() {
      return createTable(testSchemaName, 'test')
        .then(() => checkColumn(testSchemaName, 'test'))
        .then(() => checkTable(testSchemaName, 'test', true))
        // The table must only appear in the schema it was created in.
        .then(() => checkTable(defaultSchemaName, 'test', false))
        .then(() =>
          knex.schema.withSchema(testSchemaName).dropTableIfExists('test')
        )
        .then(() => checkTable(testSchemaName, 'test', false));
    });

    it('should rename tables', function() {
      return createTable(testSchemaName, 'test')
        .then(() => renameTable(testSchemaName, 'test', 'test2'))
        .then(() => checkColumn(testSchemaName, 'test2'))
        .then(() => checkTable(defaultSchemaName, 'test2', false))
        .then(() => checkTable(testSchemaName, 'test', false))
        .then(() => checkTable(testSchemaName, 'test2', true))
        .then(() =>
          knex.schema.withSchema(testSchemaName).dropTableIfExists('test2')
        );
    });
  });
});
it('should warn attempting to create primary from nonexistent columns', function() {
// Redshift only
if (!knex || !knex.client || !/redshift/i.test(knex.client.driverName)) {
Add redshift support without changing cli or package.json (#2233) * Add a Redshift dialect that inherits from Postgres. * Turn .index() and .dropIndex() into no-ops with warnings in the Redshift dialect. * Update the Redshift dialect to be compatible with master. * Update package.json * Disable liftoff cli * Remove the CLI * Add lib to the repo * Allow the escaping of named bindings. * Update dist * Update the Redshift dialect’s instantiation of the query and column compilers. * Update the distribution * Fix a merge conflict * Take lib back out * Trying to bring back in line with tgreisser/knex * Add npm 5 package-lock * Bring cli.js back in line * Bring cli.js back in line * Progress commit on redshift integration tests * Revert "Progress commit on redshift integration tests" This reverts commit 207e31635c638853dec54ce0580d34559ba5a54c. * Progress commit * Working not null on primary columns in createTable * Working redshift unit tests * Working unit and integration tests, still need to fix migration tests * Brought datatypes more in line with what redshift actually supports * Added query compiler unit tests * Add a hacky returning clause for redshift ugh * Working migration integration tests * Working insert integration tests * Allow multiple insert returning values * Working select integration tests * Working join integration tests * Working aggregate integration tests * All integration suite tests working * Put docker index for reconnect tests back * Redshift does not support insert...returning, there does not seem to be a way around that, therefore accept it and test accordingly * Leave redshift integration tests in place, but do not run them by default * Fix mysql order by test * Fix more tests * Change test db name to knex_test for consistency * Address PR comments
2018-02-03 08:33:02 -05:00
return Promise.resolve();
}
const tableName = 'no_test_column';
const constraintName = 'testconstraintname';
return knex.transaction(function(tr) {
return tr.schema
.dropTableIfExists(tableName)
.then(function() {
return tr.schema.createTable(tableName, function(t) {
t.string('test_zero').notNullable();
t.string('test_one').notNullable();
});
})
.then(function() {
return tr.schema.table(tableName, function(u) {
u.primary(['test_one', 'test_two'], constraintName);
});
})
.then(function() {
throw new Error('should have failed');
})
.catch(function(err) {
expect(err.code).to.equal('42703');
expect(err.message).to.equal(
`alter table "${tableName}" add constraint "${constraintName}" primary key ("test_one", "test_two") - column "test_two" named in key does not exist`
);
})
.then(function(res) {
return knex.schema.dropTableIfExists(tableName);
Add redshift support without changing cli or package.json (#2233) * Add a Redshift dialect that inherits from Postgres. * Turn .index() and .dropIndex() into no-ops with warnings in the Redshift dialect. * Update the Redshift dialect to be compatible with master. * Update package.json * Disable liftoff cli * Remove the CLI * Add lib to the repo * Allow the escaping of named bindings. * Update dist * Update the Redshift dialect’s instantiation of the query and column compilers. * Update the distribution * Fix a merge conflict * Take lib back out * Trying to bring back in line with tgreisser/knex * Add npm 5 package-lock * Bring cli.js back in line * Bring cli.js back in line * Progress commit on redshift integration tests * Revert "Progress commit on redshift integration tests" This reverts commit 207e31635c638853dec54ce0580d34559ba5a54c. * Progress commit * Working not null on primary columns in createTable * Working redshift unit tests * Working unit and integration tests, still need to fix migration tests * Brought datatypes more in line with what redshift actually supports * Added query compiler unit tests * Add a hacky returning clause for redshift ugh * Working migration integration tests * Working insert integration tests * Allow multiple insert returning values * Working select integration tests * Working join integration tests * Working aggregate integration tests * All integration suite tests working * Put docker index for reconnect tests back * Redshift does not support insert...returning, there does not seem to be a way around that, therefore accept it and test accordingly * Leave redshift integration tests in place, but do not run them by default * Fix mysql order by test * Fix more tests * Change test db name to knex_test for consistency * Address PR comments
2018-02-03 08:33:02 -05:00
});
});
});
//Unit tests checks SQL -- This will test running those queries, no hard assertions here.
it('#1430 - .primary() & .dropPrimary() same for all dialects', function() {
if (/sqlite/i.test(knex.client.driverName)) {
return Promise.resolve();
}
Add redshift support without changing cli or package.json (#2233) * Add a Redshift dialect that inherits from Postgres. * Turn .index() and .dropIndex() into no-ops with warnings in the Redshift dialect. * Update the Redshift dialect to be compatible with master. * Update package.json * Disable liftoff cli * Remove the CLI * Add lib to the repo * Allow the escaping of named bindings. * Update dist * Update the Redshift dialect’s instantiation of the query and column compilers. * Update the distribution * Fix a merge conflict * Take lib back out * Trying to bring back in line with tgreisser/knex * Add npm 5 package-lock * Bring cli.js back in line * Bring cli.js back in line * Progress commit on redshift integration tests * Revert "Progress commit on redshift integration tests" This reverts commit 207e31635c638853dec54ce0580d34559ba5a54c. * Progress commit * Working not null on primary columns in createTable * Working redshift unit tests * Working unit and integration tests, still need to fix migration tests * Brought datatypes more in line with what redshift actually supports * Added query compiler unit tests * Add a hacky returning clause for redshift ugh * Working migration integration tests * Working insert integration tests * Allow multiple insert returning values * Working select integration tests * Working join integration tests * Working aggregate integration tests * All integration suite tests working * Put docker index for reconnect tests back * Redshift does not support insert...returning, there does not seem to be a way around that, therefore accept it and test accordingly * Leave redshift integration tests in place, but do not run them by default * Fix mysql order by test * Fix more tests * Change test db name to knex_test for consistency * Address PR comments
2018-02-03 08:33:02 -05:00
const constraintName = 'testconstraintname';
const tableName = 'primarytest';
return knex.transaction(function(tr) {
return tr.schema
.dropTableIfExists(tableName)
.then(function() {
return tr.schema.createTable(tableName, function(table) {
table.string('test').primary(constraintName);
table.string('test2').notNullable();
});
})
.then(function(res) {
return tr.schema.table(tableName, function(table) {
table.dropPrimary(constraintName);
});
Add redshift support without changing cli or package.json (#2233) * Add a Redshift dialect that inherits from Postgres. * Turn .index() and .dropIndex() into no-ops with warnings in the Redshift dialect. * Update the Redshift dialect to be compatible with master. * Update package.json * Disable liftoff cli * Remove the CLI * Add lib to the repo * Allow the escaping of named bindings. * Update dist * Update the Redshift dialect’s instantiation of the query and column compilers. * Update the distribution * Fix a merge conflict * Take lib back out * Trying to bring back in line with tgreisser/knex * Add npm 5 package-lock * Bring cli.js back in line * Bring cli.js back in line * Progress commit on redshift integration tests * Revert "Progress commit on redshift integration tests" This reverts commit 207e31635c638853dec54ce0580d34559ba5a54c. * Progress commit * Working not null on primary columns in createTable * Working redshift unit tests * Working unit and integration tests, still need to fix migration tests * Brought datatypes more in line with what redshift actually supports * Added query compiler unit tests * Add a hacky returning clause for redshift ugh * Working migration integration tests * Working insert integration tests * Allow multiple insert returning values * Working select integration tests * Working join integration tests * Working aggregate integration tests * All integration suite tests working * Put docker index for reconnect tests back * Redshift does not support insert...returning, there does not seem to be a way around that, therefore accept it and test accordingly * Leave redshift integration tests in place, but do not run them by default * Fix mysql order by test * Fix more tests * Change test db name to knex_test for consistency * Address PR comments
2018-02-03 08:33:02 -05:00
})
.then(function() {
return tr.schema.table(tableName, function(table) {
table.primary(['test', 'test2'], constraintName);
});
});
});
});
describe('invalid field', function() {
describe('sqlite3 only', function() {
Add redshift support without changing cli or package.json (#2233) * Add a Redshift dialect that inherits from Postgres. * Turn .index() and .dropIndex() into no-ops with warnings in the Redshift dialect. * Update the Redshift dialect to be compatible with master. * Update package.json * Disable liftoff cli * Remove the CLI * Add lib to the repo * Allow the escaping of named bindings. * Update dist * Update the Redshift dialect’s instantiation of the query and column compilers. * Update the distribution * Fix a merge conflict * Take lib back out * Trying to bring back in line with tgreisser/knex * Add npm 5 package-lock * Bring cli.js back in line * Bring cli.js back in line * Progress commit on redshift integration tests * Revert "Progress commit on redshift integration tests" This reverts commit 207e31635c638853dec54ce0580d34559ba5a54c. * Progress commit * Working not null on primary columns in createTable * Working redshift unit tests * Working unit and integration tests, still need to fix migration tests * Brought datatypes more in line with what redshift actually supports * Added query compiler unit tests * Add a hacky returning clause for redshift ugh * Working migration integration tests * Working insert integration tests * Allow multiple insert returning values * Working select integration tests * Working join integration tests * Working aggregate integration tests * All integration suite tests working * Put docker index for reconnect tests back * Redshift does not support insert...returning, there does not seem to be a way around that, therefore accept it and test accordingly * Leave redshift integration tests in place, but do not run them by default * Fix mysql order by test * Fix more tests * Change test db name to knex_test for consistency * Address PR comments
2018-02-03 08:33:02 -05:00
const tableName = 'invalid_field_test_sqlite3';
const fieldName = 'field_foo';
if (!knex || !knex.client || !/sqlite3/i.test(knex.client.driverName)) {
return Promise.resolve();
}
before(function() {
return knex.schema.createTable(tableName, function(tbl) {
tbl.integer(fieldName);
});
});
after(function() {
return knex.schema.dropTable(tableName);
});
it('should return empty resultset when referencing an existent column', function() {
return knex(tableName)
.select()
.where(fieldName, 'something')
.then(function(rows) {
expect(rows.length).to.equal(0);
});
});
it('should throw when referencing a non-existent column', function() {
return knex(tableName)
.select()
.where(fieldName + 'foo', 'something')
.then(function() {
throw new Error('should have failed');
})
.catch(function(err) {
expect(err.code).to.equal('SQLITE_ERROR');
});
});
});
});
});
};