Clean up and add schema storage with a hash to compare and sync in one direction only
parent 130aade450
commit 0d9ba0e5fd
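In short, the commit persists the generated schema together with an md5 hash in a dedicated table and, on startup, recomputes and compares that hash instead of diffing the application schema against the live database in both directions: the application schema is pushed to the database when its hash changes, and the live database is no longer diffed back. A minimal standalone sketch of the hash idea (the schema object below is hypothetical, not Strapi's real schema shape):

'use strict';

const crypto = require('crypto');

// Same hashing approach as the new storage module: md5 over the serialized schema.
const hashSchema = schema => crypto.createHash('md5').update(JSON.stringify(schema)).digest('hex');

const schema = { tables: [{ name: 'articles', columns: [{ name: 'title', type: 'string' }] }] };
const storedHash = hashSchema(schema); // persisted alongside the schema JSON

// On a later startup: recompute and compare instead of diffing against the database.
console.log(storedHash === hashSchema(schema)); // true -> skip the sync entirely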
@@ -72,10 +72,6 @@ const toStrapiType = column => {
      return { type: 'text', args: ['longtext'] };
    }
    case 'varchar': {
      if (Number(column.character_maximum_length) === 255) {
        return { type: 'string', args: [] };
      }

      return { type: 'string', args: [column.character_maximum_length] };
    }
    case 'datetime': {
@@ -77,10 +77,6 @@ const toStrapiType = column => {
      return { type: 'boolean' };
    }
    case 'character': {
      if (Number(column.character_maximum_length) === 255) {
        return { type: 'string', args: [] };
      }

      return { type: 'string', args: [column.character_maximum_length] };
    }
    case 'timestamp': {
@@ -21,11 +21,14 @@ const toStrapiType = column => {
      return { type: 'integer' };
    }
    case 'float': {
      return { type: 'float', args: [10, 2] };
    }
    case 'bigint': {
      return { type: 'bigInteger' };
    }
    case 'varchar': {
      const length = type.slice(8, type.length - 1);
      if (Number(length) === 255) {
        return { type: 'string', args: [] };
      }

      return { type: 'string', args: [Number(length)] };
    }
@@ -47,12 +50,6 @@ const toStrapiType = column => {
    case 'time': {
      return { type: 'time', args: [{ precision: 3 }] };
    }
    case 'float': {
      return { type: 'float', args: [10, 2] };
    }
    case 'bigint': {
      return { type: 'bigInteger' };
    }
    // TODO: enum
    default: {
      return { type: 'specificType', args: [column.data_type] };
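For illustration only (not part of the commit), here is how a column row coming from a dialect's schema inspector would travel through the toStrapiType mapping above; the column shape is limited to the fields the hunks actually read (data_type, character_maximum_length), and the example values are hypothetical:

// Hypothetical inspector rows and the Strapi types they would map to:
const plainVarchar = { data_type: 'varchar', character_maximum_length: 180 };
// -> { type: 'string', args: [180] }

const defaultVarchar = { data_type: 'varchar', character_maximum_length: 255 };
// -> { type: 'string', args: [] }   (255 is treated as the default string length)

const unknownType = { data_type: 'geometry', character_maximum_length: null };
// -> { type: 'specificType', args: ['geometry'] }   (the fallback default case)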
@@ -72,6 +72,7 @@ const createMigrationProvider = db => {
      const sql = fse.readFileSync(path, 'utf8');

      return {
        // TODO: check multiple commands in one sql statement
        up: knex => knex.raw(sql),
        down() {},
      };
@@ -86,26 +87,17 @@ const createMigrationProvider = db => {
  // TODO: add internal migrations for core & plugins
  // How do we intersperse them

  // const internalMigrations = new Umzug({
  //   storage: new CustomStorage({ db, tableName: 'strapi_internal_migrations' }),
  //   migrations: {
  //     path: path.join(__dirname, 'migrations'),
  //     params: [db],
  //     wrap: fn => db => {
  //       return db.connection.transaction(trx => Promise.resolve(fn(trx)));
  //     },
  //   },
  // });

  return {
    async shouldRun() {
      const pending = await migrations.pending();

      return pending.length > 0;
    },
    async up() {
      // await migrations.down();
      await migrations.up();
      // await internalMigrations.up();
    },
    async down() {
      await migrations.down();
      // await internalMigrations.down();
    },
  };
};
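As a hedged sketch of how this provider would be consumed (the call site and require path are assumptions; the commit itself only shows the provider, and the sync() hunk further down consumes the same interface through db.migration):

const createMigrationProvider = require('./migrations'); // assumed path

async function runPendingMigrations(db) {
  const migrations = createMigrationProvider(db);

  if (await migrations.shouldRun()) {
    await migrations.up(); // runs the pending user SQL migration files via knex.raw
  }
}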
@@ -2,6 +2,8 @@

const _ = require('lodash/fp');

const RESERVED_TABLE_NAMES = ['strapi_migrations', 'strapi_database_schema'];

const statuses = {
  CHANGED: 'CHANGED',
  UNCHANGED: 'UNCHANGED',
@@ -135,17 +137,17 @@ module.exports = db => {
  const diffColumns = (oldColumn, column) => {
    const changes = [];

    // NOTE: we might want to move that to the schema generation instead
    const isIgnoredType = ['increments', 'enum'].includes(column.type);

    // NOTE: enum aren't updated, they need to be dropped & recreated. Knex doesn't handle it
    const oldType = oldColumn.type;
    const type = db.dialect.getSqlType(column.type);
-    if (oldType !== type && !['increments', 'enum'].includes(type)) {

+    if (oldType !== type && !isIgnoredType) {
      changes.push('type');
    }

    if (!_.isEqual(oldColumn.args, column.args) && !['increments', 'enum'].includes(column.type)) {
      changes.push('args');
    }
    // NOTE: compare args at some point and split them into specific properties instead

    if (oldColumn.notNullable !== column.notNullable) {
      changes.push('notNullable');
@@ -345,7 +347,10 @@ module.exports = db => {
    }

    for (const srcTable of srcSchema.tables) {
-      if (!helpers.hasTable(destSchema, srcTable.name) && srcTable.name !== 'strapi_migrations') {
+      if (
+        !helpers.hasTable(destSchema, srcTable.name) &&
+        !RESERVED_TABLE_NAMES.includes(srcTable.name)
+      ) {
        removedTables.push(srcTable);
      }
    }
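To make the column comparison above concrete, an illustration of the inputs diffColumns receives (the object shapes are inferred from the fields it reads; anything beyond type/args/notNullable is an assumption):

// Persisted column vs. freshly generated column:
const oldColumn = { type: 'varchar', args: [255], notNullable: false };
const column = { type: 'text', args: [], notNullable: true };

// Assuming db.dialect.getSqlType('text') does not resolve to 'varchar', the
// changes array collected above would include 'type', 'args' and 'notNullable'.

RESERVED_TABLE_NAMES generalizes the previous hard-coded 'strapi_migrations' check so that the new strapi_database_schema table is never reported as a removed table.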
@@ -1,7 +1,10 @@
'use strict';

const debug = require('debug')('strapi::database');

const createSchemaBuilder = require('./builder');
-const createSchemaDiff = require('./schema-diff');
+const createSchemaDiff = require('./diff');
const createSchemaStorage = require('./storage');
const { metadataToSchema } = require('./schema');

const createSchemaProvider = db => {
@@ -10,11 +13,14 @@ const createSchemaProvider = db => {
  return {
    builder: createSchemaBuilder(db),
    schemaDiff: createSchemaDiff(db),
    schemaStorage: createSchemaStorage(db),

    /**
     * Drops the database schema
     */
    async drop() {
      debug('Dropping database schema');

      const DBSchema = await db.dialect.schemaInspector.getSchema();
      await this.builder.dropSchema(DBSchema);
    },
@@ -23,6 +29,7 @@ const createSchemaProvider = db => {
     * Creates the database schema
     */
    async create() {
      debug('Created database schema');
      await this.builder.createSchema(schema);
    },
@@ -30,29 +37,55 @@ const createSchemaProvider = db => {
     * Resets the database schema
     */
    async reset() {
      debug('Resetting database schema');
      await this.drop();
      await this.create();
    },

    async syncSchema() {
      debug('Synchronizing database schema');

      const DBSchema = await db.dialect.schemaInspector.getSchema();

      const { status, diff } = this.schemaDiff.diff(DBSchema, schema);

      console.log(diff.tables.updated.flatMap(t => t.columns.updated).map(x => x.object));

      if (status === 'CHANGED') {
        await this.builder.updateSchema(diff);
      }

      await this.schemaStorage.add(schema);
    },

    // TODO: support options to migrate softly or forcefully
    // TODO: support option to disable auto migration & run a CLI command instead to avoid doing it at startup
    // TODO: Allow keeping extra indexes / extra tables / extra columns (globally or on a per table basis)
    async sync() {
      // Run users migrations
-      db.migration.up();
+      if (await db.migration.shouldRun()) {
+        debug('Found migrations to run');
+        await db.migration.up();

      // Read schema from DB
      const DBSchema = await db.dialect.schemaInspector.getSchema();

      // Diff schema
      const { status, diff } = this.schemaDiff.diff(DBSchema, schema);

      if (status === 'UNCHANGED') {
-        return;
+        return this.syncSchema();
      }

      // Update schema
      await this.builder.updateSchema(diff);
      const oldSchema = await this.schemaStorage.read();

      if (!oldSchema) {
        debug('Schema not persisted yet');
        return this.syncSchema();
      }

      const { hash: oldHash } = oldSchema;
      const hash = await this.schemaStorage.hashSchema(schema);

      if (oldHash !== hash) {
        debug('Schema changed');
        return this.syncSchema();
      }

      debug('Schema unchanged');
      return;
    },
  };
};
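Because the hunk above interleaves the old and new bodies of sync(), here is a condensed paraphrase of the new flow only (the destructured parameters stand in for db.migration, this.schemaStorage, the generated schema and this.syncSchema; they are not a real API):

async function decideSync({ migration, schemaStorage, schema, syncSchema }) {
  // Run user migrations first; if any ran, fall through to a full sync.
  if (await migration.shouldRun()) {
    await migration.up();
    return syncSchema();
  }

  const oldSchema = await schemaStorage.read();
  if (!oldSchema) {
    return syncSchema(); // nothing persisted yet
  }

  const hash = schemaStorage.hashSchema(schema);
  if (oldSchema.hash !== hash) {
    return syncSchema(); // schema changed since the last persisted hash
  }
  // hashes match: leave the database untouched
}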
packages/core/database/lib/schema/storage.js (new file, 70 lines)
@@ -0,0 +1,70 @@
'use strict';

const crypto = require('crypto');

const TABLE_NAME = 'strapi_database_schema';

module.exports = db => {
  const hasSchemaTable = () => db.connection.schema.hasTable(TABLE_NAME);

  const createSchemaTable = () => {
    return db.connection.schema.createTable(TABLE_NAME, t => {
      t.increments('id');
      t.json('schema');
      t.datetime('time', { useTz: false });
      t.string('hash');
    });
  };

  const checkTableExists = async () => {
    if (!(await hasSchemaTable())) {
      await createSchemaTable();
    }
  };

  return {
    async read() {
      await checkTableExists();

      const res = await db.connection
        .select('*')
        .from(TABLE_NAME)
        .orderBy('time', 'DESC')
        .first();

      if (!res) {
        return null;
      }

      return typeof res.schema === 'object' ? res.schema : JSON.parse(res.schema);
    },

    hashSchema(schema) {
      return crypto
        .createHash('md5')
        .update(JSON.stringify(schema))
        .digest('hex');
    },

    async add(schema) {
      await checkTableExists();

      // NOTE: we can remove this to add history
      await db.connection(TABLE_NAME).delete();

      const time = new Date();

      await db.connection(TABLE_NAME).insert({
        schema: JSON.stringify(schema),
        hash: this.hashSchema(schema),
        time,
      });
    },

    async clear() {
      await checkTableExists();

      await db.connection(TABLE_NAME).truncate();
    },
  };
};
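A hypothetical consumer of the new storage module (the db and schema values are assumptions; only createSchemaStorage and its read/add/hashSchema/clear methods come from the file above). Note that hashSchema serializes with JSON.stringify, so the schema object needs a stable key order for hash comparisons to be meaningful.

const createSchemaStorage = require('./storage'); // path as added by this commit

async function hasSchemaChanged(db, schema) {
  const storage = createSchemaStorage(db);

  const stored = await storage.read(); // latest persisted schema, or null on first run
  if (!stored) {
    await storage.add(schema); // persists the JSON, its md5 hash and a timestamp
    return true;
  }

  return storage.hashSchema(stored) !== storage.hashSchema(schema);
}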