Mirror of https://github.com/strapi/strapi.git — synced 2025-11-02 02:44:55 +00:00
Merge pull request #106 from wistityhq/feature/migrations
Migrations (alpha)
This commit is contained in: commit d17d48db0b
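For context, the migration files this feature generates follow the shape of the migration template further down in this diff: a module exporting Knex-style `up` and `down` functions that each return a `Promise.all` of schema-builder calls against the `connection`. A minimal sketch of what a generated file might look like (the `articles` table and its columns are illustrative, not taken from this commit):

```javascript
// Illustrative sketch only — real files are generated from the models'
// *.settings.json definitions and then beautified with js-beautify.
exports.up = function (connection, Promise) {
  return Promise.all([
    connection.schema.createTableIfNotExists('articles', function (table) {
      table.increments();     // primary key
      table.string('title');  // a simple column type
    })
  ]);
};

exports.down = function (connection, Promise) {
  return Promise.all([
    connection.schema.dropTable('articles')  // roll the table back
  ]);
};
```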
@ -9,6 +9,7 @@ const _ = require('lodash');

// Strapi helper for GraphQL.
const helpers = require('strapi/lib/configuration/hooks/graphql/helpers/');
const utils = require('./');

/**
 * Utils functions for BookShelf
@ -132,6 +133,8 @@ module.exports = {
update: function (collectionIdentity, rootValue, args) {
_.merge(args, rootValue.context.request.body);

const PK = utils.getPK(collectionIdentity.toLowerCase(), null, strapi.models);

return strapi.services[collectionIdentity.toLowerCase()]
.edit(_.set({}, PK, args[PK]), _.omit(args, PK))
.then(function (data) {

@ -5,12 +5,17 @@
*/

// Node.js core.
const _ = require('lodash');
const async = require('async');
const fs = require('fs');
const path = require('path');

// Public node modules.
const beautify = require('js-beautify').js_beautify;

// Local utilities.
const dictionary = require('strapi-utils/lib/dictionary');

/**
 * Runs after this generator has finished
 *
@ -19,26 +24,77 @@ const beautify = require('js-beautify').js_beautify;
*/

module.exports = function afterGenerate(scope, cb) {
const migrationFile = path.resolve(scope.rootPath, 'data', 'migrations', scope.connection, scope.filename);
async.parallel({
migrationFile: function (cb) {
const migrationFile = path.resolve(scope.rootPath, 'data', 'migrations', scope.connection, scope.filename);

// Read the migration file.
fs.readFile(migrationFile, 'utf8', function (err, data) {
// Read the migration file.
fs.readFile(migrationFile, 'utf8', function (err, data) {
if (err) {
return cb.invalid(err);
}

// And rewrite it with the beautify node module.
fs.writeFile(migrationFile, beautify(data, {
indent_size: 2,
keep_function_indentation: true,
space_before_conditional: true,
end_with_newline: true
}), 'utf8', function (err) {
if (err) {
return cb(err, null);
} else {
return cb(null, null);
}
});
});
},
settings: function (cb) {
dictionary.aggregate({
dirname: path.resolve(scope.rootPath, 'api'),
filter: /(.+)\.settings.json$/,
depth: 4
}, cb);
},
functions: function (cb) {
dictionary.aggregate({
dirname: path.resolve(scope.rootPath, 'api'),
filter: /(.+)\.js$/,
depth: 4
}, cb);
}
}, function (err, data) {
if (err) {
return cb.invalid(err);
}

// And rewrite it with the beautify node module.
fs.writeFile(migrationFile, beautify(data, {
indent_size: 2,
keep_function_indentation: true,
space_before_conditional: true,
end_with_newline: true
}), 'utf8', function (err) {
if (err) {
return cb.invalid(err);
} else {
return cb.success();
}
});
// Fetch all models
const models = _.get(_.merge(data.settings, data.functions), 'models');

if (!_.isUndefined(models)) {
_.mapValues(models, function (model) {
return _.omitBy(model, _.isFunction);
});

const modelsKeyLowercased = _.mapKeys(models, function (model, key) {
return key.toLowerCase();
});

const historyFile = path.resolve(scope.rootPath, 'data', 'migrations', '.history');

// And rewrite it with the beautify node module.
fs.writeFile(historyFile, beautify(JSON.stringify(modelsKeyLowercased), {
indent_size: 2,
keep_function_indentation: true,
space_before_conditional: true,
end_with_newline: true
}), 'utf8', function (err) {
if (err) {
return cb.invalid(err);
} else {
return cb.success();
}
});
}
});
};

@ -76,61 +76,89 @@ module.exports = function (scope, cb) {
}
});

const history = (function () {
try {
return JSON.parse(fs.readFileSync(path.resolve(scope.rootPath, 'data', 'migrations', '.history'), 'utf8'));
} catch (err) {
// File not existing
return {};
}
})();

// Register every model.
const migrations = glob.sync(path.resolve(scope.rootPath, 'api', '**', 'models', '*.json')).map((file) => {
let modelName;
const migrations = glob.sync(path.resolve(scope.rootPath, 'api', '**', 'models', '*.json')).map((filepath) => {
try {
const file = JSON.parse(fs.readFileSync(path.resolve(filepath)));

// Only create migration file for the models with the specified connection.
if (JSON.parse(fs.readFileSync(path.resolve(file))).connection === scope.connection) {
// Only create migration file for the models with the specified connection.
if (_.get(file, 'connection') === _.get(scope, 'connection')) {
// Save the model name thanks to the given table name.
const modelName = _.get(file, 'tableName');
scope.models[modelName] = file;

// Save the model name thanks to the given table name.
modelName = JSON.parse(fs.readFileSync(path.resolve(file))).tableName;
scope.models[modelName] = JSON.parse(fs.readFileSync(path.resolve(file)));

// First, we need to know if the table already exists.
scope.db.schema.hasTable(modelName).then(function (exists) {

// If the table doesn't exist.
if (!exists) {

// Builder: add needed options specified in the model
// for each option.
_.forEach(scope.models[modelName].options, function (value, option) {
builder.options(scope.models, modelName, value, option);
});

// Builder: create template for each attribute-- either with a column type
// or with a relationship.
_.forEach(scope.models[modelName].attributes, function (details, attribute) {
if (details.type && _.isString(details.type)) {
builder.types(scope.models, modelName, details, attribute);
} else if (_.isString(details.collection) || _.isString(details.model)) {
builder.relations(scope.models, modelName, details, attribute);
}
});

// Builder: create and drop the table.
builder.createTable(scope.models, modelName);
if (!_.isEmpty(history) && history.hasOwnProperty(modelName)) {
_.set(scope.models, modelName + '.oldAttributes', _.get(history, modelName + '.attributes'));
} else {
_.set(scope.models, modelName + '.oldAttributes', {});
}

// If the table already exists.
else {
// First, we need to know if the table already exists.
scope.db.schema.hasTable(modelName).then(function (exists) {
// If the table doesn't exist.
if (!exists) {
// Builder: add needed options specified in the model
// for each option.
_.forEach(scope.models[modelName].options, function (value, option) {
builder.options(scope.models, modelName, value, option);
});

// Ideally, we need to verify the table properties here
// to see if they still are the same.
// Builder: create template for each attribute-- either with a column type
// or with a relationship.
_.forEach(scope.models[modelName].attributes, function (details, attribute) {
if (details.type && _.isString(details.type)) {
builder.types(scope.models, modelName, details, attribute);
} else if (_.isString(details.collection) || _.isString(details.model)) {
builder.relations(scope.models, modelName, details, attribute);
}
});

// Parse every attribute.
_.forEach(scope.models[modelName].attributes, function (details, attribute) {
// Builder: create and drop the table.
builder.createTable(scope.models, modelName);
} else {
// If the table already exists.

// Verify if a column already exists for the attribute.
scope.db.schema.hasColumn(modelName, attribute).then(function (exists) {
scope.models[modelName].newAttributes = {};
// Set new attributes object
_.set(scope.models[modelName], 'newAttributes', {});

// Identify added, updated and removed attributes
const attributesRemoved = _.difference(_.keys(scope.models[modelName].oldAttributes), _.keys(scope.models[modelName].attributes));
const attributesAddedOrUpdated = _.difference(_.keys(scope.models[modelName].attributes), attributesRemoved);

// Parse every attribute which has been removed.
_.forEach(attributesRemoved, function (attribute) {
const details = scope.models[modelName].oldAttributes[attribute];
details.isRemoved = true;

// Save the attribute as a new attribute.
scope.models[modelName].newAttributes[attribute] = _.cloneDeep(details);

// Builder: create template for each attribute-- either with a column type
// or with a relationship.
if (details.type && _.isString(details.type)) {
builder.types(scope.models, modelName, scope.models[modelName].newAttributes[attribute], attribute, true, true);
} else if (_.isString(details.collection) || _.isString(details.model)) {
builder.relations(scope.models, modelName, scope.models[modelName].newAttributes[attribute], attribute, true, true, history);
}
});

// Parse every attribute which has been added or updated.
_.forEach(attributesAddedOrUpdated, function (attribute) {
const details = scope.models[modelName].attributes[attribute];

// If it's a new attribute.
if (!exists) {

if (!scope.models[modelName].oldAttributes.hasOwnProperty(attribute)) {
// Save the attribute as a new attribute.
scope.models[modelName].newAttributes[attribute] = details;
scope.models[modelName].newAttributes[attribute] = _.cloneDeep(details);

// Builder: create template for each attribute-- either with a column type
// or with a relationship.
@ -139,29 +167,71 @@ module.exports = function (scope, cb) {
} else if (_.isString(details.collection) || _.isString(details.model)) {
builder.relations(scope.models, modelName, scope.models[modelName].newAttributes[attribute], attribute);
}
} else {
// If it's an existing attribute.

// Builder: select the table.
builder.selectTable(scope.models, modelName);
// Try to identify attribute updates
const toDrop = (function () {
if (details.hasOwnProperty('collection') && details.hasOwnProperty('via') &&
(_.get(scope.models[modelName].oldAttributes[attribute], 'collection') !== details.collection || _.get(scope.models[modelName].oldAttributes[attribute], 'via') !== details.via)) {
return true;
} else if (details.hasOwnProperty('model') && details.hasOwnProperty('via') &&
(_.get(scope.models[modelName].oldAttributes[attribute], 'model') !== details.model || _.get(scope.models[modelName].oldAttributes[attribute], 'via') !== details.via)) {
return true;
} else if (details.hasOwnProperty('model') &&
(_.get(scope.models[modelName].oldAttributes[attribute], 'model') !== details.model)) {
return true;
} else if (details.hasOwnProperty('model') && !_.get(scope.models[modelName].oldAttributes, attribute).hasOwnProperty('model')) {
return true;
} else if (details.hasOwnProperty('collection') && !_.get(scope.models[modelName].oldAttributes, attribute).hasOwnProperty('collection')) {
return true;
} else if (details.hasOwnProperty('via') && !_.get(scope.models[modelName].oldAttributes, attribute).hasOwnProperty('via')) {
return true;
} else if (!_.isUndefined(details.type) && _.get(scope.models[modelName].oldAttributes[attribute], 'type') !== _.get(details, 'type')) {
return true;
} else if (!_.isUndefined(details.defaultValue) && _.get(scope.models[modelName].oldAttributes[attribute], 'defaultValue') === _.get(details, 'defaultValue')) {
return true;
} else if (!_.isUndefined(details.maxLength) && _.get(scope.models[modelName].oldAttributes[attribute], 'maxLength') === _.get(details, 'maxLength')) {
return true;
} else if (!_.isUndefined(details.nullable) && _.get(scope.models[modelName].oldAttributes[attribute], 'nullable') === _.get(details, 'nullable')) {
return true;
} else {
return false;
}
})();

// The attribute has been updated.
// We will drop it then create it again with the new options.
if (toDrop) {
// Save the attribute as a new attribute.
scope.models[modelName].newAttributes[attribute] = _.cloneDeep(details);

// Builder: create template for each attribute-- either with a column type
// or with a relationship.
if (details.type && _.isString(details.type)) {
builder.types(scope.models, modelName, scope.models[modelName].newAttributes[attribute], attribute, true);
} else if (_.isString(details.collection) || _.isString(details.model)) {
builder.relations(scope.models, modelName, scope.models[modelName].newAttributes[attribute], attribute, true, false, history);
}
}
}

// If the column already exists.
else {

// TODO: Verify columns info are the same.
// scope.db(modelName).columnInfo(attribute).then(function (info) {
//
// });
}
}).catch(function (err) {
console.log(err);
});
});
}
});

return new Promise((resolve) => {
asyncFunction(file, resolve);
});
// For lightweight migration file,
// only call this when new attributes are detected.
if (!_.isEmpty(scope.models[modelName].newAttributes)) {
// Builder: select the table.
builder.selectTable(scope.models, modelName);
}
}
});

return new Promise((resolve) => {
asyncFunction(filepath, resolve);
});
}
} catch (e) {
return cb.invalid(e);
}
});

@ -23,19 +23,35 @@ module.exports = function (models, modelName) {
// Then, every `up` logic of every model call the
// `./builder/tables/createTableIfNotExists` template.
const tplTableCreate = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'tables', 'createTableIfNotExists.template'), 'utf8');
models[modelName].up = _.unescape(_.template(tplTableCreate)({
models: models,
tableName: modelName,
attributes: models[modelName].attributes,
options: models[modelName].options
}));
if (_.isEmpty(_.get(models[modelName], 'up.others'))) {
_.set(models[modelName], 'up.others', _.unescape(_.template(tplTableCreate)({
models: models,
tableName: modelName,
attributes: models[modelName].attributes,
options: models[modelName].options
})));
} else {
models[modelName].up.others += _.unescape(_.template(tplTableCreate)({
models: models,
tableName: modelName,
attributes: models[modelName].attributes,
options: models[modelName].options
}));
}

// Template: drop the table for the `down` export.
// This adds a `down` logic for the current model.
// Then, every `down` logic of every model call the
// `./builder/tables/dropTable` template.
const tplTableDrop = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'tables', 'dropTable.template'), 'utf8');
models[modelName].down = _.unescape(_.template(tplTableDrop)({
tableName: modelName
}));

if (_.isEmpty(_.get(models[modelName], 'down.others'))) {
_.set(models[modelName], 'down.others', _.unescape(_.template(tplTableDrop)({
tableName: modelName
})));
} else {
models[modelName].down.others += _.unescape(_.template(tplTableDrop)({
tableName: modelName
}));
}
};

@ -12,144 +12,384 @@ const path = require('path');
const _ = require('lodash');
const pluralize = require('pluralize');

// Bookshelf utils.
// Collections utils.
const utilsModels = require('strapi/lib/configuration/hooks/models/utils/');
const utilsBookShelf = require('strapi-bookshelf/lib/utils/');

// Template builder.
const selectTable = require('./selectTable');

/**
 * Relationship templates
 */

module.exports = function (models, modelName, details, attribute) {
module.exports = function (rootModels, modelName, details, attribute, toDrop, onlyDrop, history) {
let tplRelationUp;
let tplRelationDown;
let infos = {};
let oldInfos = {};

const infos = utilsModels.getNature(details, attribute, models);
if (!onlyDrop && toDrop) {
infos = utilsModels.getNature(details, attribute, rootModels);
oldInfos = utilsModels.getNature(_.get(rootModels[modelName].oldAttributes, attribute), attribute, history);

// If it's a "one-to-one" relationship.
if (infos.verbose === 'hasOne') {
const isDifferentVerbose = !(oldInfos.hasOwnProperty('nature') && oldInfos.nature === infos.nature);

// Force singular foreign key
details.attribute = pluralize.singular(details.model);

// Define PK column
details.column = utilsBookShelf.getPK(modelName, undefined, models);

// Template: create a new column thanks to the attribute's relation.
// Simply make a `create` template for this attribute which will be added
// to the table template-- either `./builder/tables/selectTable` or
// `./builder/tables/createTableIfNotExists`.
tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'hasOne.template'), 'utf8');
models[modelName].attributes[attribute].create = _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details
}));

// Template: drop the column.
// Simply make a `delete` template for this attribute which drops the column
// with the `./builder/columns/dropColumn` template.
tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn.template'), 'utf8');
models[modelName].attributes[attribute].delete = _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));
}

else if (infos.verbose === 'belongsTo') {
// Force singular foreign key
details.attribute = pluralize.singular(details.model);

// Define PK column
details.column = utilsBookShelf.getPK(modelName, undefined, models);

if (infos.nature === 'oneToMany' || infos.nature === 'oneWay') {
// Template: create a new column thanks to the attribute's relation.
// Simply make a `create` template for this attribute which will be added
// to the table template-- either `./builder/tables/selectTable` or
// `./builder/tables/createTableIfNotExists`.
tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsTo.template'), 'utf8');
models[modelName].attributes[attribute].create = _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details,
nature: infos.nature
}));

// Template: drop the column.
// Simply make a `delete` template for this attribute which drops the column
// with the `./builder/columns/dropColumn` template.
tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn.template'), 'utf8');
models[modelName].attributes[attribute].delete = _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));
if (isDifferentVerbose) {
handleRelation(oldInfos, history, modelName, _.get(rootModels[modelName].oldAttributes, attribute), attribute, true, true);
handleRelation(infos, rootModels, modelName, details, attribute);
} else {
// Template: create a new column thanks to the attribute's relation.
// Simply make a `create` template for this attribute which will be added
// to the table template-- either `./builder/tables/selectTable` or
// `./builder/tables/createTableIfNotExists`.
tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsTo-unique.template'), 'utf8');
models[modelName].attributes[attribute].create = _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details
}));

// Template: drop the column.
// Simply make a `delete` template for this attribute which drops the column
// with the `./builder/columns/dropColumn` template.
tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn.template'), 'utf8');
models[modelName].attributes[attribute].delete = _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));
handleRelation(infos, rootModels, modelName, details, attribute, true, true);
}
} else if (onlyDrop || toDrop) {
oldInfos = utilsModels.getNature(_.get(rootModels[modelName].oldAttributes, attribute), attribute, history);

handleRelation(oldInfos, history, modelName, _.get(rootModels[modelName].oldAttributes, attribute), attribute, true, true);
} else {
infos = utilsModels.getNature(details, attribute, rootModels);

handleRelation(infos, rootModels, modelName, details, attribute);
}

// Otherwise if it's a "many-to-many" relationship.
else if (infos.verbose === 'belongsToMany') {
// Save the relationship.
const relationship = models[details.collection].attributes[details.via];
function handleRelation(infos, models, modelName, details, attribute, toDrop, onlyDrop) {
if (_.isEmpty(_.get(rootModels[modelName].attributes, attribute + '.create'))) {
_.set(rootModels[modelName].attributes, attribute + '.create', {
drop: '',
others: ''
});
}

// Construct relation table name
const relationTable = _.map(_.sortBy([relationship, details], 'collection'), function (table) {
return _.snakeCase(pluralize.plural(table.collection) + ' ' + pluralize.plural(table.via));
}).join('__');
if (_.isEmpty(_.get(rootModels[modelName].attributes, attribute + '.delete'))) {
_.set(rootModels[modelName].attributes, attribute + '.delete', {
drop: '',
others: ''
});
}

// Force singular foreign key
relationship.attribute = pluralize.singular(relationship.collection);
details.attribute = pluralize.singular(details.collection);
// If it's a "one-to-one" relationship.
if (infos.verbose === 'hasOne') {
// Force singular foreign key.
details.attribute = pluralize.singular(details.model);

// Define PK column
details.column = utilsBookShelf.getPK(modelName, undefined, models);
relationship.column = utilsBookShelf.getPK(details.collection, undefined, models);
// Define PK column.
details.column = utilsBookShelf.getPK(modelName, undefined, models);

if (!models.hasOwnProperty(relationTable)) {
// Save the relation table as a new model in the scope
// aiming to benefit of templates for the table such as
// `createTableIfNotExists` and `dropTable`.
models[relationTable] = {};
if (!toDrop) {
tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'hasOne.template'), 'utf8');
models[modelName].attributes[attribute].create.others += _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details
}));

// Template: create the table for the `up` export if it doesn't exist.
// This adds a `up` logic for the relation table.
const tplTableUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsToMany.template'), 'utf8');
models[relationTable].up = _.unescape(_.template(tplTableUp)({
models: models,
tableName: relationTable,
details: details,
relationship: relationship
}));
tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn-unique.template'), 'utf8');
models[modelName].attributes[attribute].delete.others += _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));
} else {
tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn-unique.template'), 'utf8');
models[modelName].attributes[attribute].create.drop += _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));

// Template: drop the table for the `down` export.
// This adds a `down` logic for the relation table.
const tplTableDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'tables', 'dropTable.template'), 'utf8');
models[relationTable].down = _.unescape(_.template(tplTableDown)({
tableName: relationTable
}));
tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'hasOne.template'), 'utf8');
models[modelName].attributes[attribute].delete.drop += _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details
}));
}
} else if (infos.verbose === 'belongsTo') {
// Force singular foreign key.
details.attribute = pluralize.singular(details.model);

// Define PK column.
details.column = utilsBookShelf.getPK(modelName, undefined, models);

if (infos.nature === 'oneToMany' || infos.nature === 'oneWay') {
if (!toDrop) {
tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsTo.template'), 'utf8');
rootModels[modelName].attributes[attribute].create.others += _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details,
nature: infos.nature
}));

tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn.template'), 'utf8');
rootModels[modelName].attributes[attribute].delete.drop += _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));
} else {
tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropForeign.template'), 'utf8');
rootModels[modelName].attributes[attribute].create.drop += _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));

tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsTo.template'), 'utf8');
rootModels[modelName].attributes[attribute].delete.others += _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details,
nature: infos.nature
}));
}
} else {
if (!toDrop) {
tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsTo-unique.template'), 'utf8');
rootModels[modelName].attributes[attribute].create.others += _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details
}));

tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn-unique.template'), 'utf8');
rootModels[modelName].attributes[attribute].delete.drop += _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));
} else {
tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn.template'), 'utf8');
rootModels[modelName].attributes[attribute].create.drop += _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));

tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsTo.template'), 'utf8');
rootModels[modelName].attributes[attribute].delete.others += _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details,
nature: infos.nature
}));
}
}
} else if (infos.verbose === 'hasMany') {
if (toDrop) {
tplRelationDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropForeign.template'), 'utf8');
rootModels[modelName].attributes[attribute].create.drop += _.unescape(_.template(tplRelationDown)({
tableName: modelName,
attribute: attribute,
details: details
}));

tplRelationUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsTo.template'), 'utf8');
rootModels[modelName].attributes[attribute].delete.others += _.unescape(_.template(tplRelationUp)({
tableName: modelName,
attribute: attribute,
details: details,
nature: infos.nature
}));
}
} else if (infos.verbose === 'belongsToMany') {
// Otherwise if it's a "many-to-many" relationship.

// Save the relationship.
const relationship = models[details.collection].attributes[details.via];

// Construct relation table name.
const relationTable = _.map(_.sortBy([relationship, details], 'collection'), function (table) {
return _.snakeCase(pluralize.plural(table.collection) + ' ' + pluralize.plural(table.via));
}).join('__');

// Force singular foreign key.
relationship.attribute = pluralize.singular(relationship.collection);
details.attribute = pluralize.singular(details.collection);

// Define PK column.
details.column = utilsBookShelf.getPK(modelName, undefined, models);
relationship.column = utilsBookShelf.getPK(details.collection, undefined, models);

// Avoid to create table both times.
if (!rootModels.hasOwnProperty(relationTable) || !_.isEmpty(_.get(rootModels, relationTable + '.up.drop'))) {
// Set objects
if (_.isUndefined(_.get(models, relationTable + '.up.others'))) {
_.set(rootModels, relationTable + '.up.others', '');
}

if (_.isUndefined(_.get(rootModels, relationTable + '.up.drop'))) {
_.set(rootModels, relationTable + '.up.drop', '');
}

if (_.isUndefined(_.get(rootModels, relationTable + '.down.others'))) {
_.set(rootModels, relationTable + '.down.others', '');
}

if (_.isUndefined(_.get(rootModels, relationTable + '.down.drop'))) {
_.set(rootModels, relationTable + '.down.drop', '');
}

if (_.isUndefined(_.get(rootModels, relationTable + '.attributes'))) {
_.set(rootModels, relationTable + '.attributes', {});
}

if (!toDrop) {
// Load templates.
const tplTableUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsToMany.template'), 'utf8');
const tplTableDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'tables', 'dropTable.template'), 'utf8');

// Create relationships table for many-to-many.
rootModels[relationTable].up.others += _.unescape(_.template(tplTableUp)({
models: models,
tableName: relationTable,
details: details,
relationship: relationship
}));

if (_.isUndefined(_.get(rootModels, relationTable + '.attributes.fk'))) {
// Load templates.
const tplFKDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropForeign.template'), 'utf8');
const tplSelectTableDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'tables', 'select', 'down.template'), 'utf8');

// Drop current relationships table on migration rollback.
rootModels[relationTable].down.others += _.unescape(_.template(tplTableDown)({
tableName: relationTable
}));

// Remove foreign key current relationships table before drop the table on migration rollback.
rootModels[relationTable].attributes.fk = {
delete: {
drop: _.unescape(_.template(tplFKDown)({
attribute: details.attribute + '_' + details.column
})) + _.unescape(_.template(tplFKDown)({
attribute: relationship.attribute + '_' + relationship.column
}))
}
};

rootModels[relationTable].down.drop += _.unescape(_.template(tplSelectTableDown)({
models: models,
tableName: relationTable,
attributes: models[relationTable].attributes,
toDrop: true
}));
} else {
const dropMigrationTable = _.unescape(_.template(tplTableDown)({
tableName: relationTable
}));

// Eliminate duplicate
if (rootModels[relationTable].down.drop.indexOf(dropMigrationTable) === -1) {
// Drop current relationships table on migration rollback.
rootModels[relationTable].down.drop += dropMigrationTable;
}
}
} else if (onlyDrop) {
// Load templates.
const tplTableUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'relations', 'belongsToMany.template'), 'utf8');
const tplTableDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'tables', 'dropTable.template'), 'utf8');
const tplFKDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropForeign.template'), 'utf8');

const dropMigrationTable = _.unescape(_.template(tplTableDown)({
tableName: relationTable
}));

if (_.isUndefined(_.get(rootModels[relationTable].attributes, 'fk.delete')) && _.get(rootModels[modelName].newAttributes[attribute], 'isRemoved') !== true) {
// Eliminate duplicate
if (rootModels[relationTable].up.drop.indexOf(dropMigrationTable) === -1) {
// Drop current relationships table on migration run.
rootModels[relationTable].up.drop += _.unescape(_.template(tplTableDown)({
tableName: relationTable
}));
}

// We have to do this to be in the up template loop
_.set(rootModels[relationTable], 'newAttributes.fk', {});
_.set(rootModels[relationTable].attributes, 'fk', {
delete: {
drop: ''
}
});

// Drop first FK on migration relation table.
const dropMigrationFK1 = _.unescape(_.template(tplFKDown)({
attribute: details.attribute + '_' + details.column
}));

// Eliminate duplicate
if (rootModels[relationTable].attributes.fk.delete.drop.indexOf(dropMigrationFK1) === -1) {
// Remove foreign key current relationships table before drop the table on migration rollback.
rootModels[relationTable].attributes.fk.delete.drop += dropMigrationFK1;
}

// Drop first FK on migration relation table.
const dropMigrationFK2 = _.unescape(_.template(tplFKDown)({
attribute: relationship.attribute + '_' + relationship.column
}));

// Eliminate duplicate
if (rootModels[relationTable].attributes.fk.delete.drop.indexOf(dropMigrationFK2) === -1) {
rootModels[relationTable].attributes.fk.delete.drop += dropMigrationFK2;
}

// Builder: select the table.
selectTable(rootModels, relationTable);
} else if (_.get(rootModels[modelName].newAttributes[attribute], 'isRemoved') === true) {
// Eliminate duplicate
if (rootModels[relationTable].up.others.indexOf(dropMigrationTable) === -1) {
// Drop current relationships table on migration run.
rootModels[relationTable].up.others += _.unescape(_.template(tplTableDown)({
tableName: relationTable
}));
}

if (_.isUndefined(_.get(rootModels[relationTable].attributes, 'fk.create'))) {
// We have to do this to be in the up template loop
_.set(rootModels[relationTable], 'newAttributes.fk.create', {});
_.set(rootModels[relationTable].attributes, 'fk', {
create: {
drop: ''
}
});

// Drop first FK on migration relation table.
const dropMigrationFK1 = _.unescape(_.template(tplFKDown)({
attribute: details.attribute + '_' + details.column
}));

// Eliminate duplicate
if (rootModels[relationTable].attributes.fk.create.drop.indexOf(dropMigrationFK1) === -1) {
// Remove foreign key current relationships table before drop the table on migration rollback.
rootModels[relationTable].attributes.fk.create.drop += dropMigrationFK1;
}

// Drop first FK on migration relation table.
const dropMigrationFK2 = _.unescape(_.template(tplFKDown)({
attribute: relationship.attribute + '_' + relationship.column
}));

// Eliminate duplicate
if (rootModels[relationTable].attributes.fk.create.drop.indexOf(dropMigrationFK2) === -1) {
rootModels[relationTable].attributes.fk.create.drop += dropMigrationFK2;
}

// Builder: select the table.
selectTable(rootModels, relationTable);
}
}

// Eliminate duplicate
if (rootModels[relationTable].down.others.indexOf('createTableIfNotExists(\'' + relationTable + '\'') === -1) {
// Create previous relationships table on migration rollback.
rootModels[relationTable].down.others += _.unescape(_.template(tplTableUp)({
models: models,
tableName: relationTable || relationTable,
details: details,
relationship: relationship
}));
}
}
}
}
}
};

@ -17,21 +17,89 @@ const _ = require('lodash');

module.exports = function (models, modelName) {

if (!models[modelName].hasOwnProperty('up')) {
models[modelName].up = {
drop: '',
others: ''
};
}

// Allow to template only when it's necessary
let emptyArrayForDrop = [];
let emptyArrayForOthers = [];

_.forEach(models[modelName].newAttributes, function (attribute, key) {
if (!_.isEmpty(_.get(models[modelName].attributes, key + '.create.drop'))) {
emptyArrayForDrop.push(true);
}

if (!_.isEmpty(_.get(models[modelName].attributes, key + '.create.others'))) {
emptyArrayForOthers.push(true);
}
});

// Template: select the table for the `up` export.
// Every attribute with `create` key will be added in this template.
const tplSelectTableUp = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'tables', 'select', 'up.template'), 'utf8');
models[modelName].up = _.unescape(_.template(tplSelectTableUp)({
models: models,
tableName: modelName,
attributes: models[modelName].newAttributes
}));

if (!_.isEmpty(emptyArrayForDrop)) {
models[modelName].up.drop += _.unescape(_.template(tplSelectTableUp)({
models: models,
tableName: modelName,
attributes: models[modelName].newAttributes,
toDrop: true
}));
}

if (!_.isEmpty(emptyArrayForOthers)) {
models[modelName].up.others += _.unescape(_.template(tplSelectTableUp)({
models: models,
tableName: modelName,
attributes: models[modelName].newAttributes,
toDrop: false
}));
}

if (!models[modelName].hasOwnProperty('down')) {
models[modelName].down = {
drop: '',
others: ''
};
}

// Allow to template only when it's necessary
emptyArrayForDrop = [];
emptyArrayForOthers = [];

_.forEach(models[modelName].newAttributes, function (attribute, key) {
if (!_.isEmpty(_.get(models[modelName].attributes, key + '.delete.drop'))) {
emptyArrayForDrop.push(true);
}

if (!_.isEmpty(_.get(models[modelName].attributes, key + '.delete.others'))) {
emptyArrayForOthers.push(true);
}
});

// Template: select the table for the `down` export.
// Every attribute with `delete` key will be added in this template.
const tplSelectTableDown = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'tables', 'select', 'down.template'), 'utf8');
models[modelName].down = _.unescape(_.template(tplSelectTableDown)({
models: models,
tableName: modelName,
attributes: models[modelName].newAttributes
}));

if (!_.isEmpty(emptyArrayForDrop)) {
models[modelName].down.drop += _.unescape(_.template(tplSelectTableDown)({
models: models,
tableName: modelName,
attributes: models[modelName].newAttributes,
toDrop: true
}));
}

if (!_.isEmpty(emptyArrayForOthers)) {
models[modelName].down.others += _.unescape(_.template(tplSelectTableDown)({
models: models,
tableName: modelName,
attributes: models[modelName].newAttributes,
toDrop: false
}));
}
};

@ -15,7 +15,7 @@ const _ = require('lodash');
* Template types
*/

module.exports = function (models, modelName, details, attribute) {
module.exports = function (models, modelName, details, attribute, toDrop, onlyDrop) {

// Template: create a new column thanks to the attribute's type.
// First, make sure we know the attribute type. If not, just do it
@ -26,17 +26,34 @@ module.exports = function (models, modelName, details, attribute) {
} catch (err) {
tplTypeCreate = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'types', 'specificType.template'), 'utf8');
}
models[modelName].attributes[attribute].create = _.unescape(_.template(tplTypeCreate)({
tableName: modelName,
attribute: attribute,
details: details
}));

const tplTypeDelete = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn.template'), 'utf8');

// UP
_.set(models[modelName].attributes, attribute + '.create', {});

if (!_.isUndefined(toDrop) && toDrop) {
// Template: delete a specific column.
models[modelName].attributes[attribute].create.drop = _.unescape(_.template(tplTypeDelete)({
tableName: modelName,
attribute: attribute
}));
}

// Create when it's not an onlyDrop action
if (_.isUndefined(onlyDrop)) {
models[modelName].attributes[attribute].create.others = _.unescape(_.template(tplTypeCreate)({
tableName: modelName,
attribute: attribute,
details: details
}));
}

// Template: make the column chainable with the `defaultTo` template
// if a default value is needed.
if (!_.isUndefined(details.defaultTo)) {
const tplDefaultTo = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'chainables', 'defaultTo.template'), 'utf8');
models[modelName].attributes[attribute].create += _.unescape(_.template(tplDefaultTo)({
models[modelName].attributes[attribute].create.others += _.unescape(_.template(tplDefaultTo)({
details: details
}));
}
@ -45,20 +62,46 @@ module.exports = function (models, modelName, details, attribute) {
// if the column respect uniqueness rule.
if (details.unique === true) {
const tplUnique = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'chainables', 'unique.template'), 'utf8');
models[modelName].attributes[attribute].create += _.unescape(_.template(tplUnique)({}));
models[modelName].attributes[attribute].create.others += _.unescape(_.template(tplUnique)({}));
}

// Template: make the column chainable with the `primary` template
// if the column needs the rule.
if (details.primary === true) {
const tplPrimary = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'chainables', 'primary.template'), 'utf8');
models[modelName].attributes[attribute].create += _.unescape(_.template(tplPrimary)({}));
models[modelName].attributes[attribute].create.others += _.unescape(_.template(tplPrimary)({}));
}

// Template: delete a specific column.
const tplTypeDelete = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'dropColumn.template'), 'utf8');
models[modelName].attributes[attribute].delete = _.unescape(_.template(tplTypeDelete)({
tableName: modelName,
attribute: attribute
}));
// DOWN
_.set(models[modelName].attributes, attribute + '.delete', {});

if (!_.isUndefined(toDrop) && toDrop) {
let tplTypeDeleteCreate;
try {
tplTypeDeleteCreate = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'types', models[modelName].oldAttributes[attribute].type + '.template'), 'utf8');
} catch (err) {
tplTypeDeleteCreate = fs.readFileSync(path.resolve(__dirname, '..', '..', 'templates', 'builder', 'columns', 'types', 'specificType.template'), 'utf8');
}

// Create when it's not an onlyDrop action
if (_.isUndefined(onlyDrop)) {
// Template: delete a specific column.
models[modelName].attributes[attribute].delete.drop = _.unescape(_.template(tplTypeDelete)({
tableName: modelName,
attribute: attribute
}));
}

models[modelName].attributes[attribute].delete.others = _.unescape(_.template(tplTypeDeleteCreate)({
tableName: modelName,
attribute: attribute,
details: models[modelName].oldAttributes[attribute]
}));
} else {
// Template: delete a specific column.
models[modelName].attributes[attribute].delete.others = _.unescape(_.template(tplTypeDelete)({
tableName: modelName,
attribute: attribute
}));
}
};

@ -0,0 +1,3 @@

// Delete the `<%= attribute %>` column with unique constraint.
table.dropUnique('<%= tableName.toLowerCase() %>_<%= attribute %>_unique').dropColumn('<%= attribute %>')
@ -0,0 +1,3 @@

// Delete the foreign key on `<%= attribute %>` column.
table.dropForeign('<%= attribute %>').dropColumn('<%= attribute %>')
@ -5,7 +5,7 @@

connection.schema.createTableIfNotExists('<%= tableName %>', function (table) {<% if (_.isObject(options)) { _.forEach(options, function(value, option) { %><% if (models[tableName].options[option] !== false) { %>
<%= models[tableName][option] %>;<% } %><% }); } %>
<% _.forEach(attributes, function(details, attribute) { %><%= models[tableName].attributes[attribute].create %>;
<% _.forEach(attributes, function(details, attribute) { %><%= models[tableName].attributes[attribute].create.others %>;
<% }); %>
}).catch(function (err) {
console.log('Impossible to create the `<%= tableName %>` table.');

@ -2,11 +2,18 @@
/**
 * Select the `<%= tableName %>` table.
 */

<% if (toDrop === true) { %>
connection.schema.table('<%= tableName %>', function (table) {
<% if (!_.isEmpty(attributes)) { _.forEach(attributes, function(details, attribute) { %><%= models[tableName].attributes[attribute].delete %>;
<% if (!_.isEmpty(attributes)) { _.forEach(attributes, function(details, attribute) { %><%= models[tableName].attributes[attribute].delete.drop %>;
<% }); } %>
}).catch(function (err) {
console.log('Impossible to select the `<%= tableName %>` table.');
console.log(err);
}),
}), <% } else { %>
connection.schema.table('<%= tableName %>', function (table) {
<% if (!_.isEmpty(attributes)) { _.forEach(attributes, function(details, attribute) { %><%= models[tableName].attributes[attribute].delete.others %>;
<% }); } %>
}).catch(function (err) {
console.log('Impossible to select the `<%= tableName %>` table.');
console.log(err);
}),<% } %>

@ -2,11 +2,18 @@
/**
 * Select the `<%= tableName %>` table.
 */

<% if (toDrop === true) { %>
connection.schema.table('<%= tableName %>', function (table) {
<% if (!_.isEmpty(attributes)) { _.forEach(attributes, function(details, attribute) { %><%= models[tableName].attributes[attribute].create %>;
<% if (!_.isEmpty(attributes)) { _.forEach(attributes, function(details, attribute) { %><%= models[tableName].attributes[attribute].create.drop %>;
<% }); } %>
}).catch(function (err) {
console.log('Impossible to select the `<%= tableName %>` table.');
console.log(err);
}),
}), <% } else { %>
connection.schema.table('<%= tableName %>', function (table) {
<% if (!_.isEmpty(attributes)) { _.forEach(attributes, function(details, attribute) { %><%= models[tableName].attributes[attribute].create.others %>;
<% }); } %>
}).catch(function (err) {
console.log('Impossible to select the `<%= tableName %>` table.');
console.log(err);
}),<% } %>

@ -8,9 +8,34 @@
*/

exports.up = function(connection, Promise) {
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= models[model].up %><% }); %>
]);
<% var dropped = false; var onlyDrop = true;

_.forEach(models, function (definition, model) {
if (!_.isEmpty(_.get(models[model], 'up.drop'))) {
dropped = true;
}

if (!_.isEmpty(_.get(models[model], 'up.others'))) {
onlyDrop = false;
}
});

if (dropped === true && onlyDrop === false) { %> return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= _.get(models[model], 'up.drop') %><% }); %>
]).then(function() {
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= _.get(models[model], 'up.others') %><% }); %>
]);
});
<% } else if (dropped === true && onlyDrop === true) { %>
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= _.get(models[model], 'up.drop') %><% }); %>
]);
<% } else { %>
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= _.get(models[model], 'up.others') %><% }); %>
]);
<% } %>
};

/**
@ -21,9 +46,35 @@ exports.up = function(connection, Promise) {
*/

exports.down = function(connection, Promise) {
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= models[model].down %><% }); %>
]);
<% var dropped = false; var onlyDrop = true;

_.forEach(models, function (definition, model) {
if (!_.isEmpty(_.get(models[model], 'down.drop'))) {
dropped = true;
}

if (!_.isEmpty(_.get(models[model], 'down.others'))) {
onlyDrop = false;
}
});

if (dropped === true && onlyDrop === false) { %>
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= _.get(models[model], 'down.drop') %><% }); %>
]).then(function() {
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= _.get(models[model], 'down.others') %><% }); %>
]);
});
<% } else if (dropped === true && onlyDrop === true) { %>
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= _.get(models[model], 'down.drop') %><% }); %>
]);
<% } else { %>
return Promise.all([
<% _.forEach(models, function(definition, model) { %><%= _.get(models[model], 'down.others') %><% }); %>
]);
<% } %>
};

/**

@ -64,6 +64,13 @@ module.exports = function (strapi) {
// Expose the GraphQL schemas at `strapi.schemas`
strapi.schemas = schemas;

global.graphql = require('graphql');
global.graphql.query = function * (query, context) {
return this.graphql(schemas, query, {
context: context
});
};

cb();
});
} else {

@ -34,8 +34,6 @@ module.exports = {
*/

getObject: function (matchedRoute) {
// TODO:
// - Improve way to detect collection/resource/relationships/related
switch (_.size(matchedRoute.regexp.keys)) {
case 0:
return 'collection';

@ -69,8 +69,12 @@ module.exports = {
other: ''
};

if (_.isUndefined(models)) {
models = global['strapi'].models;
}

if (association.hasOwnProperty('via') && association.hasOwnProperty('collection')) {
const relatedAttribute = strapi.models[association.collection].attributes[association.via];
const relatedAttribute = models[association.collection].attributes[association.via];

types.current = 'collection';

@ -83,7 +87,7 @@ module.exports = {
types.current = 'modelD';

// We have to find if there is a model linked to this key
_.forIn(strapi.models, function (model) {
_.forIn(models, function (model) {
_.forIn(model.attributes, function (attribute) {
if (attribute.hasOwnProperty('via') && attribute.via === key && attribute.hasOwnProperty('collection')) {
types.other = 'collection';
@ -102,7 +106,7 @@ module.exports = {
types.current = 'model';

// We have to find if there is a model linked to this key
_.forIn(strapi.models, function (model) {
_.forIn(models, function (model) {
_.forIn(model.attributes, function (attribute) {
if (attribute.hasOwnProperty('via') && attribute.via === key) {
if (attribute.hasOwnProperty('collection')) {

@ -198,5 +198,14 @@ Then, you can apply one or more policies on each query and mutation.
}
```

## Use GraphQL in your codebase

You can make GraphQL queries from your own codebase. The `graphql` module is exposed globally, and Strapi adds a `query` function to it so you can easily run GraphQL queries.

```javascript
this.body = yield graphql.query("{articles{title}}", this);
// Don't forget to send the context. This is needed to apply permissions.
```
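
For instance, a controller action could delegate its response to GraphQL like this (a minimal sketch; the `Article` API and its `find` action are assumed for illustration and are not part of this commit):

```javascript
// api/article/controllers/Article.js — hypothetical controller action.
module.exports = {
  find: function * () {
    // `this` is the request context; forwarding it lets the GraphQL
    // policies handle sessions and cookies as usual.
    this.body = yield graphql.query('{articles{title}}', this);
  }
};
```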

!!! note
The policy doesn't need to be in the same API folder. The GraphQL permissions are based on the global `strapi.policies` variable, which is an aggregate of the policies of the whole application. Also, the request is applied to the policies; in other words, you can handle sessions and cookies in the policy as usual.