2021-02-18 18:42:28 +01:00
|
|
|
'use strict';
|
|
|
|
|
|
|
|
const { difference, pick, orderBy, prop, intersection } = require('lodash/fp');
|
|
|
|
const { getService } = require('../../../utils');
|
|
|
|
|
2021-02-25 17:40:14 +01:00
|
|
|
// Number of entries fetched per query when paginating through a collection.
const BATCH_SIZE = 1000;
|
|
|
|
|
2021-03-02 16:42:17 +01:00
|
|
|
// Common functions
|
|
|
|
|
2021-03-01 11:26:44 +01:00
|
|
|
// Curried predicate: an entry must be migrated only when it actually has
// related localizations AND none of them belong to a locale that was already
// handled in a previous pass (in which case the values were copied already).
const shouldBeProcesseed = processedLocaleCodes => entry => {
  const relatedLocaleCodes = entry.localizations.map(related => related.locale);
  const alreadyHandled = relatedLocaleCodes.some(code => processedLocaleCodes.includes(code));
  return entry.localizations.length > 1 && !alreadyHandled;
};
|
|
|
|
|
2021-03-02 16:42:17 +01:00
|
|
|
// Builds the list of updates to perform: for each entry of the locale being
// processed, the values of the attributes to migrate and the ids of the
// related entries (other locales) that should receive those values.
const getUpdatesInfo = ({ entriesToProcess, locale, attributesToMigrate }) =>
  entriesToProcess.map(entry => {
    // Copy only the attributes that exist on the entry (same contract as lodash pick).
    const attributesValues = {};
    for (const attribute of attributesToMigrate) {
      if (attribute in entry) {
        attributesValues[attribute] = entry[attribute];
      }
    }

    // Target every related localization except the entry's own locale.
    const entriesIdsToUpdate = entry.localizations
      .filter(related => related.locale !== locale.code)
      .map(related => related.id);

    return { entriesIdsToUpdate, attributesValues };
  });
|
|
|
|
|
|
|
|
// Bookshelf
|
2021-03-01 11:26:44 +01:00
|
|
|
|
2021-03-02 16:42:17 +01:00
|
|
|
// Scratch table used to stage migrated values before one bulk UPDATE (pg/mysql only).
const TMP_TABLE_NAME = '__tmp__i18n_field_migration';
|
|
|
|
|
|
|
|
// Flattens the updates into one row per entry to update ({ id, ...values })
// and bulk-inserts them into the temporary migration table within the
// given transaction (chunks of 100 rows per insert).
const batchInsertInTmpTable = async (updatesInfo, trx) => {
  const tmpEntries = updatesInfo.flatMap(({ entriesIdsToUpdate, attributesValues }) =>
    entriesIdsToUpdate.map(id => ({ id, ...attributesValues }))
  );

  await trx.batchInsert(TMP_TABLE_NAME, tmpEntries, 100);
};
|
|
|
|
|
2021-03-02 16:42:17 +01:00
|
|
|
// Applies the updates directly against the content-type table (fallback path
// for clients other than pg/mysql): one UPDATE per updates-info item, all of
// them running concurrently inside the given transaction.
const batchUpdate = async (updatesInfo, trx, model) => {
  const pendingQueries = [];

  for (const { entriesIdsToUpdate, attributesValues } of updatesInfo) {
    const query = trx
      .from(model.collectionName)
      .update(attributesValues)
      .whereIn('id', entriesIdsToUpdate);
    pendingQueries.push(query);
  }

  await Promise.all(pendingQueries);
};
|
2021-03-01 11:26:44 +01:00
|
|
|
|
2021-03-02 16:42:17 +01:00
|
|
|
// Copies the staged attribute values from the temporary table back into the
// content-type table with a single SQL statement (one form per dialect).
// Only pg and mysql are handled: other clients never reach this path because
// migrateForBookshelf uses per-batch updates for them instead.
const updateFromTmpTable = async ({ model, trx, attributesToMigrate }) => {
  const collectionName = model.collectionName;

  let bindings = [];

  if (model.client === 'pg') {
    // Produces: UPDATE "coll" SET attr = "tmp".attr, ... FROM "tmp" WHERE "coll".id = "tmp".id
    const substitutes = attributesToMigrate.map(() => '?? = ??.??').join(',');

    // Binding order must mirror the ?? identifier placeholders exactly:
    // target table, then (attr, tmp table, attr) per attribute, then FROM/WHERE identifiers.
    bindings.push(collectionName);
    attributesToMigrate.forEach(attr => bindings.push(attr, TMP_TABLE_NAME, attr));
    bindings.push(TMP_TABLE_NAME, collectionName, TMP_TABLE_NAME);

    await trx.raw(`UPDATE ?? SET ${substitutes} FROM ?? WHERE ??.id = ??.id;`, bindings);
  } else if (model.client === 'mysql') {
    // Produces: UPDATE `coll` JOIN `tmp` ON `coll`.id = `tmp`.id SET `coll`.attr = `tmp`.attr, ...
    const substitutes = attributesToMigrate.map(() => '??.?? = ??.??').join(',');

    bindings.push(collectionName, TMP_TABLE_NAME, collectionName, TMP_TABLE_NAME);
    attributesToMigrate.forEach(attr => bindings.push(collectionName, attr, TMP_TABLE_NAME, attr));

    await trx.raw(`UPDATE ?? JOIN ?? ON ??.id = ??.id SET ${substitutes};`, bindings);
  }
};
|
2021-02-25 17:40:14 +01:00
|
|
|
|
2021-03-02 16:42:17 +01:00
|
|
|
// Creates an empty temporary table with the same `id` + migrated-attribute
// columns as the content-type table. `CREATE TABLE ... AS SELECT ... WHERE 0`
// copies the column definitions without copying any rows.
const createTmpTable = async ({ ORM, attributesToMigrate, model }) => {
  const columnsToCopy = ['id', ...attributesToMigrate];

  // Drop any leftover table from a previously interrupted migration run.
  await ORM.knex.schema.dropTableIfExists(TMP_TABLE_NAME);

  await ORM.knex.raw(`CREATE TABLE ?? AS ??`, [
    TMP_TABLE_NAME,
    ORM.knex
      .select(columnsToCopy)
      .from(model.collectionName)
      .whereRaw('?', 0), // always-false condition: structure only, no data
  ]);
};
|
2021-02-25 17:40:14 +01:00
|
|
|
|
2021-03-02 16:42:17 +01:00
|
|
|
// Drops the temporary migration table if it exists. Returns the knex promise.
const deleteTmpTable = ({ ORM }) => {
  return ORM.knex.schema.dropTableIfExists(TMP_TABLE_NAME);
};
|
|
|
|
|
|
|
|
// Bookshelf (SQL) implementation: for every locale (default locale first),
// pages through the entries of that locale and copies the values of the
// de-localized attributes to the related localizations.
// pg/mysql: values are staged in a temporary table, then applied with one
// bulk UPDATE. Other clients: per-batch UPDATEs inside the transaction.
const migrateForBookshelf = async ({ ORM, model, attributesToMigrate, locales }) => {
  // The migration is custom for pg and mysql for better performance
  const isPgOrMysql = ['pg', 'mysql'].includes(model.client);

  if (isPgOrMysql) {
    await createTmpTable({ ORM, attributesToMigrate, model });
  }

  const trx = await ORM.knex.transaction();
  try {
    // Locales whose entries have already propagated their values; used to
    // avoid overwriting an earlier (higher-priority) locale's copies.
    const processedLocaleCodes = [];
    for (const locale of locales) {
      let offset = 0;
      // eslint-disable-next-line no-constant-condition
      while (true) {
        // Offset pagination is safe here: the updates only touch rows of
        // OTHER locales, so the paged set (this locale) is stable.
        const batch = await trx
          .select([...attributesToMigrate, 'locale', 'localizations'])
          .from(model.collectionName)
          .where('locale', locale.code)
          .orderBy('id')
          .offset(offset)
          .limit(BATCH_SIZE);

        offset += BATCH_SIZE;

        // postgres automatically parses JSON, but not sqlite nor mysql
        batch.forEach(entry => {
          if (typeof entry.localizations === 'string') {
            entry.localizations = JSON.parse(entry.localizations);
          }
        });

        const entriesToProcess = batch.filter(shouldBeProcesseed(processedLocaleCodes));
        const updatesInfo = getUpdatesInfo({ entriesToProcess, locale, attributesToMigrate });

        if (isPgOrMysql) {
          await batchInsertInTmpTable(updatesInfo, trx);
        } else {
          await batchUpdate(updatesInfo, trx, model);
        }

        // A short batch means this locale is exhausted.
        if (batch.length < BATCH_SIZE) {
          break;
        }
      }
      processedLocaleCodes.push(locale.code);
    }

    if (isPgOrMysql) {
      await updateFromTmpTable({ model, trx, attributesToMigrate });
    }

    await trx.commit();

    // NOTE(review): this runs after commit, so a failure here reaches the
    // catch and calls rollback() on an already-committed transaction — the
    // data changes are kept, only the cleanup error propagates. Confirm this
    // is the intended behavior.
    if (isPgOrMysql) {
      await deleteTmpTable({ ORM });
    }
  } catch (e) {
    await trx.rollback();
    throw e;
  }
};
|
|
|
|
|
2021-03-02 16:42:17 +01:00
|
|
|
// Mongoose
|
|
|
|
|
2021-02-25 17:40:14 +01:00
|
|
|
// Mongoose implementation: for every locale (default locale first), pages
// through the entries of that locale (keyset pagination on _id) and copies
// the values of the de-localized attributes to the related localizations
// via bulkWrite updateMany operations.
const migrateForMongoose = async ({ model, attributesToMigrate, locales }) => {
  // Locales whose entries have already propagated their values; used to
  // avoid overwriting an earlier (higher-priority) locale's copies.
  const processedLocaleCodes = [];
  for (const locale of locales) {
    let batchCount = BATCH_SIZE;
    let lastId;
    while (batchCount === BATCH_SIZE) {
      const findParams = { locale: locale.code };
      if (lastId) {
        findParams._id = { $gt: lastId };
      }

      const batch = await model
        .find(findParams, [...attributesToMigrate, 'locale', 'localizations'])
        .sort({ _id: 1 })
        .limit(BATCH_SIZE);

      if (batch.length > 0) {
        lastId = batch[batch.length - 1]._id;
      }
      batchCount = batch.length;

      // Fix: shouldBeProcesseed is curried — it must be called with the
      // processed locale codes to produce the actual predicate. Passing the
      // bare function to filter() made it return the inner closure (always
      // truthy), so every entry was kept and already-synced localizations
      // were overwritten by lower-priority locales.
      const entriesToProcess = batch.filter(shouldBeProcesseed(processedLocaleCodes));

      const updatesInfo = getUpdatesInfo({ entriesToProcess, locale, attributesToMigrate });

      const updates = updatesInfo.map(({ entriesIdsToUpdate, attributesValues }) => ({
        updateMany: { filter: { _id: { $in: entriesIdsToUpdate } }, update: attributesValues },
      }));

      // bulkWrite rejects an empty operations array — skip when there is
      // nothing to update in this batch.
      if (updates.length > 0) {
        await model.bulkWrite(updates);
      }
    }
    processedLocaleCodes.push(locale.code);
  }
};
|
2021-02-18 18:42:28 +01:00
|
|
|
|
2021-02-23 16:52:30 +01:00
|
|
|
// Migration when i18n is disabled on a field of a content-type that have i18n enabled
|
2021-02-18 18:42:28 +01:00
|
|
|
// Runs after a model's definition update. If i18n was disabled on some
// attributes of a localized content-type, copies the values of those
// attributes from each entry to all of its related localizations so the
// now-shared fields agree across locales.
const after = async ({ model, definition, previousDefinition, ORM }) => {
  const ctService = getService('content-types');
  const localeService = getService('locales');

  if (!ctService.isLocalized(model)) {
    return;
  }

  // Attributes that were localized before but are not anymore, restricted to
  // attributes that still exist on the new definition.
  const localizedAttributes = ctService.getLocalizedAttributes(definition);
  const prevLocalizedAttributes = ctService.getLocalizedAttributes(previousDefinition);
  const attributesDisabled = difference(prevLocalizedAttributes, localizedAttributes);
  const attributesToMigrate = intersection(Object.keys(definition.attributes), attributesDisabled);

  if (attributesToMigrate.length === 0) {
    return;
  }

  let locales = await localeService.find();
  locales = await localeService.setIsDefault(locales);
  locales = orderBy(['isDefault', 'code'], ['desc', 'asc'])(locales); // Put default locale first

  if (model.orm === 'bookshelf') {
    await migrateForBookshelf({ ORM, model, attributesToMigrate, locales });
  } else if (model.orm === 'mongoose') {
    await migrateForMongoose({ model, attributesToMigrate, locales });
  }
  // Fix: removed a leftover debug `throw new Error('Done')` that ran
  // unconditionally here and made the migration fail even after completing
  // successfully.
};
|
|
|
|
|
2021-02-25 17:40:14 +01:00
|
|
|
// No-op: this migration only needs to run after the definition update.
const before = () => {};
|
|
|
|
|
2021-02-18 18:42:28 +01:00
|
|
|
// Migration lifecycle hooks consumed by the database layer.
module.exports = {
  before,
  after,
};
|