refactor strapi.cron

Pierre Noël 2022-07-22 17:28:51 +02:00
parent b78af34ebf
commit bac1080f13
9 changed files with 99 additions and 153 deletions

View File

@@ -376,6 +376,11 @@ class Strapi {
       entityValidator: this.entityValidator,
     });
 
+    if (strapi.config.get('server.cron.enabled', true)) {
+      const cronTasks = this.config.get('server.cron.tasks', {});
+      this.cron.add(cronTasks);
+    }
+
     this.telemetry.bootstrap();
 
     let oldContentTypes;
@@ -413,11 +418,7 @@ class Strapi {
     await this.runLifecyclesFunctions(LIFECYCLES.BOOTSTRAP);
 
-    if (strapi.config.get('server.cron.enabled', true)) {
-      const cronTasks = this.config.get('server.cron.tasks', {});
-      this.cron.add(cronTasks, 'user');
-      this.cron.start('user');
-    }
+    this.cron.start();
 
     return this;
   }
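
Note: cron registration now happens in two phases — tasks from config are added during load (first hunk), and every registered job is scheduled by a single this.cron.start() after bootstrap (second hunk), replacing the old 'user' namespace. A minimal sketch of the config shape that the server.cron.enabled / server.cron.tasks lookups imply (the file path and the sample task are assumptions for illustration, not part of this commit):

// config/server.js — hypothetical example; only the `cron` key matters here
module.exports = {
  cron: {
    enabled: true, // read via strapi.config.get('server.cron.enabled', true)
    tasks: {
      // node-schedule format with a leading seconds field:
      // second minute hour day-of-month month day-of-week
      '0 0 3 * * *': async ({ strapi }) => {
        strapi.log.info('nightly task at 03:00:00');
      },
    },
  },
};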

View File

@ -1,18 +1,14 @@
'use strict';
const { Job } = require('node-schedule');
const { isFunction, forEach } = require('lodash/fp');
const { isFunction } = require('lodash/fp');
const createCronService = () => {
let jobsSpecsMap = {};
let jobsSpecs = [];
let running = false;
return {
add(tasks = {}, namespace) {
if (!namespace) {
throw new Error('Tasks should be attached to a namespace.');
}
jobsSpecsMap[namespace] = jobsSpecsMap[namespace] || [];
add(tasks = {}) {
for (const taskExpression in tasks) {
const taskValue = tasks[taskExpression];
@ -33,42 +29,27 @@ const createCronService = () => {
const fnWithStrapi = (...args) => fn({ strapi }, ...args);
const job = new Job(null, fnWithStrapi);
jobsSpecsMap[namespace].push({ job, options });
jobsSpecs.push({ job, options });
if (running) {
job.schedule(options);
}
}
return this;
},
start(namespace) {
if (namespace && !jobsSpecsMap[namespace]) {
throw new Error('namespace not found');
}
if (!namespace) {
forEach(jobs => jobs.forEach(({ job, options }) => job.schedule(options)))(jobsSpecsMap);
return this;
}
jobsSpecsMap[namespace].forEach(({ job, options }) => job.schedule(options));
start() {
jobsSpecs.forEach(({ job, options }) => job.schedule(options));
running = true;
return this;
},
stop(namespace) {
if (namespace && !jobsSpecsMap[namespace]) {
throw new Error('namespace not found');
}
if (!namespace) {
forEach(jobs => jobs.forEach(({ job }) => job.cancel()))(jobsSpecsMap);
return this;
}
jobsSpecsMap[namespace].forEach(({ job }) => job.cancel());
stop() {
jobsSpecs.forEach(({ job }) => job.cancel());
running = false;
return this;
},
destroy() {
this.stop();
jobsSpecsMap = {};
jobsSpecs = [];
return this;
},
};
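
With namespaces gone, the service keeps one flat jobsSpecs array: add() appends (and schedules immediately if the service is already running), start()/stop() act on every job, and destroy() stops everything and clears the list. A usage sketch of the refactored API (the task expression and log line are illustrative):

// After the refactor — no namespace argument anywhere
strapi.cron
  .add({
    '0 0 * * * *': async ({ strapi }) => {
      strapi.log.info('runs at the top of every hour');
    },
  })
  .start(); // schedules all registered jobs; later add() calls schedule themselves

// on shutdown:
strapi.cron.destroy(); // stop() + reset jobsSpecs to []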

View File

@@ -38,7 +38,7 @@ describe('Upload plugin bootstrap function', () => {
       upload: {
         services: {
           metrics: {
-            startRegularMetricsUpdate() {},
+            registerCron() {},
           },
         },
       },

View File

@@ -21,7 +21,7 @@ module.exports = async ({ strapi }) => {
   await registerPermissionActions();
 
-  getService('metrics').startRegularMetricsUpdate();
+  getService('metrics').registerCron();
 };
 
 const registerPermissionActions = async () => {

View File

@@ -1,7 +0,0 @@
-'use strict';
-
-const { getService } = require('./utils');
-
-module.exports = () => {
-  getService('metrics').stopRegularMetricsUpdate();
-};

View File

@@ -2,7 +2,6 @@
 
 const register = require('./register');
 const bootstrap = require('./bootstrap');
-const destroy = require('./destroy');
 const contentTypes = require('./content-types');
 const services = require('./services');
 const routes = require('./routes');
@@ -13,7 +12,6 @@ module.exports = () => {
   return {
     register,
     bootstrap,
-    destroy,
     config,
     routes,
     controllers,

View File

@@ -3,7 +3,7 @@
 const metricsService = require('../metrics');
 
 describe('metrics', () => {
-  describe('computeWeeklyMetrics', () => {
+  describe('computeCronMetrics', () => {
     test.each([
       [[], 0, [0, 0, 0, 0, 0]],
       [
@@ -54,9 +54,9 @@ describe('metrics', () => {
       };
       strapi.db.connection.raw = raw;
 
-      const { computeWeeklyMetrics } = metricsService({ strapi });
+      const { computeCronMetrics } = metricsService({ strapi });
 
-      const results = await computeWeeklyMetrics();
+      const results = await computeCronMetrics();
 
       const [
         assetNumber,
         maxDepth,

View File

@@ -5,107 +5,88 @@ const { FOLDER_MODEL_UID, FILE_MODEL_UID } = require('../constants');
 
 const rand = max => Math.floor(Math.random() * max);
 const getCronRandomWeekly = () => `${rand(60)} ${rand(60)} ${rand(24)} * * ${rand(7)}`;
 
-module.exports = ({ strapi }) => {
-  let running = false;
-
-  return {
-    async computeWeeklyMetrics() {
-      // Folder metrics
-      const pathColName = strapi.db.metadata.get(FOLDER_MODEL_UID).attributes.path.columnName;
-      const folderTable = strapi.getModel(FOLDER_MODEL_UID).collectionName;
-
-      let keepOnlySlashesSQLString = '??';
-      let queryParams = [pathColName];
-      for (let i = 0; i < 10; i += 1) {
-        keepOnlySlashesSQLString = `REPLACE(${keepOnlySlashesSQLString}, ?, ?)`;
-        queryParams.push(String(i), '');
-      }
-
-      const knex = strapi.db.connection;
-
-      /*
-        The following query goal is to count the number of folders with depth 1, depth 2 etc.
-        The query returns :
-        [
-          { depth: 1, occurence: 4 },
-          { depth: 2, occurence: 2 },
-          { depth: 3, occurence: 5 },
-        ]
-
-        The query is built as follow:
-        1. In order to get the depth level of a folder:
-          - we take their path
-          - remove all numbers (by replacing 0123456789 by '', thus the 10 REPLACE in the query)
-          - count the remaining `/`, which correspond to their depth (by using LENGTH)
-          We now have, for each folder, its depth.
-        2. In order to get the number of folders for each depth:
-          - we group them by their depth and use COUNT(*)
-      */
-      const folderLevelsArray = (
-        await knex(folderTable)
-          .select(
-            knex.raw(
-              `LENGTH(${keepOnlySlashesSQLString}) AS depth, COUNT(*) AS occurence`,
-              queryParams
-            )
-          )
-          .groupBy('depth')
-      ).map(map => ({ depth: Number(map.depth), occurence: Number(map.occurence) })); // values can be strings depending on the database
-
-      let product = 0;
-      let folderNumber = 0;
-      let maxDepth = 0;
-      for (const folderLevel of folderLevelsArray) {
-        product += folderLevel.depth * folderLevel.occurence;
-        folderNumber += folderLevel.occurence;
-        if (folderLevel.depth > maxDepth) {
-          maxDepth = folderLevel.depth;
-        }
-      }
-      const averageDepth = folderNumber !== 0 ? product / folderNumber : 0;
-
-      let sumOfDeviation = 0;
-      for (const folderLevel of folderLevelsArray) {
-        sumOfDeviation += Math.abs(folderLevel.depth - averageDepth) * folderLevel.occurence;
-      }
-      const averageDeviationDepth = folderNumber !== 0 ? sumOfDeviation / folderNumber : 0;
-
-      // File metrics
-      const assetNumber = await strapi.entityService.count(FILE_MODEL_UID);
-
-      return {
-        assetNumber,
-        folderNumber,
-        averageDepth,
-        maxDepth,
-        averageDeviationDepth,
-      };
-    },
-    async startRegularMetricsUpdate() {
-      if (running) {
-        throw new Error('Regular metrics updates are already running');
-      }
-      running = true;
-      strapi.cron.add(
-        {
-          [getCronRandomWeekly()]: async ({ strapi }) => {
-            const metrics = await this.computeWeeklyMetrics();
-            strapi.telemetry.send('didSendUploadPropertiesOnceAWeek', metrics);
-          },
-        },
-        'upload.weekly'
-      );
-      strapi.cron.start('upload.weekly');
-    },
-    stopRegularMetricsUpdate() {
-      strapi.cron.stop('upload.weekly');
-      running = false;
-    },
-  };
-};
+module.exports = ({ strapi }) => ({
+  async computeCronMetrics() {
+    // Folder metrics
+    const pathColName = strapi.db.metadata.get(FOLDER_MODEL_UID).attributes.path.columnName;
+    const folderTable = strapi.getModel(FOLDER_MODEL_UID).collectionName;
+
+    let keepOnlySlashesSQLString = '??';
+    let queryParams = [pathColName];
+    for (let i = 0; i < 10; i += 1) {
+      keepOnlySlashesSQLString = `REPLACE(${keepOnlySlashesSQLString}, ?, ?)`;
+      queryParams.push(String(i), '');
+    }
+
+    const knex = strapi.db.connection;
+
+    /*
+      The following query goal is to count the number of folders with depth 1, depth 2 etc.
+      The query returns :
+      [
+        { depth: 1, occurence: 4 },
+        { depth: 2, occurence: 2 },
+        { depth: 3, occurence: 5 },
+      ]
+
+      The query is built as follow:
+      1. In order to get the depth level of a folder:
+        - we take their path
+        - remove all numbers (by replacing 0123456789 by '', thus the 10 REPLACE in the query)
+        - count the remaining `/`, which correspond to their depth (by using LENGTH)
+        We now have, for each folder, its depth.
+      2. In order to get the number of folders for each depth:
+        - we group them by their depth and use COUNT(*)
+    */
+    const folderLevelsArray = (
+      await knex(folderTable)
+        .select(
+          knex.raw(
+            `LENGTH(${keepOnlySlashesSQLString}) AS depth, COUNT(*) AS occurence`,
+            queryParams
+          )
+        )
+        .groupBy('depth')
+    ).map(map => ({ depth: Number(map.depth), occurence: Number(map.occurence) })); // values can be strings depending on the database
+
+    let product = 0;
+    let folderNumber = 0;
+    let maxDepth = 0;
+    for (const folderLevel of folderLevelsArray) {
+      product += folderLevel.depth * folderLevel.occurence;
+      folderNumber += folderLevel.occurence;
+      if (folderLevel.depth > maxDepth) {
+        maxDepth = folderLevel.depth;
+      }
+    }
+    const averageDepth = folderNumber !== 0 ? product / folderNumber : 0;
+
+    let sumOfDeviation = 0;
+    for (const folderLevel of folderLevelsArray) {
+      sumOfDeviation += Math.abs(folderLevel.depth - averageDepth) * folderLevel.occurence;
+    }
+    const averageDeviationDepth = folderNumber !== 0 ? sumOfDeviation / folderNumber : 0;
+
+    // File metrics
+    const assetNumber = await strapi.entityService.count(FILE_MODEL_UID);
+
+    return {
+      assetNumber,
+      folderNumber,
+      averageDepth,
+      maxDepth,
+      averageDeviationDepth,
+    };
+  },
+  async registerCron() {
+    strapi.cron.add({
+      [getCronRandomWeekly()]: async ({ strapi }) => {
+        const metrics = await this.computeCronMetrics();
+        strapi.telemetry.send('didSendUploadPropertiesOnceAWeek', metrics);
+      },
+    });
+  },
+});
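
Two details in this file are worth unpacking. First, getCronRandomWeekly() produces a node-schedule expression of the form 'second minute hour * * day-of-week', i.e. one random moment per week, which spreads the telemetry load across installations. Second, the stacked REPLACE calls (see the comment in the diff above) turn a materialized path into a depth: deleting the digits 0-9 leaves only the slashes, and LENGTH counts them. A worked sketch of both (sample values are illustrative):

// 1. Random weekly schedule: if rand() yields 12, 34, 5 and 2, the expression is
//    '12 34 5 * * 2' — every Tuesday at 05:34:12.

// 2. Depth of a materialized path: strip digits, count remaining slashes.
//    JS equivalent of the ten SQL REPLACEs followed by LENGTH:
const depthOf = path => path.replace(/[0-9]/g, '').length;
depthOf('/3');       // 1 — root-level folder
depthOf('/3/14');    // 2
depthOf('/3/14/1');  // 3

// For a `path` column, the generated SQL fragment expands to roughly:
//   LENGTH(REPLACE(... REPLACE(path, '0', '') ..., '9', '')) AS depth, COUNT(*) AS occurence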

View File

@@ -15720,14 +15720,6 @@ jest-message-util@^28.1.1:
     slash "^3.0.0"
     stack-utils "^2.0.3"
 
-jest-mock@^26.6.2:
-  version "26.6.2"
-  resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-26.6.2.tgz#d6cb712b041ed47fe0d9b6fc3474bc6543feb302"
-  integrity sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew==
-  dependencies:
-    "@jest/types" "^26.6.2"
-    "@types/node" "*"
-
 jest-mock@^27.0.6:
   version "27.5.1"
   resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6"