Mirror of https://github.com/strapi/strapi.git
refactor strapi.cron

commit bac1080f13
parent b78af34ebf
@@ -376,6 +376,11 @@ class Strapi {
       entityValidator: this.entityValidator,
     });

+    if (strapi.config.get('server.cron.enabled', true)) {
+      const cronTasks = this.config.get('server.cron.tasks', {});
+      this.cron.add(cronTasks);
+    }
+
     this.telemetry.bootstrap();

     let oldContentTypes;
@@ -413,11 +418,7 @@ class Strapi {

     await this.runLifecyclesFunctions(LIFECYCLES.BOOTSTRAP);

-    if (strapi.config.get('server.cron.enabled', true)) {
-      const cronTasks = this.config.get('server.cron.tasks', {});
-      this.cron.add(cronTasks, 'user');
-      this.cron.start('user');
-    }
+    this.cron.start();

     return this;
   }
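For reference, here is a minimal sketch of the server configuration that these `config.get('server.cron.enabled')` / `config.get('server.cron.tasks')` calls read, assuming the standard Strapi v4 `config/server.js` file; the cron rule and task body below are hypothetical examples, not part of this commit:

// config/server.js (sketch)
module.exports = ({ env }) => ({
  host: env('HOST', '0.0.0.0'),
  port: env.int('PORT', 1337),
  cron: {
    // read via strapi.config.get('server.cron.enabled', true)
    enabled: true,
    // read via strapi.config.get('server.cron.tasks', {})
    tasks: {
      // key: a node-schedule rule, value: a task function receiving { strapi }
      '0 0 3 * * *': async ({ strapi }) => {
        strapi.log.info('nightly task ran'); // hypothetical example task
      },
    },
  },
});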
@@ -1,18 +1,14 @@
 'use strict';

 const { Job } = require('node-schedule');
-const { isFunction, forEach } = require('lodash/fp');
+const { isFunction } = require('lodash/fp');

 const createCronService = () => {
-  let jobsSpecsMap = {};
+  let jobsSpecs = [];
+  let running = false;

   return {
-    add(tasks = {}, namespace) {
-      if (!namespace) {
-        throw new Error('Tasks should be attached to a namespace.');
-      }
-      jobsSpecsMap[namespace] = jobsSpecsMap[namespace] || [];
-
+    add(tasks = {}) {
       for (const taskExpression in tasks) {
         const taskValue = tasks[taskExpression];

@@ -33,42 +29,27 @@ const createCronService = () => {
         const fnWithStrapi = (...args) => fn({ strapi }, ...args);

         const job = new Job(null, fnWithStrapi);
-        jobsSpecsMap[namespace].push({ job, options });
+        jobsSpecs.push({ job, options });
+
+        if (running) {
+          job.schedule(options);
+        }
       }
       return this;
     },
-
-    start(namespace) {
-      if (namespace && !jobsSpecsMap[namespace]) {
-        throw new Error('namespace not found');
-      }
-
-      if (!namespace) {
-        forEach(jobs => jobs.forEach(({ job, options }) => job.schedule(options)))(jobsSpecsMap);
-        return this;
-      }
-
-      jobsSpecsMap[namespace].forEach(({ job, options }) => job.schedule(options));
+    start() {
+      jobsSpecs.forEach(({ job, options }) => job.schedule(options));
+      running = true;
       return this;
     },
-
-    stop(namespace) {
-      if (namespace && !jobsSpecsMap[namespace]) {
-        throw new Error('namespace not found');
-      }
-
-      if (!namespace) {
-        forEach(jobs => jobs.forEach(({ job }) => job.cancel()))(jobsSpecsMap);
-        return this;
-      }
-
-      jobsSpecsMap[namespace].forEach(({ job }) => job.cancel());
+    stop() {
+      jobsSpecs.forEach(({ job }) => job.cancel());
+      running = false;
       return this;
     },
-
     destroy() {
       this.stop();
-      jobsSpecsMap = {};
+      jobsSpecs = [];
       return this;
     },
   };
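A minimal usage sketch of the refactored service (the two cron rules are hypothetical examples); the point of the new `running` flag is that tasks added after `start()` are scheduled immediately instead of waiting for another `start()` call:

const cron = createCronService();

// Register tasks first; nothing is scheduled until start().
cron.add({
  '*/30 * * * * *': ({ strapi }) => strapi.log.info('every 30 seconds'),
});

cron.start(); // schedules every entry in jobsSpecs and sets running = true

// Because running is now true, this job is scheduled as soon as it is added.
cron.add({
  '0 0 * * * *': ({ strapi }) => strapi.log.info('every hour'),
});

cron.stop();    // cancels all jobs and sets running = false
cron.destroy(); // stop() plus clearing jobsSpecs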
@@ -38,7 +38,7 @@ describe('Upload plugin bootstrap function', () => {
       upload: {
         services: {
           metrics: {
-            startRegularMetricsUpdate() {},
+            registerCron() {},
           },
         },
       },
packages/core/upload/server/bootstrap.js

@@ -21,7 +21,7 @@ module.exports = async ({ strapi }) => {

   await registerPermissionActions();

-  getService('metrics').startRegularMetricsUpdate();
+  getService('metrics').registerCron();
 };

 const registerPermissionActions = async () => {
@@ -1,7 +0,0 @@
-'use strict';
-
-const { getService } = require('./utils');
-
-module.exports = () => {
-  getService('metrics').stopRegularMetricsUpdate();
-};
@@ -2,7 +2,6 @@

 const register = require('./register');
 const bootstrap = require('./bootstrap');
-const destroy = require('./destroy');
 const contentTypes = require('./content-types');
 const services = require('./services');
 const routes = require('./routes');
@@ -13,7 +12,6 @@ module.exports = () => {
   return {
     register,
     bootstrap,
-    destroy,
     config,
     routes,
     controllers,
@@ -3,7 +3,7 @@
 const metricsService = require('../metrics');

 describe('metrics', () => {
-  describe('computeWeeklyMetrics', () => {
+  describe('computeCronMetrics', () => {
     test.each([
       [[], 0, [0, 0, 0, 0, 0]],
       [
@@ -54,9 +54,9 @@ describe('metrics', () => {
     };
     strapi.db.connection.raw = raw;

-    const { computeWeeklyMetrics } = metricsService({ strapi });
+    const { computeCronMetrics } = metricsService({ strapi });

-    const results = await computeWeeklyMetrics();
+    const results = await computeCronMetrics();
     const [
       assetNumber,
       maxDepth,
@@ -5,107 +5,88 @@ const { FOLDER_MODEL_UID, FILE_MODEL_UID } = require('../constants');
 const rand = max => Math.floor(Math.random() * max);
 const getCronRandomWeekly = () => `${rand(60)} ${rand(60)} ${rand(24)} * * ${rand(7)}`;

-module.exports = ({ strapi }) => {
-  let running = false;
-
-  return {
-    async computeWeeklyMetrics() {
+module.exports = ({ strapi }) => ({
+  async computeCronMetrics() {
     // Folder metrics
     const pathColName = strapi.db.metadata.get(FOLDER_MODEL_UID).attributes.path.columnName;
     const folderTable = strapi.getModel(FOLDER_MODEL_UID).collectionName;

     let keepOnlySlashesSQLString = '??';
     let queryParams = [pathColName];
     for (let i = 0; i < 10; i += 1) {
       keepOnlySlashesSQLString = `REPLACE(${keepOnlySlashesSQLString}, ?, ?)`;
       queryParams.push(String(i), '');
     }

     const knex = strapi.db.connection;

     /*
       The following query goal is to count the number of folders with depth 1, depth 2 etc.
       The query returns :
       [
         { depth: 1, occurence: 4 },
         { depth: 2, occurence: 2 },
         { depth: 3, occurence: 5 },
       ]

       The query is built as follow:
       1. In order to get the depth level of a folder:
       - we take their path
       - remove all numbers (by replacing 0123456789 by '', thus the 10 REPLACE in the query)
       - count the remaining `/`, which correspond to their depth (by using LENGTH)
       We now have, for each folder, its depth.
       2. In order to get the number of folders for each depth:
       - we group them by their depth and use COUNT(*)
     */
     const folderLevelsArray = (
       await knex(folderTable)
         .select(
           knex.raw(
             `LENGTH(${keepOnlySlashesSQLString}) AS depth, COUNT(*) AS occurence`,
             queryParams
           )
         )
         .groupBy('depth')
     ).map(map => ({ depth: Number(map.depth), occurence: Number(map.occurence) })); // values can be strings depending on the database

     let product = 0;
     let folderNumber = 0;
     let maxDepth = 0;
     for (const folderLevel of folderLevelsArray) {
       product += folderLevel.depth * folderLevel.occurence;
       folderNumber += folderLevel.occurence;
       if (folderLevel.depth > maxDepth) {
         maxDepth = folderLevel.depth;
       }
     }
     const averageDepth = folderNumber !== 0 ? product / folderNumber : 0;

     let sumOfDeviation = 0;
     for (const folderLevel of folderLevelsArray) {
       sumOfDeviation += Math.abs(folderLevel.depth - averageDepth) * folderLevel.occurence;
     }

     const averageDeviationDepth = folderNumber !== 0 ? sumOfDeviation / folderNumber : 0;

     // File metrics
     const assetNumber = await strapi.entityService.count(FILE_MODEL_UID);

     return {
       assetNumber,
       folderNumber,
       averageDepth,
       maxDepth,
       averageDeviationDepth,
     };
   },

-    async startRegularMetricsUpdate() {
-      if (running) {
-        throw new Error('Regular metrics updates are already running');
-      }
-      running = true;
-
-      strapi.cron.add(
-        {
-          [getCronRandomWeekly()]: async ({ strapi }) => {
-            const metrics = await this.computeWeeklyMetrics();
-            strapi.telemetry.send('didSendUploadPropertiesOnceAWeek', metrics);
-          },
-        },
-        'upload.weekly'
-      );
-
-      strapi.cron.start('upload.weekly');
-    },
-
-    stopRegularMetricsUpdate() {
-      strapi.cron.stop('upload.weekly');
-      running = false;
-    },
-  };
-};
+  async registerCron() {
+    strapi.cron.add({
+      [getCronRandomWeekly()]: async ({ strapi }) => {
+        const metrics = await this.computeCronMetrics();
+        strapi.telemetry.send('didSendUploadPropertiesOnceAWeek', metrics);
+      },
+    });
+  },
+});
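For context, `getCronRandomWeekly()` builds a six-field node-schedule rule (second, minute, hour, day of month, month, day of week), so each installation sends its upload metrics once a week at a random second, minute, hour and weekday. For example (hypothetical rand() values 12, 34, 9 and 3):

'12 34 9 * * 3' // every Wednesday at 09:34:12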
@@ -15720,14 +15720,6 @@ jest-message-util@^28.1.1:
     slash "^3.0.0"
     stack-utils "^2.0.3"

-jest-mock@^26.6.2:
-  version "26.6.2"
-  resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-26.6.2.tgz#d6cb712b041ed47fe0d9b6fc3474bc6543feb302"
-  integrity sha512-YyFjePHHp1LzpzYcmgqkJ0nm0gg/lJx2aZFzFy1S6eUqNjXsOqTK10zNRff2dNfssgokjkG65OlWNcIlgd3zew==
-  dependencies:
-    "@jest/types" "^26.6.2"
-    "@types/node" "*"
-
 jest-mock@^27.0.6:
   version "27.5.1"
   resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-27.5.1.tgz#19948336d49ef4d9c52021d34ac7b5f36ff967d6"