'use strict';

const { FOLDER_MODEL_UID, FILE_MODEL_UID } = require('../constants');

// Random integer in [0, max), used to build the randomized cron rule below.
const rand = max => Math.floor(Math.random() * max);
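// The generated rule has six fields: second minute hour day-of-month month day-of-week.
// Day-of-month and month stay as wildcards, so each installation gets a fixed random
// second, minute, hour, and weekday (e.g. `17 42 3 * * 5`), firing once a week.
// Randomizing the slot spreads telemetry traffic instead of having every installation
// report at the same moment.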
const getCronRandomWeekly = () => `${rand(60)} ${rand(60)} ${rand(24)} * * ${rand(7)}`;
module.exports = ({ strapi }) => {
  // Prevents the weekly cron job from being registered more than once.
  let running = false;

  return {
    async computeWeeklyMetrics() {
      // Folder metrics
      const pathColName = strapi.db.metadata.get(FOLDER_MODEL_UID).attributes.path.columnName;
      const folderTable = strapi.getModel(FOLDER_MODEL_UID).collectionName;

      // Build a nested REPLACE expression that strips every digit from the path
      // column, leaving only the `/` separators.
      let keepOnlySlashesSQLString = '??';
      const queryParams = [pathColName];
      for (let i = 0; i < 10; i += 1) {
        keepOnlySlashesSQLString = `REPLACE(${keepOnlySlashesSQLString}, ?, ?)`;
        queryParams.push(String(i), '');
      }
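
      // For illustration, the finished fragment is the nested call
      //   REPLACE(REPLACE( ... REPLACE(??, '0', '') ... , '8', ''), '9', '')
      // i.e. one REPLACE per digit, bound through knex's `??`/`?` placeholders.
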
      const knex = strapi.db.connection;

      /*
        The goal of the following query is to count the number of folders at depth 1, depth 2, etc.
        It returns:
        [
          { depth: 1, occurence: 4 },
          { depth: 2, occurence: 2 },
          { depth: 3, occurence: 5 },
        ]
        The query is built as follows:
        1. To get the depth of a folder:
          - take its path
          - remove all digits (replace each of 0-9 with '', hence the 10 REPLACEs in the query)
          - count the remaining `/` characters with LENGTH, which equals the folder's depth
        We now have, for each folder, its depth.
        2. To get the number of folders at each depth:
          - group the folders by depth and use COUNT(*)
      */
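      // Example (illustrative values): a folder at path `/3/14/159` has its digits
      // stripped away, leaving `///`; LENGTH('///') = 3, so its depth is 3.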
      const folderLevelsArray = (
        await knex(folderTable)
          .select(
            knex.raw(
              `LENGTH(${keepOnlySlashesSQLString}) AS depth, COUNT(*) AS occurence`,
              queryParams
            )
          )
          .groupBy('depth')
      ).map(row => ({ depth: Number(row.depth), occurence: Number(row.occurence) })); // values can be strings depending on the database

      // Aggregate: total folder count, the depth-weighted sum used for the average,
      // and the maximum depth encountered.
      let product = 0;
      let folderNumber = 0;
      let maxDepth = 0;
      for (const folderLevel of folderLevelsArray) {
        product += folderLevel.depth * folderLevel.occurence;
        folderNumber += folderLevel.occurence;
        if (folderLevel.depth > maxDepth) {
          maxDepth = folderLevel.depth;
        }
      }

      const averageDepth = folderNumber !== 0 ? product / folderNumber : 0;
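      // With the sample rows above: product = 1*4 + 2*2 + 3*5 = 23 across 11 folders,
      // so averageDepth = 23 / 11 ≈ 2.09.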

      // Mean absolute deviation of folder depths, weighted by the folder count at each depth.
      let sumOfDeviation = 0;
      for (const folderLevel of folderLevelsArray) {
        sumOfDeviation += Math.abs(folderLevel.depth - averageDepth) * folderLevel.occurence;
      }

      const averageDeviationDepth = folderNumber !== 0 ? sumOfDeviation / folderNumber : 0;
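      // Continuing the sample: (|1 - 2.09| * 4 + |2 - 2.09| * 2 + |3 - 2.09| * 5) / 11 ≈ 0.83.
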
      // File metrics
      const assetNumber = await strapi.entityService.count(FILE_MODEL_UID);

      return {
        assetNumber,
        folderNumber,
        averageDepth,
        maxDepth,
        averageDeviationDepth,
      };
    },

    async startRegularMetricsUpdate() {
      if (running) {
        throw new Error('Regular metrics updates are already running');
      }
      running = true;

      // Register the job under a randomized weekly schedule. The arrow function keeps
      // `this` bound to this service object, so `this.computeWeeklyMetrics` resolves
      // when the job fires.
      strapi.cron.add(
        {
          [getCronRandomWeekly()]: async ({ strapi }) => {
            const metrics = await this.computeWeeklyMetrics();
            strapi.telemetry.send('didSendUploadPropertiesOnceAWeek', metrics);
          },
        },
        'upload.weekly'
      );

      strapi.cron.start('upload.weekly');
    },

    stopRegularMetricsUpdate() {
      strapi.cron.stop('upload.weekly');
      running = false;
    },
  };
};
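
// A minimal usage sketch (the require path and variable names are illustrative,
// not part of this module):
//
//   const createMetricsService = require('./metrics');
//   const metricsService = createMetricsService({ strapi });
//   metricsService.startRegularMetricsUpdate(); // registers and starts `upload.weekly`
//   // ... later, e.g. on shutdown:
//   metricsService.stopRegularMetricsUpdate();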