Merge branch 'main' into chore/tracking-edit-field-name

Authored by Simone on 2022-11-21 10:30:05 +01:00; committed by GitHub
commit a45e27372a
17 changed files with 48 additions and 37 deletions

View File

@@ -1,12 +1,7 @@
name: 'Install modules'
description: 'Run yarn install and add global modules'
inputs:
globalPackages:
description: 'Global packages to install'
description: 'Install yarn dependencies'
runs:
using: 'composite'
steps:
- run: $GITHUB_ACTION_PATH/script.sh
env:
GLOBAL_PACKAGES: ${{ inputs.globalPackages }}
shell: bash

View File

@@ -1,8 +1,2 @@
# install global packages if set
if [[ -n "$GLOBAL_PACKAGES" ]]; then
yarn global add "$GLOBAL_PACKAGES"
yarn global bin >>$GITHUB_PATH
fi
# run yarn
yarn

View File

@@ -0,0 +1 @@
CREATE SCHEMA myschema;

View File

@@ -34,8 +34,6 @@ jobs:
name: 'unit_back (node: ${{ matrix.node }})'
needs: [lint]
runs-on: ubuntu-latest
env:
CODECOV_TOKEN: ${{ secrets.codecov }}
strategy:
matrix:
node: [14, 16, 18]
@@ -46,17 +44,19 @@ jobs:
node-version: ${{ matrix.node }}
cache: yarn
- uses: ./.github/actions/install-modules
with:
globalPackages: codecov
- name: Run tests
run: yarn run -s test:unit --coverage && codecov -C -F unit
run: yarn run -s test:unit --coverage
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./coverage
flags: back,unit_back
unit_front:
name: 'unit_front (node: ${{ matrix.node }})'
needs: [lint]
runs-on: ubuntu-latest
env:
CODECOV_TOKEN: ${{ secrets.codecov }}
strategy:
matrix:
node: [14, 16, 18]
@@ -67,12 +67,16 @@ jobs:
node-version: ${{ matrix.node }}
cache: yarn
- uses: ./.github/actions/install-modules
with:
globalPackages: codecov
- name: Build
run: yarn build
- name: Run test
run: yarn run -s test:front --coverage && codecov -C -F front
run: yarn run -s test:front --coverage
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./coverage
flags: front,unit_front
api_ce_pg:
runs-on: ubuntu-latest
@@ -96,6 +100,7 @@ jobs:
--health-interval 10s
--health-timeout 5s
--health-retries 5
-v /__w/.github/workflows/db/postgres:/docker-entrypoint-initdb.d
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
@@ -226,6 +231,7 @@ jobs:
--health-interval 10s
--health-timeout 5s
--health-retries 5
-v /__w/.github/workflows/db/postgres:/docker-entrypoint-initdb.d
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432

.gitignore
View File

@@ -68,6 +68,7 @@ $RECYCLE.BIN/
.tmp
*.log
*.sql
!.github/workflows/db/**/*.sql
*.sqlite

View File

@@ -1,5 +1,6 @@
'use strict';
module.exports = {
coverageDirectory: '<rootDir>/coverage',
projects: ['<rootDir>/packages/**/jest.config.js', '<rootDir>/.github'],
};

View File

@@ -8,5 +8,4 @@ module.exports = {
displayName: (pkg.strapi && pkg.strapi.name) || pkg.name,
roots: [__dirname],
collectCoverageFrom: ['<rootDir>/packages/core/admin/admin/**/*.js'],
coverageDirectory: '<rootDir>/packages/core/admin/coverage',
};

View File

@@ -10,6 +10,7 @@ const {
hasInverseOrderColumn,
} = require('../metadata/relations');
const { createQueryBuilder } = require('../query');
const { addSchema } = require('../utils/knex');
/**
* If some relations currently exist for this oneToX relation, on the one side, this function removes them and updates the inverse order if needed.
@@ -241,7 +242,8 @@ const cleanOrderColumns = async ({ id, attribute, db, inverseRelIds, transaction
)
.transacting(trx);
break;
default:
default: {
const joinTableName = addSchema(joinTable.name);
await db.connection
.raw(
`UPDATE ?? as a
@@ -252,9 +254,10 @@
WHERE ${where.join(' OR ')}
) AS b
WHERE b.id = a.id`,
[joinTable.name, ...updateBinding, ...selectBinding, joinTable.name, ...whereBinding]
[joinTableName, ...updateBinding, ...selectBinding, joinTableName, ...whereBinding]
)
.transacting(trx);
}
/*
`UPDATE :joinTable: as a
SET :orderColumn: = b.src_order, :inverseOrderColumn: = b.inv_order

View File

@@ -7,6 +7,16 @@ const isKnexQuery = (value) => {
return value instanceof KnexBuilder || value instanceof KnexRaw;
};
/**
* Adds the name of the schema to the table name if the schema was defined by the user.
* Users can set the db schema only for Postgres, via the Strapi database config.
*/
const addSchema = (tableName) => {
const schemaName = strapi.db.connection.getSchemaName();
return schemaName ? `${schemaName}.${tableName}` : tableName;
};
module.exports = {
isKnexQuery,
addSchema,
};
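
For context, a minimal usage sketch of the new addSchema helper, assuming a Postgres connection configured with schema: 'myschema' (as in the test database configs further down); the table name 'my_join_table' and the count query are made up for illustration and are not part of this diff:

const { addSchema } = require('../utils/knex'); // path as required from the relations module above

// Inside an async function (e.g. a service method):
// with a schema configured, getSchemaName() returns 'myschema' and the helper
// prefixes the table name; without one, the table name is returned unchanged.
const tableName = addSchema('my_join_table'); // => 'myschema.my_join_table'

// knex splits dotted identifier bindings, so ?? renders as "myschema"."my_join_table".
await strapi.db.connection.raw('SELECT COUNT(*) AS total FROM ??', [tableName]);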

View File

@@ -6,5 +6,4 @@ module.exports = {
displayName: (pkg.strapi && pkg.strapi.name) || pkg.name,
roots: [__dirname],
collectCoverageFrom: ['<rootDir>/packages/core/helper-plugin/lib/src/**/*.js'],
coverageDirectory: '<rootDir>/packages/core/helper-plugin/coverage',
};

View File

@@ -154,7 +154,7 @@ module.exports = {
// update path for folders themselves & folders below
totalFolderNumber = await strapi.db
.connection(folderTable)
.getConnection(folderTable)
.transacting(trx)
.where(pathColName, existingFolder.path)
.orWhere(pathColName, 'like', `${existingFolder.path}/%`)
@@ -169,7 +169,7 @@ module.exports = {
// update path of files below
totalFileNumber = await strapi.db
.connection(fileTable)
.getConnection(fileTable)
.transacting(trx)
.where(folderPathColName, existingFolder.path)
.orWhere(folderPathColName, 'like', `${existingFolder.path}/%`)
@@ -209,7 +209,7 @@ module.exports = {
// update files main fields (path + updatedBy)
await strapi.db
.connection(fileTable)
.getConnection(fileTable)
.transacting(trx)
.whereIn('id', fileIds)
.update(folderPathColName, destinationFolderPath);

View File

@@ -41,7 +41,7 @@ describe('metrics', () => {
metadata: {
get: () => ({ attributes: { path: { columnName: 'path' } } }),
},
connection() {
getConnection() {
return {
select() {
return {
@@ -53,7 +53,7 @@
},
},
};
strapi.db.connection.raw = raw;
strapi.db.connection = { raw };
const { computeMetrics } = metricsService({ strapi });

View File

@@ -147,7 +147,7 @@ const update = async (id, { name, parent }, { user }) => {
// update folders below
await strapi.db
.connection(folderTable)
.getConnection(folderTable)
.transacting(trx)
.where(pathColumnName, existingFolder.path)
.orWhere(pathColumnName, 'like', `${existingFolder.path}/%`)
@@ -162,7 +162,7 @@ const update = async (id, { name, parent }, { user }) => {
// update files below
await strapi.db
.connection(fileTable)
.getConnection(fileTable)
.transacting(trx)
.where(folderPathColumnName, existingFolder.path)
.orWhere(folderPathColumnName, 'like', `${existingFolder.path}/%`)

View File

@@ -27,8 +27,6 @@ module.exports = ({ strapi }) => ({
queryParams.push(String(i), '');
}
const knex = strapi.db.connection;
/*
The goal of the following query is to count the number of folders with depth 1, depth 2, etc.
The query returns:
@@ -49,9 +47,10 @@ module.exports = ({ strapi }) => ({
*/
const folderLevelsArray = (
await knex(folderTable)
await strapi.db
.getConnection(folderTable)
.select(
knex.raw(
strapi.db.connection.raw(
`LENGTH(${keepOnlySlashesSQLString}) AS depth, COUNT(*) AS occurence`,
queryParams
)
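
As a rough illustration of the depth-counting goal described in the comment above, here is a hedged sketch that swaps keepOnlySlashesSQLString for a plain slash count; the table name, column name, and grouping are assumptions, and the real expression in the diffed file may differ:

// Inside an async function. Depth = number of '/' in the materialized path,
// e.g. a path of '/1/4/9' has depth 3. 'upload_folders' and 'path' are assumed names.
const folderLevels = await strapi.db
  .getConnection('upload_folders')
  .select(
    strapi.db.connection.raw(
      `LENGTH(path) - LENGTH(REPLACE(path, '/', '')) AS depth, COUNT(*) AS occurence`
    )
  )
  .groupBy('depth');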

View File

@@ -18,6 +18,7 @@ const batchInsertInTmpTable = async ({ updatesInfo }, { transacting: trx }) => {
const updateFromTmpTable = async ({ model, attributesToMigrate }, { transacting: trx }) => {
const { collectionName } = model;
if (model.client === 'pg') {
// IMPORTANT TODO: use postgres schema
const substitutes = attributesToMigrate.map(() => '?? = ??.??').join(',');
const bindings = [collectionName];
attributesToMigrate.forEach((attr) => bindings.push(attr, TMP_TABLE_NAME, attr));
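
For readability, a small standalone sketch of how the substitutes string and the bindings array built above line up; the concrete values for collectionName, attributesToMigrate, and TMP_TABLE_NAME are invented for illustration:

// Hypothetical inputs (none of these values come from the diff):
const collectionName = 'articles';
const attributesToMigrate = ['title', 'body'];
const TMP_TABLE_NAME = 'tmp_migration_table';

const substitutes = attributesToMigrate.map(() => '?? = ??.??').join(',');
const bindings = [collectionName];
attributesToMigrate.forEach((attr) => bindings.push(attr, TMP_TABLE_NAME, attr));

console.log(substitutes); // '?? = ??.??,?? = ??.??'
console.log(bindings);
// [ 'articles', 'title', 'tmp_migration_table', 'title', 'body', 'tmp_migration_table', 'body' ]
// knex interpolates each ?? as a quoted identifier, e.g. "title" = "tmp_migration_table"."title";
// per the TODO above, the Postgres schema would still have to be folded into these identifiers.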

View File

@@ -20,6 +20,7 @@ const databases = {
database: 'strapi_test',
username: 'strapi',
password: 'strapi',
schema: 'myschema',
},
},
mysql: {

View File

@@ -16,6 +16,7 @@ const databases = {
database: 'strapi_test',
username: 'strapi',
password: 'strapi',
schema: 'myschema',
},
},
mysql: {