Mirror of https://github.com/strapi/strapi.git (synced 2025-11-01 18:33:55 +00:00)

fix: allow '.default' to be ignored during schema validation (#20421)

parent: 40f9762848
commit: 714ea0a15a
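
In short: the transfer engine validates that source and destination schemas match by diffing them, and a small set of per-attribute properties is allowed to differ without failing the check. This change adds `default` to that list and normalizes schemas to plain JSON before comparing them. Below is a minimal sketch of the idea (simplified, not the engine's actual code; the `path[0] === 'attributes'` guard and the names used here are assumptions inferred from the hunks that follow):

// Simplified sketch of the ignorable-diff check this commit extends.
type Diff = { kind: 'A' | 'D' | 'E' | 'N'; path: (string | number)[] };

// Per-attribute properties that may legitimately differ between source and
// destination. This commit adds 'default' so that, for example, a default
// that is a function on one side (as in the new test) no longer fails
// strict validation.
const IGNORABLE_ATTRIBUTE_PROPERTIES = ['private', 'required', 'configurable', 'default'];

const isIgnorableAttributeDiff = (diff: Diff): boolean =>
  diff.path[0] === 'attributes' &&
  // a valid string attribute name, e.g. 'createdAt'
  typeof diff.path[1] === 'string' &&
  // the diff touches one of the ignorable per-attribute properties
  IGNORABLE_ATTRIBUTE_PROPERTIES.includes(diff.path[2] as string);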
@@ -1,5 +1,5 @@
 import { posix, win32 } from 'path';
-import { cloneDeep } from 'lodash/fp';
+import { cloneDeep, get, set } from 'lodash/fp';
 import { Readable, Writable } from 'stream-chain';
 import type { Struct } from '@strapi/types';
 import { createTransferEngine, TRANSFER_STAGES } from '..';
@@ -19,6 +19,7 @@ import {
   providerStages,
   sourceStages,
 } from '../../__tests__/test-utils';
+import { TransferEngineValidationError } from '../errors';

 const getMockSourceStream = (data: Iterable<unknown>) => Readable.from(data);

@@ -265,25 +266,28 @@ const getSchemasMockSourceStream = (
   data: Array<Struct.Schema> = [
     {
       uid: 'api::foo.foo',
       kind: 'collectionType',
       modelName: 'foo',
       globalId: 'foo',
-      info: { displayName: 'foo' },
+      info: { displayName: 'foo', singularName: 'foo', pluralName: 'foos' },
       modelType: 'contentType',
       attributes: { foo: { type: 'string' } },
     },
     {
       uid: 'api::bar.bar',
       kind: 'collectionType',
       modelName: 'bar',
       globalId: 'bar',
-      info: { displayName: 'bar' },
+      info: { displayName: 'bar', singularName: 'bar', pluralName: 'bars' },
       modelType: 'contentType',
       attributes: { bar: { type: 'integer' } },
     },
     {
       uid: 'api::homepage.homepage',
       kind: 'collectionType',
       modelName: 'homepage',
       globalId: 'homepage',
-      info: { displayName: 'Homepage' },
+      info: { displayName: 'Homepage', singularName: 'homepage', pluralName: 'homepages' },
       modelType: 'contentType',
       attributes: {
         action: { type: 'string' },
@@ -291,9 +295,10 @@ const getSchemasMockSourceStream = (
     },
     {
       uid: 'api::permission.permission',
       kind: 'collectionType',
       modelName: 'permission',
       globalId: 'permission',
-      info: { displayName: 'Permission' },
+      info: { displayName: 'Permission', singularName: 'permission', pluralName: 'permissions' },
       modelType: 'contentType',
       attributes: {
         action: { type: 'string' },
@@ -769,6 +774,7 @@ describe('Transfer engine', () => {
       schemaStrategy: 'exact',
       exclude: [],
     } as unknown as ITransferEngineOptions;

     test('source with source schema missing in destination fails', async () => {
       const source = createSource();
       source.getSchemas = jest.fn().mockResolvedValue({ ...schemas, foo: { foo: 'bar' } });
@@ -779,6 +785,7 @@ describe('Transfer engine', () => {
         })()
       ).rejects.toThrow();
     });

     test('source with destination schema missing in source fails', async () => {
       const destination = createDestination();
       destination.getSchemas = jest.fn().mockResolvedValue({ ...schemas, foo: { foo: 'bar' } });
@@ -789,6 +796,7 @@ describe('Transfer engine', () => {
         })()
       ).rejects.toThrow();
     });

     test('differing nested field fails', async () => {
       const destination = createDestination();
       const fakeSchema = cloneDeep(schemas);
@@ -806,6 +814,67 @@ describe('Transfer engine', () => {
       ).rejects.toThrow();
     });
   });
+
+  describe('strict', () => {
+    const engineOptions = {
+      versionStrategy: 'exact',
+      schemaStrategy: 'strict',
+      exclude: [],
+    } as unknown as ITransferEngineOptions;
+
+    test.each([
+      ['private', (v: boolean) => !v],
+      ['required', (v: boolean) => !v],
+      ['configurable', (v: boolean) => v],
+      ['default', () => () => null],
+    ])(
+      `Don't throw on ignorable attribute's properties: %s`,
+      (attributeName, transformValue) => {
+        const destination = createDestination();
+        const fakeSchemas = cloneDeep(schemas);
+
+        const path = `attributes.createdAt.${attributeName}`;
+        const oldValue = get(path, fakeSchemas['api::homepage.homepage']);
+
+        fakeSchemas['api::homepage.homepage'] = set(
+          path,
+          transformValue(oldValue),
+          fakeSchemas['api::homepage.homepage']
+        );
+
+        destination.getSchemas = jest.fn().mockResolvedValue(fakeSchemas);
+        const engine = createTransferEngine(completeSource, destination, engineOptions);
+
+        expect(
+          (async () => {
+            await engine.transfer();
+          })()
+        ).resolves.not.toThrow();
+      }
+    );
+
+    test(`Throws on regular attributes' properties`, () => {
+      const destination = createDestination();
+      const fakeSchemas = set(
+        '["api::homepage.homepage"].attributes.createdAt.type',
+        'string',
+        cloneDeep(schemas)
+      );
+
+      destination.getSchemas = jest.fn().mockResolvedValue(fakeSchemas);
+      const engine = createTransferEngine(completeSource, destination, engineOptions);
+
+      expect(
+        (async () => {
+          await engine.transfer();
+        })()
+      ).rejects.toThrow(
+        new TransferEngineValidationError(`Invalid schema changes detected during integrity checks (using the strict strategy). Please find a summary of the changes below:
+- api::homepage.homepage:
+- Schema value changed at "attributes.createdAt.type": "datetime" (string) => "string" (string)`)
+      );
+    });
+  });
 });

 describe('version matching', () => {
@@ -13,21 +13,25 @@ const isAttributeIgnorable = (diff: Diff) => {
     // Need a valid string attribute name
     typeof diff.path[1] === 'string' &&
     // The diff must be on ignorable attribute properties
-    ['private', 'required', 'configurable'].includes(diff.path[2])
+    ['private', 'required', 'configurable', 'default'].includes(diff.path[2])
   );
 };

 // TODO: clean up the type checking, which will require cleaning up the typings in utils/json.ts
-// exclude admin tables that are not transferrable and are optionally available (such as audit logs which are only available in EE)
+// exclude admin tables that are not transferable and are optionally available (such as audit logs which are only available in EE)
 const isOptionalAdminType = (diff: Diff) => {
   // added/deleted
   if ('value' in diff && isObject(diff.value)) {
     const name = (diff?.value as Struct.ContentTypeSchema)?.info?.singularName;
     return (OPTIONAL_CONTENT_TYPES as ReadonlyArray<string | undefined>).includes(name);
   }

   // modified
   if ('values' in diff && isArray(diff.values) && isObject(diff.values[0])) {
     const name = (diff?.values[0] as Struct.ContentTypeSchema)?.info?.singularName;
     return (OPTIONAL_CONTENT_TYPES as ReadonlyArray<string | undefined>).includes(name);
   }

   return false;
 };
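
The call sites that consume these predicates are not part of this diff; the sketch below is a hypothetical illustration of how such per-diff checks are typically combined, with made-up names (`filterIgnorableDiffs`, `DiffPredicate`), not Strapi APIs:

// Hypothetical usage sketch: keep only the diffs that should fail a strict
// integrity check by dropping everything at least one predicate ignores.
type Diff = { kind: string; path: (string | number)[] };
type DiffPredicate = (diff: Diff) => boolean;

const filterIgnorableDiffs = (diffs: Diff[], ignorable: DiffPredicate[]): Diff[] =>
  diffs.filter((diff) => !ignorable.some((isIgnorable) => isIgnorable(diff)));

// e.g. filterIgnorableDiffs(allDiffs, [isAttributeIgnorable, isOptionalAdminType]);
// anything remaining would be reported as a schema integrity error.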
@@ -12,8 +12,7 @@ import type { Struct } from '@strapi/types';

 import type { IAsset, IMetadata, ISourceProvider, ProviderType, IFile } from '../../../../types';

-import { createDecryptionCipher } from '../../../utils/encryption';
-import { collect } from '../../../utils/stream';
+import * as utils from '../../../utils';
 import { ProviderInitializationError, ProviderTransferError } from '../../../errors/providers';
 import { isFilePathInDirname, isPathEquivalent, unknownPathToPosix } from './utils';

@@ -108,13 +107,19 @@ class LocalFileSourceProvider implements ISourceProvider {
   }

   async getSchemas() {
-    const schemas = await collect<Struct.Schema>(this.createSchemasReadStream());
+    const schemaCollection = await utils.stream.collect<Struct.Schema>(
+      this.createSchemasReadStream()
+    );

-    if (isEmpty(schemas)) {
+    if (isEmpty(schemaCollection)) {
       throw new ProviderInitializationError('Could not load schemas from Strapi data file.');
     }

-    return keyBy('uid', schemas);
+    // Group schema by UID
+    const schemas = keyBy('uid', schemaCollection);
+
+    // Transform to valid JSON
+    return utils.schema.schemasToValidJSON(schemas);
   }

   createEntitiesReadStream(): Readable {
@@ -191,7 +196,7 @@ class LocalFileSourceProvider implements ISourceProvider {
     }

     if (encryption.enabled && encryption.key) {
-      streams.push(createDecryptionCipher(encryption.key));
+      streams.push(utils.encryption.createDecryptionCipher(encryption.key));
     }

     if (compression.enabled) {
@@ -183,10 +183,11 @@ class LocalStrapiDestinationProvider implements IDestinationProvider {

   getSchemas(): Record<string, Struct.Schema> {
     assertValidStrapi(this.strapi, 'Not able to get Schemas');
-    const schemas = {
+
+    const schemas = utils.schema.schemasToValidJSON({
       ...this.strapi.contentTypes,
       ...this.strapi.components,
-    };
+    });

     return utils.schema.mapSchemasValues(schemas);
   }
@@ -85,10 +85,10 @@ class LocalStrapiSourceProvider implements ISourceProvider {
   getSchemas(): Record<string, Struct.Schema> {
     assertValidStrapi(this.strapi, 'Not able to get Schemas');

-    const schemas = {
+    const schemas = utils.schema.schemasToValidJSON({
       ...this.strapi.contentTypes,
       ...this.strapi.components,
-    };
+    });

     return utils.schema.mapSchemasValues(schemas);
   }
@@ -25,3 +25,7 @@ const VALID_SCHEMA_PROPERTIES = [
 export const mapSchemasValues = (schemas: Utils.String.Dict<Struct.Schema>) => {
   return mapValues(pick(VALID_SCHEMA_PROPERTIES), schemas) as Utils.String.Dict<Struct.Schema>;
 };
+
+export const schemasToValidJSON = (schemas: Utils.String.Dict<Struct.Schema>) => {
+  return JSON.parse(JSON.stringify(schemas));
+};
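
For context on why a plain `JSON.parse(JSON.stringify(...))` helps here: a live Strapi instance can expose schema values with no JSON equivalent (the new test above sets an attribute `default` to a function), while schemas read from a data file are already plain JSON. The snippet below only demonstrates standard JSON behaviour; the object itself is made up:

// Illustration: the stringify/parse round-trip drops functions and undefined
// values, so both sides of a transfer are compared as the same plain-JSON shape.
const liveSchemaAttribute = {
  type: 'datetime',
  default: () => new Date(), // not representable in JSON, dropped by stringify
  private: undefined, // undefined values are dropped as well
};

const asValidJSON = JSON.parse(JSON.stringify(liveSchemaAttribute));
// asValidJSON => { type: 'datetime' }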