mirror of https://github.com/datahub-project/datahub.git
synced 2025-09-02 22:03:11 +00:00
fix(ui) Merge duplicate schema fields on siblings regardless of casing (#7413)
Co-authored-by: Pedro Silva <pedro@acryl.io>
This commit is contained in:
parent 2c3e3c203f
commit b8f8a10ee2
@@ -124,6 +124,20 @@ const datasetUnprimary = {
                 label: 'hi',
             },
             ...(dataset4WithLineage.schemaMetadata?.fields || []),
+            {
+                __typename: 'SchemaField',
+                nullable: false,
+                recursive: false,
+                fieldPath: 'duplicate_field',
+                description: 'Test to make sure fields merge works case insensitive',
+                type: SchemaFieldDataType.String,
+                nativeDataType: 'varchar(100)',
+                isPartOfKey: false,
+                jsonPath: null,
+                globalTags: null,
+                glossaryTerms: null,
+                label: 'hi',
+            },
         ],
     },
     siblings: {
@@ -133,6 +147,27 @@
 
 const datasetPrimaryWithSiblings = {
     ...datasetPrimary,
+    schemaMetadata: {
+        ...datasetPrimary.schemaMetadata,
+        fields: [
+            ...(datasetPrimary.schemaMetadata?.fields || []),
+            {
+                __typename: 'SchemaField',
+                nullable: false,
+                recursive: false,
+                fieldPath: 'DUPLICATE_FIELD',
+                description: 'Test to make sure fields merge works case insensitive',
+                type: SchemaFieldDataType.String,
+                nativeDataType: 'varchar(100)',
+                isPartOfKey: false,
+                jsonPath: null,
+                globalTags: null,
+                glossaryTerms: null,
+                label: 'hi',
+            },
+        ],
+    },
+
     siblings: {
         isPrimary: true,
         siblings: [datasetUnprimary],
@@ -492,10 +527,11 @@ describe('siblingUtils', () => {
         expect(combinedData.dataset.globalTags.tags[1].tag.urn).toEqual('urn:li:tag:primary-tag');
 
         // merges schema metadata properly by fieldPath
-        expect(combinedData.dataset.schemaMetadata?.fields).toHaveLength(3);
+        expect(combinedData.dataset.schemaMetadata?.fields).toHaveLength(4);
         expect(combinedData.dataset.schemaMetadata?.fields[0].fieldPath).toEqual('new_one');
         expect(combinedData.dataset.schemaMetadata?.fields[1].fieldPath).toEqual('user_id');
         expect(combinedData.dataset.schemaMetadata?.fields[2].fieldPath).toEqual('user_name');
+        expect(combinedData.dataset.schemaMetadata?.fields[3].fieldPath).toEqual('DUPLICATE_FIELD');
 
         // will overwrite string properties w/ primary
         expect(combinedData.dataset.editableProperties.description).toEqual('secondary description');
@@ -51,9 +51,16 @@ const combineMerge = (target, source, options) => {
     return destination;
 };
 
-// use when you want to merge and array of objects by key in the object as opposed to by index of array
+function convertObjectKeysToLowercase(object: Record<string, unknown>) {
+    return Object.fromEntries(Object.entries(object).map(([key, value]) => [key.toLowerCase(), value]));
+}
+
+// use when you want to merge an array of objects by key in the object as opposed to by index of array
 const mergeArrayOfObjectsByKey = (destinationArray: any[], sourceArray: any[], key: string) => {
-    return values(merge(keyBy(destinationArray, key), keyBy(sourceArray, key)));
+    const destination = convertObjectKeysToLowercase(keyBy(destinationArray, key));
+    const source = convertObjectKeysToLowercase(keyBy(sourceArray, key));
+
+    return values(merge(destination, source));
 };
 
 const mergeTags = (destinationArray, sourceArray, _options) => {
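A minimal, self-contained sketch of the technique this patch relies on, written with the same lodash helpers (keyBy, merge, values): both field arrays are keyed by fieldPath, the keys are lower-cased before merging, so 'duplicate_field' from one sibling and 'DUPLICATE_FIELD' from the other collapse into a single merged entry. The Field type, the helper names toLowercaseKeys and mergeFieldsByPath, and the sample data below are illustrative assumptions, not code from the repository.

import { keyBy, merge, values } from 'lodash';

// Illustrative shape only; the real SchemaField type in DataHub carries more properties.
type Field = { fieldPath: string; description?: string; nativeDataType?: string };

// Lower-case the keys of a keyed map so that lookups ignore casing
// (same idea as convertObjectKeysToLowercase in the diff above).
const toLowercaseKeys = (obj: Record<string, Field>): Record<string, Field> =>
    Object.fromEntries(Object.entries(obj).map(([key, value]) => [key.toLowerCase(), value] as [string, Field]));

// Merge two field arrays by fieldPath, ignoring case, in the same style as mergeArrayOfObjectsByKey.
const mergeFieldsByPath = (destinationFields: Field[], sourceFields: Field[]): Field[] =>
    values(
        merge(
            toLowercaseKeys(keyBy(destinationFields, 'fieldPath')),
            toLowercaseKeys(keyBy(sourceFields, 'fieldPath')),
        ),
    );

// Hypothetical sample data: the two casings of the duplicate field fold into one entry,
// with the second array (the primary side) winning on conflicting properties.
const siblingFields: Field[] = [
    { fieldPath: 'user_id', nativeDataType: 'bigint' },
    { fieldPath: 'duplicate_field', nativeDataType: 'varchar(100)' },
];
const primaryFields: Field[] = [{ fieldPath: 'DUPLICATE_FIELD', description: 'described only on the primary' }];

const mergedFields = mergeFieldsByPath(siblingFields, primaryFields);
console.log(mergedFields.length); // 2, not 3: the case-variant duplicates merged
console.log(mergedFields[1]);
// { fieldPath: 'DUPLICATE_FIELD', nativeDataType: 'varchar(100)', description: 'described only on the primary' }

This mirrors the updated test above: the combined fixture ends up with four fields rather than five, and the primary's DUPLICATE_FIELD casing is the one that survives.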