Mirror of https://github.com/open-metadata/OpenMetadata.git, synced 2025-11-12 17:02:23 +00:00
fix(ui): UI lag when viewing kafka topics with large nested schemas (#22988)
* fix UI lag for Kafka topics with large nested columns
* fix type

Co-authored-by: Shailesh Parmar <shailesh.parmar.webdev@gmail.com>
This commit is contained in:
parent 26fedbaf0e
commit 5179ce53bc
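At a glance, the fix has two parts: new TableUtils helpers that measure a schema (total field count, nesting depth) and cap row expansion for large schemas, and memoized cell renderers in TopicSchemaFields so the Ant Design table no longer re-creates every cell on each render. A minimal standalone sketch of the expansion-capping idea, assuming a simplified field shape (the real helpers appear at the end of this diff):

type SchemaField = { name?: string; children?: SchemaField[] };

// Total fields, recursing into children.
const countFields = (fields: SchemaField[]): number =>
  fields.reduce((total, f) => total + 1 + countFields(f.children ?? []), 0);

// Row keys of expandable nodes, but only `maxDepth` levels down.
const keysToDepth = (
  fields: SchemaField[],
  maxDepth: number,
  depth = 0
): string[] =>
  depth >= maxDepth
    ? []
    : fields.flatMap((f) =>
        f.children?.length
          ? [
              ...(f.name ? [f.name] : []),
              ...keysToDepth(f.children, maxDepth, depth + 1),
            ]
          : []
      );

// Past ~500 fields, "expand all" falls back to a two-level expansion.
const safeExpandKeys = (fields: SchemaField[], allKeys: string[]): string[] =>
  countFields(fields) > 500 ? keysToDepth(fields, 2) : allKeys;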
@@ -44,7 +44,13 @@ import {
 } from '../../../utils/TableTags/TableTags.utils';
 import {
   getAllRowKeysByKeyName,
+  getExpandAllKeysToDepth,
+  getSafeExpandAllKeys,
+  getSchemaDepth,
+  getSchemaFieldCount,
   getTableExpandableConfig,
+  isLargeSchema,
+  shouldCollapseSchema,
   updateFieldDescription,
   updateFieldTags,
 } from '../../../utils/TableUtils';
@@ -129,6 +135,17 @@ const TopicSchemaFields: FC<TopicSchemaFieldsProps> = ({
     );
   }, [messageSchema?.schemaFields]);
 
+  const schemaStats = useMemo(() => {
+    const fields = messageSchema?.schemaFields ?? [];
+
+    return {
+      totalFields: getSchemaFieldCount(fields),
+      maxDepth: getSchemaDepth(fields),
+      isLargeSchema: isLargeSchema(fields),
+      shouldCollapse: shouldCollapseSchema(fields),
+    };
+  }, [messageSchema?.schemaFields]);
+
   const handleFieldTagsChange = async (
     selectedTags: EntityTags[],
     editColumnTag: Field
@@ -161,7 +178,12 @@ const TopicSchemaFields: FC<TopicSchemaFieldsProps> = ({
 
   const toggleExpandAll = () => {
     if (expandedRowKeys.length < schemaAllRowKeys.length) {
-      setExpandedRowKeys(schemaAllRowKeys);
+      const safeKeys = getSafeExpandAllKeys(
+        messageSchema?.schemaFields ?? [],
+        schemaStats.isLargeSchema,
+        schemaAllRowKeys
+      );
+      setExpandedRowKeys(safeKeys);
     } else {
       setExpandedRowKeys([]);
     }
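A note on the toggle's behavior after this change: for a large schema, getSafeExpandAllKeys returns the depth-limited key set, so "expand all" mounts far fewer rows. Because that set stays shorter than schemaAllRowKeys, the expandedRowKeys.length < schemaAllRowKeys.length check keeps resolving to "expand", and the next click still collapses everything to [].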
@@ -212,6 +234,58 @@ const TopicSchemaFields: FC<TopicSchemaFieldsProps> = ({
   >;
 }, [messageSchema?.schemaFields]);
 
+  const renderDescription = useCallback(
+    (_: string, record: Field, index: number) => (
+      <TableDescription
+        columnData={{
+          fqn: record.fullyQualifiedName ?? '',
+          field: record.description,
+        }}
+        entityFqn={entityFqn}
+        entityType={EntityType.TOPIC}
+        hasEditPermission={hasDescriptionEditAccess}
+        index={index}
+        isReadOnly={isReadOnly}
+        onClick={() => setEditFieldDescription(record)}
+      />
+    ),
+    [entityFqn, hasDescriptionEditAccess, isReadOnly]
+  );
+
+  const renderClassificationTags = useCallback(
+    (tags: TagLabel[], record: Field, index: number) => (
+      <TableTags<Field>
+        entityFqn={entityFqn}
+        entityType={EntityType.TOPIC}
+        handleTagSelection={handleFieldTagsChange}
+        hasTagEditAccess={hasTagEditAccess}
+        index={index}
+        isReadOnly={isReadOnly}
+        record={record}
+        tags={tags}
+        type={TagSource.Classification}
+      />
+    ),
+    [entityFqn, handleFieldTagsChange, hasTagEditAccess, isReadOnly]
+  );
+
+  const renderGlossaryTags = useCallback(
+    (tags: TagLabel[], record: Field, index: number) => (
+      <TableTags<Field>
+        entityFqn={entityFqn}
+        entityType={EntityType.TOPIC}
+        handleTagSelection={handleFieldTagsChange}
+        hasTagEditAccess={hasGlossaryTermEditAccess}
+        index={index}
+        isReadOnly={isReadOnly}
+        record={record}
+        tags={tags}
+        type={TagSource.Glossary}
+      />
+    ),
+    [entityFqn, handleFieldTagsChange, hasGlossaryTermEditAccess, isReadOnly]
+  );
+
   const columns: ColumnsType<Field> = useMemo(
     () => [
       {
@@ -235,20 +309,7 @@ const TopicSchemaFields: FC<TopicSchemaFieldsProps> = ({
         dataIndex: TABLE_COLUMNS_KEYS.DESCRIPTION,
         key: TABLE_COLUMNS_KEYS.DESCRIPTION,
         width: 350,
-        render: (_, record, index) => (
-          <TableDescription
-            columnData={{
-              fqn: record.fullyQualifiedName ?? '',
-              field: record.description,
-            }}
-            entityFqn={entityFqn}
-            entityType={EntityType.TOPIC}
-            hasEditPermission={hasDescriptionEditAccess}
-            index={index}
-            isReadOnly={isReadOnly}
-            onClick={() => setEditFieldDescription(record)}
-          />
-        ),
+        render: renderDescription,
       },
       {
         title: t('label.tag-plural'),
@@ -256,19 +317,7 @@ const TopicSchemaFields: FC<TopicSchemaFieldsProps> = ({
         key: TABLE_COLUMNS_KEYS.TAGS,
         width: 300,
         filterIcon: columnFilterIcon,
-        render: (tags: TagLabel[], record: Field, index: number) => (
-          <TableTags<Field>
-            entityFqn={entityFqn}
-            entityType={EntityType.TOPIC}
-            handleTagSelection={handleFieldTagsChange}
-            hasTagEditAccess={hasTagEditAccess}
-            index={index}
-            isReadOnly={isReadOnly}
-            record={record}
-            tags={tags}
-            type={TagSource.Classification}
-          />
-        ),
+        render: renderClassificationTags,
         filters: tagFilter.Classification,
         filterDropdown: ColumnFilter,
         onFilter: searchTagInData,
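The pattern in the hunks above: the inline render closures were re-created on every component render, which made Ant Design's Table treat every cell of a potentially huge tree as changed. Hoisting the renderers into useCallback gives the column definitions stable identities. A generic sketch of the pattern, with a hypothetical column that is not from this PR:

import { useCallback, useMemo } from 'react';

// Hypothetical .tsx sketch: hoisted, memoized cell renderer.
const useStableColumns = () => {
  const renderName = useCallback((value: string) => <span>{value}</span>, []);

  // The columns array is rebuilt only when a renderer actually changes.
  return useMemo(
    () => [{ dataIndex: 'name', key: 'name', render: renderName }],
    [renderName]
  );
};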
@@ -279,39 +328,39 @@ const TopicSchemaFields: FC<TopicSchemaFieldsProps> = ({
         key: TABLE_COLUMNS_KEYS.GLOSSARY,
         width: 300,
         filterIcon: columnFilterIcon,
-        render: (tags: TagLabel[], record: Field, index: number) => (
-          <TableTags<Field>
-            entityFqn={entityFqn}
-            entityType={EntityType.TOPIC}
-            handleTagSelection={handleFieldTagsChange}
-            hasTagEditAccess={hasGlossaryTermEditAccess}
-            index={index}
-            isReadOnly={isReadOnly}
-            record={record}
-            tags={tags}
-            type={TagSource.Glossary}
-          />
-        ),
+        render: renderGlossaryTags,
         filters: tagFilter.Glossary,
         filterDropdown: ColumnFilter,
         onFilter: searchTagInData,
       },
     ],
     [
       isReadOnly,
       messageSchema,
       hasTagEditAccess,
       editFieldDescription,
       hasDescriptionEditAccess,
       handleFieldTagsChange,
       t,
+      renderSchemaName,
+      renderDataType,
+      renderDescription,
+      renderClassificationTags,
+      renderGlossaryTags,
+      tagFilter,
     ]
   );
 
   useEffect(() => {
-    setExpandedRowKeys(schemaAllRowKeys);
-  }, []);
+    const fields = messageSchema?.schemaFields ?? [];
+
+    if (schemaStats.shouldCollapse) {
+      // For large schemas, expand only 2 levels deep for better performance
+      const optimalKeys = getExpandAllKeysToDepth(fields, 2);
+      setExpandedRowKeys(optimalKeys);
+    } else {
+      // For small schemas, expand all for better UX
+      setExpandedRowKeys(schemaAllRowKeys);
+    }
+  }, [
+    schemaStats.shouldCollapse,
+    schemaAllRowKeys,
+    messageSchema?.schemaFields,
+  ]);
 
   return (
     <Row gutter={[16, 16]}>
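Taken together, the component now keys off two thresholds from TableUtils (implementations at the end of this diff): shouldCollapseSchema (default 50 fields) decides whether the schema mounts collapsed to two levels, and isLargeSchema (default 500 fields) decides whether "expand all" is depth-capped. For a hypothetical flat schema of 120 fields:

import { isLargeSchema, shouldCollapseSchema } from '../utils/TableUtils';

// Hypothetical schema with 120 fields.
const fields = Array.from({ length: 120 }, (_, i) => ({ name: `f${i}` }));

shouldCollapseSchema(fields); // true: 120 > 50, so the table mounts depth-limited
isLargeSchema(fields); // false: 120 <= 500, so "expand all" still expands every row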
@@ -17,9 +17,15 @@ import {
   ExtraTableDropdownOptions,
   findColumnByEntityLink,
   getEntityIcon,
+  getExpandAllKeysToDepth,
+  getSafeExpandAllKeys,
+  getSchemaDepth,
+  getSchemaFieldCount,
   getTagsWithoutTier,
   getTierTags,
+  isLargeSchema,
   pruneEmptyChildren,
+  shouldCollapseSchema,
   updateColumnInNestedStructure,
 } from '../utils/TableUtils';
 import EntityLink from './EntityLink';
@@ -664,4 +670,247 @@ describe('TableUtils', () => {
       );
     });
   });
+
+  describe('Schema Performance Functions', () => {
+    // Mock field structure for testing
+    type MockField = { name?: string; children?: MockField[] };
+
+    const mockNestedFields: MockField[] = [
+      {
+        name: 'level1_field1',
+        children: [
+          {
+            name: 'level2_field1',
+            children: [{ name: 'level3_field1' }, { name: 'level3_field2' }],
+          },
+          {
+            name: 'level2_field2',
+            children: [{ name: 'level3_field3' }],
+          },
+        ],
+      },
+      {
+        name: 'level1_field2',
+        children: [
+          {
+            name: 'level2_field3',
+            children: [{ name: 'level3_field4' }, { name: 'level3_field5' }],
+          },
+        ],
+      },
+      {
+        name: 'level1_field3',
+      },
+    ];
+
+    describe('getSchemaFieldCount', () => {
+      it('should count all fields in a flat structure', () => {
+        const flatFields: MockField[] = [
+          { name: 'field1' },
+          { name: 'field2' },
+          { name: 'field3' },
+        ];
+
+        expect(getSchemaFieldCount(flatFields)).toBe(3);
+      });
+
+      it('should count all fields recursively in nested structure', () => {
+        expect(getSchemaFieldCount(mockNestedFields)).toBe(11); // 3 level1 + 3 level2 + 5 level3 = 11 total fields
+      });
+
+      it('should return 0 for empty array', () => {
+        expect(getSchemaFieldCount([])).toBe(0);
+      });
+
+      it('should handle fields without children property', () => {
+        const fieldsWithoutChildren: MockField[] = [
+          { name: 'field1' },
+          { name: 'field2', children: undefined },
+        ];
+
+        expect(getSchemaFieldCount(fieldsWithoutChildren)).toBe(2);
+      });
+    });
+
+    describe('getSchemaDepth', () => {
+      it('should return 0 for empty array', () => {
+        expect(getSchemaDepth([])).toBe(0);
+      });
+
+      it('should return 1 for flat structure', () => {
+        const flatFields: MockField[] = [
+          { name: 'field1' },
+          { name: 'field2' },
+        ];
+
+        expect(getSchemaDepth(flatFields)).toBe(1);
+      });
+
+      it('should calculate correct depth for nested structure', () => {
+        expect(getSchemaDepth(mockNestedFields)).toBe(3); // 3 levels deep
+      });
+
+      it('should handle mixed depth structure correctly', () => {
+        const mixedDepthFields: MockField[] = [
+          {
+            name: 'shallow',
+            children: [{ name: 'level2' }],
+          },
+          {
+            name: 'deep',
+            children: [
+              {
+                name: 'level2',
+                children: [
+                  {
+                    name: 'level3',
+                    children: [{ name: 'level4' }],
+                  },
+                ],
+              },
+            ],
+          },
+        ];
+
+        expect(getSchemaDepth(mixedDepthFields)).toBe(4); // Should return maximum depth
+      });
+    });
+
+    describe('isLargeSchema', () => {
+      it('should return false for small schemas', () => {
+        const smallFields: MockField[] = Array.from({ length: 10 }, (_, i) => ({
+          name: `field${i}`,
+        }));
+
+        expect(isLargeSchema(smallFields)).toBe(false);
+      });
+
+      it('should return true for large schemas with default threshold', () => {
+        const largeFields: MockField[] = Array.from(
+          { length: 600 },
+          (_, i) => ({
+            name: `field${i}`,
+          })
+        );
+
+        expect(isLargeSchema(largeFields)).toBe(true);
+      });
+
+      it('should respect custom threshold', () => {
+        const fields: MockField[] = Array.from({ length: 100 }, (_, i) => ({
+          name: `field${i}`,
+        }));
+
+        expect(isLargeSchema(fields, 50)).toBe(true);
+        expect(isLargeSchema(fields, 150)).toBe(false);
+      });
+    });
+
+    describe('shouldCollapseSchema', () => {
+      it('should return false for small schemas', () => {
+        const smallFields: MockField[] = Array.from({ length: 10 }, (_, i) => ({
+          name: `field${i}`,
+        }));
+
+        expect(shouldCollapseSchema(smallFields)).toBe(false);
+      });
+
+      it('should return true for schemas above default threshold', () => {
+        const largeFields: MockField[] = Array.from({ length: 60 }, (_, i) => ({
+          name: `field${i}`,
+        }));
+
+        expect(shouldCollapseSchema(largeFields)).toBe(true);
+      });
+
+      it('should respect custom threshold', () => {
+        const fields: MockField[] = Array.from({ length: 30 }, (_, i) => ({
+          name: `field${i}`,
+        }));
+
+        expect(shouldCollapseSchema(fields, 20)).toBe(true);
+        expect(shouldCollapseSchema(fields, 40)).toBe(false);
+      });
+    });
+
+    describe('getExpandAllKeysToDepth', () => {
+      it('should return empty array for empty fields', () => {
+        expect(getExpandAllKeysToDepth([], 2)).toEqual([]);
+      });
+
+      it('should return all expandable keys up to specified depth', () => {
+        const result = getExpandAllKeysToDepth(mockNestedFields, 2);
+
+        // Should include level 1 and level 2 fields that have children
+        expect(result).toContain('level1_field1');
+        expect(result).toContain('level1_field2');
+        expect(result).toContain('level2_field1');
+        expect(result).toContain('level2_field2');
+        expect(result).toContain('level2_field3');
+
+        // Should not include level 3 fields (depth 2 stops before level 3)
+        expect(result).not.toContain('level3_field1');
+        expect(result).not.toContain('level3_field2');
+      });
+
+      it('should respect depth limit', () => {
+        const result1 = getExpandAllKeysToDepth(mockNestedFields, 1);
+        const result2 = getExpandAllKeysToDepth(mockNestedFields, 3);
+
+        // Depth 1 should only include top-level expandable fields
+        expect(result1).toContain('level1_field1');
+        expect(result1).toContain('level1_field2');
+        expect(result1).not.toContain('level2_field1');
+
+        // Depth 3 should include all expandable fields
+        expect(result2).toContain('level1_field1');
+        expect(result2).toContain('level2_field1');
+        expect(result2.length).toBeGreaterThan(result1.length);
+      });
+
+      it('should not include fields without children', () => {
+        const result = getExpandAllKeysToDepth(mockNestedFields, 2);
+
+        // level1_field3 has no children, so should not be included
+        expect(result).not.toContain('level1_field3');
+      });
+    });
+
+    describe('getSafeExpandAllKeys', () => {
+      it('should return all keys for small schemas', () => {
+        const allKeys = ['key1', 'key2', 'key3'];
+        const smallFields: MockField[] = [{ name: 'field1' }];
+
+        const result = getSafeExpandAllKeys(smallFields, false, allKeys);
+
+        expect(result).toEqual(allKeys);
+      });
+
+      it('should return limited keys for large schemas', () => {
+        const allKeys = ['key1', 'key2', 'key3', 'key4', 'key5'];
+
+        const result = getSafeExpandAllKeys(mockNestedFields, true, allKeys);
+
+        // Should return depth-limited keys, not all keys
+        expect(result).not.toEqual(allKeys);
+        expect(result.length).toBeLessThanOrEqual(allKeys.length);
+      });
+
+      it('should use depth-based expansion for large schemas', () => {
+        const allKeys = [
+          'level1_field1',
+          'level1_field2',
+          'level2_field1',
+          'level2_field2',
+        ];
+
+        const result = getSafeExpandAllKeys(mockNestedFields, true, allKeys);
+
+        // Should include top-level and second-level expandable fields
+        expect(result).toContain('level1_field1');
+        expect(result).toContain('level1_field2');
+        expect(result).toContain('level2_field1');
+      });
+    });
+  });
 });
@@ -1322,3 +1322,103 @@ export const pruneEmptyChildren = (columns: Column[]): Column[] => {
     };
   });
 };
+
+export const getSchemaFieldCount = <T extends { children?: T[] }>(
+  fields: T[]
+): number => {
+  let count = 0;
+
+  const countFields = (items: T[]): void => {
+    items.forEach((item) => {
+      count++;
+      if (item.children && item.children.length > 0) {
+        countFields(item.children);
+      }
+    });
+  };
+
+  countFields(fields);
+
+  return count;
+};
+
+export const getSchemaDepth = <T extends { children?: T[] }>(
+  fields: T[]
+): number => {
+  if (!fields || fields.length === 0) {
+    return 0;
+  }
+
+  let maxDepth = 1;
+
+  const calculateDepth = (items: T[], currentDepth: number): void => {
+    items.forEach((item) => {
+      maxDepth = Math.max(maxDepth, currentDepth);
+      if (item.children && item.children.length > 0) {
+        calculateDepth(item.children, currentDepth + 1);
+      }
+    });
+  };
+
+  calculateDepth(fields, 1);
+
+  return maxDepth;
+};
+
+export const isLargeSchema = <T extends { children?: T[] }>(
+  fields: T[],
+  threshold = 500
+): boolean => {
+  return getSchemaFieldCount(fields) > threshold;
+};
+
+export const shouldCollapseSchema = <T extends { children?: T[] }>(
+  fields: T[],
+  threshold = 50
+): boolean => {
+  return getSchemaFieldCount(fields) > threshold;
+};
+
+export const getExpandAllKeysToDepth = <
+  T extends { children?: T[]; name?: string }
+>(
+  fields: T[],
+  maxDepth = 3
+): string[] => {
+  const keys: string[] = [];
+
+  const collectKeys = (items: T[], currentDepth = 0): void => {
+    if (currentDepth >= maxDepth) {
+      return;
+    }
+
+    items.forEach((item) => {
+      if (item.children && item.children.length > 0) {
+        if (item.name) {
+          keys.push(item.name);
+        }
+        // Continue collecting keys from children up to maxDepth
+        collectKeys(item.children, currentDepth + 1);
+      }
+    });
+  };
+
+  collectKeys(fields);
+
+  return keys;
+};
+
+export const getSafeExpandAllKeys = <
+  T extends { children?: T[]; name?: string }
+>(
+  fields: T[],
+  isLargeSchema: boolean,
+  allKeys: string[]
+): string[] => {
+  if (!isLargeSchema) {
+    return allKeys;
+  }
+
+  // For large schemas, expand to exactly 2 levels deep
+  return getExpandAllKeysToDepth(fields, 2);
+};
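For reference, a quick worked example of the helpers above, using a hypothetical three-node tree:

const tree = [
  { name: 'a', children: [{ name: 'b', children: [{ name: 'c' }] }] },
];

getSchemaFieldCount(tree); // 3
getSchemaDepth(tree); // 3
getExpandAllKeysToDepth(tree, 2); // ['a', 'b'] (only nodes with children are expandable)
getSafeExpandAllKeys(tree, false, ['a', 'b']); // ['a', 'b'] (small schema: allKeys returned as-is)
getSafeExpandAllKeys(tree, true, ['a', 'b']); // ['a', 'b'] (recomputed via the depth-2 walk)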