From 9dbaabad4444e0cf5823b75bc1497d9e44e2023e Mon Sep 17 00:00:00 2001 From: Sriharsha Chintalapani Date: Mon, 19 Jun 2023 03:13:05 -0700 Subject: [PATCH] Adding MD5 hash to fullyQualifiedName and names to simplify DB indexes for lookups and increasing the size of FQN (#11960) * Fix fqn hash * Add name & fqnHash and remove generated columns from top level * Add name & fqnHash and remove generated columns from top level * Add name & fqnHash and remove generated columns from top level * Add name & fqnHash and remove generated columns from top level * Add name & fqnHash and remove generated columns from top level * Add name & fqnHash and remove generated columns from top level * Add name & fqnHash and remove generated columns from top level * Add name & fqnHash and remove generated columns from top level * Add name & fqnHash and remove generated columns from top level * merge commits * merge commits * merge commits * Fix glossary import/export * Fix BotResource Tests * Fix BotResource Tests * Fix Glossary Term tests * Fix Glossary Term tests * Fix Glossary Import/Export tests * All backend tests are fixed. * merge with main * Fix tests and revert local changes * Fix tests and revert local changes * Fix NullPointerException for Glossary and Query * Fix Tests --------- Co-authored-by: Ayush Shah --- .../v013__create_db_connection_info.sql | 94 +++- .../v013__create_db_connection_info.sql | 87 +++- common/pom.xml | 6 +- .../openmetadata/common/utils/CommonUtil.java | 9 + conf/openmetadata.yaml | 3 +- ingestion/pipelines/sample_data.yaml | 2 +- .../test_suite/test_e2e_workflow.py | 4 +- .../integration/test_suite/test_workflow.py | 10 +- .../java/org/openmetadata/csv/CsvUtil.java | 5 + .../java/org/openmetadata/csv/EntityCsv.java | 34 +- .../ElasticSearchEventPublisher.java | 8 +- .../ElasticSearchIndexDefinition.java | 6 +- .../service/formatter/util/FormatterUtil.java | 10 +- .../service/jdbi3/ChartRepository.java | 2 +- .../jdbi3/ClassificationRepository.java | 7 +- .../service/jdbi3/CollectionDAO.java | 491 ++++++++++-------- .../service/jdbi3/ContainerRepository.java | 5 + .../jdbi3/DashboardDataModelRepository.java | 5 + .../service/jdbi3/DashboardRepository.java | 3 +- .../service/jdbi3/DatabaseRepository.java | 5 + .../jdbi3/DatabaseSchemaRepository.java | 5 + .../openmetadata/service/jdbi3/EntityDAO.java | 94 +++- .../service/jdbi3/EntityRepository.java | 123 ++++- .../service/jdbi3/FeedRepository.java | 123 +++-- .../service/jdbi3/GlossaryRepository.java | 17 +- .../service/jdbi3/GlossaryTermRepository.java | 13 +- .../jdbi3/IngestionPipelineRepository.java | 24 +- .../service/jdbi3/KpiRepository.java | 42 +- .../service/jdbi3/ListFilter.java | 7 +- .../service/jdbi3/MetricsRepository.java | 5 + .../service/jdbi3/MlModelRepository.java | 8 +- .../service/jdbi3/PipelineRepository.java | 64 +-- .../service/jdbi3/QueryRepository.java | 5 +- .../service/jdbi3/ReportDataRepository.java | 6 +- .../jdbi3/ServiceEntityRepository.java | 3 +- .../service/jdbi3/TableRepository.java | 163 ++---- .../service/jdbi3/TagRepository.java | 9 +- .../service/jdbi3/TeamRepository.java | 15 +- .../service/jdbi3/TestCaseRepository.java | 85 ++- .../service/jdbi3/TopicRepository.java | 7 +- .../service/jdbi3/TypeRepository.java | 14 +- .../service/jdbi3/UserRepository.java | 8 +- .../jdbi3/WebAnalyticEventRepository.java | 23 +- .../service/resources/bots/BotResource.java | 15 +- .../resources/dqtests/TestCaseResource.java | 8 + .../resources/dqtests/TestSuiteResource.java | 8 + 
.../glossary/GlossaryTermResource.java | 9 +- .../resources/query/QueryResource.java | 9 +- .../resources/search/SearchResource.java | 4 +- .../service/resources/tags/TagLabelCache.java | 2 +- .../service/resources/teams/TeamResource.java | 10 +- .../service/resources/teams/UserResource.java | 12 +- .../service/security/DefaultAuthorizer.java | 3 +- .../policyevaluator/SubjectCache.java | 3 +- .../openmetadata/service/util/EntityUtil.java | 12 + .../service/util/FullyQualifiedName.java | 25 +- .../service/util/NotificationHandler.java | 5 +- .../openmetadata/service/util/QueryUtil.java | 14 - .../service/util/ReIndexingHandler.java | 5 +- .../PaginatedDataInsightSource.java | 4 +- .../searchIndex/SearchIndexWorkflow.java | 9 +- .../databases/DatabaseResourceTest.java | 8 +- .../databases/TableResourceTest.java | 5 +- .../resources/feeds/FeedResourceTest.java | 6 +- .../glossary/GlossaryResourceTest.java | 11 +- .../glossary/GlossaryTermResourceTest.java | 13 +- .../resources/metadata/TypeResourceTest.java | 25 +- .../pipelines/PipelineResourceTest.java | 27 +- .../resources/query/QueryResourceTest.java | 4 +- .../resources/teams/RoleResourceTest.java | 4 +- .../service/util/FullyQualifiedNameTest.java | 12 +- .../openmetadata/schema/EntityInterface.java | 4 +- .../schema/utils/EntityInterfaceUtil.java | 11 + .../sdk/PipelineServiceClient.java | 4 +- .../schema/api/data/createGlossaryTerm.json | 4 +- .../json/schema/events/eventSubscription.json | 4 +- 76 files changed, 1226 insertions(+), 732 deletions(-) delete mode 100644 openmetadata-service/src/main/java/org/openmetadata/service/util/QueryUtil.java create mode 100644 openmetadata-spec/src/main/java/org/openmetadata/schema/utils/EntityInterfaceUtil.java diff --git a/bootstrap/sql/com.mysql.cj.jdbc.Driver/v013__create_db_connection_info.sql b/bootstrap/sql/com.mysql.cj.jdbc.Driver/v013__create_db_connection_info.sql index 399ef0cc505..5e35e3cb6e8 100644 --- a/bootstrap/sql/com.mysql.cj.jdbc.Driver/v013__create_db_connection_info.sql +++ b/bootstrap/sql/com.mysql.cj.jdbc.Driver/v013__create_db_connection_info.sql @@ -115,9 +115,97 @@ SET pe.json = JSON_INSERT( UPDATE dbservice_entity SET json = JSON_INSERT( JSON_REMOVE(json, '$.connection.config.password'), - '$.connection.config.authType', - JSON_OBJECT(), - '$.connection.config.authType.password', + '$.connection.config.authType', + JSON_OBJECT(), + '$.connection.config.authType.password', JSON_EXTRACT(json, '$.connection.config.password')) where serviceType in ('Postgres', 'Mysql'); + +-- add fullyQualifiedName hash and remove existing columns + +-- update the OM system tables + +ALTER TABLE field_relationship DROP KEY `PRIMARY`, ADD COLUMN fromFQNHash VARCHAR(256), ADD COLUMN toFQNHash VARCHAR(256), +DROP INDEX from_index, DROP INDEX to_index, ADD INDEX from_fqnhash_index(fromFQNHash, relation), ADD INDEX to_fqnhash_index(toFQNHash, relation), + ADD CONSTRAINT `field_relationship_primary` PRIMARY KEY(fromFQNHash, toFQNHash, relation), MODIFY fromFQN VARCHAR(2096) NOT NULL, + MODIFY toFQN VARCHAR(2096) NOT NULL; + +ALTER TABLE entity_extension_time_series DROP COLUMN entityFQN, ADD COLUMN entityFQNHash VARCHAR (256) NOT NULL; + +ALTER TABLE type_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + +ALTER TABLE event_subscription_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + +ALTER TABLE test_definition DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE 
test_suite DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE test_case DROP COLUMN fullyQualifiedName, ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL, + ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash); + +ALTER TABLE web_analytic_event DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash); +ALTER TABLE data_insight_chart DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash); +ALTER TABLE kpi_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + +ALTER TABLE classification DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + +ALTER TABLE glossary_term_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; + +ALTER TABLE tag DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; + +ALTER TABLE tag_usage DROP INDEX `source`, DROP COLUMN targetFQN, ADD COLUMN tagFQNHash VARCHAR(256), ADD COLUMN targetFQNHash VARCHAR(256), + ADD UNIQUE KEY `tag_usage_key` (source, tagFQNHash, targetFQNHash); + +ALTER TABLE policy_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; + +ALTER TABLE role_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE automations_workflow DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE test_connection_definition ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; + + +-- update services +ALTER TABLE dbservice_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE messaging_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE dashboard_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE pipeline_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE storage_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE metadata_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE mlmodel_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + + +-- all entity tables +ALTER TABLE database_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE database_schema_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE table_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE metric_entity DROP 
COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE report_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE dashboard_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE chart_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE ml_model_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE pipeline_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE topic_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE ingestion_pipeline_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE storage_container_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; +ALTER TABLE dashboard_data_model_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL; + +ALTER TABLE query_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE team_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE user_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE bot_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE glossary_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); diff --git a/bootstrap/sql/org.postgresql.Driver/v013__create_db_connection_info.sql b/bootstrap/sql/org.postgresql.Driver/v013__create_db_connection_info.sql index 718e24812ba..17a4dbf0acf 100644 --- a/bootstrap/sql/org.postgresql.Driver/v013__create_db_connection_info.sql +++ b/bootstrap/sql/org.postgresql.Driver/v013__create_db_connection_info.sql @@ -80,7 +80,92 @@ SET json = jsonb_set( json #-'{connection,config,password}', '{connection,config,authType}', jsonb_build_object('password',json#>'{connection,config,password}') -) +) WHERE serviceType IN ('Postgres', 'Mysql') and json#>'{connection,config,password}' is not null; +DROP INDEX field_relationship_from_index, field_relationship_to_index; +ALTER TABLE field_relationship DROP CONSTRAINT field_relationship_pkey, ADD COLUMN fromFQNHash VARCHAR(256), ADD COLUMN toFQNHash VARCHAR(256), + ADD CONSTRAINT field_relationship_pkey PRIMARY KEY(fromFQNHash, toFQNHash, relation), +ALTER fromFQN 
TYPE VARCHAR(2096), ALTER toFQN TYPE VARCHAR(2096); +CREATE INDEX IF NOT EXISTS field_relationship_from_index ON field_relationship(fromFQNHash, relation); +CREATE INDEX IF NOT EXISTS field_relationship_to_index ON field_relationship(toFQNHash, relation); + +ALTER TABLE entity_extension_time_series DROP COLUMN entityFQN, ADD COLUMN entityFQNHash VARCHAR (256) NOT NULL; + +ALTER TABLE type_entity DROP CONSTRAINT type_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + +ALTER TABLE event_subscription_entity DROP CONSTRAINT event_subscription_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + +ALTER TABLE test_definition DROP CONSTRAINT test_definition_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE test_suite DROP CONSTRAINT test_suite_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE test_case DROP COLUMN fullyQualifiedName, ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL, + ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash); + +ALTER TABLE web_analytic_event DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash); +ALTER TABLE data_insight_chart DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash); +ALTER TABLE kpi_entity DROP CONSTRAINT kpi_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + +ALTER TABLE classification DROP CONSTRAINT tag_category_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + +ALTER TABLE glossary_term_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; + +ALTER TABLE tag DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; + +ALTER TABLE tag_usage DROP CONSTRAINT tag_usage_source_tagfqn_targetfqn_key, DROP COLUMN targetFQN, ADD COLUMN tagFQNHash VARCHAR(256), ADD COLUMN targetFQNHash VARCHAR(256), + ADD UNIQUE (source, tagFQNHash, targetFQNHash); + +ALTER TABLE policy_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; + +ALTER TABLE role_entity DROP CONSTRAINT role_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE automations_workflow DROP CONSTRAINT automations_workflow_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE test_connection_definition ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; + + +-- update services +ALTER TABLE dbservice_entity DROP CONSTRAINT dbservice_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE messaging_service_entity DROP CONSTRAINT messaging_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE dashboard_service_entity DROP CONSTRAINT dashboard_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE pipeline_service_entity DROP CONSTRAINT pipeline_service_entity_name_key, ADD COLUMN 
nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE storage_service_entity DROP CONSTRAINT storage_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE metadata_service_entity DROP CONSTRAINT metadata_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE mlmodel_service_entity DROP CONSTRAINT mlmodel_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); + + +-- all entity tables +ALTER TABLE database_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE database_schema_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE table_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE metric_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE report_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE dashboard_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE chart_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE ml_model_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE pipeline_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE topic_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE ingestion_pipeline_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE storage_container_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; +ALTER TABLE dashboard_data_model_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash), + ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL; + +ALTER TABLE query_entity ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE team_entity DROP CONSTRAINT team_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE user_entity 
DROP CONSTRAINT user_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE bot_entity DROP CONSTRAINT bot_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); +ALTER TABLE glossary_entity DROP CONSTRAINT glossary_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash); diff --git a/common/pom.xml b/common/pom.xml index 19cae68c6aa..a965ab4ae09 100644 --- a/common/pom.xml +++ b/common/pom.xml @@ -71,7 +71,11 @@ 2.1.6 compile - + + commons-codec + commons-codec + 1.15 + diff --git a/common/src/main/java/org/openmetadata/common/utils/CommonUtil.java b/common/src/main/java/org/openmetadata/common/utils/CommonUtil.java index 25121a84e28..439b4a9b271 100644 --- a/common/src/main/java/org/openmetadata/common/utils/CommonUtil.java +++ b/common/src/main/java/org/openmetadata/common/utils/CommonUtil.java @@ -21,6 +21,7 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.security.MessageDigest; import java.text.DateFormat; import java.text.ParseException; import java.util.ArrayList; @@ -42,7 +43,9 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipFile; import javax.crypto.Mac; import javax.crypto.spec.SecretKeySpec; +import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.codec.binary.Hex; import org.apache.commons.io.IOUtils; @Slf4j @@ -179,4 +182,10 @@ public final class CommonUtil { } return new ArrayList<>(Arrays.asList(entries)); } + + @SneakyThrows + public static String getCheckSum(String input) { + byte[] checksum = MessageDigest.getInstance("MD5").digest(input.getBytes()); + return Hex.encodeHexString(checksum); + } } diff --git a/conf/openmetadata.yaml b/conf/openmetadata.yaml index f08d4c79823..78ef0a003f7 100644 --- a/conf/openmetadata.yaml +++ b/conf/openmetadata.yaml @@ -91,7 +91,7 @@ server: logging: level: ${LOG_LEVEL:-INFO} loggers: - io.swagger: ERROR + io.swagger: DEBUG appenders: - type: console threshold: TRACE @@ -127,7 +127,6 @@ database: # the JDBC URL; the database is called openmetadata_db url: jdbc:${DB_SCHEME:-mysql}://${DB_HOST:-localhost}:${DB_PORT:-3306}/${OM_DATABASE:-openmetadata_db}?allowPublicKeyRetrieval=true&useSSL=${DB_USE_SSL:-false}&serverTimezone=UTC - migrationConfiguration: path: "./bootstrap/sql" diff --git a/ingestion/pipelines/sample_data.yaml b/ingestion/pipelines/sample_data.yaml index 7b12963e083..b925f2f5ca7 100644 --- a/ingestion/pipelines/sample_data.yaml +++ b/ingestion/pipelines/sample_data.yaml @@ -18,4 +18,4 @@ workflowConfig: authProvider: openmetadata securityConfig: "jwtToken": "eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg" - \ No newline at end of file + diff --git a/ingestion/tests/integration/test_suite/test_e2e_workflow.py b/ingestion/tests/integration/test_suite/test_e2e_workflow.py index 2d13b4147b4..0c5bd8749f9 100644 --- a/ingestion/tests/integration/test_suite/test_e2e_workflow.py +++ 
b/ingestion/tests/integration/test_suite/test_e2e_workflow.py @@ -64,7 +64,7 @@ test_suite_config = { "testCases": [ { "name": "my_test_case", - "testDefinitionName": "TableColumnCountToBeBetween", + "testDefinitionName": "tableColumnCountToBeBetween", "parameterValues": [ {"name": "minColValue", "value": 1}, {"name": "maxColValue", "value": 5}, @@ -72,7 +72,7 @@ test_suite_config = { }, { "name": "table_column_name_to_exists", - "testDefinitionName": "TableColumnNameToExist", + "testDefinitionName": "tableColumnNameToExist", "parameterValues": [{"name": "columnName", "value": "id"}], }, ], diff --git a/ingestion/tests/integration/test_suite/test_workflow.py b/ingestion/tests/integration/test_suite/test_workflow.py index f01bf93e3f3..1eed3f37d2d 100644 --- a/ingestion/tests/integration/test_suite/test_workflow.py +++ b/ingestion/tests/integration/test_suite/test_workflow.py @@ -101,7 +101,7 @@ class TestSuiteWorkflowTests(unittest.TestCase): "testCases": [ { "name": "my_test_case", - "testDefinitionName": "TableColumnCountToBeBetween", + "testDefinitionName": "tableColumnCountToBeBetween", "parameterValues": [ {"name": "minColValue", "value": 1}, {"name": "maxColValue", "value": 5}, @@ -149,7 +149,7 @@ class TestSuiteWorkflowTests(unittest.TestCase): "testCases": [ { "name": "my_test_case", - "testDefinitionName": "TableColumnCountToBeBetween", + "testDefinitionName": "tableColumnCountToBeBetween", "parameterValues": [ {"name": "minColValue", "value": 1}, {"name": "maxColValue", "value": 5}, @@ -186,7 +186,7 @@ class TestSuiteWorkflowTests(unittest.TestCase): "testCases": [ { "name": "my_test_case", - "testDefinitionName": "TableColumnCountToBeBetween", + "testDefinitionName": "tableColumnCountToBeBetween", "parameterValues": [ {"name": "minColValue", "value": 1}, {"name": "maxColValue", "value": 5}, @@ -194,7 +194,7 @@ class TestSuiteWorkflowTests(unittest.TestCase): }, { "name": "my_test_case_two", - "testDefinitionName": "TableColumnCountToBeBetween", + "testDefinitionName": "tableColumnCountToBeBetween", "parameterValues": [ {"name": "minColValue", "value": 1}, {"name": "maxColValue", "value": 5}, @@ -226,7 +226,7 @@ class TestSuiteWorkflowTests(unittest.TestCase): "testCases": [ { "name": "my_test_case", - "testDefinitionName": "TableColumnCountToBeBetween", + "testDefinitionName": "tableColumnCountToBeBetween", "parameterValues": [ {"name": "minColValue", "value": 1}, {"name": "maxColValue", "value": 5}, diff --git a/openmetadata-service/src/main/java/org/openmetadata/csv/CsvUtil.java b/openmetadata-service/src/main/java/org/openmetadata/csv/CsvUtil.java index 8281d1bac89..c0985873337 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/csv/CsvUtil.java +++ b/openmetadata-service/src/main/java/org/openmetadata/csv/CsvUtil.java @@ -134,6 +134,11 @@ public final class CsvUtil { return csvRecord; } + public static List addUserOwner(List csvRecord, EntityReference owner) { + csvRecord.add(nullOrEmpty(owner) ? 
null : owner.getName()); + return csvRecord; + } + private static String quoteCsvField(String str) { if (str.contains(SEPARATOR) || str.contains(FIELD_SEPARATOR)) { return quote(str); diff --git a/openmetadata-service/src/main/java/org/openmetadata/csv/EntityCsv.java b/openmetadata-service/src/main/java/org/openmetadata/csv/EntityCsv.java index fd546c17c07..1838cf92b2d 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/csv/EntityCsv.java +++ b/openmetadata-service/src/main/java/org/openmetadata/csv/EntityCsv.java @@ -47,6 +47,7 @@ import org.openmetadata.schema.type.csv.CsvFile; import org.openmetadata.schema.type.csv.CsvHeader; import org.openmetadata.schema.type.csv.CsvImportResult; import org.openmetadata.schema.type.csv.CsvImportResult.Status; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.jdbi3.EntityRepository; import org.openmetadata.service.util.EntityUtil; @@ -153,8 +154,18 @@ public abstract class EntityCsv { List list = CsvUtil.fieldToStrings(owner); if (list.size() != 2) { importFailure(printer, invalidOwner(fieldNumber), csvRecord); + return null; } - return getEntityReference(printer, csvRecord, fieldNumber, list.get(0), list.get(1)); + return getEntityReference(printer, csvRecord, fieldNumber, list.get(0), EntityInterfaceUtil.quoteName(list.get(1))); + } + + /** Owner field is in entityName format */ + public EntityReference getOwnerAsUser(CSVPrinter printer, CSVRecord csvRecord, int fieldNumber) throws IOException { + String owner = csvRecord.get(fieldNumber); + if (nullOrEmpty(owner)) { + return null; + } + return getEntityReference(printer, csvRecord, fieldNumber, Entity.USER, EntityInterfaceUtil.quoteName(owner)); } protected final Boolean getBoolean(CSVPrinter printer, CSVRecord csvRecord, int fieldNumber) throws IOException { @@ -222,6 +233,27 @@ public abstract class EntityCsv { return refs.isEmpty() ? null : refs; } + protected final List getUserOrTeamEntityReferences( + CSVPrinter printer, CSVRecord csvRecord, int fieldNumber, String entityType) throws IOException { + String fqns = csvRecord.get(fieldNumber); + if (nullOrEmpty(fqns)) { + return null; + } + List fqnList = listOrEmpty(CsvUtil.fieldToStrings(fqns)); + List refs = new ArrayList<>(); + for (String fqn : fqnList) { + EntityReference ref = + getEntityReference(printer, csvRecord, fieldNumber, entityType, EntityInterfaceUtil.quoteName(fqn)); + if (!processRecord) { + return null; + } + if (ref != null) { + refs.add(ref); + } + } + return refs.isEmpty() ? 
null : refs; + } + protected final List getTagLabels(CSVPrinter printer, CSVRecord csvRecord, int fieldNumber) throws IOException { List refs = getEntityReferences(printer, csvRecord, fieldNumber, Entity.TAG); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/elasticsearch/ElasticSearchEventPublisher.java b/openmetadata-service/src/main/java/org/openmetadata/service/elasticsearch/ElasticSearchEventPublisher.java index 2301f6b8ae8..70a8b747f05 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/elasticsearch/ElasticSearchEventPublisher.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/elasticsearch/ElasticSearchEventPublisher.java @@ -102,6 +102,7 @@ import org.openmetadata.service.events.errors.EventPublisherException; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.resources.events.EventResource.EventList; import org.openmetadata.service.util.ElasticSearchClientUtils; +import org.openmetadata.service.util.EntityUtil; import org.openmetadata.service.util.JsonUtils; @Slf4j @@ -810,7 +811,7 @@ public class ElasticSearchEventPublisher extends AbstractEventPublisher { .withFailure(new Failure().withSinkError(failureDetails)); dao.entityExtensionTimeSeriesDao() .insert( - ELASTIC_SEARCH_ENTITY_FQN_STREAM, + EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION, "eventPublisherJob", JsonUtils.pojoToJson(streamJob)); @@ -823,7 +824,8 @@ public class ElasticSearchEventPublisher extends AbstractEventPublisher { try { long updateTime = Date.from(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant()).getTime(); String recordString = - dao.entityExtensionTimeSeriesDao().getExtension(ELASTIC_SEARCH_ENTITY_FQN_STREAM, ELASTIC_SEARCH_EXTENSION); + dao.entityExtensionTimeSeriesDao() + .getExtension(EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION); EventPublisherJob lastRecord = JsonUtils.readValue(recordString, EventPublisherJob.class); long originalLastUpdate = lastRecord.getTimestamp(); lastRecord.setStatus(status); @@ -838,7 +840,7 @@ public class ElasticSearchEventPublisher extends AbstractEventPublisher { dao.entityExtensionTimeSeriesDao() .update( - ELASTIC_SEARCH_ENTITY_FQN_STREAM, + EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION, JsonUtils.pojoToJson(lastRecord), originalLastUpdate); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/elasticsearch/ElasticSearchIndexDefinition.java b/openmetadata-service/src/main/java/org/openmetadata/service/elasticsearch/ElasticSearchIndexDefinition.java index ac1809caf65..3f3a0c9e09a 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/elasticsearch/ElasticSearchIndexDefinition.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/elasticsearch/ElasticSearchIndexDefinition.java @@ -40,6 +40,7 @@ import org.openmetadata.schema.type.TagLabel; import org.openmetadata.service.Entity; import org.openmetadata.service.events.errors.EventPublisherException; import org.openmetadata.service.jdbi3.CollectionDAO; +import org.openmetadata.service.util.EntityUtil; import org.openmetadata.service.util.JsonUtils; @Slf4j @@ -293,7 +294,8 @@ public class ElasticSearchIndexDefinition { try { long updateTime = Date.from(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant()).getTime(); String recordString = - dao.entityExtensionTimeSeriesDao().getExtension(ELASTIC_SEARCH_ENTITY_FQN_STREAM, ELASTIC_SEARCH_EXTENSION); + 
dao.entityExtensionTimeSeriesDao() + .getExtension(EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION); EventPublisherJob lastRecord = JsonUtils.readValue(recordString, EventPublisherJob.class); long originalLastUpdate = lastRecord.getTimestamp(); lastRecord.setStatus(Status.ACTIVE_WITH_ERROR); @@ -308,7 +310,7 @@ public class ElasticSearchIndexDefinition { dao.entityExtensionTimeSeriesDao() .update( - ELASTIC_SEARCH_ENTITY_FQN_STREAM, + EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION, JsonUtils.pojoToJson(lastRecord), originalLastUpdate); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/util/FormatterUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/util/FormatterUtil.java index 1ee63e57da1..aed37b229c7 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/formatter/util/FormatterUtil.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/formatter/util/FormatterUtil.java @@ -269,6 +269,11 @@ public class FormatterUtil { .withEntityFullyQualifiedName(entityFQN); } + // PUT or PATCH operation didn't result in any change + if (changeType == null || RestUtil.ENTITY_NO_CHANGE.equals(changeType)) { + return null; + } + // Handles Bulk Add test cases to a logical test suite if (changeType.equals(RestUtil.LOGICAL_TEST_CASES_ADDED)) { EntityInterface entityInterface = (EntityInterface) responseContext.getEntity(); @@ -280,11 +285,6 @@ public class FormatterUtil { .withEntityFullyQualifiedName(entityFQN); } - // PUT or PATCH operation didn't result in any change - if (changeType == null || RestUtil.ENTITY_NO_CHANGE.equals(changeType)) { - return null; - } - // Entity was updated by either PUT .../entities or PATCH .../entities // Entity was soft deleted by DELETE .../entities/{id} that updated the attribute `deleted` to true if (changeType.equals(RestUtil.ENTITY_UPDATED) || changeType.equals(RestUtil.ENTITY_SOFT_DELETED)) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ChartRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ChartRepository.java index 27ed2f92d7e..ffee1aa42f2 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ChartRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ChartRepository.java @@ -47,7 +47,7 @@ public class ChartRepository extends EntityRepository { @Override public void setFullyQualifiedName(Chart chart) { - chart.setFullyQualifiedName(FullyQualifiedName.add(chart.getService().getName(), chart.getName())); + chart.setFullyQualifiedName(FullyQualifiedName.add(chart.getService().getFullyQualifiedName(), chart.getName())); } @Override diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ClassificationRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ClassificationRepository.java index 4dfae99bff9..ee50d37bc53 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ClassificationRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ClassificationRepository.java @@ -72,13 +72,14 @@ public class ClassificationRepository extends EntityRepository { } private int getTermCount(Classification category) { - ListFilter filter = - new ListFilter(Include.NON_DELETED).addQueryParam("parent", FullyQualifiedName.build(category.getName())); + ListFilter filter = new 
ListFilter(Include.NON_DELETED).addQueryParam("parent", category.getName()); return daoCollection.tagDAO().listCount(filter); } private Integer getUsageCount(Classification classification) { - return daoCollection.tagUsageDAO().getTagCount(TagSource.CLASSIFICATION.ordinal(), classification.getName()); + return daoCollection + .tagUsageDAO() + .getTagCount(TagSource.CLASSIFICATION.ordinal(), FullyQualifiedName.buildHash(classification.getName())); } @Transaction diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/CollectionDAO.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/CollectionDAO.java index 851eeb13b83..105aa3ef979 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/CollectionDAO.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/CollectionDAO.java @@ -17,7 +17,6 @@ import static org.openmetadata.schema.type.Relationship.CONTAINS; import static org.openmetadata.schema.type.Relationship.MENTIONED_IN; import static org.openmetadata.service.Entity.ORGANIZATION_NAME; import static org.openmetadata.service.Entity.QUERY; -import static org.openmetadata.service.jdbi3.ListFilter.escape; import static org.openmetadata.service.jdbi3.ListFilter.escapeApostrophe; import static org.openmetadata.service.jdbi3.locator.ConnectionType.MYSQL; import static org.openmetadata.service.jdbi3.locator.ConnectionType.POSTGRES; @@ -108,6 +107,7 @@ import org.openmetadata.schema.type.UsageDetails; import org.openmetadata.schema.type.UsageStats; import org.openmetadata.schema.util.EntitiesCount; import org.openmetadata.schema.util.ServicesCount; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.jdbi3.CollectionDAO.TagUsageDAO.TagLabelMapper; import org.openmetadata.service.jdbi3.CollectionDAO.UsageDAO.UsageDetailsMapper; @@ -283,8 +283,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -295,8 +295,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } @Override @@ -317,8 +317,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -334,8 +334,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -351,8 +351,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -368,8 +368,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -385,8 +385,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -402,8 +402,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -419,8 +419,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; 
+ default String getNameHashColumn() { + return "fqnHash"; } @Override @@ -988,7 +988,7 @@ public interface CollectionDAO { filterRelation = MENTIONED_IN.ordinal(); } return listThreadsByEntityLink( - entityLink.getFullyQualifiedFieldValue(), + FullyQualifiedName.buildHash(entityLink.getFullyQualifiedFieldValue()), entityLink.getFullyQualifiedFieldType(), limit, relation, @@ -1000,16 +1000,16 @@ public interface CollectionDAO { @SqlQuery( "SELECT json FROM thread_entity " - + "AND id in (SELECT fromFQN FROM field_relationship WHERE " - + "(:fqnPrefix IS NULL OR toFQN LIKE CONCAT(:fqnPrefix, '.%') OR toFQN=:fqnPrefix) AND fromType='THREAD' AND " + + "AND MD5(id) in (SELECT fromFQNHash FROM field_relationship WHERE " + + "(:fqnPrefixHash IS NULL OR toFQNHash LIKE CONCAT(:fqnPrefixHash, '.%') OR toFQNHash=:fqnPrefixHash) AND fromType='THREAD' AND " + "(:toType IS NULL OR toType LIKE CONCAT(:toType, '.%') OR toType=:toType) AND relation= :relation) " - + "AND (:userName IS NULL OR id in (SELECT toFQN FROM field_relationship WHERE " - + " ((fromType='user' AND fromFQN= :userName) OR" - + " (fromType='team' AND fromFQN IN ())) AND toType='THREAD' AND relation= :filterRelation) )" + + "AND (:userName IS NULL OR MD5(id) in (SELECT toFQNHash FROM field_relationship WHERE " + + " ((fromType='user' AND fromFQNHash= :userName) OR" + + " (fromType='team' AND fromFQNHash IN ())) AND toType='THREAD' AND relation= :filterRelation) )" + "ORDER BY createdAt DESC " + "LIMIT :limit") List listThreadsByEntityLink( - @Bind("fqnPrefix") String fqnPrefix, + @Bind("fqnPrefixHash") String fqnPrefixHash, @Bind("toType") String toType, @Bind("limit") int limit, @Bind("relation") int relation, @@ -1025,7 +1025,7 @@ public interface CollectionDAO { filterRelation = MENTIONED_IN.ordinal(); } return listCountThreadsByEntityLink( - entityLink.getFullyQualifiedFieldValue(), + FullyQualifiedName.buildHash(entityLink.getFullyQualifiedFieldValue()), entityLink.getFullyQualifiedFieldType(), relation, userName, @@ -1036,14 +1036,14 @@ public interface CollectionDAO { @SqlQuery( "SELECT count(id) FROM thread_entity " - + "AND id in (SELECT fromFQN FROM field_relationship WHERE " - + "(:fqnPrefix IS NULL OR toFQN LIKE CONCAT(:fqnPrefix, '.%') OR toFQN=:fqnPrefix) AND fromType='THREAD' AND " + + "AND MD5(id) in (SELECT fromFQNHash FROM field_relationship WHERE " + + "(:fqnPrefixHash IS NULL OR toFQNHash LIKE CONCAT(:fqnPrefixHash, '.%') OR toFQNHash=:fqnPrefixHash) AND fromType='THREAD' AND " + "(:toType IS NULL OR toType LIKE CONCAT(:toType, '.%') OR toType=:toType) AND relation= :relation) " - + "AND (:userName IS NULL OR id in (SELECT toFQN FROM field_relationship WHERE " - + " ((fromType='user' AND fromFQN= :userName) OR" - + " (fromType='team' AND fromFQN IN ())) AND toType='THREAD' AND relation= :filterRelation) )") + + "AND (:userName IS NULL OR id in (SELECT toFQNHash FROM field_relationship WHERE " + + " ((fromType='user' AND fromFQNHash= :userName) OR" + + " (fromType='team' AND fromFQNHash IN ())) AND toType='THREAD' AND relation= :filterRelation) )") int listCountThreadsByEntityLink( - @Bind("fqnPrefix") String fqnPrefix, + @Bind("fqnPrefixHash") String fqnPrefixHash, @Bind("toType") String toType, @Bind("relation") int relation, @Bind("userName") String userName, @@ -1058,15 +1058,15 @@ public interface CollectionDAO { void update(@Bind("id") String id, @Bind("json") String json); @SqlQuery( - "SELECT entityLink, COUNT(id) count FROM field_relationship fr INNER JOIN thread_entity te ON fr.fromFQN=te.id " - + "WHERE 
(:fqnPrefix IS NULL OR fr.toFQN LIKE CONCAT(:fqnPrefix, '.%') OR fr.toFQN=:fqnPrefix) AND " + "SELECT entityLink, COUNT(id) count FROM field_relationship fr INNER JOIN thread_entity te ON fr.fromFQNHash=MD5(te.id) " + + "WHERE (:fqnPrefixHash IS NULL OR fr.toFQNHash LIKE CONCAT(:fqnPrefixHash, '.%') OR fr.toFQNHash=:fqnPrefixHash) AND " + "(:toType IS NULL OR fr.toType like concat(:toType, '.%') OR fr.toType=:toType) AND fr.fromType = :fromType " + "AND fr.relation = :relation AND te.resolved= :isResolved AND (:status IS NULL OR te.taskStatus = :status) " + "AND (:type IS NULL OR te.type = :type) " + "GROUP BY entityLink") @RegisterRowMapper(CountFieldMapper.class) List> listCountByEntityLink( - @Bind("fqnPrefix") String fqnPrefix, + @Bind("fqnPrefixHash") String fqnPrefixHash, @Bind("fromType") String fromType, @Bind("toType") String toType, @Bind("relation") int relation, @@ -1116,10 +1116,10 @@ public interface CollectionDAO { @SqlQuery( "SELECT json FROM thread_entity AND " - + "id in (" - + "SELECT toFQN FROM field_relationship WHERE " - + "((fromType='user' AND fromFQN= :userName) OR " - + "(fromType='team' AND fromFQN IN ())) AND toType='THREAD' AND relation= :relation) " + + "MD5(id) in (" + + "SELECT toFQNHash FROM field_relationship WHERE " + + "((fromType='user' AND fromFQNHash= :userName) OR " + + "(fromType='team' AND fromFQNHash IN ())) AND toType='THREAD' AND relation= :relation) " + "ORDER BY createdAt DESC " + "LIMIT :limit") List listThreadsByMentions( @@ -1131,10 +1131,10 @@ public interface CollectionDAO { @SqlQuery( "SELECT count(id) FROM thread_entity AND " - + "id in (" - + "SELECT toFQN FROM field_relationship WHERE " - + "((fromType='user' AND fromFQN= :userName) OR " - + "(fromType='team' AND fromFQN IN ())) AND toType='THREAD' AND relation= :relation) ") + + "MD5(id) in (" + + "SELECT toFQNHash FROM field_relationship WHERE " + + "((fromType='user' AND fromFQNHash= :userName) OR " + + "(fromType='team' AND fromFQNHash IN ())) AND toType='THREAD' AND relation= :relation) ") int listCountThreadsByMentions( @Bind("userName") String userName, @BindList("teamNames") List teamNames, @@ -1155,16 +1155,18 @@ public interface CollectionDAO { interface FieldRelationshipDAO { @ConnectionAwareSqlUpdate( value = - "INSERT IGNORE INTO field_relationship(fromFQN, toFQN, fromType, toType, relation, json) " - + "VALUES (:fromFQN, :toFQN, :fromType, :toType, :relation, :json)", + "INSERT IGNORE INTO field_relationship(fromFQNHash, toFQNHash, fromFQN, toFQN, fromType, toType, relation, json) " + + "VALUES (:fromFQNHash, :toFQNHash, :fromFQN, :toFQN, :fromType, :toType, :relation, :json)", connectionType = MYSQL) @ConnectionAwareSqlUpdate( value = - "INSERT INTO field_relationship(fromFQN, toFQN, fromType, toType, relation, json) " - + "VALUES (:fromFQN, :toFQN, :fromType, :toType, :relation, (:json :: jsonb)) " - + "ON CONFLICT (fromFQN, toFQN, relation) DO NOTHING", + "INSERT INTO field_relationship(fromFQNHash, toFQNHash, fromFQN, toFQN, fromType, toType, relation, json) " + + "VALUES (:fromFQNHash, :toFQNHash, :fromFQN, :toFQN, :fromType, :toType, :relation, (:json :: jsonb)) " + + "ON CONFLICT (fromFQNHash, toFQNHash, relation) DO NOTHING", connectionType = POSTGRES) void insert( + @Bind("fromFQNHash") String fromFQNHash, + @Bind("toFQNHash") String toFQNHash, @Bind("fromFQN") String fromFQN, @Bind("toFQN") String toFQN, @Bind("fromType") String fromType, @@ -1174,17 +1176,19 @@ public interface CollectionDAO { @ConnectionAwareSqlUpdate( value = - "INSERT INTO 
field_relationship(fromFQN, toFQN, fromType, toType, relation, jsonSchema, json) " - + "VALUES (:fromFQN, :toFQN, :fromType, :toType, :relation, :jsonSchema, :json) " + "INSERT INTO field_relationship(fromFQNHash, toFQNHash, fromFQN, toFQN, fromType, toType, relation, jsonSchema, json) " + + "VALUES (:fromFQNHash, :toFQNHash, :fromFQN, :toFQN, :fromType, :toType, :relation, :jsonSchema, :json) " + "ON DUPLICATE KEY UPDATE json = :json", connectionType = MYSQL) @ConnectionAwareSqlUpdate( value = - "INSERT INTO field_relationship(fromFQN, toFQN, fromType, toType, relation, jsonSchema, json) " - + "VALUES (:fromFQN, :toFQN, :fromType, :toType, :relation, :jsonSchema, (:json :: jsonb)) " - + "ON CONFLICT (fromFQN, toFQN, relation) DO UPDATE SET json = EXCLUDED.json", + "INSERT INTO field_relationship(fromFQNHash, toFQNHash, fromFQN, toFQN, fromType, toType, relation, jsonSchema, json) " + + "VALUES (:fromFQNHash, :toFQNHash, :fromFQN, :toFQN, :fromType, :toType, :relation, :jsonSchema, (:json :: jsonb)) " + + "ON CONFLICT (fromFQNHash, toFQNHash, relation) DO UPDATE SET json = EXCLUDED.json", connectionType = POSTGRES) void upsert( + @Bind("fromFQNHash") String fromFQNHash, + @Bind("toFQNHash") String toFQNHash, @Bind("fromFQN") String fromFQN, @Bind("toFQN") String toFQN, @Bind("fromType") String fromType, @@ -1195,55 +1199,55 @@ public interface CollectionDAO { @SqlQuery( "SELECT json FROM field_relationship WHERE " - + "fromFQN = :fromFQN AND toFQN = :toFQN AND fromType = :fromType " + + "fromFQNHash = :fromFQNHash AND toFQNHash = :toFQNHash AND fromType = :fromType " + "AND toType = :toType AND relation = :relation") String find( - @Bind("fromFQN") String fromFQN, - @Bind("toFQN") String toFQN, + @Bind("fromFQNHash") String fromFQNHash, + @Bind("toFQNHash") String toFQNHash, @Bind("fromType") String fromType, @Bind("toType") String toType, @Bind("relation") int relation); @SqlQuery( "SELECT fromFQN, toFQN, json FROM field_relationship WHERE " - + "fromFQN LIKE CONCAT(:fqnPrefix, '%') AND fromType = :fromType AND toType = :toType " + + "fromFQNHash LIKE CONCAT(:fqnPrefixHash, '%') AND fromType = :fromType AND toType = :toType " + "AND relation = :relation") @RegisterRowMapper(ToFieldMapper.class) List> listToByPrefix( - @Bind("fqnPrefix") String fqnPrefix, + @Bind("fqnPrefixHash") String fqnPrefixHash, @Bind("fromType") String fromType, @Bind("toType") String toType, @Bind("relation") int relation); @SqlQuery( "SELECT fromFQN, toFQN, json FROM field_relationship WHERE " - + "fromFQN = :fqn AND fromType = :type AND toType = :otherType AND relation = :relation " + + "fromFQNHash = :fqnHash AND fromType = :type AND toType = :otherType AND relation = :relation " + "UNION " + "SELECT toFQN, fromFQN, json FROM field_relationship WHERE " - + "toFQN = :fqn AND toType = :type AND fromType = :otherType AND relation = :relation") + + "toFQNHash = :fqnHash AND toType = :type AND fromType = :otherType AND relation = :relation") @RegisterRowMapper(ToFieldMapper.class) List> listBidirectional( - @Bind("fqn") String fqn, + @Bind("fqnHash") String fqnHash, @Bind("type") String type, @Bind("otherType") String otherType, @Bind("relation") int relation); @SqlQuery( "SELECT fromFQN, toFQN, json FROM field_relationship WHERE " - + "fromFQN LIKE CONCAT(:fqnPrefix, '%') AND fromType = :type AND toType = :otherType AND relation = :relation " + + "fromFQNHash LIKE CONCAT(:fqnPrefixHash, '%') AND fromType = :type AND toType = :otherType AND relation = :relation " + "UNION " + "SELECT toFQN, fromFQN, json FROM 
field_relationship WHERE " - + "toFQN LIKE CONCAT(:fqnPrefix, '%') AND toType = :type AND fromType = :otherType AND relation = :relation") + + "toFQNHash LIKE CONCAT(:fqnPrefixHash, '%') AND toType = :type AND fromType = :otherType AND relation = :relation") @RegisterRowMapper(ToFieldMapper.class) List> listBidirectionalByPrefix( - @Bind("fqnPrefix") String fqnPrefix, + @Bind("fqnPrefixHash") String fqnPrefixHash, @Bind("type") String type, @Bind("otherType") String otherType, @Bind("relation") int relation); - default void deleteAllByPrefix(String fqnPrefix) { - String prefix = String.format("%s%s%%", fqnPrefix, Entity.SEPARATOR); - String condition = "WHERE (toFQN LIKE :prefix OR fromFQN LIKE :prefix)"; + default void deleteAllByPrefix(String fqnPrefixHash) { + String prefix = String.format("%s%s%%", fqnPrefixHash, Entity.SEPARATOR); + String condition = "WHERE (toFQNHash LIKE :prefix OR fromFQNHash LIKE :prefix)"; Map bindMap = new HashMap<>(); bindMap.put("prefix", prefix); deleteAllByPrefixInternal(condition, bindMap); @@ -1253,11 +1257,11 @@ public interface CollectionDAO { void deleteAllByPrefixInternal(@Define("cond") String cond, @BindMap Map bindings); @SqlUpdate( - "DELETE from field_relationship WHERE fromFQN = :fromFQN AND toFQN = :toFQN AND fromType = :fromType " + "DELETE from field_relationship WHERE fromFQNHash = :fromFQNHash AND toFQNHash = :toFQNHash AND fromType = :fromType " + "AND toType = :toType AND relation = :relation") void delete( - @Bind("fromFQN") String fromFQN, - @Bind("toFQN") String toFQN, + @Bind("fromFQNHash") String fromFQNHash, + @Bind("toFQNHash") String toFQNHash, @Bind("fromType") String fromType, @Bind("toType") String toType, @Bind("relation") int relation); @@ -1282,8 +1286,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -1299,8 +1303,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } @SqlQuery("SELECT json FROM ") @@ -1319,8 +1323,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -1336,8 +1340,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -1353,8 +1357,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -1370,8 +1374,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -1387,8 +1391,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -1404,8 +1408,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -1421,8 +1425,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } @Override @@ -1534,8 +1538,8 @@ public interface CollectionDAO { } @Override - 
default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -1551,8 +1555,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -1568,8 +1572,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -1585,8 +1589,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -1602,8 +1606,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -1619,8 +1623,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } @Override @@ -1671,6 +1675,7 @@ public interface CollectionDAO { String.format( "%s WHERE entity_relationship.fromId = :entityId and entity_relationship.relation = :relation and entity_relationship.toEntity = :toEntity and query_entity.name > :after order by query_entity.name ASC LIMIT :limit", condition); + bindMap.put("entityId", entityId); bindMap.put("relation", MENTIONED_IN.ordinal()); bindMap.put("toEntity", QUERY); @@ -1704,8 +1709,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -1721,8 +1726,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -1738,78 +1743,95 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } - @SqlUpdate("DELETE FROM tag where fullyQualifiedName LIKE CONCAT(:fqnPrefix, '.%')") - void deleteTagsByPrefix(@Bind("fqnPrefix") String fqnPrefix); + @SqlUpdate("DELETE FROM tag where fqnHash LIKE CONCAT(:fqnHashPrefix, '.%')") + void deleteTagsByPrefix(@Bind("fqnHashPrefix") String fqnHashPrefix); } @RegisterRowMapper(TagLabelMapper.class) interface TagUsageDAO { @ConnectionAwareSqlUpdate( value = - "INSERT IGNORE INTO tag_usage (source, tagFQN, targetFQN, labelType, state) VALUES (:source, :tagFQN, :targetFQN, :labelType, :state)", + "INSERT IGNORE INTO tag_usage (source, tagFQN, tagFQNHash, targetFQNHash, labelType, state) VALUES (:source, :tagFQN, :tagFQNHash, :targetFQNHash, :labelType, :state)", connectionType = MYSQL) @ConnectionAwareSqlUpdate( value = - "INSERT INTO tag_usage (source, tagFQN, targetFQN, labelType, state) VALUES (:source, :tagFQN, :targetFQN, :labelType, :state) ON CONFLICT (source, tagFQN, targetFQN) DO NOTHING", + "INSERT INTO tag_usage (source, tagFQN, tagFQNHash, targetFQNHash, labelType, state) VALUES (:source, :tagFQN, :tagFQNHash, :targetFQNHash, :labelType, :state) ON CONFLICT (source, tagFQNHash, targetFQNHash) DO NOTHING", connectionType = POSTGRES) void applyTag( @Bind("source") int source, @Bind("tagFQN") String tagFQN, - @Bind("targetFQN") String targetFQN, + @Bind("tagFQNHash") String tagFQNHash, + @Bind("targetFQNHash") String targetFQNHash, @Bind("labelType") int labelType, @Bind("state") int state); - @SqlQuery("SELECT 
targetFQN FROM tag_usage WHERE source = :source AND tagFQN = :tagFQN") - List getTargetFQNs(@Bind("source") int source, @Bind("tagFQN") String tagFQN); + @SqlQuery("SELECT targetFQNHash FROM tag_usage WHERE source = :source AND tagFQNHash = :tagFQNHash") + List getTargetFQNs(@Bind("source") int source, @Bind("tagFQNHash") String tagFQNHash); default List getTags(String targetFQN) { - List tags = getTagsInternal(targetFQN); + List tags = getTagsInternal(FullyQualifiedName.buildHash(targetFQN)); tags.forEach(tagLabel -> tagLabel.setDescription(TagLabelCache.getInstance().getDescription(tagLabel))); return tags; } - @SqlQuery("SELECT source, tagFQN, labelType, state FROM tag_usage WHERE targetFQN = :targetFQN ORDER BY tagFQN") - List getTagsInternal(@Bind("targetFQN") String targetFQN); + @SqlQuery( + "SELECT source, tagFQN, labelType, state FROM tag_usage WHERE targetFQNHash = :targetFQNHash ORDER BY tagFQN") + List getTagsInternal(@Bind("targetFQNHash") String targetFQNHash); @SqlQuery( "SELECT COUNT(*) FROM tag_usage " - + "WHERE (tagFQN LIKE CONCAT(:tagFqn, '.%') OR tagFQN = :tagFqn) " + + "WHERE (tagFQNHash LIKE CONCAT(:tagFqnHash, '.%') OR tagFQNHash = :tagFqnHash) " + "AND source = :source") - int getTagCount(@Bind("source") int source, @Bind("tagFqn") String tagFqn); + int getTagCount(@Bind("source") int source, @Bind("tagFqnHash") String tagFqnHash); - @SqlUpdate("DELETE FROM tag_usage where targetFQN = :targetFQN") - void deleteTagsByTarget(@Bind("targetFQN") String targetFQN); + @SqlUpdate("DELETE FROM tag_usage where targetFQNHash = :targetFQNHash") + void deleteTagsByTarget(@Bind("targetFQNHash") String targetFQNHash); - @SqlUpdate("DELETE FROM tag_usage where tagFQN = :tagFQN AND source = :source") - void deleteTagLabels(@Bind("source") int source, @Bind("tagFQN") String tagFQN); + @SqlUpdate("DELETE FROM tag_usage where tagFQNHash = :tagFQNHash AND source = :source") + void deleteTagLabels(@Bind("source") int source, @Bind("tagFQNHash") String tagFQNHash); - @SqlUpdate("DELETE FROM tag_usage where tagFQN LIKE CONCAT(:tagFQN, '.%') AND source = :source") - void deleteTagLabelsByPrefix(@Bind("source") int source, @Bind("tagFQN") String tagFQN); + @SqlUpdate("DELETE FROM tag_usage where tagFQNHash LIKE CONCAT(:tagFQNHash, '.%') AND source = :source") + void deleteTagLabelsByPrefix(@Bind("source") int source, @Bind("tagFQNHash") String tagFQNHash); - @SqlUpdate("DELETE FROM tag_usage where targetFQN = :targetFQN OR targetFQN LIKE CONCAT(:targetFQN, '.%')") - void deleteTagLabelsByTargetPrefix(@Bind("targetFQN") String targetFQN); + @SqlUpdate( + "DELETE FROM tag_usage where targetFQNHash = :targetFQNHash OR targetFQNHash LIKE CONCAT(:targetFQNHash, '.%')") + void deleteTagLabelsByTargetPrefix(@Bind("targetFQNHash") String targetFQNHash); /** Update all the tagFQN starting with oldPrefix to start with newPrefix due to tag or glossary name change */ default void updateTagPrefix(int source, String oldPrefix, String newPrefix) { String update = String.format( - "UPDATE tag_usage set tagFQN = REPLACE(tagFQN, '%s.', '%s.') WHERE source = %s AND tagFQN LIKE '%s.%%'", - escapeApostrophe(oldPrefix), escapeApostrophe(newPrefix), source, escape(oldPrefix)); + "UPDATE tag_usage SET tagFQN = REPLACE(tagFQN, '%s.', '%s.'), tagFQNHash = REPLACE(tagFQNHash, '%s.', '%s.') WHERE source = %s AND tagFQNHash LIKE '%s.%%'", + escapeApostrophe(oldPrefix), + escapeApostrophe(newPrefix), + FullyQualifiedName.buildHash(oldPrefix), + FullyQualifiedName.buildHash(newPrefix), + source, + 
FullyQualifiedName.buildHash(oldPrefix)); updateTagPrefixInternal(update); } default void rename(int source, String oldFQN, String newFQN) { - renameInternal(source, oldFQN, newFQN); // First rename tagFQN from oldFQN to newFQN + renameInternal( + source, + FullyQualifiedName.buildHash(oldFQN), + newFQN, + FullyQualifiedName.buildHash(newFQN)); // First rename tagFQN from oldFQN to newFQN updateTagPrefix(source, oldFQN, newFQN); // Rename all the tagFQN prefixes starting with the oldFQN to newFQN } /** Rename the tagFQN */ - @SqlUpdate("Update tag_usage set tagFQN = :newFQN WHERE source = :source AND tagFQN = :oldFQN") - void renameInternal(@Bind("source") int source, @Bind("oldFQN") String oldFQN, @Bind("newFQN") String newFQN); + @SqlUpdate( + "Update tag_usage set tagFQN = :newFQN, tagFQNHash = :newFQNHash WHERE source = :source AND tagFQNHash = :oldFQNHash") + void renameInternal( + @Bind("source") int source, + @Bind("oldFQNHash") String oldFQNHash, + @Bind("newFQN") String newFQN, + @Bind("newFQNHash") String newFQNHash); @SqlUpdate("") void updateTagPrefixInternal(@Define("update") String update); @@ -1838,8 +1860,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -1855,8 +1877,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } @Override @@ -2044,8 +2066,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -2192,13 +2214,13 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } @Override default int listCount(ListFilter filter) { - String team = filter.getQueryParam("team"); + String team = FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(filter.getQueryParam("team"))); String isBotStr = filter.getQueryParam("isBot"); String isAdminStr = filter.getQueryParam("isAdmin"); String mySqlCondition = filter.getCondition("ue"); @@ -2243,7 +2265,7 @@ public interface CollectionDAO { @Override default List listBefore(ListFilter filter, int limit, String before) { - String team = filter.getQueryParam("team"); + String team = FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(filter.getQueryParam("team"))); String isBotStr = filter.getQueryParam("isBot"); String isAdminStr = filter.getQueryParam("isAdmin"); String mySqlCondition = filter.getCondition("ue"); @@ -2295,7 +2317,7 @@ public interface CollectionDAO { @Override default List listAfter(ListFilter filter, int limit, String after) { - String team = filter.getQueryParam("team"); + String team = FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(filter.getQueryParam("team"))); String isBotStr = filter.getQueryParam("isBot"); String isAdminStr = filter.getQueryParam("isAdmin"); String mySqlCondition = filter.getCondition("ue"); @@ -2353,7 +2375,7 @@ public interface CollectionDAO { + "LEFT JOIN entity_relationship er on ue.id = er.toId " + "LEFT JOIN team_entity te on te.id = er.fromId and er.relation = :relation " + " " - + " AND (:team IS NULL OR te.name = :team) " + + " AND (:team IS NULL OR te.nameHash = :team) " + "GROUP BY ue.id) subquery", connectionType = MYSQL) @ConnectionAwareSqlQuery( @@ -2364,7 +2386,7 @@ public interface CollectionDAO { + "LEFT 
JOIN entity_relationship er on ue.id = er.toId " + "LEFT JOIN team_entity te on te.id = er.fromId and er.relation = :relation " + " " - + " AND (:team IS NULL OR te.name = :team) " + + " AND (:team IS NULL OR te.nameHash = :team) " + "GROUP BY ue.id) subquery", connectionType = POSTGRES) int listCount( @@ -2383,7 +2405,7 @@ public interface CollectionDAO { + "LEFT JOIN entity_relationship er on ue.id = er.toId " + "LEFT JOIN team_entity te on te.id = er.fromId and er.relation = :relation " + " " - + "AND (:team IS NULL OR te.name = :team) " + + "AND (:team IS NULL OR te.nameHash = :team) " + "AND ue. < :before " + "GROUP BY ue., ue.json " + "ORDER BY ue. DESC " @@ -2398,7 +2420,7 @@ public interface CollectionDAO { + "LEFT JOIN entity_relationship er on ue.id = er.toId " + "LEFT JOIN team_entity te on te.id = er.fromId and er.relation = :relation " + " " - + "AND (:team IS NULL OR te.name = :team) " + + "AND (:team IS NULL OR te.nameHash = :team) " + "AND ue. < :before " + "GROUP BY ue., ue.json " + "ORDER BY ue. DESC " @@ -2422,7 +2444,7 @@ public interface CollectionDAO { + "LEFT JOIN entity_relationship er on ue.id = er.toId " + "LEFT JOIN team_entity te on te.id = er.fromId and er.relation = :relation " + " " - + "AND (:team IS NULL OR te.name = :team) " + + "AND (:team IS NULL OR te.nameHash = :team) " + "AND ue. > :after " + "GROUP BY ue., ue.json " + "ORDER BY ue. " @@ -2435,7 +2457,7 @@ public interface CollectionDAO { + "LEFT JOIN entity_relationship er on ue.id = er.toId " + "LEFT JOIN team_entity te on te.id = er.fromId and er.relation = :relation " + " " - + "AND (:team IS NULL OR te.name = :team) " + + "AND (:team IS NULL OR te.nameHash = :team) " + "AND ue. > :after " + "GROUP BY ue., ue.json " + "ORDER BY ue. " @@ -2504,8 +2526,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } @Override @@ -2526,8 +2548,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } @Override @@ -2708,8 +2730,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -2725,8 +2747,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } default int countOfTestCases(List testCaseIds) { @@ -2750,8 +2772,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -2767,8 +2789,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } @@ -2780,30 +2802,30 @@ public interface CollectionDAO { @ConnectionAwareSqlUpdate( value = - "INSERT INTO entity_extension_time_series(entityFQN, extension, jsonSchema, json) " - + "VALUES (:entityFQN, :extension, :jsonSchema, :json)", + "INSERT INTO entity_extension_time_series(entityFQNHash, extension, jsonSchema, json) " + + "VALUES (:entityFQNHash, :extension, :jsonSchema, :json)", connectionType = MYSQL) @ConnectionAwareSqlUpdate( value = - "INSERT INTO entity_extension_time_series(entityFQN, extension, jsonSchema, json) " - + "VALUES (:entityFQN, :extension, :jsonSchema, (:json :: 
jsonb))", + "INSERT INTO entity_extension_time_series(entityFQNHash, extension, jsonSchema, json) " + + "VALUES (:entityFQNHash, :extension, :jsonSchema, (:json :: jsonb))", connectionType = POSTGRES) void insert( - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension, @Bind("jsonSchema") String jsonSchema, @Bind("json") String json); @ConnectionAwareSqlUpdate( value = - "UPDATE entity_extension_time_series set json = :json where entityFQN=:entityFQN and extension=:extension and timestamp=:timestamp", + "UPDATE entity_extension_time_series set json = :json where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp", connectionType = MYSQL) @ConnectionAwareSqlUpdate( value = - "UPDATE entity_extension_time_series set json = (:json :: jsonb) where entityFQN=:entityFQN and extension=:extension and timestamp=:timestamp", + "UPDATE entity_extension_time_series set json = (:json :: jsonb) where entityFQNHash=:entityFQNHash and extension=:extension and timestamp=:timestamp", connectionType = POSTGRES) void update( - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension, @Bind("json") String json, @Bind("timestamp") Long timestamp); @@ -2823,71 +2845,74 @@ public interface CollectionDAO { @Bind("timestamp") Long timestamp, @Bind("operation") String operation); - @SqlQuery("SELECT json FROM entity_extension_time_series WHERE entityFQN = :entityFQN AND extension = :extension") - String getExtension(@Bind("entityFQN") String entityId, @Bind("extension") String extension); + @SqlQuery( + "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQN AND extension = :extension") + String getExtension(@Bind("entityFQNHash") String entityId, @Bind("extension") String extension); - @SqlQuery("SELECT count(*) FROM entity_extension_time_series WHERE EntityFQN = :entityFQN") - int listCount(@Bind("entityFQN") String entityFQN); + @SqlQuery("SELECT count(*) FROM entity_extension_time_series WHERE EntityFQNHash = :entityFQNHash") + int listCount(@Bind("entityFQNHash") String entityFQNHash); @ConnectionAwareSqlQuery( value = "WITH data AS (SELECT ROW_NUMBER() OVER(ORDER BY timestamp ASC) AS row_num, json " - + "FROM entity_extension_time_series WHERE EntityFQN = :entityFQN) " + + "FROM entity_extension_time_series WHERE EntityFQNHash = :entityFQNHash) " + "SELECT row_num, json FROM data WHERE row_num < :before LIMIT :limit", connectionType = MYSQL) @ConnectionAwareSqlQuery( value = "WITH data AS (SELECT ROW_NUMBER() OVER(ORDER BY timestamp ASC) AS row_num, json " - + "FROM entity_extension_time_series WHERE EntityFQN = :entityFQN) " + + "FROM entity_extension_time_series WHERE EntityFQNHash = :entityFQNHash) " + "SELECT row_num, json FROM data WHERE row_num < (:before :: integer) LIMIT :limit", connectionType = POSTGRES) @RegisterRowMapper(ReportDataMapper.class) List getBeforeExtension( - @Bind("entityFQN") String entityFQN, @Bind("limit") int limit, @Bind("before") String before); + @Bind("entityFQNHash") String entityFQN, @Bind("limit") int limit, @Bind("before") String before); @ConnectionAwareSqlQuery( value = "WITH data AS (SELECT ROW_NUMBER() OVER(ORDER BY timestamp ASC) AS row_num, json " - + "FROM entity_extension_time_series WHERE EntityFQN = :entityFQN) " + + "FROM entity_extension_time_series WHERE EntityFQNHash = :entityFQNHash) " + "SELECT row_num, json FROM data WHERE row_num > :after LIMIT :limit", connectionType = MYSQL) 
@ConnectionAwareSqlQuery( value = "WITH data AS (SELECT ROW_NUMBER() OVER(ORDER BY timestamp ASC) AS row_num, json " - + "FROM entity_extension_time_series WHERE EntityFQN = :entityFQN) " + + "FROM entity_extension_time_series WHERE EntityFQNHash = :entityFQNHash) " + "SELECT row_num, json FROM data WHERE row_num > (:after :: integer) LIMIT :limit", connectionType = POSTGRES) @RegisterRowMapper(ReportDataMapper.class) List getAfterExtension( - @Bind("entityFQN") String entityFQN, @Bind("limit") int limit, @Bind("after") String after); + @Bind("entityFQNHash") String entityFQNHash, @Bind("limit") int limit, @Bind("after") String after); @SqlQuery( - "SELECT json FROM entity_extension_time_series WHERE entityFQN = :entityFQN AND extension = :extension AND timestamp = :timestamp") + "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp") String getExtensionAtTimestamp( - @Bind("entityFQN") String entityFQN, @Bind("extension") String extension, @Bind("timestamp") long timestamp); + @Bind("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("timestamp") long timestamp); @ConnectionAwareSqlQuery( value = - "SELECT json FROM entity_extension_time_series WHERE entityFQN = :entityFQN AND extension = :extension AND timestamp = :timestamp AND json -> '$.operation' = :operation", + "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp AND json -> '$.operation' = :operation", connectionType = MYSQL) @ConnectionAwareSqlQuery( value = - "SELECT json FROM entity_extension_time_series WHERE entityFQN = :entityFQN AND extension = :extension AND timestamp = :timestamp AND json #>>'{operation}' = :operation", + "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp AND json #>>'{operation}' = :operation", connectionType = POSTGRES) String getExtensionAtTimestampWithOperation( - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension, @Bind("timestamp") long timestamp, @Bind("operation") String operation); @SqlQuery( - "SELECT json FROM entity_extension_time_series WHERE entityFQN = :entityFQN AND extension = :extension " + "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension " + "ORDER BY timestamp DESC LIMIT 1") - String getLatestExtension(@Bind("entityFQN") String entityFQN, @Bind("extension") String extension); + String getLatestExtension(@Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension); @SqlQuery( - "SELECT ranked.json FROM (SELECT json, ROW_NUMBER() OVER(PARTITION BY entityFQN ORDER BY timestamp DESC) AS row_num " - + "FROM entity_extension_time_series WHERE entityFQN IN ()) ranked WHERE ranked.row_num = 1") + "SELECT ranked.json FROM (SELECT json, ROW_NUMBER() OVER(PARTITION BY entityFQNHash ORDER BY timestamp DESC) AS row_num " + + "FROM entity_extension_time_series WHERE entityFQNHash IN ()) ranked WHERE ranked.row_num = 1") List getLatestExtensionByFQNs( @BindList("entityFQNs") List entityFQNs, @Bind("extension") String extension); @@ -2906,82 +2931,86 @@ public interface CollectionDAO { + "ORDER BY extension") List getExtensions(@Bind("id") String id, @Bind("extensionPrefix") String extensionPrefix); - @SqlUpdate("DELETE FROM entity_extension_time_series WHERE 
entityFQN = :entityFQN AND extension = :extension") - void delete(@Bind("entityFQN") String entityFQN, @Bind("extension") String extension); + @SqlUpdate("DELETE FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash") + void deleteAll(@Bind("entityFQNHash") String entityFQNHash); - @SqlUpdate("DELETE FROM entity_extension_time_series WHERE entityFQN = :entityFQN") - void deleteAll(@Bind("entityFQN") String entityFQN); + @SqlUpdate( + "DELETE FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension") + void delete(@Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension); // This just saves the limit number of records, and remove all other with given extension @SqlUpdate( - "DELETE FROM entity_extension_time_series WHERE extension = :extension AND entityFQN NOT IN(SELECT entityFQN FROM (select * from entity_extension_time_series WHERE extension = :extension ORDER BY timestamp DESC LIMIT :records) AS subquery)") + "DELETE FROM entity_extension_time_series WHERE extension = :extension AND entityFQNHash NOT IN(SELECT entityFQN FROM (select * from entity_extension_time_series WHERE extension = :extension ORDER BY timestamp DESC LIMIT :records) AS subquery)") void deleteLastRecords(@Bind("extension") String extension, @Bind("records") int noOfRecord); @SqlUpdate( - "DELETE FROM entity_extension_time_series WHERE entityFQN = :entityFQN AND extension = :extension AND timestamp = :timestamp") + "DELETE FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp = :timestamp") void deleteAtTimestamp( - @Bind("entityFQN") String entityFQN, @Bind("extension") String extension, @Bind("timestamp") Long timestamp); + @Bind("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("timestamp") Long timestamp); @SqlUpdate( - "DELETE FROM entity_extension_time_series WHERE entityFQN = :entityFQN AND extension = :extension AND timestamp < :timestamp") - void deleteBeforeExclusive( - @Bind("entityFQN") String entityFQN, @Bind("extension") String extension, @Bind("timestamp") Long timestamp); + "DELETE FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND extension = :extension AND timestamp < :timestamp") + void deleteBeforeTimestamp( + @Bind("entityFQNHash") String entityFQNHash, + @Bind("extension") String extension, + @Bind("timestamp") Long timestamp); @SqlQuery( - "SELECT json FROM entity_extension_time_series WHERE entityFQN = :entityFQN AND jsonSchema = :jsonSchema " + "SELECT json FROM entity_extension_time_series WHERE entityFQNHash = :entityFQNHash AND jsonSchema = :jsonSchema " + "ORDER BY timestamp DESC LIMIT 1") - String getLatestExtensionByFQN(@Bind("entityFQN") String entityFQN, @Bind("jsonSchema") String jsonSchema); + String getLatestExtensionByFQN(@Bind("entityFQNHash") String entityFQNHash, @Bind("jsonSchema") String jsonSchema); @SqlQuery( - "SELECT json FROM entity_extension_time_series where entityFQN = :entityFQN and jsonSchema = :jsonSchema " + "SELECT json FROM entity_extension_time_series where entityFQNHash = :entityFQNHash and jsonSchema = :jsonSchema " + " AND timestamp >= :startTs and timestamp <= :endTs ORDER BY timestamp DESC") List listBetweenTimestampsByFQN( - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("jsonSchema") String jsonSchema, @Bind("startTs") Long startTs, @Bind("endTs") long endTs); @SqlQuery( - "SELECT json FROM 
entity_extension_time_series where entityFQN = :entityFQN and extension = :extension " + "SELECT json FROM entity_extension_time_series where entityFQNHash = :entityFQNHash and extension = :extension " + " AND timestamp >= :startTs and timestamp <= :endTs ORDER BY timestamp DESC") List listBetweenTimestamps( - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension, @Bind("startTs") Long startTs, @Bind("endTs") long endTs); @SqlQuery( - "SELECT json FROM entity_extension_time_series where entityFQN = :entityFQN and extension = :extension " + "SELECT json FROM entity_extension_time_series where entityFQNHash = :entityFQNHash and extension = :extension " + " AND timestamp >= :startTs and timestamp <= :endTs ORDER BY timestamp ") List listBetweenTimestampsByOrder( - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension, @Bind("startTs") Long startTs, @Bind("endTs") long endTs, @Define("orderBy") OrderBy orderBy); default void updateExtensionByKey(String key, String value, String entityFQN, String extension, String json) { - + String entityFQNHash = FullyQualifiedName.buildHash(entityFQN); String mysqlCond = String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); String psqlCond = String.format("AND json->>'%s' = :value", key); - updateExtensionByKeyInternal(value, entityFQN, extension, json, mysqlCond, psqlCond); + updateExtensionByKeyInternal(value, entityFQNHash, extension, json, mysqlCond, psqlCond); } - default String getExtensionByKey(String key, String value, String entityFQN, String extension) { + default String getExtensionByKey(String key, String value, String entityFQNHash, String extension) { String mysqlCond = String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); String psqlCond = String.format("AND json->>'%s' = :value", key); - return getExtensionByKeyInternal(value, entityFQN, extension, mysqlCond, psqlCond); + return getExtensionByKeyInternal(value, entityFQNHash, extension, mysqlCond, psqlCond); } default String getLatestExtensionByKey(String key, String value, String entityFQN, String extension) { - + String entityFQNHash = FullyQualifiedName.buildHash(entityFQN); String mysqlCond = String.format("AND JSON_UNQUOTE(JSON_EXTRACT(json, '$.%s')) = :value", key); String psqlCond = String.format("AND json->>'%s' = :value", key); - - return getLatestExtensionByKeyInternal(value, entityFQN, extension, mysqlCond, psqlCond); + return getLatestExtensionByKeyInternal(value, entityFQNHash, extension, mysqlCond, psqlCond); } /* @@ -2990,20 +3019,20 @@ public interface CollectionDAO { @ConnectionAwareSqlUpdate( value = "UPDATE entity_extension_time_series SET json = :json " - + "WHERE entityFQN = :entityFQN " + + "WHERE entityFQNHash = :entityFQNHash " + "AND extension = :extension " + "", connectionType = MYSQL) @ConnectionAwareSqlUpdate( value = "UPDATE entity_extension_time_series SET json = (:json :: jsonb) " - + "WHERE entityFQN = :entityFQN " + + "WHERE entityFQNHash = :entityFQNHash " + "AND extension = :extension " + "", connectionType = POSTGRES) void updateExtensionByKeyInternal( @Bind("value") String value, - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension, @Bind("json") String json, @Define("mysqlCond") String mysqlCond, @@ -3015,20 +3044,20 @@ public interface CollectionDAO { @ConnectionAwareSqlQuery( value = "SELECT json from 
entity_extension_time_series " - + "WHERE entityFQN = :entityFQN " + + "WHERE entityFQNHash = :entityFQNHash " + "AND extension = :extension " + "", connectionType = MYSQL) @ConnectionAwareSqlQuery( value = "SELECT json from entity_extension_time_series " - + "WHERE entityFQN = :entityFQN " + + "WHERE entityFQNHash = :entityFQNHash " + "AND extension = :extension " + "", connectionType = POSTGRES) String getExtensionByKeyInternal( @Bind("value") String value, - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension, @Define("mysqlCond") String mysqlCond, @Define("psqlCond") String psqlCond); @@ -3036,7 +3065,7 @@ public interface CollectionDAO { @ConnectionAwareSqlQuery( value = "SELECT json from entity_extension_time_series " - + "WHERE entityFQN = :entityFQN " + + "WHERE entityFQNHash = :entityFQNHash " + "AND extension = :extension " + " " + "ORDER BY timestamp DESC LIMIT 1", @@ -3044,14 +3073,14 @@ public interface CollectionDAO { @ConnectionAwareSqlQuery( value = "SELECT json from entity_extension_time_series " - + "WHERE entityFQN = :entityFQN " + + "WHERE entityFQNHash = :entityFQNHash " + "AND extension = :extension " + " " + "ORDER BY timestamp DESC LIMIT 1", connectionType = POSTGRES) String getLatestExtensionByKeyInternal( @Bind("value") String value, - @Bind("entityFQN") String entityFQN, + @Bind("entityFQNHash") String entityFQNHash, @Bind("extension") String extension, @Define("mysqlCond") String mysqlCond, @Define("psqlCond") String psqlCond); @@ -3298,8 +3327,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } } @@ -3315,8 +3344,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "name"; + default String getNameHashColumn() { + return "nameHash"; } @Override @@ -3340,7 +3369,7 @@ public interface CollectionDAO { sqlCondition.append(String.format("AND status='%s' ", status)); } - return listBefore(getTableName(), getNameColumn(), sqlCondition.toString(), limit, before); + return listBefore(getTableName(), getNameHashColumn(), sqlCondition.toString(), limit, before); } @Override @@ -3364,7 +3393,7 @@ public interface CollectionDAO { sqlCondition.append(String.format("AND status='%s' ", status)); } - return listAfter(getTableName(), getNameColumn(), sqlCondition.toString(), limit, after); + return listAfter(getTableName(), getNameHashColumn(), sqlCondition.toString(), limit, after); } @Override @@ -3388,7 +3417,7 @@ public interface CollectionDAO { sqlCondition.append(String.format("AND status='%s' ", status)); } - return listCount(getTableName(), getNameColumn(), sqlCondition.toString()); + return listCount(getTableName(), getNameHashColumn(), sqlCondition.toString()); } @SqlQuery( @@ -3438,8 +3467,8 @@ public interface CollectionDAO { } @Override - default String getNameColumn() { - return "fullyQualifiedName"; + default String getNameHashColumn() { + return "fqnHash"; } } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ContainerRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ContainerRepository.java index db11857c470..0cdf4d7f35d 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ContainerRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ContainerRepository.java @@ -92,6 +92,11 @@ public class ContainerRepository extends 
EntityRepository { } } + @Override + public String getFullyQualifiedNameHash(Container container) { + return FullyQualifiedName.buildHash(container.getFullyQualifiedName()); + } + private void setColumnFQN(String parentFQN, List columns) { columns.forEach( c -> { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardDataModelRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardDataModelRepository.java index 1224d96a40b..91231bf7bf7 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardDataModelRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DashboardDataModelRepository.java @@ -96,6 +96,11 @@ public class DashboardDataModelRepository extends EntityRepository { @Override public void setFullyQualifiedName(Dashboard dashboard) { - dashboard.setFullyQualifiedName(FullyQualifiedName.add(dashboard.getService().getName(), dashboard.getName())); + dashboard.setFullyQualifiedName( + FullyQualifiedName.add(dashboard.getService().getFullyQualifiedName(), dashboard.getName())); } @Override diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java index 0ab4fa7a692..95075791a90 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseRepository.java @@ -47,6 +47,11 @@ public class DatabaseRepository extends EntityRepository { database.setFullyQualifiedName(FullyQualifiedName.build(database.getService().getName(), database.getName())); } + @Override + public String getFullyQualifiedNameHash(Database entity) { + return FullyQualifiedName.buildHash(entity.getFullyQualifiedName()); + } + @Override public void prepare(Database database) throws IOException { populateService(database); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java index 129ae0842cc..ab038c1a0ba 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/DatabaseSchemaRepository.java @@ -53,6 +53,11 @@ public class DatabaseSchemaRepository extends EntityRepository { FullyQualifiedName.add(schema.getDatabase().getFullyQualifiedName(), schema.getName())); } + @Override + public String getFullyQualifiedNameHash(DatabaseSchema schema) { + return FullyQualifiedName.buildHash(schema.getFullyQualifiedName()); + } + @Override public void prepare(DatabaseSchema schema) throws IOException { populateDatabase(schema); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityDAO.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityDAO.java index db9ab5a69c7..b78f0bf579e 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityDAO.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityDAO.java @@ -46,42 +46,75 @@ public interface EntityDAO { Class getEntityClass(); - String getNameColumn(); + default String getNameColumn() { + return "name"; + } + + default String getNameHashColumn() { + return "nameHash"; + }; default boolean supportsSoftDelete() { return true; } /** 
Common queries for all entities implemented here. Do not override. */
-  @ConnectionAwareSqlUpdate(value = "INSERT INTO <table> (json) VALUES (:json)", connectionType = MYSQL)
-  @ConnectionAwareSqlUpdate(value = "INSERT INTO <table> (json) VALUES (:json :: jsonb)", connectionType = POSTGRES)
-  void insert(@Define("table") String table, @Bind("json") String json);
-
-  @ConnectionAwareSqlUpdate(value = "UPDATE <table> SET json = :json WHERE id = :id", connectionType = MYSQL)
   @ConnectionAwareSqlUpdate(
-      value = "UPDATE <table> SET json = (:json :: jsonb) WHERE id = :id",
+      value = "INSERT INTO <table> (<nameHashColumn>, json) VALUES (:nameHashColumnValue, :json)",
+      connectionType = MYSQL)
+  @ConnectionAwareSqlUpdate(
+      value = "INSERT INTO <table> (<nameHashColumn>, json) VALUES (:nameHashColumnValue, :json :: jsonb)",
       connectionType = POSTGRES)
-  void update(@Define("table") String table, @Bind("id") String id, @Bind("json") String json);
+  void insert(
+      @Define("table") String table,
+      @Define("nameHashColumn") String nameHashColumn,
+      @Bind("nameHashColumnValue") String nameHashColumnValue,
+      @Bind("json") String json);
+
+  @ConnectionAwareSqlUpdate(
+      value = "UPDATE <table> SET json = :json, <nameHashColumn> = :nameHashColumnValue WHERE id = :id",
+      connectionType = MYSQL)
+  @ConnectionAwareSqlUpdate(
+      value = "UPDATE <table> SET json = (:json :: jsonb), <nameHashColumn> = :nameHashColumnValue WHERE id = :id",
+      connectionType = POSTGRES)
+  void update(
+      @Define("table") String table,
+      @Define("nameHashColumn") String nameHashColumn,
+      @Bind("nameHashColumnValue") String nameHashColumnValue,
+      @Bind("id") String id,
+      @Bind("json") String json);

   default void updateFqn(String oldPrefix, String newPrefix) {
     LOG.info("Updating FQN for {} from {} to {}", getTableName(), oldPrefix, newPrefix);
-    if (!getNameColumn().equals("fullyQualifiedName")) {
+    if (!getNameHashColumn().equals("fqnHash")) {
       return;
     }
     String mySqlUpdate =
         String.format(
             "UPDATE %s SET json = "
-                + "JSON_REPLACE(json, '$.fullyQualifiedName', REGEXP_REPLACE(fullyQualifiedName, '^%s\\.', '%s.')) "
-                + "WHERE fullyQualifiedName LIKE '%s.%%'",
-            getTableName(), escape(oldPrefix), escapeApostrophe(newPrefix), escape(oldPrefix));
+                + "JSON_REPLACE(json, '$.fullyQualifiedName', REGEXP_REPLACE(JSON_UNQUOTE(JSON_EXTRACT(json, '$.fullyQualifiedName')), '^%s\\.', '%s.')) "
+                + ", fqnHash = REPLACE(fqnHash, '%s.', '%s.') "
+                + "WHERE fqnHash LIKE '%s.%%'",
+            getTableName(),
+            escape(oldPrefix),
+            escapeApostrophe(newPrefix),
+            FullyQualifiedName.buildHash(oldPrefix),
+            FullyQualifiedName.buildHash(newPrefix),
+            FullyQualifiedName.buildHash(oldPrefix));
     String postgresUpdate =
         String.format(
             "UPDATE %s SET json = "
                 + "REPLACE(json::text, '\"fullyQualifiedName\": \"%s.', "
                 + "'\"fullyQualifiedName\": \"%s.')::jsonb "
-                + "WHERE fullyQualifiedName LIKE '%s.%%'",
-            getTableName(), escapeApostrophe(oldPrefix), escapeApostrophe(newPrefix), escape(oldPrefix));
+                + ", fqnHash = REPLACE(fqnHash, '%s.', '%s.') "
+                + "WHERE fqnHash LIKE '%s.%%'",
+            getTableName(),
+            escapeApostrophe(oldPrefix),
+            escapeApostrophe(newPrefix),
+            FullyQualifiedName.buildHash(oldPrefix),
+            FullyQualifiedName.buildHash(newPrefix),
+            FullyQualifiedName.buildHash(oldPrefix));
     updateFqnInternal(mySqlUpdate, postgresUpdate);
   }

@@ -141,23 +174,29 @@
   @SqlQuery("SELECT EXISTS (SELECT * FROM <table> WHERE id = :id)")
   boolean exists(@Define("table") String table, @Bind("id") String id);

-  @SqlQuery("SELECT EXISTS (SELECT * FROM <table> WHERE <nameColumn> = :fqn)")
-  boolean existsByName(@Define("table") String table, @Define("nameColumn") String nameColumn, @Bind("fqn") String fqn);
+  @SqlQuery("SELECT EXISTS (SELECT * FROM <table> WHERE <nameColumnHash> = :fqnHash)")
+  boolean existsByName(
+      @Define("table") String table, @Define("nameColumnHash") String nameColumnHash, @Bind("fqnHash") String fqnHash);

   @SqlUpdate("DELETE FROM <table>
WHERE id = :id") int delete(@Define("table") String table, @Bind("id") String id); /** Default methods that interfaces with implementation. Don't override */ - default void insert(EntityInterface entity) throws JsonProcessingException { - insert(getTableName(), JsonUtils.pojoToJson(entity)); + default void insert(EntityInterface entity, String fqnHash) throws JsonProcessingException { + insert(getTableName(), getNameHashColumn(), fqnHash, JsonUtils.pojoToJson(entity)); } - default void update(UUID id, String json) { - update(getTableName(), id.toString(), json); + default void update(UUID id, String fqnHash, String json) { + update(getTableName(), getNameHashColumn(), fqnHash, id.toString(), json); } default void update(EntityInterface entity) throws JsonProcessingException { - update(getTableName(), entity.getId().toString(), JsonUtils.pojoToJson(entity)); + update( + getTableName(), + getNameHashColumn(), + FullyQualifiedName.buildHash(entity.getFullyQualifiedName()), + entity.getId().toString(), + JsonUtils.pojoToJson(entity)); } default String getCondition(Include include) { @@ -188,7 +227,8 @@ public interface EntityDAO { @SneakyThrows default T findEntityByName(String fqn, Include include) { - return jsonToEntity(findByName(getTableName(), getNameColumn(), fqn, getCondition(include)), fqn); + return jsonToEntity( + findByName(getTableName(), getNameHashColumn(), FullyQualifiedName.buildHash(fqn), getCondition(include)), fqn); } default T jsonToEntity(String json, String identity) throws IOException { @@ -226,15 +266,15 @@ public interface EntityDAO { } default String findJsonByFqn(String fqn, Include include) { - return findByName(getTableName(), getNameColumn(), fqn, getCondition(include)); + return findByName(getTableName(), getNameHashColumn(), FullyQualifiedName.buildHash(fqn), getCondition(include)); } default int listCount(ListFilter filter) { - return listCount(getTableName(), getNameColumn(), filter.getCondition()); + return listCount(getTableName(), getNameHashColumn(), filter.getCondition()); } default int listTotalCount() { - return listTotalCount(getTableName(), getNameColumn()); + return listTotalCount(getTableName(), getNameHashColumn()); } default List listBefore(ListFilter filter, int limit, String before) { @@ -250,7 +290,7 @@ public interface EntityDAO { } default List listAfter(ListFilter filter, int limit, int offset) { - return listAfter(getTableName(), getNameColumn(), filter.getCondition(), limit, offset); + return listAfter(getTableName(), getNameHashColumn(), filter.getCondition(), limit, offset); } default void exists(UUID id) { @@ -261,7 +301,7 @@ public interface EntityDAO { } default void existsByName(String fqn) { - if (!existsByName(getTableName(), getNameColumn(), fqn)) { + if (!existsByName(getTableName(), getNameHashColumn(), FullyQualifiedName.buildHash(fqn))) { String entityType = Entity.getEntityTypeFromClass(getEntityClass()); throw EntityNotFoundException.byMessage(CatalogExceptionMessage.entityNotFound(entityType, fqn)); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java index 948f2cc1b06..b1808aac190 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/EntityRepository.java @@ -96,6 +96,7 @@ import org.openmetadata.schema.type.TaskDetails; import org.openmetadata.schema.type.TaskType; 
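// The EntityDAO changes above shift the DAO contract: writes now carry a precomputed hash of the
// entity's name/fullyQualifiedName, and name-based lookups hash the incoming FQN before querying.
// EntityRepository below is the main caller of that contract. A rough, hypothetical illustration
// (the entity value, DAO variable, and FQN are placeholders, not code from this patch):
//
//   Table table = new Table().withFullyQualifiedName("svc.db.schema.orders");
//   String fqnHash = FullyQualifiedName.buildHash(table.getFullyQualifiedName());
//   tableDAO.insert(table, fqnHash); // the fqnHash column is populated on write
//   Table stored = tableDAO.findEntityByName("svc.db.schema.orders", Include.NON_DELETED); // hashes the FQN internally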
import org.openmetadata.schema.type.Votes; import org.openmetadata.schema.type.csv.CsvImportResult; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.OpenMetadataApplicationConfig; import org.openmetadata.service.TypeRegistry; @@ -265,7 +266,12 @@ public abstract class EntityRepository { /** Set fullyQualifiedName of an entity */ public void setFullyQualifiedName(T entity) { - entity.setFullyQualifiedName(entity.getName()); + entity.setFullyQualifiedName(EntityInterfaceUtil.quoteName(entity.getName())); + } + + /** Set fullyQualifiedNameHash of an entity */ + public String getFullyQualifiedNameHash(T entity) { + return FullyQualifiedName.buildHash(entity.getFullyQualifiedName()); } /** Update an entity based suggested description and tags in the task */ @@ -364,7 +370,7 @@ public abstract class EntityRepository { } @Transaction - public final T getByName(UriInfo uriInfo, String fqn, Fields fields) throws IOException { + public T getByName(UriInfo uriInfo, String fqn, Fields fields) throws IOException { return getByName(uriInfo, fqn, fields, NON_DELETED); } @@ -411,10 +417,10 @@ public abstract class EntityRepository { String beforeCursor; String afterCursor = null; - beforeCursor = after == null ? null : entities.get(0).getFullyQualifiedName(); + beforeCursor = after == null ? null : entities.get(0).getName(); if (entities.size() > limitParam) { // If extra result exists, then next page exists - return after cursor entities.remove(limitParam); - afterCursor = entities.get(limitParam - 1).getFullyQualifiedName(); + afterCursor = entities.get(limitParam - 1).getName(); } return getResultList(entities, beforeCursor, afterCursor, total); } else { @@ -445,11 +451,11 @@ public abstract class EntityRepository { String beforeCursor; String afterCursor = null; - beforeCursor = after == null ? null : JsonUtils.readValue(jsons.get(0), entityClass).getFullyQualifiedName(); - if (jsons.size() > limitParam) { + beforeCursor = after == null ? 
null : entities.get(0).getName(); + if (entities.size() > limitParam) { // If extra result exists, then next page exists - return after cursor T lastReadEntity = JsonUtils.readValue(jsons.get(limitParam), entityClass); - entities.remove(lastReadEntity.getId()); - afterCursor = JsonUtils.readValue(jsons.get(limitParam - 1), entityClass).getFullyQualifiedName(); + entities.remove(limitParam); + afterCursor = entities.get(limitParam - 1).getName(); errors.forEach((key, value) -> entities.remove(key)); // Remove the Last Json Entry if present in error, since the read was actually just till limitParam , and if // error @@ -481,9 +487,9 @@ public abstract class EntityRepository { String afterCursor; if (entities.size() > limitParam) { // If extra result exists, then previous page exists - return before cursor entities.remove(0); - beforeCursor = entities.get(0).getFullyQualifiedName(); + beforeCursor = entities.get(0).getName(); } - afterCursor = entities.get(entities.size() - 1).getFullyQualifiedName(); + afterCursor = entities.get(entities.size() - 1).getName(); return getResultList(entities, beforeCursor, afterCursor, total); } @@ -810,7 +816,9 @@ public abstract class EntityRepository { daoCollection.relationshipDAO().deleteAll(id, entityType); // Delete all the field relationships to other entities - daoCollection.fieldRelationshipDAO().deleteAllByPrefix(entityInterface.getFullyQualifiedName()); + daoCollection + .fieldRelationshipDAO() + .deleteAllByPrefix(FullyQualifiedName.buildHash(entityInterface.getFullyQualifiedName())); // Delete all the extensions of entity daoCollection.entityExtensionDAO().deleteAll(id); @@ -883,10 +891,10 @@ public abstract class EntityRepository { entity.setTags(null); if (update) { - dao.update(entity.getId(), JsonUtils.pojoToJson(entity)); + dao.update(entity.getId(), getFullyQualifiedNameHash(entity), JsonUtils.pojoToJson(entity)); LOG.info("Updated {}:{}:{}", entityType, entity.getId(), entity.getFullyQualifiedName()); } else { - dao.insert(entity); + dao.insert(entity, getFullyQualifiedNameHash(entity)); LOG.info("Created {}:{}:{}", entityType, entity.getId(), entity.getFullyQualifiedName()); } @@ -895,6 +903,82 @@ public abstract class EntityRepository { entity.setTags(tags); } + protected void storeTimeSeries( + String fullyQualifiedName, String extension, String jsonSchema, String entityJson, Long timestamp, boolean update) + throws JsonProcessingException { + String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName); + if (update) { + daoCollection.entityExtensionTimeSeriesDao().update(fqnHash, extension, entityJson, timestamp); + } else { + daoCollection.entityExtensionTimeSeriesDao().insert(fqnHash, extension, jsonSchema, entityJson); + } + } + + protected void storeTimeSeriesWithOperation( + String fullyQualifiedName, + String extension, + String jsonSchema, + String entityJson, + Long timestamp, + String operation, + boolean update) + throws JsonProcessingException { + String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName); + if (update) { + daoCollection + .entityExtensionTimeSeriesDao() + .updateExtensionByOperation(fqnHash, extension, entityJson, timestamp, operation); + } else { + daoCollection.entityExtensionTimeSeriesDao().insert(fqnHash, extension, jsonSchema, entityJson); + } + } + + public String getExtensionAtTimestamp(String fullyQualifiedName, String extension, Long timestamp) { + String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName); + return 
daoCollection.entityExtensionTimeSeriesDao().getExtensionAtTimestamp(fqnHash, extension, timestamp); + } + + public String getExtensionAtTimestampWithOperation( + String fullyQualifiedName, String extension, Long timestamp, String operation) { + String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName); + return daoCollection + .entityExtensionTimeSeriesDao() + .getExtensionAtTimestampWithOperation(fqnHash, extension, timestamp, operation); + } + + public String getLatestExtensionFromTimeseries(String fullyQualifiedName, String extension) { + String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName); + return daoCollection.entityExtensionTimeSeriesDao().getLatestExtension(fqnHash, extension); + } + + public List getResultsFromAndToTimestamps( + String fullyQualifiedName, String extension, Long startTs, Long endTs) { + return getResultsFromAndToTimestamps( + fullyQualifiedName, extension, startTs, endTs, CollectionDAO.EntityExtensionTimeSeriesDAO.OrderBy.DESC); + } + + public List getResultsFromAndToTimestamps( + String fullyQualifiedName, + String extension, + Long startTs, + Long endTs, + CollectionDAO.EntityExtensionTimeSeriesDAO.OrderBy orderBy) { + String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName); + return daoCollection + .entityExtensionTimeSeriesDao() + .listBetweenTimestampsByOrder(fqnHash, extension, startTs, endTs, orderBy); + } + + public void deleteExtensionAtTimestamp(String fullyQualifiedName, String extension, Long timestamp) { + String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName); + daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqnHash, extension, timestamp); + } + + public void deleteExtensionBeforeTimestamp(String fullyQualifiedName, String extension, Long timestamp) { + String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName); + daoCollection.entityExtensionTimeSeriesDao().deleteBeforeTimestamp(fqnHash, extension, timestamp); + } + public void validateExtension(T entity) { if (entity.getExtension() == null) { return; @@ -1019,7 +1103,8 @@ public abstract class EntityRepository { .applyTag( tagLabel.getSource().ordinal(), tagLabel.getTagFQN(), - targetFQN, + FullyQualifiedName.buildHash(tagLabel.getTagFQN()), + FullyQualifiedName.buildHash(targetFQN), tagLabel.getLabelType().ordinal(), tagLabel.getState().ordinal()); } @@ -1029,7 +1114,7 @@ public abstract class EntityRepository { Map map = new HashMap<>(); for (TagLabel tagLabel : listOrEmpty(tagLabels)) { // When two tags have the same parent that is mutuallyExclusive, then throw an error - String parentFqn = FullyQualifiedName.getParent(tagLabel.getTagFQN()); + String parentFqn = FullyQualifiedName.getParentFQN(tagLabel.getTagFQN()); TagLabel stored = map.put(parentFqn, tagLabel); if (stored != null && TagLabelCache.getInstance().mutuallyExclusive(tagLabel)) { throw new IllegalArgumentException(CatalogExceptionMessage.mutuallyExclusiveLabels(tagLabel, stored)); @@ -1503,6 +1588,9 @@ public abstract class EntityRepository { return; // Nothing to update } + // Remove current entity tags in the database. It will be added back later from the merged tag list. 
+ daoCollection.tagUsageDAO().deleteTagsByTarget(FullyQualifiedName.buildHash(fqn)); + if (operation.isPut()) { // PUT operation merges tags in the request with what already exists EntityUtil.mergeTags(updatedTags, origTags); @@ -1848,7 +1936,10 @@ public abstract class EntityRepository { // Delete tags related to deleted columns deletedColumns.forEach( - deleted -> daoCollection.tagUsageDAO().deleteTagsByTarget(deleted.getFullyQualifiedName())); + deleted -> + daoCollection + .tagUsageDAO() + .deleteTagsByTarget(FullyQualifiedName.buildHash(deleted.getFullyQualifiedName()))); // Add tags related to newly added columns for (Column added : addedColumns) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/FeedRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/FeedRepository.java index c6a800a9714..4c0d7254c92 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/FeedRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/FeedRepository.java @@ -69,6 +69,7 @@ import org.openmetadata.schema.type.TaskDetails; import org.openmetadata.schema.type.TaskStatus; import org.openmetadata.schema.type.TaskType; import org.openmetadata.schema.type.ThreadType; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.ResourceRegistry; import org.openmetadata.service.exception.EntityNotFoundException; @@ -82,6 +83,7 @@ import org.openmetadata.service.resources.feeds.MessageParser.EntityLink; import org.openmetadata.service.security.Authorizer; import org.openmetadata.service.security.policyevaluator.SubjectCache; import org.openmetadata.service.util.EntityUtil; +import org.openmetadata.service.util.FullyQualifiedName; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.RestUtil; import org.openmetadata.service.util.RestUtil.DeleteResponse; @@ -125,7 +127,7 @@ public class FeedRepository { thread.withEntityId(aboutEntity.getId()); // Add entity id to thread EntityReference entityOwner = aboutEntity.getOwner(); - // Validate user creating the thread + // Validate user creating thread User createdByUser = SubjectCache.getInstance().getUser(thread.getCreatedBy()); if (thread.getType() == ThreadType.Task) { @@ -154,8 +156,10 @@ public class FeedRepository { // Add field relationship for data asset - Thread -- isAbout ---> entity/entityField dao.fieldRelationshipDAO() .insert( - thread.getId().toString(), // from FQN - about.getFullyQualifiedFieldValue(), // to FQN + FullyQualifiedName.buildHash(thread.getId().toString()), // from FQN + FullyQualifiedName.buildHash(about.getFullyQualifiedFieldValue()), // to FQN, + thread.getId().toString(), + about.getFullyQualifiedFieldValue(), Entity.THREAD, // From type about.getFullyQualifiedFieldType(), // to Type IS_ABOUT.ordinal(), @@ -187,7 +191,7 @@ public class FeedRepository { public PatchResponse closeTask(UriInfo uriInfo, Thread thread, String user, CloseTask closeTask) throws IOException { // Update the attributes - closeTask(thread, user, closeTask.getComment()); + closeTask(thread, EntityInterfaceUtil.quoteName(user), closeTask.getComment()); Thread updatedHref = FeedResource.addHref(uriInfo, thread); return new PatchResponse<>(Status.OK, updatedHref, RestUtil.ENTITY_UPDATED); } @@ -281,6 +285,8 @@ public class FeedRepository { mention -> dao.fieldRelationshipDAO() .insert( + 
FullyQualifiedName.buildHash(mention.getFullyQualifiedFieldValue()), + FullyQualifiedName.buildHash(thread.getId().toString()), mention.getFullyQualifiedFieldValue(), thread.getId().toString(), mention.getFullyQualifiedFieldType(), @@ -344,7 +350,7 @@ public class FeedRepository { dao.relationshipDAO().deleteAll(id, Entity.THREAD); // Delete all the field relationships to other entities - dao.fieldRelationshipDAO().deleteAllByPrefix(id); + dao.fieldRelationshipDAO().deleteAllByPrefix(FullyQualifiedName.buildHash(id)); // Finally, delete the entity dao.feedDAO().delete(id); @@ -353,6 +359,10 @@ public class FeedRepository { return new DeleteResponse<>(thread, RestUtil.ENTITY_DELETED); } + public EntityReference getOwnerReference(String username) { + return dao.userDAO().findEntityByName(EntityInterfaceUtil.quoteName(username)).getEntityReference(); + } + @Transaction public ThreadCount getThreadsCount(FeedFilter filter, String link) throws IOException { List> result; @@ -385,7 +395,7 @@ public class FeedRepository { result = dao.feedDAO() .listCountByEntityLink( - entityLink.getFullyQualifiedFieldValue(), + FullyQualifiedName.buildHash(entityLink.getFullyQualifiedFieldValue()), Entity.THREAD, entityLink.getFullyQualifiedFieldType(), IS_ABOUT.ordinal(), @@ -437,15 +447,17 @@ public class FeedRepository { total = filteredThreads.getTotalCount(); } else { // Only data assets are added as about - String userName = userId != null ? SubjectCache.getInstance().getUserById(userId).getName() : null; - List teamNames = getTeamNames(userId); - List jsons; - jsons = + User user = userId != null ? SubjectCache.getInstance().getUserById(userId) : null; + List teamNameHash = getTeamNames(user); + String userNameHash = getUserNameHash(user); + List jsons = dao.feedDAO() - .listThreadsByEntityLink(filter, entityLink, limit + 1, IS_ABOUT.ordinal(), userName, teamNames); + .listThreadsByEntityLink( + filter, entityLink, limit + 1, IS_ABOUT.ordinal(), userNameHash, teamNameHash); threads = JsonUtils.readObjects(jsons, Thread.class); total = - dao.feedDAO().listCountThreadsByEntityLink(filter, entityLink, IS_ABOUT.ordinal(), userName, teamNames); + dao.feedDAO() + .listCountThreadsByEntityLink(filter, entityLink, IS_ABOUT.ordinal(), userNameHash, teamNameHash); } } else { // userId filter present @@ -501,7 +513,15 @@ public class FeedRepository { // Multiple reactions by the same user on same thread or post is handled by // field relationship table constraint (primary key) dao.fieldRelationshipDAO() - .insert(user, thread.getId().toString(), Entity.USER, Entity.THREAD, Relationship.REACTED_TO.ordinal(), null); + .insert( + FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(user)), + FullyQualifiedName.buildHash(thread.getId().toString()), + user, + thread.getId().toString(), + Entity.USER, + Entity.THREAD, + Relationship.REACTED_TO.ordinal(), + null); } @Transaction @@ -818,6 +838,27 @@ public class FeedRepository { return new FilteredThreads(threads, totalCount); } + /** Returns the threads where the user or the team they belong to were mentioned by other users with @mention. 
*/ + private FilteredThreads getThreadsByMentions(FeedFilter filter, String userId, int limit) throws IOException { + + User user = SubjectCache.getInstance().getUserById(userId); + String userNameHash = getUserNameHash(user); + // Return the threads where the user or team was mentioned + List teamNamesHash = getTeamNames(user); + + // Return the threads where the user or team was mentioned + List jsons = + dao.feedDAO() + .listThreadsByMentions( + userNameHash, teamNamesHash, limit, Relationship.MENTIONED_IN.ordinal(), filter.getCondition()); + List threads = JsonUtils.readObjects(jsons, Thread.class); + int totalCount = + dao.feedDAO() + .listCountThreadsByMentions( + userNameHash, teamNamesHash, Relationship.MENTIONED_IN.ordinal(), filter.getCondition(false)); + return new FilteredThreads(threads, totalCount); + } + /** Get a list of team ids that the given user is a part of. */ private List getTeamIds(String userId) { List teamIds = null; @@ -828,40 +869,6 @@ public class FeedRepository { return nullOrEmpty(teamIds) ? List.of(StringUtils.EMPTY) : teamIds; } - /** Get a list of team names that the given user is a part of. */ - private List getTeamNames(String userId) { - List teamNames = null; - if (userId != null) { - User user = SubjectCache.getInstance().getUserById(userId); - teamNames = listOrEmpty(user.getTeams()).stream().map(EntityReference::getName).collect(Collectors.toList()); - } - return nullOrEmpty(teamNames) ? List.of(StringUtils.EMPTY) : teamNames; - } - - /** Returns the threads where the user or the team they belong to were mentioned by other users with @mention. */ - private FilteredThreads getThreadsByMentions(FeedFilter filter, String userId, int limit) throws IOException { - List teams = - populateEntityReferences( - dao.relationshipDAO().findFrom(userId, Entity.USER, Relationship.HAS.ordinal(), Entity.TEAM), Entity.TEAM); - List teamNames = teams.stream().map(EntityReference::getName).collect(Collectors.toList()); - if (teamNames.isEmpty()) { - teamNames = List.of(StringUtils.EMPTY); - } - User user = dao.userDAO().findEntityById(UUID.fromString(userId)); - - // Return the threads where the user or team was mentioned - List jsons = - dao.feedDAO() - .listThreadsByMentions( - user.getName(), teamNames, limit, Relationship.MENTIONED_IN.ordinal(), filter.getCondition()); - List threads = JsonUtils.readObjects(jsons, Thread.class); - int totalCount = - dao.feedDAO() - .listCountThreadsByMentions( - user.getName(), teamNames, Relationship.MENTIONED_IN.ordinal(), filter.getCondition(false)); - return new FilteredThreads(threads, totalCount); - } - /** Returns the threads that are associated with the entities followed by the user. */ private FilteredThreads getThreadsByFollows(FeedFilter filter, String userId, int limit) throws IOException { List teamIds = getTeamIds(userId); @@ -874,6 +881,28 @@ public class FeedRepository { return new FilteredThreads(threads, totalCount); } + /** Get a list of team names that the given user is a part of. */ + private List getTeamNames(User user) { + List teamNames = null; + if (user != null) { + teamNames = + listOrEmpty(user.getTeams()).stream() + .map( + x -> { + return FullyQualifiedName.buildHash(x.getFullyQualifiedName()); + }) + .collect(Collectors.toList()); + } + return nullOrEmpty(teamNames) ? 
List.of(StringUtils.EMPTY) : teamNames; + } + + private String getUserNameHash(User user) { + if (user != null) { + return FullyQualifiedName.buildHash(user.getFullyQualifiedName()); + } + return null; + } + public static class FilteredThreads { @Getter private final List threads; @Getter private final int totalCount; diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryRepository.java index 3f46c5d2e72..00ead6762e8 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryRepository.java @@ -18,6 +18,7 @@ package org.openmetadata.service.jdbi3; import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; +import static org.openmetadata.csv.CsvUtil.FIELD_SEPARATOR; import static org.openmetadata.csv.CsvUtil.addEntityReference; import static org.openmetadata.csv.CsvUtil.addEntityReferences; import static org.openmetadata.csv.CsvUtil.addField; @@ -104,7 +105,9 @@ public class GlossaryRepository extends EntityRepository { } private Integer getUsageCount(Glossary glossary) { - return daoCollection.tagUsageDAO().getTagCount(TagSource.GLOSSARY.ordinal(), glossary.getName()); + return daoCollection + .tagUsageDAO() + .getTagCount(TagSource.GLOSSARY.ordinal(), FullyQualifiedName.buildHash(glossary.getName())); } private Integer getTermCount(Glossary glossary) { @@ -187,7 +190,7 @@ public class GlossaryRepository extends EntityRepository { } // Field 9 - reviewers - glossaryTerm.withReviewers(getEntityReferences(printer, csvRecord, 8, Entity.USER)); + glossaryTerm.withReviewers(getUserOrTeamEntityReferences(printer, csvRecord, 8, Entity.USER)); if (!processRecord) { return null; } @@ -242,7 +245,7 @@ public class GlossaryRepository extends EntityRepository { addEntityReferences(recordList, entity.getRelatedTerms()); addField(recordList, termReferencesToRecord(entity.getReferences())); addTagLabels(recordList, entity.getTags()); - addEntityReferences(recordList, entity.getReviewers()); + addField(recordList, reviewerReferencesToRecord(entity.getReviewers())); addOwner(recordList, entity.getOwner()); addField(recordList, entity.getStatus().value()); return recordList; @@ -253,7 +256,13 @@ public class GlossaryRepository extends EntityRepository { ? null : list.stream() .map(termReference -> termReference.getName() + CsvUtil.FIELD_SEPARATOR + termReference.getEndpoint()) - .collect(Collectors.joining(";")); + .collect(Collectors.joining(FIELD_SEPARATOR)); + } + + private String reviewerReferencesToRecord(List reviewers) { + return nullOrEmpty(reviewers) + ? 
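On the glossary CSV changes above: reviewers are now exported as a FIELD_SEPARATOR-joined list of user names instead of full entity references. A standalone sketch of that round trip, taking FIELD_SEPARATOR to be ";" to match the literal it replaces in termReferencesToRecord:

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

class ReviewerCsvFieldSketch {
  static final String FIELD_SEPARATOR = ";"; // assumed value of CsvUtil.FIELD_SEPARATOR

  // Same shape as reviewerReferencesToRecord: null for no reviewers, joined names otherwise.
  static String reviewersToField(List<String> reviewerNames) {
    return reviewerNames == null || reviewerNames.isEmpty()
        ? null
        : String.join(FIELD_SEPARATOR, reviewerNames);
  }

  static List<String> fieldToReviewers(String field) {
    return field == null || field.isBlank()
        ? Collections.emptyList()
        : Arrays.asList(field.split(FIELD_SEPARATOR));
  }

  public static void main(String[] args) {
    String field = reviewersToField(List.of("alice", "bob"));
    System.out.println(field);                   // alice;bob
    System.out.println(fieldToReviewers(field)); // [alice, bob]
  }
}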
null + : reviewers.stream().map(EntityReference::getName).collect(Collectors.joining(FIELD_SEPARATOR)); } } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryTermRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryTermRepository.java index 623e17322c2..358f3f744f9 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryTermRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/GlossaryTermRepository.java @@ -77,7 +77,9 @@ public class GlossaryTermRepository extends EntityRepository { } private Integer getUsageCount(GlossaryTerm term) { - return daoCollection.tagUsageDAO().getTagCount(TagSource.GLOSSARY.ordinal(), term.getFullyQualifiedName()); + return daoCollection + .tagUsageDAO() + .getTagCount(TagSource.GLOSSARY.ordinal(), FullyQualifiedName.buildHash(term.getFullyQualifiedName())); } private EntityReference getParent(GlossaryTerm entity) throws IOException { @@ -184,6 +186,11 @@ public class GlossaryTermRepository extends EntityRepository { } } + @Override + public String getFullyQualifiedNameHash(GlossaryTerm entity) { + return FullyQualifiedName.buildHash(entity.getFullyQualifiedName()); + } + protected EntityReference getGlossary(GlossaryTerm term) throws IOException { return getFromEntityRef(term.getId(), Relationship.CONTAINS, GLOSSARY, true); } @@ -200,7 +207,9 @@ public class GlossaryTermRepository extends EntityRepository { @Override protected void postDelete(GlossaryTerm entity) { // Cleanup all the tag labels using this glossary term - daoCollection.tagUsageDAO().deleteTagLabels(TagSource.GLOSSARY.ordinal(), entity.getFullyQualifiedName()); + daoCollection + .tagUsageDAO() + .deleteTagLabels(TagSource.GLOSSARY.ordinal(), FullyQualifiedName.buildHash(entity.getFullyQualifiedName())); } private void addGlossaryRelationship(GlossaryTerm term) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/IngestionPipelineRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/IngestionPipelineRepository.java index 2f3eb42a81c..99e1963b750 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/IngestionPipelineRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/IngestionPipelineRepository.java @@ -69,7 +69,12 @@ public class IngestionPipelineRepository extends EntityRepository pipelineStatusList = JsonUtils.readObjects( - daoCollection - .entityExtensionTimeSeriesDao() - .listBetweenTimestampsByFQN( - ingestionPipeline.getFullyQualifiedName(), PIPELINE_STATUS_JSON_SCHEMA, startTs, endTs), + getResultsFromAndToTimestamps( + ingestionPipeline.getFullyQualifiedName(), PIPELINE_STATUS_EXTENSION, startTs, endTs), PipelineStatus.class); List allPipelineStatusList = pipelineServiceClient.getQueuedPipelineStatus(ingestionPipeline); allPipelineStatusList.addAll(pipelineStatusList); @@ -224,9 +226,7 @@ public class IngestionPipelineRepository extends EntityRepository { // Validate the request content Kpi kpi = dao.findEntityByName(fqn); - KpiResult storedKpiResult = - JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestamp(kpi.getFullyQualifiedName(), KPI_RESULT_EXTENSION, kpiResult.getTimestamp()), - KpiResult.class); - if (storedKpiResult != null) { - daoCollection - .entityExtensionTimeSeriesDao() - .update( - kpi.getFullyQualifiedName(), - KPI_RESULT_EXTENSION, - JsonUtils.pojoToJson(kpiResult), - 
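The KPI result write being replaced here, and the pipeline status, table profile, and test result writes later in the patch, all funnel through one check-then-write helper instead of repeating the insert-or-update branching against the time-series DAO. A sketch of that pattern with an in-memory map standing in for entity_extension_time_series; the real helpers also carry a JSON-schema field name argument, omitted here for brevity:

import java.util.HashMap;
import java.util.Map;

class TimeSeriesUpsertSketch {
  // Rows keyed by (entity key, extension, timestamp); values are JSON payloads.
  private final Map<String, String> rows = new HashMap<>();

  private static String key(String entityKey, String extension, long timestamp) {
    return entityKey + "|" + extension + "|" + timestamp;
  }

  String getExtensionAtTimestamp(String entityKey, String extension, long timestamp) {
    return rows.get(key(entityKey, extension, timestamp));
  }

  // 'update' mirrors the "storedRecord != null" flag the repositories pass:
  // true overwrites the row for that timestamp, false inserts a new one.
  void storeTimeSeries(String entityKey, String extension, String json, long timestamp, boolean update) {
    String k = key(entityKey, extension, timestamp);
    if (!update && rows.containsKey(k)) {
      throw new IllegalStateException("row already exists for timestamp " + timestamp);
    }
    rows.put(k, json);
  }

  public static void main(String[] args) {
    TimeSeriesUpsertSketch store = new TimeSeriesUpsertSketch();
    String stored = store.getExtensionAtTimestamp("kpi1", "kpi.result", 100L);
    store.storeTimeSeries("kpi1", "kpi.result", "{\"value\":42}", 100L, stored != null);
    System.out.println(store.getExtensionAtTimestamp("kpi1", "kpi.result", 100L));
  }
}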
kpiResult.getTimestamp()); - } else { - daoCollection - .entityExtensionTimeSeriesDao() - .insert(kpi.getFullyQualifiedName(), KPI_RESULT_EXTENSION, KPI_RESULT_FIELD, JsonUtils.pojoToJson(kpiResult)); - } + String storedKpiResult = + getExtensionAtTimestamp(kpi.getFullyQualifiedName(), KPI_RESULT_EXTENSION, kpiResult.getTimestamp()); + storeTimeSeries( + kpi.getFullyQualifiedName(), + KPI_RESULT_EXTENSION, + "kpiResult", + JsonUtils.pojoToJson(kpiResult), + kpiResult.getTimestamp(), + storedKpiResult != null); ChangeDescription change = addKpiResultChangeDescription(kpi.getVersion(), kpiResult, storedKpiResult); ChangeEvent changeEvent = getChangeEvent(withHref(uriInfo, kpi), change, entityType, kpi.getVersion()); @@ -133,11 +123,9 @@ public class KpiRepository extends EntityRepository { // Validate the request content Kpi kpi = dao.findEntityByName(fqn); KpiResult storedKpiResult = - JsonUtils.readValue( - daoCollection.entityExtensionTimeSeriesDao().getExtensionAtTimestamp(fqn, KPI_RESULT_EXTENSION, timestamp), - KpiResult.class); + JsonUtils.readValue(getExtensionAtTimestamp(fqn, KPI_RESULT_EXTENSION, timestamp), KpiResult.class); if (storedKpiResult != null) { - daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqn, KPI_RESULT_EXTENSION, timestamp); + deleteExtensionAtTimestamp(fqn, KPI_RESULT_EXTENSION, timestamp); kpi.setKpiResult(storedKpiResult); ChangeDescription change = deleteKpiChangeDescription(kpi.getVersion(), storedKpiResult); ChangeEvent changeEvent = getChangeEvent(kpi, change, entityType, kpi.getVersion()); @@ -175,8 +163,7 @@ public class KpiRepository extends EntityRepository { } public KpiResult getKpiResult(String fqn) throws IOException { - return JsonUtils.readValue( - daoCollection.entityExtensionTimeSeriesDao().getLatestExtension(fqn, KPI_RESULT_EXTENSION), KpiResult.class); + return JsonUtils.readValue(getLatestExtensionFromTimeseries(fqn, KPI_RESULT_EXTENSION), KpiResult.class); } public ResultList getKpiResults( @@ -185,10 +172,7 @@ public class KpiRepository extends EntityRepository { List kpiResults; kpiResults = JsonUtils.readObjects( - daoCollection - .entityExtensionTimeSeriesDao() - .listBetweenTimestampsByOrder(fqn, KPI_RESULT_EXTENSION, startTs, endTs, orderBy), - KpiResult.class); + getResultsFromAndToTimestamps(fqn, KPI_RESULT_EXTENSION, startTs, endTs, orderBy), KpiResult.class); return new ResultList<>(kpiResults, String.valueOf(startTs), String.valueOf(endTs), kpiResults.size()); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ListFilter.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ListFilter.java index 198ece0d1cc..5959229bf46 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ListFilter.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ListFilter.java @@ -10,6 +10,7 @@ import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.Relationship; import org.openmetadata.service.Entity; import org.openmetadata.service.resources.databases.DatasourceConfig; +import org.openmetadata.service.util.FullyQualifiedName; public class ListFilter { @Getter private final Include include; @@ -155,10 +156,10 @@ public class ListFilter { } private String getFqnPrefixCondition(String tableName, String fqnPrefix) { - fqnPrefix = escape(fqnPrefix); return tableName == null - ? 
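The getFqnPrefixCondition rewrite that continues just below switches the LIKE filter from fullyQualifiedName to fqnHash. For a prefix match to keep working, the hash of a parent FQN has to stay a literal string prefix of its children's hashes, which points at hashing each dot-separated level independently rather than digesting the whole string. buildHash's implementation is not shown in these hunks, so the following is a plausible sketch of that idea, not the shipped code (quoted FQN parts are ignored for simplicity):

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.stream.Collectors;

class FqnPrefixHashSketch {
  static String md5Hex(String part) {
    try {
      byte[] digest = MessageDigest.getInstance("MD5").digest(part.getBytes(StandardCharsets.UTF_8));
      StringBuilder hex = new StringBuilder();
      for (byte b : digest) hex.append(String.format("%02x", b));
      return hex.toString();
    } catch (NoSuchAlgorithmException e) {
      throw new IllegalStateException(e); // MD5 is always present in the JDK
    }
  }

  // Assumption: hash each level separately and rejoin, so hash("svc.db") prefixes hash("svc.db.schema.table").
  static String buildHash(String fqn) {
    return Arrays.stream(fqn.split("\\.")).map(FqnPrefixHashSketch::md5Hex).collect(Collectors.joining("."));
  }

  public static void main(String[] args) {
    String prefixHash = buildHash("svc.db");
    String fullHash = buildHash("svc.db.schema.table");
    // Equivalent of the SQL condition: fqnHash LIKE '<prefixHash>.%'
    System.out.println(fullHash.startsWith(prefixHash + "."));  // true
  }
}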
String.format("fullyQualifiedName LIKE '%s%s%%'", fqnPrefix, Entity.SEPARATOR) - : String.format("%s.fullyQualifiedName LIKE '%s%s%%'", tableName, fqnPrefix, Entity.SEPARATOR); + ? String.format("fqnHash LIKE '%s%s%%'", FullyQualifiedName.buildHash(fqnPrefix), Entity.SEPARATOR) + : String.format( + "%s.fqnHash LIKE '%s%s%%'", tableName, FullyQualifiedName.buildHash(fqnPrefix), Entity.SEPARATOR); } private String getWebhookTypePrefixCondition(String tableName, String typePrefix) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MetricsRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MetricsRepository.java index 50e0bd1d415..833def3ed66 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MetricsRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MetricsRepository.java @@ -45,6 +45,11 @@ public class MetricsRepository extends EntityRepository { metrics.setFullyQualifiedName(FullyQualifiedName.add(metrics.getService().getName(), metrics.getName())); } + @Override + public String getFullyQualifiedNameHash(Metrics metrics) { + return FullyQualifiedName.buildHash(metrics.getFullyQualifiedName()); + } + @Override public Metrics setFields(Metrics metrics, Fields fields) throws IOException { metrics.setService(getContainer(metrics.getId())); // service is a default field diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MlModelRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MlModelRepository.java index 05250f10b68..666328f09ba 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MlModelRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MlModelRepository.java @@ -61,12 +61,18 @@ public class MlModelRepository extends EntityRepository { @Override public void setFullyQualifiedName(MlModel mlModel) { - mlModel.setFullyQualifiedName(FullyQualifiedName.add(mlModel.getService().getName(), mlModel.getName())); + mlModel.setFullyQualifiedName( + FullyQualifiedName.add(mlModel.getService().getFullyQualifiedName(), mlModel.getName())); if (!nullOrEmpty(mlModel.getMlFeatures())) { setMlFeatureFQN(mlModel.getFullyQualifiedName(), mlModel.getMlFeatures()); } } + @Override + public String getFullyQualifiedNameHash(MlModel mlModel) { + return FullyQualifiedName.buildHash(mlModel.getFullyQualifiedName()); + } + @Override public MlModel setFields(MlModel mlModel, Fields fields) throws IOException { mlModel.setService(getContainer(mlModel.getId())); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/PipelineRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/PipelineRepository.java index 2c347a69428..ebd909840cf 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/PipelineRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/PipelineRepository.java @@ -39,7 +39,7 @@ import org.openmetadata.schema.type.TaskDetails; import org.openmetadata.service.Entity; import org.openmetadata.service.exception.CatalogExceptionMessage; import org.openmetadata.service.exception.EntityNotFoundException; -import org.openmetadata.service.resources.feeds.MessageParser.EntityLink; +import org.openmetadata.service.resources.feeds.MessageParser; import org.openmetadata.service.resources.pipelines.PipelineResource; import org.openmetadata.service.util.EntityUtil; import 
org.openmetadata.service.util.EntityUtil.Fields; @@ -66,12 +66,19 @@ public class PipelineRepository extends EntityRepository { @Override public void setFullyQualifiedName(Pipeline pipeline) { - pipeline.setFullyQualifiedName(FullyQualifiedName.add(pipeline.getService().getName(), pipeline.getName())); + pipeline.setFullyQualifiedName( + FullyQualifiedName.add(pipeline.getService().getFullyQualifiedName(), pipeline.getName())); setTaskFQN(pipeline.getFullyQualifiedName(), pipeline.getTasks()); } @Override - public void update(TaskDetails task, EntityLink entityLink, String newValue, String user) throws IOException { + public String getFullyQualifiedNameHash(Pipeline pipeline) { + return FullyQualifiedName.buildHash(pipeline.getFullyQualifiedName()); + } + + @Override + public void update(TaskDetails task, MessageParser.EntityLink entityLink, String newValue, String user) + throws IOException { if (entityLink.getFieldName().equals("tasks")) { Pipeline pipeline = getByName(null, entityLink.getEntityFQN(), getFields("tasks,tags"), Include.ALL); String oldJson = JsonUtils.pojoToJson(pipeline); @@ -110,9 +117,7 @@ public class PipelineRepository extends EntityRepository { private PipelineStatus getPipelineStatus(Pipeline pipeline) throws IOException { return JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getLatestExtension(pipeline.getFullyQualifiedName(), PIPELINE_STATUS_EXTENSION), + getLatestExtensionFromTimeseries(pipeline.getFullyQualifiedName(), PIPELINE_STATUS_EXTENSION), PipelineStatus.class); } @@ -127,29 +132,16 @@ public class PipelineRepository extends EntityRepository { validateTask(pipeline, taskStatus.getName()); } - PipelineStatus storedPipelineStatus = - JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, pipelineStatus.getTimestamp()), - PipelineStatus.class); - if (storedPipelineStatus != null) { - daoCollection - .entityExtensionTimeSeriesDao() - .update( - pipeline.getFullyQualifiedName(), - PIPELINE_STATUS_EXTENSION, - JsonUtils.pojoToJson(pipelineStatus), - pipelineStatus.getTimestamp()); - } else { - daoCollection - .entityExtensionTimeSeriesDao() - .insert( - pipeline.getFullyQualifiedName(), - PIPELINE_STATUS_EXTENSION, - "pipelineStatus", - JsonUtils.pojoToJson(pipelineStatus)); - } + String storedPipelineStatus = + getExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, pipelineStatus.getTimestamp()); + storeTimeSeries( + pipeline.getFullyQualifiedName(), + PIPELINE_STATUS_EXTENSION, + "pipelineStatus", + JsonUtils.pojoToJson(pipelineStatus), + pipelineStatus.getTimestamp(), + storedPipelineStatus != null); + return pipeline.withPipelineStatus(pipelineStatus); } @@ -159,13 +151,9 @@ public class PipelineRepository extends EntityRepository { Pipeline pipeline = dao.findEntityByName(fqn); pipeline.setService(getContainer(pipeline.getId())); PipelineStatus storedPipelineStatus = - JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, timestamp), - PipelineStatus.class); + JsonUtils.readValue(getExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, timestamp), PipelineStatus.class); if (storedPipelineStatus != null) { - daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, timestamp); + deleteExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, timestamp); pipeline.setPipelineStatus(storedPipelineStatus); return pipeline; } @@ -177,11 +165,7 @@ public 
class PipelineRepository extends EntityRepository { List pipelineStatuses; pipelineStatuses = JsonUtils.readObjects( - daoCollection - .entityExtensionTimeSeriesDao() - .listBetweenTimestamps(fqn, PIPELINE_STATUS_EXTENSION, starTs, endTs), - PipelineStatus.class); - + getResultsFromAndToTimestamps(fqn, PIPELINE_STATUS_EXTENSION, starTs, endTs), PipelineStatus.class); return new ResultList<>(pipelineStatuses, starTs.toString(), endTs.toString(), pipelineStatuses.size()); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/QueryRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/QueryRepository.java index 31de2f2cac4..50d9e328c3e 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/QueryRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/QueryRepository.java @@ -22,7 +22,6 @@ import org.openmetadata.schema.type.Relationship; import org.openmetadata.service.Entity; import org.openmetadata.service.resources.query.QueryResource; import org.openmetadata.service.util.EntityUtil; -import org.openmetadata.service.util.QueryUtil; import org.openmetadata.service.util.RestUtil; public class QueryRepository extends EntityRepository { @@ -74,7 +73,7 @@ public class QueryRepository extends EntityRepository { @SneakyThrows public void prepare(Query entity) { if (CommonUtil.nullOrEmpty(entity.getName())) { - String checkSum = QueryUtil.getCheckSum(entity.getQuery()); + String checkSum = EntityUtil.hash(entity.getQuery()); entity.setChecksum(checkSum); entity.setName(checkSum); } @@ -185,7 +184,7 @@ public class QueryRepository extends EntityRepository { "users", USER, original.getUsers(), updated.getUsers(), Relationship.USES, Entity.QUERY, original.getId()); if (operation.isPatch() && !original.getQuery().equals(updated.getQuery())) { recordChange("query", original.getQuery(), updated.getQuery()); - String checkSum = QueryUtil.getCheckSum(updated.getQuery()); + String checkSum = EntityUtil.hash(updated.getQuery()); recordChange("name", original.getName(), checkSum); recordChange("checkSum", original.getChecksum(), checkSum); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ReportDataRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ReportDataRepository.java index d24f25b9f57..469a066d575 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ReportDataRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ReportDataRepository.java @@ -7,6 +7,7 @@ import javax.ws.rs.core.Response; import org.jdbi.v3.sqlobject.transaction.Transaction; import org.openmetadata.schema.analytics.ReportData; import org.openmetadata.schema.analytics.ReportData.ReportDataType; +import org.openmetadata.service.util.EntityUtil; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.ResultList; @@ -22,11 +23,10 @@ public class ReportDataRepository { @Transaction public Response addReportData(ReportData reportData) throws IOException { reportData.setId(UUID.randomUUID()); - daoCollection .entityExtensionTimeSeriesDao() .insert( - reportData.getReportDataType().value(), + EntityUtil.hash(reportData.getReportDataType().value()), REPORT_DATA_EXTENSION, "reportData", JsonUtils.pojoToJson(reportData)); @@ -41,7 +41,7 @@ public class ReportDataRepository { JsonUtils.readObjects( daoCollection .entityExtensionTimeSeriesDao() - 
.listBetweenTimestamps(reportDataType.value(), REPORT_DATA_EXTENSION, startTs, endTs), + .listBetweenTimestamps(EntityUtil.hash(reportDataType.value()), REPORT_DATA_EXTENSION, startTs, endTs), ReportData.class); return new ResultList<>(reportData, String.valueOf(startTs), String.valueOf(endTs), reportData.size()); diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ServiceEntityRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ServiceEntityRepository.java index f9569a501ef..913dd3ec127 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ServiceEntityRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/ServiceEntityRepository.java @@ -24,6 +24,7 @@ import org.openmetadata.schema.entity.services.connections.TestConnectionResult; import org.openmetadata.service.secrets.SecretsManager; import org.openmetadata.service.secrets.SecretsManagerFactory; import org.openmetadata.service.util.EntityUtil; +import org.openmetadata.service.util.FullyQualifiedName; import org.openmetadata.service.util.JsonUtils; public abstract class ServiceEntityRepository< @@ -95,7 +96,7 @@ public abstract class ServiceEntityRepository< public T addTestConnectionResult(UUID serviceId, TestConnectionResult testConnectionResult) throws IOException { T service = dao.findEntityById(serviceId); service.setTestConnectionResult(testConnectionResult); - dao.update(serviceId, JsonUtils.pojoToJson(service)); + dao.update(serviceId, FullyQualifiedName.buildHash(service.getFullyQualifiedName()), JsonUtils.pojoToJson(service)); return service; } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java index 299c1d3c9f0..a53e86f42c7 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TableRepository.java @@ -168,6 +168,11 @@ public class TableRepository extends EntityRepository
{ ColumnUtil.setColumnFQN(table.getFullyQualifiedName(), table.getColumns()); } + @Override + public String getFullyQualifiedNameHash(Table entity) { + return FullyQualifiedName.buildHash(entity.getFullyQualifiedName()); + } + @Transaction public Table addJoins(UUID tableId, TableJoins joins) throws IOException { // Validate the request content @@ -341,32 +346,18 @@ public class TableRepository extends EntityRepository<Table>
{ public Table addTableProfileData(UUID tableId, CreateTableProfile createTableProfile) throws IOException { // Validate the request content Table table = dao.findEntityById(tableId); - TableProfile storedTableProfile = - JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestamp( - table.getFullyQualifiedName(), - TABLE_PROFILE_EXTENSION, - createTableProfile.getTableProfile().getTimestamp()), - TableProfile.class); - if (storedTableProfile != null) { - daoCollection - .entityExtensionTimeSeriesDao() - .update( - table.getFullyQualifiedName(), - TABLE_PROFILE_EXTENSION, - JsonUtils.pojoToJson(createTableProfile.getTableProfile()), - createTableProfile.getTableProfile().getTimestamp()); - } else { - daoCollection - .entityExtensionTimeSeriesDao() - .insert( - table.getFullyQualifiedName(), - TABLE_PROFILE_EXTENSION, - "tableProfile", - JsonUtils.pojoToJson(createTableProfile.getTableProfile())); - } + String storedTableProfile = + getExtensionAtTimestamp( + table.getFullyQualifiedName(), + TABLE_PROFILE_EXTENSION, + createTableProfile.getTableProfile().getTimestamp()); + storeTimeSeries( + table.getFullyQualifiedName(), + TABLE_PROFILE_EXTENSION, + "tableProfile", + JsonUtils.pojoToJson(createTableProfile.getTableProfile()), + createTableProfile.getTableProfile().getTimestamp(), + storedTableProfile != null); for (ColumnProfile columnProfile : createTableProfile.getColumnProfile()) { // Validate all the columns @@ -374,64 +365,35 @@ public class TableRepository extends EntityRepository
{ if (column == null) { throw new IllegalArgumentException("Invalid column name " + columnProfile.getName()); } - ColumnProfile storedColumnProfile = - JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestamp( - column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION, columnProfile.getTimestamp()), - ColumnProfile.class); - - if (storedColumnProfile != null) { - daoCollection - .entityExtensionTimeSeriesDao() - .update( - column.getFullyQualifiedName(), - TABLE_COLUMN_PROFILE_EXTENSION, - JsonUtils.pojoToJson(columnProfile), - storedColumnProfile.getTimestamp()); - } else { - daoCollection - .entityExtensionTimeSeriesDao() - .insert( - column.getFullyQualifiedName(), - TABLE_COLUMN_PROFILE_EXTENSION, - "columnProfile", - JsonUtils.pojoToJson(columnProfile)); - } + String storedColumnProfile = + getExtensionAtTimestamp( + column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION, columnProfile.getTimestamp()); + storeTimeSeries( + column.getFullyQualifiedName(), + TABLE_COLUMN_PROFILE_EXTENSION, + "columnProfile", + JsonUtils.pojoToJson(columnProfile), + columnProfile.getTimestamp(), + storedColumnProfile != null); } List systemProfiles = createTableProfile.getSystemProfile(); if (systemProfiles != null && !systemProfiles.isEmpty()) { for (SystemProfile systemProfile : createTableProfile.getSystemProfile()) { - SystemProfile storedSystemProfile = - JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestampWithOperation( - table.getFullyQualifiedName(), - SYSTEM_PROFILE_EXTENSION, - systemProfile.getTimestamp(), - systemProfile.getOperation().value()), - SystemProfile.class); - if (storedSystemProfile != null) { - daoCollection - .entityExtensionTimeSeriesDao() - .updateExtensionByOperation( - table.getFullyQualifiedName(), - SYSTEM_PROFILE_EXTENSION, - JsonUtils.pojoToJson(systemProfile), - storedSystemProfile.getTimestamp(), - storedSystemProfile.getOperation().value()); - } else { - daoCollection - .entityExtensionTimeSeriesDao() - .insert( - table.getFullyQualifiedName(), - SYSTEM_PROFILE_EXTENSION, - "systemProfile", - JsonUtils.pojoToJson(systemProfile)); - } + String storedSystemProfile = + getExtensionAtTimestampWithOperation( + table.getFullyQualifiedName(), + SYSTEM_PROFILE_EXTENSION, + systemProfile.getTimestamp(), + systemProfile.getOperation().value()); + storeTimeSeriesWithOperation( + table.getFullyQualifiedName(), + SYSTEM_PROFILE_EXTENSION, + "systemProfile", + JsonUtils.pojoToJson(systemProfile), + systemProfile.getTimestamp(), + systemProfile.getOperation().value(), + storedSystemProfile != null); } } @@ -452,13 +414,11 @@ public class TableRepository extends EntityRepository
{ } TableProfile storedTableProfile = - JsonUtils.readValue( - daoCollection.entityExtensionTimeSeriesDao().getExtensionAtTimestamp(fqn, extension, timestamp), - TableProfile.class); + JsonUtils.readValue(getExtensionAtTimestamp(fqn, extension, timestamp), TableProfile.class); if (storedTableProfile == null) { throw new EntityNotFoundException(String.format("Failed to find table profile for %s at %s", fqn, timestamp)); } - daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqn, extension, timestamp); + deleteExtensionAtTimestamp(fqn, extension, timestamp); } @Transaction @@ -466,10 +426,7 @@ public class TableRepository extends EntityRepository
{ List<TableProfile> tableProfiles; tableProfiles = JsonUtils.readObjects( - daoCollection - .entityExtensionTimeSeriesDao() - .listBetweenTimestamps(fqn, TABLE_PROFILE_EXTENSION, startTs, endTs), - TableProfile.class); + getResultsFromAndToTimestamps(fqn, TABLE_PROFILE_EXTENSION, startTs, endTs), TableProfile.class); return new ResultList<>(tableProfiles, startTs.toString(), endTs.toString(), tableProfiles.size()); } @@ -478,10 +435,7 @@ public class TableRepository extends EntityRepository<Table>
{ List columnProfiles; columnProfiles = JsonUtils.readObjects( - daoCollection - .entityExtensionTimeSeriesDao() - .listBetweenTimestamps(fqn, TABLE_COLUMN_PROFILE_EXTENSION, startTs, endTs), - ColumnProfile.class); + getResultsFromAndToTimestamps(fqn, TABLE_COLUMN_PROFILE_EXTENSION, startTs, endTs), ColumnProfile.class); return new ResultList<>(columnProfiles, startTs.toString(), endTs.toString(), columnProfiles.size()); } @@ -490,10 +444,7 @@ public class TableRepository extends EntityRepository
{ List systemProfiles; systemProfiles = JsonUtils.readObjects( - daoCollection - .entityExtensionTimeSeriesDao() - .listBetweenTimestamps(fqn, SYSTEM_PROFILE_EXTENSION, startTs, endTs), - SystemProfile.class); + getResultsFromAndToTimestamps(fqn, SYSTEM_PROFILE_EXTENSION, startTs, endTs), SystemProfile.class); return new ResultList<>(systemProfiles, startTs.toString(), endTs.toString(), systemProfiles.size()); } @@ -501,9 +452,7 @@ public class TableRepository extends EntityRepository
{ for (Column column : columnList) { ColumnProfile columnProfile = JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getLatestExtension(column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION), + getLatestExtensionFromTimeseries(column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION), ColumnProfile.class); column.setProfile(columnProfile); if (column.getChildren() != null) { @@ -517,9 +466,7 @@ public class TableRepository extends EntityRepository
{ Table table = dao.findEntityByName(fqn); TableProfile tableProfile = JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getLatestExtension(table.getFullyQualifiedName(), TABLE_PROFILE_EXTENSION), + getLatestExtensionFromTimeseries(table.getFullyQualifiedName(), TABLE_PROFILE_EXTENSION), TableProfile.class); table.setProfile(tableProfile); setColumnProfile(table.getColumns()); @@ -629,11 +576,9 @@ public class TableRepository extends EntityRepository
{ stored.setTags(modelColumn.getTags()); } applyTags(table.getColumns()); - dao.update(table.getId(), JsonUtils.pojoToJson(table)); - + dao.update(table.getId(), FullyQualifiedName.buildHash(table.getFullyQualifiedName()), JsonUtils.pojoToJson(table)); setFieldsInternal(table, new Fields(List.of(FIELD_OWNER), FIELD_OWNER)); setFieldsInternal(table, new Fields(List.of(FIELD_TAGS), FIELD_TAGS)); - return table; } @@ -843,8 +788,8 @@ public class TableRepository extends EntityRepository
{ daoCollection .fieldRelationshipDAO() .find( - fromEntityFQN, - toEntityFQN, + FullyQualifiedName.buildHash(fromEntityFQN), + FullyQualifiedName.buildHash(toEntityFQN), entityRelationType, entityRelationType, Relationship.JOINED_WITH.ordinal())) @@ -858,6 +803,8 @@ public class TableRepository extends EntityRepository
{ daoCollection .fieldRelationshipDAO() .upsert( + FullyQualifiedName.buildHash(fromEntityFQN), + FullyQualifiedName.buildHash(toEntityFQN), fromEntityFQN, toEntityFQN, entityRelationType, @@ -913,7 +860,7 @@ public class TableRepository extends EntityRepository
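The field_relationship calls above now pass the from/to hashes as the leading arguments while still storing the original FQNs, so the bounded-length hashes can back the primary key and indexes and the readable values stay available in the row. A compact in-memory sketch of that dual-column record; the hash function is again a stand-in for FullyQualifiedName.buildHash:

import java.util.HashMap;
import java.util.Map;
import java.util.function.UnaryOperator;

class FieldRelationshipRowSketch {
  static final class Row {
    final String fromFQN;
    final String toFQN;
    final int relation;
    Row(String fromFQN, String toFQN, int relation) {
      this.fromFQN = fromFQN;
      this.toFQN = toFQN;
      this.relation = relation;
    }
  }

  private final Map<String, Row> table = new HashMap<>();
  private final UnaryOperator<String> hash; // stand-in for FullyQualifiedName::buildHash

  FieldRelationshipRowSketch(UnaryOperator<String> hash) {
    this.hash = hash;
  }

  // Mirrors upsert(fromFQNHash, toFQNHash, fromFQN, toFQN, ...): hashes form the key, raw FQNs ride along.
  void upsert(String fromFQN, String toFQN, int relation) {
    table.put(hash.apply(fromFQN) + "->" + hash.apply(toFQN) + "#" + relation, new Row(fromFQN, toFQN, relation));
  }

  Row find(String fromFQN, String toFQN, int relation) {
    return table.get(hash.apply(fromFQN) + "->" + hash.apply(toFQN) + "#" + relation);
  }

  public static void main(String[] args) {
    FieldRelationshipRowSketch dao = new FieldRelationshipRowSketch(s -> Integer.toHexString(s.hashCode()));
    dao.upsert("db.table1.colA", "db.table2.colB", 12); // 12 is an illustrative relation ordinal
    System.out.println(dao.find("db.table1.colA", "db.table2.colB", 12).toFQN);
  }
}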
{ List>> entityRelations = daoCollection.fieldRelationshipDAO() .listBidirectional( - table.getFullyQualifiedName(), + FullyQualifiedName.buildHash(table.getFullyQualifiedName()), FIELD_RELATION_TABLE_TYPE, FIELD_RELATION_TABLE_TYPE, Relationship.JOINED_WITH.ordinal()) @@ -935,7 +882,7 @@ public class TableRepository extends EntityRepository
{ List>> entityRelations = daoCollection.fieldRelationshipDAO() .listBidirectionalByPrefix( - table.getFullyQualifiedName(), + FullyQualifiedName.buildHash(table.getFullyQualifiedName()), FIELD_RELATION_COLUMN_TYPE, FIELD_RELATION_COLUMN_TYPE, Relationship.JOINED_WITH.ordinal()) diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TagRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TagRepository.java index a1edeb87026..09ca0451f02 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TagRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TagRepository.java @@ -85,6 +85,11 @@ public class TagRepository extends EntityRepository { } } + @Override + public String getFullyQualifiedNameHash(Tag tag) { + return FullyQualifiedName.buildHash(tag.getFullyQualifiedName()); + } + @Override public EntityRepository.EntityUpdater getUpdater(Tag original, Tag updated, Operation operation) { return new TagUpdater(original, updated, operation); @@ -104,7 +109,9 @@ public class TagRepository extends EntityRepository { } private Integer getUsageCount(Tag tag) { - return daoCollection.tagUsageDAO().getTagCount(TagSource.CLASSIFICATION.ordinal(), tag.getFullyQualifiedName()); + return daoCollection + .tagUsageDAO() + .getTagCount(TagSource.CLASSIFICATION.ordinal(), FullyQualifiedName.buildHash(tag.getFullyQualifiedName())); } private List getChildren(Tag entity) throws IOException { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TeamRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TeamRepository.java index bc8f76d18f5..2957e46d724 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TeamRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TeamRepository.java @@ -17,6 +17,7 @@ import static org.openmetadata.common.utils.CommonUtil.listOrEmpty; import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty; import static org.openmetadata.csv.CsvUtil.addEntityReferences; import static org.openmetadata.csv.CsvUtil.addField; +import static org.openmetadata.csv.CsvUtil.addUserOwner; import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.BUSINESS_UNIT; import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.DEPARTMENT; import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.DIVISION; @@ -49,10 +50,10 @@ import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.Collectors; +import javax.ws.rs.core.UriInfo; import lombok.extern.slf4j.Slf4j; import org.apache.commons.csv.CSVPrinter; import org.apache.commons.csv.CSVRecord; -import org.openmetadata.csv.CsvUtil; import org.openmetadata.csv.EntityCsv; import org.openmetadata.schema.api.teams.CreateTeam.TeamType; import org.openmetadata.schema.entity.teams.Team; @@ -64,6 +65,7 @@ import org.openmetadata.schema.type.csv.CsvDocumentation; import org.openmetadata.schema.type.csv.CsvErrorType; import org.openmetadata.schema.type.csv.CsvHeader; import org.openmetadata.schema.type.csv.CsvImportResult; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.exception.EntityNotFoundException; import org.openmetadata.service.jdbi3.CollectionDAO.EntityRelationshipRecord; @@ -104,6 +106,11 @@ public class TeamRepository extends EntityRepository { return team; } + @Override + public Team 
getByName(UriInfo uriInfo, String name, Fields fields) throws IOException { + return super.getByName(uriInfo, EntityInterfaceUtil.quoteName(name), fields); + } + @Override public void restorePatchAttributes(Team original, Team updated) { // Patch can't make changes to following fields. Ignore the changes @@ -558,7 +565,7 @@ public class TeamRepository extends EntityRepository { } // Field 6 - Owner - importedTeam.setOwner(getEntityReference(printer, csvRecord, 5, Entity.USER)); + importedTeam.setOwner(getOwnerAsUser(printer, csvRecord, 5)); if (!processRecord) { return null; } @@ -585,7 +592,7 @@ public class TeamRepository extends EntityRepository { addField(recordList, entity.getDescription()); addField(recordList, entity.getTeamType().value()); addEntityReferences(recordList, entity.getParents()); - CsvUtil.addEntityReference(recordList, entity.getOwner()); + addUserOwner(recordList, entity.getOwner()); addField(recordList, entity.getIsJoinable()); addEntityReferences(recordList, entity.getDefaultRoles()); addEntityReferences(recordList, entity.getPolicies()); @@ -593,7 +600,7 @@ public class TeamRepository extends EntityRepository { } private void getParents(CSVPrinter printer, CSVRecord csvRecord, Team importedTeam) throws IOException { - List parentRefs = getEntityReferences(printer, csvRecord, 4, Entity.TEAM); + List parentRefs = getUserOrTeamEntityReferences(printer, csvRecord, 4, Entity.TEAM); // Validate team being created is under the hierarchy of the team for which CSV is being imported to for (EntityReference parentRef : listOrEmpty(parentRefs)) { diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestCaseRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestCaseRepository.java index 847898beaf8..80ee3a21af2 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestCaseRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TestCaseRepository.java @@ -35,6 +35,7 @@ import org.openmetadata.schema.type.EventType; import org.openmetadata.schema.type.FieldChange; import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.Relationship; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.exception.EntityNotFoundException; import org.openmetadata.service.resources.feeds.MessageParser.EntityLink; @@ -71,7 +72,7 @@ public class TestCaseRepository extends EntityRepository { JsonUtils.readValue( daoCollection .entityExtensionTimeSeriesDao() - .getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp), + .getExtensionAtTimestamp(FullyQualifiedName.buildHash(fqn), TESTCASE_RESULT_EXTENSION, timestamp), TestCaseResult.class); TestCaseResult updated = JsonUtils.applyPatch(original, patch, TestCaseResult.class); @@ -81,7 +82,8 @@ public class TestCaseRepository extends EntityRepository { updated.getTestCaseFailureStatus().setUpdatedAt(System.currentTimeMillis()); daoCollection .entityExtensionTimeSeriesDao() - .update(fqn, TESTCASE_RESULT_EXTENSION, JsonUtils.pojoToJson(updated), timestamp); + .update( + FullyQualifiedName.buildHash(fqn), TESTCASE_RESULT_EXTENSION, JsonUtils.pojoToJson(updated), timestamp); change = ENTITY_UPDATED; } return new RestUtil.PatchResponse<>(Response.Status.OK, updated, change); @@ -90,10 +92,17 @@ public class TestCaseRepository extends EntityRepository { @Override public void setFullyQualifiedName(TestCase test) { EntityLink entityLink = 
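EntityInterfaceUtil.quoteName appears here and throughout the remaining resources wherever a raw user or entity name feeds into an FQN or a by-name lookup. Its body is not part of these hunks, so the sketch below is only a guess at the intent: protect names containing the FQN separator (or quotes) so they survive FullyQualifiedName.add and later splitting. The quoting rule shown is hypothetical:

class QuoteNameSketch {
  // Hypothetical rule: wrap a name in double quotes when it contains characters that
  // would otherwise be read as FQN structure.
  static String quoteName(String name) {
    if (name == null || !(name.contains(".") || name.contains("\""))) {
      return name;
    }
    return "\"" + name.replace("\"", "\\\"") + "\"";
  }

  static String addToFqn(String parentFqn, String name) {
    return parentFqn + "." + quoteName(name);
  }

  public static void main(String[] args) {
    // Without quoting, the dots in the test name would create bogus extra FQN levels.
    System.out.println(addToFqn("db.table1.columnA", "unit.test.case"));
    // db.table1.columnA."unit.test.case"
  }
}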
EntityLink.parse(test.getEntityLink()); - test.setFullyQualifiedName(FullyQualifiedName.add(entityLink.getFullyQualifiedFieldValue(), test.getName())); + test.setFullyQualifiedName( + FullyQualifiedName.add( + entityLink.getFullyQualifiedFieldValue(), EntityInterfaceUtil.quoteName(test.getName()))); test.setEntityFQN(entityLink.getFullyQualifiedFieldValue()); } + @Override + public String getFullyQualifiedNameHash(TestCase test) { + return FullyQualifiedName.buildHash(test.getFullyQualifiedName()); + } + @Override public void prepare(TestCase test) throws IOException { EntityLink entityLink = EntityLink.parse(test.getEntityLink()); @@ -171,31 +180,19 @@ public class TestCaseRepository extends EntityRepository { // Validate the request content TestCase testCase = dao.findEntityByName(fqn); - TestCaseResult storedTestCaseResult = - JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestamp( - testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION, testCaseResult.getTimestamp()), - TestCaseResult.class); - if (storedTestCaseResult != null) { - daoCollection - .entityExtensionTimeSeriesDao() - .update( - testCase.getFullyQualifiedName(), - TESTCASE_RESULT_EXTENSION, - JsonUtils.pojoToJson(testCaseResult), - testCaseResult.getTimestamp()); - } else { - daoCollection - .entityExtensionTimeSeriesDao() - .insert( - testCase.getFullyQualifiedName(), - TESTCASE_RESULT_EXTENSION, - TEST_CASE_RESULT_FIELD, - JsonUtils.pojoToJson(testCaseResult)); - } - setFieldsInternal(testCase, new Fields(allowedFields, TEST_SUITE_FIELD)); + String storedTestCaseResult = + getExtensionAtTimestamp( + testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION, testCaseResult.getTimestamp()); + + storeTimeSeries( + testCase.getFullyQualifiedName(), + TESTCASE_RESULT_EXTENSION, + "testCaseResult", + JsonUtils.pojoToJson(testCaseResult), + testCaseResult.getTimestamp(), + storedTestCaseResult != null); + + setFieldsInternal(testCase, new EntityUtil.Fields(allowedFields, "testSuite")); ChangeDescription change = addTestCaseChangeDescription(testCase.getVersion(), testCaseResult, storedTestCaseResult); ChangeEvent changeEvent = @@ -209,13 +206,10 @@ public class TestCaseRepository extends EntityRepository { // Validate the request content TestCase testCase = dao.findEntityByName(fqn); TestCaseResult storedTestCaseResult = - JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp), - TestCaseResult.class); + JsonUtils.readValue(getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp), TestCaseResult.class); + if (storedTestCaseResult != null) { - daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp); + deleteExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp); testCase.setTestCaseResult(storedTestCaseResult); ChangeDescription change = deleteTestCaseChangeDescription(testCase.getVersion(), storedTestCaseResult); ChangeEvent changeEvent = getChangeEvent(updatedBy, testCase, change, entityType, testCase.getVersion()); @@ -257,9 +251,7 @@ public class TestCaseRepository extends EntityRepository { private TestCaseResult getTestCaseResult(TestCase testCase) throws IOException { return JsonUtils.readValue( - daoCollection - .entityExtensionTimeSeriesDao() - .getLatestExtension(testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION), + getLatestExtensionFromTimeseries(testCase.getFullyQualifiedName(), 
TESTCASE_RESULT_EXTENSION), TestCaseResult.class); } @@ -267,11 +259,7 @@ public class TestCaseRepository extends EntityRepository { List testCaseResults; testCaseResults = JsonUtils.readObjects( - daoCollection - .entityExtensionTimeSeriesDao() - .listBetweenTimestamps(fqn, TESTCASE_RESULT_EXTENSION, startTs, endTs), - TestCaseResult.class); - + getResultsFromAndToTimestamps(fqn, TESTCASE_RESULT_EXTENSION, startTs, endTs), TestCaseResult.class); return new ResultList<>(testCaseResults, String.valueOf(startTs), String.valueOf(endTs), testCaseResults.size()); } @@ -320,12 +308,17 @@ public class TestCaseRepository extends EntityRepository { public TestSummary getTestSummary() throws IOException { List testCases = listAll(Fields.EMPTY_FIELDS, new ListFilter()); - List testCaseFQNs = testCases.stream().map(TestCase::getFullyQualifiedName).collect(Collectors.toList()); + List testCaseFQNHashes = + testCases.stream() + .map(testCase -> FullyQualifiedName.buildHash(testCase.getFullyQualifiedName())) + .collect(Collectors.toList()); - if (testCaseFQNs.isEmpty()) return new TestSummary(); + if (testCaseFQNHashes.isEmpty()) return new TestSummary(); List jsonList = - daoCollection.entityExtensionTimeSeriesDao().getLatestExtensionByFQNs(testCaseFQNs, TESTCASE_RESULT_EXTENSION); + daoCollection + .entityExtensionTimeSeriesDao() + .getLatestExtensionByFQNs(testCaseFQNHashes, TESTCASE_RESULT_EXTENSION); HashMap testCaseSummary = new HashMap<>(); for (String json : jsonList) { @@ -338,7 +331,7 @@ public class TestCaseRepository extends EntityRepository { .withAborted(testCaseSummary.getOrDefault(TestCaseStatus.Aborted.toString(), 0)) .withFailed(testCaseSummary.getOrDefault(TestCaseStatus.Failed.toString(), 0)) .withSuccess(testCaseSummary.getOrDefault(TestCaseStatus.Success.toString(), 0)) - .withTotal(testCaseFQNs.size()); + .withTotal(testCaseFQNHashes.size()); } @Override diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TopicRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TopicRepository.java index 918828423bd..d2dae5d6eb8 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TopicRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TopicRepository.java @@ -51,12 +51,17 @@ public class TopicRepository extends EntityRepository { @Override public void setFullyQualifiedName(Topic topic) { - topic.setFullyQualifiedName(FullyQualifiedName.add(topic.getService().getName(), topic.getName())); + topic.setFullyQualifiedName(FullyQualifiedName.add(topic.getService().getFullyQualifiedName(), topic.getName())); if (topic.getMessageSchema() != null) { setFieldFQN(topic.getFullyQualifiedName(), topic.getMessageSchema().getSchemaFields()); } } + @Override + public String getFullyQualifiedNameHash(Topic topic) { + return FullyQualifiedName.buildHash(topic.getFullyQualifiedName()); + } + public TopicRepository(CollectionDAO dao) { super( TopicResource.COLLECTION_PATH, diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TypeRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TypeRepository.java index 4d2d31c3d69..ef725c1257e 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TypeRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/TypeRepository.java @@ -40,6 +40,7 @@ import org.openmetadata.service.TypeRegistry; import org.openmetadata.service.resources.types.TypeResource; 
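The reworked getTestSummary above pulls the latest result per test case by FQN hash and folds the statuses into counts. A self-contained sketch of just that aggregation step, with a simplified status enum standing in for TestCaseResult:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class TestSummarySketch {
  enum Status { Success, Failed, Aborted }

  static Map<Status, Integer> summarize(List<Status> latestResults) {
    Map<Status, Integer> counts = new HashMap<>();
    for (Status status : latestResults) {
      // Same shape as the testCaseSummary map above: status -> number of test cases currently in that state.
      counts.merge(status, 1, Integer::sum);
    }
    return counts;
  }

  public static void main(String[] args) {
    List<Status> latest = List.of(Status.Success, Status.Success, Status.Failed, Status.Aborted);
    Map<Status, Integer> summary = summarize(latest);
    System.out.println(summary.getOrDefault(Status.Success, 0) + " passed, "
        + summary.getOrDefault(Status.Failed, 0) + " failed, "
        + summary.getOrDefault(Status.Aborted, 0) + " aborted, total " + latest.size());
  }
}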
import org.openmetadata.service.util.EntityUtil; import org.openmetadata.service.util.EntityUtil.Fields; +import org.openmetadata.service.util.FullyQualifiedName; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.RestUtil.PutResponse; @@ -129,7 +130,10 @@ public class TypeRepository extends EntityRepository { daoCollection .fieldRelationshipDAO() .listToByPrefix( - getCustomPropertyFQNPrefix(type.getName()), Entity.TYPE, Entity.TYPE, Relationship.HAS.ordinal()); + FullyQualifiedName.buildHash(getCustomPropertyFQNPrefix(type.getName())), + Entity.TYPE, + Entity.TYPE, + Relationship.HAS.ordinal()); for (Triple result : results) { CustomProperty property = JsonUtils.readValue(result.getRight(), CustomProperty.class); property.setPropertyType(dao.findEntityReferenceByName(result.getMiddle())); @@ -188,6 +192,8 @@ public class TypeRepository extends EntityRepository { daoCollection .fieldRelationshipDAO() .insert( + FullyQualifiedName.buildHash(customPropertyFQN), + FullyQualifiedName.buildHash(property.getPropertyType().getName()), customPropertyFQN, property.getPropertyType().getName(), Entity.TYPE, @@ -206,8 +212,8 @@ public class TypeRepository extends EntityRepository { daoCollection .fieldRelationshipDAO() .delete( - customPropertyFQN, - property.getPropertyType().getName(), + FullyQualifiedName.buildHash(customPropertyFQN), + FullyQualifiedName.buildHash(property.getPropertyType().getName()), Entity.TYPE, Entity.TYPE, Relationship.HAS.ordinal()); @@ -226,6 +232,8 @@ public class TypeRepository extends EntityRepository { daoCollection .fieldRelationshipDAO() .upsert( + FullyQualifiedName.buildHash(customPropertyFQN), + FullyQualifiedName.buildHash(updatedProperty.getPropertyType().getName()), customPropertyFQN, updatedProperty.getPropertyType().getName(), Entity.TYPE, diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/UserRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/UserRepository.java index 289e8762373..de5e84a780f 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/UserRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/UserRepository.java @@ -45,6 +45,7 @@ import org.openmetadata.schema.type.csv.CsvDocumentation; import org.openmetadata.schema.type.csv.CsvErrorType; import org.openmetadata.schema.type.csv.CsvHeader; import org.openmetadata.schema.type.csv.CsvImportResult; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.OpenMetadataApplicationConfig; import org.openmetadata.service.exception.CatalogExceptionMessage; @@ -81,6 +82,11 @@ public class UserRepository extends EntityRepository { return new Fields(tempFields, fields); } + @Override + public User getByName(UriInfo uriInfo, String name, Fields fields) throws IOException { + return super.getByName(uriInfo, EntityInterfaceUtil.quoteName(name), fields); + } + /** Ensures that the default roles are added for POST, PUT and PATCH operations. 
*/ @Override public void prepare(User user) throws IOException { @@ -281,7 +287,7 @@ public class UserRepository extends EntityRepository { } /* Get all the teams that user belongs to User entity */ - private List getTeams(User user) throws IOException { + public List getTeams(User user) throws IOException { List records = findFrom(user.getId(), USER, Relationship.HAS, Entity.TEAM); List teams = EntityUtil.populateEntityReferences(records, Entity.TEAM); teams = teams.stream().filter(team -> !team.getDeleted()).collect(Collectors.toList()); // Filter deleted teams diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/WebAnalyticEventRepository.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/WebAnalyticEventRepository.java index eaae15b0e3d..cbde2b9026c 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/WebAnalyticEventRepository.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/WebAnalyticEventRepository.java @@ -54,22 +54,19 @@ public class WebAnalyticEventRepository extends EntityRepository getWebAnalyticEventData(String eventType, Long startTs, Long endTs) @@ -77,9 +74,7 @@ public class WebAnalyticEventRepository extends EntityRepository webAnalyticEventData; webAnalyticEventData = JsonUtils.readObjects( - daoCollection - .entityExtensionTimeSeriesDao() - .listBetweenTimestamps(eventType, WEB_ANALYTICS_EVENT_DATA_EXTENSION, startTs, endTs), + getResultsFromAndToTimestamps(eventType, WEB_ANALYTICS_EVENT_DATA_EXTENSION, startTs, endTs), WebAnalyticEventData.class); return new ResultList<>( diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/bots/BotResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/bots/BotResource.java index 25c4e6c6cb9..611f3e47054 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/bots/BotResource.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/bots/BotResource.java @@ -56,6 +56,7 @@ import org.openmetadata.schema.type.EntityHistory; import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.Include; import org.openmetadata.schema.type.Relationship; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.OpenMetadataApplicationConfig; import org.openmetadata.service.exception.CatalogExceptionMessage; @@ -211,7 +212,7 @@ public class BotResource extends EntityResource { @DefaultValue("non-deleted") Include include) throws IOException { - return getByNameInternal(uriInfo, securityContext, name, "", include); + return getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), "", include); } @GET @@ -364,7 +365,7 @@ public class BotResource extends EntityResource { boolean hardDelete, @Parameter(description = "Name of the bot", schema = @Schema(type = "string")) @PathParam("name") String name) throws IOException { - return deleteByName(uriInfo, securityContext, name, true, hardDelete); + return deleteByName(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), true, hardDelete); } @PUT @@ -387,7 +388,7 @@ public class BotResource extends EntityResource { private Bot getBot(CreateBot create, String user) throws IOException { return copy(new Bot(), create, user) - .withBotUser(getEntityReference(Entity.USER, create.getBotUser())) + .withBotUser(getEntityReference(Entity.USER, 
            EntityInterfaceUtil.quoteName(create.getBotUser())))
        .withProvider(create.getProvider())
        .withFullyQualifiedName(create.getName());
   }
@@ -408,7 +409,7 @@ public class BotResource extends EntityResource {
   private Bot getBot(SecurityContext securityContext, CreateBot create) throws IOException {
     Bot bot = getBot(create, securityContext.getUserPrincipal().getName());
-    Bot originalBot = retrieveBot(bot.getName());
+    Bot originalBot = retrieveBot(EntityInterfaceUtil.quoteName(bot.getName()));
     User botUser = retrieveUser(bot);
     if (botUser != null && !Boolean.TRUE.equals(botUser.getIsBot())) {
       throw new IllegalArgumentException(String.format("User [%s] is not a bot user", botUser.getName()));
     }
@@ -430,7 +431,11 @@ public class BotResource extends EntityResource {
   private User retrieveUser(Bot bot) {
     // TODO fix this code - don't depend on exception
     try {
-      return Entity.getEntity(bot.getBotUser(), "", Include.NON_DELETED);
+      return Entity.getEntityByName(
+          Entity.USER,
+          EntityInterfaceUtil.quoteName(bot.getBotUser().getFullyQualifiedName()),
+          "",
+          Include.NON_DELETED);
     } catch (Exception exception) {
       return null;
     }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/dqtests/TestCaseResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/dqtests/TestCaseResource.java
index e38bc43ae9d..9685e651520 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/dqtests/TestCaseResource.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/dqtests/TestCaseResource.java
@@ -48,6 +48,7 @@ import org.openmetadata.schema.type.Column;
 import org.openmetadata.schema.type.EntityHistory;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.MetadataOperation;
+import org.openmetadata.schema.utils.EntityInterfaceUtil;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.jdbi3.CollectionDAO;
 import org.openmetadata.service.jdbi3.ListFilter;
@@ -715,6 +716,13 @@ public class TestCaseResource extends EntityResource {
         .withQuery(create.getQuery())
         .withDuration(create.getDuration())
         .withVotes(new Votes().withUpVotes(0).withDownVotes(0))
-        .withUsers(getEntityReferences(USER, create.getUsers()))
+        .withUsers(
+            getEntityReferences(
+                USER,
+                create.getUsers() == null
+                    ? create.getUsers()
+                    : create.getUsers().stream().map(EntityInterfaceUtil::quoteName).collect(Collectors.toList())))
         .withQueryUsedIn(EntityUtil.populateEntityReferences(create.getQueryUsedIn()))
         .withQueryDate(create.getQueryDate());
   }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/search/SearchResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/search/SearchResource.java
index 752b709ccf8..73653af1112 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/search/SearchResource.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/search/SearchResource.java
@@ -88,6 +88,7 @@ import org.openmetadata.service.jdbi3.CollectionDAO;
 import org.openmetadata.service.resources.Collection;
 import org.openmetadata.service.security.Authorizer;
 import org.openmetadata.service.util.ElasticSearchClientUtils;
+import org.openmetadata.service.util.FullyQualifiedName;
 import org.openmetadata.service.util.JsonUtils;
 import org.openmetadata.service.util.ReIndexingHandler;
@@ -463,7 +464,8 @@ public class SearchResource {
     String jobRecord;
     jobRecord =
         dao.entityExtensionTimeSeriesDao()
-            .getLatestExtension(ELASTIC_SEARCH_ENTITY_FQN_STREAM, ELASTIC_SEARCH_EXTENSION);
+            .getLatestExtension(
+                FullyQualifiedName.buildHash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION);
     if (jobRecord != null) {
       return Response.status(Response.Status.OK)
           .entity(JsonUtils.readValue(jobRecord, EventPublisherJob.class))
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/TagLabelCache.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/TagLabelCache.java
index d4efffb5119..a41a37c0f50 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/TagLabelCache.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/tags/TagLabelCache.java
@@ -143,7 +143,7 @@ public class TagLabelCache {
   /** Returns true if the parent of the tag label is mutually exclusive */
   public boolean mutuallyExclusive(TagLabel label) {
     String[] fqnParts = FullyQualifiedName.split(label.getTagFQN());
-    String parentFqn = FullyQualifiedName.getParent(fqnParts);
+    String parentFqn = FullyQualifiedName.getParentFQN(fqnParts);
     boolean rootParent = fqnParts.length == 2;
     if (label.getSource() == TagSource.CLASSIFICATION) {
       return rootParent
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java
index b42f8860046..6b449948a56 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/TeamResource.java
@@ -59,6 +59,7 @@ import org.openmetadata.schema.type.EntityHistory;
 import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.MetadataOperation;
 import org.openmetadata.schema.type.csv.CsvImportResult;
+import org.openmetadata.schema.utils.EntityInterfaceUtil;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.OpenMetadataApplicationConfig;
 import org.openmetadata.service.jdbi3.CollectionDAO;
@@ -433,7 +434,7 @@ public class TeamResource extends EntityResource {
           boolean hardDelete,
       @Parameter(description = "Name of the team", schema = @Schema(type = "string")) @PathParam("name") String name)
       throws IOException {
-    return deleteByName(uriInfo, securityContext, name, false, hardDelete);
+    return deleteByName(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), false, hardDelete);
   }
 
   @PUT
@@ -527,4 +528,11 @@ public class TeamResource extends EntityResource {
         .withPolicies(EntityUtil.toEntityReferences(ct.getPolicies(), Entity.POLICY))
         .withEmail(ct.getEmail());
   }
+
+  @Override
+  public Team getByNameInternal(
+      UriInfo uriInfo, SecurityContext securityContext, String name, String fieldsParam, Include include)
+      throws IOException {
+    return super.getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), fieldsParam, include);
+  }
 }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java
index fe5034a4298..103a4572f57 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/resources/teams/UserResource.java
@@ -105,6 +105,7 @@ import org.openmetadata.schema.type.MetadataOperation;
 import org.openmetadata.schema.type.ProviderType;
 import org.openmetadata.schema.type.Relationship;
 import org.openmetadata.schema.type.csv.CsvImportResult;
+import org.openmetadata.schema.utils.EntityInterfaceUtil;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.OpenMetadataApplicationConfig;
 import org.openmetadata.service.auth.JwtResponse;
@@ -363,7 +364,7 @@ public class UserResource extends EntityResource {
           @DefaultValue("non-deleted")
           Include include)
       throws IOException {
-    User user = getByNameInternal(uriInfo, securityContext, name, fieldsParam, include);
+    User user = getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), fieldsParam, include);
     decryptOrNullify(securityContext, user);
     return user;
   }
@@ -797,7 +798,7 @@ public class UserResource extends EntityResource {
           boolean hardDelete,
      @Parameter(description = "Name of the user", schema = @Schema(type = "string")) @PathParam("name") String name)
       throws IOException {
-    return deleteByName(uriInfo, securityContext, name, false, hardDelete);
+    return deleteByName(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), false, hardDelete);
   }
 
   @PUT
@@ -1409,4 +1410,11 @@ public class UserResource extends EntityResource {
       }
     }
   }
+
+  @Override
+  public User getByNameInternal(
+      UriInfo uriInfo, SecurityContext securityContext, String name, String fieldsParam, Include include)
+      throws IOException {
+    return super.getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), fieldsParam, include);
+  }
 }
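The Team and User endpoints above route every caller-supplied name through EntityInterfaceUtil.quoteName before the by-name lookup, because the repositories now resolve entities by quoted FQN parts. A minimal sketch of that forwarding pattern, assuming the quoteName helper added in openmetadata-spec at the end of this patch; the NameLookup interface and the sample name are made up for illustration only:

// Sketch only: quote the raw path parameter once, at the REST edge,
// so repository and cache layers only ever see quoted FQN parts.
import org.openmetadata.schema.utils.EntityInterfaceUtil;

public class ByNameLookupSketch {
  interface NameLookup<T> {
    T getByName(String fqn);
  }

  static <T> T lookup(NameLookup<T> repository, String rawPathParam) {
    // "john.doe" becomes "\"john.doe\"" here; a dot-free name passes through unchanged.
    return repository.getByName(EntityInterfaceUtil.quoteName(rawPathParam));
  }

  public static void main(String[] args) {
    // Identity lookup just echoes the quoted name so the effect is visible.
    System.out.println(lookup(fqn -> fqn, "john.doe")); // prints "john.doe" wrapped in quotes
  }
}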
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/security/DefaultAuthorizer.java b/openmetadata-service/src/main/java/org/openmetadata/service/security/DefaultAuthorizer.java
index 8c01fd0a231..f1bae44b31e 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/security/DefaultAuthorizer.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/security/DefaultAuthorizer.java
@@ -23,6 +23,7 @@ import lombok.extern.slf4j.Slf4j;
 import org.jdbi.v3.core.Jdbi;
 import org.openmetadata.schema.type.EntityReference;
 import org.openmetadata.schema.type.ResourcePermission;
+import org.openmetadata.schema.utils.EntityInterfaceUtil;
 import org.openmetadata.service.OpenMetadataApplicationConfig;
 import org.openmetadata.service.security.policyevaluator.OperationContext;
 import org.openmetadata.service.security.policyevaluator.PolicyEvaluator;
@@ -116,7 +117,7 @@ public class DefaultAuthorizer implements Authorizer {
   }
 
   public static SubjectContext getSubjectContext(String userName) {
-    return SubjectCache.getInstance().getSubjectContext(userName);
+    return SubjectCache.getInstance().getSubjectContext(EntityInterfaceUtil.quoteName(userName));
   }
 
   private SubjectContext changeSubjectContext(String user, SubjectContext loggedInUser) {
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/security/policyevaluator/SubjectCache.java b/openmetadata-service/src/main/java/org/openmetadata/service/security/policyevaluator/SubjectCache.java
index a149246c3b4..d6fa37ad8c1 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/security/policyevaluator/SubjectCache.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/security/policyevaluator/SubjectCache.java
@@ -33,6 +33,7 @@ import lombok.extern.slf4j.Slf4j;
 import org.openmetadata.schema.entity.teams.Team;
 import org.openmetadata.schema.entity.teams.User;
 import org.openmetadata.schema.type.EntityReference;
+import org.openmetadata.schema.utils.EntityInterfaceUtil;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.exception.CatalogExceptionMessage;
 import org.openmetadata.service.exception.EntityNotFoundException;
@@ -213,7 +214,7 @@ public class SubjectCache {
   static class UserLoader extends CacheLoader {
     @Override
     public SubjectContext load(@CheckForNull String userName) throws IOException {
-      User user = USER_REPOSITORY.getByName(null, userName, USER_FIELDS);
+      User user = USER_REPOSITORY.getByName(null, EntityInterfaceUtil.quoteName(userName), USER_FIELDS);
       LOG.info("Loaded user {}:{}", user.getName(), user.getId());
       return new SubjectContext(user);
     }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/EntityUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/EntityUtil.java
index 08ef7efbe2a..33e51cd8b1e 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/util/EntityUtil.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/EntityUtil.java
@@ -17,6 +17,7 @@ import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
 import static org.openmetadata.schema.type.Include.ALL;
 
 import java.io.IOException;
+import java.security.MessageDigest;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -30,7 +31,9 @@ import javax.ws.rs.WebApplicationException;
 import lombok.Getter;
 import lombok.NonNull;
 import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.codec.binary.Hex;
 import org.openmetadata.common.utils.CommonUtil;
 import org.openmetadata.schema.EntityInterface;
 import org.openmetadata.schema.api.data.TermReference;
@@ -515,6 +518,15 @@ public final class EntityUtil {
     }
   }
 
+  @SneakyThrows
+  public static String hash(String input) {
+    if (input != null) {
+      byte[] checksum = MessageDigest.getInstance("MD5").digest(input.getBytes());
+      return Hex.encodeHexString(checksum);
+    }
+    return input;
+  }
+
   public static boolean isDescriptionTask(TaskType taskType) {
     return taskType == TaskType.RequestDescription || taskType == TaskType.UpdateDescription;
   }
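EntityUtil.hash above is the single digest helper the rest of this patch keys on: the DAOs store and look up rows by the MD5 hex of a (quoted) name or FQN instead of the raw string, which keeps index keys at a fixed 32 characters. A standalone sketch of the same computation, assuming commons-codec is on the classpath as it is for this module; the FQN is made up, and MD5 is used only as a compact index key, not for any security purpose:

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.apache.commons.codec.binary.Hex;

public class FqnHashSketch {
  // Mirrors EntityUtil.hash(String): MD5 digest of the input, rendered as lowercase hex.
  static String hash(String input) throws NoSuchAlgorithmException {
    if (input == null) {
      return null;
    }
    byte[] checksum = MessageDigest.getInstance("MD5").digest(input.getBytes());
    return Hex.encodeHexString(checksum);
  }

  public static void main(String[] args) throws NoSuchAlgorithmException {
    // However long the FQN grows, the key printed here is always 32 hex characters,
    // which is what lets the migration replace FQN-sized unique indexes with hash columns.
    System.out.println(hash("mysql.sample_db.sample_schema.sample_table"));
  }
}

The ReIndexingHandler and SearchIndexWorkflow changes further down apply the same helper to the reindexing job id before reading or writing entity_extension_time_series rows, so writer and reader always agree on the key.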
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/FullyQualifiedName.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/FullyQualifiedName.java
index 15da02483c1..630054a1177 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/util/FullyQualifiedName.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/FullyQualifiedName.java
@@ -52,6 +52,22 @@ public class FullyQualifiedName {
     return String.join(Entity.SEPARATOR, list);
   }
 
+  public static String buildHash(String... strings) {
+    List list = new ArrayList<>();
+    for (String string : strings) {
+      list.add(EntityUtil.hash(quoteName(string)));
+    }
+    return String.join(Entity.SEPARATOR, list);
+  }
+
+  public static String buildHash(String fullyQualifiedName) {
+    if (fullyQualifiedName != null && !fullyQualifiedName.isEmpty()) {
+      String[] split = split(fullyQualifiedName);
+      return buildHash(split);
+    }
+    return fullyQualifiedName;
+  }
+
   public static String[] split(String string) {
     SplitListener listener = new SplitListener();
     walk(string, listener);
@@ -68,20 +84,21 @@
     walker.walk(listener, fqn);
   }
 
-  public static String getParent(String fqn) {
+  public static String getParentFQN(String fqn) {
     // Split fqn of format a.b.c.d and return the parent a.b.c
     String[] split = split(fqn);
-    return getParent(split);
+    return getParentFQN(split);
   }
 
-  public static String getParent(String... fqnParts) {
+  public static String getParentFQN(String... fqnParts) {
     // Fqn parts a b c d are given from fqn a.b.c.d
     if (fqnParts.length <= 1) {
       return null;
     }
     if (fqnParts.length == 2) {
-      return unquoteName(fqnParts[0]); // The root name is not quoted and only the unquoted name is returned
+      return fqnParts[0];
     }
+
     String parent = build(fqnParts[0]);
     for (int i = 1; i < fqnParts.length - 1; i++) {
       parent = add(parent, fqnParts[i]);
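buildHash composes the two helpers: each FQN part is quoted, hashed with EntityUtil.hash, and the per-part hashes are re-joined with the FQN separator, so hierarchical lookups still work on hashed columns. The getParent to getParentFQN rename also changes root handling: the first part is now returned as-is instead of being unquoted. A small sketch of the expected results, taken from the updated FullyQualifiedNameTest later in this patch; it only exercises the public API shown above:

import org.openmetadata.service.util.FullyQualifiedName;

public class GetParentFqnSketch {
  public static void main(String[] args) {
    // Expectations mirror the updated FullyQualifiedNameTest below.
    System.out.println(FullyQualifiedName.getParentFQN("a.b.c.d"));     // a.b.c
    System.out.println(FullyQualifiedName.getParentFQN("\"a.b\".c"));   // "a.b"   (quoted part kept intact)
    System.out.println(FullyQualifiedName.getParentFQN("a.b"));         // a       (root no longer unquoted)
    System.out.println(FullyQualifiedName.getParentFQN("a.\"b.c\""));   // a
    System.out.println(FullyQualifiedName.getParentFQN("a.\"b.c\".d")); // a."b.c"
    System.out.println(FullyQualifiedName.getParentFQN("a"));           // null
  }
}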
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/NotificationHandler.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/NotificationHandler.java
index 5ec49b1e7ce..a06ddfbb89c 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/util/NotificationHandler.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/NotificationHandler.java
@@ -37,6 +37,7 @@ import org.openmetadata.schema.type.AnnouncementDetails;
 import org.openmetadata.schema.type.EntityReference;
 import org.openmetadata.schema.type.Post;
 import org.openmetadata.schema.type.Relationship;
+import org.openmetadata.schema.utils.EntityInterfaceUtil;
 import org.openmetadata.service.Entity;
 import org.openmetadata.service.jdbi3.CollectionDAO;
 import org.openmetadata.service.jdbi3.UserRepository;
@@ -138,10 +139,10 @@ public class NotificationHandler {
         entityLink -> {
           String fqn = entityLink.getEntityFQN();
           if (USER.equals(entityLink.getEntityType())) {
-            User user = dao.userDAO().findEntityByName(fqn);
+            User user = dao.userDAO().findEntityByName(EntityInterfaceUtil.quoteName(fqn));
             WebSocketManager.getInstance().sendToOne(user.getId(), WebSocketManager.MENTION_CHANNEL, jsonThread);
           } else if (TEAM.equals(entityLink.getEntityType())) {
-            Team team = dao.teamDAO().findEntityByName(fqn);
+            Team team = dao.teamDAO().findEntityByName(EntityInterfaceUtil.quoteName(fqn));
             // fetch all that are there in the team
             List records = dao.relationshipDAO().findTo(team.getId().toString(), TEAM, Relationship.HAS.ordinal(), USER);
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/QueryUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/QueryUtil.java
deleted file mode 100644
index c8993ea2729..00000000000
--- a/openmetadata-service/src/main/java/org/openmetadata/service/util/QueryUtil.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package org.openmetadata.service.util;
-
-import java.security.MessageDigest;
-import lombok.SneakyThrows;
-import org.apache.commons.codec.binary.Hex;
-
-public class QueryUtil {
-
-  @SneakyThrows
-  public static String getCheckSum(String input) {
-    byte[] checksum = MessageDigest.getInstance("MD5").digest(input.getBytes());
-    return Hex.encodeHexString(checksum);
-  }
-}
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/util/ReIndexingHandler.java b/openmetadata-service/src/main/java/org/openmetadata/service/util/ReIndexingHandler.java
index 524cecde067..3e964e7fb0d 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/util/ReIndexingHandler.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/util/ReIndexingHandler.java
@@ -119,7 +119,7 @@ public class ReIndexingHandler {
     // Create Entry in the DB
     dao.entityExtensionTimeSeriesDao()
         .insert(
-            jobData.getId().toString(),
+            EntityUtil.hash(jobData.getId().toString()),
             REINDEXING_JOB_EXTENSION,
             "eventPublisherJob",
             JsonUtils.pojoToJson(jobData));
@@ -179,7 +179,8 @@ public class ReIndexingHandler {
     SearchIndexWorkflow job = REINDEXING_JOB_MAP.get(jobId);
     if (job == null) {
       String recordString =
-          dao.entityExtensionTimeSeriesDao().getLatestExtension(jobId.toString(), REINDEXING_JOB_EXTENSION);
+          dao.entityExtensionTimeSeriesDao()
+              .getLatestExtension(EntityUtil.hash(jobId.toString()), REINDEXING_JOB_EXTENSION);
       return JsonUtils.readValue(recordString, EventPublisherJob.class);
     }
     return REINDEXING_JOB_MAP.get(jobId).getJobData();
   }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/PaginatedDataInsightSource.java b/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/PaginatedDataInsightSource.java
index 5fc029deb6e..2b0aad78dd5 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/PaginatedDataInsightSource.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/PaginatedDataInsightSource.java
@@ -24,6 +24,7 @@ import org.openmetadata.schema.analytics.ReportData;
 import org.openmetadata.schema.system.StepStats;
 import org.openmetadata.service.exception.SourceException;
 import org.openmetadata.service.jdbi3.CollectionDAO;
+import org.openmetadata.service.util.EntityUtil;
 import org.openmetadata.service.util.RestUtil;
 import org.openmetadata.service.util.ResultList;
 import org.openmetadata.service.workflows.interfaces.Source;
@@ -93,7 +94,8 @@ public class PaginatedDataInsightSource implements Source
     int reportDataCount = dao.entityExtensionTimeSeriesDao().listCount(entityFQN);
     List reportDataList =
         dao.entityExtensionTimeSeriesDao()
-            .getAfterExtension(entityFQN, limit + 1, after == null ? "0" : RestUtil.decodeCursor(after));
+            .getAfterExtension(
+                EntityUtil.hash(entityFQN), limit + 1, after == null ?
"0" : RestUtil.decodeCursor(after)); return getAfterExtensionList(reportDataList, after, limit, reportDataCount); } diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/SearchIndexWorkflow.java b/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/SearchIndexWorkflow.java index 7a36ab3eb08..424dec1459c 100644 --- a/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/SearchIndexWorkflow.java +++ b/openmetadata-service/src/main/java/org/openmetadata/service/workflows/searchIndex/SearchIndexWorkflow.java @@ -50,6 +50,7 @@ import org.openmetadata.service.exception.SinkException; import org.openmetadata.service.exception.SourceException; import org.openmetadata.service.jdbi3.CollectionDAO; import org.openmetadata.service.socket.WebSocketManager; +import org.openmetadata.service.util.EntityUtil; import org.openmetadata.service.util.JsonUtils; import org.openmetadata.service.util.ReIndexingHandler; import org.openmetadata.service.util.ResultList; @@ -300,12 +301,16 @@ public class SearchIndexWorkflow implements Runnable { public void updateRecordToDb() throws IOException { String recordString = - dao.entityExtensionTimeSeriesDao().getExtension(jobData.getId().toString(), REINDEXING_JOB_EXTENSION); + dao.entityExtensionTimeSeriesDao() + .getExtension(EntityUtil.hash(jobData.getId().toString()), REINDEXING_JOB_EXTENSION); EventPublisherJob lastRecord = JsonUtils.readValue(recordString, EventPublisherJob.class); long originalLastUpdate = lastRecord.getTimestamp(); dao.entityExtensionTimeSeriesDao() .update( - jobData.getId().toString(), REINDEXING_JOB_EXTENSION, JsonUtils.pojoToJson(jobData), originalLastUpdate); + EntityUtil.hash(jobData.getId().toString()), + REINDEXING_JOB_EXTENSION, + JsonUtils.pojoToJson(jobData), + originalLastUpdate); } private void reCreateIndexes(String entityType) { diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseResourceTest.java index 0785f5e170f..07042e82873 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/databases/DatabaseResourceTest.java @@ -51,9 +51,9 @@ public class DatabaseResourceTest extends EntityResourceTest queryParams = new HashMap<>(); @@ -118,7 +118,7 @@ public class DatabaseResourceTest extends EntityResourceTest { public void setupDatabaseSchemas(TestInfo test) throws IOException { DatabaseResourceTest databaseResourceTest = new DatabaseResourceTest(); - CreateDatabase create = databaseResourceTest.createRequest(test).withService(SNOWFLAKE_REFERENCE.getName()); + CreateDatabase create = + databaseResourceTest.createRequest(test).withService(SNOWFLAKE_REFERENCE.getFullyQualifiedName()); DATABASE = databaseResourceTest.createEntity(create, ADMIN_AUTH_HEADERS); DatabaseSchemaResourceTest databaseSchemaResourceTest = new DatabaseSchemaResourceTest(); @@ -1999,7 +2000,7 @@ public class TableResourceTest extends EntityResourceTest { DatabaseResourceTest databaseResourceTest = new DatabaseResourceTest(); Database database = databaseResourceTest.createAndCheckEntity( - databaseResourceTest.createRequest(test).withService(service.getName()), ADMIN_AUTH_HEADERS); + databaseResourceTest.createRequest(test).withService(service.getFullyQualifiedName()), 
ADMIN_AUTH_HEADERS); CreateTable create = createRequest(test, index); return createEntity(create, ADMIN_AUTH_HEADERS).withDatabase(database.getEntityReference()); } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/feeds/FeedResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/feeds/FeedResourceTest.java index 62561b59b2f..10fd98f3483 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/feeds/FeedResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/feeds/FeedResourceTest.java @@ -172,10 +172,10 @@ public class FeedResourceTest extends OpenMetadataApplicationTest { TABLE_DESCRIPTION_LINK = String.format("<#E::table::%s::description>", TABLE.getFullyQualifiedName()); USER = TableResourceTest.USER1; - USER_LINK = String.format("<#E::user::%s>", USER.getName()); + USER_LINK = String.format("<#E::user::%s>", USER.getFullyQualifiedName()); TEAM = TableResourceTest.TEAM1; - TEAM_LINK = String.format("<#E::team::%s>", TEAM.getName()); + TEAM_LINK = String.format("<#E::team::%s>", TEAM.getFullyQualifiedName()); CreateThread createThread = create(); THREAD = createAndCheck(createThread, ADMIN_AUTH_HEADERS); @@ -696,7 +696,7 @@ public class FeedResourceTest extends OpenMetadataApplicationTest { private static Stream provideStringsForListThreads() { return Stream.of( - Arguments.of(String.format("<#E::%s::%s>", Entity.USER, USER.getName())), + Arguments.of(String.format("<#E::%s::%s>", Entity.USER, USER.getFullyQualifiedName())), Arguments.of(String.format("<#E::%s::%s>", Entity.TABLE, TABLE.getFullyQualifiedName()))); } diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java index fd9d5a14825..8822448865f 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/glossary/GlossaryResourceTest.java @@ -421,14 +421,14 @@ public class GlossaryResourceTest extends EntityResourceTest reviewers) throws IOException { CreateGlossaryTerm createGlossaryTerm = createRequest(termName, "", "", null) - .withGlossary(glossary.getName()) + .withGlossary(getFqn(glossary)) .withParent(getFqn(parent)) .withReviewers(getFqns(reviewers)); return createAndCheckEntity(createGlossaryTerm, ADMIN_AUTH_HEADERS); @@ -399,7 +402,7 @@ public class GlossaryTermResourceTest extends EntityResourceTest { tableEntity = addCustomPropertyAndCheck(tableEntity.getId(), fieldA, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change); assertCustomProperties(new ArrayList<>(List.of(fieldA)), tableEntity.getCustomProperties()); - // Changing property type with PUT - old property deleted and new customer property of the same name added - fieldA.withPropertyType(STRING_TYPE.getEntityReference()); - change = getChangeDescription(tableEntity.getVersion()); - fieldDeleted(change, "customProperties", tableEntity.getCustomProperties()); - fieldAdded(change, "customProperties", new ArrayList<>(List.of(fieldA))); - tableEntity = addCustomPropertyAndCheck(tableEntity.getId(), fieldA, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change); - assertCustomProperties(new ArrayList<>(List.of(fieldA)), tableEntity.getCustomProperties()); - // Changing custom property description with PATCH fieldA.withDescription("updated2"); String json = 
JsonUtils.pojoToJson(tableEntity); @@ -119,20 +110,6 @@ public class TypeResourceTest extends EntityResourceTest { fieldUpdated(change, EntityUtil.getCustomField(fieldA, "description"), "updated", "updated2"); tableEntity = patchEntityAndCheck(tableEntity, json, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change); - // Changing property type with PATCH - old property deleted and new customer property of the same name added - CustomProperty fieldA1 = - new CustomProperty() - .withDescription(fieldA.getDescription()) - .withPropertyType(INT_TYPE.getEntityReference()) - .withName(fieldA.getName()); - json = JsonUtils.pojoToJson(tableEntity); - tableEntity.setCustomProperties(new ArrayList<>(List.of(fieldA1))); - change = getChangeDescription(tableEntity.getVersion()); - fieldDeleted(change, "customProperties", new ArrayList<>(List.of(fieldA))); - fieldAdded(change, "customProperties", new ArrayList<>(List.of(fieldA1))); - tableEntity = patchEntityAndCheck(tableEntity, json, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change); - assertCustomProperties(new ArrayList<>(List.of(fieldA1)), tableEntity.getCustomProperties()); - // Add a second property with name intB with type integer EntityReference typeRef = new EntityReference() @@ -144,7 +121,7 @@ public class TypeResourceTest extends EntityResourceTest { tableEntity = addCustomPropertyAndCheck(tableEntity.getId(), fieldB, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change); fieldB.setPropertyType(INT_TYPE.getEntityReference()); assertEquals(2, tableEntity.getCustomProperties().size()); - assertCustomProperties(new ArrayList<>(List.of(fieldA1, fieldB)), tableEntity.getCustomProperties()); + assertCustomProperties(new ArrayList<>(List.of(fieldA, fieldB)), tableEntity.getCustomProperties()); } @Test diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/pipelines/PipelineResourceTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/resources/pipelines/PipelineResourceTest.java index f799d98df16..f4ac7c51d20 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/pipelines/PipelineResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/pipelines/PipelineResourceTest.java @@ -57,6 +57,7 @@ import org.openmetadata.schema.type.FieldChange; import org.openmetadata.schema.type.Status; import org.openmetadata.schema.type.StatusType; import org.openmetadata.schema.type.Task; +import org.openmetadata.schema.utils.EntityInterfaceUtil; import org.openmetadata.service.Entity; import org.openmetadata.service.resources.EntityResourceTest; import org.openmetadata.service.resources.pipelines.PipelineResource.PipelineList; @@ -169,7 +170,7 @@ public class PipelineResourceTest extends EntityResourceTest list = listEntities(queryParams, ADMIN_AUTH_HEADERS); for (Pipeline db : list.getData()) { - assertEquals(service, db.getService().getName()); + assertEquals(service, db.getService().getFullyQualifiedName()); } } } @@ -199,7 +200,7 @@ public class PipelineResourceTest extends EntityResourceTest newTask = @@ -277,7 +284,7 @@ public class PipelineResourceTest extends EntityResourceTest { Table createdTable = tableResourceTest.createAndCheckEntity(create, ADMIN_AUTH_HEADERS); TABLE_REF = createdTable.getEntityReference(); QUERY = "select * from sales"; - QUERY_CHECKSUM = QueryUtil.getCheckSum(QUERY); + QUERY_CHECKSUM = EntityUtil.hash(QUERY); } @Override diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/RoleResourceTest.java 
b/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/RoleResourceTest.java index da815e10482..fad7a17ca56 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/RoleResourceTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/resources/teams/RoleResourceTest.java @@ -181,7 +181,7 @@ public class RoleResourceTest extends EntityResourceTest { String updatedBy = getPrincipalName(ADMIN_AUTH_HEADERS); role = byName - ? getEntityByName(role.getName(), null, null, ADMIN_AUTH_HEADERS) + ? getEntityByName(role.getFullyQualifiedName(), null, null, ADMIN_AUTH_HEADERS) : getEntity(role.getId(), null, ADMIN_AUTH_HEADERS); validateRole(role, role.getDescription(), role.getDisplayName(), updatedBy); assertListNull(role.getPolicies(), role.getUsers()); @@ -189,7 +189,7 @@ public class RoleResourceTest extends EntityResourceTest { String fields = "policies,teams,users"; role = byName - ? getEntityByName(role.getName(), null, fields, ADMIN_AUTH_HEADERS) + ? getEntityByName(role.getFullyQualifiedName(), null, fields, ADMIN_AUTH_HEADERS) : getEntity(role.getId(), fields, ADMIN_AUTH_HEADERS); assertListNotNull(role.getPolicies(), role.getUsers()); validateRole(role, role.getDescription(), role.getDisplayName(), updatedBy); diff --git a/openmetadata-service/src/test/java/org/openmetadata/service/util/FullyQualifiedNameTest.java b/openmetadata-service/src/test/java/org/openmetadata/service/util/FullyQualifiedNameTest.java index a050120f933..aae5bff5026 100644 --- a/openmetadata-service/src/test/java/org/openmetadata/service/util/FullyQualifiedNameTest.java +++ b/openmetadata-service/src/test/java/org/openmetadata/service/util/FullyQualifiedNameTest.java @@ -84,11 +84,13 @@ class FullyQualifiedNameTest { } @Test - void test_getParent() { - assertEquals("a.b.c", FullyQualifiedName.getParent("a.b.c.d")); - assertEquals("a.b", FullyQualifiedName.getParent("a.b.c")); - assertEquals("a", FullyQualifiedName.getParent("a.b")); - assertNull(FullyQualifiedName.getParent("a")); + void test_getParentFQN() { + assertEquals("a.b.c", FullyQualifiedName.getParentFQN("a.b.c.d")); + assertEquals("\"a.b\"", FullyQualifiedName.getParentFQN("\"a.b\".c")); + assertEquals("a", FullyQualifiedName.getParentFQN("a.b")); + assertEquals("a", FullyQualifiedName.getParentFQN("a.\"b.c\"")); + assertEquals("a.\"b.c\"", FullyQualifiedName.getParentFQN("a.\"b.c\".d")); + assertNull(FullyQualifiedName.getParentFQN("a")); } @Test diff --git a/openmetadata-spec/src/main/java/org/openmetadata/schema/EntityInterface.java b/openmetadata-spec/src/main/java/org/openmetadata/schema/EntityInterface.java index 35dd7fcf6b8..bcab7f1f245 100644 --- a/openmetadata-spec/src/main/java/org/openmetadata/schema/EntityInterface.java +++ b/openmetadata-spec/src/main/java/org/openmetadata/schema/EntityInterface.java @@ -25,6 +25,7 @@ import org.openmetadata.schema.type.EntityReference; import org.openmetadata.schema.type.ProviderType; import org.openmetadata.schema.type.TagLabel; import org.openmetadata.schema.type.Votes; +import org.openmetadata.schema.utils.EntityInterfaceUtil; /** Interface to be implemented by all entities to provide a way to access all the common fields. */ public interface EntityInterface { @@ -121,7 +122,8 @@ public interface EntityInterface { return new EntityReference() .withId(getId()) .withName(getName()) - .withFullyQualifiedName(getFullyQualifiedName() == null ? 
 getName() : getFullyQualifiedName())
+        .withFullyQualifiedName(
+            getFullyQualifiedName() == null ? EntityInterfaceUtil.quoteName(getName()) : getFullyQualifiedName())
         .withDescription(getDescription())
         .withDisplayName(getDisplayName())
         .withType(CANONICAL_ENTITY_NAME_MAP.get(this.getClass().getSimpleName().toLowerCase(Locale.ROOT)))
diff --git a/openmetadata-spec/src/main/java/org/openmetadata/schema/utils/EntityInterfaceUtil.java b/openmetadata-spec/src/main/java/org/openmetadata/schema/utils/EntityInterfaceUtil.java
new file mode 100644
index 00000000000..053aa28eae4
--- /dev/null
+++ b/openmetadata-spec/src/main/java/org/openmetadata/schema/utils/EntityInterfaceUtil.java
@@ -0,0 +1,11 @@
+package org.openmetadata.schema.utils;
+
+public final class EntityInterfaceUtil {
+  /** Adds quotes to name as required */
+  public static String quoteName(String name) {
+    if (name != null && !name.contains("\"")) {
+      return name.contains(".") ? "\"" + name + "\"" : name;
+    }
+    return name;
+  }
+}
diff --git a/openmetadata-spec/src/main/java/org/openmetadata/sdk/PipelineServiceClient.java b/openmetadata-spec/src/main/java/org/openmetadata/sdk/PipelineServiceClient.java
index 8f9d562176a..8979732c638 100644
--- a/openmetadata-spec/src/main/java/org/openmetadata/sdk/PipelineServiceClient.java
+++ b/openmetadata-spec/src/main/java/org/openmetadata/sdk/PipelineServiceClient.java
@@ -109,7 +109,9 @@ public abstract class PipelineServiceClient {
   public static String getServerVersion() throws IOException {
     InputStream fileInput = PipelineServiceClient.class.getResourceAsStream("/catalog/VERSION");
     Properties props = new Properties();
-    props.load(fileInput);
+    if (fileInput != null) {
+      props.load(fileInput);
+    }
     return props.getProperty("version", "unknown");
   }
diff --git a/openmetadata-spec/src/main/resources/json/schema/api/data/createGlossaryTerm.json b/openmetadata-spec/src/main/resources/json/schema/api/data/createGlossaryTerm.json
index c66891c53da..06f990f0c8d 100644
--- a/openmetadata-spec/src/main/resources/json/schema/api/data/createGlossaryTerm.json
+++ b/openmetadata-spec/src/main/resources/json/schema/api/data/createGlossaryTerm.json
@@ -9,8 +9,8 @@
   "properties": {
     "glossary": {
-      "description": "Name of the glossary that this term is part of.",
-      "$ref": "../../type/basic.json#/definitions/entityName"
+      "description": "FullyQualifiedName of the glossary that this term is part of.",
+      "$ref": "../../type/basic.json#/definitions/fullyQualifiedEntityName"
     },
     "parent": {
       "description": "Fully qualified name of the parent glossary term.",
diff --git a/openmetadata-spec/src/main/resources/json/schema/events/eventSubscription.json b/openmetadata-spec/src/main/resources/json/schema/events/eventSubscription.json
index bfeb845dd42..70c40d561c5 100644
--- a/openmetadata-spec/src/main/resources/json/schema/events/eventSubscription.json
+++ b/openmetadata-spec/src/main/resources/json/schema/events/eventSubscription.json
@@ -164,8 +164,8 @@
       "$ref": "../type/basic.json#/definitions/entityName"
     },
     "fullyQualifiedName": {
-      "description": "Name that uniquely identifies a Event Subscription.",
-      "$ref": "../type/basic.json#/definitions/entityName"
+      "description": "FullyQualifiedName that uniquely identifies an Event Subscription.",
+      "$ref": "../type/basic.json#/definitions/fullyQualifiedEntityName"
     },
     "displayName": {
       "description": "Display name for this Event Subscription.",
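The new EntityInterfaceUtil.quoteName and the EntityReference change above combine so that an entity without a stored fullyQualifiedName falls back to its quoted name. A hedged sketch of that defaulting rule in isolation; the entity names are made up, and the helper method here only restates the expression added to EntityInterface.getEntityReference():

import org.openmetadata.schema.utils.EntityInterfaceUtil;

public class FqnDefaultSketch {
  // Same expression as the change to EntityInterface.getEntityReference() above.
  static String referenceFqn(String fullyQualifiedName, String name) {
    return fullyQualifiedName == null ? EntityInterfaceUtil.quoteName(name) : fullyQualifiedName;
  }

  public static void main(String[] args) {
    System.out.println(referenceFqn(null, "marketing"));        // marketing
    System.out.println(referenceFqn(null, "john.doe"));         // "john.doe" (dot stays inside one FQN part)
    System.out.println(referenceFqn("svc.db.table", "table"));  // svc.db.table (stored FQN wins)
  }
}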