Add MD5 hashes of fullyQualifiedName and name to simplify DB indexes for lookups, and increase the size of FQN columns (#11960)

* Fix fqn hash

* Add name & fqnHash and remove generated columns from top level

* merge commits

* Fix glossary import/export

* Fix BotResource Tests

* Fix Glossary Term tests

* Fix Glossary Import/Export tests

* All backend tests are fixed.

* merge with main

* Fix tests and revert local changes

* Fix NullPointerException for Glossary and Query

* Fix Tests

---------

Co-authored-by: Ayush Shah <ayush@getcollate.io>
Sriharsha Chintalapani, 2023-06-19 03:13:05 -07:00, committed by GitHub
parent 4a8554c313
commit 9dbaabad44
76 changed files with 1226 additions and 732 deletions
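At a glance: instead of indexing long fullyQualifiedName/name VARCHAR columns, each lookup key is stored as a fixed-length MD5 hex digest in new fqnHash/nameHash columns. A minimal, self-contained sketch of the idea, mirroring the getCheckSum helper added to CommonUtil later in this commit (the FqnHashDemo class is illustrative only; commons-codec is the dependency this commit adds to the pom):

import java.security.MessageDigest;
import org.apache.commons.codec.binary.Hex;

public final class FqnHashDemo {
  public static void main(String[] args) throws Exception {
    // FQNs grow with nesting depth (service.database.schema.table.column...),
    // but the MD5 hex digest is always 32 characters -- easy to index uniquely.
    String fqn = "mysql.ecommerce_db.shopify.dim_address.address_id";
    byte[] checksum = MessageDigest.getInstance("MD5").digest(fqn.getBytes());
    System.out.println(Hex.encodeHexString(checksum)); // 32 hex chars
  }
}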

View File

@ -115,9 +115,97 @@ SET pe.json = JSON_INSERT(
UPDATE dbservice_entity
SET json = JSON_INSERT(
JSON_REMOVE(json, '$.connection.config.password'),
'$.connection.config.authType',
JSON_OBJECT(),
'$.connection.config.authType.password',
JSON_EXTRACT(json, '$.connection.config.password'))
where serviceType in ('Postgres', 'Mysql');
-- add fullyQualifiedName hash and remove existing columns
-- update the OM system tables
ALTER TABLE field_relationship DROP KEY `PRIMARY`, ADD COLUMN fromFQNHash VARCHAR(256), ADD COLUMN toFQNHash VARCHAR(256),
DROP INDEX from_index, DROP INDEX to_index, ADD INDEX from_fqnhash_index(fromFQNHash, relation), ADD INDEX to_fqnhash_index(toFQNHash, relation),
ADD CONSTRAINT `field_relationship_primary` PRIMARY KEY(fromFQNHash, toFQNHash, relation), MODIFY fromFQN VARCHAR(2096) NOT NULL,
MODIFY toFQN VARCHAR(2096) NOT NULL;
ALTER TABLE entity_extension_time_series DROP COLUMN entityFQN, ADD COLUMN entityFQNHash VARCHAR(256) NOT NULL;
ALTER TABLE type_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE event_subscription_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE test_definition DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE test_suite DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE test_case DROP COLUMN fullyQualifiedName, ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL,
ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash);
ALTER TABLE web_analytic_event DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash);
ALTER TABLE data_insight_chart DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash);
ALTER TABLE kpi_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE classification DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE glossary_term_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE tag DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE tag_usage DROP index `source`, DROP COLUMN targetFQN, ADD COLUMN tagFQNHash VARCHAR(256), ADD COLUMN targetFQNHash VARCHAR(256),
ADD UNIQUE KEY `tag_usage_key` (source, tagFQNHash, targetFQNHash);
ALTER TABLE policy_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE role_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE automations_workflow DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE test_connection_definition ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
-- update services
ALTER TABLE dbservice_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE messaging_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE dashboard_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE pipeline_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE storage_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE metadata_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE mlmodel_service_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
-- all entity tables
ALTER TABLE database_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE database_schema_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE table_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE metric_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE report_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE dashboard_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE chart_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE ml_model_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE pipeline_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE topic_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE ingestion_pipeline_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE storage_container_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE dashboard_data_model_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> '$.name') NOT NULL;
ALTER TABLE query_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE team_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE user_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE bot_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE glossary_entity DROP KEY `name`, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
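With the unique keys now on fqnHash, a hedged sketch of what a point lookup against the migrated schema looks like over plain JDBC (the server actually goes through the JDBI DAOs shown later in EntityDAO.java, and the real hash is built per FQN part rather than over the whole string):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

class LookupSketch {
  // Fetch an entity's JSON by its FQN hash: the index hit is on the fixed-width
  // fqnHash column; the long raw FQN never appears in the WHERE clause.
  static String findTableJson(Connection conn, String fqnHash) throws SQLException {
    try (PreparedStatement ps =
        conn.prepareStatement("SELECT json FROM table_entity WHERE fqnHash = ?")) {
      ps.setString(1, fqnHash);
      try (ResultSet rs = ps.executeQuery()) {
        return rs.next() ? rs.getString("json") : null;
      }
    }
  }
}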

View File

@ -80,7 +80,92 @@ SET json = jsonb_set(
json #-'{connection,config,password}',
'{connection,config,authType}',
jsonb_build_object('password',json#>'{connection,config,password}')
)
WHERE serviceType IN ('Postgres', 'Mysql')
and json#>'{connection,config,password}' is not null;
DROP INDEX field_relationship_from_index, field_relationship_to_index;
ALTER TABLE field_relationship DROP CONSTRAINT field_relationship_pkey, ADD COLUMN fromFQNHash VARCHAR(256), ADD COLUMN toFQNHash VARCHAR(256),
ADD CONSTRAINT field_relationship_pkey PRIMARY KEY(fromFQNHash, toFQNHash, relation),
ALTER fromFQN TYPE VARCHAR(2096), ALTER toFQN TYPE VARCHAR(2096);
CREATE INDEX IF NOT EXISTS field_relationship_from_index ON field_relationship(fromFQNHash, relation);
CREATE INDEX IF NOT EXISTS field_relationship_to_index ON field_relationship(toFQNHash, relation);
ALTER TABLE entity_extension_time_series DROP COLUMN entityFQN, ADD COLUMN entityFQNHash VARCHAR(256) NOT NULL;
ALTER TABLE type_entity DROP CONSTRAINT type_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE event_subscription_entity DROP CONSTRAINT event_subscription_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE test_definition DROP CONSTRAINT test_definition_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE test_suite DROP CONSTRAINT test_suite_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE test_case DROP COLUMN fullyQualifiedName, ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL,
ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash);
ALTER TABLE web_analytic_event DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash);
ALTER TABLE data_insight_chart DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash);
ALTER TABLE kpi_entity DROP CONSTRAINT kpi_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE classification DROP CONSTRAINT tag_category_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE glossary_term_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE tag DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE tag_usage DROP CONSTRAINT tag_usage_source_tagfqn_targetfqn_key, DROP COLUMN targetFQN, ADD COLUMN tagFQNHash VARCHAR(256), ADD COLUMN targetFQNHash VARCHAR(256),
ADD UNIQUE (source, tagFQNHash, targetFQNHash);
ALTER TABLE policy_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE role_entity DROP CONSTRAINT role_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE automations_workflow DROP CONSTRAINT automations_workflow_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE test_connection_definition ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
-- update services
ALTER TABLE dbservice_entity DROP CONSTRAINT dbservice_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE messaging_service_entity DROP CONSTRAINT messaging_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE dashboard_service_entity DROP CONSTRAINT dashboard_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE pipeline_service_entity DROP CONSTRAINT pipeline_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE storage_service_entity DROP CONSTRAINT storage_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE metadata_service_entity DROP CONSTRAINT metadata_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE mlmodel_service_entity DROP CONSTRAINT mlmodel_service_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
-- all entity tables
ALTER TABLE database_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE database_schema_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE table_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE metric_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE report_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE dashboard_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE chart_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE ml_model_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE pipeline_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE topic_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE ingestion_pipeline_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE storage_container_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE dashboard_data_model_entity DROP COLUMN fullyQualifiedName, ADD COLUMN fqnHash VARCHAR(256) NOT NULL, ADD UNIQUE (fqnHash),
ADD COLUMN name VARCHAR(256) GENERATED ALWAYS AS (json ->> 'name') STORED NOT NULL;
ALTER TABLE query_entity ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE team_entity DROP CONSTRAINT team_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE user_entity DROP CONSTRAINT user_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE bot_entity DROP CONSTRAINT bot_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);
ALTER TABLE glossary_entity DROP CONSTRAINT glossary_entity_name_key, ADD COLUMN nameHash VARCHAR(256) NOT NULL, ADD UNIQUE (nameHash);

View File

@ -71,7 +71,11 @@
<version>2.1.6</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>1.15</version>
</dependency>
</dependencies>
<profiles>

View File

@ -21,6 +21,7 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;
import java.text.DateFormat;
import java.text.ParseException;
import java.util.ArrayList;
@ -42,7 +43,9 @@ import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.io.IOUtils;
@Slf4j
@ -179,4 +182,10 @@ public final class CommonUtil {
}
return new ArrayList<>(Arrays.asList(entries));
}
@SneakyThrows
public static String getCheckSum(String input) {
byte[] checksum = MessageDigest.getInstance("MD5").digest(input.getBytes());
return Hex.encodeHexString(checksum);
}
}
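A quick usage note for the new helper: the digest is deterministic and fixed-width, regardless of input length (note that input.getBytes() uses the platform default charset):

System.out.println(CommonUtil.getCheckSum("admin"));
// prints 21232f297a57a5a743894a0e4a801fc3 -- the well-known MD5 of "admin", 32 hex chars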

View File

@ -91,7 +91,7 @@ server:
logging:
level: ${LOG_LEVEL:-INFO}
loggers:
io.swagger: ERROR
io.swagger: DEBUG
appenders:
- type: console
threshold: TRACE
@ -127,7 +127,6 @@ database:
# the JDBC URL; the database is called openmetadata_db
url: jdbc:${DB_SCHEME:-mysql}://${DB_HOST:-localhost}:${DB_PORT:-3306}/${OM_DATABASE:-openmetadata_db}?allowPublicKeyRetrieval=true&useSSL=${DB_USE_SSL:-false}&serverTimezone=UTC
migrationConfiguration:
path: "./bootstrap/sql"

View File

@ -18,4 +18,4 @@ workflowConfig:
authProvider: openmetadata
securityConfig:
"jwtToken": "eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg"

View File

@ -64,7 +64,7 @@ test_suite_config = {
"testCases": [
{
"name": "my_test_case",
"testDefinitionName": "TableColumnCountToBeBetween",
"testDefinitionName": "tableColumnCountToBeBetween",
"parameterValues": [
{"name": "minColValue", "value": 1},
{"name": "maxColValue", "value": 5},
@ -72,7 +72,7 @@ test_suite_config = {
},
{
"name": "table_column_name_to_exists",
"testDefinitionName": "TableColumnNameToExist",
"testDefinitionName": "tableColumnNameToExist",
"parameterValues": [{"name": "columnName", "value": "id"}],
},
],

View File

@ -101,7 +101,7 @@ class TestSuiteWorkflowTests(unittest.TestCase):
"testCases": [
{
"name": "my_test_case",
"testDefinitionName": "TableColumnCountToBeBetween",
"testDefinitionName": "tableColumnCountToBeBetween",
"parameterValues": [
{"name": "minColValue", "value": 1},
{"name": "maxColValue", "value": 5},
@ -149,7 +149,7 @@ class TestSuiteWorkflowTests(unittest.TestCase):
"testCases": [
{
"name": "my_test_case",
"testDefinitionName": "TableColumnCountToBeBetween",
"testDefinitionName": "tableColumnCountToBeBetween",
"parameterValues": [
{"name": "minColValue", "value": 1},
{"name": "maxColValue", "value": 5},
@ -186,7 +186,7 @@ class TestSuiteWorkflowTests(unittest.TestCase):
"testCases": [
{
"name": "my_test_case",
"testDefinitionName": "TableColumnCountToBeBetween",
"testDefinitionName": "tableColumnCountToBeBetween",
"parameterValues": [
{"name": "minColValue", "value": 1},
{"name": "maxColValue", "value": 5},
@ -194,7 +194,7 @@ class TestSuiteWorkflowTests(unittest.TestCase):
},
{
"name": "my_test_case_two",
"testDefinitionName": "TableColumnCountToBeBetween",
"testDefinitionName": "tableColumnCountToBeBetween",
"parameterValues": [
{"name": "minColValue", "value": 1},
{"name": "maxColValue", "value": 5},
@ -226,7 +226,7 @@ class TestSuiteWorkflowTests(unittest.TestCase):
"testCases": [
{
"name": "my_test_case",
"testDefinitionName": "TableColumnCountToBeBetween",
"testDefinitionName": "tableColumnCountToBeBetween",
"parameterValues": [
{"name": "minColValue", "value": 1},
{"name": "maxColValue", "value": 5},

View File

@ -134,6 +134,11 @@ public final class CsvUtil {
return csvRecord;
}
public static List<String> addUserOwner(List<String> csvRecord, EntityReference owner) {
csvRecord.add(nullOrEmpty(owner) ? null : owner.getName());
return csvRecord;
}
private static String quoteCsvField(String str) {
if (str.contains(SEPARATOR) || str.contains(FIELD_SEPARATOR)) {
return quote(str);

View File

@ -47,6 +47,7 @@ import org.openmetadata.schema.type.csv.CsvFile;
import org.openmetadata.schema.type.csv.CsvHeader;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.schema.type.csv.CsvImportResult.Status;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.EntityRepository;
import org.openmetadata.service.util.EntityUtil;
@ -153,8 +154,18 @@ public abstract class EntityCsv<T extends EntityInterface> {
List<String> list = CsvUtil.fieldToStrings(owner);
if (list.size() != 2) {
importFailure(printer, invalidOwner(fieldNumber), csvRecord);
return null;
}
return getEntityReference(printer, csvRecord, fieldNumber, list.get(0), list.get(1));
return getEntityReference(printer, csvRecord, fieldNumber, list.get(0), EntityInterfaceUtil.quoteName(list.get(1)));
}
/** Owner field is in entityName format */
public EntityReference getOwnerAsUser(CSVPrinter printer, CSVRecord csvRecord, int fieldNumber) throws IOException {
String owner = csvRecord.get(fieldNumber);
if (nullOrEmpty(owner)) {
return null;
}
return getEntityReference(printer, csvRecord, fieldNumber, Entity.USER, EntityInterfaceUtil.quoteName(owner));
}
protected final Boolean getBoolean(CSVPrinter printer, CSVRecord csvRecord, int fieldNumber) throws IOException {
@ -222,6 +233,27 @@ public abstract class EntityCsv<T extends EntityInterface> {
return refs.isEmpty() ? null : refs;
}
protected final List<EntityReference> getUserOrTeamEntityReferences(
CSVPrinter printer, CSVRecord csvRecord, int fieldNumber, String entityType) throws IOException {
String fqns = csvRecord.get(fieldNumber);
if (nullOrEmpty(fqns)) {
return null;
}
List<String> fqnList = listOrEmpty(CsvUtil.fieldToStrings(fqns));
List<EntityReference> refs = new ArrayList<>();
for (String fqn : fqnList) {
EntityReference ref =
getEntityReference(printer, csvRecord, fieldNumber, entityType, EntityInterfaceUtil.quoteName(fqn));
if (!processRecord) {
return null;
}
if (ref != null) {
refs.add(ref);
}
}
return refs.isEmpty() ? null : refs;
}
protected final List<TagLabel> getTagLabels(CSVPrinter printer, CSVRecord csvRecord, int fieldNumber)
throws IOException {
List<EntityReference> refs = getEntityReferences(printer, csvRecord, fieldNumber, Entity.TAG);
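Owner and reviewer names read from CSV are passed through EntityInterfaceUtil.quoteName before the lookup. quoteName's implementation is not part of this diff; a hedged sketch of the assumed contract (names containing the FQN separator get quoted so they resolve as a single FQN part):

class QuoteNameSketch {
  // Assumed behavior, for illustration only.
  static String quoteName(String name) {
    return name.contains(".") ? "\"" + name + "\"" : name;
  }

  public static void main(String[] args) {
    System.out.println(quoteName("john"));     // john
    System.out.println(quoteName("john.doe")); // "john.doe"
  }
}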

View File

@ -102,6 +102,7 @@ import org.openmetadata.service.events.errors.EventPublisherException;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.resources.events.EventResource.EventList;
import org.openmetadata.service.util.ElasticSearchClientUtils;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.JsonUtils;
@Slf4j
@ -810,7 +811,7 @@ public class ElasticSearchEventPublisher extends AbstractEventPublisher {
.withFailure(new Failure().withSinkError(failureDetails));
dao.entityExtensionTimeSeriesDao()
.insert(
ELASTIC_SEARCH_ENTITY_FQN_STREAM,
EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM),
ELASTIC_SEARCH_EXTENSION,
"eventPublisherJob",
JsonUtils.pojoToJson(streamJob));
@ -823,7 +824,8 @@ public class ElasticSearchEventPublisher extends AbstractEventPublisher {
try {
long updateTime = Date.from(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant()).getTime();
String recordString =
dao.entityExtensionTimeSeriesDao().getExtension(ELASTIC_SEARCH_ENTITY_FQN_STREAM, ELASTIC_SEARCH_EXTENSION);
dao.entityExtensionTimeSeriesDao()
.getExtension(EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION);
EventPublisherJob lastRecord = JsonUtils.readValue(recordString, EventPublisherJob.class);
long originalLastUpdate = lastRecord.getTimestamp();
lastRecord.setStatus(status);
@ -838,7 +840,7 @@ public class ElasticSearchEventPublisher extends AbstractEventPublisher {
dao.entityExtensionTimeSeriesDao()
.update(
ELASTIC_SEARCH_ENTITY_FQN_STREAM,
EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM),
ELASTIC_SEARCH_EXTENSION,
JsonUtils.pojoToJson(lastRecord),
originalLastUpdate);

View File

@ -40,6 +40,7 @@ import org.openmetadata.schema.type.TagLabel;
import org.openmetadata.service.Entity;
import org.openmetadata.service.events.errors.EventPublisherException;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.JsonUtils;
@Slf4j
@ -293,7 +294,8 @@ public class ElasticSearchIndexDefinition {
try {
long updateTime = Date.from(LocalDateTime.now().atZone(ZoneId.systemDefault()).toInstant()).getTime();
String recordString =
dao.entityExtensionTimeSeriesDao().getExtension(ELASTIC_SEARCH_ENTITY_FQN_STREAM, ELASTIC_SEARCH_EXTENSION);
dao.entityExtensionTimeSeriesDao()
.getExtension(EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION);
EventPublisherJob lastRecord = JsonUtils.readValue(recordString, EventPublisherJob.class);
long originalLastUpdate = lastRecord.getTimestamp();
lastRecord.setStatus(Status.ACTIVE_WITH_ERROR);
@ -308,7 +310,7 @@ public class ElasticSearchIndexDefinition {
dao.entityExtensionTimeSeriesDao()
.update(
ELASTIC_SEARCH_ENTITY_FQN_STREAM,
EntityUtil.hash(ELASTIC_SEARCH_ENTITY_FQN_STREAM),
ELASTIC_SEARCH_EXTENSION,
JsonUtils.pojoToJson(lastRecord),
originalLastUpdate);

View File

@ -269,6 +269,11 @@ public class FormatterUtil {
.withEntityFullyQualifiedName(entityFQN);
}
// PUT or PATCH operation didn't result in any change
if (changeType == null || RestUtil.ENTITY_NO_CHANGE.equals(changeType)) {
return null;
}
// Handles Bulk Add test cases to a logical test suite
if (changeType.equals(RestUtil.LOGICAL_TEST_CASES_ADDED)) {
EntityInterface entityInterface = (EntityInterface) responseContext.getEntity();
@ -280,11 +285,6 @@ public class FormatterUtil {
.withEntityFullyQualifiedName(entityFQN);
}
// PUT or PATCH operation didn't result in any change
if (changeType == null || RestUtil.ENTITY_NO_CHANGE.equals(changeType)) {
return null;
}
// Entity was updated by either PUT .../entities or PATCH .../entities
// Entity was soft deleted by DELETE .../entities/{id} that updated the attribute `deleted` to true
if (changeType.equals(RestUtil.ENTITY_UPDATED) || changeType.equals(RestUtil.ENTITY_SOFT_DELETED)) {

View File

@ -47,7 +47,7 @@ public class ChartRepository extends EntityRepository<Chart> {
@Override
public void setFullyQualifiedName(Chart chart) {
chart.setFullyQualifiedName(FullyQualifiedName.add(chart.getService().getName(), chart.getName()));
chart.setFullyQualifiedName(FullyQualifiedName.add(chart.getService().getFullyQualifiedName(), chart.getName()));
}
@Override
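Chart FQNs (and Dashboard FQNs in the next file) are now composed from the service's fullyQualifiedName instead of its raw name. This matters because EntityRepository.setFullyQualifiedName, later in this diff, starts quoting names. A hedged illustration, with quoteName and add sketched under the same assumptions as above:

class ChartFqnSketch {
  // Assumed: quote names containing the FQN separator.
  static String quoteName(String name) {
    return name.contains(".") ? "\"" + name + "\"" : name;
  }

  // Assumed shape of FullyQualifiedName.add: parent FQN + "." + quoted child name.
  static String add(String parentFqn, String name) {
    return parentFqn + "." + quoteName(name);
  }

  public static void main(String[] args) {
    String serviceName = "my.service";          // raw name containing a dot
    String serviceFqn = quoteName(serviceName); // "my.service" (quoted)
    System.out.println(add(serviceName, "sales")); // my.service.sales   -- parses as 3 parts, wrong
    System.out.println(add(serviceFqn, "sales"));  // "my.service".sales -- 2 parts, resolves correctly
  }
}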

View File

@ -72,13 +72,14 @@ public class ClassificationRepository extends EntityRepository<Classification> {
}
private int getTermCount(Classification category) {
ListFilter filter =
new ListFilter(Include.NON_DELETED).addQueryParam("parent", FullyQualifiedName.build(category.getName()));
ListFilter filter = new ListFilter(Include.NON_DELETED).addQueryParam("parent", category.getName());
return daoCollection.tagDAO().listCount(filter);
}
private Integer getUsageCount(Classification classification) {
return daoCollection.tagUsageDAO().getTagCount(TagSource.CLASSIFICATION.ordinal(), classification.getName());
return daoCollection
.tagUsageDAO()
.getTagCount(TagSource.CLASSIFICATION.ordinal(), FullyQualifiedName.buildHash(classification.getName()));
}
@Transaction

View File

@ -92,6 +92,11 @@ public class ContainerRepository extends EntityRepository<Container> {
}
}
@Override
public String getFullyQualifiedNameHash(Container container) {
return FullyQualifiedName.buildHash(container.getFullyQualifiedName());
}
private void setColumnFQN(String parentFQN, List<Column> columns) {
columns.forEach(
c -> {

View File

@ -96,6 +96,11 @@ public class DashboardDataModelRepository extends EntityRepository<DashboardData
super.update(task, entityLink, newValue, user);
}
@Override
public String getFullyQualifiedNameHash(DashboardDataModel dashboardDataModel) {
return FullyQualifiedName.buildHash(dashboardDataModel.getFullyQualifiedName());
}
@Override
public void prepare(DashboardDataModel dashboardDataModel) throws IOException {
DashboardService dashboardService = Entity.getEntity(dashboardDataModel.getService(), "", Include.ALL);

View File

@ -55,7 +55,8 @@ public class DashboardRepository extends EntityRepository<Dashboard> {
@Override
public void setFullyQualifiedName(Dashboard dashboard) {
dashboard.setFullyQualifiedName(FullyQualifiedName.add(dashboard.getService().getName(), dashboard.getName()));
dashboard.setFullyQualifiedName(
FullyQualifiedName.add(dashboard.getService().getFullyQualifiedName(), dashboard.getName()));
}
@Override

View File

@ -47,6 +47,11 @@ public class DatabaseRepository extends EntityRepository<Database> {
database.setFullyQualifiedName(FullyQualifiedName.build(database.getService().getName(), database.getName()));
}
@Override
public String getFullyQualifiedNameHash(Database entity) {
return FullyQualifiedName.buildHash(entity.getFullyQualifiedName());
}
@Override
public void prepare(Database database) throws IOException {
populateService(database);

View File

@ -53,6 +53,11 @@ public class DatabaseSchemaRepository extends EntityRepository<DatabaseSchema> {
FullyQualifiedName.add(schema.getDatabase().getFullyQualifiedName(), schema.getName()));
}
@Override
public String getFullyQualifiedNameHash(DatabaseSchema schema) {
return FullyQualifiedName.buildHash(schema.getFullyQualifiedName());
}
@Override
public void prepare(DatabaseSchema schema) throws IOException {
populateDatabase(schema);

View File

@ -46,42 +46,75 @@ public interface EntityDAO<T extends EntityInterface> {
Class<T> getEntityClass();
String getNameColumn();
default String getNameColumn() {
return "name";
}
default String getNameHashColumn() {
return "nameHash";
};
default boolean supportsSoftDelete() {
return true;
}
/** Common queries for all entities implemented here. Do not override. */
@ConnectionAwareSqlUpdate(value = "INSERT INTO <table> (json) VALUES (:json)", connectionType = MYSQL)
@ConnectionAwareSqlUpdate(value = "INSERT INTO <table> (json) VALUES (:json :: jsonb)", connectionType = POSTGRES)
void insert(@Define("table") String table, @Bind("json") String json);
@ConnectionAwareSqlUpdate(value = "UPDATE <table> SET json = :json WHERE id = :id", connectionType = MYSQL)
@ConnectionAwareSqlUpdate(
value = "UPDATE <table> SET json = (:json :: jsonb) WHERE id = :id",
value = "INSERT INTO <table> (<nameHashColumn>, json) VALUES (:nameHashColumnValue, :json)",
connectionType = MYSQL)
@ConnectionAwareSqlUpdate(
value = "INSERT INTO <table> (<nameHashColumn>, json) VALUES (:nameHashColumnValue, :json :: jsonb)",
connectionType = POSTGRES)
void update(@Define("table") String table, @Bind("id") String id, @Bind("json") String json);
void insert(
@Define("table") String table,
@Define("nameHashColumn") String nameHashColumn,
@Bind("nameHashColumnValue") String nameHashColumnValue,
@Bind("json") String json);
@ConnectionAwareSqlUpdate(
value = "UPDATE <table> SET json = :json, <nameHashColumn> = :nameHashColumnValue WHERE id = :id",
connectionType = MYSQL)
@ConnectionAwareSqlUpdate(
value = "UPDATE <table> SET json = (:json :: jsonb), <nameHashColumn> = :nameHashColumnValue WHERE id = :id",
connectionType = POSTGRES)
void update(
@Define("table") String table,
@Define("nameHashColumn") String nameHashColumn,
@Bind("nameHashColumnValue") String nameHashColumnValue,
@Bind("id") String id,
@Bind("json") String json);
default void updateFqn(String oldPrefix, String newPrefix) {
LOG.info("Updating FQN for {} from {} to {}", getTableName(), oldPrefix, newPrefix);
if (!getNameColumn().equals("fullyQualifiedName")) {
if (!getNameHashColumn().equals("fqnHash")) {
return;
}
String mySqlUpdate =
String.format(
"UPDATE %s SET json = "
+ "JSON_REPLACE(json, '$.fullyQualifiedName', REGEXP_REPLACE(fullyQualifiedName, '^%s\\.', '%s.')) "
+ "WHERE fullyQualifiedName LIKE '%s.%%'",
getTableName(), escape(oldPrefix), escapeApostrophe(newPrefix), escape(oldPrefix));
+ "JSON_REPLACE(json, '$.fullyQualifiedName', REGEXP_REPLACE(JSON_UNQUOTE(JSON_EXTRACT(json, '$.fullyQualifiedName')), '^%s\\.', '%s.')) "
+ ", fqnHash = REPLACE(fqnHash, '%s.', '%s.') "
+ "WHERE fqnHash LIKE '%s.%%'",
getTableName(),
escape(oldPrefix),
escapeApostrophe(newPrefix),
FullyQualifiedName.buildHash(oldPrefix),
FullyQualifiedName.buildHash(newPrefix),
FullyQualifiedName.buildHash(oldPrefix));
String postgresUpdate =
String.format(
"UPDATE %s SET json = "
+ "REPLACE(json::text, '\"fullyQualifiedName\": \"%s.', "
+ "'\"fullyQualifiedName\": \"%s.')::jsonb "
+ "WHERE fullyQualifiedName LIKE '%s.%%'",
getTableName(), escapeApostrophe(oldPrefix), escapeApostrophe(newPrefix), escape(oldPrefix));
+ ", fqnHash = REPLACE(fqnHash, '%s.', '%s.') "
+ "WHERE fqnHash LIKE '%s.%%'",
getTableName(),
escapeApostrophe(oldPrefix),
escapeApostrophe(newPrefix),
FullyQualifiedName.buildHash(oldPrefix),
FullyQualifiedName.buildHash(newPrefix),
FullyQualifiedName.buildHash(oldPrefix));
updateFqnInternal(mySqlUpdate, postgresUpdate);
}
@ -141,23 +174,29 @@ public interface EntityDAO<T extends EntityInterface> {
@SqlQuery("SELECT EXISTS (SELECT * FROM <table> WHERE id = :id)")
boolean exists(@Define("table") String table, @Bind("id") String id);
@SqlQuery("SELECT EXISTS (SELECT * FROM <table> WHERE <nameColumn> = :fqn)")
boolean existsByName(@Define("table") String table, @Define("nameColumn") String nameColumn, @Bind("fqn") String fqn);
@SqlQuery("SELECT EXISTS (SELECT * FROM <table> WHERE <nameColumnHash> = :fqnHash)")
boolean existsByName(
@Define("table") String table, @Define("nameColumnHash") String nameColumnHash, @Bind("fqnHash") String fqnHash);
@SqlUpdate("DELETE FROM <table> WHERE id = :id")
int delete(@Define("table") String table, @Bind("id") String id);
/** Default methods that interfaces with implementation. Don't override */
default void insert(EntityInterface entity) throws JsonProcessingException {
insert(getTableName(), JsonUtils.pojoToJson(entity));
default void insert(EntityInterface entity, String fqnHash) throws JsonProcessingException {
insert(getTableName(), getNameHashColumn(), fqnHash, JsonUtils.pojoToJson(entity));
}
default void update(UUID id, String json) {
update(getTableName(), id.toString(), json);
default void update(UUID id, String fqnHash, String json) {
update(getTableName(), getNameHashColumn(), fqnHash, id.toString(), json);
}
default void update(EntityInterface entity) throws JsonProcessingException {
update(getTableName(), entity.getId().toString(), JsonUtils.pojoToJson(entity));
update(
getTableName(),
getNameHashColumn(),
FullyQualifiedName.buildHash(entity.getFullyQualifiedName()),
entity.getId().toString(),
JsonUtils.pojoToJson(entity));
}
default String getCondition(Include include) {
@ -188,7 +227,8 @@ public interface EntityDAO<T extends EntityInterface> {
@SneakyThrows
default T findEntityByName(String fqn, Include include) {
return jsonToEntity(findByName(getTableName(), getNameColumn(), fqn, getCondition(include)), fqn);
return jsonToEntity(
findByName(getTableName(), getNameHashColumn(), FullyQualifiedName.buildHash(fqn), getCondition(include)), fqn);
}
default T jsonToEntity(String json, String identity) throws IOException {
@ -226,15 +266,15 @@ public interface EntityDAO<T extends EntityInterface> {
}
default String findJsonByFqn(String fqn, Include include) {
return findByName(getTableName(), getNameColumn(), fqn, getCondition(include));
return findByName(getTableName(), getNameHashColumn(), FullyQualifiedName.buildHash(fqn), getCondition(include));
}
default int listCount(ListFilter filter) {
return listCount(getTableName(), getNameColumn(), filter.getCondition());
return listCount(getTableName(), getNameHashColumn(), filter.getCondition());
}
default int listTotalCount() {
return listTotalCount(getTableName(), getNameColumn());
return listTotalCount(getTableName(), getNameHashColumn());
}
default List<String> listBefore(ListFilter filter, int limit, String before) {
@ -250,7 +290,7 @@ public interface EntityDAO<T extends EntityInterface> {
}
default List<String> listAfter(ListFilter filter, int limit, int offset) {
return listAfter(getTableName(), getNameColumn(), filter.getCondition(), limit, offset);
return listAfter(getTableName(), getNameHashColumn(), filter.getCondition(), limit, offset);
}
default void exists(UUID id) {
@ -261,7 +301,7 @@ public interface EntityDAO<T extends EntityInterface> {
}
default void existsByName(String fqn) {
if (!existsByName(getTableName(), getNameColumn(), fqn)) {
if (!existsByName(getTableName(), getNameHashColumn(), FullyQualifiedName.buildHash(fqn))) {
String entityType = Entity.getEntityTypeFromClass(getEntityClass());
throw EntityNotFoundException.byMessage(CatalogExceptionMessage.entityNotFound(entityType, fqn));
}
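The REPLACE(fqnHash, '<hash(oldPrefix)>.', '<hash(newPrefix)>.') statements in updateFqn above only work if the hash is computed per FQN part and re-joined with dots, not over the whole string. A hedged sketch of that scheme (FullyQualifiedName.buildHash itself is not shown in this diff; quoted parts containing dots are ignored here for brevity):

import java.util.Arrays;
import java.util.stream.Collectors;
import org.apache.commons.codec.digest.DigestUtils;

class BuildHashSketch {
  // Hash each dot-separated part separately and re-join, so renaming a prefix
  // becomes a plain string REPLACE over the leading hash segments.
  static String buildHash(String fqn) {
    return Arrays.stream(fqn.split("\\."))
        .map(DigestUtils::md5Hex)
        .collect(Collectors.joining("."));
  }

  public static void main(String[] args) {
    // Only the first segment differs between these two outputs:
    System.out.println(buildHash("svc.db.schema"));
    System.out.println(buildHash("svc2.db.schema"));
  }
}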

View File

@ -96,6 +96,7 @@ import org.openmetadata.schema.type.TaskDetails;
import org.openmetadata.schema.type.TaskType;
import org.openmetadata.schema.type.Votes;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.OpenMetadataApplicationConfig;
import org.openmetadata.service.TypeRegistry;
@ -265,7 +266,12 @@ public abstract class EntityRepository<T extends EntityInterface> {
/** Set fullyQualifiedName of an entity */
public void setFullyQualifiedName(T entity) {
entity.setFullyQualifiedName(entity.getName());
entity.setFullyQualifiedName(EntityInterfaceUtil.quoteName(entity.getName()));
}
/** Set fullyQualifiedNameHash of an entity */
public String getFullyQualifiedNameHash(T entity) {
return FullyQualifiedName.buildHash(entity.getFullyQualifiedName());
}
/** Update an entity based suggested description and tags in the task */
@ -364,7 +370,7 @@ public abstract class EntityRepository<T extends EntityInterface> {
}
@Transaction
public final T getByName(UriInfo uriInfo, String fqn, Fields fields) throws IOException {
public T getByName(UriInfo uriInfo, String fqn, Fields fields) throws IOException {
return getByName(uriInfo, fqn, fields, NON_DELETED);
}
@ -411,10 +417,10 @@ public abstract class EntityRepository<T extends EntityInterface> {
String beforeCursor;
String afterCursor = null;
beforeCursor = after == null ? null : entities.get(0).getFullyQualifiedName();
beforeCursor = after == null ? null : entities.get(0).getName();
if (entities.size() > limitParam) { // If extra result exists, then next page exists - return after cursor
entities.remove(limitParam);
afterCursor = entities.get(limitParam - 1).getFullyQualifiedName();
afterCursor = entities.get(limitParam - 1).getName();
}
return getResultList(entities, beforeCursor, afterCursor, total);
} else {
@ -445,11 +451,11 @@ public abstract class EntityRepository<T extends EntityInterface> {
String beforeCursor;
String afterCursor = null;
beforeCursor = after == null ? null : JsonUtils.readValue(jsons.get(0), entityClass).getFullyQualifiedName();
if (jsons.size() > limitParam) {
beforeCursor = after == null ? null : entities.get(0).getName();
if (entities.size() > limitParam) { // If extra result exists, then next page exists - return after cursor
T lastReadEntity = JsonUtils.readValue(jsons.get(limitParam), entityClass);
entities.remove(lastReadEntity.getId());
afterCursor = JsonUtils.readValue(jsons.get(limitParam - 1), entityClass).getFullyQualifiedName();
entities.remove(limitParam);
afterCursor = entities.get(limitParam - 1).getName();
errors.forEach((key, value) -> entities.remove(key));
// Remove the Last Json Entry if present in error, since the read was actually just till limitParam , and if
// error
@ -481,9 +487,9 @@ public abstract class EntityRepository<T extends EntityInterface> {
String afterCursor;
if (entities.size() > limitParam) { // If extra result exists, then previous page exists - return before cursor
entities.remove(0);
beforeCursor = entities.get(0).getFullyQualifiedName();
beforeCursor = entities.get(0).getName();
}
afterCursor = entities.get(entities.size() - 1).getFullyQualifiedName();
afterCursor = entities.get(entities.size() - 1).getName();
return getResultList(entities, beforeCursor, afterCursor, total);
}
@ -810,7 +816,9 @@ public abstract class EntityRepository<T extends EntityInterface> {
daoCollection.relationshipDAO().deleteAll(id, entityType);
// Delete all the field relationships to other entities
daoCollection.fieldRelationshipDAO().deleteAllByPrefix(entityInterface.getFullyQualifiedName());
daoCollection
.fieldRelationshipDAO()
.deleteAllByPrefix(FullyQualifiedName.buildHash(entityInterface.getFullyQualifiedName()));
// Delete all the extensions of entity
daoCollection.entityExtensionDAO().deleteAll(id);
@ -883,10 +891,10 @@ public abstract class EntityRepository<T extends EntityInterface> {
entity.setTags(null);
if (update) {
dao.update(entity.getId(), JsonUtils.pojoToJson(entity));
dao.update(entity.getId(), getFullyQualifiedNameHash(entity), JsonUtils.pojoToJson(entity));
LOG.info("Updated {}:{}:{}", entityType, entity.getId(), entity.getFullyQualifiedName());
} else {
dao.insert(entity);
dao.insert(entity, getFullyQualifiedNameHash(entity));
LOG.info("Created {}:{}:{}", entityType, entity.getId(), entity.getFullyQualifiedName());
}
@ -895,6 +903,82 @@ public abstract class EntityRepository<T extends EntityInterface> {
entity.setTags(tags);
}
protected void storeTimeSeries(
String fullyQualifiedName, String extension, String jsonSchema, String entityJson, Long timestamp, boolean update)
throws JsonProcessingException {
String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName);
if (update) {
daoCollection.entityExtensionTimeSeriesDao().update(fqnHash, extension, entityJson, timestamp);
} else {
daoCollection.entityExtensionTimeSeriesDao().insert(fqnHash, extension, jsonSchema, entityJson);
}
}
protected void storeTimeSeriesWithOperation(
String fullyQualifiedName,
String extension,
String jsonSchema,
String entityJson,
Long timestamp,
String operation,
boolean update)
throws JsonProcessingException {
String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName);
if (update) {
daoCollection
.entityExtensionTimeSeriesDao()
.updateExtensionByOperation(fqnHash, extension, entityJson, timestamp, operation);
} else {
daoCollection.entityExtensionTimeSeriesDao().insert(fqnHash, extension, jsonSchema, entityJson);
}
}
public String getExtensionAtTimestamp(String fullyQualifiedName, String extension, Long timestamp) {
String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName);
return daoCollection.entityExtensionTimeSeriesDao().getExtensionAtTimestamp(fqnHash, extension, timestamp);
}
public String getExtensionAtTimestampWithOperation(
String fullyQualifiedName, String extension, Long timestamp, String operation) {
String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName);
return daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestampWithOperation(fqnHash, extension, timestamp, operation);
}
public String getLatestExtensionFromTimeseries(String fullyQualifiedName, String extension) {
String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName);
return daoCollection.entityExtensionTimeSeriesDao().getLatestExtension(fqnHash, extension);
}
public List<String> getResultsFromAndToTimestamps(
String fullyQualifiedName, String extension, Long startTs, Long endTs) {
return getResultsFromAndToTimestamps(
fullyQualifiedName, extension, startTs, endTs, CollectionDAO.EntityExtensionTimeSeriesDAO.OrderBy.DESC);
}
public List<String> getResultsFromAndToTimestamps(
String fullyQualifiedName,
String extension,
Long startTs,
Long endTs,
CollectionDAO.EntityExtensionTimeSeriesDAO.OrderBy orderBy) {
String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName);
return daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestampsByOrder(fqnHash, extension, startTs, endTs, orderBy);
}
public void deleteExtensionAtTimestamp(String fullyQualifiedName, String extension, Long timestamp) {
String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName);
daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqnHash, extension, timestamp);
}
public void deleteExtensionBeforeTimestamp(String fullyQualifiedName, String extension, Long timestamp) {
String fqnHash = FullyQualifiedName.buildHash(fullyQualifiedName);
daoCollection.entityExtensionTimeSeriesDao().deleteBeforeTimestamp(fqnHash, extension, timestamp);
}
public void validateExtension(T entity) {
if (entity.getExtension() == null) {
return;
@ -1019,7 +1103,8 @@ public abstract class EntityRepository<T extends EntityInterface> {
.applyTag(
tagLabel.getSource().ordinal(),
tagLabel.getTagFQN(),
targetFQN,
FullyQualifiedName.buildHash(tagLabel.getTagFQN()),
FullyQualifiedName.buildHash(targetFQN),
tagLabel.getLabelType().ordinal(),
tagLabel.getState().ordinal());
}
@ -1029,7 +1114,7 @@ public abstract class EntityRepository<T extends EntityInterface> {
Map<String, TagLabel> map = new HashMap<>();
for (TagLabel tagLabel : listOrEmpty(tagLabels)) {
// When two tags have the same parent that is mutuallyExclusive, then throw an error
String parentFqn = FullyQualifiedName.getParent(tagLabel.getTagFQN());
String parentFqn = FullyQualifiedName.getParentFQN(tagLabel.getTagFQN());
TagLabel stored = map.put(parentFqn, tagLabel);
if (stored != null && TagLabelCache.getInstance().mutuallyExclusive(tagLabel)) {
throw new IllegalArgumentException(CatalogExceptionMessage.mutuallyExclusiveLabels(tagLabel, stored));
@ -1503,6 +1588,9 @@ public abstract class EntityRepository<T extends EntityInterface> {
return; // Nothing to update
}
// Remove current entity tags in the database. It will be added back later from the merged tag list.
daoCollection.tagUsageDAO().deleteTagsByTarget(FullyQualifiedName.buildHash(fqn));
if (operation.isPut()) {
// PUT operation merges tags in the request with what already exists
EntityUtil.mergeTags(updatedTags, origTags);
@ -1848,7 +1936,10 @@ public abstract class EntityRepository<T extends EntityInterface> {
// Delete tags related to deleted columns
deletedColumns.forEach(
deleted -> daoCollection.tagUsageDAO().deleteTagsByTarget(deleted.getFullyQualifiedName()));
deleted ->
daoCollection
.tagUsageDAO()
.deleteTagsByTarget(FullyQualifiedName.buildHash(deleted.getFullyQualifiedName())));
// Add tags related to newly added columns
for (Column added : addedColumns) {

View File

@ -69,6 +69,7 @@ import org.openmetadata.schema.type.TaskDetails;
import org.openmetadata.schema.type.TaskStatus;
import org.openmetadata.schema.type.TaskType;
import org.openmetadata.schema.type.ThreadType;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.ResourceRegistry;
import org.openmetadata.service.exception.EntityNotFoundException;
@ -82,6 +83,7 @@ import org.openmetadata.service.resources.feeds.MessageParser.EntityLink;
import org.openmetadata.service.security.Authorizer;
import org.openmetadata.service.security.policyevaluator.SubjectCache;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.FullyQualifiedName;
import org.openmetadata.service.util.JsonUtils;
import org.openmetadata.service.util.RestUtil;
import org.openmetadata.service.util.RestUtil.DeleteResponse;
@ -125,7 +127,7 @@ public class FeedRepository {
thread.withEntityId(aboutEntity.getId()); // Add entity id to thread
EntityReference entityOwner = aboutEntity.getOwner();
// Validate user creating the thread
// Validate user creating thread
User createdByUser = SubjectCache.getInstance().getUser(thread.getCreatedBy());
if (thread.getType() == ThreadType.Task) {
@ -154,8 +156,10 @@ public class FeedRepository {
// Add field relationship for data asset - Thread -- isAbout ---> entity/entityField
dao.fieldRelationshipDAO()
.insert(
thread.getId().toString(), // from FQN
about.getFullyQualifiedFieldValue(), // to FQN
FullyQualifiedName.buildHash(thread.getId().toString()), // from FQN
FullyQualifiedName.buildHash(about.getFullyQualifiedFieldValue()), // to FQN,
thread.getId().toString(),
about.getFullyQualifiedFieldValue(),
Entity.THREAD, // From type
about.getFullyQualifiedFieldType(), // to Type
IS_ABOUT.ordinal(),
@ -187,7 +191,7 @@ public class FeedRepository {
public PatchResponse<Thread> closeTask(UriInfo uriInfo, Thread thread, String user, CloseTask closeTask)
throws IOException {
// Update the attributes
closeTask(thread, user, closeTask.getComment());
closeTask(thread, EntityInterfaceUtil.quoteName(user), closeTask.getComment());
Thread updatedHref = FeedResource.addHref(uriInfo, thread);
return new PatchResponse<>(Status.OK, updatedHref, RestUtil.ENTITY_UPDATED);
}
@ -281,6 +285,8 @@ public class FeedRepository {
mention ->
dao.fieldRelationshipDAO()
.insert(
FullyQualifiedName.buildHash(mention.getFullyQualifiedFieldValue()),
FullyQualifiedName.buildHash(thread.getId().toString()),
mention.getFullyQualifiedFieldValue(),
thread.getId().toString(),
mention.getFullyQualifiedFieldType(),
@ -344,7 +350,7 @@ public class FeedRepository {
dao.relationshipDAO().deleteAll(id, Entity.THREAD);
// Delete all the field relationships to other entities
dao.fieldRelationshipDAO().deleteAllByPrefix(id);
dao.fieldRelationshipDAO().deleteAllByPrefix(FullyQualifiedName.buildHash(id));
// Finally, delete the entity
dao.feedDAO().delete(id);
@ -353,6 +359,10 @@ public class FeedRepository {
return new DeleteResponse<>(thread, RestUtil.ENTITY_DELETED);
}
public EntityReference getOwnerReference(String username) {
return dao.userDAO().findEntityByName(EntityInterfaceUtil.quoteName(username)).getEntityReference();
}
@Transaction
public ThreadCount getThreadsCount(FeedFilter filter, String link) throws IOException {
List<List<String>> result;
@ -385,7 +395,7 @@ public class FeedRepository {
result =
dao.feedDAO()
.listCountByEntityLink(
entityLink.getFullyQualifiedFieldValue(),
FullyQualifiedName.buildHash(entityLink.getFullyQualifiedFieldValue()),
Entity.THREAD,
entityLink.getFullyQualifiedFieldType(),
IS_ABOUT.ordinal(),
@ -437,15 +447,17 @@ public class FeedRepository {
total = filteredThreads.getTotalCount();
} else {
// Only data assets are added as about
String userName = userId != null ? SubjectCache.getInstance().getUserById(userId).getName() : null;
List<String> teamNames = getTeamNames(userId);
List<String> jsons;
jsons =
User user = userId != null ? SubjectCache.getInstance().getUserById(userId) : null;
List<String> teamNameHash = getTeamNames(user);
String userNameHash = getUserNameHash(user);
List<String> jsons =
dao.feedDAO()
.listThreadsByEntityLink(filter, entityLink, limit + 1, IS_ABOUT.ordinal(), userName, teamNames);
.listThreadsByEntityLink(
filter, entityLink, limit + 1, IS_ABOUT.ordinal(), userNameHash, teamNameHash);
threads = JsonUtils.readObjects(jsons, Thread.class);
total =
dao.feedDAO().listCountThreadsByEntityLink(filter, entityLink, IS_ABOUT.ordinal(), userName, teamNames);
dao.feedDAO()
.listCountThreadsByEntityLink(filter, entityLink, IS_ABOUT.ordinal(), userNameHash, teamNameHash);
}
} else {
// userId filter present
@ -501,7 +513,15 @@ public class FeedRepository {
// Multiple reactions by the same user on same thread or post is handled by
// field relationship table constraint (primary key)
dao.fieldRelationshipDAO()
.insert(user, thread.getId().toString(), Entity.USER, Entity.THREAD, Relationship.REACTED_TO.ordinal(), null);
.insert(
FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(user)),
FullyQualifiedName.buildHash(thread.getId().toString()),
user,
thread.getId().toString(),
Entity.USER,
Entity.THREAD,
Relationship.REACTED_TO.ordinal(),
null);
}
@Transaction
@ -818,6 +838,27 @@ public class FeedRepository {
return new FilteredThreads(threads, totalCount);
}
/** Returns the threads where the user or the team they belong to were mentioned by other users with @mention. */
private FilteredThreads getThreadsByMentions(FeedFilter filter, String userId, int limit) throws IOException {
User user = SubjectCache.getInstance().getUserById(userId);
String userNameHash = getUserNameHash(user);
List<String> teamNamesHash = getTeamNames(user);
// Return the threads where the user or team was mentioned
List<String> jsons =
dao.feedDAO()
.listThreadsByMentions(
userNameHash, teamNamesHash, limit, Relationship.MENTIONED_IN.ordinal(), filter.getCondition());
List<Thread> threads = JsonUtils.readObjects(jsons, Thread.class);
int totalCount =
dao.feedDAO()
.listCountThreadsByMentions(
userNameHash, teamNamesHash, Relationship.MENTIONED_IN.ordinal(), filter.getCondition(false));
return new FilteredThreads(threads, totalCount);
}
/** Get a list of team ids that the given user is a part of. */
private List<String> getTeamIds(String userId) {
List<String> teamIds = null;
@ -828,40 +869,6 @@ public class FeedRepository {
return nullOrEmpty(teamIds) ? List.of(StringUtils.EMPTY) : teamIds;
}
/** Get a list of team names that the given user is a part of. */
private List<String> getTeamNames(String userId) {
List<String> teamNames = null;
if (userId != null) {
User user = SubjectCache.getInstance().getUserById(userId);
teamNames = listOrEmpty(user.getTeams()).stream().map(EntityReference::getName).collect(Collectors.toList());
}
return nullOrEmpty(teamNames) ? List.of(StringUtils.EMPTY) : teamNames;
}
/** Returns the threads where the user or the team they belong to were mentioned by other users with @mention. */
private FilteredThreads getThreadsByMentions(FeedFilter filter, String userId, int limit) throws IOException {
List<EntityReference> teams =
populateEntityReferences(
dao.relationshipDAO().findFrom(userId, Entity.USER, Relationship.HAS.ordinal(), Entity.TEAM), Entity.TEAM);
List<String> teamNames = teams.stream().map(EntityReference::getName).collect(Collectors.toList());
if (teamNames.isEmpty()) {
teamNames = List.of(StringUtils.EMPTY);
}
User user = dao.userDAO().findEntityById(UUID.fromString(userId));
// Return the threads where the user or team was mentioned
List<String> jsons =
dao.feedDAO()
.listThreadsByMentions(
user.getName(), teamNames, limit, Relationship.MENTIONED_IN.ordinal(), filter.getCondition());
List<Thread> threads = JsonUtils.readObjects(jsons, Thread.class);
int totalCount =
dao.feedDAO()
.listCountThreadsByMentions(
user.getName(), teamNames, Relationship.MENTIONED_IN.ordinal(), filter.getCondition(false));
return new FilteredThreads(threads, totalCount);
}
/** Returns the threads that are associated with the entities followed by the user. */
private FilteredThreads getThreadsByFollows(FeedFilter filter, String userId, int limit) throws IOException {
List<String> teamIds = getTeamIds(userId);
@ -874,6 +881,28 @@ public class FeedRepository {
return new FilteredThreads(threads, totalCount);
}
/** Get a list of team names that the given user is a part of. */
private List<String> getTeamNames(User user) {
List<String> teamNames = null;
if (user != null) {
teamNames =
listOrEmpty(user.getTeams()).stream()
.map(x -> FullyQualifiedName.buildHash(x.getFullyQualifiedName()))
.collect(Collectors.toList());
}
return nullOrEmpty(teamNames) ? List.of(StringUtils.EMPTY) : teamNames;
}
private String getUserNameHash(User user) {
if (user != null) {
return FullyQualifiedName.buildHash(user.getFullyQualifiedName());
}
return null;
}
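
For orientation, here is a minimal sketch of what FullyQualifiedName.buildHash could look like given what this migration implies: the commit title says MD5, and the prefix queries below (see ListFilter) only work if each dot-separated FQN part is hashed independently and re-joined. Class and helper names are illustrative, not the project's actual code; the real splitter also has to honor quoted name parts.

import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.stream.Collectors;

final class FqnHashSketch {
  // Hash a single FQN part as a 32-char hex MD5 digest.
  static String md5Hex(String part) {
    try {
      MessageDigest md = MessageDigest.getInstance("MD5");
      byte[] digest = md.digest(part.getBytes(StandardCharsets.UTF_8));
      return String.format("%032x", new BigInteger(1, digest));
    } catch (NoSuchAlgorithmException e) {
      throw new IllegalStateException("MD5 unavailable", e);
    }
  }

  // Hash part-by-part and re-join, so a hashed prefix is still a string
  // prefix of every descendant's hash (naive split; quoting ignored).
  static String buildHash(String fqn) {
    return Arrays.stream(fqn.split("\\."))
        .map(FqnHashSketch::md5Hex)
        .collect(Collectors.joining("."));
  }
}

With that shape, getUserNameHash and getTeamNames above can feed hashed keys straight into listThreadsByMentions, which presumably matches them against hashed mention records.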
public static class FilteredThreads {
@Getter private final List<Thread> threads;
@Getter private final int totalCount;

View File

@ -18,6 +18,7 @@ package org.openmetadata.service.jdbi3;
import static org.openmetadata.common.utils.CommonUtil.listOrEmpty;
import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
import static org.openmetadata.csv.CsvUtil.FIELD_SEPARATOR;
import static org.openmetadata.csv.CsvUtil.addEntityReference;
import static org.openmetadata.csv.CsvUtil.addEntityReferences;
import static org.openmetadata.csv.CsvUtil.addField;
@ -104,7 +105,9 @@ public class GlossaryRepository extends EntityRepository<Glossary> {
}
private Integer getUsageCount(Glossary glossary) {
return daoCollection.tagUsageDAO().getTagCount(TagSource.GLOSSARY.ordinal(), glossary.getName());
return daoCollection
.tagUsageDAO()
.getTagCount(TagSource.GLOSSARY.ordinal(), FullyQualifiedName.buildHash(glossary.getName()));
}
private Integer getTermCount(Glossary glossary) {
@ -187,7 +190,7 @@ public class GlossaryRepository extends EntityRepository<Glossary> {
}
// Field 9 - reviewers
glossaryTerm.withReviewers(getEntityReferences(printer, csvRecord, 8, Entity.USER));
glossaryTerm.withReviewers(getUserOrTeamEntityReferences(printer, csvRecord, 8, Entity.USER));
if (!processRecord) {
return null;
}
@ -242,7 +245,7 @@ public class GlossaryRepository extends EntityRepository<Glossary> {
addEntityReferences(recordList, entity.getRelatedTerms());
addField(recordList, termReferencesToRecord(entity.getReferences()));
addTagLabels(recordList, entity.getTags());
addEntityReferences(recordList, entity.getReviewers());
addField(recordList, reviewerReferencesToRecord(entity.getReviewers()));
addOwner(recordList, entity.getOwner());
addField(recordList, entity.getStatus().value());
return recordList;
@ -253,7 +256,13 @@ public class GlossaryRepository extends EntityRepository<Glossary> {
? null
: list.stream()
.map(termReference -> termReference.getName() + CsvUtil.FIELD_SEPARATOR + termReference.getEndpoint())
.collect(Collectors.joining(";"));
.collect(Collectors.joining(FIELD_SEPARATOR));
}
private String reviewerReferencesToRecord(List<EntityReference> reviewers) {
return nullOrEmpty(reviewers)
? null
: reviewers.stream().map(EntityReference::getName).collect(Collectors.joining(FIELD_SEPARATOR));
}
}
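
getUsageCount now passes a hash instead of the raw glossary name. A hedged sketch of the DAO side, assuming a JDBI @SqlQuery keyed on a hashed column (table, column, and binding names here are illustrative):

import org.jdbi.v3.sqlobject.customizer.Bind;
import org.jdbi.v3.sqlobject.statement.SqlQuery;

interface TagUsageDAOSketch {
  // Counting by a fixed-width hash column lets the database use a compact
  // index instead of comparing long fullyQualifiedName strings.
  @SqlQuery("SELECT COUNT(*) FROM tag_usage WHERE source = :source AND tagFQNHash = :hash")
  int getTagCount(@Bind("source") int source, @Bind("hash") String tagFQNHash);
}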

View File

@ -77,7 +77,9 @@ public class GlossaryTermRepository extends EntityRepository<GlossaryTerm> {
}
private Integer getUsageCount(GlossaryTerm term) {
return daoCollection.tagUsageDAO().getTagCount(TagSource.GLOSSARY.ordinal(), term.getFullyQualifiedName());
return daoCollection
.tagUsageDAO()
.getTagCount(TagSource.GLOSSARY.ordinal(), FullyQualifiedName.buildHash(term.getFullyQualifiedName()));
}
private EntityReference getParent(GlossaryTerm entity) throws IOException {
@ -184,6 +186,11 @@ public class GlossaryTermRepository extends EntityRepository<GlossaryTerm> {
}
}
@Override
public String getFullyQualifiedNameHash(GlossaryTerm entity) {
return FullyQualifiedName.buildHash(entity.getFullyQualifiedName());
}
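
The same one-line override recurs across nearly every repository in this commit. A sketch of the hook as it presumably sits on the base class (the generic bound and placement are assumptions):

// Sketch of the assumed base-class hook; EntityInterface is the project's own type.
abstract class EntityRepositorySketch<T extends org.openmetadata.schema.EntityInterface> {
  // Returns the value written to the entity's fqnHash/nameHash column.
  public String getFullyQualifiedNameHash(T entity) {
    return FullyQualifiedName.buildHash(entity.getFullyQualifiedName());
  }
}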
protected EntityReference getGlossary(GlossaryTerm term) throws IOException {
return getFromEntityRef(term.getId(), Relationship.CONTAINS, GLOSSARY, true);
}
@ -200,7 +207,9 @@ public class GlossaryTermRepository extends EntityRepository<GlossaryTerm> {
@Override
protected void postDelete(GlossaryTerm entity) {
// Cleanup all the tag labels using this glossary term
daoCollection.tagUsageDAO().deleteTagLabels(TagSource.GLOSSARY.ordinal(), entity.getFullyQualifiedName());
daoCollection
.tagUsageDAO()
.deleteTagLabels(TagSource.GLOSSARY.ordinal(), FullyQualifiedName.buildHash(entity.getFullyQualifiedName()));
}
private void addGlossaryRelationship(GlossaryTerm term) {

View File

@ -69,7 +69,12 @@ public class IngestionPipelineRepository extends EntityRepository<IngestionPipel
@Override
public void setFullyQualifiedName(IngestionPipeline ingestionPipeline) {
ingestionPipeline.setFullyQualifiedName(
FullyQualifiedName.add(ingestionPipeline.getService().getName(), ingestionPipeline.getName()));
FullyQualifiedName.add(ingestionPipeline.getService().getFullyQualifiedName(), ingestionPipeline.getName()));
}
@Override
public String getFullyQualifiedNameHash(IngestionPipeline ingestionPipeline) {
return FullyQualifiedName.buildHash(ingestionPipeline.getFullyQualifiedName());
}
@Override
@ -90,7 +95,7 @@ public class IngestionPipelineRepository extends EntityRepository<IngestionPipel
daoCollection
.entityExtensionTimeSeriesDao()
.delete(ingestionPipeline.getFullyQualifiedName(), PIPELINE_STATUS_EXTENSION);
.delete(FullyQualifiedName.buildHash(ingestionPipeline.getFullyQualifiedName()), PIPELINE_STATUS_EXTENSION);
setFieldsInternal(ingestionPipeline, Fields.EMPTY_FIELDS);
return ingestionPipeline;
}
@ -169,7 +174,6 @@ public class IngestionPipelineRepository extends EntityRepository<IngestionPipel
throws IOException {
// Validate the request content
IngestionPipeline ingestionPipeline = dao.findEntityByName(fqn);
PipelineStatus storedPipelineStatus =
JsonUtils.readValue(
daoCollection
@ -193,7 +197,7 @@ public class IngestionPipelineRepository extends EntityRepository<IngestionPipel
daoCollection
.entityExtensionTimeSeriesDao()
.insert(
ingestionPipeline.getFullyQualifiedName(),
FullyQualifiedName.buildHash(ingestionPipeline.getFullyQualifiedName()),
PIPELINE_STATUS_EXTENSION,
PIPELINE_STATUS_JSON_SCHEMA,
JsonUtils.pojoToJson(pipelineStatus));
@ -211,10 +215,8 @@ public class IngestionPipelineRepository extends EntityRepository<IngestionPipel
IngestionPipeline ingestionPipeline = dao.findEntityByName(ingestionPipelineFQN);
List<PipelineStatus> pipelineStatusList =
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestampsByFQN(
ingestionPipeline.getFullyQualifiedName(), PIPELINE_STATUS_JSON_SCHEMA, startTs, endTs),
getResultsFromAndToTimestamps(
ingestionPipeline.getFullyQualifiedName(), PIPELINE_STATUS_EXTENSION, startTs, endTs),
PipelineStatus.class);
List<PipelineStatus> allPipelineStatusList = pipelineServiceClient.getQueuedPipelineStatus(ingestionPipeline);
allPipelineStatusList.addAll(pipelineStatusList);
@ -224,9 +226,7 @@ public class IngestionPipelineRepository extends EntityRepository<IngestionPipel
public PipelineStatus getLatestPipelineStatus(IngestionPipeline ingestionPipeline) throws IOException {
return JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getLatestExtensionByFQN(ingestionPipeline.getFullyQualifiedName(), PIPELINE_STATUS_JSON_SCHEMA),
getLatestExtensionFromTimeseries(ingestionPipeline.getFullyQualifiedName(), PIPELINE_STATUS_EXTENSION),
PipelineStatus.class);
}
@ -238,7 +238,7 @@ public class IngestionPipelineRepository extends EntityRepository<IngestionPipel
.getExtensionByKey(
RUN_ID_EXTENSION_KEY,
pipelineStatusRunId.toString(),
ingestionPipeline.getFullyQualifiedName(),
FullyQualifiedName.buildHash(ingestionPipeline.getFullyQualifiedName()),
PIPELINE_STATUS_EXTENSION),
PipelineStatus.class);
}
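
The repository now routes reads through shared time-series helpers instead of calling the DAO directly. A sketch of the read-side wrappers consistent with the call sites in this file (placement on the base repository is assumed; KpiRepository below implies a further overload taking an ordering parameter):

// Sketch: hash the FQN exactly once at the boundary, so callers never
// pass an unhashed key to entity_extension_time_series.
protected String getLatestExtensionFromTimeseries(String fqn, String extension) {
  return daoCollection
      .entityExtensionTimeSeriesDao()
      .getLatestExtension(FullyQualifiedName.buildHash(fqn), extension);
}

protected java.util.List<String> getResultsFromAndToTimestamps(
    String fqn, String extension, Long startTs, Long endTs) {
  return daoCollection
      .entityExtensionTimeSeriesDao()
      .listBetweenTimestamps(FullyQualifiedName.buildHash(fqn), extension, startTs, endTs);
}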

View File

@ -103,25 +103,15 @@ public class KpiRepository extends EntityRepository<Kpi> {
// Validate the request content
Kpi kpi = dao.findEntityByName(fqn);
KpiResult storedKpiResult =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestamp(kpi.getFullyQualifiedName(), KPI_RESULT_EXTENSION, kpiResult.getTimestamp()),
KpiResult.class);
if (storedKpiResult != null) {
daoCollection
.entityExtensionTimeSeriesDao()
.update(
kpi.getFullyQualifiedName(),
KPI_RESULT_EXTENSION,
JsonUtils.pojoToJson(kpiResult),
kpiResult.getTimestamp());
} else {
daoCollection
.entityExtensionTimeSeriesDao()
.insert(kpi.getFullyQualifiedName(), KPI_RESULT_EXTENSION, KPI_RESULT_FIELD, JsonUtils.pojoToJson(kpiResult));
}
String storedKpiResult =
getExtensionAtTimestamp(kpi.getFullyQualifiedName(), KPI_RESULT_EXTENSION, kpiResult.getTimestamp());
storeTimeSeries(
kpi.getFullyQualifiedName(),
KPI_RESULT_EXTENSION,
"kpiResult",
JsonUtils.pojoToJson(kpiResult),
kpiResult.getTimestamp(),
storedKpiResult != null);
ChangeDescription change = addKpiResultChangeDescription(kpi.getVersion(), kpiResult, storedKpiResult);
ChangeEvent changeEvent = getChangeEvent(withHref(uriInfo, kpi), change, entityType, kpi.getVersion());
@ -133,11 +123,9 @@ public class KpiRepository extends EntityRepository<Kpi> {
// Validate the request content
Kpi kpi = dao.findEntityByName(fqn);
KpiResult storedKpiResult =
JsonUtils.readValue(
daoCollection.entityExtensionTimeSeriesDao().getExtensionAtTimestamp(fqn, KPI_RESULT_EXTENSION, timestamp),
KpiResult.class);
JsonUtils.readValue(getExtensionAtTimestamp(fqn, KPI_RESULT_EXTENSION, timestamp), KpiResult.class);
if (storedKpiResult != null) {
daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqn, KPI_RESULT_EXTENSION, timestamp);
deleteExtensionAtTimestamp(fqn, KPI_RESULT_EXTENSION, timestamp);
kpi.setKpiResult(storedKpiResult);
ChangeDescription change = deleteKpiChangeDescription(kpi.getVersion(), storedKpiResult);
ChangeEvent changeEvent = getChangeEvent(kpi, change, entityType, kpi.getVersion());
@ -175,8 +163,7 @@ public class KpiRepository extends EntityRepository<Kpi> {
}
public KpiResult getKpiResult(String fqn) throws IOException {
return JsonUtils.readValue(
daoCollection.entityExtensionTimeSeriesDao().getLatestExtension(fqn, KPI_RESULT_EXTENSION), KpiResult.class);
return JsonUtils.readValue(getLatestExtensionFromTimeseries(fqn, KPI_RESULT_EXTENSION), KpiResult.class);
}
public ResultList<KpiResult> getKpiResults(
@ -185,10 +172,7 @@ public class KpiRepository extends EntityRepository<Kpi> {
List<KpiResult> kpiResults;
kpiResults =
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestampsByOrder(fqn, KPI_RESULT_EXTENSION, startTs, endTs, orderBy),
KpiResult.class);
getResultsFromAndToTimestamps(fqn, KPI_RESULT_EXTENSION, startTs, endTs, orderBy), KpiResult.class);
return new ResultList<>(kpiResults, String.valueOf(startTs), String.valueOf(endTs), kpiResults.size());
}
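
On the write side, the old read-then-update-or-insert blocks collapse into one helper. A sketch of storeTimeSeries matching the call sites in this commit (the exact placement and DAO signatures are assumed from the code above):

// 'update' is true when a record already exists at this timestamp
// (callers pass storedX != null): rewrite that row, else insert anew.
protected void storeTimeSeries(
    String fqn, String extension, String jsonSchema, String entityJson,
    Long timestamp, boolean update) {
  String fqnHash = FullyQualifiedName.buildHash(fqn);
  if (update) {
    daoCollection.entityExtensionTimeSeriesDao().update(fqnHash, extension, entityJson, timestamp);
  } else {
    daoCollection.entityExtensionTimeSeriesDao().insert(fqnHash, extension, jsonSchema, entityJson);
  }
}

getExtensionAtTimestamp and deleteExtensionAtTimestamp used alongside it would be thin hash-and-delegate wrappers of the same kind.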

View File

@ -10,6 +10,7 @@ import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.service.Entity;
import org.openmetadata.service.resources.databases.DatasourceConfig;
import org.openmetadata.service.util.FullyQualifiedName;
public class ListFilter {
@Getter private final Include include;
@ -155,10 +156,10 @@ public class ListFilter {
}
private String getFqnPrefixCondition(String tableName, String fqnPrefix) {
fqnPrefix = escape(fqnPrefix);
return tableName == null
? String.format("fullyQualifiedName LIKE '%s%s%%'", fqnPrefix, Entity.SEPARATOR)
: String.format("%s.fullyQualifiedName LIKE '%s%s%%'", tableName, fqnPrefix, Entity.SEPARATOR);
? String.format("fqnHash LIKE '%s%s%%'", FullyQualifiedName.buildHash(fqnPrefix), Entity.SEPARATOR)
: String.format(
"%s.fqnHash LIKE '%s%s%%'", tableName, FullyQualifiedName.buildHash(fqnPrefix), Entity.SEPARATOR);
}
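
This rewrite is only sound because buildHash is prefix-preserving (hashing part-by-part, as sketched earlier): the hash of an FQN prefix is a literal string prefix of every descendant's fqnHash. A worked example with hypothetical names:

String fqnPrefix = "mysql_svc.shop_db"; // hypothetical service.database prefix
String condition = String.format(
    "fqnHash LIKE '%s%s%%'", FullyQualifiedName.buildHash(fqnPrefix), Entity.SEPARATOR);
// => fqnHash LIKE '<md5(mysql_svc)>.<md5(shop_db)>.%'
// matching every schema/table/column whose hashed FQN starts with those parts.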
private String getWebhookTypePrefixCondition(String tableName, String typePrefix) {

View File

@ -45,6 +45,11 @@ public class MetricsRepository extends EntityRepository<Metrics> {
metrics.setFullyQualifiedName(FullyQualifiedName.add(metrics.getService().getName(), metrics.getName()));
}
@Override
public String getFullyQualifiedNameHash(Metrics metrics) {
return FullyQualifiedName.buildHash(metrics.getFullyQualifiedName());
}
@Override
public Metrics setFields(Metrics metrics, Fields fields) throws IOException {
metrics.setService(getContainer(metrics.getId())); // service is a default field

View File

@ -61,12 +61,18 @@ public class MlModelRepository extends EntityRepository<MlModel> {
@Override
public void setFullyQualifiedName(MlModel mlModel) {
mlModel.setFullyQualifiedName(FullyQualifiedName.add(mlModel.getService().getName(), mlModel.getName()));
mlModel.setFullyQualifiedName(
FullyQualifiedName.add(mlModel.getService().getFullyQualifiedName(), mlModel.getName()));
if (!nullOrEmpty(mlModel.getMlFeatures())) {
setMlFeatureFQN(mlModel.getFullyQualifiedName(), mlModel.getMlFeatures());
}
}
@Override
public String getFullyQualifiedNameHash(MlModel mlModel) {
return FullyQualifiedName.buildHash(mlModel.getFullyQualifiedName());
}
@Override
public MlModel setFields(MlModel mlModel, Fields fields) throws IOException {
mlModel.setService(getContainer(mlModel.getId()));

View File

@ -39,7 +39,7 @@ import org.openmetadata.schema.type.TaskDetails;
import org.openmetadata.service.Entity;
import org.openmetadata.service.exception.CatalogExceptionMessage;
import org.openmetadata.service.exception.EntityNotFoundException;
import org.openmetadata.service.resources.feeds.MessageParser.EntityLink;
import org.openmetadata.service.resources.feeds.MessageParser;
import org.openmetadata.service.resources.pipelines.PipelineResource;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.EntityUtil.Fields;
@ -66,12 +66,19 @@ public class PipelineRepository extends EntityRepository<Pipeline> {
@Override
public void setFullyQualifiedName(Pipeline pipeline) {
pipeline.setFullyQualifiedName(FullyQualifiedName.add(pipeline.getService().getName(), pipeline.getName()));
pipeline.setFullyQualifiedName(
FullyQualifiedName.add(pipeline.getService().getFullyQualifiedName(), pipeline.getName()));
setTaskFQN(pipeline.getFullyQualifiedName(), pipeline.getTasks());
}
@Override
public void update(TaskDetails task, EntityLink entityLink, String newValue, String user) throws IOException {
public String getFullyQualifiedNameHash(Pipeline pipeline) {
return FullyQualifiedName.buildHash(pipeline.getFullyQualifiedName());
}
@Override
public void update(TaskDetails task, MessageParser.EntityLink entityLink, String newValue, String user)
throws IOException {
if (entityLink.getFieldName().equals("tasks")) {
Pipeline pipeline = getByName(null, entityLink.getEntityFQN(), getFields("tasks,tags"), Include.ALL);
String oldJson = JsonUtils.pojoToJson(pipeline);
@ -110,9 +117,7 @@ public class PipelineRepository extends EntityRepository<Pipeline> {
private PipelineStatus getPipelineStatus(Pipeline pipeline) throws IOException {
return JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getLatestExtension(pipeline.getFullyQualifiedName(), PIPELINE_STATUS_EXTENSION),
getLatestExtensionFromTimeseries(pipeline.getFullyQualifiedName(), PIPELINE_STATUS_EXTENSION),
PipelineStatus.class);
}
@ -127,29 +132,16 @@ public class PipelineRepository extends EntityRepository<Pipeline> {
validateTask(pipeline, taskStatus.getName());
}
PipelineStatus storedPipelineStatus =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, pipelineStatus.getTimestamp()),
PipelineStatus.class);
if (storedPipelineStatus != null) {
daoCollection
.entityExtensionTimeSeriesDao()
.update(
pipeline.getFullyQualifiedName(),
PIPELINE_STATUS_EXTENSION,
JsonUtils.pojoToJson(pipelineStatus),
pipelineStatus.getTimestamp());
} else {
daoCollection
.entityExtensionTimeSeriesDao()
.insert(
pipeline.getFullyQualifiedName(),
PIPELINE_STATUS_EXTENSION,
"pipelineStatus",
JsonUtils.pojoToJson(pipelineStatus));
}
String storedPipelineStatus =
getExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, pipelineStatus.getTimestamp());
storeTimeSeries(
pipeline.getFullyQualifiedName(),
PIPELINE_STATUS_EXTENSION,
"pipelineStatus",
JsonUtils.pojoToJson(pipelineStatus),
pipelineStatus.getTimestamp(),
storedPipelineStatus != null);
return pipeline.withPipelineStatus(pipelineStatus);
}
@ -159,13 +151,9 @@ public class PipelineRepository extends EntityRepository<Pipeline> {
Pipeline pipeline = dao.findEntityByName(fqn);
pipeline.setService(getContainer(pipeline.getId()));
PipelineStatus storedPipelineStatus =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, timestamp),
PipelineStatus.class);
JsonUtils.readValue(getExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, timestamp), PipelineStatus.class);
if (storedPipelineStatus != null) {
daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, timestamp);
deleteExtensionAtTimestamp(fqn, PIPELINE_STATUS_EXTENSION, timestamp);
pipeline.setPipelineStatus(storedPipelineStatus);
return pipeline;
}
@ -177,11 +165,7 @@ public class PipelineRepository extends EntityRepository<Pipeline> {
List<PipelineStatus> pipelineStatuses;
pipelineStatuses =
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestamps(fqn, PIPELINE_STATUS_EXTENSION, starTs, endTs),
PipelineStatus.class);
getResultsFromAndToTimestamps(fqn, PIPELINE_STATUS_EXTENSION, starTs, endTs), PipelineStatus.class);
return new ResultList<>(pipelineStatuses, starTs.toString(), endTs.toString(), pipelineStatuses.size());
}

View File

@ -22,7 +22,6 @@ import org.openmetadata.schema.type.Relationship;
import org.openmetadata.service.Entity;
import org.openmetadata.service.resources.query.QueryResource;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.QueryUtil;
import org.openmetadata.service.util.RestUtil;
public class QueryRepository extends EntityRepository<Query> {
@ -74,7 +73,7 @@ public class QueryRepository extends EntityRepository<Query> {
@SneakyThrows
public void prepare(Query entity) {
if (CommonUtil.nullOrEmpty(entity.getName())) {
String checkSum = QueryUtil.getCheckSum(entity.getQuery());
String checkSum = EntityUtil.hash(entity.getQuery());
entity.setChecksum(checkSum);
entity.setName(checkSum);
}
@ -185,7 +184,7 @@ public class QueryRepository extends EntityRepository<Query> {
"users", USER, original.getUsers(), updated.getUsers(), Relationship.USES, Entity.QUERY, original.getId());
if (operation.isPatch() && !original.getQuery().equals(updated.getQuery())) {
recordChange("query", original.getQuery(), updated.getQuery());
String checkSum = QueryUtil.getCheckSum(updated.getQuery());
String checkSum = EntityUtil.hash(updated.getQuery());
recordChange("name", original.getName(), checkSum);
recordChange("checkSum", original.getChecksum(), checkSum);
}
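
Queries without an explicit name fall back to a checksum of the SQL text, now computed by EntityUtil.hash instead of QueryUtil. Assuming EntityUtil.hash is a plain MD5 hex digest of the whole string, the rename-on-edit logic above stays deterministic; a tiny illustration with hypothetical inputs:

// Editing the SQL changes the checksum, so both the derived name and the
// checkSum field are recorded as changes together.
String before = EntityUtil.hash("SELECT * FROM orders");
String after = EntityUtil.hash("SELECT * FROM orders WHERE id = 1");
assert !before.equals(after);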

View File

@ -7,6 +7,7 @@ import javax.ws.rs.core.Response;
import org.jdbi.v3.sqlobject.transaction.Transaction;
import org.openmetadata.schema.analytics.ReportData;
import org.openmetadata.schema.analytics.ReportData.ReportDataType;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.JsonUtils;
import org.openmetadata.service.util.ResultList;
@ -22,11 +23,10 @@ public class ReportDataRepository {
@Transaction
public Response addReportData(ReportData reportData) throws IOException {
reportData.setId(UUID.randomUUID());
daoCollection
.entityExtensionTimeSeriesDao()
.insert(
reportData.getReportDataType().value(),
EntityUtil.hash(reportData.getReportDataType().value()),
REPORT_DATA_EXTENSION,
"reportData",
JsonUtils.pojoToJson(reportData));
@ -41,7 +41,7 @@ public class ReportDataRepository {
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestamps(reportDataType.value(), REPORT_DATA_EXTENSION, startTs, endTs),
.listBetweenTimestamps(EntityUtil.hash(reportDataType.value()), REPORT_DATA_EXTENSION, startTs, endTs),
ReportData.class);
return new ResultList<>(reportData, String.valueOf(startTs), String.valueOf(endTs), reportData.size());
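
Report data is not attached to any entity FQN, so the time-series key is just the hash of the report-type string; writer and reader must hash identically or lookups silently return nothing. For illustration (the enum constant name is an assumption):

String key = EntityUtil.hash(ReportDataType.ENTITY_REPORT_DATA.value());
// The insert above and the listBetweenTimestamps read both use this key.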

View File

@ -24,6 +24,7 @@ import org.openmetadata.schema.entity.services.connections.TestConnectionResult;
import org.openmetadata.service.secrets.SecretsManager;
import org.openmetadata.service.secrets.SecretsManagerFactory;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.FullyQualifiedName;
import org.openmetadata.service.util.JsonUtils;
public abstract class ServiceEntityRepository<
@ -95,7 +96,7 @@ public abstract class ServiceEntityRepository<
public T addTestConnectionResult(UUID serviceId, TestConnectionResult testConnectionResult) throws IOException {
T service = dao.findEntityById(serviceId);
service.setTestConnectionResult(testConnectionResult);
dao.update(serviceId, JsonUtils.pojoToJson(service));
dao.update(serviceId, FullyQualifiedName.buildHash(service.getFullyQualifiedName()), JsonUtils.pojoToJson(service));
return service;
}

View File

@ -168,6 +168,11 @@ public class TableRepository extends EntityRepository<Table> {
ColumnUtil.setColumnFQN(table.getFullyQualifiedName(), table.getColumns());
}
@Override
public String getFullyQualifiedNameHash(Table entity) {
return FullyQualifiedName.buildHash(entity.getFullyQualifiedName());
}
@Transaction
public Table addJoins(UUID tableId, TableJoins joins) throws IOException {
// Validate the request content
@ -341,32 +346,18 @@ public class TableRepository extends EntityRepository<Table> {
public Table addTableProfileData(UUID tableId, CreateTableProfile createTableProfile) throws IOException {
// Validate the request content
Table table = dao.findEntityById(tableId);
TableProfile storedTableProfile =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestamp(
table.getFullyQualifiedName(),
TABLE_PROFILE_EXTENSION,
createTableProfile.getTableProfile().getTimestamp()),
TableProfile.class);
if (storedTableProfile != null) {
daoCollection
.entityExtensionTimeSeriesDao()
.update(
table.getFullyQualifiedName(),
TABLE_PROFILE_EXTENSION,
JsonUtils.pojoToJson(createTableProfile.getTableProfile()),
createTableProfile.getTableProfile().getTimestamp());
} else {
daoCollection
.entityExtensionTimeSeriesDao()
.insert(
table.getFullyQualifiedName(),
TABLE_PROFILE_EXTENSION,
"tableProfile",
JsonUtils.pojoToJson(createTableProfile.getTableProfile()));
}
String storedTableProfile =
getExtensionAtTimestamp(
table.getFullyQualifiedName(),
TABLE_PROFILE_EXTENSION,
createTableProfile.getTableProfile().getTimestamp());
storeTimeSeries(
table.getFullyQualifiedName(),
TABLE_PROFILE_EXTENSION,
"tableProfile",
JsonUtils.pojoToJson(createTableProfile.getTableProfile()),
createTableProfile.getTableProfile().getTimestamp(),
storedTableProfile != null);
for (ColumnProfile columnProfile : createTableProfile.getColumnProfile()) {
// Validate all the columns
@ -374,64 +365,35 @@ public class TableRepository extends EntityRepository<Table> {
if (column == null) {
throw new IllegalArgumentException("Invalid column name " + columnProfile.getName());
}
ColumnProfile storedColumnProfile =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestamp(
column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION, columnProfile.getTimestamp()),
ColumnProfile.class);
if (storedColumnProfile != null) {
daoCollection
.entityExtensionTimeSeriesDao()
.update(
column.getFullyQualifiedName(),
TABLE_COLUMN_PROFILE_EXTENSION,
JsonUtils.pojoToJson(columnProfile),
storedColumnProfile.getTimestamp());
} else {
daoCollection
.entityExtensionTimeSeriesDao()
.insert(
column.getFullyQualifiedName(),
TABLE_COLUMN_PROFILE_EXTENSION,
"columnProfile",
JsonUtils.pojoToJson(columnProfile));
}
String storedColumnProfile =
getExtensionAtTimestamp(
column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION, columnProfile.getTimestamp());
storeTimeSeries(
column.getFullyQualifiedName(),
TABLE_COLUMN_PROFILE_EXTENSION,
"columnProfile",
JsonUtils.pojoToJson(columnProfile),
columnProfile.getTimestamp(),
storedColumnProfile != null);
}
List<SystemProfile> systemProfiles = createTableProfile.getSystemProfile();
if (systemProfiles != null && !systemProfiles.isEmpty()) {
for (SystemProfile systemProfile : createTableProfile.getSystemProfile()) {
SystemProfile storedSystemProfile =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestampWithOperation(
table.getFullyQualifiedName(),
SYSTEM_PROFILE_EXTENSION,
systemProfile.getTimestamp(),
systemProfile.getOperation().value()),
SystemProfile.class);
if (storedSystemProfile != null) {
daoCollection
.entityExtensionTimeSeriesDao()
.updateExtensionByOperation(
table.getFullyQualifiedName(),
SYSTEM_PROFILE_EXTENSION,
JsonUtils.pojoToJson(systemProfile),
storedSystemProfile.getTimestamp(),
storedSystemProfile.getOperation().value());
} else {
daoCollection
.entityExtensionTimeSeriesDao()
.insert(
table.getFullyQualifiedName(),
SYSTEM_PROFILE_EXTENSION,
"systemProfile",
JsonUtils.pojoToJson(systemProfile));
}
String storedSystemProfile =
getExtensionAtTimestampWithOperation(
table.getFullyQualifiedName(),
SYSTEM_PROFILE_EXTENSION,
systemProfile.getTimestamp(),
systemProfile.getOperation().value());
storeTimeSeriesWithOperation(
table.getFullyQualifiedName(),
SYSTEM_PROFILE_EXTENSION,
"systemProfile",
JsonUtils.pojoToJson(systemProfile),
systemProfile.getTimestamp(),
systemProfile.getOperation().value(),
storedSystemProfile != null);
}
}
@ -452,13 +414,11 @@ public class TableRepository extends EntityRepository<Table> {
}
TableProfile storedTableProfile =
JsonUtils.readValue(
daoCollection.entityExtensionTimeSeriesDao().getExtensionAtTimestamp(fqn, extension, timestamp),
TableProfile.class);
JsonUtils.readValue(getExtensionAtTimestamp(fqn, extension, timestamp), TableProfile.class);
if (storedTableProfile == null) {
throw new EntityNotFoundException(String.format("Failed to find table profile for %s at %s", fqn, timestamp));
}
daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqn, extension, timestamp);
deleteExtensionAtTimestamp(fqn, extension, timestamp);
}
@Transaction
@ -466,10 +426,7 @@ public class TableRepository extends EntityRepository<Table> {
List<TableProfile> tableProfiles;
tableProfiles =
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestamps(fqn, TABLE_PROFILE_EXTENSION, startTs, endTs),
TableProfile.class);
getResultsFromAndToTimestamps(fqn, TABLE_PROFILE_EXTENSION, startTs, endTs), TableProfile.class);
return new ResultList<>(tableProfiles, startTs.toString(), endTs.toString(), tableProfiles.size());
}
@ -478,10 +435,7 @@ public class TableRepository extends EntityRepository<Table> {
List<ColumnProfile> columnProfiles;
columnProfiles =
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestamps(fqn, TABLE_COLUMN_PROFILE_EXTENSION, startTs, endTs),
ColumnProfile.class);
getResultsFromAndToTimestamps(fqn, TABLE_COLUMN_PROFILE_EXTENSION, startTs, endTs), ColumnProfile.class);
return new ResultList<>(columnProfiles, startTs.toString(), endTs.toString(), columnProfiles.size());
}
@ -490,10 +444,7 @@ public class TableRepository extends EntityRepository<Table> {
List<SystemProfile> systemProfiles;
systemProfiles =
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestamps(fqn, SYSTEM_PROFILE_EXTENSION, startTs, endTs),
SystemProfile.class);
getResultsFromAndToTimestamps(fqn, SYSTEM_PROFILE_EXTENSION, startTs, endTs), SystemProfile.class);
return new ResultList<>(systemProfiles, startTs.toString(), endTs.toString(), systemProfiles.size());
}
@ -501,9 +452,7 @@ public class TableRepository extends EntityRepository<Table> {
for (Column column : columnList) {
ColumnProfile columnProfile =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getLatestExtension(column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION),
getLatestExtensionFromTimeseries(column.getFullyQualifiedName(), TABLE_COLUMN_PROFILE_EXTENSION),
ColumnProfile.class);
column.setProfile(columnProfile);
if (column.getChildren() != null) {
@ -517,9 +466,7 @@ public class TableRepository extends EntityRepository<Table> {
Table table = dao.findEntityByName(fqn);
TableProfile tableProfile =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getLatestExtension(table.getFullyQualifiedName(), TABLE_PROFILE_EXTENSION),
getLatestExtensionFromTimeseries(table.getFullyQualifiedName(), TABLE_PROFILE_EXTENSION),
TableProfile.class);
table.setProfile(tableProfile);
setColumnProfile(table.getColumns());
@ -629,11 +576,9 @@ public class TableRepository extends EntityRepository<Table> {
stored.setTags(modelColumn.getTags());
}
applyTags(table.getColumns());
dao.update(table.getId(), JsonUtils.pojoToJson(table));
dao.update(table.getId(), FullyQualifiedName.buildHash(table.getFullyQualifiedName()), JsonUtils.pojoToJson(table));
setFieldsInternal(table, new Fields(List.of(FIELD_OWNER), FIELD_OWNER));
setFieldsInternal(table, new Fields(List.of(FIELD_TAGS), FIELD_TAGS));
return table;
}
@ -843,8 +788,8 @@ public class TableRepository extends EntityRepository<Table> {
daoCollection
.fieldRelationshipDAO()
.find(
fromEntityFQN,
toEntityFQN,
FullyQualifiedName.buildHash(fromEntityFQN),
FullyQualifiedName.buildHash(toEntityFQN),
entityRelationType,
entityRelationType,
Relationship.JOINED_WITH.ordinal()))
@ -858,6 +803,8 @@ public class TableRepository extends EntityRepository<Table> {
daoCollection
.fieldRelationshipDAO()
.upsert(
FullyQualifiedName.buildHash(fromEntityFQN),
FullyQualifiedName.buildHash(toEntityFQN),
fromEntityFQN,
toEntityFQN,
entityRelationType,
@ -913,7 +860,7 @@ public class TableRepository extends EntityRepository<Table> {
List<Pair<String, List<DailyCount>>> entityRelations =
daoCollection.fieldRelationshipDAO()
.listBidirectional(
table.getFullyQualifiedName(),
FullyQualifiedName.buildHash(table.getFullyQualifiedName()),
FIELD_RELATION_TABLE_TYPE,
FIELD_RELATION_TABLE_TYPE,
Relationship.JOINED_WITH.ordinal())
@ -935,7 +882,7 @@ public class TableRepository extends EntityRepository<Table> {
List<Triple<String, String, List<DailyCount>>> entityRelations =
daoCollection.fieldRelationshipDAO()
.listBidirectionalByPrefix(
table.getFullyQualifiedName(),
FullyQualifiedName.buildHash(table.getFullyQualifiedName()),
FIELD_RELATION_COLUMN_TYPE,
FIELD_RELATION_COLUMN_TYPE,
Relationship.JOINED_WITH.ordinal())

View File

@ -85,6 +85,11 @@ public class TagRepository extends EntityRepository<Tag> {
}
}
@Override
public String getFullyQualifiedNameHash(Tag tag) {
return FullyQualifiedName.buildHash(tag.getFullyQualifiedName());
}
@Override
public EntityRepository<Tag>.EntityUpdater getUpdater(Tag original, Tag updated, Operation operation) {
return new TagUpdater(original, updated, operation);
@ -104,7 +109,9 @@ public class TagRepository extends EntityRepository<Tag> {
}
private Integer getUsageCount(Tag tag) {
return daoCollection.tagUsageDAO().getTagCount(TagSource.CLASSIFICATION.ordinal(), tag.getFullyQualifiedName());
return daoCollection
.tagUsageDAO()
.getTagCount(TagSource.CLASSIFICATION.ordinal(), FullyQualifiedName.buildHash(tag.getFullyQualifiedName()));
}
private List<EntityReference> getChildren(Tag entity) throws IOException {

View File

@ -17,6 +17,7 @@ import static org.openmetadata.common.utils.CommonUtil.listOrEmpty;
import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
import static org.openmetadata.csv.CsvUtil.addEntityReferences;
import static org.openmetadata.csv.CsvUtil.addField;
import static org.openmetadata.csv.CsvUtil.addUserOwner;
import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.BUSINESS_UNIT;
import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.DEPARTMENT;
import static org.openmetadata.schema.api.teams.CreateTeam.TeamType.DIVISION;
@ -49,10 +50,10 @@ import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
import javax.ws.rs.core.UriInfo;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.csv.CSVPrinter;
import org.apache.commons.csv.CSVRecord;
import org.openmetadata.csv.CsvUtil;
import org.openmetadata.csv.EntityCsv;
import org.openmetadata.schema.api.teams.CreateTeam.TeamType;
import org.openmetadata.schema.entity.teams.Team;
@ -64,6 +65,7 @@ import org.openmetadata.schema.type.csv.CsvDocumentation;
import org.openmetadata.schema.type.csv.CsvErrorType;
import org.openmetadata.schema.type.csv.CsvHeader;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.exception.EntityNotFoundException;
import org.openmetadata.service.jdbi3.CollectionDAO.EntityRelationshipRecord;
@ -104,6 +106,11 @@ public class TeamRepository extends EntityRepository<Team> {
return team;
}
@Override
public Team getByName(UriInfo uriInfo, String name, Fields fields) throws IOException {
return super.getByName(uriInfo, EntityInterfaceUtil.quoteName(name), fields);
}
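
Name lookups for teams and users now pass through EntityInterfaceUtil.quoteName. A hedged sketch of its assumed behavior: a name containing the FQN separator is wrapped in quotes so it parses as one FQN part rather than two levels:

// Sketch only; the real implementation presumably also handles names that
// are already quoted or contain embedded quotes.
public static String quoteName(String name) {
  if (name != null && name.contains(".") && !name.startsWith("\"")) {
    return "\"" + name + "\"";
  }
  return name;
}
// quoteName("john.doe") -> "\"john.doe\""; quoteName("sales") -> "sales"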
@Override
public void restorePatchAttributes(Team original, Team updated) {
// Patch can't make changes to following fields. Ignore the changes
@ -558,7 +565,7 @@ public class TeamRepository extends EntityRepository<Team> {
}
// Field 6 - Owner
importedTeam.setOwner(getEntityReference(printer, csvRecord, 5, Entity.USER));
importedTeam.setOwner(getOwnerAsUser(printer, csvRecord, 5));
if (!processRecord) {
return null;
}
@ -585,7 +592,7 @@ public class TeamRepository extends EntityRepository<Team> {
addField(recordList, entity.getDescription());
addField(recordList, entity.getTeamType().value());
addEntityReferences(recordList, entity.getParents());
CsvUtil.addEntityReference(recordList, entity.getOwner());
addUserOwner(recordList, entity.getOwner());
addField(recordList, entity.getIsJoinable());
addEntityReferences(recordList, entity.getDefaultRoles());
addEntityReferences(recordList, entity.getPolicies());
@ -593,7 +600,7 @@ public class TeamRepository extends EntityRepository<Team> {
}
private void getParents(CSVPrinter printer, CSVRecord csvRecord, Team importedTeam) throws IOException {
List<EntityReference> parentRefs = getEntityReferences(printer, csvRecord, 4, Entity.TEAM);
List<EntityReference> parentRefs = getUserOrTeamEntityReferences(printer, csvRecord, 4, Entity.TEAM);
// Validate that the team being created is under the hierarchy of the team the CSV is being imported into
for (EntityReference parentRef : listOrEmpty(parentRefs)) {

View File

@ -35,6 +35,7 @@ import org.openmetadata.schema.type.EventType;
import org.openmetadata.schema.type.FieldChange;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.exception.EntityNotFoundException;
import org.openmetadata.service.resources.feeds.MessageParser.EntityLink;
@ -71,7 +72,7 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp),
.getExtensionAtTimestamp(FullyQualifiedName.buildHash(fqn), TESTCASE_RESULT_EXTENSION, timestamp),
TestCaseResult.class);
TestCaseResult updated = JsonUtils.applyPatch(original, patch, TestCaseResult.class);
@ -81,7 +82,8 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
updated.getTestCaseFailureStatus().setUpdatedAt(System.currentTimeMillis());
daoCollection
.entityExtensionTimeSeriesDao()
.update(fqn, TESTCASE_RESULT_EXTENSION, JsonUtils.pojoToJson(updated), timestamp);
.update(
FullyQualifiedName.buildHash(fqn), TESTCASE_RESULT_EXTENSION, JsonUtils.pojoToJson(updated), timestamp);
change = ENTITY_UPDATED;
}
return new RestUtil.PatchResponse<>(Response.Status.OK, updated, change);
@ -90,10 +92,17 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
@Override
public void setFullyQualifiedName(TestCase test) {
EntityLink entityLink = EntityLink.parse(test.getEntityLink());
test.setFullyQualifiedName(FullyQualifiedName.add(entityLink.getFullyQualifiedFieldValue(), test.getName()));
test.setFullyQualifiedName(
FullyQualifiedName.add(
entityLink.getFullyQualifiedFieldValue(), EntityInterfaceUtil.quoteName(test.getName())));
test.setEntityFQN(entityLink.getFullyQualifiedFieldValue());
}
@Override
public String getFullyQualifiedNameHash(TestCase test) {
return FullyQualifiedName.buildHash(test.getFullyQualifiedName());
}
@Override
public void prepare(TestCase test) throws IOException {
EntityLink entityLink = EntityLink.parse(test.getEntityLink());
@ -171,31 +180,19 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
// Validate the request content
TestCase testCase = dao.findEntityByName(fqn);
TestCaseResult storedTestCaseResult =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestamp(
testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION, testCaseResult.getTimestamp()),
TestCaseResult.class);
if (storedTestCaseResult != null) {
daoCollection
.entityExtensionTimeSeriesDao()
.update(
testCase.getFullyQualifiedName(),
TESTCASE_RESULT_EXTENSION,
JsonUtils.pojoToJson(testCaseResult),
testCaseResult.getTimestamp());
} else {
daoCollection
.entityExtensionTimeSeriesDao()
.insert(
testCase.getFullyQualifiedName(),
TESTCASE_RESULT_EXTENSION,
TEST_CASE_RESULT_FIELD,
JsonUtils.pojoToJson(testCaseResult));
}
setFieldsInternal(testCase, new Fields(allowedFields, TEST_SUITE_FIELD));
String storedTestCaseResult =
getExtensionAtTimestamp(
testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION, testCaseResult.getTimestamp());
storeTimeSeries(
testCase.getFullyQualifiedName(),
TESTCASE_RESULT_EXTENSION,
"testCaseResult",
JsonUtils.pojoToJson(testCaseResult),
testCaseResult.getTimestamp(),
storedTestCaseResult != null);
setFieldsInternal(testCase, new EntityUtil.Fields(allowedFields, "testSuite"));
ChangeDescription change =
addTestCaseChangeDescription(testCase.getVersion(), testCaseResult, storedTestCaseResult);
ChangeEvent changeEvent =
@ -209,13 +206,10 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
// Validate the request content
TestCase testCase = dao.findEntityByName(fqn);
TestCaseResult storedTestCaseResult =
JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp),
TestCaseResult.class);
JsonUtils.readValue(getExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp), TestCaseResult.class);
if (storedTestCaseResult != null) {
daoCollection.entityExtensionTimeSeriesDao().deleteAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp);
deleteExtensionAtTimestamp(fqn, TESTCASE_RESULT_EXTENSION, timestamp);
testCase.setTestCaseResult(storedTestCaseResult);
ChangeDescription change = deleteTestCaseChangeDescription(testCase.getVersion(), storedTestCaseResult);
ChangeEvent changeEvent = getChangeEvent(updatedBy, testCase, change, entityType, testCase.getVersion());
@ -257,9 +251,7 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
private TestCaseResult getTestCaseResult(TestCase testCase) throws IOException {
return JsonUtils.readValue(
daoCollection
.entityExtensionTimeSeriesDao()
.getLatestExtension(testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION),
getLatestExtensionFromTimeseries(testCase.getFullyQualifiedName(), TESTCASE_RESULT_EXTENSION),
TestCaseResult.class);
}
@ -267,11 +259,7 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
List<TestCaseResult> testCaseResults;
testCaseResults =
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestamps(fqn, TESTCASE_RESULT_EXTENSION, startTs, endTs),
TestCaseResult.class);
getResultsFromAndToTimestamps(fqn, TESTCASE_RESULT_EXTENSION, startTs, endTs), TestCaseResult.class);
return new ResultList<>(testCaseResults, String.valueOf(startTs), String.valueOf(endTs), testCaseResults.size());
}
@ -320,12 +308,17 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
public TestSummary getTestSummary() throws IOException {
List<TestCase> testCases = listAll(Fields.EMPTY_FIELDS, new ListFilter());
List<String> testCaseFQNs = testCases.stream().map(TestCase::getFullyQualifiedName).collect(Collectors.toList());
List<String> testCaseFQNHashes =
testCases.stream()
.map(testCase -> FullyQualifiedName.buildHash(testCase.getFullyQualifiedName()))
.collect(Collectors.toList());
if (testCaseFQNs.isEmpty()) return new TestSummary();
if (testCaseFQNHashes.isEmpty()) return new TestSummary();
List<String> jsonList =
daoCollection.entityExtensionTimeSeriesDao().getLatestExtensionByFQNs(testCaseFQNs, TESTCASE_RESULT_EXTENSION);
daoCollection
.entityExtensionTimeSeriesDao()
.getLatestExtensionByFQNs(testCaseFQNHashes, TESTCASE_RESULT_EXTENSION);
HashMap<String, Integer> testCaseSummary = new HashMap<>();
for (String json : jsonList) {
@ -338,7 +331,7 @@ public class TestCaseRepository extends EntityRepository<TestCase> {
.withAborted(testCaseSummary.getOrDefault(TestCaseStatus.Aborted.toString(), 0))
.withFailed(testCaseSummary.getOrDefault(TestCaseStatus.Failed.toString(), 0))
.withSuccess(testCaseSummary.getOrDefault(TestCaseStatus.Success.toString(), 0))
.withTotal(testCaseFQNs.size());
.withTotal(testCaseFQNHashes.size());
}
@Override

View File

@ -51,12 +51,17 @@ public class TopicRepository extends EntityRepository<Topic> {
@Override
public void setFullyQualifiedName(Topic topic) {
topic.setFullyQualifiedName(FullyQualifiedName.add(topic.getService().getName(), topic.getName()));
topic.setFullyQualifiedName(FullyQualifiedName.add(topic.getService().getFullyQualifiedName(), topic.getName()));
if (topic.getMessageSchema() != null) {
setFieldFQN(topic.getFullyQualifiedName(), topic.getMessageSchema().getSchemaFields());
}
}
@Override
public String getFullyQualifiedNameHash(Topic topic) {
return FullyQualifiedName.buildHash(topic.getFullyQualifiedName());
}
public TopicRepository(CollectionDAO dao) {
super(
TopicResource.COLLECTION_PATH,

View File

@ -40,6 +40,7 @@ import org.openmetadata.service.TypeRegistry;
import org.openmetadata.service.resources.types.TypeResource;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.EntityUtil.Fields;
import org.openmetadata.service.util.FullyQualifiedName;
import org.openmetadata.service.util.JsonUtils;
import org.openmetadata.service.util.RestUtil.PutResponse;
@ -129,7 +130,10 @@ public class TypeRepository extends EntityRepository<Type> {
daoCollection
.fieldRelationshipDAO()
.listToByPrefix(
getCustomPropertyFQNPrefix(type.getName()), Entity.TYPE, Entity.TYPE, Relationship.HAS.ordinal());
FullyQualifiedName.buildHash(getCustomPropertyFQNPrefix(type.getName())),
Entity.TYPE,
Entity.TYPE,
Relationship.HAS.ordinal());
for (Triple<String, String, String> result : results) {
CustomProperty property = JsonUtils.readValue(result.getRight(), CustomProperty.class);
property.setPropertyType(dao.findEntityReferenceByName(result.getMiddle()));
@ -188,6 +192,8 @@ public class TypeRepository extends EntityRepository<Type> {
daoCollection
.fieldRelationshipDAO()
.insert(
FullyQualifiedName.buildHash(customPropertyFQN),
FullyQualifiedName.buildHash(property.getPropertyType().getName()),
customPropertyFQN,
property.getPropertyType().getName(),
Entity.TYPE,
@ -206,8 +212,8 @@ public class TypeRepository extends EntityRepository<Type> {
daoCollection
.fieldRelationshipDAO()
.delete(
customPropertyFQN,
property.getPropertyType().getName(),
FullyQualifiedName.buildHash(customPropertyFQN),
FullyQualifiedName.buildHash(property.getPropertyType().getName()),
Entity.TYPE,
Entity.TYPE,
Relationship.HAS.ordinal());
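
Per the schema migration, field_relationship is now keyed by (fromFQNHash, toFQNHash, relation) while the raw FQNs are kept as payload columns, so every insert/upsert/delete passes both forms. The assumed DAO shape (parameter names and any trailing arguments are illustrative):

interface FieldRelationshipDAOSketch {
  // Hash columns drive the primary key and indexes; raw FQNs are retained
  // so values can still be read back without reversing the hash.
  void insert(String fromFQNHash, String toFQNHash, String fromFQN, String toFQN,
      String fromType, String toType, int relation);

  void delete(String fromFQNHash, String toFQNHash,
      String fromType, String toType, int relation);
}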
@ -226,6 +232,8 @@ public class TypeRepository extends EntityRepository<Type> {
daoCollection
.fieldRelationshipDAO()
.upsert(
FullyQualifiedName.buildHash(customPropertyFQN),
FullyQualifiedName.buildHash(updatedProperty.getPropertyType().getName()),
customPropertyFQN,
updatedProperty.getPropertyType().getName(),
Entity.TYPE,

View File

@ -45,6 +45,7 @@ import org.openmetadata.schema.type.csv.CsvDocumentation;
import org.openmetadata.schema.type.csv.CsvErrorType;
import org.openmetadata.schema.type.csv.CsvHeader;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.OpenMetadataApplicationConfig;
import org.openmetadata.service.exception.CatalogExceptionMessage;
@ -81,6 +82,11 @@ public class UserRepository extends EntityRepository<User> {
return new Fields(tempFields, fields);
}
@Override
public User getByName(UriInfo uriInfo, String name, Fields fields) throws IOException {
return super.getByName(uriInfo, EntityInterfaceUtil.quoteName(name), fields);
}
/** Ensures that the default roles are added for POST, PUT and PATCH operations. */
@Override
public void prepare(User user) throws IOException {
@ -281,7 +287,7 @@ public class UserRepository extends EntityRepository<User> {
}
/* Get all the teams that user belongs to User entity */
private List<EntityReference> getTeams(User user) throws IOException {
public List<EntityReference> getTeams(User user) throws IOException {
List<EntityRelationshipRecord> records = findFrom(user.getId(), USER, Relationship.HAS, Entity.TEAM);
List<EntityReference> teams = EntityUtil.populateEntityReferences(records, Entity.TEAM);
teams = teams.stream().filter(team -> !team.getDeleted()).collect(Collectors.toList()); // Filter deleted teams

View File

@ -54,22 +54,19 @@ public class WebAnalyticEventRepository extends EntityRepository<WebAnalyticEven
@Transaction
public Response addWebAnalyticEventData(WebAnalyticEventData webAnalyticEventData) throws IOException {
webAnalyticEventData.setEventId(UUID.randomUUID());
daoCollection
.entityExtensionTimeSeriesDao()
.insert(
webAnalyticEventData.getEventType().value(),
WEB_ANALYTICS_EVENT_DATA_EXTENSION,
"webAnalyticEventData",
JsonUtils.pojoToJson(webAnalyticEventData));
storeTimeSeries(
webAnalyticEventData.getEventType().value(),
WEB_ANALYTICS_EVENT_DATA_EXTENSION,
"webAnalyticEventData",
JsonUtils.pojoToJson(webAnalyticEventData),
webAnalyticEventData.getTimestamp(),
false);
return Response.ok(webAnalyticEventData).build();
}
@Transaction
public void deleteWebAnalyticEventData(WebAnalyticEventType name, Long timestamp) {
daoCollection
.entityExtensionTimeSeriesDao()
.deleteBeforeExclusive(name.value(), WEB_ANALYTICS_EVENT_DATA_EXTENSION, timestamp);
deleteExtensionBeforeTimestamp(name.value(), WEB_ANALYTICS_EVENT_DATA_EXTENSION, timestamp);
}
public ResultList<WebAnalyticEventData> getWebAnalyticEventData(String eventType, Long startTs, Long endTs)
@ -77,9 +74,7 @@ public class WebAnalyticEventRepository extends EntityRepository<WebAnalyticEven
List<WebAnalyticEventData> webAnalyticEventData;
webAnalyticEventData =
JsonUtils.readObjects(
daoCollection
.entityExtensionTimeSeriesDao()
.listBetweenTimestamps(eventType, WEB_ANALYTICS_EVENT_DATA_EXTENSION, startTs, endTs),
getResultsFromAndToTimestamps(eventType, WEB_ANALYTICS_EVENT_DATA_EXTENSION, startTs, endTs),
WebAnalyticEventData.class);
return new ResultList<>(

View File

@ -56,6 +56,7 @@ import org.openmetadata.schema.type.EntityHistory;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.OpenMetadataApplicationConfig;
import org.openmetadata.service.exception.CatalogExceptionMessage;
@ -211,7 +212,7 @@ public class BotResource extends EntityResource<Bot, BotRepository> {
@DefaultValue("non-deleted")
Include include)
throws IOException {
return getByNameInternal(uriInfo, securityContext, name, "", include);
return getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), "", include);
}
@GET
@ -364,7 +365,7 @@ public class BotResource extends EntityResource<Bot, BotRepository> {
boolean hardDelete,
@Parameter(description = "Name of the bot", schema = @Schema(type = "string")) @PathParam("name") String name)
throws IOException {
return deleteByName(uriInfo, securityContext, name, true, hardDelete);
return deleteByName(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), true, hardDelete);
}
@PUT
@ -387,7 +388,7 @@ public class BotResource extends EntityResource<Bot, BotRepository> {
private Bot getBot(CreateBot create, String user) throws IOException {
return copy(new Bot(), create, user)
.withBotUser(getEntityReference(Entity.USER, create.getBotUser()))
.withBotUser(getEntityReference(Entity.USER, EntityInterfaceUtil.quoteName(create.getBotUser())))
.withProvider(create.getProvider())
.withFullyQualifiedName(create.getName());
}
@ -408,7 +409,7 @@ public class BotResource extends EntityResource<Bot, BotRepository> {
private Bot getBot(SecurityContext securityContext, CreateBot create) throws IOException {
Bot bot = getBot(create, securityContext.getUserPrincipal().getName());
Bot originalBot = retrieveBot(bot.getName());
Bot originalBot = retrieveBot(EntityInterfaceUtil.quoteName(bot.getName()));
User botUser = retrieveUser(bot);
if (botUser != null && !Boolean.TRUE.equals(botUser.getIsBot())) {
throw new IllegalArgumentException(String.format("User [%s] is not a bot user", botUser.getName()));
@ -430,7 +431,11 @@ public class BotResource extends EntityResource<Bot, BotRepository> {
private User retrieveUser(Bot bot) {
// TODO fix this code - don't depend on exception
try {
return Entity.getEntity(bot.getBotUser(), "", Include.NON_DELETED);
return Entity.getEntityByName(
Entity.USER,
EntityInterfaceUtil.quoteName(bot.getBotUser().getFullyQualifiedName()),
"",
Include.NON_DELETED);
} catch (Exception exception) {
return null;
}

View File

@ -48,6 +48,7 @@ import org.openmetadata.schema.type.Column;
import org.openmetadata.schema.type.EntityHistory;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.MetadataOperation;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.jdbi3.ListFilter;
@ -715,6 +716,13 @@ public class TestCaseResource extends EntityResource<TestCase, TestCaseRepositor
return repository.getTestSummary();
}
@Override
public TestCase getByNameInternal(
UriInfo uriInfo, SecurityContext securityContext, String fqn, String fieldsParam, Include include)
throws IOException {
return super.getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(fqn), fieldsParam, include);
}
private TestCase getTestCase(CreateTestCase create, String user, EntityLink entityLink) throws IOException {
return copy(new TestCase(), create, user)
.withDescription(create.getDescription())

View File

@ -43,6 +43,7 @@ import org.openmetadata.schema.type.EntityHistory;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.MetadataOperation;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.jdbi3.ListFilter;
@ -536,4 +537,11 @@ public class TestSuiteResource extends EntityResource<TestSuite, TestSuiteReposi
}
return testSuite;
}
@Override
public TestSuite getByNameInternal(
UriInfo uriInfo, SecurityContext securityContext, String name, String fieldsParam, Include include)
throws IOException {
return super.getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), fieldsParam, include);
}
}

View File

@ -25,6 +25,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import javax.json.JsonPatch;
import javax.validation.Valid;
import javax.validation.constraints.Max;
@ -53,6 +54,7 @@ import org.openmetadata.schema.type.EntityHistory;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.MetadataOperation;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.OpenMetadataApplicationConfig;
import org.openmetadata.service.exception.CatalogExceptionMessage;
@ -448,7 +450,12 @@ public class GlossaryTermResource extends EntityResource<GlossaryTerm, GlossaryT
.withParent(getEntityReference(Entity.GLOSSARY_TERM, create.getParent()))
.withRelatedTerms(getEntityReferences(Entity.GLOSSARY_TERM, create.getRelatedTerms()))
.withReferences(create.getReferences())
.withReviewers(getEntityReferences(Entity.USER, create.getReviewers()))
.withReviewers(
getEntityReferences(
Entity.USER,
create.getReviewers() == null
? create.getReviewers()
: create.getReviewers().stream().map(EntityInterfaceUtil::quoteName).collect(Collectors.toList())))
.withTags(create.getTags())
.withProvider(create.getProvider())
.withMutuallyExclusive(create.getMutuallyExclusive());

View File

@ -14,6 +14,7 @@ import io.swagger.v3.oas.annotations.tags.Tag;
import java.io.IOException;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import javax.json.JsonPatch;
import javax.validation.Valid;
import javax.validation.constraints.Max;
@ -45,6 +46,7 @@ import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.MetadataOperation;
import org.openmetadata.schema.type.Votes;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.jdbi3.ListFilter;
@ -499,7 +501,12 @@ public class QueryResource extends EntityResource<Query, QueryRepository> {
.withQuery(create.getQuery())
.withDuration(create.getDuration())
.withVotes(new Votes().withUpVotes(0).withDownVotes(0))
.withUsers(getEntityReferences(USER, create.getUsers()))
.withUsers(
getEntityReferences(
USER,
create.getUsers() == null
? create.getUsers()
: create.getUsers().stream().map(EntityInterfaceUtil::quoteName).collect(Collectors.toList())))
.withQueryUsedIn(EntityUtil.populateEntityReferences(create.getQueryUsedIn()))
.withQueryDate(create.getQueryDate());
}

View File

@ -88,6 +88,7 @@ import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.resources.Collection;
import org.openmetadata.service.security.Authorizer;
import org.openmetadata.service.util.ElasticSearchClientUtils;
import org.openmetadata.service.util.FullyQualifiedName;
import org.openmetadata.service.util.JsonUtils;
import org.openmetadata.service.util.ReIndexingHandler;
@ -463,7 +464,8 @@ public class SearchResource {
String jobRecord;
jobRecord =
dao.entityExtensionTimeSeriesDao()
.getLatestExtension(ELASTIC_SEARCH_ENTITY_FQN_STREAM, ELASTIC_SEARCH_EXTENSION);
.getLatestExtension(
FullyQualifiedName.buildHash(ELASTIC_SEARCH_ENTITY_FQN_STREAM), ELASTIC_SEARCH_EXTENSION);
if (jobRecord != null) {
return Response.status(Response.Status.OK)
.entity(JsonUtils.readValue(jobRecord, EventPublisherJob.class))

View File

@ -143,7 +143,7 @@ public class TagLabelCache {
/** Returns true if the parent of the tag label is mutually exclusive */
public boolean mutuallyExclusive(TagLabel label) {
String[] fqnParts = FullyQualifiedName.split(label.getTagFQN());
String parentFqn = FullyQualifiedName.getParent(fqnParts);
String parentFqn = FullyQualifiedName.getParentFQN(fqnParts);
boolean rootParent = fqnParts.length == 2;
if (label.getSource() == TagSource.CLASSIFICATION) {
return rootParent

View File

@ -59,6 +59,7 @@ import org.openmetadata.schema.type.EntityHistory;
import org.openmetadata.schema.type.Include;
import org.openmetadata.schema.type.MetadataOperation;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.OpenMetadataApplicationConfig;
import org.openmetadata.service.jdbi3.CollectionDAO;
@ -433,7 +434,7 @@ public class TeamResource extends EntityResource<Team, TeamRepository> {
boolean hardDelete,
@Parameter(description = "Name of the team", schema = @Schema(type = "string")) @PathParam("name") String name)
throws IOException {
return deleteByName(uriInfo, securityContext, name, false, hardDelete);
return deleteByName(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), false, hardDelete);
}
@PUT
@ -527,4 +528,11 @@ public class TeamResource extends EntityResource<Team, TeamRepository> {
.withPolicies(EntityUtil.toEntityReferences(ct.getPolicies(), Entity.POLICY))
.withEmail(ct.getEmail());
}
@Override
public Team getByNameInternal(
UriInfo uriInfo, SecurityContext securityContext, String name, String fieldsParam, Include include)
throws IOException {
return super.getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), fieldsParam, include);
}
}

View File

@ -105,6 +105,7 @@ import org.openmetadata.schema.type.MetadataOperation;
import org.openmetadata.schema.type.ProviderType;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.type.csv.CsvImportResult;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.OpenMetadataApplicationConfig;
import org.openmetadata.service.auth.JwtResponse;
@ -363,7 +364,7 @@ public class UserResource extends EntityResource<User, UserRepository> {
@DefaultValue("non-deleted")
Include include)
throws IOException {
User user = getByNameInternal(uriInfo, securityContext, name, fieldsParam, include);
User user = getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), fieldsParam, include);
decryptOrNullify(securityContext, user);
return user;
}
@ -797,7 +798,7 @@ public class UserResource extends EntityResource<User, UserRepository> {
boolean hardDelete,
@Parameter(description = "Name of the user", schema = @Schema(type = "string")) @PathParam("name") String name)
throws IOException {
return deleteByName(uriInfo, securityContext, name, false, hardDelete);
return deleteByName(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), false, hardDelete);
}
@PUT
@ -1409,4 +1410,11 @@ public class UserResource extends EntityResource<User, UserRepository> {
}
}
}
@Override
public User getByNameInternal(
UriInfo uriInfo, SecurityContext securityContext, String name, String fieldsParam, Include include)
throws IOException {
return super.getByNameInternal(uriInfo, securityContext, EntityInterfaceUtil.quoteName(name), fieldsParam, include);
}
}

View File

@ -23,6 +23,7 @@ import lombok.extern.slf4j.Slf4j;
import org.jdbi.v3.core.Jdbi;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.ResourcePermission;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.OpenMetadataApplicationConfig;
import org.openmetadata.service.security.policyevaluator.OperationContext;
import org.openmetadata.service.security.policyevaluator.PolicyEvaluator;
@ -116,7 +117,7 @@ public class DefaultAuthorizer implements Authorizer {
}
public static SubjectContext getSubjectContext(String userName) {
return SubjectCache.getInstance().getSubjectContext(userName);
return SubjectCache.getInstance().getSubjectContext(EntityInterfaceUtil.quoteName(userName));
}
private SubjectContext changeSubjectContext(String user, SubjectContext loggedInUser) {

View File

@ -33,6 +33,7 @@ import lombok.extern.slf4j.Slf4j;
import org.openmetadata.schema.entity.teams.Team;
import org.openmetadata.schema.entity.teams.User;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.exception.CatalogExceptionMessage;
import org.openmetadata.service.exception.EntityNotFoundException;
@ -213,7 +214,7 @@ public class SubjectCache {
static class UserLoader extends CacheLoader<String, SubjectContext> {
@Override
public SubjectContext load(@CheckForNull String userName) throws IOException {
User user = USER_REPOSITORY.getByName(null, userName, USER_FIELDS);
User user = USER_REPOSITORY.getByName(null, EntityInterfaceUtil.quoteName(userName), USER_FIELDS);
LOG.info("Loaded user {}:{}", user.getName(), user.getId());
return new SubjectContext(user);
}

View File

@ -17,6 +17,7 @@ import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
import static org.openmetadata.schema.type.Include.ALL;
import java.io.IOException;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@ -30,7 +31,9 @@ import javax.ws.rs.WebApplicationException;
import lombok.Getter;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.codec.binary.Hex;
import org.openmetadata.common.utils.CommonUtil;
import org.openmetadata.schema.EntityInterface;
import org.openmetadata.schema.api.data.TermReference;
@ -515,6 +518,15 @@ public final class EntityUtil {
}
}
@SneakyThrows
public static String hash(String input) {
if (input != null) {
byte[] checksum = MessageDigest.getInstance("MD5").digest(input.getBytes());
return Hex.encodeHexString(checksum);
}
return input;
}
public static boolean isDescriptionTask(TaskType taskType) {
return taskType == TaskType.RequestDescription || taskType == TaskType.UpdateDescription;
}
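
Note: EntityUtil.hash is now the single hashing primitive behind the new *Hash columns: a null-safe MD5 over the input bytes, hex-encoded. An equivalent standalone computation, for illustration (the FQN is made up; the real method handles the checked exception via @SneakyThrows):

    byte[] checksum = MessageDigest.getInstance("MD5").digest("service.db.schema.table".getBytes());
    String hash = Hex.encodeHexString(checksum); // 32 lowercase hex characters, well within VARCHAR(256)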

View File

@ -52,6 +52,22 @@ public class FullyQualifiedName {
return String.join(Entity.SEPARATOR, list);
}
public static String buildHash(String... strings) {
List<String> list = new ArrayList<>();
for (String string : strings) {
list.add(EntityUtil.hash(quoteName(string)));
}
return String.join(Entity.SEPARATOR, list);
}
public static String buildHash(String fullyQualifiedName) {
if (fullyQualifiedName != null && !fullyQualifiedName.isEmpty()) {
String[] split = split(fullyQualifiedName);
return buildHash(split);
}
return fullyQualifiedName;
}
public static String[] split(String string) {
SplitListener listener = new SplitListener();
walk(string, listener);
@ -68,20 +84,21 @@ public class FullyQualifiedName {
walker.walk(listener, fqn);
}
public static String getParent(String fqn) {
public static String getParentFQN(String fqn) {
// Split fqn of format a.b.c.d and return the parent a.b.c
String[] split = split(fqn);
return getParent(split);
return getParentFQN(split);
}
public static String getParent(String... fqnParts) {
public static String getParentFQN(String... fqnParts) {
// Fqn parts a b c d are given from fqn a.b.c.d
if (fqnParts.length <= 1) {
return null;
}
if (fqnParts.length == 2) {
return unquoteName(fqnParts[0]); // The root name is not quoted and only the unquoted name is returned
return fqnParts[0];
}
String parent = build(fqnParts[0]);
for (int i = 1; i < fqnParts.length - 1; i++) {
parent = add(parent, fqnParts[i]);
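
Note: buildHash(String...) hashes each FQN part independently (quoting it first) and rejoins the hashes with the separator, while buildHash(String) splits a full FQN and delegates to it. Conceptually, assuming EntityUtil.hash as above:

    // Parts are quoted if needed, hashed individually, then joined with "." again
    String h = FullyQualifiedName.buildHash("a.\"b.c\".d");
    // roughly: hash("a") + "." + hash("\"b.c\"") + "." + hash("d")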

View File

@ -37,6 +37,7 @@ import org.openmetadata.schema.type.AnnouncementDetails;
import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.Post;
import org.openmetadata.schema.type.Relationship;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.jdbi3.UserRepository;
@ -138,10 +139,10 @@ public class NotificationHandler {
entityLink -> {
String fqn = entityLink.getEntityFQN();
if (USER.equals(entityLink.getEntityType())) {
User user = dao.userDAO().findEntityByName(fqn);
User user = dao.userDAO().findEntityByName(EntityInterfaceUtil.quoteName(fqn));
WebSocketManager.getInstance().sendToOne(user.getId(), WebSocketManager.MENTION_CHANNEL, jsonThread);
} else if (TEAM.equals(entityLink.getEntityType())) {
Team team = dao.teamDAO().findEntityByName(fqn);
Team team = dao.teamDAO().findEntityByName(EntityInterfaceUtil.quoteName(fqn));
// fetch all the users in the team

List<CollectionDAO.EntityRelationshipRecord> records =
dao.relationshipDAO().findTo(team.getId().toString(), TEAM, Relationship.HAS.ordinal(), USER);

View File

@ -1,14 +0,0 @@
package org.openmetadata.service.util;
import java.security.MessageDigest;
import lombok.SneakyThrows;
import org.apache.commons.codec.binary.Hex;
public class QueryUtil {
@SneakyThrows
public static String getCheckSum(String input) {
byte[] checksum = MessageDigest.getInstance("MD5").digest(input.getBytes());
return Hex.encodeHexString(checksum);
}
}
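
QueryUtil existed only for this checksum and is removed; callers migrate to the shared helper (see QueryResourceTest below):

    // Before (removed):
    String checksum = QueryUtil.getCheckSum(query);
    // After:
    String checksum = EntityUtil.hash(query);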

View File

@ -119,7 +119,7 @@ public class ReIndexingHandler {
// Create Entry in the DB
dao.entityExtensionTimeSeriesDao()
.insert(
jobData.getId().toString(),
EntityUtil.hash(jobData.getId().toString()),
REINDEXING_JOB_EXTENSION,
"eventPublisherJob",
JsonUtils.pojoToJson(jobData));
@ -179,7 +179,8 @@ public class ReIndexingHandler {
SearchIndexWorkflow job = REINDEXING_JOB_MAP.get(jobId);
if (job == null) {
String recordString =
dao.entityExtensionTimeSeriesDao().getLatestExtension(jobId.toString(), REINDEXING_JOB_EXTENSION);
dao.entityExtensionTimeSeriesDao()
.getLatestExtension(EntityUtil.hash(jobId.toString()), REINDEXING_JOB_EXTENSION);
return JsonUtils.readValue(recordString, EventPublisherJob.class);
}
return REINDEXING_JOB_MAP.get(jobId).getJobData();
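
Note: since entity_extension_time_series is now keyed by entityFQNHash, every writer and reader of the reindexing job record must hash the job id identically; the insert above and the lookup here form a symmetric pair:

    String key = EntityUtil.hash(jobData.getId().toString());
    dao.entityExtensionTimeSeriesDao()
        .insert(key, REINDEXING_JOB_EXTENSION, "eventPublisherJob", JsonUtils.pojoToJson(jobData));
    // later, the same key recovers the latest record
    String record = dao.entityExtensionTimeSeriesDao().getLatestExtension(key, REINDEXING_JOB_EXTENSION);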

View File

@ -24,6 +24,7 @@ import org.openmetadata.schema.analytics.ReportData;
import org.openmetadata.schema.system.StepStats;
import org.openmetadata.service.exception.SourceException;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.RestUtil;
import org.openmetadata.service.util.ResultList;
import org.openmetadata.service.workflows.interfaces.Source;
@ -93,7 +94,8 @@ public class PaginatedDataInsightSource implements Source<ResultList<ReportData>
int reportDataCount = dao.entityExtensionTimeSeriesDao().listCount(entityFQN);
List<CollectionDAO.ReportDataRow> reportDataList =
dao.entityExtensionTimeSeriesDao()
.getAfterExtension(entityFQN, limit + 1, after == null ? "0" : RestUtil.decodeCursor(after));
.getAfterExtension(
EntityUtil.hash(entityFQN), limit + 1, after == null ? "0" : RestUtil.decodeCursor(after));
return getAfterExtensionList(reportDataList, after, limit, reportDataCount);
}

View File

@ -50,6 +50,7 @@ import org.openmetadata.service.exception.SinkException;
import org.openmetadata.service.exception.SourceException;
import org.openmetadata.service.jdbi3.CollectionDAO;
import org.openmetadata.service.socket.WebSocketManager;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.JsonUtils;
import org.openmetadata.service.util.ReIndexingHandler;
import org.openmetadata.service.util.ResultList;
@ -300,12 +301,16 @@ public class SearchIndexWorkflow implements Runnable {
public void updateRecordToDb() throws IOException {
String recordString =
dao.entityExtensionTimeSeriesDao().getExtension(jobData.getId().toString(), REINDEXING_JOB_EXTENSION);
dao.entityExtensionTimeSeriesDao()
.getExtension(EntityUtil.hash(jobData.getId().toString()), REINDEXING_JOB_EXTENSION);
EventPublisherJob lastRecord = JsonUtils.readValue(recordString, EventPublisherJob.class);
long originalLastUpdate = lastRecord.getTimestamp();
dao.entityExtensionTimeSeriesDao()
.update(
jobData.getId().toString(), REINDEXING_JOB_EXTENSION, JsonUtils.pojoToJson(jobData), originalLastUpdate);
EntityUtil.hash(jobData.getId().toString()),
REINDEXING_JOB_EXTENSION,
JsonUtils.pojoToJson(jobData),
originalLastUpdate);
}
private void reCreateIndexes(String entityType) {

View File

@ -51,9 +51,9 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
@Test
void post_databaseFQN_as_admin_200_OK(TestInfo test) throws IOException {
// Create database with different optional fields
CreateDatabase create = createRequest(test).withService(SNOWFLAKE_REFERENCE.getName());
CreateDatabase create = createRequest(test).withService(SNOWFLAKE_REFERENCE.getFullyQualifiedName());
Database db = createAndCheckEntity(create, ADMIN_AUTH_HEADERS);
String expectedFQN = FullyQualifiedName.build(SNOWFLAKE_REFERENCE.getName(), create.getName());
String expectedFQN = FullyQualifiedName.build(SNOWFLAKE_REFERENCE.getFullyQualifiedName(), create.getName());
assertEquals(expectedFQN, db.getFullyQualifiedName());
}
@ -71,7 +71,7 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
// Create database for each service and test APIs
for (EntityReference service : differentServices) {
createAndCheckEntity(createRequest(test).withService(service.getName()), ADMIN_AUTH_HEADERS);
createAndCheckEntity(createRequest(test).withService(service.getFullyQualifiedName()), ADMIN_AUTH_HEADERS);
// List databases by filtering on service name and ensure right databases in the response
Map<String, String> queryParams = new HashMap<>();
@ -118,7 +118,7 @@ public class DatabaseResourceTest extends EntityResourceTest<Database, CreateDat
@Override
public CreateDatabase createRequest(String name) {
return new CreateDatabase().withName(name).withService(getContainer().getName());
return new CreateDatabase().withName(name).withService(getContainer().getFullyQualifiedName());
}
@Override

View File

@ -162,7 +162,8 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> {
public void setupDatabaseSchemas(TestInfo test) throws IOException {
DatabaseResourceTest databaseResourceTest = new DatabaseResourceTest();
CreateDatabase create = databaseResourceTest.createRequest(test).withService(SNOWFLAKE_REFERENCE.getName());
CreateDatabase create =
databaseResourceTest.createRequest(test).withService(SNOWFLAKE_REFERENCE.getFullyQualifiedName());
DATABASE = databaseResourceTest.createEntity(create, ADMIN_AUTH_HEADERS);
DatabaseSchemaResourceTest databaseSchemaResourceTest = new DatabaseSchemaResourceTest();
@ -1999,7 +2000,7 @@ public class TableResourceTest extends EntityResourceTest<Table, CreateTable> {
DatabaseResourceTest databaseResourceTest = new DatabaseResourceTest();
Database database =
databaseResourceTest.createAndCheckEntity(
databaseResourceTest.createRequest(test).withService(service.getName()), ADMIN_AUTH_HEADERS);
databaseResourceTest.createRequest(test).withService(service.getFullyQualifiedName()), ADMIN_AUTH_HEADERS);
CreateTable create = createRequest(test, index);
return createEntity(create, ADMIN_AUTH_HEADERS).withDatabase(database.getEntityReference());
}

View File

@ -172,10 +172,10 @@ public class FeedResourceTest extends OpenMetadataApplicationTest {
TABLE_DESCRIPTION_LINK = String.format("<#E::table::%s::description>", TABLE.getFullyQualifiedName());
USER = TableResourceTest.USER1;
USER_LINK = String.format("<#E::user::%s>", USER.getName());
USER_LINK = String.format("<#E::user::%s>", USER.getFullyQualifiedName());
TEAM = TableResourceTest.TEAM1;
TEAM_LINK = String.format("<#E::team::%s>", TEAM.getName());
TEAM_LINK = String.format("<#E::team::%s>", TEAM.getFullyQualifiedName());
CreateThread createThread = create();
THREAD = createAndCheck(createThread, ADMIN_AUTH_HEADERS);
@ -696,7 +696,7 @@ public class FeedResourceTest extends OpenMetadataApplicationTest {
private static Stream<Arguments> provideStringsForListThreads() {
return Stream.of(
Arguments.of(String.format("<#E::%s::%s>", Entity.USER, USER.getName())),
Arguments.of(String.format("<#E::%s::%s>", Entity.USER, USER.getFullyQualifiedName())),
Arguments.of(String.format("<#E::%s::%s>", Entity.TABLE, TABLE.getFullyQualifiedName())));
}

View File

@ -421,14 +421,14 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo
String fields = "";
entity =
byName
? getEntityByName(entity.getName(), fields, ADMIN_AUTH_HEADERS)
? getEntityByName(entity.getFullyQualifiedName(), fields, ADMIN_AUTH_HEADERS)
: getEntity(entity.getId(), fields, ADMIN_AUTH_HEADERS);
assertListNull(entity.getOwner(), entity.getTags());
fields = "owner,tags";
entity =
byName
? getEntityByName(entity.getName(), fields, ADMIN_AUTH_HEADERS)
? getEntityByName(entity.getFullyQualifiedName(), fields, ADMIN_AUTH_HEADERS)
: getEntity(entity.getId(), fields, ADMIN_AUTH_HEADERS);
// Checks for owner, tags, and followers are done in the base class
return entity;
@ -531,4 +531,11 @@ public class GlossaryResourceTest extends EntityResourceTest<Glossary, CreateGlo
assertTagPrefixAbsent(table.getColumns().get(0).getTags(), previousTermFqn);
}
}
private static String quoteName(String name) {
if (name != null && !name.contains("\"")) {
return name.contains(".") ? "\\\"" + name + "\\\"" : name;
}
return name;
}
}
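
Note: unlike EntityInterfaceUtil.quoteName, this test-local variant escapes the quote characters, presumably so the quoted name survives embedding in the serialized records the glossary import/export tests round-trip. Runtime values, shown unescaped:

    // EntityInterfaceUtil.quoteName("a.b")  ->  "a.b"      (wrapped in plain quotes)
    // this helper:        quoteName("a.b")  ->  \"a.b\"    (each quote preceded by a backslash)
    // names without dots are returned unchanged by both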

View File

@ -174,14 +174,17 @@ public class GlossaryTermResourceTest extends EntityResourceTest<GlossaryTerm, C
// Create term t1, and term t12 under t1, in the glossary without reviewers and owner
CreateGlossaryTerm create =
new CreateGlossaryTerm().withGlossary(glossary.getName()).withDescription("description");
new CreateGlossaryTerm().withGlossary(glossary.getFullyQualifiedName()).withDescription("description");
GlossaryTerm t1 = createEntity(create.withName("t1"), ADMIN_AUTH_HEADERS);
assertEntityReferences(glossary.getReviewers(), t1.getReviewers()); // Reviewers are inherited
assertReference(glossary.getOwner(), t1.getOwner()); // Owner is inherited
GlossaryTerm t12 =
createEntity(
create.withName("t12").withGlossary(glossary.getName()).withParent(t1.getFullyQualifiedName()),
create
.withName("t12")
.withGlossary(glossary.getFullyQualifiedName())
.withParent(t1.getFullyQualifiedName()),
ADMIN_AUTH_HEADERS);
assertEntityReferences(glossary.getReviewers(), t12.getReviewers()); // Reviewers are inherited
assertReference(glossary.getOwner(), t12.getOwner()); // Owner is inherited
@ -296,7 +299,7 @@ public class GlossaryTermResourceTest extends EntityResourceTest<GlossaryTerm, C
Glossary g1 = createGlossary(test, null, null);
// Create glossary term t1 in glossary g1
CreateGlossaryTerm create = createRequest("t1", "", "", null).withGlossary(g1.getName());
CreateGlossaryTerm create = createRequest("t1", "", "", null).withGlossary(g1.getFullyQualifiedName());
GlossaryTerm t1 = createEntity(create, ADMIN_AUTH_HEADERS);
TagLabel t1Label = EntityUtil.toTagLabel(t1);
@ -369,7 +372,7 @@ public class GlossaryTermResourceTest extends EntityResourceTest<GlossaryTerm, C
Glossary glossary, GlossaryTerm parent, String termName, List<EntityReference> reviewers) throws IOException {
CreateGlossaryTerm createGlossaryTerm =
createRequest(termName, "", "", null)
.withGlossary(glossary.getName())
.withGlossary(getFqn(glossary))
.withParent(getFqn(parent))
.withReviewers(getFqns(reviewers));
return createAndCheckEntity(createGlossaryTerm, ADMIN_AUTH_HEADERS);
@ -399,7 +402,7 @@ public class GlossaryTermResourceTest extends EntityResourceTest<GlossaryTerm, C
.withSynonyms(List.of("syn1", "syn2", "syn3"))
.withGlossary(GLOSSARY1.getName())
.withRelatedTerms(Arrays.asList(getFqn(GLOSSARY1_TERM1), getFqn(GLOSSARY2_TERM1)))
.withReviewers(List.of(USER1_REF.getName()));
.withReviewers(List.of(USER1_REF.getFullyQualifiedName()));
}
@Override

View File

@ -17,7 +17,6 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.openmetadata.common.utils.CommonUtil.listOrEmpty;
import static org.openmetadata.service.util.EntityUtil.fieldAdded;
import static org.openmetadata.service.util.EntityUtil.fieldDeleted;
import static org.openmetadata.service.util.EntityUtil.fieldUpdated;
import static org.openmetadata.service.util.TestUtils.ADMIN_AUTH_HEADERS;
import static org.openmetadata.service.util.TestUtils.UpdateType.MINOR_UPDATE;
@ -103,14 +102,6 @@ public class TypeResourceTest extends EntityResourceTest<Type, CreateType> {
tableEntity = addCustomPropertyAndCheck(tableEntity.getId(), fieldA, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);
assertCustomProperties(new ArrayList<>(List.of(fieldA)), tableEntity.getCustomProperties());
// Changing property type with PUT - old property deleted and new custom property of the same name added
fieldA.withPropertyType(STRING_TYPE.getEntityReference());
change = getChangeDescription(tableEntity.getVersion());
fieldDeleted(change, "customProperties", tableEntity.getCustomProperties());
fieldAdded(change, "customProperties", new ArrayList<>(List.of(fieldA)));
tableEntity = addCustomPropertyAndCheck(tableEntity.getId(), fieldA, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);
assertCustomProperties(new ArrayList<>(List.of(fieldA)), tableEntity.getCustomProperties());
// Changing custom property description with PATCH
fieldA.withDescription("updated2");
String json = JsonUtils.pojoToJson(tableEntity);
@ -119,20 +110,6 @@ public class TypeResourceTest extends EntityResourceTest<Type, CreateType> {
fieldUpdated(change, EntityUtil.getCustomField(fieldA, "description"), "updated", "updated2");
tableEntity = patchEntityAndCheck(tableEntity, json, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);
// Changing property type with PATCH - old property deleted and new custom property of the same name added
CustomProperty fieldA1 =
new CustomProperty()
.withDescription(fieldA.getDescription())
.withPropertyType(INT_TYPE.getEntityReference())
.withName(fieldA.getName());
json = JsonUtils.pojoToJson(tableEntity);
tableEntity.setCustomProperties(new ArrayList<>(List.of(fieldA1)));
change = getChangeDescription(tableEntity.getVersion());
fieldDeleted(change, "customProperties", new ArrayList<>(List.of(fieldA)));
fieldAdded(change, "customProperties", new ArrayList<>(List.of(fieldA1)));
tableEntity = patchEntityAndCheck(tableEntity, json, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);
assertCustomProperties(new ArrayList<>(List.of(fieldA1)), tableEntity.getCustomProperties());
// Add a second property with name intB with type integer
EntityReference typeRef =
new EntityReference()
@ -144,7 +121,7 @@ public class TypeResourceTest extends EntityResourceTest<Type, CreateType> {
tableEntity = addCustomPropertyAndCheck(tableEntity.getId(), fieldB, ADMIN_AUTH_HEADERS, MINOR_UPDATE, change);
fieldB.setPropertyType(INT_TYPE.getEntityReference());
assertEquals(2, tableEntity.getCustomProperties().size());
assertCustomProperties(new ArrayList<>(List.of(fieldA1, fieldB)), tableEntity.getCustomProperties());
assertCustomProperties(new ArrayList<>(List.of(fieldA, fieldB)), tableEntity.getCustomProperties());
}
@Test

View File

@ -57,6 +57,7 @@ import org.openmetadata.schema.type.FieldChange;
import org.openmetadata.schema.type.Status;
import org.openmetadata.schema.type.StatusType;
import org.openmetadata.schema.type.Task;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
import org.openmetadata.service.Entity;
import org.openmetadata.service.resources.EntityResourceTest;
import org.openmetadata.service.resources.pipelines.PipelineResource.PipelineList;
@ -169,7 +170,7 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
@Test
void post_PipelineWithDifferentService_200_ok(TestInfo test) throws IOException {
String[] differentServices = {AIRFLOW_REFERENCE.getName(), GLUE_REFERENCE.getName()};
String[] differentServices = {AIRFLOW_REFERENCE.getFullyQualifiedName(), GLUE_REFERENCE.getFullyQualifiedName()};
// Create Pipeline for each service and test APIs
for (String service : differentServices) {
@ -181,7 +182,7 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
ResultList<Pipeline> list = listEntities(queryParams, ADMIN_AUTH_HEADERS);
for (Pipeline db : list.getData()) {
assertEquals(service, db.getService().getName());
assertEquals(service, db.getService().getFullyQualifiedName());
}
}
}
@ -199,7 +200,7 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
@Test
void put_PipelineUrlUpdate_200(TestInfo test) throws IOException {
CreatePipeline request =
createRequest(test).withService(AIRFLOW_REFERENCE.getName()).withDescription("description");
createRequest(test).withService(AIRFLOW_REFERENCE.getFullyQualifiedName()).withDescription("description");
createAndCheckEntity(request, ADMIN_AUTH_HEADERS);
String pipelineURL = "https://airflow.open-metadata.org/tree?dag_id=airflow_redshift_usage";
Integer pipelineConcurrency = 110;
@ -211,7 +212,10 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
request.withSourceUrl(pipelineURL).withConcurrency(pipelineConcurrency).withStartDate(startDate),
OK,
ADMIN_AUTH_HEADERS);
String expectedFQN = FullyQualifiedName.add(AIRFLOW_REFERENCE.getFullyQualifiedName(), pipeline.getName());
String expectedFQN =
FullyQualifiedName.add(
EntityInterfaceUtil.quoteName(AIRFLOW_REFERENCE.getName()),
EntityInterfaceUtil.quoteName(pipeline.getName()));
assertEquals(pipelineURL, pipeline.getSourceUrl());
assertEquals(startDate, pipeline.getStartDate());
assertEquals(pipelineConcurrency, pipeline.getConcurrency());
@ -221,7 +225,10 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
@Test
void put_PipelineTasksUpdate_200(TestInfo test) throws IOException {
CreatePipeline request =
createRequest(test).withService(AIRFLOW_REFERENCE.getName()).withDescription(null).withTasks(null);
createRequest(test)
.withService(AIRFLOW_REFERENCE.getFullyQualifiedName())
.withDescription(null)
.withTasks(null);
Pipeline pipeline = createAndCheckEntity(request, ADMIN_AUTH_HEADERS);
// Add description and tasks
@ -258,7 +265,7 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
void put_PipelineTasksOverride_200(TestInfo test) throws IOException {
// A PUT operation with a new Task should override the current tasks in the Pipeline
// This change will always be minor, both with deletes/adds
CreatePipeline request = createRequest(test).withService(AIRFLOW_REFERENCE.getName());
CreatePipeline request = createRequest(test).withService(AIRFLOW_REFERENCE.getFullyQualifiedName());
Pipeline pipeline = createAndCheckEntity(request, ADMIN_AUTH_HEADERS);
List<Task> newTask =
@ -277,7 +284,7 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
@Test
void put_PipelineStatus_200(TestInfo test) throws IOException, ParseException {
CreatePipeline request = createRequest(test).withService(AIRFLOW_REFERENCE.getName());
CreatePipeline request = createRequest(test).withService(AIRFLOW_REFERENCE.getFullyQualifiedName());
Pipeline pipeline = createAndCheckEntity(request, ADMIN_AUTH_HEADERS);
// PUT one status and validate
@ -395,7 +402,7 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
@Test
void put_PipelineInvalidStatus_4xx(TestInfo test) throws IOException, ParseException {
CreatePipeline request = createRequest(test).withService(AIRFLOW_REFERENCE.getName());
CreatePipeline request = createRequest(test).withService(AIRFLOW_REFERENCE.getFullyQualifiedName());
Pipeline pipeline = createAndCheckEntity(request, ADMIN_AUTH_HEADERS);
SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
@ -419,7 +426,7 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
@Test
void patch_PipelineTasksUpdate_200_ok(TestInfo test) throws IOException {
CreatePipeline request = createRequest(test).withService(AIRFLOW_REFERENCE.getName());
CreatePipeline request = createRequest(test).withService(AIRFLOW_REFERENCE.getFullyQualifiedName());
Pipeline pipeline = createAndCheckEntity(request, ADMIN_AUTH_HEADERS);
String origJson = JsonUtils.pojoToJson(pipeline);
@ -487,7 +494,7 @@ public class PipelineResourceTest extends EntityResourceTest<Pipeline, CreatePip
void put_AddRemovePipelineTasksUpdate_200(TestInfo test) throws IOException {
CreatePipeline request =
createRequest(test)
.withService(AIRFLOW_REFERENCE.getName())
.withService(AIRFLOW_REFERENCE.getFullyQualifiedName())
.withDescription(null)
.withTasks(null)
.withConcurrency(null)

View File

@ -33,8 +33,8 @@ import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.service.Entity;
import org.openmetadata.service.resources.EntityResourceTest;
import org.openmetadata.service.resources.databases.TableResourceTest;
import org.openmetadata.service.util.EntityUtil;
import org.openmetadata.service.util.JsonUtils;
import org.openmetadata.service.util.QueryUtil;
import org.openmetadata.service.util.TestUtils;
@Slf4j
@ -63,7 +63,7 @@ public class QueryResourceTest extends EntityResourceTest<Query, CreateQuery> {
Table createdTable = tableResourceTest.createAndCheckEntity(create, ADMIN_AUTH_HEADERS);
TABLE_REF = createdTable.getEntityReference();
QUERY = "select * from sales";
QUERY_CHECKSUM = QueryUtil.getCheckSum(QUERY);
QUERY_CHECKSUM = EntityUtil.hash(QUERY);
}
@Override

View File

@ -181,7 +181,7 @@ public class RoleResourceTest extends EntityResourceTest<Role, CreateRole> {
String updatedBy = getPrincipalName(ADMIN_AUTH_HEADERS);
role =
byName
? getEntityByName(role.getName(), null, null, ADMIN_AUTH_HEADERS)
? getEntityByName(role.getFullyQualifiedName(), null, null, ADMIN_AUTH_HEADERS)
: getEntity(role.getId(), null, ADMIN_AUTH_HEADERS);
validateRole(role, role.getDescription(), role.getDisplayName(), updatedBy);
assertListNull(role.getPolicies(), role.getUsers());
@ -189,7 +189,7 @@ public class RoleResourceTest extends EntityResourceTest<Role, CreateRole> {
String fields = "policies,teams,users";
role =
byName
? getEntityByName(role.getName(), null, fields, ADMIN_AUTH_HEADERS)
? getEntityByName(role.getFullyQualifiedName(), null, fields, ADMIN_AUTH_HEADERS)
: getEntity(role.getId(), fields, ADMIN_AUTH_HEADERS);
assertListNotNull(role.getPolicies(), role.getUsers());
validateRole(role, role.getDescription(), role.getDisplayName(), updatedBy);

View File

@ -84,11 +84,13 @@ class FullyQualifiedNameTest {
}
@Test
void test_getParent() {
assertEquals("a.b.c", FullyQualifiedName.getParent("a.b.c.d"));
assertEquals("a.b", FullyQualifiedName.getParent("a.b.c"));
assertEquals("a", FullyQualifiedName.getParent("a.b"));
assertNull(FullyQualifiedName.getParent("a"));
void test_getParentFQN() {
assertEquals("a.b.c", FullyQualifiedName.getParentFQN("a.b.c.d"));
assertEquals("\"a.b\"", FullyQualifiedName.getParentFQN("\"a.b\".c"));
assertEquals("a", FullyQualifiedName.getParentFQN("a.b"));
assertEquals("a", FullyQualifiedName.getParentFQN("a.\"b.c\""));
assertEquals("a.\"b.c\"", FullyQualifiedName.getParentFQN("a.\"b.c\".d"));
assertNull(FullyQualifiedName.getParentFQN("a"));
}
@Test
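
Note the behavioral change these assertions pin down: the old getParent unquoted a two-part root ("\"a.b\".c" yielded a.b), while getParentFQN preserves the quoting, so the returned parent is itself a valid FQN:

    assertEquals("\"a.b\"", FullyQualifiedName.getParentFQN("\"a.b\".c")); // parent stays quoted and reusable as an FQN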

View File

@ -25,6 +25,7 @@ import org.openmetadata.schema.type.EntityReference;
import org.openmetadata.schema.type.ProviderType;
import org.openmetadata.schema.type.TagLabel;
import org.openmetadata.schema.type.Votes;
import org.openmetadata.schema.utils.EntityInterfaceUtil;
/** Interface to be implemented by all entities to provide a way to access all the common fields. */
public interface EntityInterface {
@ -121,7 +122,8 @@ public interface EntityInterface {
return new EntityReference()
.withId(getId())
.withName(getName())
.withFullyQualifiedName(getFullyQualifiedName() == null ? getName() : getFullyQualifiedName())
.withFullyQualifiedName(
getFullyQualifiedName() == null ? EntityInterfaceUtil.quoteName(getName()) : getFullyQualifiedName())
.withDescription(getDescription())
.withDisplayName(getDisplayName())
.withType(CANONICAL_ENTITY_NAME_MAP.get(this.getClass().getSimpleName().toLowerCase(Locale.ROOT)))

View File

@ -0,0 +1,11 @@
package org.openmetadata.schema.utils;
public final class EntityInterfaceUtil {
/** Quotes a name containing a dot, unless it is already quoted */
public static String quoteName(String name) {
if (name != null && !name.contains("\"")) {
return name.contains(".") ? "\"" + name + "\"" : name;
}
return name;
}
}
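
Usage examples for the new helper (comments show the returned strings):

    EntityInterfaceUtil.quoteName("marketing");      // -> marketing       (no dot: unchanged)
    EntityInterfaceUtil.quoteName("john.doe");       // -> "john.doe"      (dot: wrapped in quotes)
    EntityInterfaceUtil.quoteName("\"john.doe\"");   // -> "john.doe"      (already quoted: returned as-is)
    EntityInterfaceUtil.quoteName(null);             // -> null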

View File

@ -109,7 +109,9 @@ public abstract class PipelineServiceClient {
public static String getServerVersion() throws IOException {
InputStream fileInput = PipelineServiceClient.class.getResourceAsStream("/catalog/VERSION");
Properties props = new Properties();
props.load(fileInput);
if (fileInput != null) {
props.load(fileInput);
}
return props.getProperty("version", "unknown");
}
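
Note: with the null guard, a missing /catalog/VERSION resource now degrades gracefully instead of throwing an NPE; props stays empty and the fallback applies:

    String version = props.getProperty("version", "unknown"); // -> "unknown" when the resource is absent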

View File

@ -9,8 +9,8 @@
"properties": {
"glossary": {
"description": "Name of the glossary that this term is part of.",
"$ref": "../../type/basic.json#/definitions/entityName"
"description": "FullyQualifiedName of the glossary that this term is part of.",
"$ref" : "../../type/basic.json#/definitions/fullyQualifiedEntityName"
},
"parent": {
"description": "Fully qualified name of the parent glossary term.",

View File

@ -164,8 +164,8 @@
"$ref": "../type/basic.json#/definitions/entityName"
},
"fullyQualifiedName": {
"description": "Name that uniquely identifies a Event Subscription.",
"$ref": "../type/basic.json#/definitions/entityName"
"description": "FullyQualifiedName that uniquely identifies a Event Subscription.",
"$ref": "../type/basic.json#/definitions/fullyQualifiedEntityName"
},
"displayName": {
"description": "Display name for this Event Subscription.",