diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MigrationDAO.java b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MigrationDAO.java
index eef51bc43da..db2051425b7 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MigrationDAO.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/jdbi3/MigrationDAO.java
@@ -32,6 +32,14 @@ public interface MigrationDAO {
       connectionType = POSTGRES)
   String getVersionMigrationChecksum(@Bind("version") String version) throws StatementException;
 
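+  // Returns the sqlStatement recorded in SERVER_MIGRATION_SQL_LOGS for the given version and checksum,
+  // or null if that statement has not been executed yet.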
+  @ConnectionAwareSqlQuery(
+      value = "SELECT sqlStatement FROM SERVER_MIGRATION_SQL_LOGS where version = :version and checksum = :checksum",
+      connectionType = MYSQL)
+  @ConnectionAwareSqlQuery(
+      value = "SELECT sqlStatement FROM SERVER_MIGRATION_SQL_LOGS where version = :version and checksum = :checksum",
+      connectionType = POSTGRES)
+  String getSqlQuery(@Bind("version") String version, @Bind("checksum") String checksum) throws StatementException;
+
   @ConnectionAwareSqlUpdate(
       value = "INSERT INTO SERVER_CHANGE_LOG (version, migrationFileName, checksum, installed_on)"
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v112/Migration.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v112/Migration.java
index 5eefef50b56..94e5d7302fc 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v112/Migration.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v112/Migration.java
@@ -1,7 +1,7 @@
 package org.openmetadata.service.migration.mysql.v112;
 
-import static org.openmetadata.service.migration.postgres.v112.Migration.lowerCaseUserNameAndEmail;
-import static org.openmetadata.service.migration.postgres.v112.Migration.unquoteTestSuiteMigration;
+import static org.openmetadata.service.migration.utils.V112.MigrationUtil.fixExecutableTestSuiteFQN;
+import static org.openmetadata.service.migration.utils.V112.MigrationUtil.lowerCaseUserNameAndEmail;
 
 import lombok.SneakyThrows;
 import org.jdbi.v3.core.Handle;
@@ -11,7 +11,6 @@ import org.openmetadata.service.migration.utils.MigrationFile;
 
 public class Migration extends MigrationProcessImpl {
   private CollectionDAO collectionDAO;
-  private Handle handle;
 
   public Migration(MigrationFile migrationFile) {
     super(migrationFile);
@@ -20,7 +19,6 @@ public class Migration extends MigrationProcessImpl {
   @Override
   public void initialize(Handle handle) {
     super.initialize(handle);
-    this.handle = handle;
     this.collectionDAO = handle.attach(CollectionDAO.class);
   }
 
@@ -28,8 +26,7 @@ public class Migration extends MigrationProcessImpl {
   @SneakyThrows
   public void runDataMigration() {
     // Run Data Migration to Remove the quoted Fqn`
-    unquoteTestSuiteMigration(collectionDAO);
-
+    fixExecutableTestSuiteFQN(collectionDAO);
     // Run UserName Migration to make lowercase
     lowerCaseUserNameAndEmail(collectionDAO);
   }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v114/Migration.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v114/Migration.java
new file mode 100644
index 00000000000..eab684f2b84
--- /dev/null
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/mysql/v114/Migration.java
@@ -0,0 +1,31 @@
+package org.openmetadata.service.migration.mysql.v114;
+
+import static org.openmetadata.service.migration.utils.V112.MigrationUtil.lowerCaseUserNameAndEmail;
+import static org.openmetadata.service.migration.utils.V114.MigrationUtil.fixTestSuites;
+
+import lombok.SneakyThrows;
+import org.jdbi.v3.core.Handle;
+import org.openmetadata.service.jdbi3.CollectionDAO;
+import org.openmetadata.service.migration.api.MigrationProcessImpl;
+import org.openmetadata.service.migration.utils.MigrationFile;
+
+public class Migration extends MigrationProcessImpl {
+  private CollectionDAO collectionDAO;
+
+  public Migration(MigrationFile migrationFile) {
+    super(migrationFile);
+  }
+
+  @Override
+  public void initialize(Handle handle) {
+    super.initialize(handle);
+    this.collectionDAO = handle.attach(CollectionDAO.class);
+  }
+
+  @Override
+  @SneakyThrows
+  public void runDataMigration() {
+    fixTestSuites(collectionDAO);
+    lowerCaseUserNameAndEmail(collectionDAO);
+  }
+}
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v112/Migration.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v112/Migration.java
index 7f9e96b7a8a..478ff8b6d30 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v112/Migration.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v112/Migration.java
@@ -1,21 +1,14 @@
 package org.openmetadata.service.migration.postgres.v112;
 
-import java.util.List;
-import java.util.Set;
+import static org.openmetadata.service.migration.utils.V112.MigrationUtil.fixExecutableTestSuiteFQN;
+import static org.openmetadata.service.migration.utils.V112.MigrationUtil.lowerCaseUserNameAndEmail;
+
 import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.jdbi.v3.core.Handle;
-import org.openmetadata.schema.entity.teams.User;
-import org.openmetadata.schema.tests.TestSuite;
-import org.openmetadata.schema.type.Include;
-import org.openmetadata.schema.utils.EntityInterfaceUtil;
 import org.openmetadata.service.jdbi3.CollectionDAO;
-import org.openmetadata.service.jdbi3.ListFilter;
-import org.openmetadata.service.jdbi3.TestSuiteRepository;
 import org.openmetadata.service.migration.api.MigrationProcessImpl;
 import org.openmetadata.service.migration.utils.MigrationFile;
-import org.openmetadata.service.util.EntityUtil;
-import org.openmetadata.service.util.JsonUtils;
 
 @Slf4j
 public class Migration extends MigrationProcessImpl {
@@ -37,46 +30,8 @@ public class Migration extends MigrationProcessImpl {
   @SneakyThrows
   public void runDataMigration() {
     // Run Data Migration to Remove the quoted Fqn`
-    unquoteTestSuiteMigration(collectionDAO);
-
+    fixExecutableTestSuiteFQN(collectionDAO);
     // Run UserName Migration to make lowercase
     lowerCaseUserNameAndEmail(collectionDAO);
   }
-
-  public static void unquoteTestSuiteMigration(CollectionDAO collectionDAO) {
-    TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO);
-    List<TestSuite> testSuites =
-        testSuiteRepository.listAll(new EntityUtil.Fields(Set.of("id")), new ListFilter(Include.ALL));
-    for (TestSuite suite : testSuites) {
-      if (Boolean.TRUE.equals(suite.getExecutable())) {
-        String fqn = suite.getFullyQualifiedName();
-        String updatedFqn = fqn;
-        if (fqn.startsWith("\"") && fqn.endsWith("\"")) {
-          updatedFqn = fqn.substring(1, fqn.length() - 1);
-        }
-        // update the name and fqn
-        suite.setName(updatedFqn);
-        suite.setFullyQualifiedName(updatedFqn);
-        collectionDAO.testSuiteDAO().update(suite);
-      }
-    }
-  }
-
-  public static void lowerCaseUserNameAndEmail(CollectionDAO daoCollection) {
-    LOG.debug("Starting Migration UserName and Email to Lowercase");
-    int total = daoCollection.userDAO().listTotalCount();
-    int offset = 0;
-    int limit = 200;
-    while (offset < total) {
-      List<String> userEntities = daoCollection.userDAO().listAfterWithOffset(limit, offset);
-      for (String json : userEntities) {
-        User userEntity = JsonUtils.readValue(json, User.class);
-        userEntity.setFullyQualifiedName(
-            EntityInterfaceUtil.quoteName(userEntity.getFullyQualifiedName().toLowerCase()));
-        daoCollection.userDAO().update(userEntity);
-      }
-      offset = offset + limit;
-    }
-    LOG.debug("Completed Migrating UserName and Email to Lowercase");
-  }
 }
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v114/Migration.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v114/Migration.java
new file mode 100644
index 00000000000..3f0cd4b6946
--- /dev/null
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/postgres/v114/Migration.java
@@ -0,0 +1,35 @@
+package org.openmetadata.service.migration.postgres.v114;
+
+import static org.openmetadata.service.migration.utils.V112.MigrationUtil.lowerCaseUserNameAndEmail;
+import static org.openmetadata.service.migration.utils.V114.MigrationUtil.fixTestSuites;
+
+import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.jdbi.v3.core.Handle;
+import org.openmetadata.service.jdbi3.CollectionDAO;
+import org.openmetadata.service.migration.api.MigrationProcessImpl;
+import org.openmetadata.service.migration.utils.MigrationFile;
+
+@Slf4j
+public class Migration extends MigrationProcessImpl {
+  private CollectionDAO collectionDAO;
+  private Handle handle;
+
+  public Migration(MigrationFile migrationFile) {
+    super(migrationFile);
+  }
+
+  @Override
+  public void initialize(Handle handle) {
+    super.initialize(handle);
+    this.handle = handle;
+    this.collectionDAO = handle.attach(CollectionDAO.class);
+  }
+
+  @Override
+  @SneakyThrows
+  public void runDataMigration() {
+    fixTestSuites(collectionDAO);
+    lowerCaseUserNameAndEmail(collectionDAO);
+  }
+}
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V112/MigrationUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V112/MigrationUtil.java
new file mode 100644
index 00000000000..03901456efc
--- /dev/null
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V112/MigrationUtil.java
@@ -0,0 +1,52 @@
+package org.openmetadata.service.migration.utils.V112;
+
+import java.util.List;
+import java.util.Set;
+import lombok.extern.slf4j.Slf4j;
+import org.openmetadata.schema.entity.teams.User;
+import org.openmetadata.schema.tests.TestSuite;
+import org.openmetadata.schema.type.Include;
+import org.openmetadata.schema.utils.EntityInterfaceUtil;
+import org.openmetadata.service.jdbi3.CollectionDAO;
+import org.openmetadata.service.jdbi3.ListFilter;
+import org.openmetadata.service.jdbi3.TestSuiteRepository;
+import org.openmetadata.service.util.EntityUtil;
+import org.openmetadata.service.util.JsonUtils;
+
+@Slf4j
+public class MigrationUtil {
+  private MigrationUtil() {}
+
+  public static void fixExecutableTestSuiteFQN(CollectionDAO collectionDAO) {
+    TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO);
+    List<TestSuite> testSuites =
+        testSuiteRepository.listAll(new EntityUtil.Fields(Set.of("id")), new ListFilter(Include.ALL));
+    for (TestSuite suite : testSuites) {
+      if (Boolean.TRUE.equals(suite.getExecutable()) && suite.getExecutableEntityReference() != null) {
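+        // Rebuild the executable suite's name and FQN from its executable entity reference as "<entityFQN>.testSuite"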
+        String tableFQN = suite.getExecutableEntityReference().getFullyQualifiedName();
+        String suiteFQN = tableFQN + ".testSuite";
+        suite.setName(suiteFQN);
+        suite.setFullyQualifiedName(suiteFQN);
+        collectionDAO.testSuiteDAO().update(suite);
+      }
+    }
+  }
+
+  public static void lowerCaseUserNameAndEmail(CollectionDAO daoCollection) {
+    LOG.debug("Starting Migration UserName and Email to Lowercase");
+    int total = daoCollection.userDAO().listTotalCount();
+    int offset = 0;
+    int limit = 200;
+    while (offset < total) {
+      List<String> userEntities = daoCollection.userDAO().listAfterWithOffset(limit, offset);
+      for (String json : userEntities) {
+        User userEntity = JsonUtils.readValue(json, User.class);
+        userEntity.setFullyQualifiedName(
+            EntityInterfaceUtil.quoteName(userEntity.getFullyQualifiedName().toLowerCase()));
+        daoCollection.userDAO().update(userEntity);
+      }
+      offset = offset + limit;
+    }
+    LOG.debug("Completed Migrating UserName and Email to Lowercase");
+  }
+}
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V114/MigrationUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V114/MigrationUtil.java
new file mode 100644
index 00000000000..2657bbe1afc
--- /dev/null
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/V114/MigrationUtil.java
@@ -0,0 +1,91 @@
+package org.openmetadata.service.migration.utils.V114;
+
+import static org.openmetadata.service.Entity.TEST_CASE;
+import static org.openmetadata.service.Entity.TEST_SUITE;
+import static org.openmetadata.service.migration.utils.v110.MigrationUtil.groupTestCasesByTable;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import org.openmetadata.schema.tests.TestCase;
+import org.openmetadata.schema.tests.TestSuite;
+import org.openmetadata.schema.type.Include;
+import org.openmetadata.schema.type.Relationship;
+import org.openmetadata.service.exception.EntityNotFoundException;
+import org.openmetadata.service.jdbi3.CollectionDAO;
+import org.openmetadata.service.jdbi3.ListFilter;
+import org.openmetadata.service.jdbi3.TestSuiteRepository;
+import org.openmetadata.service.util.EntityUtil;
+
+public class MigrationUtil {
+  private MigrationUtil() {
+    /* Utility class; cannot be instantiated */
+  }
+
+  /**
+   * Step 1: Re-run the FQN fix to catch any issues from the previous release where we were quoting the FQN.
+   * Step 2: Group all the testCases by table, building a Map with the table FQN as the key and all the test cases
+   * belonging to that table as the value.
+   * Step 3: Iterate through the Map keySet (the table names). For each table name, build an executable test suite FQN.
+   * Step 4: Fetch the executable testSuite using the FQN from step 3.
+   * Step 5: Iterate through the test case list associated with the current table FQN.
+   * Step 6: For each test case, fetch its TestSuite relationships.
+   * Step 7: Iterate through the testSuite relations to check whether the executableTestSuite FQN matches. If it
+   * matches, a relation from the testCase to an executable test suite already exists.
+   * Step 8: If we can't find a match, create the relationship.
+   *
+   * @param collectionDAO
+   */
+  public static void fixTestSuites(CollectionDAO collectionDAO) {
+    // Fix any FQN issues for executable TestSuite
+    TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO);
+    List<TestSuite> testSuites =
+        testSuiteRepository.listAll(new EntityUtil.Fields(Set.of("id")), new ListFilter(Include.ALL));
+    for (TestSuite suite : testSuites) {
+      if (suite.getExecutableEntityReference() != null) {
+        String tableFQN = suite.getExecutableEntityReference().getFullyQualifiedName();
+        String suiteFQN = tableFQN + ".testSuite";
+        suite.setName(suiteFQN);
+        suite.setFullyQualifiedName(suiteFQN);
+        suite.setExecutable(true);
+        collectionDAO.testSuiteDAO().update(suite);
+      }
+    }
+    // Let's iterate through the test cases and make sure there exists a relationship between testCases and their
+    // native TestSuite
+    Map<String, ArrayList<TestCase>> testCasesGroupByTable = groupTestCasesByTable(collectionDAO);
+    for (String tableFQN : testCasesGroupByTable.keySet()) {
+      List<TestCase> testCases = testCasesGroupByTable.get(tableFQN);
+      String executableTestSuiteFQN = tableFQN + ".testSuite";
+      TestSuite executableTestSuite =
+          testSuiteRepository.getDao().findEntityByName(executableTestSuiteFQN, "fqnHash", Include.ALL);
+      for (TestCase testCase : testCases) {
+        // we are setting mustHaveRelationship to "false" to not throw any error.
+        List<CollectionDAO.EntityRelationshipRecord> existingRelations =
+            testSuiteRepository.findFromRecords(testCase.getId(), TEST_CASE, Relationship.CONTAINS, TEST_SUITE);
+        boolean relationWithExecutableTestSuiteExists = false;
+        if (existingRelations != null) {
+          for (CollectionDAO.EntityRelationshipRecord existingTestSuiteRel : existingRelations) {
+            try {
+              TestSuite existingTestSuite = testSuiteRepository.getDao().findEntityById(existingTestSuiteRel.getId());
+              if (existingTestSuite.getExecutable()
+                  && existingTestSuite.getFullyQualifiedName().equals(executableTestSuiteFQN)) {
+                // a relation to the executable test suite already exists
+                relationWithExecutableTestSuiteExists = true;
+              }
+            } catch (EntityNotFoundException ex) {
+              // if the testSuite cannot be retrieved but the relation exists, this is an orphaned relation;
+              // delete the relation
+              testSuiteRepository.deleteRelationship(
+                  existingTestSuiteRel.getId(), TEST_SUITE, testCase.getId(), TEST_CASE, Relationship.CONTAINS);
+            }
+          }
+        }
+        // if we can't find any executable testSuite relationship, add one
+        if (!relationWithExecutableTestSuiteExists) {
+          testSuiteRepository.addRelationship(
+              executableTestSuite.getId(), testCase.getId(), TEST_SUITE, TEST_CASE, Relationship.CONTAINS);
+        }
+      }
+    }
+  }
+}
diff --git a/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/v110/MigrationUtil.java b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/v110/MigrationUtil.java
index 7adade769c2..d12626135f5 100644
--- a/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/v110/MigrationUtil.java
+++ b/openmetadata-service/src/main/java/org/openmetadata/service/migration/utils/v110/MigrationUtil.java
@@ -4,11 +4,9 @@ import static org.openmetadata.common.utils.CommonUtil.nullOrEmpty;
 import static org.openmetadata.service.Entity.INGESTION_PIPELINE;
 import static org.openmetadata.service.Entity.TEST_CASE;
 import static org.openmetadata.service.Entity.TEST_SUITE;
+import static org.openmetadata.service.util.EntityUtil.hash;
 
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Set;
-import java.util.UUID;
+import java.util.*;
 import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.tuple.Pair;
@@ -61,7 +59,6 @@ import org.openmetadata.schema.type.Include;
 import org.openmetadata.schema.type.Relationship;
 import org.openmetadata.schema.utils.EntityInterfaceUtil;
 import org.openmetadata.service.Entity;
-import org.openmetadata.service.exception.EntityNotFoundException;
 import org.openmetadata.service.jdbi3.CollectionDAO;
 import org.openmetadata.service.jdbi3.EntityDAO;
 import org.openmetadata.service.jdbi3.IngestionPipelineRepository;
@@ -72,7 +69,6 @@ import org.openmetadata.service.jdbi3.TestCaseRepository;
 import org.openmetadata.service.jdbi3.TestSuiteRepository;
 import org.openmetadata.service.resources.databases.DatasourceConfig;
 import org.openmetadata.service.resources.feeds.MessageParser;
-import org.openmetadata.service.util.EntityUtil;
 import org.openmetadata.service.util.EntityUtil.Fields;
 import org.openmetadata.service.util.FullyQualifiedName;
 import org.openmetadata.service.util.JsonUtils;
@@ -157,55 +153,60 @@ public class MigrationUtil {
     }
     while (true) {
       // Read from Database
-      List<String> jsons = dao.migrationListAfterWithOffset(limitParam, nameHashColumn);
-      LOG.debug("[{}]Read a Batch of Size: {}", dao.getTableName(), jsons.size());
-      if (jsons.isEmpty()) {
+      try {
+        List<String> jsons = dao.migrationListAfterWithOffset(limitParam, nameHashColumn);
+        LOG.debug("[{}]Read a Batch of Size: {}", dao.getTableName(), jsons.size());
+        if (jsons.isEmpty()) {
+          break;
+        }
+        // Process Update
+        for (String json : jsons) {
+          // Update the Statements to Database
+          T entity = JsonUtils.readValue(json, clazz);
+          try {
+            String hash;
+            if (entity.getFullyQualifiedName() != null) {
+              hash =
+                  withName
+                      ? FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(entity.getFullyQualifiedName()))
+                      : FullyQualifiedName.buildHash(entity.getFullyQualifiedName());
+            } else {
+              LOG.info(
+                  "Failed in creating FQN Hash for Entity Name : {}, since the FQN is null. Auto Correcting.",
+                  entity.getName());
+              hash =
+                  withName
+                      ? FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(entity.getName()))
+                      : FullyQualifiedName.buildHash(entity.getName());
+              entity.setFullyQualifiedName(entity.getName());
+              dao.update(entity.getId(), entity.getName(), JsonUtils.pojoToJson(entity));
+            }
+            int result =
+                handle
+                    .createUpdate(updateSql)
+                    .bind("nameHashColumnValue", hash)
+                    .bind("id", entity.getId().toString())
+                    .execute();
+            if (result <= 0) {
+              LOG.error("No Rows Affected for Updating Hash with Entity Name : {}", entity.getFullyQualifiedName());
+            }
+          } catch (Exception ex) {
+            LOG.error("Failed in creating FQN Hash for Entity Name : {}", entity.getFullyQualifiedName(), ex);
+          }
+        }
+      } catch (Exception ex) {
+        LOG.warn("Failed to list the entities, they might already be migrated", ex);
         break;
       }
-      // Process Update
-      for (String json : jsons) {
-        // Update the Statements to Database
-        T entity = JsonUtils.readValue(json, clazz);
-        try {
-          String hash;
-          if (entity.getFullyQualifiedName() != null) {
-            hash =
-                withName
-                    ? FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(entity.getFullyQualifiedName()))
-                    : FullyQualifiedName.buildHash(entity.getFullyQualifiedName());
-          } else {
-            LOG.info(
-                "Failed in creating FQN Hash for Entity Name : {}, since the FQN is null. Auto Correcting.",
-                entity.getName());
-            hash =
-                withName
-                    ? FullyQualifiedName.buildHash(EntityInterfaceUtil.quoteName(entity.getName()))
-                    : FullyQualifiedName.buildHash(entity.getName());
-            entity.setFullyQualifiedName(entity.getName());
-            dao.update(entity.getId(), entity.getName(), JsonUtils.pojoToJson(entity));
-          }
-          int result =
-              handle
-                  .createUpdate(updateSql)
-                  .bind("nameHashColumnValue", hash)
-                  .bind("id", entity.getId().toString())
-                  .execute();
-          if (result <= 0) {
-            LOG.error("No Rows Affected for Updating Hash with Entity Name : {}", entity.getFullyQualifiedName());
-          }
-        } catch (Exception ex) {
-          LOG.error("Failed in creating FQN Hash for Entity Name : {}", entity.getFullyQualifiedName(), ex);
-        }
-      }
+      LOG.debug("End Migration for table : {}", dao.getTableName());
     }
-    LOG.debug("End Migration for table : {}", dao.getTableName());
   }
 
   public static MigrationDAO.ServerMigrationSQLTable buildServerMigrationTable(String version, String statement) {
     MigrationDAO.ServerMigrationSQLTable result = new MigrationDAO.ServerMigrationSQLTable();
     result.setVersion(String.valueOf(version));
     result.setSqlStatement(statement);
-    result.setCheckSum(EntityUtil.hash(statement));
+    result.setCheckSum(hash(statement));
     return result;
   }
 
@@ -406,11 +407,13 @@ public class MigrationUtil {
     if (!nullOrEmpty(queryList)) {
       for (String sql : queryList) {
         try {
-          handle.execute(sql);
-          migrationDAO.upsertServerMigrationSQL(version, sql, EntityUtil.hash(sql));
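+          // Only run the statement if its checksum has not already been recorded for this version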
+          String previouslyRanSql = migrationDAO.getSqlQuery(version, hash(sql));
+          if (previouslyRanSql == null || previouslyRanSql.isEmpty()) {
+            handle.execute(sql);
+            migrationDAO.upsertServerMigrationSQL(version, sql, hash(sql));
+          }
         } catch (Exception e) {
           LOG.error(String.format("Failed to run sql %s due to %s", sql, e));
-          throw e;
         }
       }
     }
@@ -450,110 +453,101 @@ public class MigrationUtil {
     return entity;
   }
 
+  /**
+   * Test Suites Migration in 1.0.x -> 1.1.4:
+   * 1. This is the first time users are migrating from user-created TestSuites to system-created native TestSuites
+   * per Table.
+   * 2. Our goal with this migration is to list all the test cases, create a native "<tableFQN>.testSuite" with
+   * executable set to true, and associate all of the respective test cases with the new native test suite.
+   *
+   * @param collectionDAO
+   */
   @SneakyThrows
   public static void testSuitesMigration(CollectionDAO collectionDAO) {
-    IngestionPipelineRepository ingestionPipelineRepository = new IngestionPipelineRepository(collectionDAO);
+    // Update existing test suites as logical test suites and delete any ingestion pipeline associated with the
+    // existing test suite
+    migrateExistingTestSuitesToLogical(collectionDAO);
+
+    // create native test suites
     TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO);
-    TestCaseRepository testCaseRepository = new TestCaseRepository(collectionDAO);
-    List<TestCase> testCases = testCaseRepository.listAll(new Fields(Set.of("id")), new ListFilter(Include.ALL));
-
-    for (TestCase test : testCases) {
-
-      // Create New Executable Test Suites
-      MessageParser.EntityLink entityLink = MessageParser.EntityLink.parse(test.getEntityLink());
-      // Create new Logical Test Suite
-      String testSuiteFqn = entityLink.getEntityFQN() + ".testSuite";
-      TestSuite stored;
-      try {
-        // If entity is found by Hash it is already migrated
-        testSuiteRepository.getDao().findEntityByName(testSuiteFqn, "nameHash", Include.ALL);
-      } catch (EntityNotFoundException entityNotFoundException) {
+    Map<String, ArrayList<TestCase>> testCasesByTable = groupTestCasesByTable(collectionDAO);
+    for (String tableFQN : testCasesByTable.keySet()) {
+      String nativeTestSuiteFqn = tableFQN + ".testSuite";
+      List<TestCase> testCases = testCasesByTable.get(tableFQN);
+      if (testCases != null && !testCases.isEmpty()) {
+        MessageParser.EntityLink entityLink =
+            MessageParser.EntityLink.parse(testCases.stream().findFirst().get().getEntityLink());
+        TestSuite newExecutableTestSuite =
+            getTestSuite(
+                    collectionDAO,
+                    new CreateTestSuite()
+                        .withName(FullyQualifiedName.buildHash(nativeTestSuiteFqn))
+                        .withDisplayName(nativeTestSuiteFqn)
+                        .withExecutableEntityReference(entityLink.getEntityFQN()),
+                    "ingestion-bot")
+                .withExecutable(true)
+                .withFullyQualifiedName(nativeTestSuiteFqn);
+        testSuiteRepository.prepareInternal(newExecutableTestSuite, false);
         try {
-          // Check if the test Suite Exists, this brings the data on nameHash basis
-          stored =
-              testSuiteRepository
-                  .getDao()
-                  .findEntityByName(EntityInterfaceUtil.quoteName(testSuiteFqn), "nameHash", Include.ALL);
+          testSuiteRepository
+              .getDao()
+              .insert("nameHash", newExecutableTestSuite, newExecutableTestSuite.getFullyQualifiedName());
+        } catch (Exception ex) {
+          LOG.warn(String.format("TestSuite %s exists", nativeTestSuiteFqn));
+        }
+        // add relationship between the executable TestSuite and the Table
+        testSuiteRepository.addRelationship(
+            newExecutableTestSuite.getExecutableEntityReference().getId(),
+            newExecutableTestSuite.getId(),
+            Entity.TABLE,
+            TEST_SUITE,
+            Relationship.CONTAINS);
+
+        // add a relationship between the native test suite and all the testCases created against the table
+        for (TestCase testCase : testCases) {
           testSuiteRepository.addRelationship(
-              stored.getId(), test.getId(), TEST_SUITE, TEST_CASE, Relationship.CONTAINS);
-          stored.setExecutable(true);
-          stored.setName(FullyQualifiedName.buildHash(testSuiteFqn));
-          // the update() method here internally calls FullyQualifiedName.buildHash so not adding it
-          stored.setFullyQualifiedName(EntityInterfaceUtil.quoteName(FullyQualifiedName.buildHash(testSuiteFqn)));
-          stored.setDisplayName(testSuiteFqn);
-          testSuiteRepository.getDao().update(stored);
-        } catch (EntityNotFoundException ex) {
-          try {
-            TestSuite newExecutableTestSuite =
-                getTestSuite(
-                        collectionDAO,
-                        new CreateTestSuite()
-                            .withName(FullyQualifiedName.buildHash(testSuiteFqn))
-                            .withDisplayName(testSuiteFqn)
-                            .withExecutableEntityReference(entityLink.getEntityFQN()),
-                        "ingestion-bot")
-                    .withExecutable(false);
-            // Create
-            testSuiteRepository.prepareInternal(newExecutableTestSuite, true);
-            testSuiteRepository
-                .getDao()
-                .insert("nameHash", newExecutableTestSuite, newExecutableTestSuite.getFullyQualifiedName());
-            // Here we aer manually adding executable relationship since the table Repository is not registered and
-            // result
-            // into null for entity type table
-            testSuiteRepository.addRelationship(
-                newExecutableTestSuite.getExecutableEntityReference().getId(),
-                newExecutableTestSuite.getId(),
-                Entity.TABLE,
-                TEST_SUITE,
-                Relationship.CONTAINS);
-
-            // add relationship from testSuite to TestCases
-            testSuiteRepository.addRelationship(
-                newExecutableTestSuite.getId(), test.getId(), TEST_SUITE, TEST_CASE, Relationship.CONTAINS);
-
-            // Not a good approach but executable cannot be set true before
-            TestSuite temp = testSuiteRepository.getDao().findEntityByName(testSuiteFqn, "nameHash", Include.ALL);
-            temp.setExecutable(true);
-            testSuiteRepository.getDao().update("nameHash", temp);
-          } catch (Exception exIgnore) {
-            LOG.warn("Ignoring error since already added: {}", ex.getMessage());
-          }
+              newExecutableTestSuite.getId(), testCase.getId(), TEST_SUITE, TEST_CASE, Relationship.CONTAINS);
         }
       }
     }
-
-    // Update Test Suites
-    ListFilter filter = new ListFilter(Include.ALL);
-    filter.addQueryParam("testSuiteType", "logical");
-    List<TestSuite> testSuites = testSuiteRepository.listAll(new Fields(Set.of("id")), filter);
-
-    for (TestSuite testSuiteRecord : testSuites) {
-      TestSuite temp = testSuiteRepository.getDao().findEntityById(testSuiteRecord.getId(), Include.ALL);
-      if (Boolean.FALSE.equals(temp.getExecutable())) {
-        temp.setExecutable(false);
-        testSuiteRepository.getDao().update(temp);
-      }
-
-      // get Ingestion Pipelines
-      try {
-        List<CollectionDAO.EntityRelationshipRecord> ingestionPipelineRecords =
-            collectionDAO
-                .relationshipDAO()
-                .findTo(
-                    testSuiteRecord.getId().toString(),
-                    TEST_SUITE,
-                    Relationship.CONTAINS.ordinal(),
-                    INGESTION_PIPELINE);
-        for (CollectionDAO.EntityRelationshipRecord ingestionRecord : ingestionPipelineRecords) {
-          // remove relationship
-          collectionDAO.relationshipDAO().deleteAll(ingestionRecord.getId().toString(), INGESTION_PIPELINE);
-          // Cannot use Delete directly it uses other repos internally
-          ingestionPipelineRepository.getDao().delete(ingestionRecord.getId().toString());
-        }
-      } catch (EntityNotFoundException ex) {
-        // Already Removed
-      }
-    }
+  }
+
+  private static void migrateExistingTestSuitesToLogical(CollectionDAO collectionDAO) {
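+    // Mark every pre-existing test suite as logical and drop its ingestion pipelines; the native executable
+    // suites are then re-created in testSuitesMigration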
+    IngestionPipelineRepository ingestionPipelineRepository = new IngestionPipelineRepository(collectionDAO);
+    TestSuiteRepository testSuiteRepository = new TestSuiteRepository(collectionDAO);
+    ListFilter filter = new ListFilter(Include.ALL);
+    List<TestSuite> testSuites = testSuiteRepository.listAll(new Fields(Set.of("id")), filter);
+    for (TestSuite testSuite : testSuites) {
+      testSuite.setExecutable(false);
+      List<CollectionDAO.EntityRelationshipRecord> ingestionPipelineRecords =
+          collectionDAO
+              .relationshipDAO()
+              .findTo(testSuite.getId().toString(), TEST_SUITE, Relationship.CONTAINS.ordinal(), INGESTION_PIPELINE);
+      for (CollectionDAO.EntityRelationshipRecord ingestionRecord : ingestionPipelineRecords) {
+        // remove relationship
+        collectionDAO.relationshipDAO().deleteAll(ingestionRecord.getId().toString(), INGESTION_PIPELINE);
+        // Cannot use delete directly; it uses other repositories internally
+        ingestionPipelineRepository.getDao().delete(ingestionRecord.getId().toString());
+      }
+    }
+  }
+
+  public static Map<String, ArrayList<TestCase>> groupTestCasesByTable(CollectionDAO collectionDAO) {
+    Map<String, ArrayList<TestCase>> testCasesByTable = new HashMap<>();
+    TestCaseRepository testCaseRepository = new TestCaseRepository(collectionDAO);
+    List<TestCase> testCases = testCaseRepository.listAll(new Fields(Set.of("id")), new ListFilter(Include.ALL));
+    for (TestCase testCase : testCases) {
+      // Resolve the table the test case was created against from its entity link
+      MessageParser.EntityLink entityLink = MessageParser.EntityLink.parse(testCase.getEntityLink());
+      // Group the test case under its table FQN
+      ArrayList<TestCase> testCasesGroup = new ArrayList<>();
+      if (testCasesByTable.containsKey(entityLink.getEntityFQN())) {
+        testCasesGroup = testCasesByTable.get(entityLink.getEntityFQN());
+        testCasesGroup.add(testCase);
+      } else {
+        testCasesGroup.add(testCase);
+      }
+      testCasesByTable.put(entityLink.getEntityFQN(), testCasesGroup);
+    }
+    return testCasesByTable;
+  }
 }