From e07306b51e060ad2ad9dc12d436a67d5d7907a3d Mon Sep 17 00:00:00 2001 From: "Yi (Alan) Wang" Date: Fri, 11 Nov 2016 17:25:41 -0800 Subject: [PATCH] Update MetadataChangeEvent, separate privacy compliance from security (#275) --- .../controllers/DatasetInfoController.java | 69 ++- .../app/models/daos/DatasetInfoDao.java | 86 ++- .../models/kafka/MetadataChangeProcessor.java | 11 +- backend-service/conf/routes | 6 + .../DDL/ETL_DDL/dataset_info_metadata.sql | 28 +- data-model/avro/MetadataChangeEvent.avsc | 548 ++++++++++-------- web/app/controllers/api/v1/Dataset.java | 43 +- web/conf/routes | 4 + .../common/schemas/DatasetCapacityRecord.java | 12 +- .../schemas/DatasetComplianceRecord.java | 81 +++ .../schemas/DatasetOriginalSchemaRecord.java | 9 - .../common/schemas/DatasetOwnerRecord.java | 16 +- .../schemas/DatasetSchemaInfoRecord.java | 22 +- .../common/schemas/DatasetSecurityRecord.java | 22 +- 14 files changed, 626 insertions(+), 331 deletions(-) create mode 100644 wherehows-common/src/main/java/wherehows/common/schemas/DatasetComplianceRecord.java diff --git a/backend-service/app/controllers/DatasetInfoController.java b/backend-service/app/controllers/DatasetInfoController.java index 83b7e08f65..127e552564 100644 --- a/backend-service/app/controllers/DatasetInfoController.java +++ b/backend-service/app/controllers/DatasetInfoController.java @@ -28,6 +28,7 @@ import play.mvc.Controller; import play.mvc.Result; import wherehows.common.schemas.DatasetCapacityRecord; import wherehows.common.schemas.DatasetCaseSensitiveRecord; +import wherehows.common.schemas.DatasetComplianceRecord; import wherehows.common.schemas.DatasetConstraintRecord; import wherehows.common.schemas.DatasetIndexRecord; import wherehows.common.schemas.DatasetOwnerRecord; @@ -171,7 +172,7 @@ public class DatasetInfoController extends Controller { try { List records = DatasetInfoDao.getDatasetTagByDatasetId(datasetId); resultJson.put("return_code", 200); - resultJson.set("capacity", Json.toJson(records)); + resultJson.set("tags", Json.toJson(records)); } catch (EmptyResultDataAccessException e) { Logger.debug("DataAccessException dataset id: " + datasetId, e); resultJson.put("return_code", 404); @@ -190,7 +191,7 @@ public class DatasetInfoController extends Controller { try { List records = DatasetInfoDao.getDatasetTagByDatasetUrn(urn); resultJson.put("return_code", 200); - resultJson.set("capacity", Json.toJson(records)); + resultJson.set("tags", Json.toJson(records)); } catch (EmptyResultDataAccessException e) { Logger.debug("DataAccessException urn: " + urn, e); resultJson.put("return_code", 404); @@ -401,6 +402,66 @@ public class DatasetInfoController extends Controller { return ok(resultJson); } + public static Result getDatasetCompliance() + throws SQLException { + ObjectNode resultJson = Json.newObject(); + String datasetIdString = request().getQueryString("datasetId"); + if (datasetIdString != null) { + int datasetId = Integer.parseInt(datasetIdString); + + try { + DatasetComplianceRecord record = DatasetInfoDao.getDatasetComplianceByDatasetId(datasetId); + resultJson.put("return_code", 200); + resultJson.set("privacyCompliancePolicy", Json.toJson(record)); + } catch (EmptyResultDataAccessException e) { + Logger.debug("DataAccessException dataset id: " + datasetId, e); + resultJson.put("return_code", 404); + resultJson.put("error_message", "dataset " + datasetId + " privacy compliance info cannot be found!"); + } + return ok(resultJson); + } + + String urn = request().getQueryString("urn"); + if (urn != null) 
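+      // Same lookup-and-wrap pattern as the datasetId branch above; on success the
+      // response body is {"return_code": 200, "privacyCompliancePolicy": {...}}.
+      // Hypothetical client-side sketch (httpGet() is a stand-in, not part of this patch):
+      //   JsonNode resp = Json.parse(httpGet(backendUrl + "/dataset/compliance?urn=" + urn));
+      //   if (resp.get("return_code").asInt() == 200) {
+      //     JsonNode policy = resp.get("privacyCompliancePolicy");
+      //   }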
{ + if (!Urn.validateUrn(urn)) { + resultJson.put("return_code", 400); + resultJson.put("error_message", "Urn format wrong!"); + return ok(resultJson); + } + try { + DatasetComplianceRecord record = DatasetInfoDao.getDatasetComplianceByDatasetUrn(urn); + resultJson.put("return_code", 200); + resultJson.set("privacyCompliancePolicy", Json.toJson(record)); + } catch (EmptyResultDataAccessException e) { + Logger.debug("DataAccessException urn: " + urn, e); + resultJson.put("return_code", 404); + resultJson.put("error_message", "dataset " + urn + " privacy compliance info cannot be found!"); + } + return ok(resultJson); + } + + // if no parameter, return an error message + resultJson.put("return_code", 400); + resultJson.put("error_message", "No parameter provided"); + return ok(resultJson); + } + + @BodyParser.Of(BodyParser.Json.class) + public static Result updateDatasetCompliance() { + JsonNode root = request().body().asJson(); + ObjectNode resultJson = Json.newObject(); + try { + DatasetInfoDao.updateDatasetCompliance(root); + resultJson.put("return_code", 200); + resultJson.put("message", "Dataset privacy compliance info updated!"); + } catch (Exception e) { + e.printStackTrace(); + resultJson.put("return_code", 404); + resultJson.put("error_message", e.getMessage()); + } + return ok(resultJson); + } + public static Result getDatasetSecurity() throws SQLException { ObjectNode resultJson = Json.newObject(); @@ -411,7 +472,7 @@ public class DatasetInfoController extends Controller { try { DatasetSecurityRecord record = DatasetInfoDao.getDatasetSecurityByDatasetId(datasetId); resultJson.put("return_code", 200); - resultJson.set("securitySpec", Json.toJson(record)); + resultJson.set("securitySpecification", Json.toJson(record)); } catch (EmptyResultDataAccessException e) { Logger.debug("DataAccessException dataset id: " + datasetId, e); resultJson.put("return_code", 404); @@ -430,7 +491,7 @@ public class DatasetInfoController extends Controller { try { DatasetSecurityRecord record = DatasetInfoDao.getDatasetSecurityByDatasetUrn(urn); resultJson.put("return_code", 200); - resultJson.set("securitySpec", Json.toJson(record)); + resultJson.set("securitySpecification", Json.toJson(record)); } catch (EmptyResultDataAccessException e) { Logger.debug("DataAccessException urn: " + urn, e); resultJson.put("return_code", 404); diff --git a/backend-service/app/models/daos/DatasetInfoDao.java b/backend-service/app/models/daos/DatasetInfoDao.java index 5c401cd385..13c8337ca3 100644 --- a/backend-service/app/models/daos/DatasetInfoDao.java +++ b/backend-service/app/models/daos/DatasetInfoDao.java @@ -30,6 +30,7 @@ import utils.JdbcUtil; import wherehows.common.enums.OwnerType; import wherehows.common.schemas.DatasetCapacityRecord; import wherehows.common.schemas.DatasetCaseSensitiveRecord; +import wherehows.common.schemas.DatasetComplianceRecord; import wherehows.common.schemas.DatasetConstraintRecord; import wherehows.common.schemas.DeploymentRecord; import wherehows.common.schemas.DatasetFieldIndexRecord; @@ -57,7 +58,8 @@ public class DatasetInfoDao { private static final String DATASET_CASE_SENSITIVE_TABLE = "dataset_case_sensitivity"; private static final String DATASET_REFERENCE_TABLE = "dataset_reference"; private static final String DATASET_PARTITION_TABLE = "dataset_partition"; - private static final String DATASET_SECURITY_TABLE = "dataset_security_info"; + private static final String DATASET_COMPLIANCE_TABLE = "dataset_privacy_compliance"; + private static final String DATASET_SECURITY_TABLE = 
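+      // table renamed from "dataset_security_info"; the purge/compliance columns moved
+      // out into the new DATASET_COMPLIANCE_TABLE above (see the DDL changes in this patch)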
"dataset_security"; private static final String DATASET_OWNER_TABLE = "dataset_owner"; private static final String DATASET_OWNER_UNMATCHED_TABLE = "stg_dataset_owner_unmatched"; private static final String DATASET_CONSTRAINT_TABLE = "dataset_constraint"; @@ -80,6 +82,8 @@ public class DatasetInfoDao { new DatabaseWriter(JdbcUtil.wherehowsJdbcTemplate, DATASET_REFERENCE_TABLE); private static final DatabaseWriter PARTITION_WRITER = new DatabaseWriter(JdbcUtil.wherehowsJdbcTemplate, DATASET_PARTITION_TABLE); + private static final DatabaseWriter COMPLIANCE_WRITER = + new DatabaseWriter(JdbcUtil.wherehowsJdbcTemplate, DATASET_COMPLIANCE_TABLE); private static final DatabaseWriter SECURITY_WRITER = new DatabaseWriter(JdbcUtil.wherehowsJdbcTemplate, DATASET_SECURITY_TABLE); private static final DatabaseWriter OWNER_WRITER = @@ -146,6 +150,12 @@ public class DatasetInfoDao { public static final String GET_DATASET_PARTITION_BY_URN = "SELECT * FROM " + DATASET_PARTITION_TABLE + " WHERE dataset_urn = :dataset_urn"; + public static final String GET_DATASET_COMPLIANCE_BY_DATASET_ID = + "SELECT * FROM " + DATASET_COMPLIANCE_TABLE + " WHERE dataset_id = :dataset_id"; + + public static final String GET_DATASET_COMPLIANCE_BY_URN = + "SELECT * FROM " + DATASET_COMPLIANCE_TABLE + " WHERE dataset_urn = :dataset_urn"; + public static final String GET_DATASET_SECURITY_BY_DATASET_ID = "SELECT * FROM " + DATASET_SECURITY_TABLE + " WHERE dataset_id = :dataset_id"; @@ -649,6 +659,64 @@ public class DatasetInfoDao { } } + public static DatasetComplianceRecord getDatasetComplianceByDatasetId(int datasetId) + throws DataAccessException { + Map params = new HashMap<>(); + params.put("dataset_id", datasetId); + Map result = + JdbcUtil.wherehowsNamedJdbcTemplate.queryForMap(GET_DATASET_COMPLIANCE_BY_DATASET_ID, params); + + DatasetComplianceRecord record = new DatasetComplianceRecord(); + record.convertToRecord(result); + return record; + } + + public static DatasetComplianceRecord getDatasetComplianceByDatasetUrn(String datasetUrn) + throws DataAccessException { + Map params = new HashMap<>(); + params.put("dataset_urn", datasetUrn); + Map result = + JdbcUtil.wherehowsNamedJdbcTemplate.queryForMap(GET_DATASET_COMPLIANCE_BY_URN, params); + + DatasetComplianceRecord record = new DatasetComplianceRecord(); + record.convertToRecord(result); + return record; + } + + public static void updateDatasetCompliance(JsonNode root) + throws Exception { + final JsonNode security = root.path("privacyCompliancePolicy"); + if (security.isMissingNode() || security.isNull()) { + throw new IllegalArgumentException( + "Dataset security info update fail, missing necessary fields: " + root.toString()); + } + + final Object[] idUrn = findDataset(root); + if (idUrn[0] == null || idUrn[1] == null) { + throw new IllegalArgumentException("Cannot identify dataset from id/uri/urn: " + root.toString()); + } + final Integer datasetId = (Integer) idUrn[0]; + final String urn = (String) idUrn[1]; + + ObjectMapper om = new ObjectMapper(); + + DatasetComplianceRecord record = om.convertValue(security, DatasetComplianceRecord.class); + record.setDatasetId(datasetId); + record.setDatasetUrn(urn); + record.setModifiedTime(System.currentTimeMillis() / 1000); + try { + DatasetComplianceRecord result = getDatasetComplianceByDatasetId(datasetId); + String[] columns = record.getDbColumnNames(); + Object[] columnValues = record.getAllValuesToString(); + String[] conditions = {"dataset_id"}; + Object[] conditionValues = new Object[]{datasetId}; + 
COMPLIANCE_WRITER.update(columns, columnValues, conditions, conditionValues); + } catch (EmptyResultDataAccessException ex) { + COMPLIANCE_WRITER.append(record); + COMPLIANCE_WRITER.insert(); + } + } + public static DatasetSecurityRecord getDatasetSecurityByDatasetId(int datasetId) throws DataAccessException { Map params = new HashMap<>(); @@ -675,7 +743,7 @@ public class DatasetInfoDao { public static void updateDatasetSecurity(JsonNode root) throws Exception { - final JsonNode security = root.path("securitySpec"); + final JsonNode security = root.path("securitySpecification"); if (security.isMissingNode() || security.isNull()) { throw new IllegalArgumentException( "Dataset security info update fail, missing necessary fields: " + root.toString()); @@ -778,15 +846,15 @@ public class DatasetInfoDao { record.setCreatedTime(eventTime); record.setModifiedTime(System.currentTimeMillis() / 1000); - final String ownerString = record.getOwnerUrn(); + final String ownerString = record.getOwner(); int lastIndex = ownerString.lastIndexOf(':'); if (lastIndex >= 0) { - record.setOwnerUrn(ownerString.substring(lastIndex + 1)); + record.setOwner(ownerString.substring(lastIndex + 1)); record.setNamespace(ownerString.substring(0, lastIndex)); } else { record.setNamespace(""); } - Map ownerInfo = getOwnerByOwnerId(record.getOwnerUrn()); + Map ownerInfo = getOwnerByOwnerId(record.getOwner()); Integer appId = 0; String isActive = "N"; if (ownerInfo.containsKey("app_id")) { @@ -850,7 +918,7 @@ public class DatasetInfoDao { record.setDatasetId(datasetId); record.setDatasetUrn(datasetUrn); record.setOwnerType("Producer"); - record.setOwnerUrn(ownerName); + record.setOwner(ownerName); record.setOwnerType(ownerIdType); record.setIsGroup(isGroup); record.setIsActive("Y"); @@ -882,13 +950,13 @@ public class DatasetInfoDao { if (newOwnerList != null) { for (DatasetOwnerRecord owner : newOwnerList) { owner.setSortId(sortId++); - uniqueRecords.put(owner.getOwnerUrn(), owner); + uniqueRecords.put(owner.getOwner(), owner); combinedList.add(owner); } } for (DatasetOwnerRecord owner : oldOwnerList) { - DatasetOwnerRecord exist = uniqueRecords.get(owner.getOwnerUrn()); + DatasetOwnerRecord exist = uniqueRecords.get(owner.getOwner()); if (exist != null) { exist.setDbIds(owner.getDbIds()); exist.setCreatedTime(StringUtil.toLong(owner.getCreatedTime())); @@ -903,7 +971,7 @@ public class DatasetInfoDao { } else { if (!(source != null && source.equalsIgnoreCase(owner.getOwnerSource()))) { owner.setSortId(sortId++); - uniqueRecords.put(owner.getOwnerUrn(), owner); + uniqueRecords.put(owner.getOwner(), owner); combinedList.add(owner); } } diff --git a/backend-service/app/models/kafka/MetadataChangeProcessor.java b/backend-service/app/models/kafka/MetadataChangeProcessor.java index ef4a208545..e781d83192 100644 --- a/backend-service/app/models/kafka/MetadataChangeProcessor.java +++ b/backend-service/app/models/kafka/MetadataChangeProcessor.java @@ -25,7 +25,7 @@ public class MetadataChangeProcessor { private final String[] CHANGE_ITEMS = {"schema", "owners", "datasetProperties", "references", "partitionSpec", "deploymentInfo", "tags", - "constraints", "indices", "capacity", "securitySpec"}; + "constraints", "indices", "capacity", "privacyCompliancePolicy", "securitySpecification"}; /** * Process a MetadataChangeEvent record @@ -134,7 +134,14 @@ public class MetadataChangeProcessor { Logger.debug("Metadata change exception: capacity ", ex); } break; - case "securitySpec": + case "privacyCompliancePolicy": + try { + 
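+        // Dispatched when "privacyCompliancePolicy" appears among the event's changed
+        // items. A minimal triggering payload might look like this (sketch; enum values
+        // from this patch, "identifierField" assumed from the Avro PurgeableEntityField):
+        //   { "urn": "...",
+        //     "privacyCompliancePolicy": {
+        //       "complianceType": "AUTO_PURGE",
+        //       "compliancePurgeEntities": [
+        //         { "identifierType": "CUSTOMER_ID", "identifierField": "customer_id" } ] } }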
DatasetInfoDao.updateDatasetCompliance(rootNode); + } catch (Exception ex) { + Logger.debug("Metadata change exception: compliance ", ex); + } + break; + case "securitySpecification": try { DatasetInfoDao.updateDatasetSecurity(rootNode); } catch (Exception ex) { diff --git a/backend-service/conf/routes b/backend-service/conf/routes index 200f85746e..04e2462fcb 100644 --- a/backend-service/conf/routes +++ b/backend-service/conf/routes @@ -166,6 +166,12 @@ GET /dataset/partition controllers.DatasetInfoController.getD # Update partition info of a dataset POST /dataset/partition controllers.DatasetInfoController.updateDatasetPartition() +# Get privacy compliance info of a dataset by either id or urn +GET /dataset/compliance controllers.DatasetInfoController.getDatasetCompliance() + +# Update privacy compliance info of a dataset +POST /dataset/compliance controllers.DatasetInfoController.updateDatasetCompliance() + # Get security info of a dataset by either id or urn GET /dataset/security controllers.DatasetInfoController.getDatasetSecurity() diff --git a/data-model/DDL/ETL_DDL/dataset_info_metadata.sql b/data-model/DDL/ETL_DDL/dataset_info_metadata.sql index db17bf07df..aabc648f1f 100644 --- a/data-model/DDL/ETL_DDL/dataset_info_metadata.sql +++ b/data-model/DDL/ETL_DDL/dataset_info_metadata.sql @@ -38,8 +38,8 @@ CREATE TABLE dataset_capacity ( `capacity_name` VARCHAR(100) NOT NULL, `capacity_type` VARCHAR(50) DEFAULT NULL, `capacity_unit` VARCHAR(20) DEFAULT NULL, - `capacity_low` BIGINT DEFAULT NULL, - `capacity_high` BIGINT DEFAULT NULL, + `capacity_low` DOUBLE DEFAULT NULL, + `capacity_high` DOUBLE DEFAULT NULL, `modified_time` INT UNSIGNED DEFAULT NULL COMMENT 'the modified time in epoch', PRIMARY KEY (`dataset_id`, `capacity_name`), @@ -105,20 +105,31 @@ CREATE TABLE dataset_partition ( ENGINE = InnoDB DEFAULT CHARSET = latin1; -CREATE TABLE `dataset_security_info` ( +CREATE TABLE `dataset_privacy_compliance` ( + `dataset_id` INT(10) UNSIGNED NOT NULL, + `dataset_urn` VARCHAR(200) NOT NULL, + `compliance_purge_type` VARCHAR(30) DEFAULT NULL + COMMENT 'AUTO_PURGE,CUSTOM_PURGE,LIMITED_RETENTION,PURGE_NOT_APPLICABLE', + `compliance_purge_entities` VARCHAR(200) DEFAULT NULL, + `modified_time` INT(10) UNSIGNED DEFAULT NULL + COMMENT 'the modified time in epoch', + PRIMARY KEY (`dataset_id`), + UNIQUE KEY `dataset_urn` (`dataset_urn`) +) + ENGINE = InnoDB + DEFAULT CHARSET = utf8; + +CREATE TABLE `dataset_security` ( `dataset_id` INT(10) UNSIGNED NOT NULL, `dataset_urn` VARCHAR(200) NOT NULL, `classification` VARCHAR(500) DEFAULT NULL COMMENT 'JSON: confidential fields', + `record_owner_type` VARCHAR(50) DEFAULT NULL + COMMENT 'MEMBER,CUSTOMER,INTERNAL,COMPANY,GROUP', `retention_policy` VARCHAR(200) DEFAULT NULL COMMENT 'JSON: specification of retention', `geographic_affinity` VARCHAR(200) DEFAULT NULL COMMENT 'JSON: must be stored in the geo region', - `record_owner_type` VARCHAR(50) DEFAULT NULL - COMMENT 'MEMBER,CUSTOMER,INTERNAL,COMPANY,GROUP', - `compliance_purge_type` VARCHAR(30) DEFAULT NULL - COMMENT 'AUTO_PURGE,CUSTOM_PURGE,LIMITED_RETENTION,PURGE_NOT_APPLICABLE', - `compliance_purge_entities` VARCHAR(200) DEFAULT NULL, `modified_time` INT(10) UNSIGNED DEFAULT NULL COMMENT 'the modified time in epoch', PRIMARY KEY (`dataset_id`), @@ -163,6 +174,7 @@ CREATE TABLE dataset_index ( CREATE TABLE dataset_schema_info ( `dataset_id` INT UNSIGNED NOT NULL, `dataset_urn` VARCHAR(200) NOT NULL, + `is_backward_compatible` BOOLEAN DEFAULT NULL, `is_latest_revision` BOOLEAN NOT NULL, `create_time` 
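+  -- is_backward_compatible is intentionally nullable: NULL means "unknown", matching
+  -- the new isBackwardCompatible field in the Avro DatasetSchema record below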
BIGINT NOT NULL, `revision` INT UNSIGNED DEFAULT NULL, diff --git a/data-model/avro/MetadataChangeEvent.avsc b/data-model/avro/MetadataChangeEvent.avsc index 5c75627066..e9a839d786 100644 --- a/data-model/avro/MetadataChangeEvent.avsc +++ b/data-model/avro/MetadataChangeEvent.avsc @@ -13,13 +13,12 @@ { "name": "time", "type": "long", - "doc": "The time at which the event was emitted into Kafka.", - "logicalType": "timestamp-millis" + "doc": "The time at which the event was emitted into kafka." }, { "name": "server", "type": "string", - "doc": "The fully-qualified name of the host of this event." + "doc": "The fully qualified name of the host from which the event is being emitted." }, { "name": "instance", @@ -27,12 +26,12 @@ "null", "string" ], - "doc": "The name of the application instance on the server. e.g. i002,stg05,group003" + "doc": "The instance on the server from which the event is being emitted. e.g. i001" }, { "name": "appName", "type": "string", - "doc": "The name of the application/service from which the event is emitted." + "doc": "The name of the application from which the event is being emitted. see go/appname" }, { "name": "messageId", @@ -42,10 +41,37 @@ "namespace": "com.linkedin.events", "size": 16 }, - "doc": "A unique identifier for the message." + "doc": "A unique identifier for the message" + }, + { + "name": "auditVersion", + "type": [ + "null", + "int" + ], + "doc": "The version that is being used for auditing. In version 0, the audit trail buckets events into 10 minute audit windows based on the EventHeader timestamp. In version 1, the audit trail buckets events as follows: if the schema has an outer KafkaAuditHeader, use the outer audit header timestamp for bucketing; else if the EventHeader has an inner KafkaAuditHeader use that inner audit header's timestamp for bucketing", + "default": null + }, + { + "name": "fabricUrn", + "type": [ + "null", + "string" + ], + "doc": "The fabricUrn of the host from which the event is being emitted. Fabric Urn in the format of urn:li:fabric:{fabric_name}. See go/fabric.", + "default": null } ] - } + }, + "doc": "This header records information about the context of an event as it is emitted into kafka and is intended to be used by the kafka audit application. For more information see go/kafkaauditheader" + }, + { + "name": "urn", + "type": [ + "null", + "string" + ], + "doc": "The applicable URN - urn:li:dataset:(urn:li:dataPlatform:dali, search_mp.search_event_event, PROD), urn:salesforce:table:opportunity, arn:aws:dynamodb:::table/. This is the preferred identifier for a dataset." }, { "name": "datasetIdentifier", @@ -111,15 +137,8 @@ } ] } - ] - }, - { - "name": "urn", - "type": [ - "string", - "null" ], - "doc": "The applicable URN - urn:li:dataset:(urn:li:dataPlatform:dali, search_mp.search_event_event, PROD), urn:salesforce:table:opportunity, arn:aws:dynamodb:::table/
. This is the preferred identifier for a dataset." + "doc": "This has the same function as URN, but it can be used when the source system does not provide the URN" }, { "name": "datasetProperties", @@ -127,7 +146,7 @@ "null", { "type": "record", - "name": "DatasetProperties", + "name": "DatasetProperty", "fields": [ { "name": "changeAuditStamp", @@ -181,10 +200,11 @@ { "name": "uri", "type": [ - "string", - "null" + "null", + "string" ], - "doc": "The abstracted such as hdfs:///data/tracking/PageViewEvent, file:///dir/file_name. This is often used in codes and scripts." + "doc": "The abstracted such as hdfs:///data/tracking/PageViewEvent, file:///dir/file_name. This is often used in codes and scripts.", + "default": null }, { "name": "caseSensitivity", @@ -218,64 +238,62 @@ } ] } - ] + ], + "doc": "Basic properties of a dataset, such as Native Type, Case Sensitivity, Audit Stamp" }, { "name": "owners", - "type": [ - "null", - { - "type": "array", - "items": { - "type": "record", - "name": "OwnerInfo", - "fields": [ - { - "name": "ownerCategory", - "type": { - "type": "enum", - "name": "OwnerCategory", - "symbols": [ - "OWNER", - "PRODUCER", - "DELEGATE", - "STAKEHOLDER" - ] - }, - "doc": "Owner, Producer, Delegate, Stakeholder", - "default": "OWNER" + "type": { + "type": "array", + "items": { + "type": "record", + "name": "OwnerInfo", + "fields": [ + { + "name": "ownerCategory", + "type": { + "type": "enum", + "name": "OwnerCategory", + "symbols": [ + "OWNER", + "PRODUCER", + "DELEGATE", + "STAKEHOLDER" + ] }, - { - "name": "ownerUrn", - "type": "string", - "doc": "urn:li:corp:ldap, urn:li:group:abc, urn:li:service:mp_name, user_id, group_name" + "doc": "Owner, Producer, Delegate, Stakeholder", + "default": "OWNER" + }, + { + "name": "owner", + "type": "string", + "doc": "urn:li:corp:ldap, urn:li:group:abc, urn:li:service:mp_name, user_id, group_name" + }, + { + "name": "ownerType", + "type": { + "type": "enum", + "name": "OwnerType", + "symbols": [ + "URN", + "USER", + "GROUP", + "ROLE", + "SERVICE" + ] }, - { - "name": "ownerType", - "type": { - "type": "enum", - "name": "OwnerType", - "symbols": [ - "USER", - "GROUP", - "ROLE", - "SERVICE", - "URN" - ] - }, - "doc": "user, group, role, service, or urn", - "default": "USER" - }, - { - "name": "ownerSource", - "type": "string", - "doc": "JIRA,RB,DB,FS,AUDIT,NUAGE where the owner info is extracted" - } - ] - } + "doc": "user, group, role, service, or urn", + "default": "URN" + }, + { + "name": "ownerSource", + "type": "string", + "doc": "JIRA,SCM,RB,DB,FS,AUDIT,NUAGE from where the owner id is extracted" + } + ] } - ], - "default": null + }, + "default": [] }, { "name": "references", @@ -296,7 +314,9 @@ "JIRA", "RB", "LIX", - "CR" + "CR", + "WIKI", + "SCM" ] } }, @@ -315,14 +335,11 @@ }, { "name": "referenceList", - "type": [ - "null", - { - "type": "array", - "items": "string" - } - ], - "doc": "references[]" + "type": { + "type": "array", + "items": "string" + }, + "doc": "list of reference ids, keys or uris" } ] } @@ -345,7 +362,7 @@ "null" ], "doc": "What is the deepest partition level", - "default": 0 + "default": 1 }, { "name": "partitionSpecText", @@ -425,7 +442,8 @@ "type": "array", "items": "string" }, - "doc": "Partition keys (use the full field path for nested fields)" + "doc": "Partition keys (use the full field path for nested fields)", + "default": [] }, { "name": "partitionValues", @@ -433,7 +451,8 @@ "type": "array", "items": "string" }, - "doc": "For RANGE: [min, next_value); for LIST: [values...]; for Hive: [value]; for 
HASH: N/A" + "doc": "For RANGE: [min, next_partition_min_value); for LIST: [value1, value2...], for CASE: [expr1, expr2]; for Hive: [value]; for HASH: N/A", + "default": [] }, { "name": "numberOfHashBuckets", @@ -447,7 +466,7 @@ } } ], - "doc": "Array of partition keys", + "doc": "Array of partition keys/fields", "default": null }, { @@ -547,13 +566,10 @@ }, { "name": "tags", - "type": [ - "null", - { - "type": "array", - "items": "string" - } - ], + "type": { + "type": "array", + "items": "string" + }, "doc": "Tags of the dataset object" }, { @@ -565,10 +581,19 @@ "name": "DatasetSchema", "fields": [ { - "name": "isLatestRevision", + "name": "isBackwardCompatible", + "type": [ + "null", + "boolean" + ], + "doc": "Is this reversion compatible with the previous revision? null means unknown", + "default": null + }, + { + "name": "isFieldNameCaseSensitive", "type": "boolean", - "doc": "Is the latest reversion?", - "default": false + "doc": "Are the field names in schema Case Sensitive?", + "default": true }, { "name": "createTime", @@ -631,14 +656,6 @@ }, "default": "TEXT" }, - { - "name": "name", - "type": [ - "null", - "string" - ], - "doc": "The schema name or class name if available" - }, { "name": "text", "type": "string", @@ -663,23 +680,6 @@ "type": "record", "name": "DatasetKeySchema", "fields": [ - { - "name": "keyType", - "type": { - "type": "enum", - "name": "SchemaKeyValueType", - "symbols": [ - "NONE", - "KEY", - "DATABASE", - "TABLE", - "VALUE", - "DOCUMENT" - ] - }, - "doc": "KEY is only applicable for Key-Value(redis, voldemort) and Document(monogo, espresso) datasets.", - "default": "KEY" - }, { "name": "format", "type": "SchemaTextFormat", @@ -696,12 +696,6 @@ ], "doc": "if dataset has dedicated key schema which is separated from the table or value part, it can be stored here" }, - { - "name": "isFieldNameCaseSensitive", - "type": "boolean", - "doc": "Are the field names in schema Case Sensitive?", - "default": true - }, { "name": "fieldSchema", "type": [ @@ -724,18 +718,18 @@ "doc": "Position id of the parent field for nested structure. 0 means this field is the top-level field", "default": 0 }, + { + "name": "fieldPath", + "type": "string", + "doc": "For example, ..." + }, { "name": "fieldJsonPath", "type": [ "null", "string" ], - "doc": "http://goessner.net/articles/JsonPath For example, $.store.book[0].title" - }, - { - "name": "fieldPath", - "type": "string", - "doc": "For example, ..." + "doc": "http://goessner.net/articles/JsonPath this is an alternative for fieldPath. For example, $.store.book[0].title" }, { "name": "label", @@ -764,7 +758,7 @@ "null", "string" ], - "doc": "logical data type, DateTime, NUMBER(15,2), GUID, Epoch(3)" + "doc": "logical data type when the native type is too generic, DateTime, NUMBER(15,2), GUID, Epoch(3), Epoch(0)" }, { "name": "semanticType", @@ -932,7 +926,7 @@ ] } ], - "doc": "The schema/structure definition of a dataset. For Key-Value and Document store, dedicated Key Schema is provided.", + "doc": "The schema/structure definition of a dataset. For Key-Value and Document db, a dedicated KeySchema is provided. 
Schema includes KeySchema, OriginalSchema, FieldSchema, ChangeDataCaptureFields, AuditFields", "default": null }, { @@ -1126,28 +1120,41 @@ }, { "name": "capacityType", - "type": [ - "null", - "string" - ], - "doc": "storage, read qps, write qps" + "type": "string", + "doc": "storage, read qps, write qps, response time, ...", + "default": "storage" }, { "name": "capacityUnit", "type": [ "null", - "string" + { + "type": "enum", + "name": "CapacityMeasurementUnit", + "symbols": [ + "BYTE", + "KB", + "MB", + "GB", + "TB", + "PB", + "QPS", + "m", + "s", + "ms" + ] + } ], - "doc": "KB,MB,GB,TB,QPS" + "doc": "measurement unit when applicable" }, { "name": "capacityLow", - "type": "long", + "type": "double", "doc": "lower/min capacity" }, { "name": "capacityHigh", - "type": "long", + "type": "double", "doc": "higher/max capacity" } ] @@ -1157,75 +1164,13 @@ "default": null }, { - "name": "securitySpec", + "name": "privacyCompliancePolicy", "type": [ "null", { "type": "record", - "name": "SecuritySpecification", + "name": "PrivacyCompliancePolicy", "fields": [ - { - "name": "classification", - "type": [ - "null", - { - "type": "record", - "name": "ConfidentialClassification", - "fields": [ - { - "name": "highlyConfidential", - "type": { - "type": "array", - "items": "string" - } - }, - { - "name": "confidential", - "type": { - "type": "array", - "items": "string" - } - }, - { - "name": "limitedDistribution", - "type": { - "type": "array", - "items": "string" - } - }, - { - "name": "mustBeEncrypted", - "type": { - "type": "array", - "items": "string" - } - }, - { - "name": "mustBeMasked", - "type": { - "type": "array", - "items": "string" - } - } - ] - } - ], - "doc": "HighlyConfidential, Confidential, LimitedDistribution, MustBeEncrypted, MustBeMasked" - }, - { - "name": "recordOwnerType", - "type": { - "type": "enum", - "name": "RecordOwnerType", - "symbols": [ - "MEMBER", - "CUSTOMER", - "JOINT", - "INTERNAL", - "COMPANY" - ] - } - }, { "name": "complianceType", "type": { @@ -1247,7 +1192,7 @@ "type": "array", "items": { "type": "record", - "name": "PurageableEntityField", + "name": "PurgeableEntityField", "fields": [ { "name": "identifierType", @@ -1261,7 +1206,8 @@ "SUBJECT_URN", "COMPANY_ID", "GROUP_ID", - "CUSTOMER_ID" + "CUSTOMER_ID", + "SUBJECT_CUSTOMER_ID" ] } }, @@ -1275,63 +1221,161 @@ } ], "doc": "The fields which identify purgeable entities in records" - }, + } + ] + } + ], + "doc": "Describe the purgeable entity fields", + "default": null + }, + { + "name": "securitySpecification", + "type": [ + "null", + { + "type": "record", + "name": "SecuritySpecification", + "fields": [ { - "name": "retentionPolicy", + "name": "classification", "type": { "type": "record", - "name": "RetentionSpec", - "namespace": "com.linkedin.common", + "name": "ConfidentialClassification", "fields": [ { - "name": "retentionType", - "type": { - "type": "enum", - "name": "RetentionType", - "namespace": "com.linkedin.common", - "symbols": [ - "LIMITED", - "LEGAL_HOLD", - "UNLIMITED" - ] - }, - "doc": "Retention type on dataset" - }, - { - "name": "retentionWindow", - "type": [ - "null", - "long" - ], - "doc": "Time in (unit) how long data is retained for in case of LIMITED retention", - "default": null - }, - { - "name": "retentionWindowUnit", + "name": "highlyConfidential", "type": [ "null", { - "type": "enum", - "name": "TimePeriodUnit", - "namespace": "com.linkedin.common", - "symbols": [ - "YEAR", - "MONTH", - "WEEK", - "DAY", - "HOUR", - "MINUTE", - "SECOND", - "MILLISECOND" - ] + "type": "array", + "items": 
"string" } ], - "doc": "", + "doc": "list of highly confidential fields", + "default": null + }, + { + "name": "confidential", + "type": [ + "null", + { + "type": "array", + "items": "string" + } + ], + "doc": "list of confidential fields", + "default": null + }, + { + "name": "limitedDistribution", + "type": [ + "null", + { + "type": "array", + "items": "string" + } + ], + "doc": "list of limited distribution fields", + "default": null + }, + { + "name": "mustBeEncrypted", + "type": [ + "null", + { + "type": "array", + "items": "string" + } + ], + "doc": "list of fields that must be encrypted", + "default": null + }, + { + "name": "mustBeMasked", + "type": [ + "null", + { + "type": "array", + "items": "string" + } + ], + "doc": "list of fields that must be masked", "default": null } ] + }, + "doc": "Classify the sensitive fields into 5 categories: HighlyConfidential, Confidential, LimitedDistribution, MustBeEncrypted, MustBeMasked. If every field is confidential, * can be used." + }, + { + "name": "recordOwnerType", + "type": { + "type": "enum", + "name": "RecordOwnerType", + "symbols": [ + "MEMBER", + "CUSTOMER", + "JOINT", + "INTERNAL", + "COMPANY" + ] } }, + { + "name": "retentionPolicy", + "type": [ + "null", + { + "type": "record", + "name": "RetentionPolicy", + "fields": [ + { + "name": "retentionType", + "type": { + "type": "enum", + "name": "RetentionType", + "symbols": [ + "LIMITED", + "LEGAL_HOLD", + "UNLIMITED" + ] + }, + "doc": "Retention type on dataset" + }, + { + "name": "retentionWindow", + "type": [ + "null", + "long" + ], + "doc": "Time in (unit) how long data is retained for in case of LIMITED retention", + "default": null + }, + { + "name": "retentionWindowUnit", + "type": [ + "null", + { + "type": "enum", + "name": "TimePeriodUnit", + "symbols": [ + "YEAR", + "MONTH", + "WEEK", + "DAY", + "HOUR", + "MINUTE", + "SECOND", + "MILLISECOND" + ] + } + ], + "default": null + } + ] + } + ], + "doc": "Retention" + }, { "name": "geographicAffinity", "type": [ @@ -1401,7 +1445,9 @@ } ] } - ] + ], + "doc": "Classify the confidential specification", + "default": null } ] } \ No newline at end of file diff --git a/web/app/controllers/api/v1/Dataset.java b/web/app/controllers/api/v1/Dataset.java index 66c5a30a92..7e6bff80e2 100644 --- a/web/app/controllers/api/v1/Dataset.java +++ b/web/app/controllers/api/v1/Dataset.java @@ -39,10 +39,11 @@ public class Dataset extends Controller { public static final String BACKEND_SERVICE_URL_KEY = "backend.service.url"; - public static final String DATASET_SECURITY_PATH = "/dataset/security"; - public static final String BACKEND_URL = Play.application().configuration().getString(BACKEND_SERVICE_URL_KEY); + public static final String DATASET_SECURITY_PATH = "/dataset/security"; + public static final String DATASET_COMPLIANCE_PATH = "/dataset/compliance"; + public static Result getDatasetOwnerTypes() { ObjectNode result = Json.newObject(); @@ -839,6 +840,42 @@ public class Dataset extends Controller return ok(result); } + public static Promise getDatasetCompliance(int datasetId) { + final String queryUrl = BACKEND_URL + DATASET_COMPLIANCE_PATH; + return WS.url(queryUrl) + .setQueryParameter("datasetId", Integer.toString(datasetId)) + .setRequestTimeout(1000) + .get() + .map(response -> + ok(response.asJson()) + ); + } + + public static Promise updateDatasetCompliance(int datasetId) { + String username = session("user"); + if (StringUtils.isNotBlank(username)) { + final String queryUrl = BACKEND_URL + DATASET_COMPLIANCE_PATH; + + final JsonNode 
queryNode = Json.newObject() + .put("datasetId", datasetId) + .set("privacyCompliancePolicy", request().body().asJson()); + + return WS.url(queryUrl) + .setRequestTimeout(1000) + .post(queryNode) + .map(response -> + ok(response.asJson()) + ); + } else { + final JsonNode result = Json.newObject() + .put("status", "failed") + .put("error", "true") + .put("msg", "Unauthorized User."); + + return Promise.promise(() -> ok(result)); + } + } + public static Promise getDatasetSecurity(int datasetId) { final String queryUrl = BACKEND_URL + DATASET_SECURITY_PATH; return WS.url(queryUrl) @@ -857,7 +894,7 @@ public class Dataset extends Controller final JsonNode queryNode = Json.newObject() .put("datasetId", datasetId) - .set("securitySpec", request().body().asJson()); + .set("securitySpecification", request().body().asJson()); return WS.url(queryUrl) .setRequestTimeout(1000) diff --git a/web/conf/routes b/web/conf/routes index 3c18c777f3..99b67138c2 100644 --- a/web/conf/routes +++ b/web/conf/routes @@ -109,6 +109,10 @@ POST /api/v1/datasets/:id/watch controllers.api.v1.Dataset.w DELETE /api/v1/datasets/:id/watch/:watchId controllers.api.v1.Dataset.unwatchDataset(id:Int, watchId:Int) +GET /api/v1/datasets/:id/compliance controllers.api.v1.Dataset.getDatasetCompliance(id:Int) + +POST /api/v1/datasets/:id/compliance controllers.api.v1.Dataset.updateDatasetCompliance(id:Int) + GET /api/v1/datasets/:id/security controllers.api.v1.Dataset.getDatasetSecurity(id:Int) POST /api/v1/datasets/:id/security controllers.api.v1.Dataset.updateDatasetSecurity(id:Int) diff --git a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetCapacityRecord.java b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetCapacityRecord.java index c6dfee89ca..3dee493b4c 100644 --- a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetCapacityRecord.java +++ b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetCapacityRecord.java @@ -23,8 +23,8 @@ public class DatasetCapacityRecord extends AbstractRecord { String capacityName; String capacityType; String capacityUnit; - Long capacityLow; - Long capacityHigh; + Double capacityLow; + Double capacityHigh; Long modifiedTime; @Override @@ -81,19 +81,19 @@ public class DatasetCapacityRecord extends AbstractRecord { this.capacityUnit = capacityUnit; } - public Long getCapacityLow() { + public Double getCapacityLow() { return capacityLow; } - public void setCapacityLow(Long capacityLow) { + public void setCapacityLow(Double capacityLow) { this.capacityLow = capacityLow; } - public Long getCapacityHigh() { + public Double getCapacityHigh() { return capacityHigh; } - public void setCapacityHigh(Long capacityHigh) { + public void setCapacityHigh(Double capacityHigh) { this.capacityHigh = capacityHigh; } diff --git a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetComplianceRecord.java b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetComplianceRecord.java new file mode 100644 index 0000000000..d377685259 --- /dev/null +++ b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetComplianceRecord.java @@ -0,0 +1,81 @@ +/** + * Copyright 2015 LinkedIn Corp. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + */ +package wherehows.common.schemas; + +import java.util.List; +import java.util.Map; + + +public class DatasetComplianceRecord extends AbstractRecord { + + Integer datasetId; + String datasetUrn; + String complianceType; + List compliancePurgeEntities; + Long modifiedTime; + + @Override + public String[] getDbColumnNames() { + return new String[]{"dataset_id", "dataset_urn", "compliance_purge_type", "compliance_purge_entities", + "modified_time"}; + } + + @Override + public List fillAllFields() { + return null; + } + + public DatasetComplianceRecord() { + } + + public Integer getDatasetId() { + return datasetId; + } + + public void setDatasetId(Integer datasetId) { + this.datasetId = datasetId; + } + + public String getDatasetUrn() { + return datasetUrn; + } + + public void setDatasetUrn(String datasetUrn) { + this.datasetUrn = datasetUrn; + } + + public String getComplianceType() { + return complianceType; + } + + public void setComplianceType(String complianceType) { + this.complianceType = complianceType; + } + + public List getCompliancePurgeEntities() { + return compliancePurgeEntities; + } + + public void setCompliancePurgeEntities(List compliancePurgeEntities) { + this.compliancePurgeEntities = compliancePurgeEntities; + } + + public Long getModifiedTime() { + return modifiedTime; + } + + public void setModifiedTime(Long modifiedTime) { + this.modifiedTime = modifiedTime; + } +} diff --git a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetOriginalSchemaRecord.java b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetOriginalSchemaRecord.java index 8377535f9b..3d22179c42 100644 --- a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetOriginalSchemaRecord.java +++ b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetOriginalSchemaRecord.java @@ -20,7 +20,6 @@ import java.util.Map; public class DatasetOriginalSchemaRecord extends AbstractRecord { String format; - String name; String text; Map checksum; @@ -40,14 +39,6 @@ public class DatasetOriginalSchemaRecord extends AbstractRecord { this.format = format; } - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - public String getText() { return text; } diff --git a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetOwnerRecord.java b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetOwnerRecord.java index 85d2191b79..8bced4623d 100644 --- a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetOwnerRecord.java +++ b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetOwnerRecord.java @@ -24,7 +24,7 @@ public class DatasetOwnerRecord extends AbstractRecord { String datasetUrn; Integer appId; String ownerCategory; - String ownerUrn; + String owner; String ownerType; String isGroup; String isActive; @@ -49,7 +49,7 @@ public class DatasetOwnerRecord extends AbstractRecord { public List fillAllFields() { List allFields = new ArrayList<>(); allFields.add(datasetUrn); - allFields.add(ownerUrn); + allFields.add(owner); allFields.add(sortId); allFields.add(namespace); allFields.add(dbIds); @@ -65,7 +65,7 @@ public class DatasetOwnerRecord extends AbstractRecord { @JsonIgnore 
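+  // Part of the ownerUrn -> owner rename in this patch: the JSON property and the
+  // accessor pair are now getOwner()/setOwner(), matching the "owner" field in the
+  // MetadataChangeEvent OwnerInfo record; callers in DatasetInfoDao were updated too.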
public Object[] getValuesForUnmatchedOwner() { - return new Object[]{datasetUrn, appId, ownerCategory, ownerUrn, ownerType, isGroup, + return new Object[]{datasetUrn, appId, ownerCategory, owner, ownerType, isGroup, isActive, sortId, namespace, ownerSource, "N/A", dbIds, sourceTime}; } @@ -75,7 +75,7 @@ public class DatasetOwnerRecord extends AbstractRecord { public DatasetOwnerRecord(String datasetUrn, String ownerId, Integer sortId, String namespace, String dbName, Long sourceTime) { this.datasetUrn = datasetUrn; - this.ownerUrn = ownerId; + this.owner = ownerId; this.sortId = sortId; this.namespace = namespace; this.dbIds = dbName; @@ -114,12 +114,12 @@ public class DatasetOwnerRecord extends AbstractRecord { this.ownerCategory = ownerCategory; } - public String getOwnerUrn() { - return ownerUrn; + public String getOwner() { + return owner; } - public void setOwnerUrn(String ownerUrn) { - this.ownerUrn = ownerUrn; + public void setOwner(String owner) { + this.owner = owner; } public String getOwnerType() { diff --git a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetSchemaInfoRecord.java b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetSchemaInfoRecord.java index e3caf87e71..95f6673e80 100644 --- a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetSchemaInfoRecord.java +++ b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetSchemaInfoRecord.java @@ -21,15 +21,15 @@ public class DatasetSchemaInfoRecord extends AbstractRecord { Integer datasetId; String datasetUrn; - Boolean isLatestRevision; + Boolean isBackwardCompatible; + Boolean isFieldNameCaseSensitive; Long createTime; Integer revision; String version; String name; String description; DatasetOriginalSchemaRecord originalSchema; - DatasetKeySchemaRecord keySchema; - Boolean isFieldNameCaseSensitive; + Map keySchema; List fieldSchema; List changeDataCaptureFields; List auditFields; @@ -37,8 +37,8 @@ public class DatasetSchemaInfoRecord extends AbstractRecord { @Override public String[] getDbColumnNames() { - return new String[]{"dataset_id", "dataset_urn", "is_latest_revision", "create_time", "revision", "version", "name", - "description", "original_schema", "key_schema", "is_field_name_case_sensitive", "field_schema", + return new String[]{"dataset_id", "dataset_urn", "is_backward_compatible", "is_field_name_case_sensitive", + "create_time", "revision", "version", "name", "description", "original_schema", "key_schema", "field_schema", "change_data_capture_fields", "audit_fields", "modified_time"}; } @@ -66,12 +66,12 @@ public class DatasetSchemaInfoRecord extends AbstractRecord { this.datasetUrn = datasetUrn; } - public Boolean getIsLatestRevision() { - return isLatestRevision; + public Boolean getIsBackwardCompatible() { + return isBackwardCompatible; } - public void setIsLatestRevision(Boolean isLatestRevision) { - this.isLatestRevision = isLatestRevision; + public void setIsBackwardCompatible(Boolean backwardCompatible) { + isBackwardCompatible = backwardCompatible; } public Long getCreateTime() { @@ -122,11 +122,11 @@ public class DatasetSchemaInfoRecord extends AbstractRecord { this.originalSchema = originalSchema; } - public DatasetKeySchemaRecord getKeySchema() { + public Map getKeySchema() { return keySchema; } - public void setKeySchema(DatasetKeySchemaRecord keySchema) { + public void setKeySchema(Map keySchema) { this.keySchema = keySchema; } diff --git a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetSecurityRecord.java 
b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetSecurityRecord.java
index d15e26c3e0..84fd6892a6 100644
--- a/wherehows-common/src/main/java/wherehows/common/schemas/DatasetSecurityRecord.java
+++ b/wherehows-common/src/main/java/wherehows/common/schemas/DatasetSecurityRecord.java
@@ -23,16 +23,14 @@ public class DatasetSecurityRecord extends AbstractRecord {
   String datasetUrn;
   Map<String, List<String>> classification;
   String recordOwnerType;
-  String complianceType;
-  List compliancePurgeEntities;
   DatasetRetentionRecord retentionPolicy;
   DatasetGeographicAffinityRecord geographicAffinity;
   Long modifiedTime;

   @Override
   public String[] getDbColumnNames() {
-    return new String[]{"dataset_id", "dataset_urn", "classification", "record_owner_type", "compliance_purge_type",
-        "compliance_purge_entities", "retention_policy", "geographic_affinity", "modified_time"};
+    return new String[]{"dataset_id", "dataset_urn", "classification", "record_owner_type", "retention_policy",
+        "geographic_affinity", "modified_time"};
   }

   @Override
@@ -75,22 +73,6 @@ public class DatasetSecurityRecord extends AbstractRecord {
     this.recordOwnerType = recordOwnerType;
   }

-  public List getCompliancePurgeEntities() {
-    return compliancePurgeEntities;
-  }
-
-  public void setCompliancePurgeEntities(List compliancePurgeEntities) {
-    this.compliancePurgeEntities = compliancePurgeEntities;
-  }
-
-  public String getComplianceType() {
-    return complianceType;
-  }
-
-  public void setComplianceType(String complianceType) {
-    this.complianceType = complianceType;
-  }
-
   public DatasetRetentionRecord getRetentionPolicy() {
     return retentionPolicy;
   }
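
Reviewer note: a minimal, self-contained sketch of how the new DatasetComplianceRecord
introduced by this patch is expected to be populated, mirroring what
DatasetInfoDao.updateDatasetCompliance() does after Jackson binding. The main() harness,
the sample id/urn, the "identifierField" key, and the purgeField() helper are illustrative
only and not part of the patch.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    import wherehows.common.schemas.DatasetComplianceRecord;

    public class ComplianceRecordSketch {
      public static void main(String[] args) {
        DatasetComplianceRecord record = new DatasetComplianceRecord();
        record.setDatasetId(12345);                                  // hypothetical dataset id
        record.setDatasetUrn("hdfs:///data/tracking/PageViewEvent"); // uri style used elsewhere in this patch
        record.setComplianceType("AUTO_PURGE");                      // one of AUTO_PURGE, CUSTOM_PURGE,
                                                                     // LIMITED_RETENTION, PURGE_NOT_APPLICABLE
        record.setCompliancePurgeEntities(Arrays.asList(
            purgeField("CUSTOMER_ID", "customer_id")));              // identifier values illustrative
        record.setModifiedTime(System.currentTimeMillis() / 1000);   // epoch seconds, as in the DAO

        // The five columns that land in dataset_privacy_compliance:
        System.out.println(Arrays.toString(record.getDbColumnNames()));
      }

      // Stand-in for the PurgeableEntityField shape; Java 7/8 target, so no Map.of().
      private static Map<String, String> purgeField(String identifierType, String identifierField) {
        Map<String, String> m = new HashMap<>();
        m.put("identifierType", identifierType);
        m.put("identifierField", identifierField);
        return m;
      }
    }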