Mirror of https://github.com/datahub-project/datahub.git (synced 2025-07-27 03:19:51 +00:00)
Frontend dataset columns get API to fetch data from Metadata store (#358)
commit 48aa36fa79 (parent 976e4e5d49)
@@ -23,3 +23,14 @@ model {
     }
   }
 }
+
+// used for Intellij to recognized play project
+idea {
+  module {
+    sourceDirs += file("app")
+    testSourceDirs += file("test")
+    scopes.COMPILE = [plus: [configurations.play], minus: []]
+    scopes.RUNTIME = [plus: [configurations.playRun], minus: [configurations.play]]
+    scopes.TEST = [plus: [configurations.playTest], minus: [configurations.playRun]]
+  }
+}
@@ -21,10 +21,18 @@ repositories {
     name "confluent-maven-release"
     url 'http://packages.confluent.io/maven/'
   }
-  maven { // this is required by various gralde plugins
+  maven { // this is required by various gradle plugins
     name "gradle-plugins"
     url 'http://plugins.gradle.org/m2/'
   }
+  /* ivy { // this is required by metadata store Restli client within LinkedIn
+    url 'http://artifactory.corp.linkedin.com:8081/artifactory/repo'
+    layout 'pattern', {
+      ivy '[organisation]/[module]/[revision]/[module]-[revision].ivy'
+      artifact '[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]'
+      m2compatible = true
+    }
+  } */
 }
 
 try {
@@ -1,30 +1,8 @@
-apply plugin: 'java'
-
-/*
-repositories {
-  mavenCentral()
-  ivy {
-    url 'http://artifactory.corp.linkedin.com:8081/artifactory/repo'
-    layout 'pattern', {
-      ivy '[organisation]/[module]/[revision]/[module]-[revision].ivy'
-      artifact '[organisation]/[module]/[revision]/[artifact]-[revision](-[classifier]).[ext]'
-      m2compatible = true
-    }
-  }
-  mavenLocal()
-  // maven {
-  //   url 'http://artifactory.corp.linkedin.com:8081/artifactory/SI'
-  // }
-  // flatDir {
-  //   dirs 'extralibs'
-  // }
-}
-*/
-
 dependencies {
   compile 'com.linkedin.pegasus:restli-client:6.0.12'
   compile 'com.linkedin.pegasus:r2-netty:6.0.12'
+  // if used within LinkedIn, uncomment the following two lines and comment the extralibs to use the latest artifacts
   // compile group:'com.linkedin.metadata-store', name:'metadata-store-api', version:'0.1.+', configuration:'dataTemplate'
   // compile group:'com.linkedin.metadata-store', name:'metadata-store-api', version:'0.1.+', configuration:'restClient'
-  compile fileTree(dir: 'extralibs', include: ['*.jar']) // externalDependency.oracle/teradata/gsp
+  compile fileTree(dir: 'extralibs', include: ['*.jar'])
 }
@@ -0,0 +1,85 @@
+/**
+ * Copyright 2015 LinkedIn Corp. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ */
+package wherehows.restli.util;
+
+import com.linkedin.common.FabricType;
+import com.linkedin.common.urn.DataPlatformUrn;
+import com.linkedin.common.urn.DatasetUrn;
+import com.linkedin.common.urn.Urn;
+import java.net.URISyntaxException;
+
+
+public class UrnUtil {
+
+  /**
+   * Transform platform name to DataPlatformUrn
+   * @param platformName String
+   * @return DataPlatformUrn
+   * @throws URISyntaxException
+   */
+  public static DataPlatformUrn toDataPlatformUrn(String platformName) throws URISyntaxException {
+    return DataPlatformUrn.deserialize("urn:li:dataPlatform:" + platformName);
+  }
+
+  /**
+   * Transform platform name, dataset name and origin fabric into DatasetUrn
+   * @param platformName String
+   * @param datasetName String
+   * @param origin String
+   * @return DatasetUrn
+   * @throws URISyntaxException
+   */
+  public static DatasetUrn toDatasetUrn(String platformName, String datasetName, String origin)
+      throws URISyntaxException {
+    return DatasetUrn.createFromUrn(
+        Urn.createFromTuple("dataset", toDataPlatformUrn(platformName), datasetName, toFabricType(origin)));
+  }
+
+  /**
+   * Transform fabric string into FabricType enum
+   * @param fabric String
+   * @return FabricType
+   */
+  public static FabricType toFabricType(String fabric) {
+    switch (fabric.toUpperCase()) {
+      case "PROD":
+        return FabricType.PROD;
+      case "CORP":
+        return FabricType.CORP;
+      case "EI":
+        return FabricType.EI;
+      case "DEV":
+        return FabricType.DEV;
+      default:
+        return FabricType.$UNKNOWN;
+    }
+  }
+
+  /**
+   * Split WhereHows dataset URN into two parts: platform + dataset name
+   * @param urn String WhereHows dataset URN
+   * @return String[] platform + dataset name
+   */
+  public static String[] splitWhUrn(String urn) {
+    int index = urn.indexOf(":///");
+    String fabric = urn.substring(0, index);
+    String dataset = urn.substring(index + 4);
+
+    // for espresso, change '/' back to '.'
+    if (fabric.equalsIgnoreCase("espresso")) {
+      dataset = dataset.replace("/", ".");
+    }
+    return new String[]{fabric, dataset};
+  }
+}
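Editor's note (illustrative, not part of the commit): a minimal sketch of how the UrnUtil helpers above might be called. The WhereHows URN value used here is a hypothetical example.

// Hypothetical driver for the UrnUtil helpers added above; the URN is made up.
import com.linkedin.common.urn.DatasetUrn;
import wherehows.restli.util.UrnUtil;

public class UrnUtilSketch {
  public static void main(String[] args) throws Exception {
    String whUrn = "hdfs:///data/tracking/PageViewEvent"; // invented WhereHows URN

    // splitWhUrn cuts at ":///": platform "hdfs", dataset name "data/tracking/PageViewEvent"
    String[] parts = UrnUtil.splitWhUrn(whUrn);

    // Build the Metadata Store DatasetUrn from platform, dataset name and fabric
    DatasetUrn urn = UrnUtil.toDatasetUrn(parts[0], parts[1], "PROD");
    System.out.println(urn);
  }
}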
@@ -16,6 +16,9 @@ package controllers.api.v1;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
+import com.linkedin.dataset.SchemaField;
+import com.linkedin.dataset.SchemaFieldArray;
+import dao.MetadataStoreDao;
 import dao.ReturnCode;
 import models.DatasetColumn;
 import models.DatasetDependency;
@@ -144,23 +147,38 @@ public class Dataset extends Controller
         return ok(result);
     }
 
-    public static Result getDatasetColumnsByID(int id)
-    {
+    public static Result getDatasetColumnsByID(int id) {
         List<DatasetColumn> datasetColumnList = DatasetsDAO.getDatasetColumnsByID(id);
 
         ObjectNode result = Json.newObject();
-        if (datasetColumnList != null && datasetColumnList.size() > 0)
-        {
+        if (datasetColumnList != null && datasetColumnList.size() > 0) {
             result.put("status", "ok");
             result.set("columns", Json.toJson(datasetColumnList));
+            return ok(result);
         }
-        else
-        {
+        SchemaFieldArray datasetFields = null;
+        if (datasetColumnList == null || datasetColumnList.size() == 0) {
+            String urn = DatasetsDAO.getDatasetUrnById(id);
+            if (urn != null && urn.length() > 6) {
+                try {
+                    datasetFields = MetadataStoreDao.getLatestSchemaByWhUrn(urn).getFields();
+                } catch (Exception e) {
+                    Logger.debug("Can't find schema for URN: " + urn + ", Exception: " + e.getMessage());
+                }
+            } else {
+                Logger.debug("Dataset id " + id + " not found.");
+            }
+        }
+
+        if (datasetFields != null && datasetFields.size() > 0) {
+            datasetColumnList = MetadataStoreDao.datasetColumnsMapper(datasetFields);
+            result.put("status", "ok");
+            result.set("columns", Json.toJson(datasetColumnList));
+        } else {
             result.put("status", "error");
             result.put("message", "record not found");
         }
 
         return ok(result);
     }
 
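Editor's note (illustrative, not part of the commit): the controller above returns one of two JSON shapes. The sketch below rebuilds them with Jackson so the structure is easy to see; the column values are invented, while the field names ("status", "columns", "message", fieldName/dataType/comment) come from the code in this diff.

// Hypothetical illustration of the payload shapes produced by getDatasetColumnsByID().
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class DatasetColumnsResponseShapes {
  public static void main(String[] args) {
    ObjectMapper mapper = new ObjectMapper();

    // Success: columns found in MySQL, or mapped from the Metadata Store schema fields
    ObjectNode ok = mapper.createObjectNode();
    ok.put("status", "ok");
    ArrayNode columns = ok.putArray("columns");
    ObjectNode col = columns.addObject();
    col.put("fieldName", "memberId");           // invented column name
    col.put("dataType", "long");                // invented native type
    col.put("comment", "example description");  // invented description

    // Failure: neither MySQL nor the Metadata Store has a schema for this dataset id
    ObjectNode error = mapper.createObjectNode();
    error.put("status", "error");
    error.put("message", "record not found");

    System.out.println(ok);
    System.out.println(error);
  }
}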
@@ -652,19 +652,7 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
             }
         }
 
-        String urn = null;
-        try
-        {
-            urn = (String)getJdbcTemplate().queryForObject(
-                GET_DATASET_URN_BY_ID,
-                String.class,
-                id);
-        }
-        catch(EmptyResultDataAccessException e)
-        {
-            Logger.error("Dataset ownDataset get urn failed, id = " + id);
-            Logger.error("Exception = " + e.getMessage());
-        }
+        String urn = getDatasetUrnById(id);
         int status = getJdbcTemplate().update(
             UPDATE_DATASET_OWNERS,
             id,
@@ -756,6 +744,15 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
         return resultNode;
     }
 
+    public static String getDatasetUrnById(int dataset_id) {
+        try {
+            return getJdbcTemplate().queryForObject(GET_DATASET_URN_BY_ID, String.class, dataset_id);
+        } catch(EmptyResultDataAccessException e) {
+            Logger.error("Can not find URN for dataset id: " + dataset_id + ", Exception: " + e.getMessage());
+        }
+        return null;
+    }
+
     public static Dataset getDatasetByID(int id, String user)
     {
         Dataset dataset = null;
@@ -922,21 +919,7 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
 
     public static List<ImpactDataset> getImpactAnalysisByID(int id)
     {
-        String urn = null;
-
-        try
-        {
-            urn = (String)getJdbcTemplate().queryForObject(
-                GET_DATASET_URN_BY_ID,
-                String.class,
-                id);
-        }
-        catch(EmptyResultDataAccessException e)
-        {
-            Logger.error("Dataset getImpactAnalysisByID get urn failed, id = " + id);
-            Logger.error("Exception = " + e.getMessage());
-        }
-
+        String urn = getDatasetUrnById(id);
         return LineageDAO.getImpactDatasetsByUrn(urn);
     }
 
@@ -1838,13 +1821,7 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
         getJdbcTemplate().update(MARK_DATASET_OWNERS_AS_DELETED, datasetId);
 
         if (owners.size() > 0) {
-            String urn = null;
-            try {
-                urn = getJdbcTemplate().queryForObject(GET_DATASET_URN_BY_ID, String.class, datasetId);
-            } catch(EmptyResultDataAccessException e) {
-                Logger.error("Dataset updateDatasetOwners get urn failed, id = " + datasetId);
-                Logger.error("Exception = " + e.getMessage());
-            }
+            String urn = getDatasetUrnById(datasetId);
             updateDatasetOwnerDatabase(datasetId, urn, owners);
         }
         return ReturnCode.Success;
web/app/dao/MetadataStoreDao.java (new file, 146 lines)
@@ -0,0 +1,146 @@
+/**
+ * Copyright 2015 LinkedIn Corp. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ */
+package dao;
+
+import com.linkedin.common.urn.DatasetUrn;
+import com.linkedin.dataset.Dataset;
+import com.linkedin.dataset.DatasetKey;
+import com.linkedin.dataset.DatasetPrivacyCompliancePoliciesGetRequestBuilder;
+import com.linkedin.dataset.DatasetPrivacyCompliancePoliciesRequestBuilders;
+import com.linkedin.dataset.DatasetsGetRequestBuilder;
+import com.linkedin.dataset.DatasetsRequestBuilders;
+import com.linkedin.dataset.PrivacyCompliancePolicy;
+import com.linkedin.dataset.PrivacyCompliancePolicyKey;
+import com.linkedin.dataset.SchemaField;
+import com.linkedin.dataset.SchemaFieldArray;
+import com.linkedin.dataset.SchemaMetadata;
+import com.linkedin.dataset.SchemaMetadataFindByDatasetRequestBuilder;
+import com.linkedin.dataset.SchemaMetadataGetRequestBuilder;
+import com.linkedin.dataset.SchemaMetadataKey;
+import com.linkedin.dataset.SchemaMetadataRequestBuilders;
+import com.linkedin.r2.transport.common.Client;
+import com.linkedin.r2.transport.common.bridge.client.TransportClientAdapter;
+import com.linkedin.r2.transport.http.client.HttpClientFactory;
+import com.linkedin.restli.client.FindRequest;
+import com.linkedin.restli.client.Request;
+import com.linkedin.restli.client.ResponseFuture;
+import com.linkedin.restli.client.RestClient;
+import com.linkedin.restli.common.CollectionResponse;
+import com.linkedin.restli.common.ComplexResourceKey;
+import com.linkedin.restli.common.EmptyRecord;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import models.DatasetColumn;
+import play.Logger;
+import play.Play;
+
+import static wherehows.restli.util.UrnUtil.*;
+
+
+public class MetadataStoreDao {
+
+  private static final String MetadataStoreURL =
+      Play.application().configuration().getString("wherehows.restli.server.url");
+
+  private static final HttpClientFactory http = new HttpClientFactory();
+  private static final Client r2Client =
+      new TransportClientAdapter(http.getClient(Collections.<String, String>emptyMap()));
+
+  private static final RestClient _client = new RestClient(r2Client, MetadataStoreURL);
+
+  private static final DatasetsRequestBuilders _datasetsBuilders = new DatasetsRequestBuilders();
+
+  private static final SchemaMetadataRequestBuilders _schemaMetadataBuilder = new SchemaMetadataRequestBuilders();
+
+  private static final DatasetPrivacyCompliancePoliciesRequestBuilders _privacyComplianceBuilder =
+      new DatasetPrivacyCompliancePoliciesRequestBuilders();
+
+  public static Dataset getDataset(String datasetName, String platformName, String origin) throws Exception {
+
+    DatasetKey key = new DatasetKey().setName(datasetName)
+        .setPlatform(toDataPlatformUrn(platformName))
+        .setOrigin(toFabricType(origin));
+
+    DatasetsGetRequestBuilder builder = _datasetsBuilders.get();
+    Request<Dataset> req = builder.id(new ComplexResourceKey<>(key, new EmptyRecord())).build();
+
+    // Send the request and wait for a response
+    final ResponseFuture<Dataset> responseFuture = _client.sendRequest(req);
+    return responseFuture.getResponse().getEntity();
+  }
+
+  public static SchemaMetadata getSchemaMetadata(String schemaName, String platformName, long version)
+      throws Exception {
+
+    SchemaMetadataKey key = new SchemaMetadataKey().setSchemaName(schemaName)
+        .setPlatform(toDataPlatformUrn(platformName))
+        .setVersion(version);
+
+    SchemaMetadataGetRequestBuilder builder = _schemaMetadataBuilder.get();
+    Request<SchemaMetadata> req = builder.id(new ComplexResourceKey<>(key, new EmptyRecord())).build();
+
+    ResponseFuture<SchemaMetadata> responseFuture = _client.sendRequest(req);
+    return responseFuture.getResponse().getEntity();
+  }
+
+  public static SchemaMetadata getLatestSchemaByDataset(String platformName, String datasetName, String origin)
+      throws Exception {
+    DatasetUrn urn = toDatasetUrn(platformName, datasetName, origin);
+
+    SchemaMetadataFindByDatasetRequestBuilder builder = _schemaMetadataBuilder.findByDataset();
+    FindRequest<SchemaMetadata> req = builder.datasetParam(urn).build();
+
+    ResponseFuture<CollectionResponse<SchemaMetadata>> responseFuture = _client.sendRequest(req);
+    long version = 0;
+    SchemaMetadata latestSchema = null;
+    for (SchemaMetadata sc : responseFuture.getResponse().getEntity().getElements()) {
+      if (sc.getVersion() > version) {
+        latestSchema = sc;
+        version = sc.getVersion();
+      }
+    }
+    return latestSchema;
+  }
+
+  public static List<DatasetColumn> datasetColumnsMapper(SchemaFieldArray fields) {
+    List<DatasetColumn> columns = new ArrayList<>();
+    for (SchemaField field : fields) {
+      DatasetColumn col = new DatasetColumn();
+      col.fieldName = field.getFieldPath();
+      col.dataType = field.getNativeDataType();
+      col.comment = field.getDescription();
+      columns.add(col);
+    }
+    return columns;
+  }
+
+  public static SchemaMetadata getLatestSchemaByWhUrn(String urn) throws Exception {
+    String[] urnParts = splitWhUrn(urn);
+    return getLatestSchemaByDataset(urnParts[0], urnParts[1], "PROD");
+  }
+
+  public static PrivacyCompliancePolicy getPrivacyCompliancePolicy(String platformName, String datasetName,
+      String origin, long version) throws Exception {
+
+    DatasetUrn urn = toDatasetUrn(platformName, datasetName, origin);
+    PrivacyCompliancePolicyKey key = new PrivacyCompliancePolicyKey().setDataset(urn).setVersion(version);
+
+    DatasetPrivacyCompliancePoliciesGetRequestBuilder builder = _privacyComplianceBuilder.get();
+    Request<PrivacyCompliancePolicy> req = builder.id(new ComplexResourceKey<>(key, new EmptyRecord())).build();
+
+    ResponseFuture<PrivacyCompliancePolicy> responseFuture = _client.sendRequest(req);
+    return responseFuture.getResponse().getEntity();
+  }
+}
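Editor's note (illustrative, not part of the commit): a minimal standalone sketch of the same rest.li call sequence MetadataStoreDao uses to find the latest schema, without the Play configuration lookup. The server URL and dataset identifiers below are hypothetical placeholders.

// Hypothetical sketch mirroring MetadataStoreDao.getLatestSchemaByDataset(); URL and dataset are made up.
import com.linkedin.dataset.SchemaMetadata;
import com.linkedin.dataset.SchemaMetadataRequestBuilders;
import com.linkedin.r2.transport.common.Client;
import com.linkedin.r2.transport.common.bridge.client.TransportClientAdapter;
import com.linkedin.r2.transport.http.client.HttpClientFactory;
import com.linkedin.restli.client.FindRequest;
import com.linkedin.restli.client.RestClient;
import com.linkedin.restli.common.CollectionResponse;
import java.util.Collections;

import static wherehows.restli.util.UrnUtil.toDatasetUrn;

public class SchemaLookupSketch {
  public static void main(String[] args) throws Exception {
    // Build the rest.li client the same way MetadataStoreDao does
    HttpClientFactory http = new HttpClientFactory();
    Client r2Client = new TransportClientAdapter(http.getClient(Collections.<String, String>emptyMap()));
    RestClient client = new RestClient(r2Client, "http://localhost:1338/"); // placeholder URL

    // Find all schema versions registered for a (hypothetical) dataset, then pick the highest
    FindRequest<SchemaMetadata> req = new SchemaMetadataRequestBuilders().findByDataset()
        .datasetParam(toDatasetUrn("hdfs", "data/tracking/PageViewEvent", "PROD"))
        .build();
    CollectionResponse<SchemaMetadata> resp = client.sendRequest(req).getResponse().getEntity();

    SchemaMetadata latest = null;
    for (SchemaMetadata sc : resp.getElements()) {
      if (latest == null || sc.getVersion() > latest.getVersion()) {
        latest = sc;
      }
    }
    System.out.println(latest == null ? "no schema found" : "latest version: " + latest.getVersion());
  }
}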
@@ -5,6 +5,8 @@ project.ext.httpPort = 9000
 project.ext.playBinaryBaseName = "wherehows-frontend"
 
 dependencies{
+  play project(':restli-client')
+
   play externalDependency.play_java_ws
   play externalDependency.play_java_jdbc
   play externalDependency.play_filter
@@ -63,6 +63,7 @@ elasticsearch.dataset.url = "$YOUR_DATASET_INDEX_URL"
 elasticsearch.flow.url = "$YOUR_FLOW_INDEX_URL"
 
 backend.service.url = "$YOUR_BACKEND_SERVICE_URL"
+wherehows.restli.server.url = "$YOUR_METADATA_STORE_RESTLI_SERVICE_URL"
 
 linkedin.internal = true
 authentication.ldap.url = "$YOUR_LDAP_SERVER"