/**
 * Copyright 2015 LinkedIn Corp. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 */
package dao;

import models.Dataset;
import models.User;
import org.apache.commons.lang3.StringUtils;
import org.springframework.jdbc.core.RowMapper;
import play.Play;

import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Time;
import java.util.ArrayList;

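/**
 * Spring JDBC {@link RowMapper} that maps one row of the dataset query result set to a
 * {@link Dataset} model, including its owners, schema, timestamps and, for HDFS
 * datasets, a link into the HDFS browser.
 */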
public class DatasetRowMapper implements RowMapper<Dataset>
{
  private static final String HDFS_BROWSER_URL =
      Play.application().configuration().getString(DatasetsDAO.HDFS_BROWSER_URL_KEY);

  public static final String DATASET_ID_COLUMN = "id";
  public static final String DATASET_NAME_COLUMN = "name";
  public static final String DATASET_URN_COLUMN = "urn";
  public static final String DATASET_SOURCE_COLUMN = "source";
  public static final String DATASET_CREATED_TIME_COLUMN = "created";
  private static final String DATASET_MODIFIED_TIME_COLUMN = "modified";
  private static final String DATASET_SOURCE_MODIFIED_TIME_COLUMN = "source_modified_time";
  private static final String DATASET_PROPERTIES_COLUMN = "properties";
  private static final String DATASET_OWNER_ID_COLUMN = "owner_id";
  private static final String DATASET_OWNER_NAME_COLUMN = "owner_name";
  private static final String DATASET_OWNER_EMAIL_COLUMN = "owner_email";
  private static final String SCHEMA_HISTORY_ID_COLUMN = "schema_history_id";
  public static final String DATASET_SCHEMA_COLUMN = "schema";

  public static final String HDFS_PREFIX = "hdfs";
  public static final int HDFS_URN_PREFIX_LEN = 7; // the full prefix is "hdfs:///", but we keep the last slash

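  /**
   * Maps the current result set row to a {@link Dataset}, splitting the comma-separated
   * owner id/name/email columns into {@link User} entries and deriving the HDFS browser
   * link for URNs that start with the hdfs prefix.
   */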
  @Override
  public Dataset mapRow(ResultSet rs, int rowNum) throws SQLException
  {
    int id = rs.getInt(DATASET_ID_COLUMN);
    String name = rs.getString(DATASET_NAME_COLUMN);
    String urn = rs.getString(DATASET_URN_COLUMN);
    String source = rs.getString(DATASET_SOURCE_COLUMN);
    String strOwner = rs.getString(DATASET_OWNER_ID_COLUMN);
    String strOwnerName = rs.getString(DATASET_OWNER_NAME_COLUMN);
    String strOwnerEmail = rs.getString(DATASET_OWNER_EMAIL_COLUMN);
    String schema = rs.getString(DATASET_SCHEMA_COLUMN);
    Time created = rs.getTime(DATASET_CREATED_TIME_COLUMN);
    Time modified = rs.getTime(DATASET_MODIFIED_TIME_COLUMN);
    Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN);
    Long sourceModifiedTime = rs.getLong(DATASET_SOURCE_MODIFIED_TIME_COLUMN);

    Dataset dataset = new Dataset();
    dataset.id = id;
    dataset.name = name;
    dataset.urn = urn;
    dataset.schema = schema;

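    // owner ids, names and emails come back as comma-separated lists; split them and
    // zip them into User objects only when the three lists line up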
    String[] owners = StringUtils.isNotBlank(strOwner) ? strOwner.split(",") : null;
    String[] ownerNames = StringUtils.isNotBlank(strOwnerName) ? strOwnerName.split(",") : null;
    String[] ownerEmail = StringUtils.isNotBlank(strOwnerEmail) ? strOwnerEmail.split(",") : null;

    dataset.owners = new ArrayList<>();
    if (owners != null && ownerNames != null && ownerEmail != null && owners.length == ownerNames.length
        && owners.length == ownerEmail.length)
    {
      for (int i = 0; i < owners.length; i++)
      {
        User user = new User();
        user.userName = owners[i];
        user.name = ownerNames[i];
        user.email = ownerEmail[i];
        dataset.owners.add(user);
      }
    }

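    // for HDFS URNs, build a link into the configured HDFS browser; stripping only
    // HDFS_URN_PREFIX_LEN characters keeps the leading slash of the path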
    if (StringUtils.startsWithIgnoreCase(dataset.urn, HDFS_PREFIX))
    {
      dataset.hdfsBrowserLink = HDFS_BROWSER_URL + dataset.urn.substring(HDFS_URN_PREFIX_LEN);
    }

    dataset.source = source;

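    // only surface a modified time when the source system reported one (source_modified_time > 0)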
    if (modified != null && sourceModifiedTime != null && sourceModifiedTime > 0)
    {
      dataset.modified = new java.util.Date(modified.getTime());
      dataset.formatedModified = dataset.modified.toString();
    }

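    // fall back to the modified time when no created time is recorded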
    if (created != null)
    {
      dataset.created = new java.util.Date(created.getTime());
    }
    else if (modified != null)
    {
      dataset.created = new java.util.Date(modified.getTime());
    }

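    // a positive schema_history_id indicates schema history records exist for this dataset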
    dataset.hasSchemaHistory = schemaHistoryId != null && schemaHistoryId > 0;

    return dataset;
  }
}