mirror of https://github.com/datahub-project/datahub.git (synced 2025-11-09 16:03:31 +00:00)
add the owner in the datasets page

commit b1c18be831 (parent 728bfa16d6)
@@ -46,25 +46,25 @@ public class DatasetColumnRowMapper implements RowMapper<DatasetColumn>
     String strPartitioned = rs.getString(PARTITIONED_COLUMN);
     Long commentCount = rs.getLong(COMMENT_COUNT_COLUMN);
     boolean partitioned = false;
-    if (StringUtils.isNotBlank(strPartitioned) && strPartitioned == "Y")
+    if (StringUtils.isNotBlank(strPartitioned) && strPartitioned.equalsIgnoreCase("y"))
     {
       partitioned = true;
     }
     String strIndexed = rs.getString(INDEXED_COLUMN);
     boolean indexed = false;
-    if (StringUtils.isNotBlank(strIndexed) && strIndexed == "Y")
+    if (StringUtils.isNotBlank(strIndexed) && strIndexed.equalsIgnoreCase("y"))
     {
       indexed = true;
     }
     String strNullable = rs.getString(NULLABLE_COLUMN);
     boolean nullable = false;
-    if (StringUtils.isNotBlank(strNullable) && strNullable == "Y")
+    if (StringUtils.isNotBlank(strNullable) && strNullable.equalsIgnoreCase("y"))
     {
       nullable = true;
     }
     String strDistributed = rs.getString(DISTRIBUTED_COLUMN);
     boolean distributed = false;
-    if (StringUtils.isNotBlank(strDistributed) && strDistributed == "Y")
+    if (StringUtils.isNotBlank(strDistributed) && strDistributed.equalsIgnoreCase("y"))
     {
       distributed = true;
     }
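The hunk above replaces the `strPartitioned == "Y"` style checks with `equalsIgnoreCase`. In Java, `==` on String values compares object references, not contents, so the old check only matched when both sides happened to be the same interned object; values read from a ResultSet generally are not. A minimal, hypothetical sketch (not part of the commit) showing the difference:

public class StringCompareDemo {
  public static void main(String[] args) {
    // Built at runtime, so it is a different object from the literal "Y"
    String flag = new StringBuilder("Y").toString();

    System.out.println(flag == "Y");                // false: compares references
    System.out.println(flag.equals("Y"));           // true: compares contents
    System.out.println(flag.equalsIgnoreCase("y")); // true: contents, ignoring case
  }
}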
@@ -14,13 +14,17 @@
 package dao;
 
 import models.Dataset;
+import models.User;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.jdbc.core.RowMapper;
+import play.Logger;
 import play.Play;
 
 import java.sql.Time;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
 
 public class DatasetRowMapper implements RowMapper<Dataset>
 {
@@ -30,8 +34,11 @@ public class DatasetRowMapper implements RowMapper<Dataset>
   public static String DATASET_SOURCE_COLUMN = "source";
   public static String DATASET_CREATED_TIME_COLUMN = "created";
   public static String DATASET_MODIFIED_TIME_COLUMN = "modified";
+  public static String DATASET_SOURCE_MODIFIED_TIME_COLUMN = "source_modified_time";
   public static String DATASET_PROPERTIES_COLUMN = "properties";
   public static String DATASET_SCHEMA_COLUMN = "schema";
+  public static String DATASET_OWNER_ID_COLUMN = "owner_id";
+  public static String DATASET_OWNER_NAME_COLUMN = "owner_name";
   public static String SCHEMA_HISTORY_ID_COLUMN = "schema_history_id";
   public static String HDFS_PREFIX = "hdfs";
 
@@ -43,13 +50,53 @@ public class DatasetRowMapper implements RowMapper<Dataset>
     String name = rs.getString(DATASET_NAME_COLUMN);
     String urn = rs.getString(DATASET_URN_COLUMN);
     String source = rs.getString(DATASET_SOURCE_COLUMN);
+    String strOwner = rs.getString(DATASET_OWNER_ID_COLUMN);
+    String strOwnerName = rs.getString(DATASET_OWNER_NAME_COLUMN);
     Time created = rs.getTime(DATASET_CREATED_TIME_COLUMN);
     Time modified = rs.getTime(DATASET_MODIFIED_TIME_COLUMN);
     Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN);
+    Long sourceModifiedTime = rs.getLong(DATASET_SOURCE_MODIFIED_TIME_COLUMN);
     Dataset dataset = new Dataset();
     dataset.id = id;
     dataset.name = name;
     dataset.urn = urn;
+    String[] owners = null;
+    if (StringUtils.isNotBlank(strOwner))
+    {
+      owners = strOwner.split(",");
+    }
+    String[] ownerNames = null;
+    if (StringUtils.isNotBlank(strOwnerName))
+    {
+      ownerNames = strOwnerName.split(",");
+    }
+    dataset.owners = new ArrayList<User>();
+    if (owners != null && ownerNames != null)
+    {
+      if (owners.length == ownerNames.length)
+      {
+        for (int i = 0; i < owners.length; i++)
+        {
+          User user = new User();
+          user.userName = owners[i];
+          if (StringUtils.isBlank(ownerNames[i]) || ownerNames[i].equalsIgnoreCase("*"))
+          {
+            user.name = owners[i];
+          }
+          else
+          {
+            user.name = ownerNames[i];
+            dataset.owners.add(user);
+          }
+        }
+      }
+      else
+      {
+        Logger.error("DatasetWithUserRowMapper get wrong owner and names. Dataset ID: "
+          + Long.toString(dataset.id) + " Owner: " + owners + " Owner names: " + ownerNames);
+      }
+    }
+
     if (StringUtils.isNotBlank(dataset.urn))
     {
       if (dataset.urn.substring(0, 4).equalsIgnoreCase(HDFS_PREFIX))
@@ -59,9 +106,10 @@ public class DatasetRowMapper implements RowMapper<Dataset>
       }
     }
     dataset.source = source;
-    if (modified != null)
+    if (modified != null && sourceModifiedTime != null && sourceModifiedTime > 0)
     {
       dataset.modified = new java.util.Date(modified.getTime());
+      dataset.formatedModified = dataset.modified.toString();
     }
     if (created != null)
     {
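The owner handling added above zips two comma-separated GROUP_CONCAT columns, owner_id and owner_name, into User objects, falling back to the id when the display name is blank or the '*' placeholder. A self-contained, hypothetical sketch of that pairing (example ids and names are made up, and the Owner class stands in for models.User); note the row mappers above only add owners that have a real display name, while the DAO loop further below adds every owner, which is what this sketch does:

import java.util.ArrayList;
import java.util.List;

public class OwnerParseSketch {

  // Stand-in for models.User: userName holds the id, name holds the display name
  static class Owner {
    String userName;
    String name;
  }

  static List<Owner> parseOwners(String ownerIds, String ownerNames) {
    List<Owner> result = new ArrayList<Owner>();
    if (ownerIds == null || ownerIds.trim().isEmpty()) {
      return result;
    }
    String[] ids = ownerIds.split(",");
    String[] names = (ownerNames == null) ? new String[0] : ownerNames.split(",");
    for (int i = 0; i < ids.length; i++) {
      Owner owner = new Owner();
      owner.userName = ids[i];
      // Fall back to the id when no display name was joined in ('*' is the IFNULL placeholder)
      boolean noDisplayName = i >= names.length
          || names[i].trim().isEmpty()
          || names[i].equalsIgnoreCase("*");
      owner.name = noDisplayName ? ids[i] : names[i];
      result.add(owner);
    }
    return result;
  }

  public static void main(String[] args) {
    // Hypothetical shape of the two GROUP_CONCAT columns for one dataset row
    for (Owner o : parseOwners("jweiner,dsahoo", "Jane Weiner,*")) {
      System.out.println(o.userName + " -> " + o.name);
    }
  }
}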
@@ -14,13 +14,17 @@
 package dao;
 
 import models.Dataset;
+import models.User;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.jdbc.core.RowMapper;
 import play.Play;
+import play.Logger;
 
 import java.sql.Time;
 import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
 
 public class DatasetWithUserRowMapper implements RowMapper<Dataset>
 {
@@ -31,10 +35,13 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
   public static String DATASET_SOURCE_COLUMN = "source";
   public static String DATASET_CREATED_TIME_COLUMN = "created";
   public static String DATASET_MODIFIED_TIME_COLUMN = "modified";
+  public static String DATASET_SOURCE_MODIFIED_TIME_COLUMN = "source_modified_time";
   public static String FAVORITE_DATASET_ID_COLUMN = "dataset_id";
   public static String DATASET_WATCH_ID_COLUMN = "watch_id";
   public static String DATASET_PROPERTIES_COLUMN = "properties";
   public static String SCHEMA_HISTORY_ID_COLUMN = "schema_history_id";
+  public static String DATASET_OWNER_ID_COLUMN = "owner_id";
+  public static String DATASET_OWNER_NAME_COLUMN = "owner_name";
   public static String HDFS_PREFIX = "hdfs";
 
   @Override
@@ -50,11 +57,50 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
     Integer favoriteId = rs.getInt(FAVORITE_DATASET_ID_COLUMN);
     Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN);
     Long watchId = rs.getLong(DATASET_WATCH_ID_COLUMN);
+    Long sourceModifiedTime = rs.getLong(DATASET_SOURCE_MODIFIED_TIME_COLUMN);
+    String strOwner = rs.getString(DATASET_OWNER_ID_COLUMN);
+    String strOwnerName = rs.getString(DATASET_OWNER_NAME_COLUMN);
     Dataset dataset = new Dataset();
     dataset.id = id;
     dataset.name = name;
     dataset.urn = urn;
     dataset.schema = schema;
+    String[] owners = null;
+    if (StringUtils.isNotBlank(strOwner))
+    {
+      owners = strOwner.split(",");
+    }
+    String[] ownerNames = null;
+    if (StringUtils.isNotBlank(strOwnerName))
+    {
+      ownerNames = strOwnerName.split(",");
+    }
+    dataset.owners = new ArrayList<User>();
+    if (owners != null && ownerNames != null)
+    {
+      if (owners.length == ownerNames.length)
+      {
+        for (int i = 0; i < owners.length; i++)
+        {
+          User user = new User();
+          user.userName = owners[i];
+          if (StringUtils.isBlank(ownerNames[i]) || ownerNames[i].equalsIgnoreCase("*"))
+          {
+            user.name = owners[i];
+          }
+          else
+          {
+            user.name = ownerNames[i];
+            dataset.owners.add(user);
+          }
+        }
+      }
+      else
+      {
+        Logger.error("DatasetWithUserRowMapper get wrong owner and names. Dataset ID: "
+          + Long.toString(dataset.id) + " Owner: " + owners + " Owner names: " + ownerNames);
+      }
+    }
     if (StringUtils.isNotBlank(dataset.urn))
     {
       if (dataset.urn.substring(0, 4).equalsIgnoreCase(HDFS_PREFIX))
@@ -64,9 +110,10 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
       }
     }
     dataset.source = source;
-    if (modified != null)
+    if (modified != null && sourceModifiedTime != null && sourceModifiedTime > 0)
     {
       dataset.modified = new java.util.Date(modified.getTime());
+      dataset.formatedModified = dataset.modified.toString();
     }
     if (created != null)
     {
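Both mappers above implement Spring's RowMapper, which JdbcTemplate invokes once per result row; that is why the owners must arrive pre-flattened into a single owner_id/owner_name pair of columns, which the GROUP_CONCAT queries in the DAO hunks below take care of. A minimal, hypothetical illustration of that contract, using a simplified stand-in query rather than the DAO's constants:

import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;

public class RowMapperContractSketch {

  static class NameOnly {
    String name;
  }

  // mapRow is called once for each row the query returns
  static final RowMapper<NameOnly> NAME_MAPPER = new RowMapper<NameOnly>() {
    @Override
    public NameOnly mapRow(ResultSet rs, int rowNum) throws SQLException {
      NameOnly n = new NameOnly();
      n.name = rs.getString("name");
      return n;
    }
  };

  static List<NameOnly> loadNames(DataSource ds) {
    // Simplified query for illustration; the real paged queries are the SELECT_PAGED_DATASET_* constants
    return new JdbcTemplate(ds).query("SELECT name FROM dict_dataset LIMIT 10", NAME_MAPPER);
  }
}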
@@ -49,42 +49,91 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
 
 
   private final static String SELECT_PAGED_DATASET = "SELECT SQL_CALC_FOUND_ROWS " +
-      "id, name, urn, source, properties, `schema` FROM dict_dataset ORDER BY urn LIMIT ?, ?";
+      "d.id, d.name, d.urn, d.source, d.properties, d.schema, " +
+      "GROUP_CONCAT(o.owner_id ORDER BY o.sort_id ASC SEPARATOR ',') as owner_id, " +
+      "GROUP_CONCAT(IFNULL(u.display_name, '*') ORDER BY o.sort_id ASC SEPARATOR ',') as owner_name, " +
+      "FROM_UNIXTIME(source_created_time) as created, d.source_modified_time, " +
+      "FROM_UNIXTIME(source_modified_time) as modified " +
+      "FROM dict_dataset d " +
+      "LEFT JOIN dataset_owner o on (d.id = o.dataset_id and (o.is_deleted is null OR o.is_deleted != 'Y')) " +
+      "LEFT JOIN dir_external_user_info u on (o.owner_id = u.user_id and u.app_id = 300) " +
+      "GROUP BY d.id, d.name, d.urn, d.source, d.properties, d.schema, " +
+      "created, d.source_modified_time, modified ORDER BY d.urn LIMIT ?, ?";
 
   private final static String SELECT_PAGED_DATASET_BY_CURRENT_USER = "SELECT SQL_CALC_FOUND_ROWS " +
-      "d.id, d.name, d.urn, d.source, d.schema, d.properties, f.dataset_id, w.id as watch_id " +
+      "d.id, d.name, d.urn, d.source, d.schema, d.properties, " +
+      "f.dataset_id, w.id as watch_id, " +
+      "GROUP_CONCAT(o.owner_id ORDER BY o.sort_id ASC SEPARATOR ',') as owner_id, " +
+      "GROUP_CONCAT(IFNULL(u.display_name, '*') ORDER BY o.sort_id ASC SEPARATOR ',') as owner_name, " +
+      "FROM_UNIXTIME(source_created_time) as created, d.source_modified_time, " +
+      "FROM_UNIXTIME(source_modified_time) as modified " +
       "FROM dict_dataset d LEFT JOIN favorites f ON (" +
       "d.id = f.dataset_id and f.user_id = ?) " +
       "LEFT JOIN watch w on (d.id = w.item_id and w.item_type = 'dataset' and w.user_id = ?) " +
-      "ORDER BY d.urn LIMIT ?, ?";
+      "LEFT JOIN dataset_owner o on (d.id = o.dataset_id and (o.is_deleted is null OR o.is_deleted != 'Y')) " +
+      "LEFT JOIN dir_external_user_info u on (o.owner_id = u.user_id and u.app_id = 300) " +
+      "GROUP BY d.id, d.name, d.urn, d.source, d.schema, d.properties, f.dataset_id, " +
+      "watch_id, created, d.source_modified_time, modified ORDER BY d.urn LIMIT ?, ?";
 
   private final static String SELECT_PAGED_DATASET_BY_URN = "SELECT SQL_CALC_FOUND_ROWS " +
-      "id, name, urn, source, properties, `schema` FROM dict_dataset WHERE urn LIKE ? ORDER BY urn limit ?, ?";
+      "d.id, d.name, d.urn, d.source, d.properties, d.schema, " +
+      "GROUP_CONCAT(o.owner_id ORDER BY o.sort_id ASC SEPARATOR ',') as owner_id, " +
+      "GROUP_CONCAT(IFNULL(u.display_name, '*') ORDER BY o.sort_id ASC SEPARATOR ',') as owner_name, " +
+      "FROM_UNIXTIME(source_created_time) as created, d.source_modified_time, " +
+      "FROM_UNIXTIME(source_modified_time) as modified " +
+      "FROM dict_dataset d " +
+      "LEFT JOIN dataset_owner o on (d.id = o.dataset_id and (o.is_deleted is null OR o.is_deleted != 'Y')) " +
+      "LEFT JOIN dir_external_user_info u on (o.owner_id = u.user_id and u.app_id = 300) " +
+      "WHERE d.urn LIKE ? " +
+      "GROUP BY d.id, d.name, d.urn, d.source, d.properties, d.schema, created, " +
+      "d.source_modified_time, modified " +
+      "ORDER BY d.urn limit ?, ?";
 
   private final static String SELECT_PAGED_DATASET_BY_URN_CURRENT_USER = "SELECT SQL_CALC_FOUND_ROWS " +
-      "d.id, d.name, d.urn, d.source, d.schema, d.properties, f.dataset_id, w.id as watch_id " +
+      "d.id, d.name, d.urn, d.source, d.schema, " +
+      "GROUP_CONCAT(o.owner_id ORDER BY o.sort_id ASC SEPARATOR ',') as owner_id, " +
+      "GROUP_CONCAT(IFNULL(u.display_name, '*') ORDER BY o.sort_id ASC SEPARATOR ',') as owner_name, " +
+      "d.properties, f.dataset_id, w.id as watch_id, " +
+      "FROM_UNIXTIME(source_created_time) as created, d.source_modified_time, " +
+      "FROM_UNIXTIME(source_modified_time) as modified " +
       "FROM dict_dataset d LEFT JOIN favorites f ON (" +
       "d.id = f.dataset_id and f.user_id = ?) " +
       "LEFT JOIN watch w ON (d.id = w.item_id and w.item_type = 'dataset' and w.user_id = ?) " +
-      "WHERE d.urn LIKE ? ORDER BY urn LIMIT ?, ?";
+      "LEFT JOIN dataset_owner o on (d.id = o.dataset_id and (o.is_deleted is null OR o.is_deleted != 'Y')) " +
+      "LEFT JOIN dir_external_user_info u on (o.owner_id = u.user_id and u.app_id = 300) " +
+      "WHERE d.urn LIKE ? " +
+      "GROUP BY d.id, d.name, d.urn, d.source, d.schema, d.properties, f.dataset_id, " +
+      "watch_id, created, d.source_modified_time, modified ORDER BY urn LIMIT ?, ?";
 
   private final static String CHECK_SCHEMA_HISTORY = "SELECT COUNT(*) FROM dict_dataset_schema_history " +
       "WHERE dataset_id = ? ";
 
-  private final static String GET_DATASET_BY_ID = "SELECT id, max(s.id) as schema_history_id, " +
-      "name, urn, source, `schema`, " +
-      "FROM_UNIXTIME(source_created_time) as created, FROM_UNIXTIME(source_modified_time) as modified " +
-      "FROM dict_dataset d " +
-      "LEFT JOIN dict_dataset_schema_history s on (d.id = s.dataset_id) WHERE d.id = ?";
+  private final static String GET_DATASET_BY_ID = "SELECT d.id, max(s.id) as schema_history_id, d.name, " +
+      "d.urn, d.source, d.schema, GROUP_CONCAT(o.owner_id ORDER BY o.sort_id ASC SEPARATOR ',') as owner_id, " +
+      "GROUP_CONCAT(IFNULL(u.display_name, '*') ORDER BY o.sort_id ASC SEPARATOR ',') as owner_name, " +
+      "FROM_UNIXTIME(source_created_time) as created, d.source_modified_time, " +
+      "FROM_UNIXTIME(source_modified_time) as modified " +
+      "FROM dict_dataset d LEFT JOIN dict_dataset_schema_history s on (d.id = s.dataset_id) " +
+      "LEFT JOIN dataset_owner o on (d.id = o.dataset_id) " +
+      "LEFT JOIN dir_external_user_info u on (o.owner_id = u.user_id) " +
+      "WHERE d.id = ? GROUP BY d.id, d.name, d.urn, d.source, d.schema, " +
+      "created, d.source_modified_time, modified";
 
   private final static String GET_DATASET_BY_ID_CURRENT_USER = "SELECT DISTINCT d.id, " +
       "max(s.id) as schema_history_id, " +
-      "d.name, d.urn, d.source, d.schema, FROM_UNIXTIME(d.source_created_time) as created, " +
+      "d.name, d.urn, d.source, d.schema, " +
+      "GROUP_CONCAT(o.owner_id ORDER BY o.sort_id ASC SEPARATOR ',') as owner_id, " +
+      "GROUP_CONCAT(IFNULL(u.display_name, '*') ORDER BY o.sort_id ASC SEPARATOR ',') as owner_name, " +
+      "FROM_UNIXTIME(d.source_created_time) as created, " +
+      "d.source_modified_time, " +
       "FROM_UNIXTIME(d.source_modified_time) as modified, f.dataset_id, w.id as watch_id FROM dict_dataset d " +
       "LEFT JOIN favorites f ON (d.id = f.dataset_id and f.user_id = ?) " +
       "LEFT JOIN dict_dataset_schema_history s on (d.id = s.dataset_id) " +
       "LEFT JOIN watch w ON (w.item_id = d.id and w.item_type = 'dataset' and w.user_id = ?) " +
-      "WHERE d.id = ?";
+      "LEFT JOIN dataset_owner o on (d.id = o.dataset_id) " +
+      "LEFT JOIN dir_external_user_info u on (o.owner_id = u.user_id) " +
+      "WHERE d.id = ? GROUP BY d.id, d.name, d.urn, d.source, d.schema, created, " +
+      "d.source_modified_time, modified, f.dataset_id, watch_id";
 
   private final static String GET_DATASET_COLUMNS_BY_DATASET_ID = "select dfd.field_id, dfd.sort_id, " +
       "dfd.parent_sort_id, dfd.parent_path, dfd.field_name, dfd.data_type, " +
@@ -94,7 +143,7 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
       "WHERE ddfc.dataset_id = dfd.dataset_id AND ddfc.field_id = dfd.field_id ) as comment_count " +
       "FROM dict_field_detail dfd LEFT JOIN dict_dataset_field_comment ddfc ON " +
       "(ddfc.field_id = dfd.field_id AND ddfc.is_default = true) LEFT JOIN field_comments c ON " +
-      "c.id = ddfc.comment_id WHERE dfd.dataset_id = ? ORDER BY 1";
+      "c.id = ddfc.comment_id WHERE dfd.dataset_id = ? ORDER BY dfd.sort_id";
 
   private final static String GET_DATASET_COLUMNS_BY_DATASETID_AND_COLUMNID = "SELECT dfd.field_id, " +
       "dfd.sort_id, dfd.parent_sort_id, dfd.parent_path, dfd.field_name, dfd.data_type, " +
@@ -104,7 +153,7 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
       "WHERE ddfc.dataset_id = dfd.dataset_id AND ddfc.field_id = dfd.field_id ) as comment_count " +
       "FROM dict_field_detail dfd LEFT JOIN dict_dataset_field_comment ddfc ON " +
       "(ddfc.field_id = dfd.field_id AND ddfc.is_default = true) LEFT JOIN comments c ON " +
-      "c.id = ddfc.comment_id WHERE dfd.dataset_id = ? AND dfd.field_id = ? ORDER BY 1";
+      "c.id = ddfc.comment_id WHERE dfd.dataset_id = ? AND dfd.field_id = ? ORDER BY dfd.sort_id";
 
   private final static String GET_DATASET_PROPERTIES_BY_DATASET_ID =
       "SELECT source, `properties` FROM dict_dataset WHERE id=?";
@@ -289,17 +338,65 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
     for (Map row : rows) {
 
       Dataset ds = new Dataset();
+      Timestamp modified = (Timestamp)row.get(DatasetWithUserRowMapper.DATASET_MODIFIED_TIME_COLUMN);
       ds.id = (Long)row.get(DatasetWithUserRowMapper.DATASET_ID_COLUMN);
       ds.name = (String)row.get(DatasetWithUserRowMapper.DATASET_NAME_COLUMN);
       ds.source = (String)row.get(DatasetWithUserRowMapper.DATASET_SOURCE_COLUMN);
       ds.urn = (String)row.get(DatasetWithUserRowMapper.DATASET_URN_COLUMN);
       ds.schema = (String)row.get(DatasetWithUserRowMapper.DATASET_SCHEMA_COLUMN);
+      String strOwner = (String)row.get(DatasetWithUserRowMapper.DATASET_OWNER_ID_COLUMN);
+      String strOwnerName = (String)row.get(DatasetWithUserRowMapper.DATASET_OWNER_NAME_COLUMN);
+      Long sourceModifiedTime =
+          (Long)row.get(DatasetWithUserRowMapper.DATASET_SOURCE_MODIFIED_TIME_COLUMN);
       String properties = (String)row.get(DatasetWithUserRowMapper.DATASET_PROPERTIES_COLUMN);
       if (StringUtils.isNotBlank(properties))
       {
        ds.properties = Json.parse(properties);
       }
 
+      if (modified != null && sourceModifiedTime != null && sourceModifiedTime > 0)
+      {
+        ds.modified = modified;
+        ds.formatedModified = modified.toString();
+      }
+
+      String[] owners = null;
+      if (StringUtils.isNotBlank(strOwner))
+      {
+        owners = strOwner.split(",");
+      }
+      String[] ownerNames = null;
+      if (StringUtils.isNotBlank(strOwnerName))
+      {
+        ownerNames = strOwnerName.split(",");
+      }
+      ds.owners = new ArrayList<User>();
+      if (owners != null && ownerNames != null)
+      {
+        if (owners.length == ownerNames.length)
+        {
+          for (int i = 0; i < owners.length; i++)
+          {
+            User user = new User();
+            user.userName = owners[i];
+            if (StringUtils.isBlank(ownerNames[i]) || ownerNames[i].equalsIgnoreCase("*"))
+            {
+              user.name = owners[i];
+            }
+            else
+            {
+              user.name = ownerNames[i];
+            }
+            ds.owners.add(user);
+          }
+        }
+        else
+        {
+          Logger.error("getPagedDatasets get wrong owner and names. Dataset ID: "
+            + Long.toString(ds.id) + " Owner: " + owners + " Owner names: " + ownerNames);
+        }
+      }
+
       Integer favoriteId = (Integer)row.get(DatasetWithUserRowMapper.FAVORITE_DATASET_ID_COLUMN);
       Long watchId = (Long)row.get(DatasetWithUserRowMapper.DATASET_WATCH_ID_COLUMN);
 
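Note the guard added in getPagedDatasets above (and mirrored in both row mappers): modified comes back as a boxed Timestamp and source_modified_time as a boxed Long, either of which can be null, so the formatted "last modified" string is only built when both are present and positive. A small, hypothetical demonstration of that null handling (column names taken from the queries above):

import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;

public class RowMapNullHandlingSketch {
  public static void main(String[] args) {
    Map<String, Object> row = new HashMap<String, Object>();
    row.put("modified", new Timestamp(System.currentTimeMillis()));
    row.put("source_modified_time", null);   // e.g. a dataset with no source timestamp

    Timestamp modified = (Timestamp) row.get("modified");
    Long sourceModifiedTime = (Long) row.get("source_modified_time");

    String formatedModified = null;
    if (modified != null && sourceModifiedTime != null && sourceModifiedTime > 0) {
      formatedModified = modified.toString();
    }
    System.out.println("formatedModified = " + formatedModified);  // null in this example
  }
}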
@@ -16,6 +16,7 @@ package models;
 import com.fasterxml.jackson.databind.JsonNode;
 
 import java.util.Date;
+import java.util.List;
 
 public class Dataset {
 
@@ -25,6 +26,7 @@ public class Dataset {
   public String urn;
   public Date created;
   public Date modified;
+  public String formatedModified;
   public String schema;
   public String nertzLink;
   public boolean isFavorite;
@@ -32,5 +34,6 @@ public class Dataset {
   public boolean isWatched;
   public boolean hasSchemaHistory;
   public JsonNode properties;
+  public List<User> owners;
 
 }
@@ -323,12 +323,20 @@
           {{ dataset.name }}
         {{/link-to}}
       </div>
+      {{#if dataset.owners}}
       <div class="col-xs-12">
-        {{ dataset.properties.owner }}
+        <span class="prop-label">owner:</span>
+        {{#each owner in dataset.owners}}
+          <p style="display:inline" title={{owner.name}}>{{ owner.userName }} </p>
+        {{/each}}
       </div>
+      {{/if}}
+      {{#if dataset.formatedModified}}
       <div class="col-xs-12">
-        {{ dataset.properties.lastAlterTime }}
+        <span class="prop-label">last modified:</span>
+        {{ dataset.formatedModified }}
       </div>
+      {{/if}}
     </div>
     <div class="col-md-4 text-right">
       <ul class="datasetTableLinks">
@@ -214,10 +214,10 @@ App.DatasetController = Ember.Controller.extend({
   }.observes('hasProperty', 'isHDFS').on('init'),
   buildJsonView: function(){
     var model = this.get("model");
-    var schema = JSON.parse(JSON.stringify(model.schema))
+    var schema = JSON.parse(model.schema)
     setTimeout(function() {
       $("#json-viewer").JSONView(schema)
-    }, 300)
+    }, 500);
   },
   actions: {
     setView: function(view) {
@@ -311,6 +311,8 @@ App.DatasetRoute = Ember.Route.extend({
     var controller = this.get('controller')
     var id = this.get('controller.model.id')
     var columnUrl = 'api/v1/datasets/' + id + "/columns";
+    controller.set("isTable", true);
+    controller.set("isJSON", false);
     $.get(columnUrl, function(data) {
       if (data && data.status == "ok")
       {