add the schema history link in dataset detail page

parent 6bbc33bc0d
commit 0ba53d0656
@@ -49,6 +49,20 @@ public class SchemaHistory extends Controller
         }
     }

+    Long datasetId = 0L;
+    String datasetIdStr = request().getQueryString("datasetId");
+    if (StringUtils.isNotBlank(datasetIdStr))
+    {
+        try
+        {
+            datasetId = Long.parseLong(datasetIdStr);
+        }
+        catch(NumberFormatException e)
+        {
+            datasetId = 0L;
+        }
+    }
+
     int size = 10;
     String sizeStr = request().getQueryString("size");
     if (StringUtils.isBlank(sizeStr))
@@ -70,7 +84,7 @@ public class SchemaHistory extends Controller
     }

     result.put("status", "ok");
-    result.set("data", SchemaHistoryDAO.getPagedSchemaDataset(name, page, size));
+    result.set("data", SchemaHistoryDAO.getPagedSchemaDataset(name, datasetId, page, size));
     return ok(result);
 }
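Note: the controller now accepts an optional datasetId query parameter and falls back to 0 on blank or non-numeric input. A minimal, self-contained sketch of that parsing pattern follows; the class and main method are illustrative only and not part of the commit, and the plain null/trim check stands in for StringUtils.isNotBlank.

    // Illustrative helper mirroring the controller's handling of the optional
    // "datasetId" query parameter: blank or non-numeric input falls back to 0.
    public final class QueryParamExample {

        static long parseDatasetId(String datasetIdStr) {
            // Approximates StringUtils.isNotBlank(datasetIdStr)
            if (datasetIdStr == null || datasetIdStr.trim().isEmpty()) {
                return 0L;
            }
            try {
                return Long.parseLong(datasetIdStr.trim());
            } catch (NumberFormatException e) {
                return 0L;
            }
        }

        public static void main(String[] args) {
            System.out.println(parseDatasetId("12345")); // 12345
            System.out.println(parseDatasetId("abc"));   // 0
            System.out.println(parseDatasetId(null));    // 0
        }
    }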
@@ -32,6 +32,7 @@ public class DatasetRowMapper implements RowMapper<Dataset>
     public static String DATASET_MODIFIED_TIME_COLUMN = "modified";
     public static String DATASET_PROPERTIES_COLUMN = "properties";
     public static String DATASET_SCHEMA_COLUMN = "schema";
+    public static String SCHEMA_HISTORY_ID_COLUMN = "schema_history_id";
     public static String HDFS_PREFIX = "hdfs";

@@ -44,6 +45,7 @@ public class DatasetRowMapper implements RowMapper<Dataset>
     String source = rs.getString(DATASET_SOURCE_COLUMN);
     Time created = rs.getTime(DATASET_CREATED_TIME_COLUMN);
     Time modified = rs.getTime(DATASET_MODIFIED_TIME_COLUMN);
+    Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN);
     Dataset dataset = new Dataset();
     dataset.id = id;
     dataset.name = name;
@@ -68,6 +70,15 @@ public class DatasetRowMapper implements RowMapper<Dataset>
         dataset.created = new java.util.Date(modified.getTime());
     }

+    if (schemaHistoryId != null && schemaHistoryId > 0)
+    {
+        dataset.hasSchemaHistory = true;
+    }
+    else
+    {
+        dataset.hasSchemaHistory = false;
+    }
+
     return dataset;
 }
 }
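One detail worth noting for this row mapper (and for the DatasetWithUserRowMapper change below): java.sql.ResultSet.getInt() returns 0 when the joined schema_history_id column is SQL NULL, so the boxed Integer above is never null and the null guard is effectively redundant, though harmless; the > 0 check carries the logic. A small illustrative helper, not part of the commit, that makes the NULL case explicit via wasNull():

    import java.sql.ResultSet;
    import java.sql.SQLException;

    final class SchemaHistoryFlag {
        // True only when the LEFT JOIN produced a real, positive schema history id.
        static boolean hasSchemaHistory(ResultSet rs, String column) throws SQLException {
            int id = rs.getInt(column); // getInt() yields 0 for SQL NULL
            return !rs.wasNull() && id > 0;
        }
    }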
@@ -34,6 +34,7 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
     public static String FAVORITE_DATASET_ID_COLUMN = "dataset_id";
     public static String DATASET_WATCH_ID_COLUMN = "watch_id";
     public static String DATASET_PROPERTIES_COLUMN = "properties";
+    public static String SCHEMA_HISTORY_ID_COLUMN = "schema_history_id";
     public static String HDFS_PREFIX = "hdfs";

     @Override
@@ -47,6 +48,7 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
     Time created = rs.getTime(DATASET_CREATED_TIME_COLUMN);
     Time modified = rs.getTime(DATASET_MODIFIED_TIME_COLUMN);
     Integer favoriteId = rs.getInt(FAVORITE_DATASET_ID_COLUMN);
+    Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN);
     Long watchId = rs.getLong(DATASET_WATCH_ID_COLUMN);
     Dataset dataset = new Dataset();
     dataset.id = id;
@@ -91,6 +93,15 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
         dataset.isWatched = false;
     }

+    if (schemaHistoryId != null && schemaHistoryId > 0)
+    {
+        dataset.hasSchemaHistory = true;
+    }
+    else
+    {
+        dataset.hasSchemaHistory = false;
+    }
+
     return dataset;
 }
 }
@@ -68,14 +68,21 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
     "LEFT JOIN watch w ON (d.id = w.item_id and w.item_type = 'dataset' and w.user_id = ?) " +
     "WHERE d.urn LIKE ? ORDER BY urn LIMIT ?, ?";

-    private final static String GET_DATASET_BY_ID = "SELECT id, name, urn, source, `schema`, " +
+    private final static String CHECK_SCHEMA_HISTORY = "SELECT COUNT(*) FROM dict_dataset_schema_history " +
+        "WHERE dataset_id = ? ";
+
+    private final static String GET_DATASET_BY_ID = "SELECT id, max(s.id) as schema_history_id, " +
+        "name, urn, source, `schema`, " +
         "FROM_UNIXTIME(source_created_time) as created, FROM_UNIXTIME(source_modified_time) as modified " +
-        "FROM dict_dataset WHERE id = ?";
+        "FROM dict_dataset d " +
+        "LEFT JOIN dict_dataset_schema_history s on (d.id = s.dataset_id) WHERE d.id = ?";

     private final static String GET_DATASET_BY_ID_CURRENT_USER = "SELECT DISTINCT d.id, " +
+        "max(s.id) as schema_history_id, " +
         "d.name, d.urn, d.source, d.schema, FROM_UNIXTIME(d.source_created_time) as created, " +
         "FROM_UNIXTIME(d.source_modified_time) as modified, f.dataset_id, w.id as watch_id FROM dict_dataset d " +
         "LEFT JOIN favorites f ON (d.id = f.dataset_id and f.user_id = ?) " +
+        "LEFT JOIN dict_dataset_schema_history s on (d.id = s.dataset_id) " +
         "LEFT JOIN watch w ON (w.item_id = d.id and w.item_type = 'dataset' and w.user_id = ?) " +
         "WHERE d.id = ?";

@@ -295,6 +302,20 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
     Integer favoriteId = (Integer)row.get(DatasetWithUserRowMapper.FAVORITE_DATASET_ID_COLUMN);
     Long watchId = (Long)row.get(DatasetWithUserRowMapper.DATASET_WATCH_ID_COLUMN);
+
+    Long schemaHistoryRecordCount = 0L;
+    try
+    {
+        schemaHistoryRecordCount = getJdbcTemplate().queryForObject(
+            CHECK_SCHEMA_HISTORY,
+            Long.class,
+            ds.id);
+    }
+    catch (EmptyResultDataAccessException e)
+    {
+        Logger.error("Exception = " + e.getMessage());
+    }

     if (StringUtils.isNotBlank(ds.urn))
     {
         if (ds.urn.substring(0, 4).equalsIgnoreCase(DatasetRowMapper.HDFS_PREFIX))
@@ -320,6 +341,14 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
         ds.isWatched = false;
         ds.watchId = 0L;
     }
+    if (schemaHistoryRecordCount != null && schemaHistoryRecordCount > 0)
+    {
+        ds.hasSchemaHistory = true;
+    }
+    else
+    {
+        ds.hasSchemaHistory = false;
+    }
     pagedDatasets.add(ds);
 }

@@ -358,7 +387,7 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
     if (userId != null && userId > 0)
     {
         dataset = (Dataset)getJdbcTemplate().queryForObject(
-            GET_DATASET_BY_ID_CURRENT_USER,
+            GET_DATASET_BY_ID_CURRENT_USER,
             new DatasetWithUserRowMapper(),
             userId,
             userId,
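The DAO decides hasSchemaHistory for the paged listing by running CHECK_SCHEMA_HISTORY once per dataset. Below is a compact sketch of the same check with Spring's JdbcTemplate; it is illustrative only, the class name is hypothetical, and since COUNT(*) always returns one row the EmptyResultDataAccessException branch above is mostly defensive.

    import org.springframework.jdbc.core.JdbcTemplate;

    final class SchemaHistoryCheck {
        private static final String CHECK_SCHEMA_HISTORY =
            "SELECT COUNT(*) FROM dict_dataset_schema_history WHERE dataset_id = ?";

        // True when at least one schema history row exists for the dataset.
        static boolean hasSchemaHistory(JdbcTemplate jdbc, long datasetId) {
            Long count = jdbc.queryForObject(CHECK_SCHEMA_HISTORY, Long.class, datasetId);
            return count != null && count > 0;
        }
    }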
@@ -34,15 +34,25 @@ public class SchemaHistoryDAO extends AbstractMySQLOpenSourceDAO{
     "MAX(DATE_FORMAT(modified_date,'%Y-%m-%d')) as modified_date FROM dict_dataset_schema_history " +
     "WHERE dataset_id is not null GROUP BY 1 ORDER BY urn LIMIT ?, ?";

+    private final static String GET_SPECIFIED_SCHEMA_DATASET = "SELECT SQL_CALC_FOUND_ROWS " +
+        "DISTINCT dataset_id, urn, " +
+        "MAX(DATE_FORMAT(modified_date,'%Y-%m-%d')) as modified_date FROM dict_dataset_schema_history " +
+        "WHERE dataset_id = ? GROUP BY 1 ORDER BY urn LIMIT ?, ?";
+
     private final static String GET_PAGED_SCHEMA_DATASET_WITH_FILTER = "SELECT SQL_CALC_FOUND_ROWS " +
         "DISTINCT dataset_id, urn, DATE_FORMAT(modified_date,'%Y-%m-%d') as modified_date " +
         "FROM dict_dataset_schema_history WHERE dataset_id is not null and urn LIKE ? " +
         "GROUP BY 1 ORDER BY urn LIMIT ?, ?";

+    private final static String GET_SPECIFIED_SCHEMA_DATASET_WITH_FILTER = "SELECT SQL_CALC_FOUND_ROWS " +
+        "DISTINCT dataset_id, urn, DATE_FORMAT(modified_date,'%Y-%m-%d') as modified_date " +
+        "FROM dict_dataset_schema_history WHERE dataset_id = ? and urn LIKE ? " +
+        "GROUP BY 1 ORDER BY urn LIMIT ?, ?";
+
     private final static String GET_SCHEMA_HISTORY_BY_DATASET_ID = "SELECT DATE_FORMAT(modified_date,'%Y-%m-%d') " +
         "as modified_date, `schema` FROM dict_dataset_schema_history WHERE dataset_id = ? ORDER BY 1";

-    public static ObjectNode getPagedSchemaDataset(String name, int page, int size)
+    public static ObjectNode getPagedSchemaDataset(String name, Long datasetId, int page, int size)
     {
         ObjectNode result = Json.newObject();

@@ -56,20 +66,43 @@ public class SchemaHistoryDAO extends AbstractMySQLOpenSourceDAO{
     List<SchemaDataset> pagedScripts = null;
     if (StringUtils.isNotBlank(name))
     {
-        pagedScripts = getJdbcTemplate().query(
-            GET_PAGED_SCHEMA_DATASET_WITH_FILTER,
-            new SchemaDatasetRowMapper(),
-            "%" + name + "%",
-            (page - 1) * size, size);
+        if (datasetId != null && datasetId > 0)
+        {
+            pagedScripts = getJdbcTemplate().query(
+                GET_SPECIFIED_SCHEMA_DATASET_WITH_FILTER,
+                new SchemaDatasetRowMapper(),
+                datasetId,
+                "%" + name + "%",
+                (page - 1) * size, size);
+
+        }
+        else
+        {
+            pagedScripts = getJdbcTemplate().query(
+                GET_PAGED_SCHEMA_DATASET_WITH_FILTER,
+                new SchemaDatasetRowMapper(),
+                "%" + name + "%",
+                (page - 1) * size, size);
+        }
     }
     else
     {
-        pagedScripts = getJdbcTemplate().query(
-            GET_PAGED_SCHEMA_DATASET,
-            new SchemaDatasetRowMapper(),
-            (page - 1) * size, size);
+
+        if (datasetId != null && datasetId > 0)
+        {
+            pagedScripts = getJdbcTemplate().query(
+                GET_SPECIFIED_SCHEMA_DATASET,
+                new SchemaDatasetRowMapper(),
+                datasetId, (page - 1) * size, size);
+        }
+        else
+        {
+            pagedScripts = getJdbcTemplate().query(
+                GET_PAGED_SCHEMA_DATASET,
+                new SchemaDatasetRowMapper(),
+                (page - 1) * size, size);
+        }
     }

     long count = 0;
     try {
         count = getJdbcTemplate().queryForObject(
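The four branches above differ only in which SQL constant runs and whether datasetId and the name filter are bound. As an aside, the dispatch can be expressed as data; the sketch below is illustrative only and not part of the commit, the constant names mirror the DAO, and everything else is hypothetical.

    import java.util.ArrayList;
    import java.util.List;

    final class SchemaDatasetQuery {
        final String sql;
        final Object[] args;

        // Chooses the SQL string and bind parameters the same way the DAO's if/else tree does.
        SchemaDatasetQuery(String name, Long datasetId, int page, int size,
                           String paged, String pagedWithFilter,
                           String specified, String specifiedWithFilter) {
            boolean hasName = name != null && !name.trim().isEmpty();
            boolean hasId = datasetId != null && datasetId > 0;
            List<Object> params = new ArrayList<>();
            if (hasId) params.add(datasetId);          // dataset_id = ?
            if (hasName) params.add("%" + name + "%"); // urn LIKE ?
            params.add((page - 1) * size);             // LIMIT offset
            params.add(size);                          // LIMIT row count
            this.sql = hasName
                ? (hasId ? specifiedWithFilter : pagedWithFilter)
                : (hasId ? specified : paged);
            this.args = params.toArray();
        }
    }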
@@ -30,6 +30,7 @@ public class Dataset {
     public boolean isFavorite;
     public long watchId;
     public boolean isWatched;
+    public boolean hasSchemaHistory;
     public JsonNode properties;

 }
@@ -1156,6 +1156,16 @@
       </span>
     </a>
   </li>
+  {{#if model.hasSchemaHistory}}
+  <li>
+    <a target="_blank" {{bind-attr href=schemaHistoryUrl}}>
+      <i class="fa fa-history"></i>
+      <span class="hidden-sm hidden-xs">
+        Schema History
+      </span>
+    </a>
+  </li>
+  {{/if}}
   <li>
     {{#dataset-watch dataset=model getDatasets="getDataset"}}
     {{/dataset-watch}}
@@ -186,6 +186,17 @@ App.DatasetController = Ember.Controller.extend({
         return '';

     }.property('model.id'),
+    schemaHistoryUrl: function(){
+        var model = this.get("model");
+        if (model)
+        {
+            if (model.id)
+            {
+                return '/schemaHistory#/schemas/' + model.id;
+            }
+        }
+        return '';
+    }.property('model.id'),
     adjustPanes: function() {
         var hasProperty = this.get('hasProperty')
         var isHDFS = this.get('isHDFS')
@@ -13,6 +13,7 @@
 App.Router.map(function() {
     this.resource('schemas', function(){
         this.resource('page', {path: '/page/:page'});
+        this.resource('schema', {path: '/:id'});
     });
 });

@@ -39,7 +40,7 @@
 var schemaData = [];
 var skipChangeEvent = false;

-function updateSchemas(page)
+function updateSchemas(page, datasetId)
 {
     var url;
     if (!schemaName)
@@ -51,6 +52,11 @@
         url = '/api/v1/schemaHistory/datasets?name=' + schemaName + '&size=10&page=' + page;
     }

+    if (datasetId && datasetId > 0)
+    {
+        url += '&datasetId=' + datasetId;
+    }
+
     $.get(url, function(data) {
         if (data && data.status == "ok"){
             if (schemasController)
@@ -67,7 +73,7 @@

 $("#name").bind("paste keyup", function() {
     schemaName = $("#name").val();
-    updateSchemas(1);
+    updateSchemas(1, 0);
 });

 function updateDiffView()
@@ -301,7 +307,7 @@

 $("#name").bind("paste keyup", function() {
     schemaName = $("#name").val();
-    updateSchemas(1);
+    updateSchemas(1, 0);
 });

 App.Router.reopen({
@@ -324,7 +330,13 @@

 App.PageRoute = Ember.Route.extend({
     setupController: function(controller, params) {
-        updateSchemas(params.page);
+        updateSchemas(params.page, 0);
     }
 });

+App.SchemaRoute = Ember.Route.extend({
+    setupController: function(controller, params) {
+        updateSchemas(1, params.id);
+    }
+});
+
@@ -398,6 +410,81 @@
     }
 }.property('model.data.page')
 });

+App.SchemaController = Ember.Controller.extend({
+    actions: {
+        onSelect: function(dataset, data) {
+            highlightRow(dataset, data, false);
+            if (dataset && (dataset.id != 0))
+            {
+                updateTimeLine(dataset.id, false);
+            }
+        }
+    },
+    previousPage: function(){
+        var model = this.get("model");
+        if (model && model.data && model.data.page) {
+            var currentPage = model.data.page;
+            if (currentPage <= 1) {
+                return currentPage;
+            }
+            else {
+                return currentPage - 1;
+            }
+        }
+        else {
+            return 1;
+        }
+    }.property('model.data.page'),
+    nextPage: function(){
+        var model = this.get("model");
+        if (model && model.data && model.data.page) {
+            var currentPage = model.data.page;
+            var totalPages = model.data.totalPages;
+            if (currentPage >= totalPages) {
+                return totalPages;
+            }
+            else {
+                return currentPage + 1;
+            }
+        }
+        else {
+            return 1;
+        }
+    }.property('model.data.page'),
+    first: function(){
+        var model = this.get("model");
+        if (model && model.data && model.data.page) {
+            var currentPage = model.data.page;
+            if (currentPage <= 1) {
+                return true;
+            }
+            else {
+                return false;
+            }
+        }
+        else {
+            return false;
+        }
+    }.property('model.data.page'),
+    last: function(){
+        var model = this.get("model");
+        if (model && model.data && model.data.page) {
+            var currentPage = model.data.page;
+            var totalPages = model.data.totalPages;
+            if (currentPage >= totalPages) {
+                return true;
+            }
+            else {
+                return false;
+            }
+        }
+        else {
+            return false;
+        }
+    }.property('model.data.page')
+});
+
 });

 })(jQuery)