Add the schema history link to the dataset detail page

jbai 2016-02-02 05:15:44 -08:00 committed by SunZhaonan
parent 6bbc33bc0d
commit 0ba53d0656
9 changed files with 226 additions and 19 deletions

View File

@@ -49,6 +49,20 @@ public class SchemaHistory extends Controller
         }
     }
+    Long datasetId = 0L;
+    String datasetIdStr = request().getQueryString("datasetId");
+    if (StringUtils.isNotBlank(datasetIdStr))
+    {
+        try
+        {
+            datasetId = Long.parseLong(datasetIdStr);
+        }
+        catch(NumberFormatException e)
+        {
+            datasetId = 0L;
+        }
+    }
     int size = 10;
     String sizeStr = request().getQueryString("size");
     if (StringUtils.isBlank(sizeStr))
@@ -70,7 +84,7 @@ public class SchemaHistory extends Controller
     }
     result.put("status", "ok");
-    result.set("data", SchemaHistoryDAO.getPagedSchemaDataset(name, page, size));
+    result.set("data", SchemaHistoryDAO.getPagedSchemaDataset(name, datasetId, page, size));
     return ok(result);
 }
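
As a side note, the controller change above means the schema history dataset listing now accepts an optional datasetId query parameter alongside name, page and size. A minimal sketch of a call against it, assuming a local instance on port 9000; the endpoint path comes from the front-end code later in this commit, while the host, port and the sample name/id values are placeholders:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class SchemaHistoryRequestSketch {
        public static void main(String[] args) throws Exception {
            // datasetId narrows the history listing to one dataset; a missing or
            // non-numeric value falls back to 0, i.e. the unfiltered listing.
            String url = "http://localhost:9000/api/v1/schemaHistory/datasets"
                    + "?name=tracking&datasetId=1234&page=1&size=10";
            HttpResponse<String> response = HttpClient.newHttpClient().send(
                    HttpRequest.newBuilder(URI.create(url)).GET().build(),
                    HttpResponse.BodyHandlers.ofString());
            System.out.println(response.body()); // {"status":"ok","data":{...}}
        }
    }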

View File

@@ -32,6 +32,7 @@ public class DatasetRowMapper implements RowMapper<Dataset>
     public static String DATASET_MODIFIED_TIME_COLUMN = "modified";
     public static String DATASET_PROPERTIES_COLUMN = "properties";
     public static String DATASET_SCHEMA_COLUMN = "schema";
+    public static String SCHEMA_HISTORY_ID_COLUMN = "schema_history_id";
     public static String HDFS_PREFIX = "hdfs";
@@ -44,6 +45,7 @@ public class DatasetRowMapper implements RowMapper<Dataset>
         String source = rs.getString(DATASET_SOURCE_COLUMN);
         Time created = rs.getTime(DATASET_CREATED_TIME_COLUMN);
         Time modified = rs.getTime(DATASET_MODIFIED_TIME_COLUMN);
+        Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN);
         Dataset dataset = new Dataset();
         dataset.id = id;
         dataset.name = name;
@@ -68,6 +70,15 @@ public class DatasetRowMapper implements RowMapper<Dataset>
             dataset.created = new java.util.Date(modified.getTime());
         }
+        if (schemaHistoryId != null && schemaHistoryId > 0)
+        {
+            dataset.hasSchemaHistory = true;
+        }
+        else
+        {
+            dataset.hasSchemaHistory = false;
+        }
         return dataset;
     }
 }
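
A note on the mapping above: ResultSet.getInt returns a primitive and maps SQL NULL to 0, so the boxed schemaHistoryId can never actually be null; the > 0 comparison is what drives the flag. A null-aware variant of the same assignment, sketched as a drop-in for the mapper body:

    // Sketch only: wasNull() makes the "no joined schema history row" case explicit
    // instead of relying on getInt() returning 0 for SQL NULL.
    int schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN);
    dataset.hasSchemaHistory = !rs.wasNull() && schemaHistoryId > 0;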

View File

@@ -34,6 +34,7 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
     public static String FAVORITE_DATASET_ID_COLUMN = "dataset_id";
     public static String DATASET_WATCH_ID_COLUMN = "watch_id";
     public static String DATASET_PROPERTIES_COLUMN = "properties";
+    public static String SCHEMA_HISTORY_ID_COLUMN = "schema_history_id";
     public static String HDFS_PREFIX = "hdfs";
     @Override
@@ -47,6 +48,7 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
         Time created = rs.getTime(DATASET_CREATED_TIME_COLUMN);
         Time modified = rs.getTime(DATASET_MODIFIED_TIME_COLUMN);
         Integer favoriteId = rs.getInt(FAVORITE_DATASET_ID_COLUMN);
+        Integer schemaHistoryId = rs.getInt(SCHEMA_HISTORY_ID_COLUMN);
         Long watchId = rs.getLong(DATASET_WATCH_ID_COLUMN);
         Dataset dataset = new Dataset();
         dataset.id = id;
@@ -91,6 +93,15 @@ public class DatasetWithUserRowMapper implements RowMapper<Dataset>
             dataset.isWatched = false;
         }
+        if (schemaHistoryId != null && schemaHistoryId > 0)
+        {
+            dataset.hasSchemaHistory = true;
+        }
+        else
+        {
+            dataset.hasSchemaHistory = false;
+        }
         return dataset;
     }
 }

View File

@@ -68,14 +68,21 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
         "LEFT JOIN watch w ON (d.id = w.item_id and w.item_type = 'dataset' and w.user_id = ?) " +
         "WHERE d.urn LIKE ? ORDER BY urn LIMIT ?, ?";
-    private final static String GET_DATASET_BY_ID = "SELECT id, name, urn, source, `schema`, " +
+    private final static String CHECK_SCHEMA_HISTORY = "SELECT COUNT(*) FROM dict_dataset_schema_history " +
+        "WHERE dataset_id = ? ";
+    private final static String GET_DATASET_BY_ID = "SELECT id, max(s.id) as schema_history_id, " +
+        "name, urn, source, `schema`, " +
         "FROM_UNIXTIME(source_created_time) as created, FROM_UNIXTIME(source_modified_time) as modified " +
-        "FROM dict_dataset WHERE id = ?";
+        "FROM dict_dataset d " +
+        "LEFT JOIN dict_dataset_schema_history s on (d.id = s.dataset_id) WHERE d.id = ?";
     private final static String GET_DATASET_BY_ID_CURRENT_USER = "SELECT DISTINCT d.id, " +
+        "max(s.id) as schema_history_id, " +
         "d.name, d.urn, d.source, d.schema, FROM_UNIXTIME(d.source_created_time) as created, " +
         "FROM_UNIXTIME(d.source_modified_time) as modified, f.dataset_id, w.id as watch_id FROM dict_dataset d " +
         "LEFT JOIN favorites f ON (d.id = f.dataset_id and f.user_id = ?) " +
+        "LEFT JOIN dict_dataset_schema_history s on (d.id = s.dataset_id) " +
         "LEFT JOIN watch w ON (w.item_id = d.id and w.item_type = 'dataset' and w.user_id = ?) " +
         "WHERE d.id = ?";
@@ -295,6 +302,20 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
         Integer favoriteId = (Integer)row.get(DatasetWithUserRowMapper.FAVORITE_DATASET_ID_COLUMN);
         Long watchId = (Long)row.get(DatasetWithUserRowMapper.DATASET_WATCH_ID_COLUMN);
+        Long schemaHistoryRecordCount = 0L;
+        try
+        {
+            schemaHistoryRecordCount = getJdbcTemplate().queryForObject(
+                CHECK_SCHEMA_HISTORY,
+                Long.class,
+                ds.id);
+        }
+        catch (EmptyResultDataAccessException e)
+        {
+            Logger.error("Exception = " + e.getMessage());
+        }
         if (StringUtils.isNotBlank(ds.urn))
         {
             if (ds.urn.substring(0, 4).equalsIgnoreCase(DatasetRowMapper.HDFS_PREFIX))
@@ -320,6 +341,14 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
             ds.isWatched = false;
             ds.watchId = 0L;
         }
+        if (schemaHistoryRecordCount != null && schemaHistoryRecordCount > 0)
+        {
+            ds.hasSchemaHistory = true;
+        }
+        else
+        {
+            ds.hasSchemaHistory = false;
+        }
         pagedDatasets.add(ds);
     }
@@ -358,7 +387,7 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
         if (userId != null && userId > 0)
         {
             dataset = (Dataset)getJdbcTemplate().queryForObject(
                 GET_DATASET_BY_ID_CURRENT_USER,
                 new DatasetWithUserRowMapper(),
                 userId,
                 userId,

View File

@@ -34,15 +34,25 @@ public class SchemaHistoryDAO extends AbstractMySQLOpenSourceDAO{
         "MAX(DATE_FORMAT(modified_date,'%Y-%m-%d')) as modified_date FROM dict_dataset_schema_history " +
         "WHERE dataset_id is not null GROUP BY 1 ORDER BY urn LIMIT ?, ?";
+    private final static String GET_SPECIFIED_SCHEMA_DATASET = "SELECT SQL_CALC_FOUND_ROWS " +
+        "DISTINCT dataset_id, urn, " +
+        "MAX(DATE_FORMAT(modified_date,'%Y-%m-%d')) as modified_date FROM dict_dataset_schema_history " +
+        "WHERE dataset_id = ? GROUP BY 1 ORDER BY urn LIMIT ?, ?";
     private final static String GET_PAGED_SCHEMA_DATASET_WITH_FILTER = "SELECT SQL_CALC_FOUND_ROWS " +
         "DISTINCT dataset_id, urn, DATE_FORMAT(modified_date,'%Y-%m-%d') as modified_date " +
         "FROM dict_dataset_schema_history WHERE dataset_id is not null and urn LIKE ? " +
         "GROUP BY 1 ORDER BY urn LIMIT ?, ?";
+    private final static String GET_SPECIFIED_SCHEMA_DATASET_WITH_FILTER = "SELECT SQL_CALC_FOUND_ROWS " +
+        "DISTINCT dataset_id, urn, DATE_FORMAT(modified_date,'%Y-%m-%d') as modified_date " +
+        "FROM dict_dataset_schema_history WHERE dataset_id = ? and urn LIKE ? " +
+        "GROUP BY 1 ORDER BY urn LIMIT ?, ?";
     private final static String GET_SCHEMA_HISTORY_BY_DATASET_ID = "SELECT DATE_FORMAT(modified_date,'%Y-%m-%d') " +
         "as modified_date, `schema` FROM dict_dataset_schema_history WHERE dataset_id = ? ORDER BY 1";
-    public static ObjectNode getPagedSchemaDataset(String name, int page, int size)
+    public static ObjectNode getPagedSchemaDataset(String name, Long datasetId, int page, int size)
     {
         ObjectNode result = Json.newObject();
@@ -56,20 +66,43 @@ public class SchemaHistoryDAO extends AbstractMySQLOpenSourceDAO{
         List<SchemaDataset> pagedScripts = null;
         if (StringUtils.isNotBlank(name))
         {
-            pagedScripts = getJdbcTemplate().query(
-                GET_PAGED_SCHEMA_DATASET_WITH_FILTER,
-                new SchemaDatasetRowMapper(),
-                "%" + name + "%",
-                (page - 1) * size, size);
+            if (datasetId != null && datasetId > 0)
+            {
+                pagedScripts = getJdbcTemplate().query(
+                    GET_SPECIFIED_SCHEMA_DATASET_WITH_FILTER,
+                    new SchemaDatasetRowMapper(),
+                    datasetId,
+                    "%" + name + "%",
+                    (page - 1) * size, size);
+            }
+            else
+            {
+                pagedScripts = getJdbcTemplate().query(
+                    GET_PAGED_SCHEMA_DATASET_WITH_FILTER,
+                    new SchemaDatasetRowMapper(),
+                    "%" + name + "%",
+                    (page - 1) * size, size);
+            }
         }
         else
         {
-            pagedScripts = getJdbcTemplate().query(
-                GET_PAGED_SCHEMA_DATASET,
-                new SchemaDatasetRowMapper(),
-                (page - 1) * size, size);
+            if (datasetId != null && datasetId > 0)
+            {
+                pagedScripts = getJdbcTemplate().query(
+                    GET_SPECIFIED_SCHEMA_DATASET,
+                    new SchemaDatasetRowMapper(),
+                    datasetId, (page - 1) * size, size);
+            }
+            else
+            {
+                pagedScripts = getJdbcTemplate().query(
+                    GET_PAGED_SCHEMA_DATASET,
+                    new SchemaDatasetRowMapper(),
+                    (page - 1) * size, size);
+            }
         }
         long count = 0;
         try {
             count = getJdbcTemplate().queryForObject(
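
For orientation, a brief sketch (not from the diff) of driving the widened DAO signature; the dataset name and id are placeholders, and ObjectNode is the Jackson node type the method already returns:

    // A positive datasetId routes to the new GET_SPECIFIED_SCHEMA_DATASET* queries and
    // restricts the listing to that one dataset; 0 keeps the original unfiltered paging.
    ObjectNode filtered   = SchemaHistoryDAO.getPagedSchemaDataset("tracking", 1234L, 1, 10);
    ObjectNode unfiltered = SchemaHistoryDAO.getPagedSchemaDataset("tracking", 0L, 1, 10);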

View File

@@ -30,6 +30,7 @@ public class Dataset {
     public boolean isFavorite;
     public long watchId;
     public boolean isWatched;
+    public boolean hasSchemaHistory;
     public JsonNode properties;
 }

View File

@@ -1156,6 +1156,16 @@
         </span>
       </a>
     </li>
+    {{#if model.hasSchemaHistory}}
+    <li>
+      <a target="_blank" {{bind-attr href=schemaHistoryUrl}}>
+        <i class="fa fa-history"></i>
+        <span class="hidden-sm hidden-xs">
+          Schema History
+        </span>
+      </a>
+    </li>
+    {{/if}}
     <li>
       {{#dataset-watch dataset=model getDatasets="getDataset"}}
       {{/dataset-watch}}

View File

@@ -186,6 +186,17 @@ App.DatasetController = Ember.Controller.extend({
     return '';
   }.property('model.id'),
+  schemaHistoryUrl: function(){
+    var model = this.get("model");
+    if (model)
+    {
+      if (model.id)
+      {
+        return '/schemaHistory#/schemas/' + model.id;
+      }
+    }
+    return '';
+  }.property('model.id'),
   adjustPanes: function() {
     var hasProperty = this.get('hasProperty')
     var isHDFS = this.get('isHDFS')

View File

@@ -13,6 +13,7 @@
   App.Router.map(function() {
     this.resource('schemas', function(){
       this.resource('page', {path: '/page/:page'});
+      this.resource('schema', {path: '/:id'});
     });
   });
@@ -39,7 +40,7 @@
   var schemaData = [];
   var skipChangeEvent = false;
-  function updateSchemas(page)
+  function updateSchemas(page, datasetId)
   {
     var url;
     if (!schemaName)
@@ -51,6 +52,11 @@
       url = '/api/v1/schemaHistory/datasets?name=' + schemaName + '&size=10&page=' + page;
     }
+    if (datasetId && datasetId > 0)
+    {
+      url += '&datasetId=' + datasetId;
+    }
     $.get(url, function(data) {
       if (data && data.status == "ok"){
         if (schemasController)
@@ -67,7 +73,7 @@
   $("#name").bind("paste keyup", function() {
     schemaName = $("#name").val();
-    updateSchemas(1);
+    updateSchemas(1, 0);
   });
   function updateDiffView()
@@ -301,7 +307,7 @@
   $("#name").bind("paste keyup", function() {
     schemaName = $("#name").val();
-    updateSchemas(1);
+    updateSchemas(1, 0);
   });
   App.Router.reopen({
@@ -324,7 +330,13 @@
   App.PageRoute = Ember.Route.extend({
     setupController: function(controller, params) {
-      updateSchemas(params.page);
+      updateSchemas(params.page, 0);
+    }
+  });
+  App.SchemaRoute = Ember.Route.extend({
+    setupController: function(controller, params) {
+      updateSchemas(1, params.id);
     }
   });
@@ -398,6 +410,81 @@
       }
     }.property('model.data.page')
   });
+  App.SchemaController = Ember.Controller.extend({
+    actions: {
+      onSelect: function(dataset, data) {
+        highlightRow(dataset, data, false);
+        if (dataset && (dataset.id != 0))
+        {
+          updateTimeLine(dataset.id, false);
+        }
+      }
+    },
+    previousPage: function(){
+      var model = this.get("model");
+      if (model && model.data && model.data.page) {
+        var currentPage = model.data.page;
+        if (currentPage <= 1) {
+          return currentPage;
+        }
+        else {
+          return currentPage - 1;
+        }
+      }
+      else {
+        return 1;
+      }
+    }.property('model.data.page'),
+    nextPage: function(){
+      var model = this.get("model");
+      if (model && model.data && model.data.page) {
+        var currentPage = model.data.page;
+        var totalPages = model.data.totalPages;
+        if (currentPage >= totalPages) {
+          return totalPages;
+        }
+        else {
+          return currentPage + 1;
+        }
+      }
+      else {
+        return 1;
+      }
+    }.property('model.data.page'),
+    first: function(){
+      var model = this.get("model");
+      if (model && model.data && model.data.page) {
+        var currentPage = model.data.page;
+        if (currentPage <= 1) {
+          return true;
+        }
+        else {
+          return false;
+        }
+      }
+      else {
+        return false;
+      }
+    }.property('model.data.page'),
+    last: function(){
+      var model = this.get("model");
+      if (model && model.data && model.data.page) {
+        var currentPage = model.data.page;
+        var totalPages = model.data.totalPages;
+        if (currentPage >= totalPages) {
+          return true;
+        }
+        else {
+          return false;
+        }
+      }
+      else {
+        return false;
+      }
+    }.property('model.data.page')
+  });
 });
 })(jQuery)