Fix dataset insert API bug. Fix load SQL bug.

This commit is contained in:
SunZhaonan 2016-03-18 15:26:55 -07:00
parent 327937486b
commit c66b00e2f6
4 changed files with 13 additions and 11 deletions

View File

@ -56,14 +56,14 @@ public class TreeBuilderActor extends UntypedActor {
in = EtlJob.class.getClassLoader().getResourceAsStream("jython/FlowTreeBuilder.py"); in = EtlJob.class.getClassLoader().getResourceAsStream("jython/FlowTreeBuilder.py");
break; break;
default: default:
Logger.warn("unknown message : {}", msg); Logger.error("unknown message : {}", msg);
} }
if (in != null) { if (in != null) {
interpreter.execfile(in); interpreter.execfile(in);
in.close(); in.close();
Logger.info("Finish build {} tree", msg); Logger.info("Finish build {} tree", msg);
} else { } else {
Logger.warn("can not find jython script"); Logger.error("can not find jython script");
} }
} else { } else {
throw new Exception("message type is not supported!"); throw new Exception("message type is not supported!");

View File

@ -54,13 +54,15 @@ public class DatasetDao {
ObjectMapper om = new ObjectMapper(); ObjectMapper om = new ObjectMapper();
om.setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES); om.setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
DatasetRecord record = om.convertValue(dataset, DatasetRecord.class); DatasetRecord record = om.convertValue(dataset, DatasetRecord.class);
Map<String, Object> refDataset = getDatasetByUrn(record.getRefDatasetUrn()); if (record.getRefDatasetUrn() != null) {
Map<String, Object> refDataset = getDatasetByUrn(record.getRefDatasetUrn());
// Find ref dataset id // Find ref dataset id
if (refDataset != null) { if (refDataset != null) {
record.setRefDatasetId((int) refDataset.get("id")); record.setRefDatasetId(((Long) refDataset.get("id")).intValue());
}
} }
// Find layout id // Find layout id
if (record.getSamplePartitionFullPath() != null) { if (record.getSamplePartitionFullPath() != null) {
PartitionPatternMatcher ppm = new PartitionPatternMatcher(PartitionLayoutDao.getPartitionLayouts()); PartitionPatternMatcher ppm = new PartitionPatternMatcher(PartitionLayoutDao.getPartitionLayouts());

View File

@ -151,9 +151,9 @@ class HdfsLoad:
or description in ('null', 'N/A', 'nothing', 'empty', 'none')); or description in ('null', 'N/A', 'nothing', 'empty', 'none'));
insert into field_comments ( insert into field_comments (
user_id, comment, created, comment_crc32_checksum user_id, comment, created, modified, comment_crc32_checksum
) )
select 0 user_id, description, now() created, crc32(description) from select 0 user_id, description, now() created, now() modified, crc32(description) from
( (
select sf.description select sf.description
from stg_dict_field_detail sf left join field_comments fc from stg_dict_field_detail sf left join field_comments fc

View File

@ -185,9 +185,9 @@ class HiveLoad:
insert into field_comments ( insert into field_comments (
user_id, comment, created, comment_crc32_checksum user_id, comment, created, modified, comment_crc32_checksum
) )
select 0 user_id, description, now() created, crc32(description) from select 0 user_id, description, now() created, now() modified, crc32(description) from
( (
select sf.description select sf.description
from stg_dict_field_detail sf left join field_comments fc from stg_dict_field_detail sf left join field_comments fc