Mirror of https://github.com/datahub-project/datahub.git (synced 2025-08-22 16:18:10 +00:00)

Merge pull request #126 from jerrybai2009/master
Support Elasticsearch as the search engine

Commit 8a9eeb1bb8

data-model/ELASTICSEARCH/index_mappings.md (new file, 43 lines)
@@ -0,0 +1,43 @@
curl -XPUT '$YOUR_INDEX_URL:9200/wherehows' -d '
{
  "mappings": {
    "dataset": {},
    "comment": {
      "_parent": {
        "type": "dataset"
      }
    },
    "field": {
      "_parent": {
        "type": "dataset"
      }
    }
  }
}
'

curl -XPUT '$YOUR_INDEX_URL:9200/wherehows/flow_jobs/_mapping' -d '
{
  "flow_jobs": {
    "properties": {
      "jobs": {
        "type": "nested",
        "properties": {
          "job_name": { "type": "string" },
          "job_path": { "type": "string" },
          "job_type": { "type": "string" },
          "pre_jobs": { "type": "string" },
          "post_jobs": { "type": "string" },
          "is_current": { "type": "string" },
          "is_first": { "type": "string" },
          "is_last": { "type": "string" },
          "job_type_id": { "type": "short" },
          "app_id": { "type": "short" },
          "flow_id": { "type": "long" },
          "job_id": { "type": "long" }
        }
      }
    }
  }
}
'
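The first command creates the wherehows index with parent/child mappings (comment and field documents are children of dataset documents); the second adds a flow_jobs type whose jobs field is a nested mapping. As a minimal sketch of what the _parent mapping implies for indexing: a child document must be sent with its parent's id so it is routed to the parent's shard. The URL, ids, and values below are hypothetical.

    # Index a comment as a child of dataset 42 (hypothetical ids/values).
    # The ?parent= query parameter is required for types with a _parent mapping.
    import json
    import urllib2

    ES_URL = 'http://localhost:9200'  # assumption: a local Elasticsearch node

    doc = {"text": "sample comment", "user_id": 1,
           "dataset_id": 42, "comment_type": "Description"}
    req = urllib2.Request(url=ES_URL + '/wherehows/comment/1?parent=42',
                          data=json.dumps(doc))
    req.add_header('Content-type', 'application/json')
    req.get_method = lambda: 'PUT'
    print urllib2.urlopen(req).read()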
@@ -2,6 +2,7 @@ sourceSets {
   main {
     resources {
       srcDir 'DDL'
+      srcDir 'ELASTICSEARCH'
     }
   }
 }
ElasticSearchBuildIndexETL.java (new file, 52 lines)
@@ -0,0 +1,52 @@
/**
 * Copyright 2015 LinkedIn Corp. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 */
package metadata.etl.elasticsearch;

import java.io.InputStream;
import java.util.Properties;
import metadata.etl.EtlJob;


public class ElasticSearchBuildIndexETL extends EtlJob {

  public ElasticSearchBuildIndexETL(int appId, long whExecId) {
    super(appId, null, whExecId);
  }

  public ElasticSearchBuildIndexETL(int appId, long whExecId, Properties properties) {
    super(appId, null, whExecId, properties);
  }

  @Override
  public void extract()
      throws Exception {
    logger.info("In ElasticSearchBuildIndexETL java launch extract jython scripts");
  }

  @Override
  public void transform()
      throws Exception {
    logger.info("In ElasticSearchBuildIndexETL java launch transform jython scripts");
  }

  @Override
  public void load()
      throws Exception {
    logger.info("In ElasticSearchBuildIndexETL java launch load jython scripts");
    InputStream inputStream = classLoader.getResourceAsStream("jython/ElasticSearchIndex.py");
    interpreter.execfile(inputStream);
    inputStream.close();
    logger.info("In ElasticSearchBuildIndexETL java load jython scripts finished");
  }
}
EtlJobFactory.java
@@ -18,6 +18,7 @@ import metadata.etl.EtlJob;
 import metadata.etl.dataset.hdfs.HdfsMetadataEtl;
 import metadata.etl.dataset.hive.HiveMetadataEtl;
 import metadata.etl.dataset.teradata.TeradataMetadataEtl;
+import metadata.etl.elasticsearch.ElasticSearchBuildIndexETL;
 import metadata.etl.git.GitMetadataEtl;
 import metadata.etl.lineage.AzLineageMetadataEtl;
 import metadata.etl.ownership.DatasetOwnerEtl;
@@ -52,6 +53,8 @@ public class EtlJobFactory {
       return new GitMetadataEtl(refId, whExecId, properties);
     case HIVE_DATASET_METADATA_ETL:
       return new HiveMetadataEtl(refId, whExecId, properties);
+    case ELASTICSEARCH_EXECUTION_INDEX_ETL:
+      return new ElasticSearchBuildIndexETL(refId, whExecId, properties);
     default:
       throw new UnsupportedOperationException("Unsupported job type: " + etlJobName);
   }
EtlJobName.java
@@ -26,6 +26,7 @@ public enum EtlJobName {
   LDAP_USER_ETL(EtlType.LDAP, RefIdType.APP),
   GIT_MEDATA_ETL(EtlType.VCS, RefIdType.APP),
   HIVE_DATASET_METADATA_ETL(EtlType.DATASET, RefIdType.DB),
+  ELASTICSEARCH_EXECUTION_INDEX_ETL(EtlType.OPERATION, RefIdType.APP),
   ;

   EtlType etlType;
DatasetTreeBuilder (jython script)
@@ -15,6 +15,9 @@
 import sys
 from com.ziclix.python.sql import zxJDBC
 from wherehows.common import Constant
+from ElasticSearchIndex import ElasticSearchIndex
+from datetime import datetime
+import calendar
 import json
 import shutil

@@ -79,3 +82,7 @@ class DatasetTreeBuilder:
 if __name__ == "__main__":
   d = DatasetTreeBuilder(sys.argv[1])
   d.run()
+  esi = ElasticSearchIndex(sys.argv[1])
+  d = datetime.utcnow()
+  unixtime = calendar.timegm(d.utctimetuple())
+  esi.update_dataset(unixtime)
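The new tail of the script feeds the current UTC epoch time into update_dataset() so the index refresh can be scoped to recently modified rows. For reference, calendar.timegm() interprets a UTC struct_time as seconds since the epoch, whereas time.mktime() would wrongly assume local time:

    # Current UTC time as epoch seconds, as used above.
    import calendar
    from datetime import datetime

    d = datetime.utcnow()
    unixtime = calendar.timegm(d.utctimetuple())
    print unixtime  # e.g. 1440000000 (illustrative value)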
metadata-etl/src/main/resources/jython/ElasticSearchIndex.py (new file, 288 lines)
@@ -0,0 +1,288 @@
#
# Copyright 2015 LinkedIn Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#

from wherehows.common import Constant
from com.ziclix.python.sql import zxJDBC
import DbUtil
import sys
import json
import urllib
import urllib2
from org.slf4j import LoggerFactory


class ElasticSearchIndex():
  def __init__(self, args):
    self.logger = LoggerFactory.getLogger('jython script : ' + self.__class__.__name__)
    self.app_id = int(args[Constant.APP_ID_KEY])
    self.elasticsearch_index_url = args[Constant.WH_ELASTICSEARCH_URL_KEY]
    self.elasticsearch_port = args[Constant.WH_ELASTICSEARCH_PORT_KEY]
    self.wh_exec_id = long(args[Constant.WH_EXEC_ID_KEY])
    self.wh_con = zxJDBC.connect(args[Constant.WH_DB_URL_KEY],
                                 args[Constant.WH_DB_USERNAME_KEY],
                                 args[Constant.WH_DB_PASSWORD_KEY],
                                 args[Constant.WH_DB_DRIVER_KEY])
    self.wh_cursor = self.wh_con.cursor()

  def bulk_insert(self, params, url):
    try:
      req = urllib2.Request(url=url)
      req.add_header('Content-type', 'application/json')
      req.get_method = lambda: "PUT"
      req.add_data('\n'.join(params) + '\n')
      response = urllib2.urlopen(req)
      data = json.load(response)
      if str(data['errors']) != 'False':
        self.logger.info(str(data))
    except urllib2.HTTPError as e:
      self.logger.error(str(e.code))
      self.logger.error(e.read())

  def update_dataset_field(self, last_time):
    if last_time:
      sql = """
        SELECT * FROM dict_field_detail WHERE modified >= DATE_SUB(%s, INTERVAL 1 HOUR)
        """ % last_time
    else:
      sql = """
        SELECT * FROM dict_field_detail
        """

    comment_query = """
      SELECT d.field_id, d.dataset_id, f.comment FROM dict_dataset_field_comment d
      LEFT JOIN field_comments f ON d.comment_id = f.id WHERE d.field_id = %d
      """
    url = self.elasticsearch_index_url + ':' + str(self.elasticsearch_port) + '/wherehows/field/_bulk'
    params = []
    self.wh_cursor.execute(sql)
    rows = DbUtil.copy_dict_cursor(self.wh_cursor)
    row_count = 1
    for row in rows:
      self.wh_cursor.execute(comment_query % long(row['field_id']))
      comments = []
      comment_rows = DbUtil.copy_dict_cursor(self.wh_cursor)
      for comment_row in comment_rows:
        comments.append(comment_row['comment'])
      params.append('{ "index": { "_id": ' +
                    str(row['field_id']) + ', "parent": ' + str(row['dataset_id']) + ' }}')
      if len(comments) > 0:
        params.append(
          """{ "comments": %s, "dataset_id": %d, "sort_id": %d, "field_name": "%s", "parent_path": "%s"}"""
          % (json.dumps(comments) if comments else '', row['dataset_id'] if row['dataset_id'] else 0,
             row['sort_id'] if row['sort_id'] else 0,
             row['field_name'] if row['field_name'] else '', row['parent_path'] if row['parent_path'] else ''))
      else:
        params.append(
          """{ "comments": "", "dataset_id": %d, "sort_id": %d, "field_name": "%s", "parent_path": "%s"}"""
          % (row['dataset_id'] if row['dataset_id'] else 0, row['sort_id'] if row['sort_id'] else 0,
             row['field_name'] if row['field_name'] else '', row['parent_path'] if row['parent_path'] else ''))
      if row_count % 1000 == 0:
        self.bulk_insert(params, url)
        params = []
      row_count += 1
    if len(params) > 0:
      self.bulk_insert(params, url)

  def update_comment(self, last_time):
    if last_time:
      sql = """
        SELECT * FROM comments WHERE modified >= DATE_SUB(%s, INTERVAL 1 HOUR)
        """ % last_time
    else:
      sql = """
        SELECT * FROM comments
        """

    url = self.elasticsearch_index_url + ':' + str(self.elasticsearch_port) + '/wherehows/comment/_bulk'
    params = []
    self.wh_cursor.execute(sql)
    rows = DbUtil.copy_dict_cursor(self.wh_cursor)
    row_count = 1
    for row in rows:
      params.append('{ "index": { "_id": ' + str(row['id']) + ', "parent": ' + str(row['dataset_id']) + ' }}')
      params.append(
        """{ "text": %s, "user_id": %d, "dataset_id": %d, "comment_type": "%s"}"""
        % (json.dumps(row['text']) if row['text'] else '', row['user_id'] if row['user_id'] else 0,
           row['dataset_id'] if row['dataset_id'] else 0, row['comment_type'] if row['comment_type'] else ''))
      if row_count % 1000 == 0:
        self.bulk_insert(params, url)
        params = []
      row_count += 1
    if len(params) > 0:
      self.bulk_insert(params, url)

  def update_dataset(self, last_unixtime):
    if last_unixtime:
      sql = """
        SELECT * FROM dict_dataset WHERE from_unixtime(modified_time) >= DATE_SUB(from_unixtime(%f), INTERVAL 1 HOUR)
        """ % last_unixtime
    else:
      sql = """
        SELECT * FROM dict_dataset
        """
    url = self.elasticsearch_index_url + ':' + str(self.elasticsearch_port) + '/wherehows/dataset/_bulk'
    params = []
    self.wh_cursor.execute(sql)
    rows = DbUtil.copy_dict_cursor(self.wh_cursor)
    row_count = 1
    for row in rows:
      params.append('{ "index": { "_id": ' + str(row['id']) + ' }}')
      params.append(
        """{ "name": "%s", "source": "%s", "urn": "%s", "location_prefix": "%s", "parent_name": "%s",
        "schema_type": "%s", "properties": %s, "schema": %s , "fields": %s}"""
        % (row['name'] if row['name'] else '', row['source'] if row['source'] else '',
           row['urn'] if row['urn'] else '', row['location_prefix'] if row['location_prefix'] else '',
           row['parent_name'] if row['parent_name'] else '', row['schema_type'] if row['schema_type'] else '',
           json.dumps(row['properties']) if row['properties'] else '',
           json.dumps(row['schema']) if row['schema'] else '', json.dumps(row['fields']) if row['fields'] else ''))
      if row_count % 1000 == 0:
        self.bulk_insert(params, url)
        params = []
      row_count += 1
    if len(params) > 0:
      self.bulk_insert(params, url)

  def update_metric(self):
    sql = """
      SELECT * FROM dict_business_metric
      """
    url = self.elasticsearch_index_url + ':' + str(self.elasticsearch_port) + '/wherehows/metric/_bulk'
    params = []
    self.wh_cursor.execute(sql)
    rows = DbUtil.copy_dict_cursor(self.wh_cursor)
    row_count = 1
    for row in rows:
      params.append('{ "index": { "_id": ' + str(row['metric_id']) + ' }}')
      params.append(
        """{"metric_id": %d, "metric_name": %s, "metric_description": %s, "dashboard_name": %s,
        "metric_group": %s, "metric_category": %s, "metric_sub_category": %s, "metric_level": %s,
        "metric_source_type": %s, "metric_source": %s, "metric_source_dataset_id": %d,
        "metric_ref_id_type": %s, "metric_ref_id": %s, "metric_type": %s, "metric_additive_type": %s,
        "metric_grain": %s, "metric_display_factor": %f, "metric_display_factor_sym": %s,
        "metric_good_direction": %s, "metric_formula": %s, "dimensions": %s, "owners": %s, "tags": %s,
        "urn": %s, "metric_url": %s, "wiki_url": %s, "scm_url": %s}"""
        % (row['metric_id'], json.dumps(row['metric_name']) if row['metric_name'] else json.dumps(''),
           json.dumps(row['metric_description']) if row['metric_description'] else json.dumps(''),
           json.dumps(row['dashboard_name']) if row['dashboard_name'] else json.dumps(''),
           json.dumps(row['metric_group']) if row['metric_group'] else json.dumps(''),
           json.dumps(row['metric_category']) if row['metric_category'] else json.dumps(''),
           json.dumps(row['metric_sub_category']) if row['metric_sub_category'] else json.dumps(''),
           json.dumps(row['metric_level']) if row['metric_level'] else json.dumps(''),
           json.dumps(row['metric_source_type']) if row['metric_source_type'] else json.dumps(''),
           json.dumps(row['metric_source']) if row['metric_source'] else json.dumps(''),
           row['metric_source_dataset_id'] if row['metric_source_dataset_id'] else 0,
           json.dumps(row['metric_ref_id_type']) if row['metric_ref_id_type'] else json.dumps(''),
           json.dumps(row['metric_ref_id']) if row['metric_ref_id'] else json.dumps(''),
           json.dumps(row['metric_type']) if row['metric_type'] else json.dumps(''),
           json.dumps(row['metric_additive_type']) if row['metric_additive_type'] else json.dumps(''),
           json.dumps(row['metric_grain']) if row['metric_grain'] else json.dumps(''),
           row['metric_display_factor'] if row['metric_display_factor'] else 0.0,
           json.dumps(row['metric_display_factor_sym']) if row['metric_display_factor_sym'] else json.dumps(''),
           json.dumps(row['metric_good_direction']) if row['metric_good_direction'] else json.dumps(''),
           json.dumps(row['metric_formula']) if row['metric_formula'] else json.dumps(''),
           json.dumps(row['dimensions']) if row['dimensions'] else json.dumps(''),
           json.dumps(row['owners']) if row['owners'] else json.dumps(''),
           json.dumps(row['tags']) if row['tags'] else json.dumps(''),
           json.dumps(row['urn']) if row['urn'] else json.dumps(''),
           json.dumps(row['metric_url']) if row['metric_url'] else json.dumps(''),
           json.dumps(row['wiki_url']) if row['wiki_url'] else json.dumps(''),
           json.dumps(row['scm_url']) if row['scm_url'] else json.dumps('')))
      if row_count % 1000 == 0:
        self.bulk_insert(params, url)
        params = []
      row_count += 1
    if len(params) > 0:
      self.bulk_insert(params, url)

  def update_flow_jobs(self, last_unixtime):
    if last_unixtime:
      flow_sql = """
        SELECT a.app_code, f.* FROM flow f JOIN cfg_application a on f.app_id = a.app_id
        WHERE from_unixtime(modified_time) >= DATE_SUB(from_unixtime(%f), INTERVAL 1 HOUR)
        """ % last_unixtime
    else:
      flow_sql = """
        SELECT a.app_code, f.* FROM flow f JOIN cfg_application a on f.app_id = a.app_id
        """
    job_sql = """
      SELECT * FROM flow_job WHERE app_id = %d and flow_id = %d
      """
    url = self.elasticsearch_index_url + ':' + str(self.elasticsearch_port) + '/wherehows/flow_jobs/_bulk'
    params = []
    self.wh_cursor.execute(flow_sql)
    rows = DbUtil.copy_dict_cursor(self.wh_cursor)
    row_count = 1
    for row in rows:
      self.wh_cursor.execute(job_sql % (long(row['app_id']), long(row['flow_id'])))
      jobs = []
      job_rows = DbUtil.copy_dict_cursor(self.wh_cursor)
      if job_rows:
        for job_row in job_rows:
          jobs.append({"app_id": job_row['app_id'], "flow_id": job_row['flow_id'], "job_id": job_row['job_id'],
                       "job_name": job_row['job_name'] if job_row['job_name'] else '',
                       "job_path": job_row['job_path'] if job_row['job_path'] else '',
                       "job_type_id": job_row['job_type_id'],
                       "job_type": job_row['job_type'] if job_row['job_type'] else '',
                       "pre_jobs": job_row['pre_jobs'] if job_row['pre_jobs'] else '',
                       "post_jobs": job_row['post_jobs'] if job_row['post_jobs'] else '',
                       "is_current": job_row['is_current'] if job_row['is_current'] else '',
                       "is_first": job_row['is_first'] if job_row['is_first'] else '',
                       "is_last": job_row['is_last'] if job_row['is_last'] else ''})

      params.append('{ "index": { "_id": ' + str(long(row['flow_id'])*10000 + long(row['app_id'])) + ' }}')
      if len(jobs) > 0:
        params.append(
          """{"app_id": %d, "flow_id": %d, "app_code": "%s", "flow_name": "%s", "flow_group": "%s",
          "flow_path": "%s", "flow_level": %d, "is_active": "%s", "is_scheduled": "%s",
          "pre_flows": "%s", "jobs": %s}"""
          % (row['app_id'], row['flow_id'], row['app_code'] if row['app_code'] else '',
             row['flow_name'] if row['flow_name'] else '', row['flow_group'] if row['flow_group'] else '',
             row['flow_path'] if row['flow_path'] else '', row['flow_level'],
             row['is_active'] if row['is_active'] else '', row['is_scheduled'] if row['is_scheduled'] else '',
             row['pre_flows'] if row['pre_flows'] else '', json.dumps(jobs)))
      else:
        params.append(
          """{"app_id": %d, "flow_id": %d, "app_code": "%s", "flow_name": "%s", "flow_group": "%s",
          "flow_path": "%s", "flow_level": %d, "is_active": "%s",
          "is_scheduled": "%s", "pre_flows": "%s", "jobs": ""}"""
          % (row['app_id'], row['flow_id'], row['app_code'] if row['app_code'] else '',
             row['flow_name'] if row['flow_name'] else '', row['flow_group'] if row['flow_group'] else '',
             row['flow_path'] if row['flow_path'] else '', row['flow_level'],
             row['is_active'] if row['is_active'] else '', row['is_scheduled'] if row['is_scheduled'] else '',
             row['pre_flows'] if row['pre_flows'] else ''))
      if row_count % 1000 == 0:
        self.bulk_insert(params, url)
        self.logger.info(str(row_count))
        params = []
      row_count += 1
    if len(params) > 0:
      self.logger.info(str(len(params)))
      self.bulk_insert(params, url)
  def run(self):
    try:
      # Full rebuild: passing None skips the incremental time filter in each
      # update method (the original called these without the required argument).
      self.update_dataset(None)
      self.update_comment(None)
      self.update_dataset_field(None)
      self.update_flow_jobs(None)
      self.update_metric()
    finally:
      self.wh_cursor.close()
      self.wh_con.close()


if __name__ == "__main__":
  props = sys.argv[1]
  esi = ElasticSearchIndex(props)
  esi.run()
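For reference, bulk_insert() above speaks the Elasticsearch bulk API: the body is newline-delimited JSON, one action line (index, with optional _id and parent) followed by one source line per document, and it must end with a trailing newline. That is exactly why params is joined as '\n'.join(params) + '\n'. A minimal sketch of such a body, with hypothetical ids and values:

    import json

    params = [
        json.dumps({"index": {"_id": 42, "parent": 7}}),   # action line
        json.dumps({"comments": ["primary key"], "dataset_id": 7,
                    "sort_id": 0, "field_name": "member_id",
                    "parent_path": ""}),                    # source line
    ]
    body = '\n'.join(params) + '\n'
    print body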
FlowTreeBuilder (jython script)
@@ -15,6 +15,9 @@
 import sys
 from com.ziclix.python.sql import zxJDBC
 from wherehows.common import Constant
+from ElasticSearchIndex import ElasticSearchIndex
+from datetime import datetime
+import calendar
 import json
 import shutil

@@ -82,3 +85,7 @@ class FlowTreeBuilder:
 if __name__ == "__main__":
   ftb = FlowTreeBuilder(sys.argv[1])
   ftb.run()
+  esi = ElasticSearchIndex(sys.argv[1])
+  d = datetime.utcnow()
+  unixtime = calendar.timegm(d.utctimetuple())
+  esi.update_flow_jobs(unixtime)
ElasticSearchBuildIndexEtlTest.java (new file, 49 lines)
@@ -0,0 +1,49 @@
/**
 * Copyright 2015 LinkedIn Corp. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 */
package metadata.etl.elasticsearch;

import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;


public class ElasticSearchBuildIndexEtlTest {
  ElasticSearchBuildIndexETL esbi;

  @BeforeTest
  public void setUp()
      throws Exception {
    esbi = new ElasticSearchBuildIndexETL(33, 0L);
  }

  @Test(groups = {"needConfig"})
  public void testExtract() throws Exception {
    esbi.extract();
  }

  @Test(groups = {"needConfig"})
  public void testTransform() throws Exception {
    esbi.transform();
  }

  @Test(groups = {"needConfig"})
  public void testLoad() throws Exception {
    esbi.load();
  }

  @Test(groups = {"needConfig"})
  public void testRun() throws Exception {
    esbi.run();
  }
}
AdvSearch.java
@@ -16,8 +16,10 @@ package controllers.api.v1;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import dao.AdvSearchDAO;
+import dao.SearchDAO;
 import org.apache.commons.lang3.StringUtils;
 import play.Logger;
+import play.Play;
 import play.libs.Json;
 import play.mvc.Controller;
 import play.mvc.Result;
@@ -137,17 +139,46 @@ public class AdvSearch extends Controller
         }
       }
       result.put("status", "ok");
+      String searchEngine = Play.application().configuration().getString(SearchDAO.WHEREHOWS_SEARCH_ENGINE__KEY);
+
       if (searchOpt != null && searchOpt.has("category"))
       {
         String category = searchOpt.get("category").asText();
         if(category.equalsIgnoreCase("flow"))
         {
-          result.set("result", Json.toJson(AdvSearchDAO.searchFlows(searchOpt, page, size)));
+          if(StringUtils.isNotBlank(searchEngine) && searchEngine.equalsIgnoreCase("elasticsearch"))
+          {
+            result.set("result", Json.toJson(AdvSearchDAO.elasticSearchFlowJobs(searchOpt, page, size)));
+          }
+          else
+          {
+            result.set("result", Json.toJson(AdvSearchDAO.searchFlows(searchOpt, page, size)));
+          }
           return ok(result);
         }
+        else if(category.equalsIgnoreCase("metric"))
+        {
+          if(StringUtils.isNotBlank(searchEngine) && searchEngine.equalsIgnoreCase("elasticsearch"))
+          {
+            result.set("result", Json.toJson(AdvSearchDAO.elasticSearchMetric(searchOpt, page, size)));
+          }
+          else
+          {
+            result.set("result", Json.toJson(AdvSearchDAO.searchMetrics(searchOpt, page, size)));
+          }
+          return ok(result);
+        }
+
       }

-      result.set("result", Json.toJson(AdvSearchDAO.search(searchOpt, page, size)));
+      if(StringUtils.isNotBlank(searchEngine) && searchEngine.equalsIgnoreCase("elasticsearch"))
+      {
+        result.set("result", Json.toJson(AdvSearchDAO.elasticSearch(searchOpt, page, size)));
+      }
+      else
+      {
+        result.set("result", Json.toJson(AdvSearchDAO.search(searchOpt, page, size)));
+      }
+
       return ok(result);
     }
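The block above, repeated per category, only routes to Elasticsearch when the configured engine is exactly the string "elasticsearch"; a blank or unset value falls back to the existing MySQL-backed DAOs. A small illustration of that toggle, with the Play configuration lookup abstracted away:

    # Illustration of the controllers' backend toggle (not the Play code itself):
    # blank/unset values and any other engine name fall back to MySQL.
    def pick_backend(search_engine):
        if search_engine and search_engine.strip().lower() == 'elasticsearch':
            return 'elasticsearch'
        return 'mysql'

    assert pick_backend(None) == 'mysql'
    assert pick_backend('ElasticSearch') == 'elasticsearch'
    assert pick_backend('solr') == 'mysql'  # hypothetical other engine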
Dataset.java
@@ -32,6 +32,15 @@ import java.util.Map;

 public class Dataset extends Controller
 {
+  public static Result getDatasetOwnerTypes()
+  {
+    ObjectNode result = Json.newObject();
+
+    result.put("status", "ok");
+    result.set("ownerTypes", Json.toJson(DatasetsDAO.getDatasetOwnerTypes()));
+    return ok(result);
+  }
+
   public static Result getPagedDatasets()
   {
     ObjectNode result = Json.newObject();
Search.java
@@ -17,6 +17,7 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import dao.SearchDAO;
 import models.DatasetColumn;
+import play.Play;
 import play.api.libs.json.JsValue;
 import play.libs.Json;
 import play.mvc.Controller;
@@ -92,51 +93,71 @@ public class Search extends Controller
     {
       category = "datasets";
     }
-    if (StringUtils.isBlank(source))
+    if (StringUtils.isBlank(source) || source.equalsIgnoreCase("all") || source.equalsIgnoreCase("default"))
     {
-      source = "all";
-    }
-    else if (source.equalsIgnoreCase("default"))
-    {
-      source = "all";
-      isDefault = true;
+      source = null;
     }
+
+    String searchEngine = Play.application().configuration().getString(SearchDAO.WHEREHOWS_SEARCH_ENGINE__KEY);
+
     if (category.toLowerCase().equalsIgnoreCase("metric"))
     {
-      result.set("result", SearchDAO.getPagedMetricByKeyword(category, keyword, page, size));
+      if(StringUtils.isNotBlank(searchEngine) && searchEngine.equalsIgnoreCase("elasticsearch"))
+      {
+        result.set("result", SearchDAO.elasticSearchMetricByKeyword(category, keyword, page, size));
+      }
+      else
+      {
+        result.set("result", SearchDAO.getPagedMetricByKeyword(category, keyword, page, size));
+      }
     }
     else if (category.toLowerCase().equalsIgnoreCase("flows"))
     {
-      result.set("result", SearchDAO.getPagedFlowByKeyword(category, keyword, page, size));
+      if(StringUtils.isNotBlank(searchEngine) && searchEngine.equalsIgnoreCase("elasticsearch"))
+      {
+        result.set("result", SearchDAO.elasticSearchFlowByKeyword(category, keyword, page, size));
+      }
+      else
+      {
+        result.set("result", SearchDAO.getPagedFlowByKeyword(category, keyword, page, size));
+      }
     }
     else if (category.toLowerCase().equalsIgnoreCase("jobs"))
     {
-      result.set("result", SearchDAO.getPagedJobByKeyword(category, keyword, page, size));
+      if(StringUtils.isNotBlank(searchEngine) && searchEngine.equalsIgnoreCase("elasticsearch"))
+      {
+        result.set("result", SearchDAO.elasticSearchFlowByKeyword(category, keyword, page, size));
+      }
+      else
+      {
+        result.set("result", SearchDAO.getPagedJobByKeyword(category, keyword, page, size));
+      }
     }
     else if (category.toLowerCase().equalsIgnoreCase("comments"))
     {
-      result.set("result", SearchDAO.getPagedCommentsByKeyword(category, keyword, page, size));
+      if(StringUtils.isNotBlank(searchEngine) && searchEngine.equalsIgnoreCase("elasticsearch"))
+      {
+        result.set("result", SearchDAO.elasticSearchDatasetByKeyword(category, keyword, null, page, size));
+      }
+      else
+      {
+        result.set("result", SearchDAO.getPagedCommentsByKeyword(category, keyword, page, size));
+      }
     }
     else
     {
-      ObjectNode node = SearchDAO.getPagedDatasetByKeyword(category, keyword, source, page, size);
-      if (isDefault && node != null && node.has("count"))
+      if(StringUtils.isNotBlank(searchEngine) && searchEngine.equalsIgnoreCase("elasticsearch"))
       {
-        Long count = node.get("count").asLong();
-        if (count != null && count == 0)
-        {
-          node = SearchDAO.getPagedFlowByKeyword("flows", keyword, page, size);
-          if (node!= null && node.has("count"))
-          {
-            Long flowCount = node.get("count").asLong();
-            if (flowCount != null && flowCount == 0)
-            {
-              node = SearchDAO.getPagedJobByKeyword("jobs", keyword, page, size);
-            }
-          }
-        }
+        result.set("result", SearchDAO.elasticSearchDatasetByKeyword(category, keyword, source, page, size));
       }
-      result.set("result", node);
+      else
+      {
+        result.set("result", SearchDAO.getPagedDatasetByKeyword(category, keyword, source, page, size));
+      }
     }

     return ok(result);
AdvSearchDAO.java
@@ -17,6 +17,7 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import models.Dataset;
 import models.FlowJob;
+import models.Metric;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.dao.EmptyResultDataAccessException;
 import org.springframework.jdbc.core.JdbcTemplate;
@@ -26,12 +27,12 @@ import org.springframework.transaction.TransactionStatus;
 import org.springframework.transaction.support.TransactionCallback;
 import org.springframework.transaction.support.TransactionTemplate;
 import play.Logger;
+import play.Play;
+import play.libs.F;
 import play.libs.Json;
-import java.util.Arrays;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
+import play.libs.WS;
+import java.util.*;

 public class AdvSearchDAO extends AbstractMySQLOpenSourceDAO
 {
@@ -100,6 +101,14 @@ public class AdvSearchDAO extends AbstractMySQLOpenSourceDAO
     "FROM flow_job j JOIN flow f on j.app_id = f.app_id AND j.flow_id = f.flow_id " +
     "JOIN cfg_application a on j.app_id = a.app_id ";

+  public final static String ADV_SEARCH_METRIC = "SELECT SQL_CALC_FOUND_ROWS metric_id, " +
+    "metric_name, metric_description, dashboard_name, metric_group, metric_category, " +
+    "metric_sub_category, metric_level, metric_source_type, metric_source, " +
+    "metric_source_dataset_id, metric_ref_id_type, metric_ref_id, metric_type, metric_grain, " +
+    "metric_display_factor, metric_display_factor_sym, metric_good_direction, " +
+    "metric_formula, dimensions, owners, tags, urn, metric_url, wiki_url, scm_url, 0 as watch_id " +
+    "FROM dict_business_metric ";
+
+
   public static List<String> getDatasetSources()
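ADV_SEARCH_METRIC uses MySQL's SQL_CALC_FOUND_ROWS hint; further down, the paging code appends a LIMIT clause, and the hint is only useful if the total is then read back with SELECT FOUND_ROWS() (an assumption here, since that part of the listing is truncated). The idiom, sketched against any DB-API style cursor:

    def paged_query(cursor, base_sql, page, size):
        """MySQL paging idiom: LIMIT picks the page, FOUND_ROWS() the total."""
        cursor.execute(base_sql + " LIMIT %d, %d" % ((page - 1) * size, size))
        rows = cursor.fetchall()
        cursor.execute("SELECT FOUND_ROWS()")
        total = cursor.fetchone()[0]  # count as if no LIMIT were applied
        return rows, total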
@@ -193,6 +202,246 @@ public class AdvSearchDAO extends AbstractMySQLOpenSourceDAO
    return getJdbcTemplate().queryForList(GET_JOB_NAMES, String.class);
  }

  public static ObjectNode elasticSearch(JsonNode searchOpt, int page, int size)
  {
    ObjectNode resultNode = Json.newObject();
    Long count = 0L;
    List<Dataset> pagedDatasets = new ArrayList<Dataset>();
    ObjectNode queryNode = Json.newObject();
    queryNode.put("from", (page-1)*size);
    queryNode.put("size", size);

    JsonNode searchNode = utils.Search.generateDatasetAdvSearchQueryString(searchOpt);

    if (searchNode != null && searchNode.isContainerNode())
    {
      queryNode.put("query", searchNode);
    }
    F.Promise<WS.Response> responsePromise = WS.url(
        Play.application().configuration().getString(
            SearchDAO.ELASTICSEARCH_DATASET_URL_KEY)).post(queryNode);
    JsonNode responseNode = responsePromise.get().asJson();

    resultNode.put("page", page);
    resultNode.put("category", "Datasets");
    resultNode.put("itemsPerPage", size);

    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits")) {
      JsonNode hitsNode = responseNode.get("hits");
      if (hitsNode != null) {
        if (hitsNode.has("total")) {
          count = hitsNode.get("total").asLong();
        }
        if (hitsNode.has("hits")) {
          JsonNode dataNode = hitsNode.get("hits");
          if (dataNode != null && dataNode.isArray()) {
            Iterator<JsonNode> arrayIterator = dataNode.elements();
            if (arrayIterator != null) {
              while (arrayIterator.hasNext()) {
                JsonNode node = arrayIterator.next();
                if (node.isContainerNode() && node.has("_id")) {
                  Dataset dataset = new Dataset();
                  dataset.id = node.get("_id").asLong();
                  if (node.has("_source")) {
                    JsonNode sourceNode = node.get("_source");
                    if (sourceNode != null) {
                      if (sourceNode.has("name")) {
                        dataset.name = sourceNode.get("name").asText();
                      }
                      if (sourceNode.has("source")) {
                        dataset.source = sourceNode.get("source").asText();
                      }
                      if (sourceNode.has("urn")) {
                        dataset.urn = sourceNode.get("urn").asText();
                      }
                      if (sourceNode.has("schema")) {
                        dataset.schema = sourceNode.get("schema").asText();
                      }
                    }
                  }
                  pagedDatasets.add(dataset);
                }
              }
            }
          }
        }
      }
    }
    resultNode.put("count", count);
    resultNode.put("totalPages", (int)Math.ceil(count/((double)size)));
    resultNode.set("data", Json.toJson(pagedDatasets));
    return resultNode;
  }

  public static ObjectNode elasticSearchMetric(JsonNode searchOpt, int page, int size)
  {
    ObjectNode resultNode = Json.newObject();
    Long count = 0L;
    List<Metric> pagedMetrics = new ArrayList<Metric>();
    ObjectNode queryNode = Json.newObject();
    queryNode.put("from", (page-1)*size);
    queryNode.put("size", size);

    JsonNode searchNode = utils.Search.generateMetricAdvSearchQueryString(searchOpt);

    if (searchNode != null && searchNode.isContainerNode())
    {
      queryNode.put("query", searchNode);
    }

    F.Promise<WS.Response> responsePromise = WS.url(Play.application().configuration().getString(
        SearchDAO.ELASTICSEARCH_METRIC_URL_KEY)).post(queryNode);
    JsonNode responseNode = responsePromise.get().asJson();

    resultNode.put("page", page);
    resultNode.put("category", "Metrics");
    resultNode.put("isMetrics", true);
    resultNode.put("itemsPerPage", size);

    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits")) {
      JsonNode hitsNode = responseNode.get("hits");
      if (hitsNode != null) {
        if (hitsNode.has("total")) {
          count = hitsNode.get("total").asLong();
        }
        if (hitsNode.has("hits")) {
          JsonNode dataNode = hitsNode.get("hits");
          if (dataNode != null && dataNode.isArray()) {
            Iterator<JsonNode> arrayIterator = dataNode.elements();
            if (arrayIterator != null) {
              while (arrayIterator.hasNext()) {
                JsonNode node = arrayIterator.next();
                if (node.isContainerNode() && node.has("_id")) {
                  Metric metric = new Metric();
                  metric.id = node.get("_id").asInt();
                  if (node.has("_source")) {
                    JsonNode sourceNode = node.get("_source");
                    if (sourceNode != null) {
                      if (sourceNode.has("metric_name")) {
                        metric.name = sourceNode.get("metric_name").asText();
                      }
                      if (sourceNode.has("metric_description")) {
                        metric.description = sourceNode.get("metric_description").asText();
                      }
                      if (sourceNode.has("dashboard_name")) {
                        metric.dashboardName = sourceNode.get("dashboard_name").asText();
                      }
                      if (sourceNode.has("metric_group")) {
                        metric.group = sourceNode.get("metric_group").asText();
                      }
                      if (sourceNode.has("metric_category")) {
                        metric.category = sourceNode.get("metric_category").asText();
                      }
                      if (sourceNode.has("urn")) {
                        metric.urn = sourceNode.get("urn").asText();
                      }
                      if (sourceNode.has("metric_source")) {
                        metric.source = sourceNode.get("metric_source").asText();
                        if (StringUtils.isBlank(metric.source))
                        {
                          metric.source = null;
                        }
                      }
                      metric.schema = sourceNode.toString();
                    }
                  }
                  pagedMetrics.add(metric);
                }
              }
            }
          }
        }
      }
    }
    resultNode.put("count", count);
    resultNode.put("totalPages", (int)Math.ceil(count/((double)size)));
    resultNode.set("data", Json.toJson(pagedMetrics));
    return resultNode;
  }

  public static ObjectNode elasticSearchFlowJobs(JsonNode searchOpt, int page, int size)
  {
    ObjectNode resultNode = Json.newObject();
    Long count = 0L;
    List<FlowJob> pagedFlows = new ArrayList<FlowJob>();
    ObjectNode queryNode = Json.newObject();
    queryNode.put("from", (page-1)*size);
    queryNode.put("size", size);

    JsonNode searchNode = utils.Search.generateFlowJobAdvSearchQueryString(searchOpt);

    if (searchNode != null && searchNode.isContainerNode())
    {
      queryNode.put("query", searchNode);
    }

    F.Promise<WS.Response> responsePromise = WS.url(Play.application().configuration().getString(
        SearchDAO.ELASTICSEARCH_FLOW_URL_KEY)).post(queryNode);
    JsonNode responseNode = responsePromise.get().asJson();

    resultNode.put("page", page);
    resultNode.put("category", "Flows");
    resultNode.put("isFlowJob", true);
    resultNode.put("itemsPerPage", size);

    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits")) {
      JsonNode hitsNode = responseNode.get("hits");
      if (hitsNode != null) {
        if (hitsNode.has("total")) {
          count = hitsNode.get("total").asLong();
        }
        if (hitsNode.has("hits")) {
          JsonNode dataNode = hitsNode.get("hits");
          if (dataNode != null && dataNode.isArray()) {
            Iterator<JsonNode> arrayIterator = dataNode.elements();
            if (arrayIterator != null) {
              while (arrayIterator.hasNext()) {
                JsonNode node = arrayIterator.next();
                if (node.isContainerNode() && node.has("_id")) {
                  FlowJob flowJob = new FlowJob();
                  if (node.has("_source")) {
                    JsonNode sourceNode = node.get("_source");
                    if (sourceNode != null) {
                      if (sourceNode.has("app_code")) {
                        flowJob.appCode = sourceNode.get("app_code").asText();
                      }
                      if (sourceNode.has("app_id")) {
                        flowJob.appId = sourceNode.get("app_id").asInt();
                      }
                      if (sourceNode.has("flow_id")) {
                        flowJob.flowId = sourceNode.get("flow_id").asLong();
                      }
                      if (sourceNode.has("flow_name")) {
                        flowJob.flowName = sourceNode.get("flow_name").asText();
                        flowJob.displayName = flowJob.flowName;
                      }
                      if (sourceNode.has("flow_path")) {
                        flowJob.flowPath = sourceNode.get("flow_path").asText();
                      }
                      if (sourceNode.has("flow_group")) {
                        flowJob.flowGroup = sourceNode.get("flow_group").asText();
                      }
                      flowJob.link = "#/flows/" + flowJob.appCode + "/" +
                          flowJob.flowGroup + "/" + Long.toString(flowJob.flowId) + "/page/1";
                      flowJob.path = flowJob.appCode + "/" + flowJob.flowPath;

                      flowJob.schema = sourceNode.toString();
                    }
                  }
                  pagedFlows.add(flowJob);
                }
              }
            }
          }
        }
      }
    }
    resultNode.put("count", count);
    resultNode.put("totalPages", (int)Math.ceil(count/((double)size)));
    resultNode.set("data", Json.toJson(pagedFlows));
    return resultNode;
  }

  public static ObjectNode search(JsonNode searchOpt, int page, int size)
  {
    ObjectNode resultNode = Json.newObject();
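All three methods POST the same envelope to Elasticsearch (from/size paging plus an optional query clause) and then walk hits.total and hits.hits[]._id/_source in the response. A sketch of the equivalent raw request; the URL and keyword are hypothetical, and the real endpoints come from the ELASTICSEARCH_*_URL_KEY settings:

    import json
    import urllib2

    ES_DATASET_URL = 'http://localhost:9200/wherehows/dataset/_search'  # assumption
    page, size = 1, 10
    query = {
        "from": (page - 1) * size,
        "size": size,
        "query": {"match": {"name": "tracking"}},  # hypothetical keyword query
    }
    req = urllib2.Request(ES_DATASET_URL, json.dumps(query))
    req.add_header('Content-type', 'application/json')
    hits = json.load(urllib2.urlopen(req))['hits']
    print hits['total']
    for hit in hits['hits']:
        print hit['_id'], hit['_source'].get('name')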
@@ -1291,4 +1540,495 @@ public class AdvSearchDAO extends AbstractMySQLOpenSourceDAO
    return resultNode;
  }

  public static ObjectNode searchMetrics(JsonNode searchOpt, int page, int size)
  {
    ObjectNode resultNode = Json.newObject();
    int count = 0;
    List<String> dashboardInList = new ArrayList<String>();
    List<String> dashboardNotInList = new ArrayList<String>();
    List<String> groupInList = new ArrayList<String>();
    List<String> groupNotInList = new ArrayList<String>();
    List<String> categoryInList = new ArrayList<String>();
    List<String> categoryNotInList = new ArrayList<String>();
    List<String> metricInList = new ArrayList<String>();
    List<String> metricNotInList = new ArrayList<String>();

    if (searchOpt != null && (searchOpt.isContainerNode()))
    {
      if (searchOpt.has("dashboard")) {
        JsonNode dashboardNode = searchOpt.get("dashboard");
        if (dashboardNode != null && dashboardNode.isContainerNode())
        {
          if (dashboardNode.has("in"))
          {
            JsonNode dashboardInNode = dashboardNode.get("in");
            if (dashboardInNode != null)
            {
              String dashboardInStr = dashboardInNode.asText();
              if (StringUtils.isNotBlank(dashboardInStr))
              {
                String[] dashboardInArray = dashboardInStr.split(",");
                if (dashboardInArray != null)
                {
                  for(String value : dashboardInArray)
                  {
                    if (StringUtils.isNotBlank(value))
                    {
                      dashboardInList.add(value.trim());
                    }
                  }
                }
              }
            }
          }
          if (dashboardNode.has("not"))
          {
            JsonNode dashboardNotInNode = dashboardNode.get("not");
            if (dashboardNotInNode != null)
            {
              String dashboardNotInStr = dashboardNotInNode.asText();
              if (StringUtils.isNotBlank(dashboardNotInStr))
              {
                String[] dashboardNotInArray = dashboardNotInStr.split(",");
                if (dashboardNotInArray != null)
                {
                  for(String value : dashboardNotInArray)
                  {
                    if (StringUtils.isNotBlank(value))
                    {
                      dashboardNotInList.add(value.trim());
                    }
                  }
                }
              }
            }
          }
        }
      }

      if (searchOpt.has("group")) {
        JsonNode groupNode = searchOpt.get("group");
        if (groupNode != null && groupNode.isContainerNode())
        {
          if (groupNode.has("in"))
          {
            JsonNode groupInNode = groupNode.get("in");
            if (groupInNode != null)
            {
              String groupInStr = groupInNode.asText();
              if (StringUtils.isNotBlank(groupInStr))
              {
                String[] groupInArray = groupInStr.split(",");
                if (groupInArray != null)
                {
                  for(String value : groupInArray)
                  {
                    if (StringUtils.isNotBlank(value))
                    {
                      groupInList.add(value.trim());
                    }
                  }
                }
              }
            }
          }
          if (groupNode.has("not"))
          {
            JsonNode groupNotInNode = groupNode.get("not");
            if (groupNotInNode != null)
            {
              String groupNotInStr = groupNotInNode.asText();
              if (StringUtils.isNotBlank(groupNotInStr))
              {
                String[] groupNotInArray = groupNotInStr.split(",");
                if (groupNotInArray != null)
                {
                  for(String value : groupNotInArray)
                  {
                    if (StringUtils.isNotBlank(value))
                    {
                      groupNotInList.add(value.trim());
                    }
                  }
                }
              }
            }
          }
        }
      }

      if (searchOpt.has("cat")) {
        JsonNode categoryNode = searchOpt.get("cat");
        if (categoryNode != null && categoryNode.isContainerNode())
        {
          if (categoryNode.has("in"))
          {
            JsonNode categoryInNode = categoryNode.get("in");
            if (categoryInNode != null)
            {
              String categoryInStr = categoryInNode.asText();
              if (StringUtils.isNotBlank(categoryInStr))
              {
                String[] categoryInArray = categoryInStr.split(",");
                if (categoryInArray != null)
                {
                  for(String value : categoryInArray)
                  {
                    if (StringUtils.isNotBlank(value))
                    {
                      categoryInList.add(value.trim());
                    }
                  }
                }
              }
            }
          }
          if (categoryNode.has("not"))
          {
            JsonNode categoryNotInNode = categoryNode.get("not");
            if (categoryNotInNode != null)
            {
              String categoryNotInStr = categoryNotInNode.asText();
              if (StringUtils.isNotBlank(categoryNotInStr))
              {
                String[] categoryNotInArray = categoryNotInStr.split(",");
                if (categoryNotInArray != null)
                {
                  for(String value : categoryNotInArray)
                  {
                    if (StringUtils.isNotBlank(value))
                    {
                      categoryNotInList.add(value.trim());
                    }
                  }
                }
              }
            }
          }
        }
      }

      if (searchOpt.has("metric")) {
        JsonNode metricNode = searchOpt.get("metric");
        if (metricNode != null && metricNode.isContainerNode())
        {
          if (metricNode.has("in"))
          {
            JsonNode metricInNode = metricNode.get("in");
            if (metricInNode != null)
            {
              String metricInStr = metricInNode.asText();
              if (StringUtils.isNotBlank(metricInStr))
              {
                String[] metricInArray = metricInStr.split(",");
                if (metricInArray != null)
                {
                  for(String value : metricInArray)
                  {
                    if (StringUtils.isNotBlank(value))
                    {
                      metricInList.add(value.trim());
                    }
                  }
                }
              }
            }
          }
          if (metricNode.has("not"))
          {
            JsonNode metricNotInNode = metricNode.get("not");
            if (metricNotInNode != null)
            {
              String metricNotInStr = metricNotInNode.asText();
              if (StringUtils.isNotBlank(metricNotInStr))
              {
                String[] metricNotInArray = metricNotInStr.split(",");
                if (metricNotInArray != null)
                {
                  for(String value : metricNotInArray)
                  {
                    if (StringUtils.isNotBlank(value))
                    {
                      metricNotInList.add(value.trim());
                    }
                  }
                }
              }
            }
          }
        }
      }
    }

    boolean needAndKeyword = false;

    final List<Metric> pagedMetrics = new ArrayList<Metric>();
    final JdbcTemplate jdbcTemplate = getJdbcTemplate();
    javax.sql.DataSource ds = jdbcTemplate.getDataSource();
    DataSourceTransactionManager tm = new DataSourceTransactionManager(ds);

    TransactionTemplate txTemplate = new TransactionTemplate(tm);

    ObjectNode result;
    String query = ADV_SEARCH_METRIC;

    if (dashboardInList.size() > 0 || dashboardNotInList.size() > 0)
    {
      boolean dashboardNeedAndKeyword = false;
      if (dashboardInList.size() > 0)
      {
        int indexForDashboardInList = 0;
        for (String dashboard : dashboardInList)
        {
          if (indexForDashboardInList == 0)
          {
            query += "WHERE dashboard_name in ('" + dashboard + "'";
          }
          else
          {
            query += ", '" + dashboard + "'";
          }
          indexForDashboardInList++;
        }
        query += ") ";
        dashboardNeedAndKeyword = true;
      }
      if (dashboardNotInList.size() > 0)
      {
        if (dashboardNeedAndKeyword)
        {
          query += " AND ";
        }
        else
        {
          query += " WHERE ";
        }
        int indexForDashboardNotInList = 0;
        for (String dashboard : dashboardNotInList)
        {
          if (indexForDashboardNotInList == 0)
          {
            query += "dashboard_name not in ('" + dashboard + "'";
          }
          else
          {
            query += ", '" + dashboard + "'";
          }
          indexForDashboardNotInList++;
        }
        query += ") ";
      }
      needAndKeyword = true;
    }

    if (groupInList.size() > 0 || groupNotInList.size() > 0)
    {
      if (needAndKeyword)
      {
        query += " AND ";
      }
      else
      {
        query += " WHERE ";
      }
      query += "( ";
      boolean groupNeedAndKeyword = false;
      if (groupInList.size() > 0)
      {
        query += "( ";
        int indexForGroupInList = 0;
        for (String group : groupInList)
        {
          if (indexForGroupInList == 0)
          {
            query += "metric_group LIKE '%" + group + "%'";
          }
          else
          {
            query += " or metric_group LIKE '%" + group + "%'";
          }
          indexForGroupInList++;
        }
        query += ") ";
        groupNeedAndKeyword = true;
      }
      if (groupNotInList.size() > 0)
      {
        if (groupNeedAndKeyword)
        {
          query += " AND ";
        }
        query += "( ";
        int indexForGroupNotInList = 0;
        for (String group : groupNotInList)
        {
          if (indexForGroupNotInList == 0)
          {
            query += "metric_group NOT LIKE '%" + group + "%'";
          }
          else
          {
            query += " and metric_group NOT LIKE '%" + group + "%'";
          }
          indexForGroupNotInList++;
        }
        query += ") ";
      }
      query += ") ";
      needAndKeyword = true;
    }

    if (categoryInList.size() > 0 || categoryNotInList.size() > 0)
    {
      if (needAndKeyword)
      {
        query += " AND ";
      }
      else
      {
        query += " WHERE ";
      }
      query += "( ";
      boolean categoryNeedAndKeyword = false;
      if (categoryInList.size() > 0)
      {
        int indexForCategoryInList = 0;
        query += "( ";
        for (String category : categoryInList)
        {
          if (indexForCategoryInList == 0)
          {
            query += "metric_category LIKE '%" + category + "%'";
          }
          else
          {
            query += " or metric_category LIKE '%" + category + "%'";
          }
          indexForCategoryInList++;
        }
        query += ") ";
        categoryNeedAndKeyword = true;
      }
      if (categoryNotInList.size() > 0)
      {
        if (categoryNeedAndKeyword)
        {
          query += " AND ";
        }
        query += "( ";
        int indexForCategoryNotInList = 0;
        for (String category : categoryNotInList)
        {
          if (indexForCategoryNotInList == 0)
          {
            query += "metric_category NOT LIKE '%" + category + "%'";
          }
          else
          {
            query += " and metric_category NOT LIKE '%" + category + "%'";
          }
          indexForCategoryNotInList++;
        }
        query += ") ";
      }
      query += ") ";
      needAndKeyword = true;
    }

    if (metricInList.size() > 0 || metricNotInList.size() > 0)
    {
      if (needAndKeyword)
      {
        query += " AND ";
      }
      else
      {
        query += " WHERE ";
      }
      query += "( ";
      boolean metricNeedAndKeyword = false;
      if (metricInList.size() > 0)
      {
        int indexForMetricInList = 0;
        query += " ( ";
        for (String metric : metricInList)
        {
          if (indexForMetricInList == 0)
          {
            query += "metric_name LIKE '%" + metric + "%'";
          }
          else
          {
            query += " or metric_name LIKE '%" + metric + "%'";
          }
          indexForMetricInList++;
        }
        query += ") ";
        metricNeedAndKeyword = true;
      }
      if (metricNotInList.size() > 0)
      {
        if (metricNeedAndKeyword)
        {
          query += " AND ";
        }
        query += "( ";
        int indexForMetricNotInList = 0;
        for (String metric : metricNotInList)
        {
          if (indexForMetricNotInList == 0)
          {
            query += "metric_name NOT LIKE '%" + metric + "%'";
          }
          else
          {
            query += " and metric_name NOT LIKE '%" + metric + "%'";
          }
          indexForMetricNotInList++;
        }
        query += ") ";
      }
      query += " )";
    }

    query += " LIMIT " + (page-1)*size + ", " + size;
    final String queryString = query;

    result = txTemplate.execute(new TransactionCallback<ObjectNode>()
    {
      public ObjectNode doInTransaction(TransactionStatus status)
      {
        List<Metric> pagedMetrics = jdbcTemplate.query(queryString, new MetricRowMapper());
|
long count = 0;
|
||||||
|
try {
|
||||||
|
count = jdbcTemplate.queryForObject(
|
||||||
|
"SELECT FOUND_ROWS()",
|
||||||
|
Long.class);
|
||||||
|
}
|
||||||
|
catch(EmptyResultDataAccessException e)
|
||||||
|
{
|
||||||
|
Logger.error("Exception = " + e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
ObjectNode resultNode = Json.newObject();
|
||||||
|
resultNode.put("count", count);
|
||||||
|
resultNode.put("page", page);
|
||||||
|
resultNode.put("isMetrics", true);
|
||||||
|
resultNode.put("itemsPerPage", size);
|
||||||
|
resultNode.put("totalPages", (int)Math.ceil(count/((double)size)));
|
||||||
|
resultNode.set("data", Json.toJson(pagedMetrics));
|
||||||
|
|
||||||
|
return resultNode;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
resultNode.put("count", 0);
|
||||||
|
resultNode.put("page", page);
|
||||||
|
resultNode.put("itemsPerPage", size);
|
||||||
|
resultNode.put("totalPages", 0);
|
||||||
|
resultNode.set("data", Json.toJson(""));
|
||||||
|
return resultNode;
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
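A note on the construction above: the builder emits the WHERE clause incrementally, opening an IN (...) list on the first value and comma-appending the rest, and the filter values are spliced into the SQL string as-is. A minimal self-contained sketch of the shape it produces; the inputs and the abbreviated SELECT prefix are hypothetical, not taken from this commit:

    import java.util.Arrays;
    import java.util.List;

    public class AdvSearchQueryShape {
      public static void main(String[] args) {
        // Hypothetical filter values; stands in for dashboardInList above.
        List<String> dashboardInList = Arrays.asList("Growth", "Revenue");
        String query = "SELECT SQL_CALC_FOUND_ROWS ... "; // abbreviated ADV_SEARCH_METRIC prefix

        int index = 0;
        for (String dashboard : dashboardInList) {
          // First value opens the IN list; later values are comma-appended.
          query += (index == 0 ? "WHERE dashboard_name in ('" : ", '") + dashboard + "'";
          index++;
        }
        query += ") ";

        // -> SELECT SQL_CALC_FOUND_ROWS ... WHERE dashboard_name in ('Growth', 'Revenue')
        System.out.println(query);
      }
    }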
@ -310,6 +310,15 @@ public class DatasetsDAO extends AbstractMySQLOpenSourceDAO
       "LEFT JOIN field_comments fc ON ddfc.comment_id = fc.id " +
       "WHERE dfd.dataset_id <> ? AND dfd.field_name = ? ORDER BY d.name asc";
+
+
+  private final static String GET_DATASET_OWNER_TYPES = "SELECT DISTINCT owner_type " +
+      "FROM dataset_owner WHERE owner_type is not null";
+
+  public static List<String> getDatasetOwnerTypes()
+  {
+    return getJdbcTemplate().queryForList(GET_DATASET_OWNER_TYPES, String.class);
+  }
+
   public static ObjectNode getPagedDatasets(String urn, Integer page, Integer size, String user)
   {
     ObjectNode result = Json.newObject();
@ -13,13 +13,11 @@
  */
 package dao;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
+import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.commons.lang3.StringUtils;
 import org.springframework.dao.EmptyResultDataAccessException;
@ -29,12 +27,23 @@ import org.springframework.transaction.TransactionStatus;
 import org.springframework.transaction.support.TransactionCallback;
 import org.springframework.transaction.support.TransactionTemplate;
 import play.Logger;
+import play.Play;
+import play.libs.F;
 import play.libs.Json;
 import play.cache.Cache;
 import models.*;
+import play.libs.WS;
 
 public class SearchDAO extends AbstractMySQLOpenSourceDAO
 {
+  public static String ELASTICSEARCH_DATASET_URL_KEY = "elasticsearch.dataset.url";
+
+  public static String ELASTICSEARCH_METRIC_URL_KEY = "elasticsearch.metric.url";
+
+  public static String ELASTICSEARCH_FLOW_URL_KEY = "elasticsearch.flow.url";
+
+  public static String WHEREHOWS_SEARCH_ENGINE__KEY = "search.engine";
+
   public final static String SEARCH_DATASET_WITH_PAGINATION = "SELECT SQL_CALC_FOUND_ROWS " +
       "id, `name`, `schema`, `source`, `urn`, FROM_UNIXTIME(source_modified_time) as modified, " +
       "rank_01 + rank_02 + rank_03 + rank_04 + rank_05 + rank_06 + rank_07 + rank_08 + rank_09 as rank " +
@ -169,6 +178,324 @@ public class SearchDAO extends AbstractMySQLOpenSourceDAO
     return cachedAutoCompleteList;
   }
 
+  public static JsonNode elasticSearchDatasetByKeyword(
+      String category,
+      String keywords,
+      String source,
+      int page,
+      int size)
+  {
+    ObjectNode queryNode = Json.newObject();
+    queryNode.put("from", (page-1)*size);
+    queryNode.put("size", size);
+    JsonNode responseNode = null;
+    ObjectNode keywordNode = null;
+
+    try
+    {
+      keywordNode = utils.Search.generateElasticSearchQueryString(category, source, keywords);
+    }
+    catch(Exception e)
+    {
+      Logger.error("Elastic search dataset input query is not JSON format. Error message :" + e.getMessage());
+    }
+
+    if (keywordNode != null)
+    {
+      queryNode.put("query", keywordNode);
+      F.Promise<WS.Response> responsePromise = WS.url(Play.application().configuration().getString(
+          SearchDAO.ELASTICSEARCH_DATASET_URL_KEY)).post(queryNode);
+      responseNode = responsePromise.get().asJson();
+    }
+
+    ObjectNode resultNode = Json.newObject();
+    Long count = 0L;
+    List<Dataset> pagedDatasets = new ArrayList<Dataset>();
+    resultNode.put("page", page);
+    resultNode.put("category", category);
+    resultNode.put("source", source);
+    resultNode.put("itemsPerPage", size);
+    resultNode.put("keywords", keywords);
+
+    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits"))
+    {
+      JsonNode hitsNode = responseNode.get("hits");
+      if (hitsNode != null)
+      {
+        if (hitsNode.has("total"))
+        {
+          count = hitsNode.get("total").asLong();
+        }
+        if (hitsNode.has("hits"))
+        {
+          JsonNode dataNode = hitsNode.get("hits");
+          if (dataNode != null && dataNode.isArray())
+          {
+            Iterator<JsonNode> arrayIterator = dataNode.elements();
+            if (arrayIterator != null)
+            {
+              while (arrayIterator.hasNext())
+              {
+                JsonNode node = arrayIterator.next();
+                if (node.isContainerNode() && node.has("_id"))
+                {
+                  Dataset dataset = new Dataset();
+                  dataset.id = node.get("_id").asLong();
+                  if (node.has("_source"))
+                  {
+                    JsonNode sourceNode = node.get("_source");
+                    if (sourceNode != null)
+                    {
+                      if (sourceNode.has("name"))
+                      {
+                        dataset.name = sourceNode.get("name").asText();
+                      }
+                      if (sourceNode.has("source"))
+                      {
+                        dataset.source = sourceNode.get("source").asText();
+                      }
+                      if (sourceNode.has("urn"))
+                      {
+                        dataset.urn = sourceNode.get("urn").asText();
+                      }
+                      if (sourceNode.has("schema"))
+                      {
+                        dataset.schema = sourceNode.get("schema").asText();
+                      }
+                    }
+                  }
+                  pagedDatasets.add(dataset);
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+    resultNode.put("count", count);
+    resultNode.put("totalPages", (int)Math.ceil(count/((double)size)));
+    resultNode.set("data", Json.toJson(pagedDatasets));
+    return resultNode;
+  }
+
+  public static JsonNode elasticSearchMetricByKeyword(
+      String category,
+      String keywords,
+      int page,
+      int size)
+  {
+    ObjectNode queryNode = Json.newObject();
+    queryNode.put("from", (page-1)*size);
+    queryNode.put("size", size);
+    JsonNode responseNode = null;
+    ObjectNode keywordNode = null;
+
+    try
+    {
+      keywordNode = utils.Search.generateElasticSearchQueryString(category, null, keywords);
+    }
+    catch(Exception e)
+    {
+      Logger.error("Elastic search metric input query is not JSON format. Error message :" + e.getMessage());
+    }
+
+    if (keywordNode != null)
+    {
+      queryNode.put("query", keywordNode);
+      F.Promise<WS.Response> responsePromise = WS.url(Play.application().configuration().getString(
+          SearchDAO.ELASTICSEARCH_METRIC_URL_KEY)).post(queryNode);
+      responseNode = responsePromise.get().asJson();
+    }
+
+    ObjectNode resultNode = Json.newObject();
+    Long count = 0L;
+    List<Metric> pagedMetrics = new ArrayList<Metric>();
+    resultNode.put("page", page);
+    resultNode.put("category", category);
+    resultNode.put("isMetrics", true);
+    resultNode.put("itemsPerPage", size);
+    resultNode.put("keywords", keywords);
+
+    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits"))
+    {
+      JsonNode hitsNode = responseNode.get("hits");
+      if (hitsNode != null)
+      {
+        if (hitsNode.has("total"))
+        {
+          count = hitsNode.get("total").asLong();
+        }
+        if (hitsNode.has("hits"))
+        {
+          JsonNode dataNode = hitsNode.get("hits");
+          if (dataNode != null && dataNode.isArray())
+          {
+            Iterator<JsonNode> arrayIterator = dataNode.elements();
+            if (arrayIterator != null)
+            {
+              while (arrayIterator.hasNext())
+              {
+                JsonNode node = arrayIterator.next();
+                if (node.isContainerNode() && node.has("_id"))
+                {
+                  Metric metric = new Metric();
+                  metric.id = node.get("_id").asInt();
+                  if (node.has("_source")) {
+                    JsonNode sourceNode = node.get("_source");
+                    if (sourceNode != null) {
+                      if (sourceNode.has("metric_name")) {
+                        metric.name = sourceNode.get("metric_name").asText();
+                      }
+                      if (sourceNode.has("metric_description")) {
+                        metric.description = sourceNode.get("metric_description").asText();
+                      }
+                      if (sourceNode.has("dashboard_name")) {
+                        metric.dashboardName = sourceNode.get("dashboard_name").asText();
+                      }
+                      if (sourceNode.has("metric_group")) {
+                        metric.group = sourceNode.get("metric_group").asText();
+                      }
+                      if (sourceNode.has("metric_category")) {
+                        metric.category = sourceNode.get("metric_category").asText();
+                      }
+                      if (sourceNode.has("urn")) {
+                        metric.urn = sourceNode.get("urn").asText();
+                      }
+                      if (sourceNode.has("metric_source")) {
+                        metric.source = sourceNode.get("metric_source").asText();
+                        if (StringUtils.isBlank(metric.source))
+                        {
+                          metric.source = null;
+                        }
+                      }
+                      metric.schema = sourceNode.toString();
+                    }
+                  }
+                  pagedMetrics.add(metric);
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+    resultNode.put("count", count);
+    resultNode.put("totalPages", (int)Math.ceil(count/((double)size)));
+    resultNode.set("data", Json.toJson(pagedMetrics));
+    return resultNode;
+  }
+
+  public static JsonNode elasticSearchFlowByKeyword(
+      String category,
+      String keywords,
+      int page,
+      int size)
+  {
+    ObjectNode queryNode = Json.newObject();
+    queryNode.put("from", (page-1)*size);
+    queryNode.put("size", size);
+    JsonNode searchOpt = null;
+    JsonNode responseNode = null;
+    ObjectNode keywordNode = null;
+
+    try
+    {
+      keywordNode = utils.Search.generateElasticSearchQueryString(category, null, keywords);
+    }
+    catch(Exception e)
+    {
+      Logger.error("Elastic search flow input query is not JSON format. Error message :" + e.getMessage());
+    }
+
+    if (keywordNode != null)
+    {
+      queryNode.put("query", keywordNode);
+      F.Promise<WS.Response> responsePromise = WS.url(Play.application().configuration().getString(
+          SearchDAO.ELASTICSEARCH_FLOW_URL_KEY)).post(queryNode);
+      responseNode = responsePromise.get().asJson();
+    }
+
+    ObjectNode resultNode = Json.newObject();
+    Long count = 0L;
+    List<FlowJob> pagedFlowJobs = new ArrayList<FlowJob>();
+    resultNode.put("page", page);
+    resultNode.put("category", category);
+    resultNode.put("isFlowJob", true);
+    resultNode.put("itemsPerPage", size);
+    resultNode.put("keywords", keywords);
+
+    if (responseNode != null && responseNode.isContainerNode() && responseNode.has("hits"))
+    {
+      JsonNode hitsNode = responseNode.get("hits");
+      if (hitsNode != null)
+      {
+        if (hitsNode.has("total"))
+        {
+          count = hitsNode.get("total").asLong();
+        }
+        if (hitsNode.has("hits"))
+        {
+          JsonNode dataNode = hitsNode.get("hits");
+          if (dataNode != null && dataNode.isArray())
+          {
+            Iterator<JsonNode> arrayIterator = dataNode.elements();
+            if (arrayIterator != null)
+            {
+              while (arrayIterator.hasNext())
+              {
+                JsonNode node = arrayIterator.next();
+                if (node.isContainerNode() && node.has("_id"))
+                {
+                  FlowJob flowJob = new FlowJob();
+                  if (node.has("_source")) {
+                    JsonNode sourceNode = node.get("_source");
+                    if (sourceNode != null) {
+                      if (sourceNode.has("app_code")) {
+                        flowJob.appCode = sourceNode.get("app_code").asText();
+                      }
+                      if (sourceNode.has("app_id")) {
+                        flowJob.appId = sourceNode.get("app_id").asInt();
+                      }
+                      if (sourceNode.has("flow_id")) {
+                        flowJob.flowId = sourceNode.get("flow_id").asLong();
+                      }
+                      if (sourceNode.has("flow_name")) {
+                        flowJob.flowName = sourceNode.get("flow_name").asText();
+                        flowJob.displayName = flowJob.flowName;
+                      }
+                      if (sourceNode.has("flow_path")) {
+                        flowJob.flowPath = sourceNode.get("flow_path").asText();
+                      }
+                      if (sourceNode.has("flow_group")) {
+                        flowJob.flowGroup = sourceNode.get("flow_group").asText();
+                      }
+                      flowJob.link = "#/flows/" + flowJob.appCode + "/" +
+                          flowJob.flowGroup + "/" + Long.toString(flowJob.flowId) + "/page/1";
+                      flowJob.path = flowJob.appCode + "/" + flowJob.flowPath;
+
+                      flowJob.schema = sourceNode.toString();
+                    }
+                  }
+                  pagedFlowJobs.add(flowJob);
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+    resultNode.put("count", count);
+    resultNode.put("totalPages", (int)Math.ceil(count/((double)size)));
+    resultNode.set("data", Json.toJson(pagedFlowJobs));
+    return resultNode;
+  }
+
 public static ObjectNode getPagedDatasetByKeyword(String category, String keyword, String source, int page, int size)
 {
     List<Dataset> pagedDatasets = new ArrayList<Dataset>();
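All three methods above walk the same Elasticsearch search-response envelope: hits.total carries the overall match count and each element of hits.hits carries an _id plus the indexed document under _source. A compact stand-alone sketch of that traversal against a canned response; the sample document is hypothetical, and it assumes jackson-databind on the classpath (which SearchDAO already imports):

    import com.fasterxml.jackson.databind.JsonNode;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.Iterator;

    public class EsResponseShape {
      public static void main(String[] args) throws Exception {
        // Hypothetical single-hit response in the shape parsed above.
        String sample = "{\"hits\":{\"total\":1,\"hits\":[{\"_id\":\"42\","
            + "\"_source\":{\"name\":\"tracking.page_view\",\"source\":\"Hive\"}}]}}";
        JsonNode responseNode = new ObjectMapper().readTree(sample);

        JsonNode hitsNode = responseNode.get("hits");
        long count = hitsNode.get("total").asLong();        // overall match count
        Iterator<JsonNode> it = hitsNode.get("hits").elements();
        while (it.hasNext()) {
          JsonNode node = it.next();
          long id = node.get("_id").asLong();               // ES doc id -> dataset.id
          JsonNode sourceNode = node.get("_source");        // indexed fields -> model fields
          System.out.println(count + " " + id + " " + sourceNode.get("name").asText());
        }
      }
    }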
@ -28,4 +28,5 @@ public class FlowJob {
     public String path;
     public Integer appId;
     public Long flowId;
+    public String schema;
 }
1194
web/app/utils/Search.java
Normal file
File diff suppressed because it is too large
@ -1198,6 +1198,7 @@
 <td class="col-xs-12">
 <div class="dataset-name">
 <td class="dataset-info">
+<i class="fa fa-random"></i>
 <a href="{{flowJob.link}}">
 {{flowJob.displayName}}
 </a>
@ -1206,6 +1207,9 @@
 {{{ flowJob.path }}}
 </p>
 <p>source: {{{ flowJob.appCode }}}</p>
+<div class="schematext" style="margin-top:5px;margin-bottom: 10px;">
+{{{ flowJob.schema }}}
+</div>
 </div>
 </td>
 </tr>
@ -1221,10 +1225,12 @@
 <div class="dataset-name">
 <td class="dataset-info">
 {{#if isMetric}}
+<i class="fa fa-plus-square-o"></i>
 {{#link-to 'metric' dataset}}
 {{{dataset.name}}}
 {{/link-to}}
 {{else}}
+<i class="fa fa-database"></i>
 {{#link-to 'dataset' dataset}}
 {{{dataset.name}}}
 {{/link-to}}
@ -1233,7 +1239,11 @@
 <p>
 {{{ dataset.urn }}}
 </p>
-<p>source: {{{ dataset.source }}}</p>
+{{#if dataset.source}}
+<p>source: {{{ dataset.source }}}</p>
+{{else}}
+<p>source: Metric</p>
+{{/if}}
 <div class="schematext" style="margin-top:5px;margin-bottom: 10px;">
 {{{ dataset.schema }}}
 </div>
@ -1302,6 +1312,7 @@
 <td class="col-xs-12">
 <div class="dataset-name">
 <td class="dataset-info">
+<i class="fa fa-random"></i>
 <a href="{{flowJob.link}}">
 {{flowJob.displayName}}
 </a>
@ -1310,6 +1321,9 @@
 {{{ flowJob.path }}}
 </p>
 <p>source: {{{ flowJob.appCode }}}</p>
+<div class="schematext" style="margin-top:5px;margin-bottom: 10px;">
+{{{ flowJob.schema }}}
+</div>
 </div>
 </td>
 </tr>
@ -1324,6 +1338,7 @@
 <td class="col-xs-12">
 <div class="dataset-name">
 <td class="dataset-info">
+<i class="fa fa-database"></i>
 {{#link-to 'dataset' dataset}}
 {{{dataset.name}}}
 {{/link-to}}
@ -90,20 +90,55 @@
 </li>
 </ul>
 <form class="navbar-form navbar-left" role="search">
-<div class="input-group">
-<input id="searchInput"
-       type="text"
-       class="form-control input-sm keyword-search"
-       placeholder="Enter Keywords..."
-/>
-<span class="input-group-btn">
-<button id="searchBtn"
-        type="button"
-        class="btn btn-sm btn-primary"
->
-<i class="fa fa-search"></i>
-</button>
-</span>
+<div class="row">
+<div class="btn-group" role="group">
+<button style="height: 30px;margin-right:-4px;"
+        type="button"
+        data-toggle="dropdown"
+        aria-expanded="false">
+<i id="categoryIcon" class="fa fa-database"></i>
+<span class="caret"></span>
+</button>
+<ul class="dropdown-menu" role="menu">
+<!--
+<li class="active">
+<a href="#" class="searchCategory">All</a>
+</li>
+-->
+<li id="categoryDatasets" class="active">
+<a href="#" class="searchCategory">Datasets</a>
+</li>
+<li id="categoryComments" >
+<a href="#" class="searchCategory">Comments</a>
+</li>
+<!--
+<li id="categoryMetrics" >
+<a href="#" class="searchCategory">Metrics</a>
+</li>
+-->
+<li id="categoryFlows" >
+<a href="#" class="searchCategory">Flows</a>
+</li>
+<li id="categoryJobs" >
+<a href="#" class="searchCategory">Jobs</a>
+</li>
+</ul>
+</div>
+<div class="input-group">
+<input id="searchInput"
+       type="text"
+       class="form-control input-sm keyword-search"
+       placeholder="Enter Keywords..."
+/>
+<span class="input-group-btn">
+<button id="searchBtn"
+        type="button"
+        class="btn btn-sm btn-primary"
+>
+<i class="fa fa-search"></i>
+</button>
+</span>
+</div>
 </div>
 </form>
 <div class="nav nabar-nav navbar-left">
@ -74,6 +74,12 @@ database.opensource.username = "wherehows"
 database.opensource.password = "wherehows"
 database.opensource.url = "jdbc:mysql://localhost/wherehows?charset=utf8&zeroDateTimeBehavior=convertToNull"
 
+search.engine = "default"
+
+elasticsearch.dataset.url = "$YOUR_DATASET_INDEX_URL"
+elasticsearch.flow.url = "$YOUR_FLOW_INDEX_URL"
+
+
 authentication.ldap.url = "$YOUR_LDAP_SERVER"
 authentication.ldap.context_factory_class = "com.sun.jndi.ldap.LdapCtxFactory"
 authentication.principal.domain = "$YOUR_LDAP_DOMAIN"
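The search.engine flag is what lets a deployment opt into the new backend; with the default value the MySQL path is used. The controller that consumes it is not part of the hunks shown here, so the following dispatch is only a sketch of the plausible wiring: the "elasticsearch" value and the surrounding controller context are assumptions, while the key constant and both SearchDAO methods do come from this commit.

    // Hypothetical sketch -- runs inside a Play controller of this app.
    String engine = Play.application().configuration()
        .getString(SearchDAO.WHEREHOWS_SEARCH_ENGINE__KEY);
    JsonNode result;
    if ("elasticsearch".equalsIgnoreCase(engine)) {
      result = SearchDAO.elasticSearchDatasetByKeyword(category, keywords, source, page, size);
    } else {
      result = SearchDAO.getPagedDatasetByKeyword(category, keywords, source, page, size);
    }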
@ -33,11 +33,13 @@ GET /lineage/flow/:application/:project/:flow controllers.Application.flow
 
 GET /schemaHistory controllers.Application.schemaHistory()
 
-GET /api/v1/company/entities controllers.api.v1.User.getAllUserEntities()
+GET /api/v1/owner/types controllers.api.v1.Dataset.getDatasetOwnerTypes()
 
-GET /api/v1/company/employees controllers.api.v1.User.getAllCompanyUsers()
+GET /api/v1/party/entities controllers.api.v1.User.getAllUserEntities()
 
-GET /api/v1/company/groups controllers.api.v1.User.getAllGroups()
+GET /api/v1/party/employees controllers.api.v1.User.getAllCompanyUsers()
+
+GET /api/v1/party/groups controllers.api.v1.User.getAllGroups()
 
 GET /api/v1/autocomplete/search controllers.api.v1.Search.getSearchAutoComplete()
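The new /api/v1/owner/types route pairs with the getDatasetOwnerTypes() DAO method added to DatasetsDAO above; the controller body itself is not among the hunks shown. A hedged sketch of what it plausibly returns, matching the { status: "ok", ownerTypes: [...] } shape the Ember dataset route reads later in this diff:

    // Hypothetical controller sketch (controllers.api.v1.Dataset); only the
    // route, the DAO call, and the expected JSON shape come from this commit.
    public static Result getDatasetOwnerTypes()
    {
        ObjectNode result = Json.newObject();
        result.put("status", "ok");
        result.set("ownerTypes", Json.toJson(DatasetsDAO.getDatasetOwnerTypes()));
        return ok(result);
    }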
@ -138,7 +138,7 @@ App.DatasetController = Ember.Controller.extend({
 hasSamples: false,
 isTable: true,
 isJSON: false,
-ownerTypes: ["", "Producer", "Consumer"],
+ownerTypes: [],
 userTypes: [{name:"Corporate User", value: "urn:li:corpuser"}, {name:"Group User", value: "urn:li:griduser"}],
 isPinot: function(){
 var model = this.get("model");
@ -93,6 +93,101 @@ var convertQueryStringToObject = function() {
 return queryString;
 }
+
+function resetCategoryActiveFlag(category)
+{
+  $('#categoryDatasets').removeClass('active');
+  $('#categoryComments').removeClass('active');
+  $('#categoryMetrics').removeClass('active');
+  $('#categoryFlows').removeClass('active');
+  $('#categoryJobs').removeClass('active');
+  if (category.toLowerCase() == 'datasets')
+  {
+    $('#categoryDatasets').addClass('active');
+  }
+  else if (category.toLowerCase() == 'comments')
+  {
+    $('#categoryComments').addClass('active');
+  }
+  else if (category.toLowerCase() == 'metrics')
+  {
+    $('#categoryMetrics').addClass('active');
+  }
+  else if (category.toLowerCase() == 'flows')
+  {
+    $('#categoryFlows').addClass('active');
+  }
+  else if (category.toLowerCase() == 'jobs')
+  {
+    $('#categoryJobs').addClass('active');
+  }
+  currentCategory = category;
+}
+
+function updateSearchCategories(category)
+{
+  if (category.toLowerCase() == 'all')
+  {
+    $('#categoryIcon').removeClass('fa fa-list');
+    $('#categoryIcon').removeClass('fa fa-database');
+    $('#categoryIcon').removeClass('fa fa-comment');
+    $('#categoryIcon').removeClass('fa fa-random');
+    $('#categoryIcon').removeClass('fa fa-plus-square-o');
+    $('#categoryIcon').removeClass('fa fa-file-o');
+    $('#categoryIcon').addClass('fa fa-list');
+  }
+  else if (category.toLowerCase() == 'datasets')
+  {
+    $('#categoryIcon').removeClass('fa fa-list');
+    $('#categoryIcon').removeClass('fa fa-database');
+    $('#categoryIcon').removeClass('fa fa-comment');
+    $('#categoryIcon').removeClass('fa fa-random');
+    $('#categoryIcon').removeClass('fa fa-plus-square-o');
+    $('#categoryIcon').removeClass('fa fa-file-o');
+    $('#categoryIcon').addClass('fa fa-database');
+  }
+  else if (category.toLowerCase() == 'comments')
+  {
+    $('#categoryIcon').removeClass('fa fa-list');
+    $('#categoryIcon').removeClass('fa fa-database');
+    $('#categoryIcon').removeClass('fa fa-comment');
+    $('#categoryIcon').removeClass('fa fa-random');
+    $('#categoryIcon').removeClass('fa fa-plus-square-o');
+    $('#categoryIcon').removeClass('fa fa-file-o');
+    $('#categoryIcon').addClass('fa fa-comment');
+  }
+  else if (category.toLowerCase() == 'metrics')
+  {
+    $('#categoryIcon').removeClass('fa fa-list');
+    $('#categoryIcon').removeClass('fa fa-database');
+    $('#categoryIcon').removeClass('fa fa-comment');
+    $('#categoryIcon').removeClass('fa fa-random');
+    $('#categoryIcon').removeClass('fa fa-plus-square-o');
+    $('#categoryIcon').removeClass('fa fa-file-o');
+    $('#categoryIcon').addClass('fa fa-plus-square-o');
+  }
+  else if (category.toLowerCase() == 'flows')
+  {
+    $('#categoryIcon').removeClass('fa fa-list');
+    $('#categoryIcon').removeClass('fa fa-database');
+    $('#categoryIcon').removeClass('fa fa-comment');
+    $('#categoryIcon').removeClass('fa fa-random');
+    $('#categoryIcon').removeClass('fa fa-plus-square-o');
+    $('#categoryIcon').removeClass('fa fa-file-o');
+    $('#categoryIcon').addClass('fa fa-random');
+  }
+  else if (category.toLowerCase() == 'jobs')
+  {
+    $('#categoryIcon').removeClass('fa fa-list');
+    $('#categoryIcon').removeClass('fa fa-database');
+    $('#categoryIcon').removeClass('fa fa-comment');
+    $('#categoryIcon').removeClass('fa fa-random');
+    $('#categoryIcon').removeClass('fa fa-plus-square-o');
+    $('#categoryIcon').removeClass('fa fa-file-o');
+    $('#categoryIcon').addClass('fa fa-file-o');
+  }
+  resetCategoryActiveFlag(category);
+}
+
 String.prototype.toProperCase = function(){
 return this.replace(/\w\S*/g, function(txt){
 return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase()
@ -232,6 +232,13 @@ App.DatasetRoute = Ember.Route.extend({
 controller.set("breadcrumbs", breadcrumbs);
 }
 
+var ownerTypeUrl = 'api/v1/owner/types';
+$.get(ownerTypeUrl, function(data) {
+  if (data && data.status == "ok") {
+    controller.set("ownerTypes", data.ownerTypes);
+  }
+});
+
 var userSettingsUrl = 'api/v1/user/me';
 $.get(userSettingsUrl, function(data) {
 var tabview = false;
@ -427,7 +434,7 @@ App.DatasetRoute = Ember.Route.extend({
 }
 });
 
-var allUserEntitiesUrl = 'api/v1/company/entities';
+var allUserEntitiesUrl = 'api/v1/party/entities';
 $.get(allUserEntitiesUrl, function(data) {
 if (data && data.status == "ok")
 {
@ -20,8 +20,11 @@ function highlightResults(result, index, keyword)
 var newContent = content.replace(query, "<b>$1</b>");
 result[index].schema = newContent;
 var urn = result[index].urn;
-var newUrn = urn.replace(query, "<b>$1</b>");
-result[index].urn = newUrn;
+if (urn)
+{
+  var newUrn = urn.replace(query, "<b>$1</b>");
+  result[index].urn = newUrn;
+}
 };
 
 App.SearchRoute = Ember.Route.extend({
@ -65,6 +68,9 @@ App.SearchRoute = Ember.Route.extend({
 $.get(url, function(data) {
 if (data && data.status == "ok") {
 var result = data.result;
+var keywords = result.keywords;
+window.g_currentCategory = result.category;
+updateSearchCategories(result.category);
 for(var index = 0; index < result.data.length; index++) {
 var schema = result.data[index].schema;
 if (schema) {
@ -1,6 +1,10 @@
-(function ($) {
+(function (window, $) {
 $('#advsearchtabs a:first').tab("show");
 $('#datasetAdvSearchLink').addClass("active");
+String.prototype.replaceAll = function(target, replacement) {
+  return this.split(target).join(replacement);
+};
+window.g_currentCategory = 'Datasets';
 function renderAdvSearchDatasetSources(parent, sources)
 {
 if ((!parent) || (!sources) || sources.length == 0)
@ -72,7 +76,21 @@
 parent.append(content);
 }
 
+$(".searchCategory").click(function(e){
+  var objs = $(".searchCategory");
+  if (objs)
+  {
+    $.each(objs, function( index, value ) {
+      $(objs[index]).parent().removeClass("active");
+    });
+  }
+  window.g_currentCategory = e.target.text;
+  updateSearchCategories(e.target.text);
+  //$(e.target).parent().addClass( "active" );
+  e.preventDefault();
+});
+
 var datasetSourcesUrl = '/api/v1/advsearch/sources';
 $.get(datasetSourcesUrl, function(data) {
 if (data && data.status == "ok")
 {
@ -87,32 +105,25 @@
 }
 });
 
-$("#searchInput").on( "keydown", function(event) {
-  if(event.which == 13)
-  {
-    event.preventDefault();
-    var inputObj = $('#searchInput');
-    if (inputObj) {
-      var keyword = inputObj.val();
-      if (keyword) {
-        window.location = '/#/search?keywords=' + btoa(keyword) +
-            '&category=Datasets&source=default&page=1';
-      }
-    }
-  }
-});
-
 $.get('/api/v1/autocomplete/search', function(data){
 $('#searchInput').autocomplete({
-source: function(request, response) {
-  var result = [];
-  if (data && data.source && request.term)
-  {
-    result = sortAutocompleteResult(data.source, request.term);
-  }
-  return response(result);
-}
-});
+source: function( req, res ) {
+  var results = $.ui.autocomplete.filter(data.source, extractLast( req.term ));
+  res(results.slice(0,maxReturnedResults));
+},
+focus: function() {
+  return false;
+},
+select: function( event, ui ) {
+  var terms = split( this.value );
+  terms.pop();
+  terms.push( ui.item.value );
+  terms.push( "" );
+  this.value = terms.join( ", " );
+  return false;
+}
+
+});
+
 });
@ -296,7 +307,7 @@
 if (keyword)
 {
 window.location = '/#/search?keywords=' + btoa(keyword) +
-    '&category=Datasets&source=default&page=1';
+    '&category=' + window.g_currentCategory + '&source=default&page=1'
 }
 }
 });
@ -577,4 +588,4 @@
 }
 });
 
-})(jQuery)
+})(window, jQuery)
@ -669,4 +669,8 @@ div.commentsArea td, div.commentsArea th, div.commentsArea table{
 
 .wh-clickable-icon {
 cursor: pointer;
 }
+
+.keyword-search {
+  min-width: 500px;
+}
@ -176,4 +176,7 @@ public class Constant {
   public static final String DB_ID_KEY = "db.id";
   /** Property name of wherehows execution id for ETL process. */
   public static final String WH_EXEC_ID_KEY = "wh.exec.id";
+
+  public static final String WH_ELASTICSEARCH_URL_KEY = "wh.elasticsearch.url";
+  public static final String WH_ELASTICSEARCH_PORT_KEY = "wh.elasticsearch.port";
 }
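These two keys let the ETL layer locate the Elasticsearch cluster from its job properties; the consuming index-building ETL code is not among the hunks shown here. A small self-contained sketch of the intended lookup, with placeholder values:

    import java.util.Properties;

    public class EsEtlConfigSketch {
      public static void main(String[] args) {
        Properties prop = new Properties();
        prop.setProperty(Constant.WH_ELASTICSEARCH_URL_KEY, "localhost"); // placeholder host
        prop.setProperty(Constant.WH_ELASTICSEARCH_PORT_KEY, "9200");     // placeholder port
        System.out.println(prop.getProperty(Constant.WH_ELASTICSEARCH_URL_KEY)
            + ":" + prop.getProperty(Constant.WH_ELASTICSEARCH_PORT_KEY));
      }
    }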