mirror of https://github.com/datahub-project/datahub.git

rename hive dependency to hive_exec; reuse metadata-etl/extralibs; test travis ci;
parent 407adc8a8d
commit 1573fdb212

@@ -39,5 +39,7 @@ before_script:
   - mysql -u root -e 'create database if not exists wherehows'
   - mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'travis'@'localhost'"
   - cd data-model/DDL; mysql -u root -D wherehows < create_all_tables_wrapper.sql; cd ../..
+  - wget https://github.com/ericsun2/sandbox/raw/master/extralibs/extralibs.zip
+  - mkdir -p metadata-etl/extralibs; unzip extralibs.zip -d metadata-etl/extralibs
   - sleep 5
@@ -1,12 +1,15 @@
 apply plugin: 'scala'
 apply plugin: 'idea'
 
-def findPlay20(){
-  project.ext.playHome = "${System.env.PLAY_HOME}"
+def findPlayHome(){
+  project.ext.playHome = System.getenv()['PLAY_HOME']
+  if (null == project.ext.playHome) {
+    throw new GradleException('PLAY_HOME env variable not set!')
+  }
   project.ext.playExec = "${playHome}/play"
 }
 
-findPlay20()
+findPlayHome()
 
 repositories{
   mavenCentral()
@@ -22,6 +25,9 @@ repositories{
     url "https://repo.typesafe.com/typesafe/ivy-releases"
     layout "ivy"
   }
+
+  flatDir name: 'extralibs',
+          dirs: "${projectDir}/metadata-etl/extralibs"
 }
 
 configurations{
@ -2,6 +2,11 @@ name := "backend-service"
|
||||
|
||||
version := "1.0-SNAPSHOT"
|
||||
|
||||
unmanagedJars in Compile <++= baseDirectory map { base =>
|
||||
val dirs = (base / "metadata-etl/extralibs") +++ (base / "extralibs")
|
||||
(dirs ** "*.jar").classpath
|
||||
}
|
||||
|
||||
libraryDependencies ++= Seq(
|
||||
javaJdbc,
|
||||
javaEbean,
|
||||
|
build.gradle
@@ -25,6 +25,7 @@ subprojects {
   license {
     header licenseFile
     exclude "**/*.scala.html"
+    exclude "**/pyparsing.py"
   }
   repositories {
     flatDir {
@@ -53,22 +54,21 @@ subprojects {
     "testng" : "org.testng:testng:6.9.6",
     "hadoop_common" : "org.apache.hadoop:hadoop-common:2.7.1",
     "hadoop_client" : "org.apache.hadoop:hadoop-mapreduce-client-core:2.7.1",
-    "pig" : "org.apache.pig:pig:0.11.1",
+    "hadoop_auth" : "org.apache.hadoop:hadoop-auth:2.7.1",
+    "pig" : "org.apache.pig:pig:0.15.0",
+    "hive_exec" : "org.apache.hive:hive-exec:1.2.1",
     "avro" : "org.apache.avro:avro:1.7.7",
     "avro_mapred" : "org.apache.avro:avro-mapred:1.7.7",
     "joda" : "joda-time:joda-time:2.8.2",
     "jsch" : "com.jcraft:jsch:0.1.53",
-    "hive" : "org.apache.hive:hive-exec:1.2.1",
     "http_client" : "org.apache.httpcomponents:httpclient:4.5",
     "http_core" : "org.apache.httpcomponents:httpcore:4.4.1",
-    "hadoop_auth" : "org.apache.hadoop:hadoop-auth:2.7.1",
     "json_path" : "com.jayway.jsonpath:json-path:2.0.0",
-    "akka" : "com.typesafe.akka:akka-actor_2.10:2.2.0",
+    "akka" : "com.typesafe.akka:akka-actor_2.10:2.3.15",
     "jgit" : "org.eclipse.jgit:org.eclipse.jgit:4.1.1.201511131810-r",
     "jsoup" : "org.jsoup:jsoup:1.8.3",
     "commons_io" : "commons-io:commons-io:2.4",
 
     "jackson_databind" : "com.fasterxml.jackson.core:jackson-databind:2.6.1",
     "jackson_core" : "com.fasterxml.jackson.core:jackson-core:2.6.1",
     "jackson_annotations": "com.fasterxml.jackson.core:jackson-annotations:2.6.1",
@@ -83,8 +83,7 @@ subprojects {
     "play" : "com.typesafe.play:play_2.10:2.2.4",
     "play_ebean" : "com.typesafe.play:play-java-ebean_2.10:2.2.4",
     "play_java_jdbc" : "com.typesafe.play:play-java-jdbc_2.10:2.2.4",
-    "play_cache" : "com.typesafe.play:play-cache_2.10:2.2.4",
-    "hive_exec" : "org.apache.hive:hive-exec:1.2.1"
+    "play_cache" : "com.typesafe.play:play-cache_2.10:2.2.4"
   ]
 }
@@ -12,7 +12,7 @@ dependencies {
   //extraLibs files("extralibs/voldemort-0.91.li1.jar")
   extraLibs externalDependency.joda
   extraLibs externalDependency.avro_mapred
-  extraLibs externalDependency.hive
+  extraLibs externalDependency.hive_exec
   extraLibs externalDependency.pig
 
   compile project(":wherehows-common")
@@ -24,7 +24,7 @@ dependencies {
   compile externalDependency.avro
   compile externalDependency.avro_mapred
   compile externalDependency.joda
-  compile externalDependency.hive
+  compile externalDependency.hive_exec
   compile externalDependency.http_client
   compile externalDependency.http_core
@@ -23,12 +23,11 @@ dependencies {
   compile externalDependency.slf4j_api
   compile externalDependency.slf4j_log4j
   compile externalDependency.hive_exec
   compile files("extralibs/terajdbc4-15.00.00.20.jar")
   compile files("extralibs/tdgssconfig-15.00.00.20.jar")
-  compile externalDependency.jython
-  compile externalDependency.mysql
-  // compile files("extralibs/mysql-connector-java-5.1.36.jar")
-  // compile files("extralibs/jython-standalone-2.7.0.jar")
+  compile files("extralibs/mysql-connector-java-5.1.*.jar") // externalDependency.mysql
+  compile files("extralibs/jython-standalone-2.7.0.jar") // externalDependency.jython
+  compile fileTree(dir: 'extralibs', include: ['*.jar']) // externalDependency.oracle/teradata/gsp
   provided project(":hadoop-dataset-extractor-standalone")
   testCompile externalDependency.testng
 }
@@ -133,10 +133,12 @@ class HiveTransform:
       flds = {}
       field_detail_list = []
 
-      if TableInfo.schema_literal in table and table[TableInfo.schema_literal] .startswith('{'):
+      if TableInfo.schema_literal in table and \
+         table[TableInfo.schema_literal] is not None and \
+         table[TableInfo.schema_literal].startswith('{'):
         sort_id = 0
         urn = "hive:///%s/%s" % (one_db_info['database'], table['name'])
-        self.logger.info("Getting schema literal for: " % (urn))
+        self.logger.info("Getting schema literal for: %s" % (urn))
         try:
           schema_data = json.loads(table[TableInfo.schema_literal])
           schema_json = schema_data
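Aside on the guard added above: `TableInfo.schema_literal in table` alone does not rule out a stored value of None, and calling .startswith() on None raises AttributeError, which is exactly what the extra `is not None` clause prevents. A minimal standalone sketch of the same pattern, using a plain dict and a hypothetical 'schema_literal' key rather than the WhereHows TableInfo class:

# Standalone illustration of the membership + None guard (hypothetical data, not WhereHows code).
def has_json_schema(table, key='schema_literal'):
    # 'key in table' passes even when the stored value is None,
    # so an explicit None check is required before calling .startswith().
    return key in table and table[key] is not None and table[key].startswith('{')

print(has_json_schema({'schema_literal': '{"type": "record"}'}))  # True
print(has_json_schema({'schema_literal': None}))                  # False; without the guard this would raise AttributeError
print(has_json_schema({}))                                        # False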
@@ -153,7 +155,7 @@ class HiveTransform:
           uri = "dalids:///%s/%s" % (one_db_info['database'], table['name'])
         else:
           uri = "hive:///%s/%s" % (one_db_info['database'], table['name'])
-        self.logger.info("Getting column definition for: " % (uri))
+        self.logger.info("Getting column definition for: %s" % (uri))
         hcp = HiveColumnParser(table, urn = uri)
         schema_json = {'fields' : hcp.column_type_dict['fields'], 'type' : 'record', 'name' : table['name'], 'uri' : uri}
         field_detail_list += hcp.column_type_list
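Aside on the logging fix above (and the matching one in the earlier hunk): the old calls applied % to a format string that contains no conversion specifier, which raises TypeError ("not all arguments converted during string formatting") instead of logging the identifier. A short illustration with a hypothetical URI value:

# Hypothetical URI, for illustration only.
uri = "hive:///example_db/example_table"

try:
    # Old form: the format string has no %s, so the % operator raises TypeError.
    message = "Getting column definition for: " % (uri)
except TypeError as err:
    print("old form raises:", err)

# Fixed form: the %s placeholder consumes the argument as intended.
print("Getting column definition for: %s" % (uri))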
@@ -2,12 +2,15 @@ apply plugin: 'java'
 apply plugin: 'scala'
 apply plugin: 'idea'
 
-def findPlay20(){
-  project.ext.playHome = "${System.env.PLAY_HOME}"
+def findPlayHome(){
+  project.ext.playHome = System.getenv()['PLAY_HOME']
+  if (null == project.ext.playHome) {
+    throw new GradleException('PLAY_HOME env variable not set!')
+  }
   project.ext.playExec = "${playHome}/play"
 }
 
-findPlay20()
+findPlayHome()
 
 repositories{
   mavenCentral()