Remove deprecated code (#666)

Mars Lan 2017-08-11 16:45:50 -07:00 committed by GitHub
parent c34193371b
commit 0637578aa6
21 changed files with 67 additions and 209 deletions
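The change is mechanical: every deprecated constructor that loaded its configuration from a local file (by default ~/.wherehows/local_test.properties) is deleted, and all callers, including the tests below, move to the surviving constructors that take an explicit java.util.Properties. A minimal sketch of the post-commit call pattern, assuming only the Properties-based signatures visible in the hunks below:

// Hedged sketch, not part of this commit: build the configuration in code
// instead of relying on the removed config-file constructors.
Properties prop = new Properties();
prop.setProperty(Constant.WH_EXEC_ID_KEY, String.valueOf(0L)); // key from wherehows.common.Constant
EtlJob job = new HiveMetadataEtl(3, 0L, prop); // dbId = 3, whExecId = 0
job.run(); // extract, transform, load, as the tests below exercise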

View File

@@ -45,30 +45,6 @@ public abstract class EtlJob {
// default location of local test configuration file
private final static String DEFAULT_CONFIG_FILE_LOCATION = System.getProperty("user.home") + "/.wherehows/local_test.properties";
/**
* Constructor for using config file
* @param appId
* @param whExecId generated by backend service
*/
@Deprecated
public EtlJob(Integer appId, Integer dbId, long whExecId) {
this(appId, dbId, whExecId, DEFAULT_CONFIG_FILE_LOCATION);
}
/**
* Private constructor for using config file
* @param appId
* @param dbId
* @param whExecId
* @param configFile
*/
@Deprecated
public EtlJob(Integer appId, Integer dbId, long whExecId, String configFile) {
PySystemState sys = configFromFile(appId, dbId, whExecId, configFile);
addJythonToPath(sys);
interpreter = new PythonInterpreter(null, sys);
}
/**
* Used by backend service
* @param appId nullable
@@ -94,39 +70,6 @@ public abstract class EtlJob {
}
}
@Deprecated
private PySystemState configFromFile(Integer appId, Integer dbId, long whExecId, String configFile) {
prop = new Properties();
if (appId != null) {
prop.setProperty(Constant.APP_ID_KEY, String.valueOf(appId));
}
if (dbId != null) {
prop.setProperty(Constant.DB_ID_KEY, String.valueOf(dbId));
}
prop.setProperty(Constant.WH_EXEC_ID_KEY, String.valueOf(whExecId));
try {
InputStream propFile = new FileInputStream(configFile);
prop.load(propFile);
propFile.close();
} catch (IOException e) {
logger.error("property file '{}' not found", configFile);
e.printStackTrace();
}
PyDictionary config = new PyDictionary();
for (String key : prop.stringPropertyNames()) {
String value = prop.getProperty(key);
config.put(new PyString(key), new PyString(value));
}
PySystemState sys = new PySystemState();
sys.argv.append(config);
return sys;
}
/**
* Copy all properties into the Jython environment
* @param appId
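The deleted configFromFile() above also documents the hand-off that the surviving Javadoc ("Copy all properties into the Jython environment") refers to: each property is wrapped in a PyString and collected into a PyDictionary, which is appended to the interpreter's sys.argv. A standalone sketch of that mechanism under the Jython 2.x API, with configFromProps as a hypothetical name for the file-free equivalent:

import java.util.Properties;
import org.python.core.PyDictionary;
import org.python.core.PyString;
import org.python.core.PySystemState;

// Hypothetical helper mirroring the deleted configFromFile() minus the
// file I/O: every property becomes a PyString entry in a PyDictionary
// that Jython scripts can read back from sys.argv.
static PySystemState configFromProps(Properties prop) {
  PyDictionary config = new PyDictionary();
  for (String key : prop.stringPropertyNames()) {
    config.put(new PyString(key), new PyString(prop.getProperty(key)));
  }
  PySystemState sys = new PySystemState();
  sys.argv.append(config); // appended last, so scripts read sys.argv[-1]
  return sys;
}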

View File

@@ -45,16 +45,6 @@ import wherehows.common.Constant;
*/
public class HdfsMetadataEtl extends EtlJob {
/**
* Constructor used in test
* @param dbId
* @param whExecId
*/
@Deprecated
public HdfsMetadataEtl(Integer dbId, Long whExecId) {
super(null, dbId, whExecId);
}
/**
* Copy the jar to the remote gateway, run the collecting job remotely, and copy back the result.
* @param dbId the database to collect from

View File

@@ -24,16 +24,10 @@ import wherehows.common.Constant;
*/
public class HiveMetadataEtl extends EtlJob {
@Deprecated
public HiveMetadataEtl(int dbId, long whExecId) {
super(null, dbId, whExecId);
}
public HiveMetadataEtl(int dbId, long whExecId, Properties prop) {
super(null, dbId, whExecId, prop);
}
@Override
public void extract()
throws Exception {

View File

@@ -36,10 +36,6 @@ public class GitMetadataEtl extends EtlJob {
protected final Logger logger = LoggerFactory.getLogger(getClass());
public static final String COMMIT_OUTPUT_FILE = "commit.csv";
public GitMetadataEtl(int appId, long whExecId) {
super(appId, null, whExecId);
}
public GitMetadataEtl(int appId, long whExecId, Properties prop) {
super(appId, null, whExecId, prop);
}

View File

@@ -31,20 +31,6 @@ public class AzLineageMetadataEtl extends EtlJob {
public Long endTimeStamp = null;
Connection conn;
/**
* Assume that all jobs running on one Azkaban instance will run on the same Hadoop instance
* @param azkabanInstanceId
*/
public AzLineageMetadataEtl(int azkabanInstanceId) {
super(azkabanInstanceId, null, 0L);
// for default
try {
setUp();
} catch (SQLException e) {
e.printStackTrace();
}
}
/**
* Used by backend server
* @param appId The application id for the target azkaban server

View File

@@ -26,15 +26,6 @@ public class AppworxLineageEtl extends EtlJob {
Connection conn;
public AppworxLineageEtl(int appId, long whExecId) {
super(appId, null, whExecId);
try {
setUp();
} catch (SQLException e) {
e.printStackTrace();
}
}
public AppworxLineageEtl(int appId, long whExecId, Properties properties) {
super(appId, null, whExecId, properties);
try {

View File

@@ -29,10 +29,6 @@ import wherehows.common.Constant;
public class DatasetDescriptionEtl extends EtlJob {
@Deprecated
public DatasetDescriptionEtl(int dbId, long whExecId) {
super(null, dbId, whExecId);
}
public DatasetDescriptionEtl(int dbId, long whExecId, Properties prop) {
super(null, dbId, whExecId, prop);

View File

@@ -27,10 +27,6 @@ public class DaliViewOwnerEtl extends EtlJob {
public ClassLoader classLoader = getClass().getClassLoader();
protected final Logger logger = LoggerFactory.getLogger(getClass());
public DaliViewOwnerEtl(int dbId, long whExecId) {
super(null, dbId, whExecId);
}
public DaliViewOwnerEtl(int appId, long whExecId, Properties prop) {
super(appId, null, whExecId, prop);
}

View File

@@ -39,10 +39,6 @@ import wherehows.common.Constant;
* This DatasetOwnerEtl job extracts ownership info from the Hive table, transforms it, and stores it in the WhereHows database.
*/
public class DatasetOwnerEtl extends EtlJob {
@Deprecated
public DatasetOwnerEtl(int dbId, long whExecId) {
super(null, dbId, whExecId);
}
public DatasetOwnerEtl(int dbId, long whExecId, Properties prop) {
super(null, dbId, whExecId, prop);

View File

@@ -29,10 +29,6 @@ import wherehows.common.Constant;
public class DatasetConfidentialFieldEtl extends EtlJob {
@Deprecated
public DatasetConfidentialFieldEtl(int dbId, long whExecId) {
super(null, dbId, whExecId);
}
public DatasetConfidentialFieldEtl(int dbId, long whExecId, Properties prop) {
super(null, dbId, whExecId, prop);

View File

@@ -20,10 +20,6 @@ import metadata.etl.EtlJob;
public class DatasetTreeBuildETL extends EtlJob {
public DatasetTreeBuildETL(int appId, long whExecId) {
super(appId, null, whExecId);
}
public DatasetTreeBuildETL(int appId, long whExecId, Properties properties) {
super(appId, null, whExecId, properties);
}

View File

@@ -13,6 +13,7 @@
*/
package metadata.etl.dataset.hdfs;
import java.util.Properties;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -24,34 +25,29 @@ public class HdfsMetadataEtlTest {
HdfsMetadataEtl ds;
@BeforeTest
public void setUp()
throws Exception {
ds = new HdfsMetadataEtl(2, 0L);
public void setUp() throws Exception {
ds = new HdfsMetadataEtl(2, 0L, new Properties());
}
@Test(groups = {"needConfig"})
public void testRun()
throws Exception {
public void testRun() throws Exception {
ds.run();
}
@Test(groups = {"needConfig"})
public void testExtract()
throws Exception {
public void testExtract() throws Exception {
ds.extract();
// TODO check that it copies the files back
}
@Test(groups = {"needConfig"})
public void testTransform()
throws Exception {
public void testTransform() throws Exception {
ds.transform();
// TODO check that it generates the final csv file
}
@Test(groups = {"needConfig"})
public void testLoad()
throws Exception {
public void testLoad() throws Exception {
ds.load();
}
}
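Note that the tests now pass an empty Properties, which is enough to construct the job but presumably not to run it against a real cluster. A hedged sketch of a configured call, reusing only the Constant.* keys that the removed configFromFile() used to set (whether the Properties-based constructor fills these in itself is not visible in this diff):

Properties prop = new Properties();
prop.setProperty(Constant.DB_ID_KEY, "2"); // keys from wherehows.common.Constant,
prop.setProperty(Constant.WH_EXEC_ID_KEY, "0"); // as set by the removed configFromFile()
HdfsMetadataEtl ds = new HdfsMetadataEtl(2, 0L, prop);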

View File

@@ -13,6 +13,7 @@
*/
package metadata.etl.dataset.hive;
import java.util.Properties;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -24,38 +25,32 @@ public class HiveTest {
HiveMetadataEtl hm;
@BeforeTest
public void setUp()
throws Exception {
hm = new HiveMetadataEtl(3, 0L);
public void setUp() throws Exception {
hm = new HiveMetadataEtl(3, 0L, new Properties());
}
@Test
public void extractTest()
throws Exception {
public void extractTest() throws Exception {
hm.extract();
// check the json file
}
@Test
public void transformTest()
throws Exception {
public void transformTest() throws Exception {
hm.transform();
// check the csv file
}
@Test
public void loadTest()
throws Exception {
public void loadTest() throws Exception {
hm.load();
// check in database
}
@Test
public void runTest()
throws Exception {
public void runTest() throws Exception {
extractTest();
transformTest();
loadTest();
}
}

View File

@@ -13,6 +13,7 @@
*/
package metadata.etl.git;
import java.util.Properties;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@@ -24,32 +25,27 @@ public class GitMetadataEtlTest {
GitMetadataEtl git;
@BeforeMethod
public void setUp()
throws Exception {
this.git = new GitMetadataEtl(500, 0L);
public void setUp() throws Exception {
this.git = new GitMetadataEtl(500, 0L, new Properties());
}
@Test
public void testExtract()
throws Exception {
public void testExtract() throws Exception {
git.extract();
}
@Test
public void testTransform()
throws Exception {
public void testTransform() throws Exception {
git.transform();
}
@Test
public void testLoad()
throws Exception {
public void testLoad() throws Exception {
git.load();
}
@Test
public void testRun()
throws Exception {
public void testRun() throws Exception {
git.run();
}
}

View File

@@ -28,7 +28,7 @@ public class LineageTest {
AzLineageMetadataEtl lm;
public LineageTest() {
lm = new AzLineageMetadataEtl(31);
lm = new AzLineageMetadataEtl(31, 0L, properties);
properties = lm.prop;
}

View File

@@ -13,38 +13,36 @@
*/
package metadata.etl.lineage.appworx;
import java.util.Properties;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
public class AppworxLineageEtlTest{
AppworxLineageEtl awl;
public class AppworxLineageEtlTest {
AppworxLineageEtl awl;
@BeforeTest
public void setUp()
throws Exception {
awl = new AppworxLineageEtl(3, 0L);
}
@BeforeTest
public void setUp() throws Exception {
awl = new AppworxLineageEtl(3, 0L, new Properties());
}
@Test(groups = {"needConfig"})
public void testExtract() throws Exception {
awl.extract();
}
@Test(groups = {"needConfig"})
public void testExtract() throws Exception {
awl.extract();
}
@Test(groups = {"needConfig"})
public void testTransform() throws Exception {
awl.transform();
}
@Test(groups = {"needConfig"})
public void testTransform() throws Exception {
awl.transform();
}
@Test(groups = {"needConfig"})
public void testLoad() throws Exception {
awl.load();
}
@Test(groups = {"needConfig"})
public void testRun() throws Exception {
awl.run();
}
@Test(groups = {"needConfig"})
public void testLoad() throws Exception {
awl.load();
}
@Test(groups = {"needConfig"})
public void testRun() throws Exception {
awl.run();
}
}

View File

@@ -13,6 +13,7 @@
*/
package metadata.etl.metadata;
import java.util.Properties;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -22,9 +23,8 @@ public class DatasetDescriptionEtlTest {
DatasetDescriptionEtl datasetDescriptionEtl;
@BeforeTest
public void setUp()
throws Exception {
datasetDescriptionEtl = new DatasetDescriptionEtl(50, 0L);
public void setUp() throws Exception {
datasetDescriptionEtl = new DatasetDescriptionEtl(50, 0L, new Properties());
}
/*

View File

@@ -13,6 +13,7 @@
*/
package metadata.etl.ownership;
import java.util.Properties;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@@ -25,32 +26,27 @@ public class DaliViewOwnerEtlTest {
DaliViewOwnerEtl dali;
@BeforeMethod
public void setUp()
throws Exception {
dali = new DaliViewOwnerEtl(60, 0L);
public void setUp() throws Exception {
dali = new DaliViewOwnerEtl(60, 0L, new Properties());
}
@Test
public void testExtract()
throws Exception {
public void testExtract() throws Exception {
dali.extract();
}
@Test
public void testTransform()
throws Exception {
public void testTransform() throws Exception {
dali.transform();
}
@Test
public void testLoad()
throws Exception {
public void testLoad() throws Exception {
dali.load();
}
@Test
public void testRun()
throws Exception {
public void testRun() throws Exception {
dali.run();
}
}

View File

@@ -13,6 +13,7 @@
*/
package metadata.etl.ownership;
import java.util.Properties;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -25,14 +26,12 @@ public class DatasetOwnerEtlTest {
DatasetOwnerEtl doe;
@BeforeTest
public void setUp()
throws Exception {
doe = new DatasetOwnerEtl(21, 0L);
public void setUp() throws Exception {
doe = new DatasetOwnerEtl(21, 0L, new Properties());
}
@Test(groups = {"needConfig"})
public void testExtract()
throws Exception {
public void testExtract() throws Exception {
doe.extract();
}

View File

@@ -13,6 +13,7 @@
*/
package metadata.etl.security;
import java.util.Properties;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -22,14 +23,12 @@ public class DatasetConfidentialFieldEtlTest {
DatasetConfidentialFieldEtl confidentialFieldEtl;
@BeforeTest
public void setUp()
throws Exception {
confidentialFieldEtl = new DatasetConfidentialFieldEtl(49, 0L);
public void setUp() throws Exception {
confidentialFieldEtl = new DatasetConfidentialFieldEtl(49, 0L, new Properties());
}
@Test(groups = {"needConfig"})
public void testExtract()
throws Exception {
public void testExtract() throws Exception {
confidentialFieldEtl.extract();
}

View File

@@ -13,18 +13,17 @@
*/
package metadata.etl.treebuilder;
import java.util.Properties;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
public class DatasetTreeBuildETLTest{
public class DatasetTreeBuildETLTest {
DatasetTreeBuildETL datasetTreeBuildETL;
@BeforeTest
public void setUp()
throws Exception {
datasetTreeBuildETL = new DatasetTreeBuildETL(40, 0L);
public void setUp() throws Exception {
datasetTreeBuildETL = new DatasetTreeBuildETL(40, 0L, new Properties());
}
@Test(groups = {"needConfig"})