Remove deprecated code (#666)

Mars Lan, 2017-08-11 16:45:50 -07:00, committed by GitHub
parent c34193371b
commit 0637578aa6
21 changed files with 67 additions and 209 deletions

View File: EtlJob.java

@@ -45,30 +45,6 @@ public abstract class EtlJob {
  // default location of local test configuration file
  private final static String DEFAULT_CONFIG_FILE_LOCATION = System.getProperty("user.home") + "/.wherehows/local_test.properties";

-  /**
-   * Constructor for using config file
-   * @param appId
-   * @param whExecId generated by backend service
-   */
-  @Deprecated
-  public EtlJob(Integer appId, Integer dbId, long whExecId) {
-    this(appId, dbId, whExecId, DEFAULT_CONFIG_FILE_LOCATION);
-  }

-  /**
-   * Private constructor for using config file
-   * @param appId
-   * @param dbId
-   * @param whExecId
-   * @param configFile
-   */
-  @Deprecated
-  public EtlJob(Integer appId, Integer dbId, long whExecId, String configFile) {
-    PySystemState sys = configFromFile(appId, dbId, whExecId, configFile);
-    addJythonToPath(sys);
-    interpreter = new PythonInterpreter(null, sys);
-  }

  /**
   * Used by backend service
   * @param appId nullable
@@ -94,39 +70,6 @@ public abstract class EtlJob {
    }
  }

-  @Deprecated
-  private PySystemState configFromFile(Integer appId, Integer dbId, long whExecId, String configFile) {
-    prop = new Properties();
-    if (appId != null) {
-      prop.setProperty(Constant.APP_ID_KEY, String.valueOf(appId));
-    }
-    if (dbId != null) {
-      prop.setProperty(Constant.DB_ID_KEY, String.valueOf(dbId));
-    }
-    prop.setProperty(Constant.WH_EXEC_ID_KEY, String.valueOf(whExecId));
-    try {
-      InputStream propFile = new FileInputStream(configFile);
-      prop.load(propFile);
-      propFile.close();
-    } catch (IOException e) {
-      logger.error("property file '{}' not found", configFile);
-      e.printStackTrace();
-    }
-    PyDictionary config = new PyDictionary();
-    for (String key : prop.stringPropertyNames()) {
-      String value = prop.getProperty(key);
-      config.put(new PyString(key), new PyString(value));
-    }
-    PySystemState sys = new PySystemState();
-    sys.argv.append(config);
-    return sys;
-  }

  /**
   * Copy all properties into jython envirenment
   * @param appId
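
Migration sketch (hypothetical, not part of this commit): code that relied on the removed file-based constructors can load the same local_test.properties itself and pass the result to a surviving Properties-based constructor such as HiveMetadataEtl(int dbId, long whExecId, Properties prop). The helper below only mirrors what the deleted configFromFile logic read from disk; the class name and placement are assumptions.

package metadata.etl;  // hypothetical placement alongside EtlJob

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

// Hypothetical helper, not part of this commit: read the developer's
// local_test.properties (the file the removed constructors defaulted to)
// into a Properties object for the surviving constructors.
public final class LocalTestConfig {

  private static final String LOCATION =
      System.getProperty("user.home") + "/.wherehows/local_test.properties";

  public static Properties load() {
    Properties prop = new Properties();
    try (InputStream in = new FileInputStream(LOCATION)) {
      prop.load(in);
    } catch (IOException e) {
      // no local config file: fall back to an empty Properties,
      // which is what the updated tests in this commit pass explicitly
    }
    return prop;
  }

  // usage, replacing e.g. `new HiveMetadataEtl(dbId, whExecId)`:
  //   EtlJob job = new HiveMetadataEtl(dbId, whExecId, LocalTestConfig.load());
}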

View File: HdfsMetadataEtl.java

@@ -45,16 +45,6 @@ import wherehows.common.Constant;
 */
public class HdfsMetadataEtl extends EtlJob {

-  /**
-   * Constructor used in test
-   * @param dbId
-   * @param whExecId
-   */
-  @Deprecated
-  public HdfsMetadataEtl(Integer dbId, Long whExecId) {
-    super(null, dbId, whExecId);
-  }

  /**
   * Copy the jar to remote gateway, run the collecting job on remote, copy back the result.
   * @param dbId the database need to collect

View File: HiveMetadataEtl.java

@@ -24,16 +24,10 @@ import wherehows.common.Constant;
 */
public class HiveMetadataEtl extends EtlJob {

-  @Deprecated
-  public HiveMetadataEtl(int dbId, long whExecId) {
-    super(null, dbId, whExecId);
-  }

  public HiveMetadataEtl(int dbId, long whExecId, Properties prop) {
    super(null, dbId, whExecId, prop);
  }

  @Override
  public void extract()
      throws Exception {

View File: GitMetadataEtl.java

@@ -36,10 +36,6 @@ public class GitMetadataEtl extends EtlJob {
  protected final Logger logger = LoggerFactory.getLogger(getClass());

  public static final String COMMIT_OUTPUT_FILE = "commit.csv";

-  public GitMetadataEtl(int appId, long whExecId) {
-    super(appId, null, whExecId);
-  }

  public GitMetadataEtl(int appId, long whExecId, Properties prop) {
    super(appId, null, whExecId, prop);
  }

View File: AzLineageMetadataEtl.java

@@ -31,20 +31,6 @@ public class AzLineageMetadataEtl extends EtlJob {
  public Long endTimeStamp = null;
  Connection conn;

-  /**
-   * Assume that all job running on one Azkaban instance will running on the same Hadoop instance
-   * @param azkabanInstanceId
-   */
-  public AzLineageMetadataEtl(int azkabanInstanceId) {
-    super(azkabanInstanceId, null, 0L);
-    // for default
-    try {
-      setUp();
-    } catch (SQLException e) {
-      e.printStackTrace();
-    }
-  }

  /**
   * Used by backend server
   * @param appId The application id for the target azkaban server

View File: AppworxLineageEtl.java

@@ -26,15 +26,6 @@ public class AppworxLineageEtl extends EtlJob {
  Connection conn;

-  public AppworxLineageEtl(int appId, long whExecId) {
-    super(appId, null, whExecId);
-    try {
-      setUp();
-    } catch (SQLException e) {
-      e.printStackTrace();
-    }
-  }

  public AppworxLineageEtl(int appId, long whExecId, Properties properties) {
    super(appId, null, whExecId, properties);
    try {

View File: DatasetDescriptionEtl.java

@@ -29,10 +29,6 @@ import wherehows.common.Constant;
public class DatasetDescriptionEtl extends EtlJob {

-  @Deprecated
-  public DatasetDescriptionEtl(int dbId, long whExecId) {
-    super(null, dbId, whExecId);
-  }

  public DatasetDescriptionEtl(int dbId, long whExecId, Properties prop) {
    super(null, dbId, whExecId, prop);

View File: DaliViewOwnerEtl.java

@@ -27,10 +27,6 @@ public class DaliViewOwnerEtl extends EtlJob {
  public ClassLoader classLoader = getClass().getClassLoader();
  protected final Logger logger = LoggerFactory.getLogger(getClass());

-  public DaliViewOwnerEtl(int dbId, long whExecId) {
-    super(null, dbId, whExecId);
-  }

  public DaliViewOwnerEtl(int appId, long whExecId, Properties prop) {
    super(appId, null, whExecId, prop);
  }

View File: DatasetOwnerEtl.java

@@ -39,10 +39,6 @@ import wherehows.common.Constant;
 * This DatasetOwnerEtl job is extract ownership info from the hive table, transform and store into WhereHows database.
 */
public class DatasetOwnerEtl extends EtlJob {

-  @Deprecated
-  public DatasetOwnerEtl(int dbId, long whExecId) {
-    super(null, dbId, whExecId);
-  }

  public DatasetOwnerEtl(int dbId, long whExecId, Properties prop) {
    super(null, dbId, whExecId, prop);

View File: DatasetConfidentialFieldEtl.java

@@ -29,10 +29,6 @@ import wherehows.common.Constant;
public class DatasetConfidentialFieldEtl extends EtlJob {

-  @Deprecated
-  public DatasetConfidentialFieldEtl(int dbId, long whExecId) {
-    super(null, dbId, whExecId);
-  }

  public DatasetConfidentialFieldEtl(int dbId, long whExecId, Properties prop) {
    super(null, dbId, whExecId, prop);

View File: DatasetTreeBuildETL.java

@@ -20,10 +20,6 @@ import metadata.etl.EtlJob;
public class DatasetTreeBuildETL extends EtlJob {

-  public DatasetTreeBuildETL(int appId, long whExecId) {
-    super(appId, null, whExecId);
-  }

  public DatasetTreeBuildETL(int appId, long whExecId, Properties properties) {
    super(appId, null, whExecId, properties);
  }

View File: HdfsMetadataEtlTest.java

@@ -13,6 +13,7 @@
 */
package metadata.etl.dataset.hdfs;

+import java.util.Properties;

import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -24,34 +25,29 @@ public class HdfsMetadataEtlTest {
  HdfsMetadataEtl ds;

  @BeforeTest
-  public void setUp()
-      throws Exception {
-    ds = new HdfsMetadataEtl(2, 0L);
+  public void setUp() throws Exception {
+    ds = new HdfsMetadataEtl(2, 0L, new Properties());
  }

  @Test(groups = {"needConfig"})
-  public void testRun()
-      throws Exception {
+  public void testRun() throws Exception {
    ds.run();
  }

  @Test(groups = {"needConfig"})
-  public void testExtract()
-      throws Exception {
+  public void testExtract() throws Exception {
    ds.extract();
    //TODO check it copy back the files
  }

  @Test(groups = {"needConfig"})
-  public void testTransform()
-      throws Exception {
+  public void testTransform() throws Exception {
    ds.transform();
    //TODO check it generate the final csv file
  }

  @Test(groups = {"needConfig"})
-  public void testLoad()
-      throws Exception {
+  public void testLoad() throws Exception {
    ds.load();
  }
}
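
The updated tests wire in an empty new Properties(), so the "needConfig" groups still depend on real configuration being supplied at run time. A minimal sketch, assuming the local_test.properties convention kept in EtlJob, of a setup that loads that file and skips the config-dependent tests when it is missing (class name, path handling, and skip behaviour are illustrative assumptions, not part of this commit):

package metadata.etl.dataset.hdfs;  // hypothetical test class in the same package

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Properties;

import org.testng.SkipException;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

public class HdfsMetadataEtlLocalConfigTest {

  HdfsMetadataEtl ds;

  @BeforeTest
  public void setUp() throws Exception {
    // Load the developer's local config; skip the config-dependent tests
    // instead of failing when the file is absent.
    File configFile = new File(System.getProperty("user.home"), ".wherehows/local_test.properties");
    if (!configFile.exists()) {
      throw new SkipException("no local_test.properties found; skipping needConfig tests");
    }
    Properties prop = new Properties();
    try (InputStream in = new FileInputStream(configFile)) {
      prop.load(in);
    }
    ds = new HdfsMetadataEtl(2, 0L, prop);
  }

  @Test(groups = {"needConfig"})
  public void testExtract() throws Exception {
    ds.extract();
  }
}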

View File: HiveTest.java

@@ -13,6 +13,7 @@
 */
package metadata.etl.dataset.hive;

+import java.util.Properties;

import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -24,38 +25,32 @@ public class HiveTest {
  HiveMetadataEtl hm;

  @BeforeTest
-  public void setUp()
-      throws Exception {
-    hm = new HiveMetadataEtl(3, 0L);
+  public void setUp() throws Exception {
+    hm = new HiveMetadataEtl(3, 0L, new Properties());
  }

  @Test
-  public void extractTest()
-      throws Exception {
+  public void extractTest() throws Exception {
    hm.extract();
    // check the json file
  }

  @Test
-  public void transformTest()
-      throws Exception {
+  public void transformTest() throws Exception {
    hm.transform();
    // check the csv file
  }

  @Test
-  public void loadTest()
-      throws Exception {
+  public void loadTest() throws Exception {
    hm.load();
    // check in database
  }

  @Test
-  public void runTest()
-      throws Exception {
+  public void runTest() throws Exception {
    extractTest();
    transformTest();
    loadTest();
  }
}

View File: GitMetadataEtlTest.java

@@ -13,6 +13,7 @@
 */
package metadata.etl.git;

+import java.util.Properties;

import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@@ -24,32 +25,27 @@ public class GitMetadataEtlTest {
  GitMetadataEtl git;

  @BeforeMethod
-  public void setUp()
-      throws Exception {
-    this.git = new GitMetadataEtl(500, 0L);
+  public void setUp() throws Exception {
+    this.git = new GitMetadataEtl(500, 0L, new Properties());
  }

  @Test
-  public void testExtract()
-      throws Exception {
+  public void testExtract() throws Exception {
    git.extract();
  }

  @Test
-  public void testTransform()
-      throws Exception {
+  public void testTransform() throws Exception {
    git.transform();
  }

  @Test
-  public void testLoad()
-      throws Exception {
+  public void testLoad() throws Exception {
    git.load();
  }

  @Test
-  public void testRun()
-      throws Exception {
+  public void testRun() throws Exception {
    git.run();
  }
}

View File: LineageTest.java

@@ -28,7 +28,7 @@ public class LineageTest {
  AzLineageMetadataEtl lm;

  public LineageTest() {
-    lm = new AzLineageMetadataEtl(31);
+    lm = new AzLineageMetadataEtl(31, 0L, properties);
    properties = lm.prop;
  }

View File: AppworxLineageEtlTest.java

@@ -13,20 +13,19 @@
 */
package metadata.etl.lineage.appworx;

+import java.util.Properties;

import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

-public class AppworxLineageEtlTest{
+public class AppworxLineageEtlTest {

  AppworxLineageEtl awl;

  @BeforeTest
-  public void setUp()
-      throws Exception {
-    awl = new AppworxLineageEtl(3, 0L);
+  public void setUp() throws Exception {
+    awl = new AppworxLineageEtl(3, 0L, new Properties());
  }

  @Test(groups = {"needConfig"})
  public void testExtract() throws Exception {
    awl.extract();
@@ -46,5 +45,4 @@ public class AppworxLineageEtlTest {
  public void testRun() throws Exception {
    awl.run();
  }
}

View File: DatasetDescriptionEtlTest.java

@@ -13,6 +13,7 @@
 */
package metadata.etl.metadata;

+import java.util.Properties;

import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -22,9 +23,8 @@ public class DatasetDescriptionEtlTest {
  DatasetDescriptionEtl datasetDescriptionEtl;

  @BeforeTest
-  public void setUp()
-      throws Exception {
-    datasetDescriptionEtl = new DatasetDescriptionEtl(50, 0L);
+  public void setUp() throws Exception {
+    datasetDescriptionEtl = new DatasetDescriptionEtl(50, 0L, new Properties());
  }

  /*

View File: DaliViewOwnerEtlTest.java

@@ -13,6 +13,7 @@
 */
package metadata.etl.ownership;

+import java.util.Properties;

import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@@ -25,32 +26,27 @@ public class DaliViewOwnerEtlTest {
  DaliViewOwnerEtl dali;

  @BeforeMethod
-  public void setUp()
-      throws Exception {
-    dali = new DaliViewOwnerEtl(60, 0L);
+  public void setUp() throws Exception {
+    dali = new DaliViewOwnerEtl(60, 0L, new Properties());
  }

  @Test
-  public void testExtract()
-      throws Exception {
+  public void testExtract() throws Exception {
    dali.extract();
  }

  @Test
-  public void testTransform()
-      throws Exception {
+  public void testTransform() throws Exception {
    dali.transform();
  }

  @Test
-  public void testLoad()
-      throws Exception {
+  public void testLoad() throws Exception {
    dali.load();
  }

  @Test
-  public void testRun()
-      throws Exception {
+  public void testRun() throws Exception {
    dali.run();
  }
}

View File: DatasetOwnerEtlTest.java

@@ -13,6 +13,7 @@
 */
package metadata.etl.ownership;

+import java.util.Properties;

import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -25,14 +26,12 @@ public class DatasetOwnerEtlTest {
  DatasetOwnerEtl doe;

  @BeforeTest
-  public void setUp()
-      throws Exception {
-    doe = new DatasetOwnerEtl(21, 0L);
+  public void setUp() throws Exception {
+    doe = new DatasetOwnerEtl(21, 0L, new Properties());
  }

  @Test(groups = {"needConfig"})
-  public void testExtract()
-      throws Exception {
+  public void testExtract() throws Exception {
    doe.extract();
  }

View File: DatasetConfidentialFieldEtlTest.java

@@ -13,6 +13,7 @@
 */
package metadata.etl.security;

+import java.util.Properties;

import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
@@ -22,14 +23,12 @@ public class DatasetConfidentialFieldEtlTest {
  DatasetConfidentialFieldEtl confidentialFieldEtl;

  @BeforeTest
-  public void setUp()
-      throws Exception {
-    confidentialFieldEtl = new DatasetConfidentialFieldEtl(49, 0L);
+  public void setUp() throws Exception {
+    confidentialFieldEtl = new DatasetConfidentialFieldEtl(49, 0L, new Properties());
  }

  @Test(groups = {"needConfig"})
-  public void testExtract()
-      throws Exception {
+  public void testExtract() throws Exception {
    confidentialFieldEtl.extract();
  }

View File: DatasetTreeBuildETLTest.java

@@ -13,18 +13,17 @@
 */
package metadata.etl.treebuilder;

+import java.util.Properties;

import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;

-public class DatasetTreeBuildETLTest{
+public class DatasetTreeBuildETLTest {

  DatasetTreeBuildETL datasetTreeBuildETL;

  @BeforeTest
-  public void setUp()
-      throws Exception {
-    datasetTreeBuildETL = new DatasetTreeBuildETL(40, 0L);
+  public void setUp() throws Exception {
+    datasetTreeBuildETL = new DatasetTreeBuildETL(40, 0L, new Properties());
  }

  @Test(groups = {"needConfig"})