This article collects typical usage examples of the Java class wherehows.common.LineageCombiner. If you are unsure what LineageCombiner does or how to use it, the curated examples below should help.
The LineageCombiner class belongs to the wherehows.common package. Four code examples are shown below, ordered by popularity by default. You can upvote the examples you find useful; your feedback helps surface better Java samples.
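Before the individual examples, here is a minimal sketch of the call pattern they all share: construct a LineageCombiner over a JDBC Connection, feed it lineage records with addAll, and read the normalized, merged result back with getCombinedLineage. This is only an illustrative sketch; the JDBC URL and credentials are placeholders, and the package of LineageRecord is an assumption since the examples only use its simple name.

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Collections;
import java.util.List;

import wherehows.common.LineageCombiner;
import wherehows.common.schemas.LineageRecord;  // package path assumed; only the simple name appears in the examples below

public class LineageCombinerSketch {

  public static void main(String[] args) throws Exception {
    // Placeholder JDBC URL and credentials; point these at a real WhereHows database.
    Connection conn = DriverManager.getConnection(
        "jdbc:mysql://localhost:3306/wherehows", "wherehows", "wherehows");

    LineageCombiner combiner = new LineageCombiner(conn);

    // Raw lineage records, however they were produced (Hadoop conf parsing, log parsing, ...).
    List<LineageRecord> rawLineage = Collections.emptyList();
    combiner.addAll(rawLineage);

    // The combiner normalizes paths and merges duplicate records.
    List<LineageRecord> combined = combiner.getCombinedLineage();
    combined.forEach(System.out::println);
  }
}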
Example 1: setUp
import wherehows.common.LineageCombiner; // import the required package/class
@BeforeTest
public void setUp()
    throws SQLException, IOException {
  InputStream inputStream = this.getClass().getResourceAsStream(TEST_PROP_FILE_NAME);
  testProp = new Properties();
  if (inputStream != null) {
    testProp.load(inputStream);
  } else {
    throw new FileNotFoundException("Lack of configuration file for testing: " + TEST_PROP_FILE_NAME);
  }
  String wherehowsHost = testProp.getProperty("wherehows.db.jdbc.url");
  String wherehowsUserName = testProp.getProperty("wherehows.db.username");
  String wherehowsPassWord = testProp.getProperty("wherehows.db.password");
  Connection conn = DriverManager
      .getConnection(wherehowsHost + "?" + "user=" + wherehowsUserName + "&password=" + wherehowsPassWord);
  lineageCombiner = new LineageCombiner(conn);
  PathAnalyzer.initialize(conn);
}
Example 2: setUp
import wherehows.common.LineageCombiner; // import the required package/class
@BeforeTest
public void setUp()
    throws SQLException, IOException {
  InputStream inputStream = this.getClass().getResourceAsStream(TEST_PROP_FILE_NAME);
  testProp = new Properties();
  if (inputStream != null) {
    testProp.load(inputStream);
  } else {
    throw new FileNotFoundException("Lack of configuration file for testing: " + TEST_PROP_FILE_NAME);
  }
  String wherehowsUrl = testProp.getProperty("wherehows.db.jdbc.url");
  String wherehowsUserName = testProp.getProperty("wherehows.db.username");
  String wherehowsPassWord = testProp.getProperty("wherehows.db.password");
  Connection conn = DriverManager.getConnection(wherehowsUrl, wherehowsUserName, wherehowsPassWord);
  lineageCombiner = new LineageCombiner(conn);
  PathAnalyzer.initialize(conn);
}
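Both setUp variants read the same three properties; they differ only in how the credentials reach DriverManager. Example 1 appends user and password to the JDBC URL as query parameters, while Example 2 passes them as separate arguments, which is the less error-prone style. The sketch below condenses that second style into a standalone helper; the resource name wherehows-test.properties is a hypothetical stand-in, since the examples only reference the constant TEST_PROP_FILE_NAME.

import java.io.FileNotFoundException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

public class WherehowsConnectionSketch {
  // Hypothetical resource name; the tests above use the constant TEST_PROP_FILE_NAME.
  private static final String TEST_PROP_FILE_NAME = "wherehows-test.properties";

  public static Connection openConnection() throws Exception {
    Properties testProp = new Properties();
    try (InputStream in = WherehowsConnectionSketch.class.getResourceAsStream(TEST_PROP_FILE_NAME)) {
      if (in == null) {
        throw new FileNotFoundException("Missing test configuration file: " + TEST_PROP_FILE_NAME);
      }
      testProp.load(in);
    }
    // Same property keys as the examples above.
    String url = testProp.getProperty("wherehows.db.jdbc.url");
    String user = testProp.getProperty("wherehows.db.username");
    String password = testProp.getProperty("wherehows.db.password");
    return DriverManager.getConnection(url, user, password);
  }
}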
Example 3: extractLineage
import wherehows.common.LineageCombiner; // import the required package/class
/**
 * Get one Azkaban job's lineage.
 * Process:
 *  1. get the execution log from the Azkaban service
 *  2. get the Hadoop job IDs from the execution log
 *  3. get inputs and outputs from the execution log and the Hadoop conf, normalize the paths
 *  4. construct the LineageRecords
 *
 * @return one Azkaban job's lineage
 */
public static List<LineageRecord> extractLineage(AzExecMessage message) throws Exception {
  List<LineageRecord> oneAzkabanJobLineage = new ArrayList<>();
  // the Azkaban job name should have the subflow name prepended
  String[] flowSequence = message.azkabanJobExecution.getFlowPath().split(":")[1].split("/");
  String jobPrefix = "";
  for (int i = 1; i < flowSequence.length; i++) {
    jobPrefix += flowSequence[i] + ":";
  }
  // String log = asc.getExecLog(azJobExec.execId, azJobExec.jobName);
  String execLogs = message.adc.getExecLog(message.azkabanJobExecution.getFlowExecId(),
      jobPrefix + message.azkabanJobExecution.getJobName());
  Set<String> hadoopJobIds = AzLogParser.getHadoopJobIdFromLog(execLogs);
  for (String hadoopJobId : hadoopJobIds) {
    log.debug("Get Hadoop job config: {} from Azkaban job: {}", hadoopJobId, message.azkabanJobExecution.toString());
    // TODO persist this mapping?
    String confJson = message.hnne.getConfFromHadoop(hadoopJobId);
    AzJsonAnalyzer ja = new AzJsonAnalyzer(confJson, message.azkabanJobExecution,
        Integer.valueOf(message.prop.getProperty(Constant.AZ_DEFAULT_HADOOP_DATABASE_ID_KEY)));
    List<LineageRecord> oneHadoopJobLineage = ja.extractFromJson();
    oneAzkabanJobLineage.addAll(oneHadoopJobLineage);
  }
  // normalize and combine the paths
  LineageCombiner lineageCombiner = new LineageCombiner(message.connection);
  lineageCombiner.addAll(oneAzkabanJobLineage);
  Integer defaultDatabaseId = Integer.valueOf(message.prop.getProperty(Constant.AZ_DEFAULT_HADOOP_DATABASE_ID_KEY));
  List<LineageRecord> lineageFromLog =
      AzLogParser.getLineageFromLog(execLogs, message.azkabanJobExecution, defaultDatabaseId);
  lineageCombiner.addAll(lineageFromLog);
  return lineageCombiner.getCombinedLineage();
}
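The subflow-prefix handling at the top of this method is easy to miss: the flow path has the form project:rootFlow/subFlow1/subFlow2, and every segment after the root flow is prepended to the job name, colon-separated, before the execution log is requested. A standalone sketch of that transformation, using a made-up flow path and job name for illustration only:

public class JobPrefixSketch {

  /** Builds the "subFlow1:subFlow2:" prefix from an Azkaban flow path, as in extractLineage above. */
  static String buildJobPrefix(String flowPath) {
    // flowPath looks like "project:rootFlow/subFlow1/subFlow2"; drop the project and the root flow.
    String[] flowSequence = flowPath.split(":")[1].split("/");
    StringBuilder jobPrefix = new StringBuilder();
    for (int i = 1; i < flowSequence.length; i++) {
      jobPrefix.append(flowSequence[i]).append(':');
    }
    return jobPrefix.toString();
  }

  public static void main(String[] args) {
    // Hypothetical flow path and job name.
    String flowPath = "myProject:dailyFlow/loadSubflow";
    String jobName = "loadUserTable";
    System.out.println(buildJobPrefix(flowPath) + jobName);  // prints "loadSubflow:loadUserTable"
  }
}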
Example 4: extractLineage
import wherehows.common.LineageCombiner; // import the required package/class
/**
 * Get one Azkaban job's lineage.
 * Process:
 *  1. get the execution log from the Azkaban service
 *  2. get the Hadoop job IDs from the execution log
 *  3. get inputs and outputs from the execution log and the Hadoop conf, normalize the paths
 *  4. construct the LineageRecords
 *
 * @return one Azkaban job's lineage
 */
public static List<LineageRecord> extractLineage(AzExecMessage message)
    throws Exception {
  List<LineageRecord> oneAzkabanJobLineage = new ArrayList<>();
  // the Azkaban job name should have the subflow name prepended
  String[] flowSequence = message.azkabanJobExecution.getFlowPath().split(":")[1].split("/");
  String jobPrefix = "";
  for (int i = 0; i < flowSequence.length; i++) {
    jobPrefix += flowSequence[i] + ":";
  }
  logger.info(jobPrefix);
  // String log = asc.getExecLog(azJobExec.execId, azJobExec.jobName);
  // String log = message.adc.getExecLog(message.azkabanJobExecution.getFlowExecId(),
  //     jobPrefix + message.azkabanJobExecution.getJobName());
  String log = message.adc.getExecLog(message.azkabanJobExecution.getFlowExecId(), message.azkabanJobExecution.getJobName());
  Set<String> hadoopJobIds = AzLogParser.getHadoopJobIdFromLog(log);
  for (String hadoopJobId : hadoopJobIds) {
    logger.debug("Get Hadoop job config: {} from Azkaban job: {}", hadoopJobId, message.azkabanJobExecution.toString());
    // TODO persist this mapping?
    String confJson = message.hnne.getConfFromHadoop(hadoopJobId);
    AzJsonAnalyzer ja = new AzJsonAnalyzer(confJson, message.azkabanJobExecution,
        Integer.valueOf(message.prop.getProperty(Constant.AZ_DEFAULT_HADOOP_DATABASE_ID_KEY)));
    List<LineageRecord> oneHadoopJobLineage = ja.extractFromJson();
    oneAzkabanJobLineage.addAll(oneHadoopJobLineage);
  }
  logger.info("finished part one");
  // normalize and combine the paths
  LineageCombiner lineageCombiner = new LineageCombiner(message.connection);
  lineageCombiner.addAll(oneAzkabanJobLineage);
  Integer defaultDatabaseId = Integer.valueOf(message.prop.getProperty(Constant.AZ_DEFAULT_HADOOP_DATABASE_ID_KEY));
  List<LineageRecord> lineageFromLog = AzLogParser.getLineageFromLog(log, message.azkabanJobExecution, defaultDatabaseId);
  lineageCombiner.addAll(lineageFromLog);
  logger.info("finished part two");
  return lineageCombiner.getCombinedLineage();
}