本文整理汇总了Java中org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable类的典型用法代码示例。如果您正苦于以下问题:Java NullDBWritable类的具体用法?Java NullDBWritable怎么用?Java NullDBWritable使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
NullDBWritable类属于org.apache.hadoop.mapreduce.lib.db.DBInputFormat包,在下文中一共展示了NullDBWritable类的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testOracleDBRecordReader
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Verifies the SELECT statement that OracleDBRecordReader generates for a
 * split covering rows (1, 10] with an explicit ORDER BY column.
 */
@Test(timeout = 20000)
public void testOracleDBRecordReader() throws Exception {
  DBInputSplit split = new DBInputSplit(1, 10);
  Configuration conf = new Configuration();
  Connection connection = DriverForTest.getConnection();
  DBConfiguration dbConf = new DBConfiguration(conf);
  dbConf.setInputOrderBy("Order");
  String[] fieldNames = { "f1", "f2" };
  OracleDBRecordReader<NullDBWritable> reader =
      new OracleDBRecordReader<NullDBWritable>(split, NullDBWritable.class,
          conf, connection, dbConf, "condition", fieldNames, "table");
  // Oracle pagination is expressed via nested ROWNUM subqueries.
  String expected =
      "SELECT * FROM (SELECT a.*,ROWNUM dbif_rno FROM ( SELECT f1, f2 FROM table WHERE condition ORDER BY Order ) a WHERE rownum <= 10 ) WHERE dbif_rno > 1";
  assertEquals(expected, reader.getSelectQuery());
}
示例2: testOracleDBRecordReader
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Verifies the SELECT statement that OracleDBRecordReader generates for a
 * split covering rows (1, 10] with an explicit ORDER BY column.
 */
@Test(timeout = 2000)
public void testOracleDBRecordReader() throws Exception {
  DBInputSplit split = new DBInputSplit(1, 10);
  Configuration conf = new Configuration();
  Connection connection = DriverForTest.getConnection();
  DBConfiguration dbConf = new DBConfiguration(conf);
  dbConf.setInputOrderBy("Order");
  String[] fieldNames = { "f1", "f2" };
  OracleDBRecordReader<NullDBWritable> reader =
      new OracleDBRecordReader<NullDBWritable>(split, NullDBWritable.class,
          conf, connection, dbConf, "condition", fieldNames, "table");
  // Oracle pagination is expressed via nested ROWNUM subqueries.
  String expected =
      "SELECT * FROM (SELECT a.*,ROWNUM dbif_rno FROM ( SELECT f1, f2 FROM table WHERE condition ORDER BY Order ) a WHERE rownum <= 10 ) WHERE dbif_rno > 1";
  assertEquals(expected, reader.getSelectQuery());
}
示例3: testOracleDBRecordReader
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Verifies the SELECT statement that OracleDBRecordReader generates for a
 * split starting at row 1 with length 10; this variant of the reader keeps
 * the bounds in "start + (length - 1)" / ">= start" form.
 */
@Test(timeout = 2000)
public void testOracleDBRecordReader() throws Exception {
  DBInputSplit split = new DBInputSplit(1, 10);
  Configuration conf = new Configuration();
  Connection connection = DriverForTest.getConnection();
  DBConfiguration dbConf = new DBConfiguration(conf);
  dbConf.setInputOrderBy("Order");
  String[] fieldNames = { "f1", "f2" };
  OracleDBRecordReader<NullDBWritable> reader =
      new OracleDBRecordReader<NullDBWritable>(split, NullDBWritable.class,
          conf, connection, dbConf, "condition", fieldNames, "table");
  // Oracle pagination is expressed via nested ROWNUM subqueries.
  String expected =
      "SELECT * FROM (SELECT a.*,ROWNUM dbif_rno FROM ( SELECT f1, f2 FROM table WHERE condition ORDER BY Order ) a WHERE rownum <= 1 + 9 ) WHERE dbif_rno >= 1";
  assertEquals(expected, reader.getSelectQuery());
}
示例4: testDataDrivenDBInputFormatSplitter
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Checks the splitter that DataDrivenDBInputFormat selects for each SQL
 * type; every temporal type must map to the date-aware splitter.
 */
@Test(timeout = 10000)
public void testDataDrivenDBInputFormatSplitter() {
  DataDrivenDBInputFormat<NullDBWritable> inputFormat =
      new DataDrivenDBInputFormat<NullDBWritable>();
  testCommonSplitterTypes(inputFormat);
  // TIME, DATE and TIMESTAMP all share DateSplitter.
  assertEquals(DateSplitter.class,
      inputFormat.getSplitter(Types.TIME).getClass());
  assertEquals(DateSplitter.class,
      inputFormat.getSplitter(Types.DATE).getClass());
  assertEquals(DateSplitter.class,
      inputFormat.getSplitter(Types.TIMESTAMP).getClass());
}
示例5: testDataDrivenDBInputFormat
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Exercises split generation and bounding-query configuration of
 * DataDrivenDBInputFormat.
 */
@Test(timeout = 10000)
public void testDataDrivenDBInputFormat() throws Exception {
  JobContext context = mock(JobContext.class);
  Configuration conf = new Configuration();
  conf.setInt(MRJobConfig.NUM_MAPS, 1);
  when(context.getConfiguration()).thenReturn(conf);
  DataDrivenDBInputFormat<NullDBWritable> inputFormat =
      new DataDrivenDBInputFormat<NullDBWritable>();
  // A single map task yields one split whose bounds degenerate to "1=1".
  List<InputSplit> splits = inputFormat.getSplits(context);
  assertEquals(1, splits.size());
  DataDrivenDBInputSplit firstSplit = (DataDrivenDBInputSplit) splits.get(0);
  assertEquals("1=1", firstSplit.getLowerClause());
  assertEquals("1=1", firstSplit.getUpperClause());
  // Second scenario: two map tasks plus an explicit bounding query.
  conf.setInt(MRJobConfig.NUM_MAPS, 2);
  DataDrivenDBInputFormat.setBoundingQuery(conf, "query");
  assertEquals("query", conf.get(DBConfiguration.INPUT_BOUNDING_QUERY));
  Job job = mock(Job.class);
  when(job.getConfiguration()).thenReturn(conf);
  // setInput overwrites the bounding query stored in the configuration.
  DataDrivenDBInputFormat.setInput(job, NullDBWritable.class, "query",
      "Bounding Query");
  assertEquals("Bounding Query",
      conf.get(DBConfiguration.INPUT_BOUNDING_QUERY));
}
示例6: testOracleDataDrivenDBInputFormat
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Checks that the Oracle input-format variant selects the Oracle-specific
 * date splitter for every temporal SQL type.
 */
@Test(timeout = 10000)
public void testOracleDataDrivenDBInputFormat() throws Exception {
  OracleDataDrivenDBInputFormat<NullDBWritable> oracleFormat =
      new OracleDataDrivenDBInputFormatForTest();
  testCommonSplitterTypes(oracleFormat);
  // TIME, DATE and TIMESTAMP all share OracleDateSplitter.
  assertEquals(OracleDateSplitter.class,
      oracleFormat.getSplitter(Types.TIME).getClass());
  assertEquals(OracleDateSplitter.class,
      oracleFormat.getSplitter(Types.DATE).getClass());
  assertEquals(OracleDateSplitter.class,
      oracleFormat.getSplitter(Types.TIMESTAMP).getClass());
}
示例7: testCommonSplitterTypes
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Asserts the splitter class chosen for each common (non-temporal) SQL
 * type, and that an unsupported type yields no splitter at all.
 */
private void testCommonSplitterTypes(
    DataDrivenDBInputFormat<NullDBWritable> inputFormat) {
  // Exact-precision numerics use the BigDecimal-based splitter.
  assertEquals(BigDecimalSplitter.class,
      inputFormat.getSplitter(Types.NUMERIC).getClass());
  assertEquals(BigDecimalSplitter.class,
      inputFormat.getSplitter(Types.DECIMAL).getClass());
  // Boolean-like types.
  assertEquals(BooleanSplitter.class,
      inputFormat.getSplitter(Types.BIT).getClass());
  assertEquals(BooleanSplitter.class,
      inputFormat.getSplitter(Types.BOOLEAN).getClass());
  // Integral types of every width share the integer splitter.
  assertEquals(IntegerSplitter.class,
      inputFormat.getSplitter(Types.TINYINT).getClass());
  assertEquals(IntegerSplitter.class,
      inputFormat.getSplitter(Types.SMALLINT).getClass());
  assertEquals(IntegerSplitter.class,
      inputFormat.getSplitter(Types.INTEGER).getClass());
  assertEquals(IntegerSplitter.class,
      inputFormat.getSplitter(Types.BIGINT).getClass());
  // Floating-point types.
  assertEquals(FloatSplitter.class,
      inputFormat.getSplitter(Types.FLOAT).getClass());
  assertEquals(FloatSplitter.class,
      inputFormat.getSplitter(Types.REAL).getClass());
  assertEquals(FloatSplitter.class,
      inputFormat.getSplitter(Types.DOUBLE).getClass());
  // Character types.
  assertEquals(TextSplitter.class,
      inputFormat.getSplitter(Types.CHAR).getClass());
  assertEquals(TextSplitter.class,
      inputFormat.getSplitter(Types.VARCHAR).getClass());
  assertEquals(TextSplitter.class,
      inputFormat.getSplitter(Types.LONGVARCHAR).getClass());
  // Unknown/unsupported types produce no splitter.
  assertNull(inputFormat.getSplitter(Types.BINARY));
}
示例8: testDataDrivenDBInputFormatSplitter
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Checks the splitter that DataDrivenDBInputFormat selects for each SQL
 * type; every temporal type must map to the date-aware splitter.
 */
@Test(timeout = 1000)
public void testDataDrivenDBInputFormatSplitter() {
  DataDrivenDBInputFormat<NullDBWritable> inputFormat =
      new DataDrivenDBInputFormat<NullDBWritable>();
  testCommonSplitterTypes(inputFormat);
  // TIME, DATE and TIMESTAMP all share DateSplitter.
  assertEquals(DateSplitter.class,
      inputFormat.getSplitter(Types.TIME).getClass());
  assertEquals(DateSplitter.class,
      inputFormat.getSplitter(Types.DATE).getClass());
  assertEquals(DateSplitter.class,
      inputFormat.getSplitter(Types.TIMESTAMP).getClass());
}
示例9: testDataDrivenDBInputFormat
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Exercises split generation and bounding-query configuration of
 * DataDrivenDBInputFormat.
 */
@Test(timeout = 1000)
public void testDataDrivenDBInputFormat() throws Exception {
  JobContext context = mock(JobContext.class);
  Configuration conf = new Configuration();
  conf.setInt(MRJobConfig.NUM_MAPS, 1);
  when(context.getConfiguration()).thenReturn(conf);
  DataDrivenDBInputFormat<NullDBWritable> inputFormat =
      new DataDrivenDBInputFormat<NullDBWritable>();
  // A single map task yields one split whose bounds degenerate to "1=1".
  List<InputSplit> splits = inputFormat.getSplits(context);
  assertEquals(1, splits.size());
  DataDrivenDBInputSplit firstSplit = (DataDrivenDBInputSplit) splits.get(0);
  assertEquals("1=1", firstSplit.getLowerClause());
  assertEquals("1=1", firstSplit.getUpperClause());
  // Second scenario: two map tasks plus an explicit bounding query.
  conf.setInt(MRJobConfig.NUM_MAPS, 2);
  DataDrivenDBInputFormat.setBoundingQuery(conf, "query");
  assertEquals("query", conf.get(DBConfiguration.INPUT_BOUNDING_QUERY));
  Job job = mock(Job.class);
  when(job.getConfiguration()).thenReturn(conf);
  // setInput overwrites the bounding query stored in the configuration.
  DataDrivenDBInputFormat.setInput(job, NullDBWritable.class, "query",
      "Bounding Query");
  assertEquals("Bounding Query",
      conf.get(DBConfiguration.INPUT_BOUNDING_QUERY));
}
示例10: testOracleDataDrivenDBInputFormat
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Checks that the Oracle input-format variant selects the Oracle-specific
 * date splitter for every temporal SQL type.
 */
@Test(timeout = 1000)
public void testOracleDataDrivenDBInputFormat() throws Exception {
  OracleDataDrivenDBInputFormat<NullDBWritable> oracleFormat =
      new OracleDataDrivenDBInputFormatForTest();
  testCommonSplitterTypes(oracleFormat);
  // TIME, DATE and TIMESTAMP all share OracleDateSplitter.
  assertEquals(OracleDateSplitter.class,
      oracleFormat.getSplitter(Types.TIME).getClass());
  assertEquals(OracleDateSplitter.class,
      oracleFormat.getSplitter(Types.DATE).getClass());
  assertEquals(OracleDateSplitter.class,
      oracleFormat.getSplitter(Types.TIMESTAMP).getClass());
}
示例11: getInputClass
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat.NullDBWritable; //导入依赖的package包/类
/**
 * Returns the value class configured for database input, falling back to
 * {@link NullDBWritable} when INPUT_CLASS_PROPERTY is not set.
 */
public Class<?> getInputClass() {
  Class<?> inputClass =
      conf.getClass(DBConfiguration.INPUT_CLASS_PROPERTY, NullDBWritable.class);
  return inputClass;
}