本文整理匯總了Java中org.apache.sqoop.io.CodecMap類的典型用法代碼示例。如果您正苦於以下問題:Java CodecMap類的具體用法?Java CodecMap怎麽用?Java CodecMap使用的例子?那麽, 這裏精選的類代碼示例或許可以為您提供幫助。
CodecMap類屬於org.apache.sqoop.io包,在下文中一共展示了CodecMap類的4個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: testBlobCompressedAvroImportInline
import org.apache.sqoop.io.CodecMap; //導入依賴的package包/類
/**
* Import blob data that is smaller than inline lob limit and compress with
* deflate codec. Blob data should be encoded and saved as Avro bytes.
* @throws IOException
* @throws SQLException
*/
/**
 * Import blob data that is smaller than the inline lob limit and compress
 * with the deflate codec. Blob data should be encoded and saved as Avro
 * bytes inline in the data file.
 * @throws IOException if the import or reading the Avro output fails
 * @throws SQLException if creating or populating the test table fails
 */
public void testBlobCompressedAvroImportInline()
    throws IOException, SQLException {
  String[] types = { getBlobType() };
  String expectedVal = "This is short BLOB data";
  String[] vals = { getBlobInsertStr(expectedVal) };
  createTableWithColTypes(types, vals);
  runImport(getArgv("--compression-codec", CodecMap.DEFLATE));
  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  // try-with-resources closes the reader even if an assertion fails; the
  // original leaked the DataFileReader.
  try (DataFileReader<GenericRecord> reader = read(outputFile)) {
    GenericRecord record = reader.next();
    // Verify that the data block of the Avro file is compressed with the
    // deflate codec.
    assertEquals(CodecMap.DEFLATE,
        reader.getMetaString(DataFileConstants.CODEC));
    // Verify that all columns are imported correctly.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    // NOTE(review): decodes with the platform default charset; fine for this
    // ASCII fixture, but UTF-8 would be safer if the fixture ever changes.
    String returnVal = new String(buf.array());
    assertEquals(getColName(0), expectedVal, returnVal);
  }
}
示例2: testBlobCompressedAvroImportExternal
import org.apache.sqoop.io.CodecMap; //導入依賴的package包/類
/**
* Import blob data that is larger than inline lob limit and compress with
* deflate codec. The reference file should be encoded and saved as Avro
* bytes. Blob data should be saved in LOB file format without compression.
* @throws IOException
* @throws SQLException
*/
/**
 * Import blob data that is larger than the inline lob limit and compress
 * with the deflate codec. The reference string should be encoded and saved
 * as Avro bytes; the blob itself should be saved in LOB file format
 * without compression.
 * @throws IOException if the import or reading the output fails
 * @throws SQLException if creating or populating the test table fails
 */
public void testBlobCompressedAvroImportExternal()
    throws IOException, SQLException {
  String[] types = { getBlobType() };
  String data = "This is short BLOB data";
  String[] vals = { getBlobInsertStr(data) };
  createTableWithColTypes(types, vals);
  // Set inline lob limit to a small value so that blob data will be
  // written to an external file.
  runImport(getArgv(
      "--inline-lob-limit", "1", "--compression-codec", CodecMap.DEFLATE));
  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  String returnVal;
  // Close the Avro reader deterministically; the original leaked it.
  try (DataFileReader<GenericRecord> reader = read(outputFile)) {
    GenericRecord record = reader.next();
    // Verify that the data block of the Avro file is compressed with the
    // deflate codec.
    assertEquals(CodecMap.DEFLATE,
        reader.getMetaString(DataFileConstants.CODEC));
    // Verify that the reference string is written in Avro bytes.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    returnVal = new String(buf.array());
  }
  String expectedStart = "externalLob(lf,_lob/large_obj";
  String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
      + data.length() + ")";
  assertNotNull(returnVal);
  assertTrue("ExpectedStart: " + expectedStart + ", value: " + returnVal,
      returnVal.startsWith(expectedStart));
  assertTrue("ExpectedEnd: " + expectedEnd + ", value: " + returnVal,
      returnVal.endsWith(expectedEnd));
  // Verify that blob data stored in the external lob file is correct.
  BlobRef br = BlobRef.parse(returnVal);
  Path lobFileDir = new Path(getWarehouseDir(), getTableName());
  byte[] bufArray = new byte[data.length()];
  int chars;
  // Close the lob stream even if read() throws; the original only closed it
  // on the success path.
  try (InputStream in = br.getDataStream(getConf(), lobFileDir)) {
    chars = in.read(bufArray);
  }
  // JUnit convention: expected value first, actual second (the original had
  // them swapped, which produces misleading failure messages).
  assertEquals(data.length(), chars);
  String lobVal = new String(bufArray);
  assertEquals(getColName(0), data, lobVal);
}
示例3: testBlobCompressedAvroImportExternal
import org.apache.sqoop.io.CodecMap; //導入依賴的package包/類
/**
* Import blob data that is larger than inline lob limit and compress with
* deflate codec. The reference file should be encoded and saved as Avro
* bytes. Blob data should be saved in LOB file format without compression.
* @throws IOException
* @throws SQLException
*/
/**
 * Import blob data that is larger than the inline lob limit and compress
 * with the deflate codec. The reference string should be encoded and saved
 * as Avro bytes; the blob itself should be saved in LOB file format
 * without compression.
 * @throws IOException if the import or reading the output fails
 * @throws SQLException if creating or populating the test table fails
 */
public void testBlobCompressedAvroImportExternal()
    throws IOException, SQLException {
  String[] types = { getBlobType() };
  String data = "This is short BLOB data";
  String[] vals = { getBlobInsertStr(data) };
  createTableWithColTypes(types, vals);
  // Set inline lob limit to a small value so that blob data will be
  // written to an external file.
  runImport(getArgv(
      "--inline-lob-limit", "1", "--compression-codec", CodecMap.DEFLATE));
  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  String returnVal;
  // Close the Avro reader deterministically; the original leaked it.
  try (DataFileReader<GenericRecord> reader = read(outputFile)) {
    GenericRecord record = reader.next();
    // Verify that the data block of the Avro file is compressed with the
    // deflate codec.
    assertEquals(CodecMap.DEFLATE,
        reader.getMetaString(DataFileConstants.CODEC));
    // Verify that the reference string is written in Avro bytes.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    returnVal = new String(buf.array());
  }
  String expectedStart = "externalLob(lf,_lob/large_obj";
  String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
      + data.length() + ")";
  assertTrue(returnVal.startsWith(expectedStart));
  assertTrue(returnVal.endsWith(expectedEnd));
  // Verify that blob data stored in the external lob file is correct.
  BlobRef br = BlobRef.parse(returnVal);
  Path lobFileDir = new Path(getWarehouseDir(), getTableName());
  byte[] bufArray = new byte[data.length()];
  int chars;
  // Close the lob stream even if read() throws; the original only closed it
  // on the success path.
  try (InputStream in = br.getDataStream(getConf(), lobFileDir)) {
    chars = in.read(bufArray);
  }
  // JUnit convention: expected value first, actual second (the original had
  // them swapped, which produces misleading failure messages).
  assertEquals(data.length(), chars);
  String lobVal = new String(bufArray);
  assertEquals(getColName(0), data, lobVal);
}
示例4: testBlobCompressedAvroImportExternal
import org.apache.sqoop.io.CodecMap; //導入依賴的package包/類
/**
* Import blob data that is larger than inline lob limit and compress with
* deflate codec. The reference file should be encoded and saved as Avro
* bytes. Blob data should be saved in LOB file format without compression.
* @throws IOException
* @throws SQLException
*/
/**
 * Import blob data that is larger than the inline lob limit and compress
 * with the deflate codec. The reference string should be encoded and saved
 * as Avro bytes; the blob itself should be saved in LOB file format
 * without compression.
 * @throws IOException if the import or reading the output fails
 * @throws SQLException if creating or populating the test table fails
 */
public void testBlobCompressedAvroImportExternal()
    throws IOException, SQLException {
  String[] types = { getBlobType() };
  String data = "This is short BLOB data";
  String[] vals = { getBlobInsertStr(data) };
  createTableWithColTypes(types, vals);
  // Set inline lob limit to a small value so that blob data will be
  // written to an external file.
  runImport(getArgv(
      "--inline-lob-limit", "1", "--compression-codec", CodecMap.DEFLATE));
  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  String returnVal;
  // Close the Avro reader deterministically; the original leaked it.
  try (DataFileReader<GenericRecord> reader = read(outputFile)) {
    GenericRecord record = reader.next();
    // Verify that the data block of the Avro file is compressed with the
    // deflate codec.
    assertEquals(CodecMap.DEFLATE,
        reader.getMetaString(DataFileConstants.CODEC));
    // Verify that the reference string is written in Avro bytes.
    ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
    returnVal = new String(buf.array());
  }
  String expectedVal = "externalLob(lf,_lob/large_obj_task_local_000"
      + getTableNum() + "_m_0000000.lob,68," + data.length() + ")";
  assertEquals(expectedVal, returnVal);
  // Verify that blob data stored in the external lob file is correct.
  BlobRef br = BlobRef.parse(returnVal);
  Path lobFileDir = new Path(getWarehouseDir(), getTableName());
  byte[] bufArray = new byte[data.length()];
  int chars;
  // Close the lob stream even if read() throws; the original only closed it
  // on the success path.
  try (InputStream in = br.getDataStream(getConf(), lobFileDir)) {
    chars = in.read(bufArray);
  }
  // JUnit convention: expected value first, actual second (the original had
  // them swapped, which produces misleading failure messages).
  assertEquals(data.length(), chars);
  String lobVal = new String(bufArray);
  assertEquals(getColName(0), data, lobVal);
}