Java CodecMap Class Code Examples

This article collects typical usages of the Java class org.apache.sqoop.io.CodecMap. If you are wondering what the CodecMap class does, how to use it, or where to find examples of it, the curated class code examples below may help.


The CodecMap class belongs to the org.apache.sqoop.io package. Four code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
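Before the examples, a minimal sketch of what CodecMap is for: it maps short codec names (such as the CodecMap.DEFLATE constant the tests below pass to --compression-codec) to Hadoop CompressionCodec instances. The getCodec call reflects the Sqoop 1.x API as I understand it; on a different version, treat the signature and the null behavior noted below as assumptions.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.sqoop.io.CodecMap;

public class CodecMapSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();

    // Short codec names are exposed as constants, e.g. CodecMap.DEFLATE.
    String name = CodecMap.DEFLATE;

    // Resolve the short name (or a fully qualified codec class name) to a
    // Hadoop CompressionCodec; unknown names raise an exception. In the
    // versions I have seen, "none" resolves to null, hence the guard.
    CompressionCodec codec = CodecMap.getCodec(name, conf);
    System.out.println(name + " -> "
        + (codec == null ? "null" : codec.getClass().getName()));
  }
}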

Example 1: testBlobCompressedAvroImportInline

import org.apache.sqoop.io.CodecMap; // import the required package/class
/**
 * Import blob data that is smaller than the inline LOB limit and compress
 * it with the deflate codec. The blob data should be encoded and saved as
 * Avro bytes.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobCompressedAvroImportInline()
    throws IOException, SQLException {
  String [] types = { getBlobType() };
  String expectedVal = "This is short BLOB data";
  String [] vals = { getBlobInsertStr(expectedVal) };

  createTableWithColTypes(types, vals);

  runImport(getArgv("--compression-codec", CodecMap.DEFLATE));

  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  DataFileReader<GenericRecord> reader = read(outputFile);
  GenericRecord record = reader.next();

  // Verify that the data block of the Avro file is compressed with deflate
  // codec.
  assertEquals(CodecMap.DEFLATE,
      reader.getMetaString(DataFileConstants.CODEC));

  // Verify that all columns are imported correctly.
  ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
  String returnVal = new String(buf.array());

  assertEquals(getColName(0), expectedVal, returnVal);
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 32, Source: LobAvroImportTestCase.java
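The read(Path) helper is not shown in the snippet. Assuming the standard Avro reader API over HDFS, a plausible implementation looks like the sketch below; this is a guess at what the test harness provides (the real helper presumably uses the test's own configuration rather than a fresh one), not code from the project.

import java.io.IOException;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapred.FsInput;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class AvroReadHelperSketch {
  // Open an Avro container file through the Hadoop filesystem and return a
  // reader over its generic records. reader.getMetaString(DataFileConstants.CODEC)
  // ("avro.codec") then reports which codec compressed the data blocks.
  static DataFileReader<GenericRecord> read(Path filename) throws IOException {
    Configuration conf = new Configuration(); // the harness likely uses getConf()
    FsInput fsInput = new FsInput(filename, conf);
    GenericDatumReader<GenericRecord> datumReader =
        new GenericDatumReader<GenericRecord>();
    return new DataFileReader<GenericRecord>(fsInput, datumReader);
  }
}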

Example 2: testBlobCompressedAvroImportExternal

import org.apache.sqoop.io.CodecMap; // import the required package/class
/**
 * Import blob data that is larger than the inline LOB limit and compress
 * it with the deflate codec. The reference file should be encoded and
 * saved as Avro bytes. The blob data should be saved in LOB file format
 * without compression.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobCompressedAvroImportExternal()
    throws IOException, SQLException {
  String [] types = { getBlobType() };
  String data = "This is short BLOB data";
  String [] vals = { getBlobInsertStr(data) };

  createTableWithColTypes(types, vals);

  // Set inline lob limit to a small value so that blob data will be
  // written to an external file.
  runImport(getArgv(
      "--inline-lob-limit", "1", "--compression-codec", CodecMap.DEFLATE));

  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  DataFileReader<GenericRecord> reader = read(outputFile);
  GenericRecord record = reader.next();

  // Verify that the data block of the Avro file is compressed with deflate
  // codec.
  assertEquals(CodecMap.DEFLATE,
      reader.getMetaString(DataFileConstants.CODEC));

  // Verify that the reference file is written in Avro bytes.
  ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
  String returnVal = new String(buf.array());
  String expectedStart = "externalLob(lf,_lob/large_obj";
  String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
    + data.length() + ")";

  assertNotNull(returnVal);
  assertTrue("ExpectedStart: " + expectedStart + ", value: " + returnVal, returnVal.startsWith(expectedStart));
  assertTrue("ExpectedEnd: " + expectedEnd + ", value: " + returnVal, returnVal.endsWith(expectedEnd));

  // Verify that blob data stored in the external lob file is correct.
  BlobRef br = BlobRef.parse(returnVal);
  Path lobFileDir = new Path(getWarehouseDir(), getTableName());
  InputStream in = br.getDataStream(getConf(), lobFileDir);

  byte [] bufArray = new byte[data.length()];
  int chars = in.read(bufArray);
  in.close();

  assertEquals(data.length(), chars);

  returnVal = new String(bufArray);
  String expectedVal = data;

  assertEquals(getColName(0), expectedVal, returnVal);
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 57, Source: LobAvroImportTestCase.java
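The asserted reference string follows Sqoop's external-LOB pointer format, externalLob(lf,&lt;file&gt;,&lt;offset&gt;,&lt;length&gt;): lf names the LobFile storage format, the offset (68 above) appears to be the record's byte position inside the .lob file, and the length is the original BLOB size. Below is a hypothetical sketch of pulling the fields apart with a regular expression; the pattern and the sample reference string are illustrative only, and real code should go through BlobRef.parse, as the test does.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ExternalLobRefSketch {
  // Illustrative pattern for externalLob(lf,<file>,<offset>,<length>).
  private static final Pattern EXTERNAL_LOB =
      Pattern.compile("externalLob\\(lf,(.+),(\\d+),(\\d+)\\)");

  public static void main(String[] args) {
    // Hypothetical reference string in the shape the tests assert on.
    String ref = "externalLob(lf,_lob/large_obj_task_local_0001_m_0000000.lob,68,23)";
    Matcher m = EXTERNAL_LOB.matcher(ref);
    if (m.matches()) {
      System.out.println("file   = " + m.group(1)); // path relative to the LOB dir
      System.out.println("offset = " + m.group(2)); // byte offset inside the .lob file
      System.out.println("length = " + m.group(3)); // original BLOB length
    }
  }
}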

Example 3: testBlobCompressedAvroImportExternal

import org.apache.sqoop.io.CodecMap; // import the required package/class
/**
 * Import blob data that is larger than the inline LOB limit and compress
 * it with the deflate codec. The reference file should be encoded and
 * saved as Avro bytes. The blob data should be saved in LOB file format
 * without compression.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobCompressedAvroImportExternal()
    throws IOException, SQLException {
  String [] types = { getBlobType() };
  String data = "This is short BLOB data";
  String [] vals = { getBlobInsertStr(data) };

  createTableWithColTypes(types, vals);

  // Set inline lob limit to a small value so that blob data will be
  // written to an external file.
  runImport(getArgv(
      "--inline-lob-limit", "1", "--compression-codec", CodecMap.DEFLATE));

  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  DataFileReader<GenericRecord> reader = read(outputFile);
  GenericRecord record = reader.next();

  // Verify that the data block of the Avro file is compressed with deflate
  // codec.
  assertEquals(CodecMap.DEFLATE,
      reader.getMetaString(DataFileConstants.CODEC));

  // Verify that the reference file is written in Avro bytes.
  ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
  String returnVal = new String(buf.array());
  String expectedStart = "externalLob(lf,_lob/large_obj";
  String expectedEnd = getTableNum() + "_m_0000000.lob,68,"
    + data.length() + ")";

  assertTrue(returnVal.startsWith(expectedStart));
  assertTrue(returnVal.endsWith(expectedEnd));

  // Verify that blob data stored in the external lob file is correct.
  BlobRef br = BlobRef.parse(returnVal);
  Path lobFileDir = new Path(getWarehouseDir(), getTableName());
  InputStream in = br.getDataStream(getConf(), lobFileDir);

  byte [] bufArray = new byte[data.length()];
  int chars = in.read(bufArray);
  in.close();

  assertEquals(data.length(), chars);

  returnVal = new String(bufArray);
  String expectedVal = data;

  assertEquals(getColName(0), expectedVal, returnVal);
}
 
Developer ID: unicredit, Project: zSqoop, Lines: 56, Source: LobAvroImportTestCase.java

Example 4: testBlobCompressedAvroImportExternal

import org.apache.sqoop.io.CodecMap; // import the required package/class
/**
 * Import blob data that is larger than the inline LOB limit and compress
 * it with the deflate codec. The reference file should be encoded and
 * saved as Avro bytes. The blob data should be saved in LOB file format
 * without compression.
 * @throws IOException
 * @throws SQLException
 */
public void testBlobCompressedAvroImportExternal()
    throws IOException, SQLException {
  String [] types = { getBlobType() };
  String data = "This is short BLOB data";
  String [] vals = { getBlobInsertStr(data) };

  createTableWithColTypes(types, vals);

  // Set inline lob limit to a small value so that blob data will be
  // written to an external file.
  runImport(getArgv(
      "--inline-lob-limit", "1", "--compression-codec", CodecMap.DEFLATE));

  Path outputFile = new Path(getTablePath(), "part-m-00000.avro");
  DataFileReader<GenericRecord> reader = read(outputFile);
  GenericRecord record = reader.next();

  // Verify that the data block of the Avro file is compressed with deflate
  // codec.
  assertEquals(CodecMap.DEFLATE,
      reader.getMetaString(DataFileConstants.CODEC));

  // Verify that the reference file is written in Avro bytes.
  ByteBuffer buf = (ByteBuffer) record.get(getColName(0));
  String returnVal = new String(buf.array());
  String expectedVal = "externalLob(lf,_lob/large_obj_task_local_000"
      + getTableNum() + "_m_0000000.lob,68," + data.length() + ")";

  assertEquals(expectedVal, returnVal);

  // Verify that blob data stored in the external lob file is correct.
  BlobRef br = BlobRef.parse(returnVal);
  Path lobFileDir = new Path(getWarehouseDir(), getTableName());
  InputStream in = br.getDataStream(getConf(), lobFileDir);

  byte [] bufArray = new byte[data.length()];
  int chars = in.read(bufArray);
  in.close();

  assertEquals(data.length(), chars);

  returnVal = new String(bufArray);
  expectedVal = data;

  assertEquals(getColName(0), expectedVal, returnVal);
}
 
Developer ID: infinidb, Project: sqoop, Lines: 54, Source: LobAvroImportTestCase.java


Note: The org.apache.sqoop.io.CodecMap class examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. Refer to each project's license before distributing or using the code. Do not reproduce this article without permission.