

Java BaseSqoopTestCase Class Code Examples

This article collects typical usage examples of the Java class com.cloudera.sqoop.testutil.BaseSqoopTestCase. If you are wondering what BaseSqoopTestCase is for and how to use it in your own code, the selected examples below should help.


The BaseSqoopTestCase class belongs to the com.cloudera.sqoop.testutil package. A total of 15 code examples using the class are shown below, ordered by popularity.
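
Most of the examples below share the same setup pattern: call BaseSqoopTestCase.isOnPhysicalCluster() to decide whether the Hadoop Configuration should point at the local filesystem, and resolve test directories under BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR. The following is a minimal, self-contained sketch of that pattern, not part of Sqoop itself; the class and method names (LocalConfSketch, newLocalConf, warehouseDir) are illustrative only.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.cloudera.sqoop.testutil.BaseSqoopTestCase;
import com.cloudera.sqoop.testutil.CommonArgs;

public class LocalConfSketch {

  // Build a Configuration that targets the local filesystem when the tests
  // are not running against a physical cluster (the guard used throughout
  // the examples below).
  public static Configuration newLocalConf() {
    Configuration conf = new Configuration();
    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
    }
    return conf;
  }

  // Resolve a per-table directory under the shared local warehouse
  // directory and make sure it exists.
  public static Path warehouseDir(String tableName) throws IOException {
    Path warehouse = new Path(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR);
    Path tableDir = new Path(warehouse, tableName);
    FileSystem.get(newLocalConf()).mkdirs(tableDir);
    return tableDir;
  }
}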

Example 1: createExportFile

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
protected void createExportFile(ColumnGenerator... extraCols)
  throws IOException {
  String ext = ".txt";

  Path tablePath = getTablePath();
  Path filePath = new Path(tablePath, "part0" + ext);

  Configuration conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  FileSystem fs = FileSystem.get(conf);
  fs.mkdirs(tablePath);
  OutputStream os = fs.create(filePath);

  BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
  for (int i = 0; i < 3; i++) {
    String line = getRecordLine(i, extraCols);
    w.write(line);
    LOG.debug("Create Export file - Writing line : " + line);
  }
  w.close();
  os.close();
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 25, Source: NetezzaExportManualTest.java

Example 2: importData

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
private void importData(String targetDir, SqoopOptions.FileLayout fileLayout) {
  SqoopOptions options;
  options = getSqoopOptions(newConf());
  options.setTableName(TABLE_NAME);
  options.setNumMappers(1);
  options.setFileLayout(fileLayout);
  options.setDeleteMode(true);

  Path warehouse = new Path(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR);
  options.setTargetDir(new Path(warehouse, targetDir).toString());

  ImportTool importTool = new ImportTool();
  Sqoop importer = new Sqoop(importTool, options.getConf(), options);
  int ret = Sqoop.runSqoop(importer, new String[0]);
  if (0 != ret) {
    fail("Initial import failed with exit code " + ret);
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 19, Source: TestMerge.java

Example 3: createExportFile

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
protected void createExportFile(ColumnGenerator...extraCols)
  throws IOException, SQLException {
  String ext = ".txt";

  Path tablePath = getTablePath();
  Path filePath = new Path(tablePath, "part0" + ext);

  Configuration conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  FileSystem fs = FileSystem.get(conf);
  fs.mkdirs(tablePath);
  OutputStream os = fs.create(filePath);

  BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
  for (int i = 0; i < 3; i++) {
    String line = getRecordLine(i, extraCols);
    w.write(line);
    LOG.debug("Create Export file - Writing line : " + line);
  }
  w.close();
  os.close();
}
 
Developer: unicredit, Project: zSqoop, Lines: 26, Source: NetezzaExportManualTest.java

Example 4: newConf

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
public Configuration newConf() {
  Configuration conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  conf.set("mapred.job.tracker", "local");
  return conf;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 9, Source: TestMerge.java

Example 5: clearDir

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
/**
 * Delete all files in a directory for a table.
 */
public void clearDir(String tableName) {
  try {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path warehouse = new Path(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR);
    Path tableDir = new Path(warehouse, tableName);
    fs.delete(tableDir, true);
  } catch (Exception e) {
    fail("Got unexpected exception: " + StringUtils.stringifyException(e));
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 14, Source: TestIncrementalImport.java

Example 6: getArgListForTable

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
/**
 * Return a list of arguments to import the specified table.
 */
private List<String> getArgListForTable(String tableName, boolean commonArgs,
    boolean isAppend, boolean appendTimestamp) {
  List<String> args = new ArrayList<String>();
  if (commonArgs) {
    CommonArgs.addHadoopFlags(args);
  }
  args.add("--connect");
  args.add(SOURCE_DB_URL);
  args.add("--table");
  args.add(tableName);
  args.add("--warehouse-dir");
  args.add(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR);
  if (isAppend) {
    args.add("--incremental");
    args.add("append");
    if (!appendTimestamp) {
      args.add("--check-column");
      args.add("ID");
    } else {
      args.add("--check-column");
      args.add("LAST_MODIFIED");
    }
  } else {
    args.add("--incremental");
    args.add("lastmodified");
    args.add("--check-column");
    args.add("LAST_MODIFIED");
  }
  args.add("--columns");
  args.add("ID");
  args.add("-m");
  args.add("1");

  return args;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 39, Source: TestIncrementalImport.java

Example 7: getArgListForQuery

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
/**
 * Return a list of arguments to import by query.
 * @return the argument list.
 */
private List<String> getArgListForQuery(String query, String directoryName,
  boolean commonArgs, boolean isAppend, boolean appendTimestamp) {
  List<String> args = new ArrayList<String>();
  if (commonArgs) {
    CommonArgs.addHadoopFlags(args);
  }
  args.add("--connect");
  args.add(SOURCE_DB_URL);
  args.add("--query");
  args.add(query);
  args.add("--class-name");
  args.add(directoryName);
  args.add("--target-dir");
  args.add(BaseSqoopTestCase.LOCAL_WAREHOUSE_DIR
    + System.getProperty("file.separator") + directoryName);
  if (isAppend) {
    args.add("--incremental");
    args.add("append");
    if (!appendTimestamp) {
      args.add("--check-column");
      args.add("ID");
    } else {
      args.add("--check-column");
      args.add("LAST_MODIFIED");
    }
  } else {
    args.add("--incremental");
    args.add("lastmodified");
    args.add("--check-column");
    args.add("LAST_MODIFIED");
  }
  args.add("-m");
  args.add("1");

  return args;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 41, Source: TestIncrementalImport.java
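
The argument lists built in Examples 6 and 7 would typically be handed to Sqoop's programmatic entry point. Below is a minimal, hedged sketch using Sqoop.runTool(String[], Configuration); the variables args and conf are assumed to come from the helpers above and from newConf() in Example 4, and the surrounding test class is not shown.

// Sketch only: `args` from getArgListForTable()/getArgListForQuery(),
// `conf` from newConf() (Example 4).
String[] argv = args.toArray(new String[args.size()]);
int ret = Sqoop.runTool(argv, conf);
if (0 != ret) {
  fail("Incremental import failed with exit code " + ret);
}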

Example 8: read

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
private DataFileReader<GenericRecord> read(Path filename) throws IOException {
  Configuration conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  FsInput fsInput = new FsInput(filename, conf);
  DatumReader<GenericRecord> datumReader =
    new GenericDatumReader<GenericRecord>();
  return new DataFileReader<GenericRecord>(fsInput, datumReader);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 11, Source: TestAvroImport.java

Example 9: createTextFile

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
/**
 * Create a data file that gets exported to the db.
 * @param fileNum the number of the file (for multi-file export)
 * @param numRecords how many records to write to the file.
 * @param gzip is true if the file should be gzipped.
 */
protected void createTextFile(int fileNum, int numRecords, boolean gzip,
    ColumnGenerator... extraCols) throws IOException {
  int startId = fileNum * numRecords;

  String ext = ".txt";
  if (gzip) {
    ext = ext + ".gz";
  }
  Path tablePath = getTablePath();
  Path filePath = new Path(tablePath, "part" + fileNum + ext);

  Configuration conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  FileSystem fs = FileSystem.get(conf);
  fs.mkdirs(tablePath);
  OutputStream os = fs.create(filePath);
  if (gzip) {
    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
    CompressionCodec codec = ccf.getCodec(filePath);
    os = codec.createOutputStream(os);
  }
  BufferedWriter w = new BufferedWriter(new OutputStreamWriter(os));
  for (int i = 0; i < numRecords; i++) {
    w.write(getRecordLine(startId + i, extraCols));
  }
  w.close();
  os.close();

  if (gzip) {
    verifyCompressedFile(filePath, numRecords);
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 41, Source: TestExport.java

Example 10: verifyCompressedFile

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
private void verifyCompressedFile(Path f, int expectedNumLines)
    throws IOException {
  Configuration conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  FileSystem fs = FileSystem.get(conf);
  InputStream is = fs.open(f);
  CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
  CompressionCodec codec = ccf.getCodec(f);
  LOG.info("gzip check codec is " + codec);
  Decompressor decompressor = CodecPool.getDecompressor(codec);
  if (null == decompressor) {
    LOG.info("Verifying gzip sanity with null decompressor");
  } else {
    LOG.info("Verifying gzip sanity with decompressor: "
        + decompressor.toString());
  }
  is = codec.createInputStream(is, decompressor);
  BufferedReader r = new BufferedReader(new InputStreamReader(is));
  int numLines = 0;
  while (true) {
    String ln = r.readLine();
    if (ln == null) {
      break;
    }
    numLines++;
  }

  r.close();
  assertEquals("Did not read back correct number of lines",
      expectedNumLines, numLines);
  LOG.info("gzip sanity check returned " + numLines + " lines; ok.");
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 35, Source: TestExport.java

Example 11: createSequenceFile

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
/**
 * Create a data file in SequenceFile format that gets exported to the db.
 * @param fileNum the number of the file (for multi-file export).
 * @param numRecords how many records to write to the file.
 * @param className the table class name to instantiate and populate
 *          for each record.
 */
private void createSequenceFile(int fileNum, int numRecords, String className)
    throws IOException {

  try {
    // Instantiate the value record object via reflection.
    Class cls = Class.forName(className, true,
        Thread.currentThread().getContextClassLoader());
    SqoopRecord record = (SqoopRecord) ReflectionUtils.newInstance(
        cls, new Configuration());

    // Create the SequenceFile.
    Configuration conf = new Configuration();
    if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
      conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
    }
    FileSystem fs = FileSystem.get(conf);
    Path tablePath = getTablePath();
    Path filePath = new Path(tablePath, "part" + fileNum);
    fs.mkdirs(tablePath);
    SequenceFile.Writer w = SequenceFile.createWriter(
        fs, conf, filePath, LongWritable.class, cls);

    // Now write the data.
    int startId = fileNum * numRecords;
    for (int i = 0; i < numRecords; i++) {
      record.parse(getRecordLine(startId + i));
      w.append(new LongWritable(startId + i), record);
    }

    w.close();
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  } catch (RecordParser.ParseError pe) {
    throw new IOException(pe);
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 44, Source: TestExport.java

Example 12: removeTablePath

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
/** Removing an existing table directory from the filesystem. */
private void removeTablePath() throws IOException {
  Configuration conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  FileSystem fs = FileSystem.get(conf);
  fs.delete(getTablePath(), true);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 10, Source: TestExport.java

Example 13: setUp

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
public void setUp() throws IOException, InterruptedException {
  conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  String tmpDir = System.getProperty("test.build.data", "/tmp/");
  this.outDir = new Path(System.getProperty("java.io.tmpdir"));
  FileSystem fs = FileSystem.get(conf);
  if (fs.exists(outDir)) {
    fs.delete(outDir, true);
  }
  fs.mkdirs(outDir);

  loader = new LargeObjectLoader(conf, outDir);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 16, Source: TestLargeObjectLoader.java

Example 14: createAvroFile

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
/**
 * Create a data file that gets exported to the db.
 * @param fileNum the number of the file (for multi-file export)
 * @param numRecords how many records to write to the file.
 */
protected void createAvroFile(int fileNum, int numRecords,
    ColumnGenerator... extraCols) throws IOException {

  Path tablePath = getTablePath();
  Path filePath = new Path(tablePath, "part" + fileNum);

  Configuration conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  FileSystem fs = FileSystem.get(conf);
  fs.mkdirs(tablePath);
  OutputStream os = fs.create(filePath);

  Schema schema = buildAvroSchema(extraCols);
  DatumWriter<GenericRecord> datumWriter =
    new GenericDatumWriter<GenericRecord>();
  DataFileWriter<GenericRecord> dataFileWriter =
    new DataFileWriter<GenericRecord>(datumWriter);
  dataFileWriter.create(schema, os);

  for (int i = 0; i < numRecords; i++) {
    GenericRecord record = new GenericData.Record(schema);
    record.put("id", i);
    record.put("msg", getMsgPrefix() + i);
    addExtraColumns(record, i, extraCols);
    dataFileWriter.append(record);
  }

  dataFileWriter.close();
  os.close();
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 38, Source: TestAvroExport.java

Example 15: initUtils

import com.cloudera.sqoop.testutil.BaseSqoopTestCase; // import the required package/class
public void initUtils() throws IOException, MetaException {
  if (initialized) {
    return;
  }
  conf = new Configuration();
  if (!BaseSqoopTestCase.isOnPhysicalCluster()) {
    conf.set(CommonArgs.FS_DEFAULT_NAME, CommonArgs.LOCAL_FS);
  }
  fs = FileSystem.get(conf);
  fs.initialize(fs.getWorkingDirectory().toUri(), conf);
  storageInfo = null;
  SqoopHCatUtilities.setTestMode(true);
}
 
Developer: unicredit, Project: zSqoop, Lines: 14, Source: HCatalogTestUtils.java


Note: The com.cloudera.sqoop.testutil.BaseSqoopTestCase examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The code snippets are drawn from open-source projects contributed by their respective developers, and copyright of the source code belongs to the original authors. Please refer to each project's License before distributing or using the code; do not republish without permission.