

Java DistCpConstants Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.tools.DistCpConstants. If you are unsure what the DistCpConstants class is for, how to use it, or what it looks like in real code, the curated examples below should help.


The DistCpConstants class belongs to the org.apache.hadoop.tools package. Nine code examples of the class are shown below, sorted by popularity by default.
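Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of how DistCpConstants keys are typically used with a Hadoop Configuration. The class name DistCpConstantsSketch and the work path value are made up for illustration; the constant names themselves all appear in the examples that follow.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.tools.DistCpConstants;

public class DistCpConstantsSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // Read the per-map bandwidth limit (MB), falling back to DistCp's shipped default.
    int bandwidthMB = conf.getInt(DistCpConstants.CONF_LABEL_BANDWIDTH_MB,
        DistCpConstants.DEFAULT_BANDWIDTH_MB);

    // The constants are also used as plain string keys for path settings,
    // as the examples below do for the target work/final paths.
    // The path value here is illustrative only.
    conf.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, "/tmp/distcp-work");

    System.out.println("Bandwidth per map (MB): " + bandwidthMB);
    System.out.println("Target work path: "
        + conf.get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
  }
}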

Example 1: getTmpFile

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
private Path getTmpFile(Path target, Mapper.Context context) {
  Path targetWorkPath = new Path(context.getConfiguration().
      get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));

  Path root = target.equals(targetWorkPath)? targetWorkPath.getParent() : targetWorkPath;
  LOG.info("Creating temp file: " +
      new Path(root, ".distcp.tmp." + context.getTaskAttemptID().toString()));
  return new Path(root, ".distcp.tmp." + context.getTaskAttemptID().toString());
}
 
Developer: naver, Project: hadoop, Lines: 10, Source: RetriableFileCopyCommand.java

Example 2: assertDefaultValues

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
private void assertDefaultValues(DistCpOptions distCpOptions) {
  assertThat(distCpOptions, is(not(nullValue())));
  assertThat(distCpOptions.preserveAttributes().hasNext(), is(false));
  assertThat(distCpOptions.shouldPreserveRawXattrs(), is(false));
  assertThat(distCpOptions.shouldAppend(), is(false));
  assertThat(distCpOptions.shouldAtomicCommit(), is(false));
  assertThat(distCpOptions.getAtomicWorkPath(), is(nullValue()));
  assertThat(distCpOptions.shouldBlock(), is(true));
  assertThat(distCpOptions.getCopyStrategy(), is(DistCpConstants.UNIFORMSIZE));
  assertThat(distCpOptions.shouldDeleteMissing(), is(false));
  assertThat(distCpOptions.shouldIgnoreFailures(), is(false));
  assertThat(distCpOptions.getLogPath(), is(nullValue()));
  assertThat(distCpOptions.getMapBandwidth(), is(DistCpConstants.DEFAULT_BANDWIDTH_MB));
  assertThat(distCpOptions.getMaxMaps(), is(DistCpConstants.DEFAULT_MAPS));
  assertThat(distCpOptions.shouldOverwrite(), is(false));
  assertThat(distCpOptions.shouldSkipCRC(), is(false));
  assertThat(distCpOptions.getSslConfigurationFile(), is(nullValue()));
  assertThat(distCpOptions.shouldSyncFolder(), is(false));
  assertThat(distCpOptions.getTargetPathExists(), is(true));
}
 
Developer: HotelsDotCom, Project: circus-train, Lines: 21, Source: DistCpOptionsParserTest.java

Example 3: setup

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
/**
 * Implementation of the Mapper::setup() method. This extracts the DistCp-
 * options specified in the Job's configuration, to set up the Job.
 * @param context Mapper's context.
 * @throws IOException On IO failure.
 * @throws InterruptedException If the job is interrupted.
 */
@Override
public void setup(Context context) throws IOException, InterruptedException {
  conf = context.getConfiguration();

  syncFolders = conf.getBoolean(DistCpOptionSwitch.SYNC_FOLDERS.getConfigLabel(), false);
  ignoreFailures = conf.getBoolean(DistCpOptionSwitch.IGNORE_FAILURES.getConfigLabel(), false);
  skipCrc = conf.getBoolean(DistCpOptionSwitch.SKIP_CRC.getConfigLabel(), false);
  overWrite = conf.getBoolean(DistCpOptionSwitch.OVERWRITE.getConfigLabel(), false);
  append = conf.getBoolean(DistCpOptionSwitch.APPEND.getConfigLabel(), false);
  preserve = DistCpUtils.unpackAttributes(conf.get(DistCpOptionSwitch.
      PRESERVE_STATUS.getConfigLabel()));

  targetWorkPath = new Path(conf.get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
  Path targetFinalPath = new Path(conf.get(
          DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH));
  targetFS = targetFinalPath.getFileSystem(conf);

  if (targetFS.exists(targetFinalPath) && targetFS.isFile(targetFinalPath)) {
    overWrite = true; // When target is an existing file, overwrite it.
  }

  if (conf.get(DistCpConstants.CONF_LABEL_SSL_CONF) != null) {
    initializeSSLConf(context);
  }
}
 
Developer: naver, Project: hadoop, Lines: 33, Source: CopyMapper.java

Example 4: getConfiguration

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
private static Configuration getConfiguration() throws IOException {
  Configuration configuration = getConfigurationForCluster();
  final FileSystem fs = cluster.getFileSystem();
  Path workPath = new Path(TARGET_PATH)
          .makeQualified(fs.getUri(), fs.getWorkingDirectory());
  configuration.set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH,
          workPath.toString());
  configuration.set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH,
          workPath.toString());
  configuration.setBoolean(DistCpOptionSwitch.OVERWRITE.getConfigLabel(),
          false);
  configuration.setBoolean(DistCpOptionSwitch.SKIP_CRC.getConfigLabel(),
          false);
  configuration.setBoolean(DistCpOptionSwitch.SYNC_FOLDERS.getConfigLabel(),
          true);
  configuration.set(DistCpOptionSwitch.PRESERVE_STATUS.getConfigLabel(),
          "br");
  return configuration;
}
 
Developer: naver, Project: hadoop, Lines: 20, Source: TestCopyMapper.java

Example 5: testSetCommitDirectory

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
@Test
public void testSetCommitDirectory() {
  try {
    Job job = Job.getInstance(new Configuration());
    Assert.assertEquals(null, CopyOutputFormat.getCommitDirectory(job));

    job.getConfiguration().set(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH, "");
    Assert.assertEquals(null, CopyOutputFormat.getCommitDirectory(job));

    Path directory = new Path("/tmp/test");
    CopyOutputFormat.setCommitDirectory(job, directory);
    Assert.assertEquals(directory, CopyOutputFormat.getCommitDirectory(job));
    Assert.assertEquals(directory.toString(), job.getConfiguration().
        get(DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH));
  } catch (IOException e) {
    LOG.error("Exception encountered while running test", e);
    Assert.fail("Failed while testing for set Commit Directory");
  }
}
 
Developer: naver, Project: hadoop, Lines: 20, Source: TestCopyOutputFormat.java

Example 6: testSetWorkingDirectory

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
@Test
public void testSetWorkingDirectory() {
  try {
    Job job = Job.getInstance(new Configuration());
    Assert.assertEquals(null, CopyOutputFormat.getWorkingDirectory(job));

    job.getConfiguration().set(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH, "");
    Assert.assertEquals(null, CopyOutputFormat.getWorkingDirectory(job));

    Path directory = new Path("/tmp/test");
    CopyOutputFormat.setWorkingDirectory(job, directory);
    Assert.assertEquals(directory, CopyOutputFormat.getWorkingDirectory(job));
    Assert.assertEquals(directory.toString(), job.getConfiguration().
        get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
  } catch (IOException e) {
    LOG.error("Exception encountered while running test", e);
    Assert.fail("Failed while testing for set Working Directory");
  }
}
 
Developer: naver, Project: hadoop, Lines: 20, Source: TestCopyOutputFormat.java

Example 7: testDynamicInputChunkContext

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
@Test
public void testDynamicInputChunkContext() throws IOException {
  Configuration configuration = new Configuration();
  configuration.set(DistCpConstants.CONF_LABEL_LISTING_FILE_PATH,
      "/tmp/test/file1.seq");
  DynamicInputFormat firstInputFormat = new DynamicInputFormat();
  DynamicInputFormat secondInputFormat = new DynamicInputFormat();
  DynamicInputChunkContext firstContext =
      firstInputFormat.getChunkContext(configuration);
  DynamicInputChunkContext secondContext =
      firstInputFormat.getChunkContext(configuration);
  DynamicInputChunkContext thirdContext =
      secondInputFormat.getChunkContext(configuration);
  DynamicInputChunkContext fourthContext =
      secondInputFormat.getChunkContext(configuration);
  Assert.assertTrue("Chunk contexts from the same DynamicInputFormat " +
      "object should be the same.",firstContext.equals(secondContext));
  Assert.assertTrue("Chunk contexts from the same DynamicInputFormat " +
      "object should be the same.",thirdContext.equals(fourthContext));
  Assert.assertTrue("Contexts from different DynamicInputFormat " +
      "objects should be different.",!firstContext.equals(thirdContext));
}
 
Developer: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 23, Source: TestDynamicInputFormat.java

Example 8: setup

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
/**
 * Implementation of the Mapper::setup() method. This extracts the DistCp-
 * options specified in the Job's configuration, to set up the Job.
 * @param context Mapper's context.
 * @throws IOException On IO failure.
 * @throws InterruptedException If the job is interrupted.
 */
@Override
public void setup(Context context) throws IOException, InterruptedException {
  conf = context.getConfiguration();

  syncFolders = conf.getBoolean(DistCpOptionSwitch.SYNC_FOLDERS.getConfigLabel(), false);
  ignoreFailures = conf.getBoolean(DistCpOptionSwitch.IGNORE_FAILURES.getConfigLabel(), false);
  skipCrc = conf.getBoolean(DistCpOptionSwitch.SKIP_CRC.getConfigLabel(), false);
  overWrite = conf.getBoolean(DistCpOptionSwitch.OVERWRITE.getConfigLabel(), false);
  preserve = DistCpUtils.unpackAttributes(conf.get(DistCpOptionSwitch.
      PRESERVE_STATUS.getConfigLabel()));

  targetWorkPath = new Path(conf.get(DistCpConstants.CONF_LABEL_TARGET_WORK_PATH));
  Path targetFinalPath = new Path(conf.get(
          DistCpConstants.CONF_LABEL_TARGET_FINAL_PATH));
  targetFS = targetFinalPath.getFileSystem(conf);

  if (targetFS.exists(targetFinalPath) && targetFS.isFile(targetFinalPath)) {
    overWrite = true; // When target is an existing file, overwrite it.
  }

  if (conf.get(DistCpConstants.CONF_LABEL_SSL_CONF) != null) {
    initializeSSLConf(context);
  }
}
 
Developer: ict-carch, Project: hadoop-plus, Lines: 32, Source: CopyMapper.java

Example 9: getInputStream

import org.apache.hadoop.tools.DistCpConstants; // import the required package/class
private static ThrottledInputStream getInputStream(Path path,
    Configuration conf) throws IOException {
  try {
    FileSystem fs = path.getFileSystem(conf);
    long bandwidthMB = conf.getInt(DistCpConstants.CONF_LABEL_BANDWIDTH_MB,
            DistCpConstants.DEFAULT_BANDWIDTH_MB);
    FSDataInputStream in = fs.open(path);
    return new ThrottledInputStream(in, bandwidthMB * 1024 * 1024);
  }
  catch (IOException e) {
    throw new CopyReadException(e);
  }
}
 
Developer: naver, Project: hadoop, Lines: 14, Source: RetriableFileCopyCommand.java


Note: The org.apache.hadoop.tools.DistCpConstants examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their authors; copyright remains with the original authors, and distribution and use should follow the license of the corresponding project. Do not reproduce without permission.