

Java StringUtils.join Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.util.StringUtils.join. If you are unsure what StringUtils.join does, how to call it, or what real-world usage looks like, the curated snippets below should help. You can also explore other usage of the containing class, org.apache.hadoop.util.StringUtils.


A total of 15 StringUtils.join code examples are shown below, listed roughly in order of popularity.
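Before diving into the examples, here is a minimal sketch of the two-argument form join(separator, iterable) that every snippet below relies on. The class name StringUtilsJoinDemo and the sample strings are made up for illustration; only the StringUtils.join call itself reflects the API as it is used in the examples.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.util.StringUtils;

public class StringUtilsJoinDemo {
  public static void main(String[] args) {
    // Join an Iterable of strings with the given separator, exactly as the
    // test snippets below do when building comma-separated service lists.
    List<String> services = Arrays.asList("InstrumentationService", "SchedulerService");
    String joined = StringUtils.join(",", services);
    System.out.println(joined); // prints: InstrumentationService,SchedulerService
  }
}

Compared with a hand-rolled loop, join only inserts the separator between elements, so there is no trailing delimiter to trim; the same call pattern appears below with Path.SEPARATOR and a line separator as the delimiter.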

Example 1: NameNodeNotinWhitelists

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test
@TestException(exception = FileSystemAccessException.class, msgRegExp = "H05.*")
@TestDir
public void NameNodeNotinWhitelists() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
                  SchedulerService.class.getName(),
                  FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.name.node.whitelist", "NN");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
  fsAccess.validateNamenode("NNx");
}
 
Developer ID: naver, Project: hadoop, Lines of code: 18, Source file: TestFileSystemAccessService.java

Example 2: createTaskAttemptUnsuccessfulCompletionEvent

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
private static
    TaskAttemptUnsuccessfulCompletionEvent
    createTaskAttemptUnsuccessfulCompletionEvent(TaskAttemptImpl taskAttempt,
        TaskAttemptStateInternal attemptState) {
  TaskAttemptUnsuccessfulCompletionEvent tauce =
      new TaskAttemptUnsuccessfulCompletionEvent(
          TypeConverter.fromYarn(taskAttempt.attemptId),
          TypeConverter.fromYarn(taskAttempt.attemptId.getTaskId()
              .getTaskType()), attemptState.toString(),
          taskAttempt.finishTime,
          taskAttempt.container == null ? "UNKNOWN"
              : taskAttempt.container.getNodeId().getHost(),
          taskAttempt.container == null ? -1 
              : taskAttempt.container.getNodeId().getPort(),    
          taskAttempt.nodeRackName == null ? "UNKNOWN" 
              : taskAttempt.nodeRackName,
          StringUtils.join(
              LINE_SEPARATOR, taskAttempt.getDiagnostics()),
              taskAttempt.getCounters(), taskAttempt
              .getProgressSplitBlock().burst());
  return tauce;
}
 
Developer ID: naver, Project: hadoop, Lines of code: 23, Source file: TaskAttemptImpl.java

Example 3: testSetClasspathWithUserPrecendence

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test (timeout = 120000)
public void testSetClasspathWithUserPrecendence() {
  Configuration conf = new Configuration();
  conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
  Map<String, String> env = new HashMap<String, String>();
  try {
    MRApps.setClasspath(env, conf);
  } catch (Exception e) {
    fail("Got exception while setting classpath");
  }
  String env_str = env.get("CLASSPATH");
  String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
    Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/job.jar",
      "job.jar/classes/", "job.jar/lib/*",
      ApplicationConstants.Environment.PWD.$$() + "/*"));
  assertTrue("MAPREDUCE_JOB_USER_CLASSPATH_FIRST set, but not taking effect!",
    env_str.startsWith(expectedClasspath));
}
 
Developer ID: naver, Project: hadoop, Lines of code: 20, Source file: TestMRApps.java

Example 4: testSetClasspathWithNoUserPrecendence

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test (timeout = 120000)
public void testSetClasspathWithNoUserPrecendence() {
  Configuration conf = new Configuration();
  conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, false);
  Map<String, String> env = new HashMap<String, String>();
  try {
    MRApps.setClasspath(env, conf);
  } catch (Exception e) {
    fail("Got exception while setting classpath");
  }
  String env_str = env.get("CLASSPATH");
  String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
    Arrays.asList("job.jar/job.jar", "job.jar/classes/", "job.jar/lib/*",
      ApplicationConstants.Environment.PWD.$$() + "/*"));
  assertTrue("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, and job.jar is not in"
    + " the classpath!", env_str.contains(expectedClasspath));
  assertFalse("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!",
    env_str.startsWith(expectedClasspath));
}
 
Developer ID: naver, Project: hadoop, Lines of code: 21, Source file: TestMRApps.java

Example 5: testSetClasspathWithJobClassloader

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test (timeout = 120000)
public void testSetClasspathWithJobClassloader() throws IOException {
  Configuration conf = new Configuration();
  conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);
  Map<String, String> env = new HashMap<String, String>();
  MRApps.setClasspath(env, conf);
  String cp = env.get("CLASSPATH");
  String appCp = env.get("APP_CLASSPATH");
  assertFalse("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is in the"
    + " classpath!", cp.contains("jar" + ApplicationConstants.CLASS_PATH_SEPARATOR + "job"));
  assertFalse("MAPREDUCE_JOB_CLASSLOADER true, but PWD is in the classpath!",
    cp.contains("PWD"));
  String expectedAppClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
    Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/job.jar",
      "job.jar/classes/", "job.jar/lib/*",
      ApplicationConstants.Environment.PWD.$$() + "/*"));
  assertEquals("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app"
    + " classpath!", expectedAppClasspath, appCp);
}
 
Developer ID: naver, Project: hadoop, Lines of code: 21, Source file: TestMRApps.java

Example 6: createShuffleHandlerFiles

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
private static void createShuffleHandlerFiles(File logDir, String user,
    String appId, String appAttemptId, Configuration conf,
    List<File> fileMap) throws IOException {
  String attemptDir =
      StringUtils.join(Path.SEPARATOR,
          Arrays.asList(new String[] { logDir.getAbsolutePath(),
              ContainerLocalizer.USERCACHE, user,
              ContainerLocalizer.APPCACHE, appId, "output", appAttemptId }));
  File appAttemptDir = new File(attemptDir);
  appAttemptDir.mkdirs();
  System.out.println(appAttemptDir.getAbsolutePath());
  File indexFile = new File(appAttemptDir, "file.out.index");
  fileMap.add(indexFile);
  createIndexFile(indexFile, conf);
  File mapOutputFile = new File(appAttemptDir, "file.out");
  fileMap.add(mapOutputFile);
  createMapOutputFile(mapOutputFile, conf);
}
 
Developer ID: naver, Project: hadoop, Lines of code: 19, Source file: TestShuffleHandler.java

Example 7: concat

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
/**
 * Concat existing files together.
 * @param f the path to the target destination.
 * @param psrcs the paths to the sources to use for the concatenation.
 *
 * @throws IOException
 */
@Override
public void concat(Path f, Path[] psrcs) throws IOException {
  List<String> strPaths = new ArrayList<String>(psrcs.length);
  for(Path psrc : psrcs) {
    strPaths.add(psrc.toUri().getPath());
  }
  String srcs = StringUtils.join(",", strPaths);

  Map<String, String> params = new HashMap<String, String>();
  params.put(OP_PARAM, Operation.CONCAT.toString());
  params.put(SOURCES_PARAM, srcs);
  HttpURLConnection conn = getConnection(Operation.CONCAT.getMethod(),
      params, f, true);
  HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
}
 
Developer ID: naver, Project: hadoop, Lines of code: 23, Source file: HttpFSFileSystem.java

Example 8: noKerberosKeytabProperty

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H01.*")
@TestDir
public void noKerberosKeytabProperty() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
                  SchedulerService.class.getName(),
                  FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "kerberos");
  conf.set("server.hadoop.authentication.kerberos.keytab", " ");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 17, Source file: TestFileSystemAccessService.java

Example 9: noKerberosPrincipalProperty

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H01.*")
@TestDir
public void noKerberosPrincipalProperty() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
                  SchedulerService.class.getName(),
                  FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "kerberos");
  conf.set("server.hadoop.authentication.kerberos.keytab", "/tmp/foo");
  conf.set("server.hadoop.authentication.kerberos.principal", " ");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 18, Source file: TestFileSystemAccessService.java

Example 10: kerberosInitializationFailure

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H02.*")
@TestDir
public void kerberosInitializationFailure() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
                  SchedulerService.class.getName(),
                  FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "kerberos");
  conf.set("server.hadoop.authentication.kerberos.keytab", "/tmp/foo");
  conf.set("server.hadoop.authentication.kerberos.principal", "[email protected]");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 18, Source file: TestFileSystemAccessService.java

Example 11: launchHCatCli

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
public void launchHCatCli(String cmdLine)
  throws IOException {
  String tmpFileName = null;


  String tmpDir = System.getProperty("java.io.tmpdir");
  if (options != null) {
    tmpDir = options.getTempDir();
  }
  tmpFileName =
    new File(tmpDir, "hcat-script-"
      + System.currentTimeMillis()).getAbsolutePath();

  writeHCatScriptFile(tmpFileName, cmdLine);
  // Create the argv for the HCatalog Cli Driver.
  String[] argArray = new String[2];
  argArray[0] = "-f";
  argArray[1] = tmpFileName;
  String argLine = StringUtils.join(",", Arrays.asList(argArray));

  if (testMode) {
    LOG.debug("Executing HCatalog CLI in-process with " + argLine);
    executeHCatProgramInProcess(argArray);
  } else {
    LOG.info("Executing external HCatalog CLI process with args :" + argLine);
    executeExternalHCatProgram(Executor.getCurEnvpStrings(), argArray);
  }
}
 
Developer ID: aliyun, Project: aliyun-maxcompute-data-collectors, Lines of code: 29, Source file: SqoopHCatUtilities.java

Example 12: sampling

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test
@TestDir
@SuppressWarnings("unchecked")
public void sampling() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",", Arrays.asList(InstrumentationService.class.getName(),
                                                        SchedulerService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  Instrumentation instrumentation = server.get(Instrumentation.class);

  final AtomicInteger count = new AtomicInteger();

  Instrumentation.Variable<Long> varToSample = new Instrumentation.Variable<Long>() {
    @Override
    public Long getValue() {
      return (long) count.incrementAndGet();
    }
  };
  instrumentation.addSampler("g", "s", 10, varToSample);

  sleep(2000);
  int i = count.get();
  assertTrue(i > 0);

  Map<String, Map<String, ?>> snapshot = instrumentation.getSnapshot();
  Map<String, Map<String, Object>> samplers = (Map<String, Map<String, Object>>) snapshot.get("samplers");
  InstrumentationService.Sampler sampler = (InstrumentationService.Sampler) samplers.get("g").get("s");
  assertTrue(sampler.getRate() > 0);

  server.destroy();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 35, Source file: TestInstrumentationService.java

Example 13: testSetClasspathWithFramework

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test (timeout = 3000000)
public void testSetClasspathWithFramework() throws IOException {
  final String FRAMEWORK_NAME = "some-framework-name";
  final String FRAMEWORK_PATH = "some-framework-path#" + FRAMEWORK_NAME;
  Configuration conf = new Configuration();
  conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
  conf.set(MRJobConfig.MAPREDUCE_APPLICATION_FRAMEWORK_PATH, FRAMEWORK_PATH);
  Map<String, String> env = new HashMap<String, String>();
  try {
    MRApps.setClasspath(env, conf);
    fail("Failed to catch framework path set without classpath change");
  } catch (IllegalArgumentException e) {
    assertTrue("Unexpected IllegalArgumentException",
        e.getMessage().contains("Could not locate MapReduce framework name '"
            + FRAMEWORK_NAME + "'"));
  }

  env.clear();
  final String FRAMEWORK_CLASSPATH = FRAMEWORK_NAME + "/*.jar";
  conf.set(MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH, FRAMEWORK_CLASSPATH);
  MRApps.setClasspath(env, conf);
  final String stdClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
      Arrays.asList("job.jar/job.jar", "job.jar/classes/", "job.jar/lib/*",
          ApplicationConstants.Environment.PWD.$$() + "/*"));
  String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
      Arrays.asList(ApplicationConstants.Environment.PWD.$$(),
          FRAMEWORK_CLASSPATH, stdClasspath));
  assertEquals("Incorrect classpath with framework and no user precedence",
      expectedClasspath, env.get("CLASSPATH"));

  env.clear();
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
  MRApps.setClasspath(env, conf);
  expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
      Arrays.asList(ApplicationConstants.Environment.PWD.$$(),
          stdClasspath, FRAMEWORK_CLASSPATH));
  assertEquals("Incorrect classpath with framework and user precedence",
      expectedClasspath, env.get("CLASSPATH"));
}
 
Developer ID: naver, Project: hadoop, Lines of code: 40, Source file: TestMRApps.java

Example 14: simpleSecurity

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test
@TestDir
public void simpleSecurity() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
                  SchedulerService.class.getName(),
                  FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  Assert.assertNotNull(server.get(FileSystemAccess.class));
  server.destroy();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 16, Source file: TestFileSystemAccessService.java

Example 15: invalidSecurity

import org.apache.hadoop.util.StringUtils; // import the package/class this method depends on
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H09.*")
@TestDir
public void invalidSecurity() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
                  SchedulerService.class.getName(),
                  FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "foo");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
 
Developer ID: naver, Project: hadoop, Lines of code: 16, Source file: TestFileSystemAccessService.java


Note: The org.apache.hadoop.util.StringUtils.join examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers, and copyright in the source code remains with the original authors. Please consult each project's License before distributing or using the code, and do not republish this article without permission.