

Java StringUtils Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.util.StringUtils. If you are wondering what the StringUtils class is for, how it is used, or what real-world examples look like, the selected class code examples below may help.


The StringUtils class belongs to the org.apache.hadoop.util package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
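Before the individual examples, here is a minimal stand-alone sketch (not drawn from any of the projects below; the class name StringUtilsQuickTour is made up) showing three of the StringUtils helpers that recur in the examples — stringifyException, join, and byteDesc:

import java.util.Arrays;
import org.apache.hadoop.util.StringUtils;

public class StringUtilsQuickTour {
  public static void main(String[] args) {
    // Turn a Throwable's stack trace into a single String for logging.
    String trace = StringUtils.stringifyException(new RuntimeException("boom"));
    System.out.println(trace);

    // Join an Iterable with a separator.
    String csv = StringUtils.join(",", Arrays.asList("alpha", "beta", "gamma"));
    System.out.println(csv); // alpha,beta,gamma

    // Render a byte count in human-readable form (the exact suffix, e.g. "128 MB",
    // depends on the Hadoop version).
    System.out.println(StringUtils.byteDesc(128L * 1024 * 1024));
  }
}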

Example 1: createTableFromSQL

import org.apache.hadoop.util.StringUtils; // import the required package/class
public void createTableFromSQL(String sql) throws SQLException {
  Connection dbcon = this.getConnection();

  System.out.println("SQL : " + sql);
  this.dropTableIfExists("TPCH1M_LINEITEM");

  try {
    Statement st = dbcon.createStatement();
    int res = st.executeUpdate(sql);
    System.out.println("Result : " + res);

  } catch (SQLException e) {
    LOG.error("Got SQLException during creating table: " + StringUtils.stringifyException(e));
  }

}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 17, Source file: MSSQLTestUtils.java

Example 2: recordFailure

import org.apache.hadoop.util.StringUtils; // import the required package/class
private void recordFailure(final Mutation m, final long keyBase,
    final long start, IOException e) {
  failedKeySet.add(keyBase);
  String exceptionInfo;
  if (e instanceof RetriesExhaustedWithDetailsException) {
    RetriesExhaustedWithDetailsException aggEx = (RetriesExhaustedWithDetailsException) e;
    exceptionInfo = aggEx.getExhaustiveDescription();
  } else {
    StringWriter stackWriter = new StringWriter();
    PrintWriter pw = new PrintWriter(stackWriter);
    e.printStackTrace(pw);
    pw.flush();
    exceptionInfo = StringUtils.stringifyException(e);
  }
  LOG.error("Failed to mutate: " + keyBase + " after " + (System.currentTimeMillis() - start)
      + "ms; region information: " + getRegionDebugInfoSafe(table, m.getRow()) + "; errors: "
      + exceptionInfo);
}
 
Developer: fengchen8086, Project: ditb, Lines: 19, Source file: MultiThreadedUpdaterWithACL.java

Example 3: buildACL

import org.apache.hadoop.util.StringUtils; // import the required package/class
/**
 * Build ACL from the given two Strings.
 * The Strings contain comma separated values.
 *
 * @param userGroupStrings array of Strings containing comma separated users and groups
 */
private void buildACL(String[] userGroupStrings) {
  users = new HashSet<String>();
  groups = new HashSet<String>();
  for (String aclPart : userGroupStrings) {
    if (aclPart != null && isWildCardACLValue(aclPart)) {
      allAllowed = true;
      break;
    }
  }
  if (!allAllowed) {      
    if (userGroupStrings.length >= 1 && userGroupStrings[0] != null) {
      users = StringUtils.getTrimmedStringCollection(userGroupStrings[0]);
    } 
    
    if (userGroupStrings.length == 2 && userGroupStrings[1] != null) {
      groups = StringUtils.getTrimmedStringCollection(userGroupStrings[1]);
      groupsMapping.cacheGroupsAdd(new LinkedList<String>(groups));
    }
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 27, Source file: AccessControlList.java
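As a side note, getTrimmedStringCollection splits a comma-separated String and trims each entry; a minimal sketch (the class name and sample values are made up for illustration):

import java.util.Collection;
import org.apache.hadoop.util.StringUtils;

public class TrimmedCollectionDemo {
  public static void main(String[] args) {
    // Splits on commas and strips surrounding whitespace from each token.
    Collection<String> users = StringUtils.getTrimmedStringCollection(" alice, bob ,carol ");
    System.out.println(users); // expected: [alice, bob, carol]
  }
}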

Example 4: run

import org.apache.hadoop.util.StringUtils; // import the required package/class
@Override
/** {@inheritDoc} */
public int run(SqoopOptions options) {
  if (!init(options)) {
    return 1;
  }

  try {
    HiveImport hiveImport = new HiveImport(options, manager,
        options.getConf(), false);
    hiveImport.importTable(options.getTableName(),
        options.getHiveTableName(), true);
  } catch (IOException ioe) {
    LOG.error("Encountered IOException running create table job: "
        + StringUtils.stringifyException(ioe));
    if (System.getProperty(Sqoop.SQOOP_RETHROW_PROPERTY) != null) {
      throw new RuntimeException(ioe);
    } else {
      return 1;
    }
  } finally {
    destroy(options);
  }

  return 0;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 27, Source file: CreateHiveTableTool.java

Example 5: waitForServer

import org.apache.hadoop.util.StringUtils; // import the required package/class
/**
 * Blocks the current thread until the server is shut down.
 */
public void waitForServer() {
  while (true) {
    int curState = server.getState();
    if (curState == ServerConstants.SERVER_STATE_SHUTDOWN) {
      LOG.info("Got shutdown notification");
      break;
    }

    try {
      Thread.sleep(100);
    } catch (InterruptedException ie) {
      LOG.info("Interrupted while blocking for server:"
          + StringUtils.stringifyException(ie));
    }
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 20, Source file: HsqldbMetaStore.java

Example 6: run

import org.apache.hadoop.util.StringUtils; // import the required package/class
@Override
public int run(Configuration conf, List<String> args) throws IOException {
  String name = StringUtils.popFirstNonOption(args);
  if (name == null) {
    System.err.println("You must specify a name when deleting a " +
        "cache pool.");
    return 1;
  }
  if (!args.isEmpty()) {
    System.err.print("Can't understand arguments: " +
      Joiner.on(" ").join(args) + "\n");
    System.err.println("Usage is " + getShortUsage());
    return 1;
  }
  DistributedFileSystem dfs = AdminHelper.getDFS(conf);
  try {
    dfs.removeCachePool(name);
  } catch (IOException e) {
    System.err.println(AdminHelper.prettifyException(e));
    return 2;
  }
  System.out.println("Successfully removed cache pool " + name + ".");
  return 0;
}
 
Developer: naver, Project: hadoop, Lines: 25, Source file: CacheAdmin.java
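For reference, popFirstNonOption removes and returns the first token in the argument list that is not an option flag (i.e. does not start with "-"); a minimal sketch with made-up arguments (the class name is hypothetical):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.util.StringUtils;

public class PopFirstNonOptionDemo {
  public static void main(String[] args) {
    List<String> argList = new ArrayList<>();
    argList.add("-force");
    argList.add("myPool");

    // Removes and returns "myPool"; the option flag "-force" stays in the list.
    String name = StringUtils.popFirstNonOption(argList);
    System.out.println(name);    // expected: myPool
    System.out.println(argList); // expected: [-force]
  }
}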

Example 7: main

import org.apache.hadoop.util.StringUtils; // import the required package/class
public static void main(String[] args) {
  Thread.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
  StringUtils.startupShutdownMessage(SharedCacheManager.class, args, LOG);
  try {
    Configuration conf = new YarnConfiguration();
    SharedCacheManager sharedCacheManager = new SharedCacheManager();
    ShutdownHookManager.get().addShutdownHook(
        new CompositeServiceShutdownHook(sharedCacheManager),
        SHUTDOWN_HOOK_PRIORITY);
    sharedCacheManager.init(conf);
    sharedCacheManager.start();
  } catch (Throwable t) {
    LOG.fatal("Error starting SharedCacheManager", t);
    System.exit(-1);
  }
}
 
Developer: naver, Project: hadoop, Lines: 17, Source file: SharedCacheManager.java

Example 8: serviceHadoopConf

import org.apache.hadoop.util.StringUtils; // import the required package/class
@Test
@TestDir
public void serviceHadoopConf() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
                  SchedulerService.class.getName(),
                  FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);

  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
  FileSystemAccessService fsAccess = (FileSystemAccessService) server.get(FileSystemAccess.class);
  Assert.assertEquals(fsAccess.serviceHadoopConf.get("foo"), "FOO");
  server.destroy();
}
 
Developer: naver, Project: hadoop, Lines: 18, Source file: TestFileSystemAccessService.java

Example 9: kerberosInitializationFailure

import org.apache.hadoop.util.StringUtils; // import the required package/class
@Test
@TestException(exception = ServiceException.class, msgRegExp = "H02.*")
@TestDir
public void kerberosInitializationFailure() throws Exception {
  String dir = TestDirHelper.getTestDir().getAbsolutePath();
  String services = StringUtils.join(",",
    Arrays.asList(InstrumentationService.class.getName(),
                  SchedulerService.class.getName(),
                  FileSystemAccessService.class.getName()));
  Configuration conf = new Configuration(false);
  conf.set("server.services", services);
  conf.set("server.hadoop.authentication.type", "kerberos");
  conf.set("server.hadoop.authentication.kerberos.keytab", "/tmp/foo");
  conf.set("server.hadoop.authentication.kerberos.principal", "[email protected]");
  Server server = new Server("server", dir, dir, dir, dir, conf);
  server.init();
}
 
Developer: naver, Project: hadoop, Lines: 18, Source file: TestFileSystemAccessService.java

Example 10: abort

import org.apache.hadoop.util.StringUtils; // import the required package/class
/**
 * Abort the application and wait for it to finish.
 * @param t the exception that signalled the problem
 * @throws IOException A wrapper around the exception that was passed in
 */
void abort(Throwable t) throws IOException {
  LOG.info("Aborting because of " + StringUtils.stringifyException(t));
  try {
    downlink.abort();
    downlink.flush();
  } catch (IOException e) {
    // IGNORE cleanup problems
  }
  try {
    handler.waitForFinish();
  } catch (Throwable ignored) {
    process.destroy();
  }
  IOException wrapper = new IOException("pipe child exception");
  wrapper.initCause(t);
  throw wrapper;      
}
 
Developer: naver, Project: hadoop, Lines: 23, Source file: Application.java

Example 11: getConnection

import org.apache.hadoop.util.StringUtils; // import the required package/class
private Connection getConnection() {

  if (conn == null) {

    try {
      Connection con = DriverManager.getConnection(HOST_URL,
          DATABASE_USER, DATABASE_PASSWORD);
      conn = con;
      return con;
    } catch (SQLException e) {
      LOG.error("Got SQLException while setting up connection: " + StringUtils.stringifyException(e));
      return null;
    }
  }

  return conn;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 18, Source file: MSSQLTestUtils.java

Example 12: testSetClasspathWithJobClassloader

import org.apache.hadoop.util.StringUtils; // import the required package/class
@Test (timeout = 120000)
public void testSetClasspathWithJobClassloader() throws IOException {
  Configuration conf = new Configuration();
  conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
  conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_CLASSLOADER, true);
  Map<String, String> env = new HashMap<String, String>();
  MRApps.setClasspath(env, conf);
  String cp = env.get("CLASSPATH");
  String appCp = env.get("APP_CLASSPATH");
  assertFalse("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is in the"
    + " classpath!", cp.contains("jar" + ApplicationConstants.CLASS_PATH_SEPARATOR + "job"));
  assertFalse("MAPREDUCE_JOB_CLASSLOADER true, but PWD is in the classpath!",
    cp.contains("PWD"));
  String expectedAppClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
    Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/job.jar",
      "job.jar/classes/", "job.jar/lib/*",
      ApplicationConstants.Environment.PWD.$$() + "/*"));
  assertEquals("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app"
    + " classpath!", expectedAppClasspath, appCp);
}
 
Developer: naver, Project: hadoop, Lines: 21, Source file: TestMRApps.java

Example 13: genNewElementMethod

import org.apache.hadoop.util.StringUtils; // import the required package/class
void genNewElementMethod(String className, Method method, int indent) {
  String methodName = method.getName();
  String retName = method.getReturnType().getSimpleName();
  Class<?>[] params = method.getParameterTypes();
  echo(indent, "\n",
       "@Override\n",
       "public ", retName, "<", className, topMode ? "> " : "<T>> ",
       methodName, "(");
  if (params.length == 0) {
    puts(0, ") {");
    puts(indent,
         topMode ? "" : "  closeAttrs();\n",
         "  return ", StringUtils.toLowerCase(retName), "_" + "(this, ",
         isInline(className, retName), ");\n", "}");
  } else if (params.length == 1) {
    puts(0, "String selector) {");
    puts(indent,
         "  return setSelector(", methodName, "(), selector);\n", "}");
  } else {
    throwUnhandled(className, method);
  }
}
 
Developer: naver, Project: hadoop, Lines: 23, Source file: HamletGen.java

Example 14: printJobDetails

import org.apache.hadoop.util.StringUtils; // import the required package/class
private void printJobDetails() {
  StringBuffer jobDetails = new StringBuffer();
  jobDetails.append("\nHadoop job: " ).append(job.getJobId());
  jobDetails.append("\n=====================================");
  jobDetails.append("\nUser: ").append(job.getUsername()); 
  jobDetails.append("\nJobName: ").append(job.getJobname()); 
  jobDetails.append("\nJobConf: ").append(job.getJobConfPath()); 
  jobDetails.append("\nSubmitted At: ").append(StringUtils.
                      getFormattedTimeWithDiff(dateFormat,
                      job.getSubmitTime(), 0)); 
  jobDetails.append("\nLaunched At: ").append(StringUtils.
                      getFormattedTimeWithDiff(dateFormat,
                      job.getLaunchTime(),
                      job.getSubmitTime()));
  jobDetails.append("\nFinished At: ").append(StringUtils.
                      getFormattedTimeWithDiff(dateFormat,
                      job.getFinishTime(),
                      job.getLaunchTime()));
  jobDetails.append("\nStatus: ").append(((job.getJobStatus() == null) ? 
                    "Incomplete" :job.getJobStatus()));
  printCounters(jobDetails, job.getTotalCounters(), job.getMapCounters(),
      job.getReduceCounters());
  jobDetails.append("\n");
  jobDetails.append("\n=====================================");
  System.out.println(jobDetails.toString());
}
 
Developer: naver, Project: hadoop, Lines: 27, Source file: HistoryViewer.java
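getFormattedTimeWithDiff formats a timestamp with the supplied DateFormat and, when the reference timestamp is non-zero, appends the elapsed time between the two. A minimal sketch with made-up timestamps follows (the class name is hypothetical, and the exact wording of the appended diff, roughly "(1mins, 30sec)", may vary by Hadoop version):

import java.text.SimpleDateFormat;
import org.apache.hadoop.util.StringUtils;

public class FormattedTimeDemo {
  public static void main(String[] args) {
    SimpleDateFormat dateFormat = new SimpleDateFormat("d-MMM-yyyy HH:mm:ss");

    long submitTime = System.currentTimeMillis();
    long launchTime = submitTime + 90_000; // hypothetical launch 90 seconds after submit

    // Prints the formatted launch time followed by the elapsed time since submitTime.
    System.out.println(
        StringUtils.getFormattedTimeWithDiff(dateFormat, launchTime, submitTime));
  }
}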

Example 15: logStats

import org.apache.hadoop.util.StringUtils; // import the required package/class
public void logStats() {
  // Log size
  long totalSize = heapSize();
  long freeSize = maxSize - totalSize;
  LruBlockCache.LOG.info("totalSize=" + StringUtils.byteDesc(totalSize) + ", " +
      "freeSize=" + StringUtils.byteDesc(freeSize) + ", " +
      "max=" + StringUtils.byteDesc(this.maxSize) + ", " +
      "blockCount=" + getBlockCount() + ", " +
      "accesses=" + stats.getRequestCount() + ", " +
      "hits=" + stats.getHitCount() + ", " +
      "hitRatio=" + (stats.getHitCount() == 0 ?
        "0" : (StringUtils.formatPercent(stats.getHitRatio(), 2)+ ", ")) + ", " +
      "cachingAccesses=" + stats.getRequestCachingCount() + ", " +
      "cachingHits=" + stats.getHitCachingCount() + ", " +
      "cachingHitsRatio=" + (stats.getHitCachingCount() == 0 ?
        "0,": (StringUtils.formatPercent(stats.getHitCachingRatio(), 2) + ", ")) +
      "evictions=" + stats.getEvictionCount() + ", " +
      "evicted=" + stats.getEvictedCount() + ", " +
      "evictedPerRun=" + stats.evictedPerEviction());
}
 
Developer: fengchen8086, Project: ditb, Lines: 21, Source file: LruBlockCache.java
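The log line above relies on two formatting helpers, byteDesc and formatPercent; a minimal sketch of what they produce (the class name and sizes are made up, outputs are indicative only):

import org.apache.hadoop.util.StringUtils;

public class CacheStatsFormatDemo {
  public static void main(String[] args) {
    long maxSize = 256L * 1024 * 1024;   // hypothetical 256 MB cache
    long totalSize = 200L * 1024 * 1024; // hypothetical current usage

    // byteDesc renders a byte count with a human-readable unit suffix (KB/MB/GB ...).
    System.out.println("totalSize=" + StringUtils.byteDesc(totalSize)
        + ", freeSize=" + StringUtils.byteDesc(maxSize - totalSize));

    // formatPercent renders a ratio in [0, 1] as a percentage with the given
    // number of decimal places, e.g. 0.8756 -> "87.56%".
    System.out.println("hitRatio=" + StringUtils.formatPercent(0.8756, 2));
  }
}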


Note: The org.apache.hadoop.util.StringUtils class examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the license of the corresponding project. Do not reproduce without permission.