

Java Charsets.UTF_8 Field Code Examples

This article collects typical usage examples of the Java field org.apache.commons.io.Charsets.UTF_8. If you are wondering what Charsets.UTF_8 does, how to use it, or where to find it in real code, the curated field examples below should help. You can also explore further usage examples of the containing class, org.apache.commons.io.Charsets.


The following shows 15 code examples of the Charsets.UTF_8 field, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
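
Before the project snippets, here is a minimal, self-contained sketch of the two typical uses of the field: converting between bytes and strings, and wrapping a byte stream in a UTF-8 reader. It is not taken from any of the projects below; the class name Utf8FieldExample and the file name example.txt are placeholders. Note that org.apache.commons.io.Charsets.UTF_8 is simply the standard UTF-8 java.nio.charset.Charset, so on Java 7 and later it is interchangeable with java.nio.charset.StandardCharsets.UTF_8.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.io.Charsets;

public class Utf8FieldExample {
  public static void main(String[] args) throws IOException {
    // Round-trip a string through UTF-8 encoded bytes.
    byte[] raw = "héllo".getBytes(Charsets.UTF_8);
    String decoded = new String(raw, Charsets.UTF_8);
    System.out.println(decoded);

    // Wrap a byte stream in a UTF-8 reader: the pattern most of the
    // examples below follow. "example.txt" is a placeholder file name.
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
        Files.newInputStream(Paths.get("example.txt")), Charsets.UTF_8))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}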

Example 1: loadStructureDefinitions

private void loadStructureDefinitions(FhirContext theContext, Map<String, StructureDefinition> theCodeSystems, String theClasspath) {
  logD("CareConnect Loading structure definitions from classpath: "+ theClasspath);
  InputStream valuesetText = CareConnectProfileValidationSupport.class.getResourceAsStream(theClasspath);
  if (valuesetText != null) {
    InputStreamReader reader = new InputStreamReader(valuesetText, Charsets.UTF_8);

    Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader);
    for (BundleEntryComponent next : bundle.getEntry()) {
      if (next.getResource() instanceof StructureDefinition) {
        StructureDefinition nextSd = (StructureDefinition) next.getResource();
        nextSd.getText().setDivAsString("");
        String system = nextSd.getUrl();
        if (isNotBlank(system)) {
          theCodeSystems.put(system, nextSd);
        }
      }
    }
  } else {
    log.warn("Unable to load resource: {}", theClasspath);
  }
}
 
Developer: nhsconnect, Project: careconnect-reference-implementation, Lines: 21, Source: CareConnectProfileValidationSupport.java

Example 2: extractPassword

String extractPassword(String pwFile) {
  if (pwFile.isEmpty()) {
    // If there is no password file defined, we'll assume that we should do
    // an anonymous bind
    return "";
  }

  StringBuilder password = new StringBuilder();
  try (Reader reader = new InputStreamReader(
      new FileInputStream(pwFile), Charsets.UTF_8)) {
    int c = reader.read();
    while (c > -1) {
      password.append((char)c);
      c = reader.read();
    }
    return password.toString().trim();
  } catch (IOException ioe) {
    throw new RuntimeException("Could not read password file: " + pwFile, ioe);
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 20, Source: LdapGroupsMapping.java

Example 3: loadStructureDefinitions

private void loadStructureDefinitions(FhirContext theContext, Map<String, StructureDefinition> theCodeSystems, String theClasspath) {
  logD("SNOMEDMOCK Loading structure definitions from classpath: "+ theClasspath);
  InputStream valuesetText = SNOMEDUKMockValidationSupport.class.getResourceAsStream(theClasspath);
  if (valuesetText != null) {
    InputStreamReader reader = new InputStreamReader(valuesetText, Charsets.UTF_8);

    Bundle bundle = theContext.newXmlParser().parseResource(Bundle.class, reader);
    for (BundleEntryComponent next : bundle.getEntry()) {
      if (next.getResource() instanceof StructureDefinition) {
        StructureDefinition nextSd = (StructureDefinition) next.getResource();
        nextSd.getText().setDivAsString("");
        String system = nextSd.getUrl();
        if (isNotBlank(system)) {
          theCodeSystems.put(system, nextSd);
        }
      }
    }
  } else {
    log.warn("Unable to load resource: {}", theClasspath);
  }
}
 
Developer: nhsconnect, Project: careconnect-reference-implementation, Lines: 21, Source: SNOMEDUKMockValidationSupport.java

Example 4: checkMetrics

private void checkMetrics(List<byte[]> bytearrlist, int expectedCount) {
  boolean[] foundMetrics = new boolean[expectedMetrics.length];
  for (byte[] bytes : bytearrlist) {
    String binaryStr = new String(bytes, Charsets.UTF_8);
    for (int index = 0; index < expectedMetrics.length; index++) {
      if (binaryStr.indexOf(expectedMetrics[index]) >= 0) {
        foundMetrics[index] = true;
        break;
      }
    }
  }

  for (int index = 0; index < foundMetrics.length; index++) {
    if (!foundMetrics[index]) {
      assertTrue("Missing metrics: " + expectedMetrics[index], false);
    }
  }

  assertEquals("Mismatch in record count: ",
      expectedCount, bytearrlist.size());
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 21, Source: TestGangliaMetrics.java

Example 5: pump

protected void pump() throws IOException {
  InputStreamReader inputStreamReader = new InputStreamReader(
      stream, Charsets.UTF_8);
  BufferedReader br = new BufferedReader(inputStreamReader);
  String line = null;
  while ((line = br.readLine()) != null) {
    if (type == StreamType.STDOUT) {
      log.info(logPrefix + ": " + line);
    } else {
      log.warn(logPrefix + ": " + line);          
    }
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 13, Source: StreamPumper.java

Example 6: load

private Map<String, String> load() {
  Map<String, String> loadMap = new HashMap<String, String>();
  
  String filename = getConf().get(NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY, null);
  if (StringUtils.isBlank(filename)) {
    LOG.warn(NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY + " not configured. ");
    return null;
  }
  

  try (BufferedReader reader =
           new BufferedReader(new InputStreamReader(
               new FileInputStream(filename), Charsets.UTF_8))) {
    String line = reader.readLine();
    while (line != null) {
      line = line.trim();
      if (line.length() != 0 && line.charAt(0) != '#') {
        String[] columns = line.split("\\s+");
        if (columns.length == 2) {
          loadMap.put(columns[0], columns[1]);
        } else {
          LOG.warn("Line does not have two columns. Ignoring. " + line);
        }
      }
      line = reader.readLine();
    }
  } catch (Exception e) {
    LOG.warn(filename + " cannot be read.", e);
    return null;
  }
  return loadMap;
}
 
Developer: naver, Project: hadoop, Lines: 32, Source: TableMapping.java

Example 7: serialize

/**
 * Serialize the metadata to a set of bytes.
 * @return the serialized bytes
 * @throws IOException
 */
protected byte[] serialize() throws IOException {
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  JsonWriter writer = new JsonWriter(
      new OutputStreamWriter(buffer, Charsets.UTF_8));
  try {
    writer.beginObject();
    if (cipher != null) {
      writer.name(CIPHER_FIELD).value(cipher);
    }
    if (bitLength != 0) {
      writer.name(BIT_LENGTH_FIELD).value(bitLength);
    }
    if (created != null) {
      writer.name(CREATED_FIELD).value(created.getTime());
    }
    if (description != null) {
      writer.name(DESCRIPTION_FIELD).value(description);
    }
    if (attributes != null && attributes.size() > 0) {
      writer.name(ATTRIBUTES_FIELD).beginObject();
      for (Map.Entry<String, String> attribute : attributes.entrySet()) {
        writer.name(attribute.getKey()).value(attribute.getValue());
      }
      writer.endObject();
    }
    writer.name(VERSIONS_FIELD).value(versions);
    writer.endObject();
    writer.flush();
  } finally {
    writer.close();
  }
  return buffer.toByteArray();
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 38, Source: KeyProvider.java

Example 8: readFileToSetWithFileInputStream

@Private
public static void readFileToSetWithFileInputStream(String type,
    String filename, InputStream fileInputStream, Set<String> set)
    throws IOException {
  BufferedReader reader = null;
  try {
    reader = new BufferedReader(
        new InputStreamReader(fileInputStream, Charsets.UTF_8));
    String line;
    while ((line = reader.readLine()) != null) {
      String[] nodes = line.split("[ \t\n\f\r]+");
      if (nodes != null) {
        for (int i = 0; i < nodes.length; i++) {
          nodes[i] = nodes[i].trim();
          if (nodes[i].startsWith("#")) {
            // Everything from now on is a comment
            break;
          }
          if (!nodes[i].isEmpty()) {
            LOG.info("Adding a node \"" + nodes[i] + "\" to the list of "
                + type + " hosts from " + filename);
            set.add(nodes[i]);
          }
        }
      }
    }
  } finally {
    if (reader != null) {
      reader.close();
    }
    fileInputStream.close();
  }
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 33, Source: HostsFileReader.java

Example 9: readLines

/**
 * Reads the lines in a file.
 * @param fileName
 * @return lines in a String array; null if the file does not exist or if the
 * file name is null
 * @throws IOException
 */
private static String[] readLines(String fileName) throws IOException {
  try {
    if (fileName != null) {
      File file = new File (fileName);
      if (file.exists()) {
        try (
            Reader fileReader = new InputStreamReader(
                new FileInputStream(file), Charsets.UTF_8);
            BufferedReader bufferedReader = new BufferedReader(fileReader)) {
          List<String> lines = new ArrayList<String>();
          String line = null;
          while ((line = bufferedReader.readLine()) != null) {
            lines.add(line);
          }
          if (LOG.isDebugEnabled()) {
            LOG.debug("Loaded IP list of size = " + lines.size() +
                " from file = " + fileName);
          }
          return (lines.toArray(new String[lines.size()]));
        }
      } else {
        LOG.debug("Missing ip list file : "+ fileName);
      }
    }
  } catch (IOException ioe) {
    LOG.error(ioe);
    throw ioe;
  }
  return null;
}
 
Developer: naver, Project: hadoop, Lines: 37, Source: FileBasedIPList.java

Example 10: toString

@Override
public String toString(T obj) throws IOException {
  outBuf.reset();
  serializer.serialize(obj);
  byte[] buf = new byte[outBuf.getLength()];
  System.arraycopy(outBuf.getData(), 0, buf, 0, buf.length);
  return new String(Base64.encodeBase64(buf), Charsets.UTF_8);
}
 
Developer: nucypher, Project: hadoop-oss, Lines: 8, Source: DefaultStringifier.java

Example 11: readStreamHeader

private BufferedInputStream readStreamHeader() throws IOException {
  // We are flexible enough to allow the compressed stream not to
  // start with the header of BZ. So it works fine either we have
  // the header or not.
  if (super.in != null) {
    bufferedIn.mark(HEADER_LEN);
    byte[] headerBytes = new byte[HEADER_LEN];
    int actualRead = bufferedIn.read(headerBytes, 0, HEADER_LEN);
    if (actualRead != -1) {
      String header = new String(headerBytes, Charsets.UTF_8);
      if (header.compareTo(HEADER) != 0) {
        bufferedIn.reset();
      } else {
        this.isHeaderStripped = true;
        // In case of BYBLOCK mode, we also want to strip off
        // remaining two character of the header.
        if (this.readMode == READ_MODE.BYBLOCK) {
          actualRead = bufferedIn.read(headerBytes, 0,
              SUB_HEADER_LEN);
          if (actualRead != -1) {
            this.isSubHeaderStripped = true;
          }
        }
      }
    }
  }

  if (bufferedIn == null) {
    throw new IOException("Failed to read bzip2 stream.");
  }

  return bufferedIn;

}
 
Developer: naver, Project: hadoop, Lines: 34, Source: BZip2Codec.java

Example 12: createCSVPrinter

private CSVPrinter createCSVPrinter(String[] headers, String delimiter, FileOutputStream outputStream) throws IOException {
    final Writer writer = new BufferedWriter(new OutputStreamWriter(outputStream, Charsets.UTF_8));
    final CSVFormat format = CSVFormat.newFormat(delimiter.charAt(0))
            .withHeader((String[]) headers)
            .withQuote(QUOTE)
            .withRecordSeparator(System.lineSeparator());
    return new CSVPrinter(writer, format);
}
 
Developer: kenshoo, Project: file-format-streaming-converter, Lines: 8, Source: XlsxToCsvConverter.java

Example 13: getConfiguration

/**
 * Returns the hadoop-auth configuration from HttpFSServer's configuration.
 * <p>
 * It returns all HttpFSServer's configuration properties prefixed with
 * <code>httpfs.authentication</code>. The <code>httpfs.authentication</code>
 * prefix is removed from the returned property names.
 *
 * @param configPrefix parameter not used.
 * @param filterConfig parameter not used.
 *
 * @return hadoop-auth configuration read from HttpFSServer's configuration.
 */
@Override
protected Properties getConfiguration(String configPrefix,
    FilterConfig filterConfig) throws ServletException{
  Properties props = new Properties();
  Configuration conf = HttpFSServerWebApp.get().getConfig();

  props.setProperty(AuthenticationFilter.COOKIE_PATH, "/");
  for (Map.Entry<String, String> entry : conf) {
    String name = entry.getKey();
    if (name.startsWith(CONF_PREFIX)) {
      String value = conf.get(name);
      name = name.substring(CONF_PREFIX.length());
      props.setProperty(name, value);
    }
  }

  String signatureSecretFile = props.getProperty(SIGNATURE_SECRET_FILE, null);
  if (signatureSecretFile == null) {
    throw new RuntimeException("Undefined property: " + SIGNATURE_SECRET_FILE);
  }

  try {
    StringBuilder secret = new StringBuilder();
    Reader reader = new InputStreamReader(new FileInputStream(
        signatureSecretFile), Charsets.UTF_8);
    int c = reader.read();
    while (c > -1) {
      secret.append((char)c);
      c = reader.read();
    }
    reader.close();
    props.setProperty(AuthenticationFilter.SIGNATURE_SECRET, secret.toString());
  } catch (IOException ex) {
    throw new RuntimeException("Could not read HttpFS signature secret file: " + signatureSecretFile);
  }
  return props;
}
 
Developer: naver, Project: hadoop, Lines: 49, Source: HttpFSAuthenticationFilter.java

Example 14: writeTo

@Override
public void writeTo(Map map, Class<?> aClass, Type type, Annotation[] annotations,
                    MediaType mediaType, MultivaluedMap<String, Object> stringObjectMultivaluedMap,
                    OutputStream outputStream) throws IOException, WebApplicationException {
  Writer writer = new OutputStreamWriter(outputStream,  Charsets.UTF_8);
  JSONObject.writeJSONString(map, writer);
  writer.write(ENTER);
  writer.flush();
}
 
Developer: naver, Project: hadoop, Lines: 9, Source: JSONMapProvider.java

Example 15: writeTo

@Override
public void writeTo(JSONStreamAware jsonStreamAware, Class<?> aClass, Type type, Annotation[] annotations,
                    MediaType mediaType, MultivaluedMap<String, Object> stringObjectMultivaluedMap,
                    OutputStream outputStream) throws IOException, WebApplicationException {
  Writer writer = new OutputStreamWriter(outputStream, Charsets.UTF_8);
  jsonStreamAware.writeJSONString(writer);
  writer.write(ENTER);
  writer.flush();
}
 
Developer: naver, Project: hadoop, Lines: 9, Source: JSONProvider.java


Note: The org.apache.commons.io.Charsets.UTF_8 field examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their respective authors, who retain copyright over the source code; consult each project's license before redistributing or reusing the code, and do not republish without permission.