This article collects typical usage examples of the Java method org.apache.hadoop.io.IOUtils.copyBytes. If you are wondering what IOUtils.copyBytes does, how to call it, or what real-world uses look like, the curated examples here should help. You may also want to explore other usage examples of the enclosing class, org.apache.hadoop.io.IOUtils.
The 15 code examples of IOUtils.copyBytes below are sorted by popularity by default.
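Taken together, the examples exercise four overloads of copyBytes: (in, out, buffSize), (in, out, buffSize, close), (in, out, conf), and (in, out, conf, close). The Configuration variants read the buffer size from io.file.buffer.size (default 4096), and the conf-only variant closes both streams itself. A minimal, self-contained sketch of the differences (the class name and data are illustrative, not taken from any example below):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;

public class CopyBytesSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    byte[] data = "hello, copyBytes".getBytes();
    ByteArrayOutputStream out = new ByteArrayOutputStream();

    // (in, out, buffSize): copy with an explicit buffer size; closes nothing.
    IOUtils.copyBytes(new ByteArrayInputStream(data), out, 1024);

    // (in, out, buffSize, close): close=false leaves both streams open, so the
    // caller must close them, typically in a finally block via closeStream.
    IOUtils.copyBytes(new ByteArrayInputStream(data), out, 4096, false);

    // (in, out, conf, close): buffer size comes from io.file.buffer.size.
    IOUtils.copyBytes(new ByteArrayInputStream(data), out, conf, false);

    // (in, out, conf): same, but closes BOTH streams, on success and on failure.
    IOUtils.copyBytes(new ByteArrayInputStream(data), out, conf);

    System.out.println(out.toString());  // the test data, four times
  }
}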
Example 1: testExcludedCiphers
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
/**
 * Test that verifies that excluded ciphers (SSL_RSA_WITH_RC4_128_SHA,
 * TLS_ECDH_ECDSA_WITH_RC4_128_SHA, TLS_ECDH_RSA_WITH_RC4_128_SHA,
 * TLS_ECDHE_ECDSA_WITH_RC4_128_SHA, TLS_ECDHE_RSA_WITH_RC4_128_SHA) are not
 * available for negotiation during SSL connection.
 */
@Test
public void testExcludedCiphers() throws Exception {
  URL url = new URL(baseUrl, "/echo?a=b&c=d");
  HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
  SSLSocketFactory sslSocketF = clientSslFactory.createSSLSocketFactory();
  PrefferedCipherSSLSocketFactory testPreferredCipherSSLSocketF
      = new PrefferedCipherSSLSocketFactory(sslSocketF,
          excludeCiphers.split(","));
  conn.setSSLSocketFactory(testPreferredCipherSSLSocketF);
  assertFalse("excludedCipher list is empty", excludeCiphers.isEmpty());
  try {
    InputStream in = conn.getInputStream();
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    IOUtils.copyBytes(in, out, 1024);
    fail("No Ciphers in common, SSLHandshake must fail.");
  } catch (SSLHandshakeException ex) {
    LOG.info("No Ciphers in common, expected successful test result.", ex);
  }
}
Example 2: main
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public static void main(String[] args) throws IOException {
  String uri = "hdfs://localhost:9000/aag.txt";
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(URI.create(uri), conf);
  FSDataInputStream in = fs.open(new Path(uri));
  try {
    IOUtils.copyBytes(in, System.out, 4096, false);
  } finally {
    IOUtils.closeStream(in);
  }
}
Example 3: downFile
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public ByteArrayInputStream downFile(String hdfsPath) throws IOException
{
    InputStream in = fileSystem.open(new Path(hdfsPath));
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    try
    {
        IOUtils.copyBytes(in, bos, conf);
    } catch (Exception e)
    {
        e.printStackTrace();
    } finally
    {
        IOUtils.closeStream(in);
    }
    return new ByteArrayInputStream(bos.toByteArray());
}
Example 4: testCleanupOldJars
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
@Test
public void testCleanupOldJars() throws Exception {
  String className = "TestCleanupOldJars";
  String folder = TEST_UTIL.getDataTestDir().toString();
  File jarFile = ClassLoaderTestHelper.buildJar(
    folder, className, null, ClassLoaderTestHelper.localDirPath(conf));
  File tmpJarFile = new File(jarFile.getParent(), "/tmp/" + className + ".test.jar");
  if (tmpJarFile.exists()) tmpJarFile.delete();
  assertFalse("tmp jar file should not exist", tmpJarFile.exists());
  IOUtils.copyBytes(new FileInputStream(jarFile),
    new FileOutputStream(tmpJarFile), conf, true);
  assertTrue("tmp jar file should be created", tmpJarFile.exists());
  Path path = new Path(jarFile.getAbsolutePath());
  ClassLoader parent = TestCoprocessorClassLoader.class.getClassLoader();
  CoprocessorClassLoader.parentDirLockSet.clear(); // So that clean up can be triggered
  ClassLoader classLoader = CoprocessorClassLoader.getClassLoader(path, parent, "111", conf);
  assertNotNull("Classloader should be created", classLoader);
  assertFalse("tmp jar file should be removed", tmpJarFile.exists());
}
Example 5: appendFile
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public void appendFile(String localFile, String hdfsPath)
        throws IOException
{
    InputStream in = new FileInputStream(localFile);
    OutputStream out = fileSystem.append(new Path(hdfsPath));
    try
    {
        IOUtils.copyBytes(in, out, conf);
    } catch (Exception e)
    {
        e.printStackTrace();
    } finally
    {
        IOUtils.closeStream(in);
        IOUtils.closeStream(out);
    }
}
Example 6: upFile
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public void upFile(InputStream fileInputStream, String hdfsPath)
        throws IOException
{
    InputStream in = new BufferedInputStream(fileInputStream);
    OutputStream out = fileSystem.create(new Path(hdfsPath));
    try
    {
        IOUtils.copyBytes(in, out, conf);
    } catch (Exception e)
    {
        e.printStackTrace();
    } finally
    {
        // close streams
        IOUtils.closeStream(in);
        IOUtils.closeStream(out);
    }
}
Example 7: main
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public static void main(String[] args) throws IOException {
  final String uri = "hdfs://master:8020/user/tom/aa.txt";
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(URI.create(uri), conf);
  InputStream in = null;
  try {
    in = fs.open(new Path(uri));
    IOUtils.copyBytes(in, System.out, 4096, false);
  } finally {
    IOUtils.closeStream(in);
  }
}
Example 8: writeStreamToFile
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
void writeStreamToFile(InputStream in, PathData target,
    boolean lazyPersist) throws IOException {
  FSDataOutputStream out = null;
  try {
    out = create(target, lazyPersist);
    IOUtils.copyBytes(in, out, getConf(), true);
  } finally {
    IOUtils.closeStream(out); // just in case copyBytes didn't
  }
}
Example 9: appendContent
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public void appendContent(String content, String hdfsPath)
        throws IOException
{
    OutputStream out = fileSystem.append(new Path(hdfsPath));
    InputStream in = new ByteArrayInputStream(content.getBytes());
    IOUtils.copyBytes(in, out, conf);
    IOUtils.closeStream(in);
    IOUtils.closeStream(out);
}
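Note that in Hadoop's IOUtils the Configuration-only overload delegates to the four-argument form with close=true, so copyBytes here already closes both streams itself (on success and on failure), and the two closeStream calls are defensive rather than required. An equivalent form with the close flag spelled out (a sketch, assuming the same fileSystem and conf fields as above):

public void appendContent(String content, String hdfsPath)
        throws IOException
{
    OutputStream out = fileSystem.append(new Path(hdfsPath));
    InputStream in = new ByteArrayInputStream(content.getBytes());
    // close=true: copyBytes closes both streams, even if the copy fails
    IOUtils.copyBytes(in, out, conf, true);
}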
Example 10: printToStdout
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
private void printToStdout(InputStream in) throws IOException {
  try {
    IOUtils.copyBytes(in, out, getConf(), false);
  } finally {
    in.close();
  }
}
Example 11: uploadFiles
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public static void uploadFiles(final InputStream in, final String outputFile) throws IOException {
  String dst = NodeConfig.HDFS_PATH + outputFile;
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(URI.create(dst), conf);
  OutputStream out = fs.create(new Path(dst), new Progressable() {
    public void progress() {
      System.out.print(". ");
    }
  });
  IOUtils.copyBytes(in, out, BUFFER_SIZE, true);
}
Example 12: readFile
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public void readFile(String hdfsPath) throws IOException
{
    InputStream in = null;
    try
    {
        in = fileSystem.open(new Path(hdfsPath));
        IOUtils.copyBytes(in, System.out, 4096, false);
    } finally
    {
        IOUtils.closeStream(in);
    }
}
Example 13: uploadFiles
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
public static void uploadFiles(final InputStream in, final String outputFile) throws IOException {
  String dst = NodeConfig.HDFS_PATH + outputFile;
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(URI.create(dst), conf);
  OutputStream out = fs.create(new Path(dst), () -> System.out.print(". "));
  IOUtils.copyBytes(in, out, BUFFER_SIZE, true);
}
Example 14: readOut
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
private static String readOut(URL url) throws Exception {
  HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
  conn.setSSLSocketFactory(clientSslFactory.createSSLSocketFactory());
  InputStream in = conn.getInputStream();
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  IOUtils.copyBytes(in, out, 1024);
  return out.toString();
}
Example 15: upload
import org.apache.hadoop.io.IOUtils; // import the package/class this method depends on
/**
 * Write a file to HDFS.
 *
 * @param uri target file uri
 * @param content file content
 * @throws IOException
 */
public static void upload(String uri, String content) throws IOException {
  Path path = new Path(Constants.NAME_NODE + "/" + uri);
  if (fs.exists(path))
    fs.delete(path, true);
  OutputStream out = fs.create(path);
  InputStream rf = new ByteArrayInputStream(content.getBytes());
  IOUtils.copyBytes(rf, out, 4096, true);
  out.close(); // redundant but harmless: copyBytes with close=true already closed out
}