This article collects typical usage examples of the Java method org.apache.hadoop.io.compress.DefaultCodec.setConf. If you are wondering what DefaultCodec.setConf does or how to call it in practice, the curated examples below should help; for more context you can also explore the enclosing class, org.apache.hadoop.io.compress.DefaultCodec.
The following shows 11 code examples of DefaultCodec.setConf, drawn from open-source projects and ordered by popularity.
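Before the project examples, here is a minimal self-contained sketch of the basic pattern. The class name SetConfDemo is ours for illustration; everything else is the standard Hadoop API. DefaultCodec implements Configurable, so it should be handed a Configuration via setConf before its compression streams are created.

import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;

public class SetConfDemo {
  public static void main(String[] args) throws Exception {
    // Configure the codec first: DefaultCodec reads its settings
    // (e.g. compression level and buffer size) from the Configuration.
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());

    // With the codec configured, compressed streams can be created.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(buffer);
    out.write("hello, codec".getBytes("UTF-8"));
    out.close();
    System.out.println("compressed size: " + buffer.size());
  }
}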
Example 1: testIFileWriterWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
/**
 * Create an IFile.Writer using GzipCodec, since this codec does not
 * have a compressor when run via the tests (i.e., no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                   codec, null);
  writer.close();
}
Example 2: testIFileReaderWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
/** Same as above, but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();
  // Test the checksum: the bytes read back should equal the length of
  // the stored checksum.
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}
Example 3: testIFileWriterWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
/**
 * Create an IFile.Writer using GzipCodec, since this codec does not
 * have a compressor when run via the tests (i.e., no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
}
Example 4: testIFileReaderWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
/** Same as above, but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, rfs, path, codec, null);
  reader.close();
}
Example 5: testIFileWriterWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
/**
 * Create an IFile.Writer using GzipCodec, since this codec does not
 * have a compressor when run via the tests (i.e., no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
}
Example 6: testIFileReaderWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
/** Same as above, but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, rfs, path, codec, null);
  reader.close();
  // Test the checksum: the bytes read back should equal the length of
  // the stored checksum.
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}
Example 7: testValueIteratorWithCompression
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase : testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
Example 8: main
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
public static void main(String[] args) throws IOException {
  FileSystem fs = new Path("/root").getFileSystem(new Configuration());
  DefaultCodec codec = new GzipCodec();
  codec.setConf(new Configuration());
//  FSDataOutputStream output = fs.create(new Path("/root/test.gz"));
//  BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new GzipCodec().createOutputStream(output)));
//  writer.write("I am test!\n");
//  writer.write("Is it right?");
//  writer.close();
//  fs.setVerifyChecksum(false);
//  FSDataInputStream input = fs.open(new Path("/root/test.gz"));
//  BufferedReader reader = new BufferedReader(new InputStreamReader(codec.createInputStream(input)));
//  System.out.println(reader.readLine());
//  System.out.println(reader.readLine());
  // Print the bytes of s, each as a zero-padded two-digit hex value.
  String s = "1111111111111111";
  byte[] b = Bytes.toBytes(s);
  for (int i = 0; i < b.length; i++) {
    String hex = Integer.toHexString(b[i] & 0xff);
    if (hex.length() == 1) {
      hex = '0' + hex;
    }
    System.out.print(hex);
  }
}
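For reference, each '1' character encodes to the single byte 0x31, so the hex loop above prints "31" sixteen times.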
Example 9: testSeqFileCompression
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
public void testSeqFileCompression() throws Exception {
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(new Configuration());
  testSeqFile(codec, SequenceFile.CompressionType.RECORD);
  testSeqFile(codec, SequenceFile.CompressionType.BLOCK);
}
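The testSeqFile helper above is project-specific and its source is not shown here. As a rough sketch of what the tested pattern looks like (class, path, and key/value choices are ours, not the project's), a configured codec can be passed to a SequenceFile writer together with a CompressionType:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.DefaultCodec;

public class SeqFileCompressionSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(conf);
    // Write one block-compressed record using the configured codec.
    Path path = new Path("build/test.seq");
    SequenceFile.Writer writer = SequenceFile.createWriter(conf,
        SequenceFile.Writer.file(path),
        SequenceFile.Writer.keyClass(IntWritable.class),
        SequenceFile.Writer.valueClass(Text.class),
        SequenceFile.Writer.compression(
            SequenceFile.CompressionType.BLOCK, codec));
    writer.append(new IntWritable(1), new Text("value"));
    writer.close();
  }
}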
Example 10: testValueIteratorWithCompression
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase : testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
Example 11: testTextFileCompression
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class the method depends on
@Test
public void testTextFileCompression() throws Exception {
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(new Configuration());
  testTextFile(codec);
}
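As with testSeqFile, the testTextFile helper is not shown in the snippet above. A plausible minimal round trip with a configured codec might look like the following sketch (all names here are ours): compress some text, then decompress it through the same codec.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;

public class RoundTripSketch {
  public static void main(String[] args) throws Exception {
    DefaultCodec codec = new DefaultCodec();
    codec.setConf(new Configuration());

    // Compress two lines of text into an in-memory buffer.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    CompressionOutputStream cos = codec.createOutputStream(compressed);
    cos.write("line one\nline two\n".getBytes("UTF-8"));
    cos.close();

    // Decompress the buffer and copy the plain text to stdout.
    InputStream cis =
        codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()));
    IOUtils.copyBytes(cis, System.out, 4096, true);
  }
}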