This article collects typical usage examples of the Java method org.apache.hadoop.io.compress.DefaultCodec.setConf. Wondering what DefaultCodec.setConf does, how to use it, or where to see it in action? The curated method examples here may help. You can also browse further usage examples of the enclosing class, org.apache.hadoop.io.compress.DefaultCodec.
Below are 11 code examples of DefaultCodec.setConf, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
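Before the examples, a quick orientation: setConf comes from Hadoop's Configurable interface, and it must be called on a DefaultCodec before the codec is used, since the codec reads its buffer-size and compression settings from the supplied Configuration. Here is a minimal, self-contained round-trip sketch; it is not taken from the examples below, and the class name and in-memory streams are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.DefaultCodec;

public class DefaultCodecRoundTrip {
  public static void main(String[] args) throws Exception {
    DefaultCodec codec = new DefaultCodec();
    // setConf must run before the codec is used.
    codec.setConf(new Configuration());

    byte[] input = "I am test!".getBytes("UTF-8");

    // Compress into an in-memory buffer.
    ByteArrayOutputStream compressed = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(compressed);
    out.write(input);
    out.finish();
    out.close();

    // Decompress and print, verifying the round trip by eye.
    CompressionInputStream in =
        codec.createInputStream(new ByteArrayInputStream(compressed.toByteArray()));
    ByteArrayOutputStream restored = new ByteArrayOutputStream();
    IOUtils.copyBytes(in, restored, 4096, true);
    System.out.println(restored.toString("UTF-8"));
  }
}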
Example 1: testIFileWriterWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs.create(path), Text.class, Text.class,
                                   codec, null);
  writer.close();
}
Example 2: testIFileReaderWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
/** Same as above but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  FSDataOutputStream out = rfs.create(path);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, out, Text.class, Text.class,
                                   codec, null);
  writer.close();
  FSDataInputStream in = rfs.open(path);
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, in, rfs.getFileStatus(path).getLen(),
                                   codec, null);
  reader.close();
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}
Example 3: testIFileWriterWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
}
Example 4: testIFileReaderWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
/** Same as above but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, rfs, path, codec, null);
  reader.close();
}
Example 5: testIFileWriterWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
/**
 * Create an IFile.Writer using GzipCodec since this codec does not
 * have a compressor when run via the tests (i.e. no native libraries).
 */
public void testIFileWriterWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
}
Example 6: testIFileReaderWithCodec
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
/** Same as above but create a reader. */
public void testIFileReaderWithCodec() throws Exception {
  Configuration conf = new Configuration();
  FileSystem localFs = FileSystem.getLocal(conf);
  FileSystem rfs = ((LocalFileSystem) localFs).getRaw();
  Path path = new Path(new Path("build/test.ifile"), "data");
  DefaultCodec codec = new GzipCodec();
  codec.setConf(conf);
  IFile.Writer<Text, Text> writer =
      new IFile.Writer<Text, Text>(conf, rfs, path, Text.class, Text.class,
                                   codec, null);
  writer.close();
  IFile.Reader<Text, Text> reader =
      new IFile.Reader<Text, Text>(conf, rfs, path, codec, null);
  reader.close();
  // test checksum
  byte[] ab = new byte[100];
  int readed = reader.checksumIn.readWithChecksum(ab, 0, ab.length);
  assertEquals(readed, reader.checksumIn.getChecksum().length);
}
Example 7: testValueIteratorWithCompression
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase : testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
Example 8: main
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
public static void main(String[] args) throws IOException {
  FileSystem fs = new Path("/root").getFileSystem(new Configuration());
  DefaultCodec codec = new GzipCodec();
  codec.setConf(new Configuration());
  // FSDataOutputStream output = fs.create(new Path("/root/test.gz"));
  // BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(new GzipCodec().createOutputStream(output)));
  // writer.write("I am test!\n");
  // writer.write("Is it right?");
  // writer.close();
  // fs.setVerifyChecksum(false);
  // FSDataInputStream input = fs.open(new Path("/root/test.gz"));
  // BufferedReader reader = new BufferedReader(new InputStreamReader(codec.createInputStream(input)));
  // System.out.println(reader.readLine());
  // System.out.println(reader.readLine());
  String s = "1111111111111111";
  byte[] b = Bytes.toBytes(s);
  for (int i = 0; i < b.length; i++) {
    String hex = Integer.toHexString(b[i] & 0xff);
    if (hex.length() == 1) {
      hex = '0' + hex;
    }
    System.out.print(hex);
  }
}
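A note on Example 8: the commented-out writer wraps a brand-new GzipCodec() whose configuration is never set, while the configured codec variable is only used for reading; calling createOutputStream on the configured instance is the safer pattern. Below is a hedged reconstruction of that commented-out round trip, with the local filesystem and a build/test path assumed in place of /root.

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;

public class GzipRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path("build/test/test.gz");

    DefaultCodec codec = new GzipCodec();
    codec.setConf(conf); // configure before any create*Stream call

    // Write two lines through the codec's compressing stream.
    BufferedWriter writer = new BufferedWriter(
        new OutputStreamWriter(codec.createOutputStream(fs.create(path))));
    writer.write("I am test!\n");
    writer.write("Is it right?");
    writer.close();

    // Read them back through the decompressing stream.
    BufferedReader reader = new BufferedReader(
        new InputStreamReader(codec.createInputStream(fs.open(path))));
    System.out.println(reader.readLine());
    System.out.println(reader.readLine());
    reader.close();
  }
}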
Example 9: testSeqFileCompression
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
public void testSeqFileCompression() throws Exception {
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(new Configuration());
  testSeqFile(codec, SequenceFile.CompressionType.RECORD);
  testSeqFile(codec, SequenceFile.CompressionType.BLOCK);
}
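The testSeqFile helper invoked in Example 9 is not shown on this page. Assuming it simply writes and re-reads a small SequenceFile with the given codec and compression type, a minimal sketch could look like the following (the path and record contents are illustrative, and the createWriter/Reader overloads used here are the older FileSystem-based ones):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;

private static void testSeqFile(CompressionCodec codec,
    SequenceFile.CompressionType type) throws IOException {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.getLocal(conf);
  Path path = new Path("build/test/test.seq");

  // Write a handful of records compressed with the given codec and type.
  SequenceFile.Writer writer = SequenceFile.createWriter(
      fs, conf, path, Text.class, Text.class, type, codec);
  for (int i = 0; i < 10; i++) {
    writer.append(new Text("key" + i), new Text("value" + i));
  }
  writer.close();

  // Read the records back; the reader picks up the codec from the file header.
  SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
  Text key = new Text();
  Text value = new Text();
  while (reader.next(key, value)) {
    System.out.println(key + " -> " + value);
  }
  reader.close();
}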
Example 10: testValueIteratorWithCompression
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
public void testValueIteratorWithCompression() throws Exception {
  Path tmpDir = new Path("build/test/test.reduce.task.compression");
  Configuration conf = new Configuration();
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(conf);
  for (Pair[] testCase : testCases) {
    runValueIterator(tmpDir, testCase, conf, codec);
  }
}
Example 11: testTextFileCompression
import org.apache.hadoop.io.compress.DefaultCodec; // import the package/class on which this method depends
@Test
public void testTextFileCompression() throws Exception {
  DefaultCodec codec = new DefaultCodec();
  codec.setConf(new Configuration());
  testTextFile(codec);
}