本文整理汇总了Java中org.apache.hadoop.io.compress.GzipCodec.setConf方法的典型用法代码示例。如果您正苦于以下问题：Java GzipCodec.setConf方法的具体用法？Java GzipCodec.setConf怎么用？Java GzipCodec.setConf使用的例子？那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.io.compress.GzipCodec
的用法示例。
在下文中一共展示了GzipCodec.setConf方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: setUp
import org.apache.hadoop.io.compress.GzipCodec; //导入方法依赖的package包/类
@Before
public void setUp() throws Exception {
  conf = new SystemConfiguration();
  // Build the codec under test. (The original comment said "LZO", but a
  // GzipCodec is what is actually created and assigned.)
  org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration();
  GzipCodec gzipCodec = new GzipCodec();
  gzipCodec.setConf(hadoopConf);
  codec = gzipCodec;
  // Recreate the test fixture file from scratch on every run.
  baseDir = new File(".", "target/testClientServerFailures/");
  baseDir.mkdirs();
  fileToStream = new File(baseDir, "test.txt");
  if (fileToStream.exists()) {
    fileToStream.delete();
  }
  fileToStream.createNewFile();
  // try-with-resources closes the BufferedWriter (and the FileWriter it
  // wraps) even if a write throws — the old manual finally leaked the
  // FileWriter when BufferedWriter construction failed, and double-closed
  // the underlying writer on success.
  try (BufferedWriter buffWriter = new BufferedWriter(new FileWriter(fileToStream))) {
    for (int i = 0; i < testLineCount; i++) {
      buffWriter.write(testString);
      buffWriter.write('\n');
    }
  }
}
示例2: setUp
import org.apache.hadoop.io.compress.GzipCodec; //导入方法依赖的package包/类
@Before
public void setUp() throws Exception {
  // Build the codec under test. (The original comment said "LZO", but a
  // GzipCodec is what is actually created and assigned.)
  Configuration conf = new Configuration();
  GzipCodec gzipCodec = new GzipCodec();
  gzipCodec.setConf(conf);
  codec = gzipCodec;
  // Recreate the test fixture file from scratch on every run.
  baseDir = new File(".", "target/testSendClientFiles/");
  baseDir.mkdirs();
  fileToStream = new File(baseDir, "test.txt");
  if (fileToStream.exists()) {
    fileToStream.delete();
  }
  // createNewFile() is synchronous, so the file exists once it returns; the
  // old trailing busy-wait loop (`while (!fileToStream.exists());`) could
  // only exit immediately or spin forever, and has been removed.
  fileToStream.createNewFile();
  // try-with-resources closes the BufferedWriter (and the FileWriter it
  // wraps) even if a write throws.
  try (BufferedWriter buffWriter = new BufferedWriter(new FileWriter(fileToStream))) {
    for (int i = 0; i < testLineCount; i++) {
      buffWriter.write(testString);
      buffWriter.write('\n');
    }
  }
}
示例3: setUp
import org.apache.hadoop.io.compress.GzipCodec; //导入方法依赖的package包/类
@Before
public void setUp() throws Exception {
  // Build the codec under test. (The original comment said "LZO", but a
  // GzipCodec is what is actually created and assigned.)
  Configuration conf = new Configuration();
  GzipCodec gzipCodec = new GzipCodec();
  gzipCodec.setConf(conf);
  codec = gzipCodec;
  // Recreate the test fixture file from scratch on every run.
  baseDir = new File(".", "target/testSendClientFilesConflict/");
  baseDir.mkdirs();
  fileToStream = new File(baseDir, "test.txt");
  if (fileToStream.exists()) {
    fileToStream.delete();
  }
  // createNewFile() is synchronous, so the file exists once it returns; the
  // old trailing busy-wait loop (`while (!fileToStream.exists());`) could
  // only exit immediately or spin forever, and has been removed.
  fileToStream.createNewFile();
  // try-with-resources closes the BufferedWriter (and the FileWriter it
  // wraps) even if a write throws.
  try (BufferedWriter buffWriter = new BufferedWriter(new FileWriter(fileToStream))) {
    for (int i = 0; i < testLineCount; i++) {
      buffWriter.write(testString);
      buffWriter.write('\n');
    }
  }
}
示例4: EmoSplitInputStream
import org.apache.hadoop.io.compress.GzipCodec; //导入方法依赖的package包/类
// Streams the rows of one table split through an in-memory gzip pipe: a
// background thread compresses rows into _rawOut, and the consumer reads the
// compressed bytes back out of _gzipIn.
private EmoSplitInputStream(String table, String split)
throws IOException {
if (isEmptySplit(split)) {
// Empty split: nothing to stream, so expose an empty row iterator.
_rows = Iterators.emptyIterator();
} else {
// Get the DataStore and begin streaming the split's rows.
CloseableDataStore dataStore = HadoopDataStoreManager.getInstance().getDataStore(_uri, _apiKey, _metricRegistry);
_closer.register(dataStore);
_rows = DataStoreStreaming.getSplit(dataStore, table, split, ReadConsistency.STRONG).iterator();
}
// Reset the read buffer so the first read() triggers a refill.
_buffer.clear();
_buffer.limit(0);
// Gzip codec used to compress the row stream; needs a Configuration before use.
GzipCodec gzipCodec = new GzipCodec();
gzipCodec.setConf(new Configuration());
// Set up the pipes: raw rows -> gzip -> pipeRawToGzip -> _gzipIn, with a
// 10 MiB pipe buffer between producer and consumer.
PipedOutputStream pipeRawToGzip = new PipedOutputStream();
_gzipIn = new PipedInputStream(pipeRawToGzip, 10 * 1024 * 1024);
_rawOut = gzipCodec.createOutputStream(pipeRawToGzip);
// Register both ends so closing this stream tears the pipe down.
_closer.register(_gzipIn);
_closer.register(pipeRawToGzip);
// Start the asynchronous buffering thread that writes/compresses the rows;
// started last so all pipe plumbing above is fully wired before it runs.
_bufferThread = new Thread(new Runnable() {
@Override
public void run() {
streamAndCompressInput();
}
});
_bufferThread.start();
}
示例5: buildCodec
import org.apache.hadoop.io.compress.GzipCodec; //导入方法依赖的package包/类
/**
 * Builds a {@link ReusableStreamGzipCodec} configured with a defensive copy
 * of the given configuration, so later mutations of {@code conf} cannot
 * affect the codec.
 */
private GzipCodec buildCodec(Configuration conf) {
  final GzipCodec codec = new ReusableStreamGzipCodec();
  codec.setConf(new Configuration(conf));
  return codec;
}
示例6: setUp
import org.apache.hadoop.io.compress.GzipCodec; //导入方法依赖的package包/类
@Before
public void setUp() throws Exception {
  // Load the DB and AGENT profiles before building the test configuration.
  bootstrap = new Bootstrap();
  bootstrap.loadProfiles(CommandLineProcessorFactory.PROFILE.DB,
      CommandLineProcessorFactory.PROFILE.AGENT);
  conf = new SystemConfiguration();
  // Build the codec under test. (The original comment said "LZO", but a
  // GzipCodec is what is actually created and assigned.)
  org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration();
  GzipCodec gzipCodec = new GzipCodec();
  gzipCodec.setConf(hadoopConf);
  codec = gzipCodec;
  // Recreate the test fixture file from scratch on every run.
  baseDir = new File(".", "target/testSendClientFiles/");
  baseDir.mkdirs();
  fileToStream = new File(baseDir, "test.txt");
  if (fileToStream.exists()) {
    fileToStream.delete();
  }
  fileToStream.createNewFile();
  // try-with-resources closes the BufferedWriter (and the FileWriter it
  // wraps) even if a write throws — the old manual finally leaked the
  // FileWriter when BufferedWriter construction failed.
  try (BufferedWriter buffWriter = new BufferedWriter(new FileWriter(fileToStream))) {
    for (int i = 0; i < testLineCount; i++) {
      buffWriter.write(testString);
      buffWriter.write('\n');
    }
  }
}
示例7: setUp
import org.apache.hadoop.io.compress.GzipCodec; //导入方法依赖的package包/类
@Before
public void setUp() throws Exception {
  // Pick the native-library directory matching the JVM's architecture.
  String arch = System.getProperty("os.arch");
  String libPath;
  if (arch.contains("i386")) {
    libPath = new File(".", "src/main/resources/native/Linux-i386-32")
        .getAbsolutePath();
  } else {
    libPath = new File(".", "src/main/resources/native/Linux-amd64-64")
        .getAbsolutePath();
  }
  // NOTE(review): setting java.library.path after JVM startup generally has
  // no effect on native-library resolution (the JVM caches it at launch) —
  // confirm whether anything downstream actually re-reads this property.
  System.setProperty("java.library.path", libPath);
  // Build the codec under test. (The original comment said "LZO", but a
  // GzipCodec is what is actually created and assigned.)
  Configuration conf = new Configuration();
  GzipCodec gzipCodec = new GzipCodec();
  gzipCodec.setConf(conf);
  codec = gzipCodec;
  // Recreate the test fixture file from scratch on every run.
  baseDir = new File(".", "target/fileLineStreamerTest/");
  baseDir.mkdirs();
  fileToStream = new File(baseDir, "test.txt");
  if (fileToStream.exists()) {
    fileToStream.delete();
  }
  fileToStream.createNewFile();
  // try-with-resources closes the BufferedWriter (and the FileWriter it
  // wraps) even if a write throws — the old manual finally leaked the
  // FileWriter when BufferedWriter construction failed.
  try (BufferedWriter buffWriter = new BufferedWriter(new FileWriter(fileToStream))) {
    for (int i = 0; i < testLineCount; i++) {
      buffWriter.write(testString);
      buffWriter.write('\n');
    }
  }
}