This article collects and summarizes typical usage examples of the Java class org.apache.hadoop.hbase.util.CompressionTest. If you are wondering what exactly the CompressionTest class does, how to use it, or where to find examples of it in action, the curated code samples below may help.
The CompressionTest class belongs to the org.apache.hadoop.hbase.util package. A total of 13 code examples of the class are shown below, sorted by popularity by default.
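Before the examples, here is a minimal, self-contained sketch of the two most common entry points, CompressionTest.testCompression(String) and CompressionTest.testCompression(Compression.Algorithm). It assumes a recent HBase release where Compression lives in org.apache.hadoop.hbase.io.compress; the codec names are placeholders for illustration.

import java.io.IOException;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.CompressionTest;

public class CompressionProbe {
  public static void main(String[] args) throws IOException {
    // String-based overload: returns false (and logs a warning) when the codec cannot be loaded.
    boolean snappyOk = CompressionTest.testCompression("snappy");
    System.out.println("snappy supported: " + snappyOk);

    // Enum-based overload: throws IOException when the algorithm cannot be loaded.
    CompressionTest.testCompression(Compression.Algorithm.GZ);
  }
}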
Example 1: checkCodecs
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
/**
 * Run test on configured codecs to make sure supporting libs are in place.
 *
 * @param c
 * @throws IOException
 */
private static void checkCodecs(final Configuration c) throws IOException {
  // check to see if the codec list is available:
  String[] codecs = c.getStrings("hbase.regionserver.codecs", (String[]) null);
  if (codecs == null) return;
  for (String codec : codecs) {
    if (!CompressionTest.testCompression(codec)) {
      throw new IOException(
          "Compression codec " + codec + " not supported, aborting RS construction");
    }
  }
}
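For context, the hbase.regionserver.codecs property that Example 1 reads is a comma-separated list of codecs the region server must be able to load at startup. The following sketch shows how such a configuration might be set up and probed; the codec list is an assumption for illustration, not a recommendation.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.util.CompressionTest;

public class CodecConfigProbe {
  public static void main(String[] args) {
    // Hypothetical setup: require snappy and lz4 to be available.
    Configuration c = HBaseConfiguration.create();
    c.setStrings("hbase.regionserver.codecs", "snappy", "lz4");

    // The same per-codec check Example 1 performs.
    for (String codec : c.getStrings("hbase.regionserver.codecs", new String[0])) {
      System.out.println(codec + " available: " + CompressionTest.testCompression(codec));
    }
  }
}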
Example 2: readTrailer
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
private FixedFileTrailer readTrailer() throws IOException {
  FixedFileTrailer fft = new FixedFileTrailer();
  long seekPoint = this.fileSize - FixedFileTrailer.trailerSize();
  this.istream.seek(seekPoint);
  fft.deserialize(this.istream);
  // Set up the codec.
  this.compressAlgo = Compression.Algorithm.values()[fft.compressionCodec];
  CompressionTest.testCompression(this.compressAlgo);
  return fft;
}
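Example 2 validates the compression algorithm recorded in an HFile trailer via the enum-based overload. The same overload can be used to enumerate which algorithms are actually usable in the current runtime; a minimal sketch, again assuming a recent HBase where Compression lives in org.apache.hadoop.hbase.io.compress:

import java.io.IOException;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.CompressionTest;

public class ListSupportedAlgorithms {
  public static void main(String[] args) {
    // Probe every algorithm the Compression enum knows about, one by one.
    for (Compression.Algorithm algo : Compression.Algorithm.values()) {
      try {
        CompressionTest.testCompression(algo);
        System.out.println(algo + ": supported");
      } catch (IOException e) {
        System.out.println(algo + ": not supported (" + e.getMessage() + ")");
      }
    }
  }
}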
Example 3: checkCodecs
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
/**
 * Run test on configured codecs to make sure supporting libs are in place.
 * @param c
 * @throws IOException
 */
private static void checkCodecs(final Configuration c) throws IOException {
  // check to see if the codec list is available:
  String[] codecs = c.getStrings("hbase.regionserver.codecs", (String[]) null);
  if (codecs == null) return;
  for (String codec : codecs) {
    if (!CompressionTest.testCompression(codec)) {
      throw new IOException("Compression codec " + codec
          + " not supported, aborting RS construction");
    }
  }
}
Example 4: checkCodecs
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
/**
 * Run test on configured codecs to make sure supporting libs are in place.
 *
 * @param c
 * @throws IOException
 */
private static void checkCodecs(final Configuration c) throws IOException {
  // check to see if the codec list is available:
  String[] codecs = c.getStrings("hbase.regionserver.codecs", (String[]) null);
  if (codecs == null) return;
  for (String codec : codecs) {
    if (!CompressionTest.testCompression(codec)) {
      throw new IOException("Compression codec " + codec +
          " not supported, aborting RS construction");
    }
  }
}
Example 5: checkCodecs
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
/**
 * Run test on configured codecs to make sure supporting libs are in place.
 * @param c
 * @throws IOException
 */
private static void checkCodecs(final Configuration c) throws IOException {
  // check to see if the codec list is available:
  String[] codecs = c.getStrings("hbase.regionserver.codecs", (String[]) null);
  if (codecs == null) return;
  for (String codec : codecs) {
    if (!CompressionTest.testCompression(codec)) {
      throw new IOException("Compression codec " + codec +
          " not supported, aborting RS construction");
    }
  }
}
Example 6: getSupportness
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
public static boolean getSupportness() {
  try {
    File temp = File.createTempFile("test", ".tmp");
    CompressionTest.main(new String[] { "file://" + temp.toString(), "lzo" });
  } catch (Exception e) {
    return false;
  }
  return true;
}
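Example 6 decides whether LZO is usable by running CompressionTest's command-line entry point against a throwaway local file and treating any exception as "not supported". A similar sketch for a different codec, with the temporary file cleaned up afterward (the codec name and the cleanup step are assumptions added for illustration):

import java.io.File;
import org.apache.hadoop.hbase.util.CompressionTest;

public class SnappyProbe {
  public static boolean isSnappySupported() {
    File temp = null;
    try {
      // CompressionTest.main writes and re-reads a small test file at the given
      // path using the named codec, throwing if the codec cannot be used.
      temp = File.createTempFile("compression-probe", ".tmp");
      CompressionTest.main(new String[] { "file://" + temp.getAbsolutePath(), "snappy" });
      return true;
    } catch (Exception e) {
      return false;
    } finally {
      if (temp != null) {
        temp.delete();
      }
    }
  }
}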
Example 7: checkCompression
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
private void checkCompression(final HColumnDescriptor hcd)
    throws IOException {
  if (!this.masterCheckCompression) return;
  CompressionTest.testCompression(hcd.getCompression());
  CompressionTest.testCompression(hcd.getCompactionCompression());
}
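Example 7 shows the master-side validation applied to a column family descriptor before a table is created or modified. A minimal sketch of a descriptor that would go through the same checks, using the classic HColumnDescriptor API; the family name and codec are placeholders:

import java.io.IOException;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.CompressionTest;

public class FamilyCompressionCheck {
  public static void main(String[] args) throws IOException {
    // Hypothetical column family requesting GZ for both flushes and compactions.
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    hcd.setCompressionType(Compression.Algorithm.GZ);
    hcd.setCompactionCompressionType(Compression.Algorithm.GZ);

    // The same two probes the example runs; each throws IOException on failure.
    CompressionTest.testCompression(hcd.getCompression());
    CompressionTest.testCompression(hcd.getCompactionCompression());
  }
}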
Example 8: checkCompressionCodecs
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
private void checkCompressionCodecs() throws IOException {
  for (HColumnDescriptor fam : this.htableDescriptor.getColumnFamilies()) {
    CompressionTest.testCompression(fam.getCompression());
    CompressionTest.testCompression(fam.getCompactionCompression());
  }
}
Example 9: checkCompression
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
private void checkCompression(final HColumnDescriptor hcd)
    throws IOException {
  if (!this.masterCheckCompression) return;
  CompressionTest.testCompression(hcd.getCompression());
  CompressionTest.testCompression(hcd.getCompactionCompression());
}
Example 10: checkCompressionCodecs
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
private void checkCompressionCodecs() throws IOException {
  for (HColumnDescriptor fam : this.htableDescriptor.getColumnFamilies()) {
    CompressionTest.testCompression(fam.getCompression());
    CompressionTest.testCompression(fam.getCompactionCompression());
  }
}
Example 11: checkCompressionCodecs
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
private void checkCompressionCodecs() throws IOException {
  for (HColumnDescriptor fam : this.htableDescriptor.getColumnFamilies()) {
    CompressionTest.testCompression(fam.getCompression());
    CompressionTest.testCompression(fam.getCompactionCompression());
  }
}
Example 12: checkCompression
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
private void checkCompression(final ColumnFamilyDescriptor hcd)
    throws IOException {
  if (!this.masterCheckCompression) return;
  CompressionTest.testCompression(hcd.getCompressionType());
  CompressionTest.testCompression(hcd.getCompactionCompressionType());
}
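Example 12 (and Example 13 below) performs the same validation against the newer ColumnFamilyDescriptor API introduced in HBase 2.x. A minimal sketch of building such a descriptor with ColumnFamilyDescriptorBuilder and validating it; the family name and codec are illustrative only:

import java.io.IOException;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CompressionTest;

public class DescriptorCompressionCheck {
  public static void main(String[] args) throws IOException {
    ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("cf"))
        .setCompressionType(Compression.Algorithm.SNAPPY)
        .setCompactionCompressionType(Compression.Algorithm.SNAPPY)
        .build();

    // Same validation as the example; each call throws IOException on failure.
    CompressionTest.testCompression(cfd.getCompressionType());
    CompressionTest.testCompression(cfd.getCompactionCompressionType());
  }
}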
Example 13: checkCompressionCodecs
import org.apache.hadoop.hbase.util.CompressionTest; // import the required package/class
private void checkCompressionCodecs() throws IOException {
  for (ColumnFamilyDescriptor fam : this.htableDescriptor.getColumnFamilies()) {
    CompressionTest.testCompression(fam.getCompressionType());
    CompressionTest.testCompression(fam.getCompactionCompressionType());
  }
}