This article collects typical usage examples of the Java class org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil. If you are wondering what the RSUtil class is for, how it is used, or what real code that uses it looks like, the curated examples here may help.
The RSUtil class belongs to the org.apache.hadoop.io.erasurecode.rawcoder.util package. Fifteen code examples of the class are shown below, sorted by popularity by default.
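Before the individual examples, the minimal sketch below shows how the RSUtil helpers that appear throughout them fit together: generate a Cauchy encode matrix, expand it into GF multiplication tables, and encode a stripe of data buffers into parity buffers. This is a sketch only; the (6, 3) layout, cell size, and zero-filled buffers are illustrative assumptions rather than values taken from the examples.

import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil;

public class RSUtilSketch {
  public static void main(String[] args) {
    int numDataUnits = 6;      // illustrative layout: 6 data units
    int numParityUnits = 3;    // and 3 parity units
    int numAllUnits = numDataUnits + numParityUnits;

    // Cauchy matrix with numAllUnits rows and numDataUnits columns,
    // built the same way as in examples 1 and 5 below.
    byte[] encodeMatrix = new byte[numAllUnits * numDataUnits];
    RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, numDataUnits);

    // Expand the matrix into GF multiplication tables, starting at offset
    // numDataUnits * numDataUnits, the same offset the examples use.
    byte[] gfTables = new byte[numAllUnits * numDataUnits * 32];
    RSUtil.initTables(numDataUnits, numParityUnits, encodeMatrix,
        numDataUnits * numDataUnits, gfTables);

    // Encode a zero-filled stripe: inputs are data cells, outputs receive parity.
    int cellSize = 1024;       // illustrative cell length
    byte[][] inputs = new byte[numDataUnits][cellSize];
    byte[][] outputs = new byte[numParityUnits][cellSize];
    RSUtil.encodeData(gfTables, cellSize, inputs, new int[numDataUnits],
        outputs, new int[numParityUnits]);
  }
}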
Example 1: RSRawEncoder
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
public RSRawEncoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }
  encodeMatrix = new byte[getNumAllUnits() * numDataUnits];
  RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), numDataUnits);
  if (isAllowingVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, getNumAllUnits());
  }
  gfTables = new byte[getNumAllUnits() * numDataUnits * 32];
  RSUtil.initTables(numDataUnits, numParityUnits, encodeMatrix,
      numDataUnits * numDataUnits, gfTables);
  if (isAllowingVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
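As a usage note for the constructor above: the guard rejects any layout whose total unit count reaches the size of the underlying Galois field (256 for the default GF(2^8) field). The following is a minimal sketch with illustrative values, assuming the (int, int) constructor shown in this example.

// Illustrative only: 6 + 3 = 9 units is well below RSUtil.GF.getFieldSize()
// (256 for the default GF(2^8) field), so construction succeeds.
public static void constructEncoders() {
  RSRawEncoder ok = new RSRawEncoder(6, 3);

  // 200 + 100 = 300 >= 256, so the check above rejects this layout.
  try {
    RSRawEncoder tooLarge = new RSRawEncoder(200, 100);
  } catch (HadoopIllegalArgumentException e) {
    // expected: "Invalid numDataUnits and numParityUnits"
  }
}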
Example 2: processErasures
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
private void processErasures(int[] erasedIndexes) {
  this.decodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.invertMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];
  this.erasureFlags = new boolean[getNumAllUnits()];
  this.numErasedDataUnits = 0;
  for (int i = 0; i < erasedIndexes.length; i++) {
    int index = erasedIndexes[i];
    erasureFlags[index] = true;
    if (index < getNumDataUnits()) {
      numErasedDataUnits++;
    }
  }
  generateDecodeMatrix(erasedIndexes);
  RSUtil.initTables(getNumDataUnits(), erasedIndexes.length,
      decodeMatrix, 0, gfTables);
  if (isAllowingVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
Example 3: RSRawEncoderLegacy
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
public RSRawEncoderLegacy(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());
  int[] primitivePower = RSUtil.getPrimitivePower(numDataUnits,
      numParityUnits);
  // compute generating polynomial
  int[] gen = {1};
  int[] poly = new int[2];
  for (int i = 0; i < numParityUnits; i++) {
    poly[0] = primitivePower[i];
    poly[1] = 1;
    gen = RSUtil.GF.multiply(gen, poly);
  }
  // generating polynomial has all generating roots
  generatingPolynomial = gen;
}
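A brief worked trace may help here: each pass through the loop multiplies the running product by one linear factor, so after numParityUnits iterations the generating polynomial has numParityUnits + 1 coefficients and one root per parity unit. The sketch below traces two iterations under stated assumptions: a0 and a1 stand in for primitivePower[0] and primitivePower[1], and addition in GF(2^8) is XOR.

// Illustrative trace of the loop above for numParityUnits = 2.
// a0 and a1 are stand-ins for primitivePower[0] and primitivePower[1].
int[] gen = {1};                                   // start: the constant polynomial 1
gen = RSUtil.GF.multiply(gen, new int[]{a0, 1});   // gen now represents (x + a0)
gen = RSUtil.GF.multiply(gen, new int[]{a1, 1});   // gen now represents (x + a0)(x + a1)
                                                   //   = x^2 + (a0 XOR a1)*x + a0*a1
// After numParityUnits factors, gen has numParityUnits + 1 coefficients,
// which is what gets assigned to generatingPolynomial above.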
Example 4: RSRawEncoder
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
public RSRawEncoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());
  int[] primitivePower = RSUtil.getPrimitivePower(numDataUnits,
      numParityUnits);
  // compute generating polynomial
  int[] gen = {1};
  int[] poly = new int[2];
  for (int i = 0; i < numParityUnits; i++) {
    poly[0] = primitivePower[i];
    poly[1] = 1;
    gen = RSUtil.GF.multiply(gen, poly);
  }
  // generating polynomial has all generating roots
  generatingPolynomial = gen;
}
Example 5: RSRawEncoder
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
public RSRawEncoder(ErasureCoderOptions coderOptions) {
  super(coderOptions);
  if (getNumAllUnits() >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }
  encodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  RSUtil.genCauchyMatrix(encodeMatrix, getNumAllUnits(), getNumDataUnits());
  if (allowVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, getNumDataUnits(), getNumAllUnits());
  }
  gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];
  RSUtil.initTables(getNumDataUnits(), getNumParityUnits(), encodeMatrix,
      getNumDataUnits() * getNumDataUnits(), gfTables);
  if (allowVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
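For context, this ErasureCoderOptions-based constructor is the newer style of building the encoder. The sketch below shows how such an encoder might be driven end to end; it is an assumption-laden sketch, not code from the example: the ErasureCoderOptions(6, 3) layout, buffer sizes, and the encode(ByteBuffer[], ByteBuffer[]) call are reconstructed from the general raw coder API rather than shown above.

import java.nio.ByteBuffer;

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;

public class RSRawEncoderUsageSketch {
  // Encode one stripe of zero-filled direct buffers. The (6, 3) layout and
  // cell size are illustrative. "throws IOException" is declared defensively,
  // since some Hadoop versions declare it on encode().
  static void encodeOneStripe() throws java.io.IOException {
    RSRawEncoder encoder = new RSRawEncoder(new ErasureCoderOptions(6, 3));

    int cellSize = 64 * 1024;
    ByteBuffer[] inputs = new ByteBuffer[6];    // data units
    ByteBuffer[] outputs = new ByteBuffer[3];   // parity units to be filled
    for (int i = 0; i < inputs.length; i++) {
      inputs[i] = ByteBuffer.allocateDirect(cellSize);
    }
    for (int i = 0; i < outputs.length; i++) {
      outputs[i] = ByteBuffer.allocateDirect(cellSize);
    }

    encoder.encode(inputs, outputs);            // parity written into outputs
  }
}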
Example 6: doDecode
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
@Override
protected void doDecode(ByteArrayDecodingState decodingState) {
  int dataLen = decodingState.decodeLength;
  CoderUtil.resetOutputBuffers(decodingState.outputs,
      decodingState.outputOffsets, dataLen);
  prepareDecoding(decodingState.inputs, decodingState.erasedIndexes);
  byte[][] realInputs = new byte[getNumDataUnits()][];
  int[] realInputOffsets = new int[getNumDataUnits()];
  for (int i = 0; i < getNumDataUnits(); i++) {
    realInputs[i] = decodingState.inputs[validIndexes[i]];
    realInputOffsets[i] = decodingState.inputOffsets[validIndexes[i]];
  }
  RSUtil.encodeData(gfTables, dataLen, realInputs, realInputOffsets,
      decodingState.outputs, decodingState.outputOffsets);
}
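For completeness, a decoder built alongside this encoder is typically driven as sketched below: erased positions are passed as null inputs together with their indexes, and the outputs receive the reconstructed units. This is a hedged sketch; the RSRawDecoder(ErasureCoderOptions) constructor, the decode(byte[][], int[], byte[][]) call, and the (6, 3) layout are assumptions based on the raw coder API, not code shown in the example above.

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;

public class RSRawDecoderUsageSketch {
  // Reconstruct two erased units of a (6 data + 3 parity) stripe held as
  // byte arrays. Layout and cell size are illustrative assumptions.
  static void decodeTwoErasures(byte[][] stripe) throws java.io.IOException {
    RSRawDecoder decoder = new RSRawDecoder(new ErasureCoderOptions(6, 3));

    byte[][] inputs = stripe.clone();   // 9 cells: data units 0..5, parity 6..8
    int[] erasedIndexes = {1, 4};       // pretend units 1 and 4 were lost
    inputs[1] = null;                   // erased inputs are passed as null
    inputs[4] = null;

    byte[][] outputs = new byte[erasedIndexes.length][stripe[0].length];
    decoder.decode(inputs, erasedIndexes, outputs);
    // outputs[0] now holds the reconstructed unit 1, outputs[1] holds unit 4.
  }
}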
Example 7: processErasures
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
private void processErasures(int[] erasedIndexes) {
  this.decodeMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.invertMatrix = new byte[getNumAllUnits() * getNumDataUnits()];
  this.gfTables = new byte[getNumAllUnits() * getNumDataUnits() * 32];
  this.erasureFlags = new boolean[getNumAllUnits()];
  this.numErasedDataUnits = 0;
  for (int i = 0; i < erasedIndexes.length; i++) {
    int index = erasedIndexes[i];
    erasureFlags[index] = true;
    if (index < getNumDataUnits()) {
      numErasedDataUnits++;
    }
  }
  generateDecodeMatrix(erasedIndexes);
  RSUtil.initTables(getNumDataUnits(), erasedIndexes.length,
      decodeMatrix, 0, gfTables);
  if (allowVerboseDump()) {
    System.out.println(DumpUtil.bytesToHex(gfTables, -1));
  }
}
Example 8: RSRawEncoderLegacy
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
public RSRawEncoderLegacy(ErasureCoderOptions coderOptions) {
  super(coderOptions);
  assert (getNumDataUnits() + getNumParityUnits() < RSUtil.GF.getFieldSize());
  int[] primitivePower = RSUtil.getPrimitivePower(getNumDataUnits(),
      getNumParityUnits());
  // compute generating polynomial
  int[] gen = {1};
  int[] poly = new int[2];
  for (int i = 0; i < getNumParityUnits(); i++) {
    poly[0] = primitivePower[i];
    poly[1] = 1;
    gen = RSUtil.GF.multiply(gen, poly);
  }
  // generating polynomial has all generating roots
  generatingPolynomial = gen;
}
Example 9: RSRawDecoderLegacy
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
public RSRawDecoderLegacy(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }
  this.errSignature = new int[numParityUnits];
  this.primitivePower = RSUtil.getPrimitivePower(numDataUnits,
      numParityUnits);
}
Example 10: doDecodeImpl
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
private void doDecodeImpl(ByteBuffer[] inputs, int[] erasedIndexes,
    ByteBuffer[] outputs) {
  ByteBuffer valid = CoderUtil.findFirstValidInput(inputs);
  int dataLen = valid.remaining();
  for (int i = 0; i < erasedIndexes.length; i++) {
    errSignature[i] = primitivePower[erasedIndexes[i]];
    RSUtil.GF.substitute(inputs, dataLen, outputs[i], primitivePower[i]);
  }
  RSUtil.GF.solveVandermondeSystem(errSignature,
      outputs, erasedIndexes.length);
}
Example 11: RSRawDecoder
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
public RSRawDecoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }
  int numAllUnits = getNumDataUnits() + numParityUnits;
  encodeMatrix = new byte[numAllUnits * getNumDataUnits()];
  RSUtil.genCauchyMatrix(encodeMatrix, numAllUnits, getNumDataUnits());
  if (isAllowingVerboseDump()) {
    DumpUtil.dumpMatrix(encodeMatrix, numDataUnits, numAllUnits);
  }
}
Example 12: doDecode
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
@Override
protected void doDecode(ByteBuffer[] inputs, int[] erasedIndexes,
    ByteBuffer[] outputs) {
  prepareDecoding(inputs, erasedIndexes);
  ByteBuffer[] realInputs = new ByteBuffer[getNumDataUnits()];
  for (int i = 0; i < getNumDataUnits(); i++) {
    realInputs[i] = inputs[validIndexes[i]];
  }
  RSUtil.encodeData(gfTables, realInputs, outputs);
}
Example 13: doEncode
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
@Override
protected void doEncode(ByteBuffer[] inputs, ByteBuffer[] outputs) {
  // parity units + data units
  ByteBuffer[] all = new ByteBuffer[outputs.length + inputs.length];
  if (isAllowingChangeInputs()) {
    System.arraycopy(outputs, 0, all, 0, outputs.length);
    System.arraycopy(inputs, 0, all, outputs.length, inputs.length);
  } else {
    System.arraycopy(outputs, 0, all, 0, outputs.length);
    /**
     * Note when this coder would be really (rarely) used in a production
     * system, this can be optimized to cache and reuse the new allocated
     * buffers avoiding reallocating.
     */
    ByteBuffer tmp;
    for (int i = 0; i < inputs.length; i++) {
      tmp = ByteBuffer.allocate(inputs[i].remaining());
      tmp.put(inputs[i]);
      tmp.flip();
      all[outputs.length + i] = tmp;
    }
  }
  // Compute the remainder
  RSUtil.GF.remainder(all, generatingPolynomial);
}
Example 14: RSRawDecoder
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
public RSRawDecoder(int numDataUnits, int numParityUnits) {
  super(numDataUnits, numParityUnits);
  if (numDataUnits + numParityUnits >= RSUtil.GF.getFieldSize()) {
    throw new HadoopIllegalArgumentException(
        "Invalid numDataUnits and numParityUnits");
  }
  this.errSignature = new int[numParityUnits];
  this.primitivePower = RSUtil.getPrimitivePower(numDataUnits,
      numParityUnits);
}
Example 15: doDecodeImpl
import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; // import the required package/class
private void doDecodeImpl(ByteBuffer[] inputs, int[] erasedIndexes,
    ByteBuffer[] outputs) {
  ByteBuffer valid = findFirstValidInput(inputs);
  int dataLen = valid.remaining();
  for (int i = 0; i < erasedIndexes.length; i++) {
    errSignature[i] = primitivePower[erasedIndexes[i]];
    RSUtil.GF.substitute(inputs, dataLen, outputs[i], primitivePower[i]);
  }
  RSUtil.GF.solveVandermondeSystem(errSignature,
      outputs, erasedIndexes.length);
}