This article collects typical usage examples of the Java method org.apache.hadoop.io.WritableComparator.hashBytes. If you are wondering what WritableComparator.hashBytes does, how it is used, or where to find examples of it, the curated code samples below may help. You can also read further about the enclosing class, org.apache.hadoop.io.WritableComparator.
The following presents 7 code examples of the WritableComparator.hashBytes method.
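Before the collected examples, here is a minimal self-contained sketch of the two hashBytes overloads, hashBytes(byte[], int) and hashBytes(byte[], int, int); the sample data and the class name HashBytesSketch are only illustrative.

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.io.WritableComparator;

public class HashBytesSketch {
  public static void main(String[] args) {
    byte[] data = "hello hadoop".getBytes(StandardCharsets.UTF_8);
    // Two-argument form: hash the first 'length' bytes of the array.
    int h1 = WritableComparator.hashBytes(data, data.length);
    // Three-argument form: hash the slice starting at offset 6, length 6 ("hadoop").
    int h2 = WritableComparator.hashBytes(data, 6, 6);
    System.out.println(h1 + " " + h2);
  }
}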
Example 1: getPartition
import org.apache.hadoop.io.WritableComparator; // import the package/class the method depends on
/**
 * Use (the specified slice of the array returned by)
 * {@link BinaryComparable#getBytes()} to partition.
 */
@Override
public int getPartition(BinaryComparable key, V value, int numPartitions) {
  int length = key.getLength();
  // Normalize the configured offsets: negative offsets count back from the end of the key.
  int leftIndex = (leftOffset + length) % length;
  int rightIndex = (rightOffset + length) % length;
  int hash = WritableComparator.hashBytes(key.getBytes(),
    leftIndex, rightIndex - leftIndex + 1);
  return (hash & Integer.MAX_VALUE) % numPartitions;
}
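The leftOffset and rightOffset fields above come from the partitioner's configuration. As a usage note, a driver-side sketch along the following lines sets up the slice; it assumes the mapreduce-API org.apache.hadoop.mapreduce.lib.partition.BinaryPartitioner and its static setOffsets helper, and the key/value classes and job name are placeholders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.partition.BinaryPartitioner;

public class BinaryPartitionerDriverSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Partition on the slice key[1 .. length-2]; negative offsets count from the end.
    BinaryPartitioner.setOffsets(conf, 1, -2);
    Job job = Job.getInstance(conf, "binary-partitioner-sketch");
    job.setPartitionerClass(BinaryPartitioner.class);
    job.setMapOutputKeyClass(BytesWritable.class);
    job.setMapOutputValueClass(IntWritable.class);
    // ... set mapper, reducer, and input/output paths as usual ...
  }
}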
Example 2: hashCode
import org.apache.hadoop.io.WritableComparator; // import the package/class the method depends on
@Override
public int hashCode() {
  return WritableComparator.hashBytes(keyBuffer, 0, getKeyLength());
}
Example 3: hashCode
import org.apache.hadoop.io.WritableComparator; // import the package/class the method depends on
public int hashCode() {
  return WritableComparator.hashBytes(this.getBytes(), this.getLength());
}
Example 4: map
import org.apache.hadoop.io.WritableComparator; // import the package/class the method depends on
@SuppressWarnings("unchecked")
public void map(WritableComparable key, Writable value,
OutputCollector<IntWritable, RecordStatsWritable> output,
Reporter reporter) throws IOException {
// Set up rawKey and rawValue on the first call to 'map'
if (recordId == -1) {
rawKey = createRaw(key.getClass());
rawValue = createRaw(value.getClass());
}
++recordId;
if (this.key == sortOutput) {
// Check if keys are 'sorted' if this
// record is from sort's output
if (prevKey == null) {
prevKey = key;
keyClass = prevKey.getClass();
} else {
// Sanity check
if (keyClass != key.getClass()) {
throw new IOException("Type mismatch in key: expected " +
keyClass.getName() + ", received " +
key.getClass().getName());
}
// Check if they were sorted correctly
if (prevKey.compareTo(key) > 0) {
throw new IOException("The 'map-reduce' framework wrongly" +
" classifed (" + prevKey + ") > (" +
key + ") "+ "for record# " + recordId);
}
prevKey = key;
}
// Check if the sorted output is 'partitioned' right
int keyPartition =
partitioner.getPartition(key, value, noSortReducers);
if (partition != keyPartition) {
throw new IOException("Partitions do not match for record# " +
recordId + " ! - '" + partition + "' v/s '" +
keyPartition + "'");
}
}
// Construct the record-stats and output (this.key, record-stats)
byte[] keyBytes = rawKey.getRawBytes(key);
int keyBytesLen = rawKey.getRawBytesLength(key);
byte[] valueBytes = rawValue.getRawBytes(value);
int valueBytesLen = rawValue.getRawBytesLength(value);
int keyValueChecksum =
(WritableComparator.hashBytes(keyBytes, keyBytesLen) ^
WritableComparator.hashBytes(valueBytes, valueBytesLen));
output.collect(this.key,
new RecordStatsWritable((keyBytesLen+valueBytesLen),
1, keyValueChecksum)
);
}
Example 5: hashCode
import org.apache.hadoop.io.WritableComparator; // import the package/class the method depends on
@Override
public int hashCode() {
  return WritableComparator.hashBytes(ba.bytes(), ba.length());
}
Example 6: hashCode
import org.apache.hadoop.io.WritableComparator; // import the package/class the method depends on
/**
 * Return a hash of the bytes returned from {@link #getBytes()}.
 * @see org.apache.hadoop.io.WritableComparator#hashBytes(byte[],int)
 */
public int hashCode() {
  return WritableComparator.hashBytes(getBytes(), getLength());
}
Example 7: hashCode
import org.apache.hadoop.io.WritableComparator; // import the package/class the method depends on
/**
 * @param b value
 * @param length length of the value
 * @return Runs {@link WritableComparator#hashBytes(byte[], int)} on the
 * passed in array. This method is what {@link org.apache.hadoop.io.Text} and
 * {@link org.apache.hadoop.hbase.io.ImmutableBytesWritable} use when calculating a hash code.
 */
public static int hashCode(final byte [] b, final int length) {
  return WritableComparator.hashBytes(b, length);
}
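Examples 6 and 7 show the hashCode() pattern that Text (via BinaryComparable) and HBase's Bytes utility build on. A small self-contained check, assuming a Hadoop release in which Text inherits its hashCode() from BinaryComparable, can confirm that calling hashBytes directly produces the same value; the class name HashBytesCheck is only illustrative.

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;

public class HashBytesCheck {
  public static void main(String[] args) {
    Text t = new Text("hadoop");
    // BinaryComparable.hashCode() delegates to hashBytes(getBytes(), getLength()),
    // so this comparison is expected to print true.
    int direct = WritableComparator.hashBytes(t.getBytes(), t.getLength());
    System.out.println(direct == t.hashCode());
  }
}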