This article collects typical usage examples of the Java method org.apache.hadoop.io.WritableComparator.hashBytes. If you are wondering what WritableComparator.hashBytes does, how to call it, or where to find examples of it, the hand-picked method examples below may help. You can also read further about the class the method belongs to, org.apache.hadoop.io.WritableComparator.
The following presents 7 code examples of WritableComparator.hashBytes, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
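Before the examples, here is a minimal, self-contained sketch (not taken from the examples below) that calls both overloads of hashBytes directly; the literal string and class/variable names are purely illustrative:
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.WritableComparator;

public class HashBytesDemo {
  public static void main(String[] args) {
    byte[] data = "hadoop".getBytes(StandardCharsets.UTF_8);

    // Two-argument overload: hash the first 'length' bytes of the array.
    int h1 = WritableComparator.hashBytes(data, data.length);

    // Three-argument overload: hash an explicit (offset, length) slice.
    // Hashing the full array from offset 0 gives the same result.
    int h2 = WritableComparator.hashBytes(data, 0, data.length);

    System.out.println(h1 == h2); // prints: true
  }
}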
Example 1: getPartition
import org.apache.hadoop.io.WritableComparator; // import for the class this method depends on
/**
 * Use (the specified slice of the array returned by)
 * {@link BinaryComparable#getBytes()} to partition.
 */
@Override
public int getPartition(BinaryComparable key, V value, int numPartitions) {
  int length = key.getLength();
  int leftIndex = (leftOffset + length) % length;
  int rightIndex = (rightOffset + length) % length;
  int hash = WritableComparator.hashBytes(key.getBytes(),
                                          leftIndex, rightIndex - leftIndex + 1);
  return (hash & Integer.MAX_VALUE) % numPartitions;
}
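The modulo arithmetic above allows leftOffset and rightOffset to be negative, in which case they count backwards from the end of the key. A small sketch of that index mapping, using made-up offsets and a made-up key (it assumes the same import as above):
byte[] keyBytes = {10, 20, 30, 40, 50};
int length = keyBytes.length;          // 5
int leftOffset = 0;                    // first byte of the key
int rightOffset = -1;                  // last byte, counted from the end
int leftIndex = (leftOffset + length) % length;    // 0
int rightIndex = (rightOffset + length) % length;  // 4
// Hashes the whole key, exactly as getPartition above would for these offsets.
int hash = WritableComparator.hashBytes(keyBytes, leftIndex, rightIndex - leftIndex + 1);
int partition = (hash & Integer.MAX_VALUE) % 8;    // e.g. with 8 reduce tasks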
Example 2: hashCode
import org.apache.hadoop.io.WritableComparator; // import for the class this method depends on
@Override
public int hashCode() {
  return WritableComparator.hashBytes(keyBuffer, 0, getKeyLength());
}
Example 3: hashCode
import org.apache.hadoop.io.WritableComparator; // import for the class this method depends on
public int hashCode() {
  return WritableComparator.hashBytes(this.getBytes(), this.getLength());
}
Example 4: map
import org.apache.hadoop.io.WritableComparator; // import for the class this method depends on
@SuppressWarnings("unchecked")
public void map(WritableComparable key, Writable value,
                OutputCollector<IntWritable, RecordStatsWritable> output,
                Reporter reporter) throws IOException {
  // Set up rawKey and rawValue on the first call to 'map'
  if (recordId == -1) {
    rawKey = createRaw(key.getClass());
    rawValue = createRaw(value.getClass());
  }
  ++recordId;

  if (this.key == sortOutput) {
    // Check if keys are 'sorted' if this
    // record is from sort's output
    if (prevKey == null) {
      prevKey = key;
      keyClass = prevKey.getClass();
    } else {
      // Sanity check
      if (keyClass != key.getClass()) {
        throw new IOException("Type mismatch in key: expected " +
                              keyClass.getName() + ", received " +
                              key.getClass().getName());
      }

      // Check if they were sorted correctly
      if (prevKey.compareTo(key) > 0) {
        throw new IOException("The 'map-reduce' framework wrongly" +
                              " classified (" + prevKey + ") > (" +
                              key + ") " + "for record# " + recordId);
      }
      prevKey = key;
    }

    // Check if the sorted output is 'partitioned' right
    int keyPartition =
      partitioner.getPartition(key, value, noSortReducers);
    if (partition != keyPartition) {
      throw new IOException("Partitions do not match for record# " +
                            recordId + " ! - '" + partition + "' v/s '" +
                            keyPartition + "'");
    }
  }

  // Construct the record-stats and output (this.key, record-stats)
  byte[] keyBytes = rawKey.getRawBytes(key);
  int keyBytesLen = rawKey.getRawBytesLength(key);
  byte[] valueBytes = rawValue.getRawBytes(value);
  int valueBytesLen = rawValue.getRawBytesLength(value);

  int keyValueChecksum =
    (WritableComparator.hashBytes(keyBytes, keyBytesLen) ^
     WritableComparator.hashBytes(valueBytes, valueBytesLen));

  output.collect(this.key,
                 new RecordStatsWritable((keyBytesLen + valueBytesLen),
                                         1, keyValueChecksum));
}
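XOR makes a convenient per-record checksum here because combining such checksums is order-independent: two passes over the same records produce the same aggregate even if the records arrive in a different order. A minimal, self-contained sketch of that property (the sample byte arrays are made up and are not part of the example above):
byte[][] keys   = { {1, 2, 3}, {4, 5} };
byte[][] values = { {9, 9},    {7, 7, 7} };
int aggregate = 0;
for (int i = 0; i < keys.length; i++) {
  // Same per-record checksum as computed in map() above.
  aggregate ^= WritableComparator.hashBytes(keys[i], keys[i].length)
             ^ WritableComparator.hashBytes(values[i], values[i].length);
}
// 'aggregate' is identical for any permutation of the records.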
Example 5: hashCode
import org.apache.hadoop.io.WritableComparator; // import for the class this method depends on
@Override
public int hashCode() {
  return WritableComparator.hashBytes(ba.bytes(), ba.length());
}
Example 6: hashCode
import org.apache.hadoop.io.WritableComparator; // import for the class this method depends on
/**
 * Return a hash of the bytes returned from {@link #getBytes()}.
 * @see org.apache.hadoop.io.WritableComparator#hashBytes(byte[],int)
 */
public int hashCode() {
  return WritableComparator.hashBytes(getBytes(), getLength());
}
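org.apache.hadoop.io.Text stores its contents as UTF-8 bytes and exposes a hashCode() of exactly this shape, so the value can be reproduced with a direct hashBytes call. A small sketch of that consistency check (assuming a hashCode() implementation like the one above; the sample string is arbitrary):
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;

Text text = new Text("hash me");
byte[] utf8 = "hash me".getBytes(StandardCharsets.UTF_8);
// The two values should agree when hashCode() is implemented as shown above.
boolean consistent = text.hashCode() == WritableComparator.hashBytes(utf8, utf8.length);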
Example 7: hashCode
import org.apache.hadoop.io.WritableComparator; // import for the class this method depends on
/**
 * @param b value
 * @param length length of the value
 * @return Runs {@link WritableComparator#hashBytes(byte[], int)} on the
 * passed in array. This method is what {@link org.apache.hadoop.io.Text} and
 * {@link org.apache.hadoop.hbase.io.ImmutableBytesWritable} use when calculating a hash code.
 */
public static int hashCode(final byte [] b, final int length) {
  return WritableComparator.hashBytes(b, length);
}