This article collects typical usage examples of the Java class org.apache.flink.types.ByteValue: what the class is for and how it is used in practice.
ByteValue belongs to the org.apache.flink.types package. The 15 code examples below are ordered by popularity.
Example 1: getSortedTestData
import org.apache.flink.types.ByteValue; // import the required package/class

@Override
protected ByteValue[] getSortedTestData() {
    Random rnd = new Random(874597969123412338L);
    int rndByte = rnd.nextInt(Byte.MAX_VALUE);
    if (rndByte < 0) {
        rndByte = -rndByte;
    }
    if (rndByte == Byte.MAX_VALUE) {
        rndByte -= 3;
    }
    if (rndByte <= 2) {
        rndByte += 3;
    }
    // after the clamping above, rndByte lies strictly between 2 and Byte.MAX_VALUE,
    // so the array below is sorted in strictly ascending order
    return new ByteValue[]{
        new ByteValue(Byte.MIN_VALUE),
        new ByteValue(Integer.valueOf(-rndByte).byteValue()),
        new ByteValue(Integer.valueOf(-1).byteValue()),
        new ByteValue(Integer.valueOf(0).byteValue()),
        new ByteValue(Integer.valueOf(1).byteValue()),
        new ByteValue(Integer.valueOf(2).byteValue()),
        new ByteValue(Integer.valueOf(rndByte).byteValue()),
        new ByteValue(Byte.MAX_VALUE)};
}
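A quick way to check the invariant this method promises (strictly ascending order); a minimal sketch, not part of the original test:

ByteValue[] data = getSortedTestData();
for (int i = 1; i < data.length; i++) {
    // every element must be strictly smaller than its successor
    assert data[i - 1].getValue() < data[i].getValue();
}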
Example 2: add
import org.apache.flink.types.ByteValue; // import the required package/class

@Override
public boolean add(ByteValue value) {
    int newPosition = position + 1;
    if (newPosition > data.length) {
        if (isBounded) {
            // a bounded array rejects new elements once full
            return false;
        } else {
            ensureCapacity(newPosition);
        }
    }
    data[position] = value.getValue();
    position = newPosition;
    return true;
}
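A minimal usage sketch, assuming flink-gelly's ByteValueArray (the class this method belongs to) is on the classpath:

ValueArray<ByteValue> array = new ByteValueArray();
boolean added = array.add(new ByteValue((byte) 7)); // true; the unbounded variant grows as needed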
Example 3: addAll
import org.apache.flink.types.ByteValue; // import the required package/class

@Override
public boolean addAll(ValueArray<ByteValue> other) {
    ByteValueArray source = (ByteValueArray) other;
    int sourceSize = source.position;
    int newPosition = position + sourceSize;
    if (newPosition > data.length) {
        if (isBounded) {
            return false;
        } else {
            ensureCapacity(newPosition);
        }
    }
    // bulk copy of the raw bytes rather than element-by-element insertion
    System.arraycopy(source.data, 0, data, position, sourceSize);
    position = newPosition;
    return true;
}
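The same idea for bulk copies; a small sketch under the same ByteValueArray assumption:

ByteValueArray source = new ByteValueArray();
source.add(new ByteValue((byte) 1));
source.add(new ByteValue((byte) 2));

ByteValueArray target = new ByteValueArray();
target.addAll(source); // copies both elements in one System.arraycopy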
Example 4: createValueArray
import org.apache.flink.types.ByteValue; // import the required package/class

/**
 * Produce a {@code ValueArray} for the given {@code Value} type.
 *
 * @param cls {@code Value} class
 * @return {@code ValueArray} for given {@code Value} class
 */
@SuppressWarnings("unchecked")
public static <T> ValueArray<T> createValueArray(Class<? extends Value> cls) {
    if (ByteValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new ByteValueArray();
    } else if (CharValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new CharValueArray();
    } else if (DoubleValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new DoubleValueArray();
    } else if (FloatValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new FloatValueArray();
    } else if (IntValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new IntValueArray();
    } else if (LongValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new LongValueArray();
    } else if (NullValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new NullValueArray();
    } else if (ShortValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new ShortValueArray();
    } else if (StringValue.class.isAssignableFrom(cls)) {
        return (ValueArray<T>) new StringValueArray();
    } else {
        throw new IllegalArgumentException("Unable to create unbounded ValueArray for type " + cls);
    }
}
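Typical use of this factory; a minimal sketch:

ValueArray<ByteValue> bytes = createValueArray(ByteValue.class);
bytes.add(new ByteValue((byte) 42)); // the concrete type is an unbounded ByteValueArray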
Example 5: map
import org.apache.flink.types.ByteValue; // import the required package/class

@Override
public Tuple3<T, T, ByteValue> map(Edge<T, Tuple3<ET, Degrees, Degrees>> value)
        throws Exception {
    Tuple3<ET, Degrees, Degrees> degrees = value.f2;
    long sourceDegree = degrees.f1.getDegree().getValue();
    long targetDegree = degrees.f2.getDegree().getValue();

    // orient each edge from the lower-degree endpoint to the higher-degree
    // endpoint, breaking degree ties by vertex ID
    if (sourceDegree < targetDegree ||
            (sourceDegree == targetDegree && value.f0.compareTo(value.f1) < 0)) {
        output.f0 = value.f0;
        output.f1 = value.f1;
        output.f2 = forward;
    } else {
        output.f0 = value.f1;
        output.f1 = value.f0;
        output.f2 = reverse;
    }

    return output;
}
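A worked example of the ordering rule above (the degree values are illustrative):

// degrees (2, 5): 2 < 5, so the edge keeps its direction and is tagged forward
// degrees (5, 5) with f0 < f1: the tie breaks on vertex ID, still forward
// degrees (7, 5): the endpoints swap and the edge is tagged reverse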
Example 6: testOrderErased
import org.apache.flink.types.ByteValue; // import the required package/class

@Test
public void testOrderErased() {
    SingleInputSemanticProperties sProps = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sProps, new String[]{"1; 4"}, null, null, tupleInfo, tupleInfo);

    Ordering o = new Ordering();
    o.appendOrdering(4, LongValue.class, Order.DESCENDING);
    o.appendOrdering(1, IntValue.class, Order.ASCENDING);
    o.appendOrdering(6, ByteValue.class, Order.DESCENDING);

    RequestedLocalProperties rlProp = new RequestedLocalProperties();
    rlProp.setOrdering(o);

    // field 6 is not among the forwarded fields ("1; 4"), so the whole
    // requested ordering cannot survive the operator and is erased
    RequestedLocalProperties filtered = rlProp.filterBySemanticProperties(sProps, 0);

    assertNull(filtered);
}
Example 7: testRangePartitioningErased
import org.apache.flink.types.ByteValue; // import the required package/class

@Test
public void testRangePartitioningErased() {
    SingleInputSemanticProperties sProp = new SingleInputSemanticProperties();
    SemanticPropUtil.getSemanticPropsSingleFromString(sProp, new String[]{"1;2"}, null, null, tupleInfo, tupleInfo);

    Ordering o = new Ordering();
    o.appendOrdering(3, LongValue.class, Order.DESCENDING);
    o.appendOrdering(1, IntValue.class, Order.ASCENDING);
    o.appendOrdering(6, ByteValue.class, Order.DESCENDING);

    RequestedGlobalProperties rgProps = new RequestedGlobalProperties();
    rgProps.setRangePartitioned(o);

    // fields 3 and 6 are not forwarded, so the range partitioning is erased
    RequestedGlobalProperties filtered = rgProps.filterBySemanticProperties(sProp, 0);

    assertNull(filtered);
}
Example 8: convert
import org.apache.flink.types.ByteValue; // import the required package/class

@SuppressWarnings("unchecked")
private <T> T convert(Record flinkType, int pos, Class<T> hadoopType) {
    if (hadoopType == LongWritable.class) {
        return (T) new LongWritable((flinkType.getField(pos, LongValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.Text.class) {
        return (T) new Text((flinkType.getField(pos, StringValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.IntWritable.class) {
        return (T) new IntWritable((flinkType.getField(pos, IntValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.FloatWritable.class) {
        return (T) new FloatWritable((flinkType.getField(pos, FloatValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
        return (T) new DoubleWritable((flinkType.getField(pos, DoubleValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
        return (T) new BooleanWritable((flinkType.getField(pos, BooleanValue.class)).getValue());
    }
    if (hadoopType == org.apache.hadoop.io.ByteWritable.class) {
        return (T) new ByteWritable((flinkType.getField(pos, ByteValue.class)).getValue());
    }
    throw new RuntimeException("Unable to convert Flink type (" + flinkType.getClass().getCanonicalName() + ") to Hadoop.");
}
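How a caller inside the same class might use this converter; a hedged sketch (the single-value Record constructor is used for brevity):

Record record = new Record(new ByteValue((byte) 5));
ByteWritable writable = convert(record, 0, ByteWritable.class); // wraps the byte 5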
Example 9: testValueTypes
import org.apache.flink.types.ByteValue; // import the required package/class

@Test
public void testValueTypes() throws Exception {
    final String inputData = "ABC,true,1,2,3,4,5.0,6.0\nBCD,false,1,2,3,4,5.0,6.0";
    final String dataPath = createInputData(inputData);
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> data =
            env.readCsvFile(dataPath).types(StringValue.class, BooleanValue.class, ByteValue.class, ShortValue.class, IntValue.class, LongValue.class, FloatValue.class, DoubleValue.class);

    List<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> result = data.collect();

    expected = inputData;
    compareResultAsTuples(result, expected);
}
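For orientation, the first CSV row above parses into the following Value-typed tuple (illustrative only):

// "ABC,true,1,2,3,4,5.0,6.0" ->
// (StringValue("ABC"), BooleanValue(true), ByteValue((byte) 1), ShortValue((short) 2),
//  IntValue(3), LongValue(4L), FloatValue(5.0f), DoubleValue(6.0))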
Example 10: testWithValueType
import org.apache.flink.types.ByteValue; // import the required package/class

@Test
public void testWithValueType() throws Exception {
    CsvReader reader = getCsvReader();
    DataSource<Tuple8<StringValue, BooleanValue, ByteValue, ShortValue, IntValue, LongValue, FloatValue, DoubleValue>> items =
            reader.types(StringValue.class, BooleanValue.class, ByteValue.class, ShortValue.class, IntValue.class, LongValue.class, FloatValue.class, DoubleValue.class);
    TypeInformation<?> info = items.getType();
    Assert.assertEquals(true, info.isTupleType());
    Assert.assertEquals(Tuple8.class, info.getTypeClass());
}
Example 11: testValueTypes
import org.apache.flink.types.ByteValue; // import the required package/class

@Test
public void testValueTypes() {
    helperValueType("StringValue", StringValue.class);
    helperValueType("IntValue", IntValue.class);
    helperValueType("ByteValue", ByteValue.class);
    helperValueType("ShortValue", ShortValue.class);
    helperValueType("CharValue", CharValue.class);
    helperValueType("DoubleValue", DoubleValue.class);
    helperValueType("FloatValue", FloatValue.class);
    helperValueType("LongValue", LongValue.class);
    helperValueType("BooleanValue", BooleanValue.class);
    helperValueType("ListValue", ListValue.class);
    helperValueType("MapValue", MapValue.class);
    helperValueType("NullValue", NullValue.class);
}
Example 12: getTestData
import org.apache.flink.types.ByteValue; // import the required package/class

@Override
protected ByteValue[] getTestData() {
    Random rnd = new Random(874597969123412341L);
    byte[] byteArray = new byte[1];
    rnd.nextBytes(byteArray);

    return new ByteValue[] {new ByteValue((byte) 0), new ByteValue((byte) 1), new ByteValue((byte) -1),
        new ByteValue(Byte.MAX_VALUE), new ByteValue(Byte.MIN_VALUE),
        new ByteValue(byteArray[0]), new ByteValue((byte) -byteArray[0])};
}
Example 13: getValidTestResults
import org.apache.flink.types.ByteValue; // import the required package/class

@Override
public ByteValue[] getValidTestResults() {
    return new ByteValue[] {
        new ByteValue((byte) 0), new ByteValue((byte) 1), new ByteValue((byte) 76), new ByteValue((byte) -66),
        new ByteValue(Byte.MAX_VALUE), new ByteValue(Byte.MIN_VALUE), new ByteValue((byte) 19)
    };
}
Example 14: translate
import org.apache.flink.types.ByteValue; // import the required package/class

@Override
public LongValueWithProperHashCode translate(ByteValue value, LongValueWithProperHashCode reuse)
        throws Exception {
    if (reuse == null) {
        reuse = new LongValueWithProperHashCode();
    }

    // mask off sign extension so the byte is interpreted as unsigned (0..255)
    reuse.setValue(value.getValue() & 0xff);

    return reuse;
}
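Why the & 0xff matters: Java widens bytes with sign extension, so without the mask a negative byte would map to a negative long. A tiny illustration:

byte b = (byte) 0xFF;      // -1 as a signed byte
long signed = b;           // -1L (sign-extended)
long unsigned = b & 0xff;  // 255L (what the translator stores)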
Example 15: testToByteValue
import org.apache.flink.types.ByteValue; // import the required package/class

@Test
public void testToByteValue() throws Exception {
    TranslateFunction<LongValue, ByteValue> translator = new LongValueToUnsignedByteValue();

    Assert.assertEquals(new ByteValue((byte) 0),
        translator.translate(new LongValue(0L), byteValue));
    Assert.assertEquals(new ByteValue(Byte.MIN_VALUE),
        translator.translate(new LongValue(Byte.MAX_VALUE + 1), byteValue));
    Assert.assertEquals(new ByteValue((byte) -1),
        translator.translate(new LongValue(LongValueToUnsignedByteValue.MAX_VERTEX_COUNT - 1), byteValue));
}
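Reading the expected values: this translation is the inverse of Example 14's unsigned widening, so 128L (Byte.MAX_VALUE + 1) is stored as (byte) -128, and MAX_VERTEX_COUNT - 1 (255L, assuming MAX_VERTEX_COUNT is 256) as (byte) -1. A sketch of the round trip:

long id = 200L;
byte stored = (byte) id;        // -56 as a signed byte
long restored = stored & 0xff;  // 200 again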