

Java ByteWritable Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.io.ByteWritable. If you are wondering what the ByteWritable class is for, how to call it, or what real-world usage looks like, the curated examples below should help.


The ByteWritable class belongs to the org.apache.hadoop.hive.serde2.io package. Fifteen code examples of the class are shown below, sorted by popularity by default.
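Before looking at the project examples, here is a minimal, self-contained sketch of the class itself: wrapping a Java byte (Hive's TINYINT) and round-tripping it through the standard Hadoop Writable contract. The class name ByteWritableSketch is illustrative only.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.hive.serde2.io.ByteWritable;

public class ByteWritableSketch {
    public static void main(String[] args) throws Exception {
        // Wrap a primitive byte (Hive TINYINT) in its Writable container.
        ByteWritable value = new ByteWritable((byte) 42);
        System.out.println(value.get());            // 42

        // Standard Writable round trip: serialize, then read back into an empty instance.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        value.write(new DataOutputStream(bos));

        ByteWritable copy = new ByteWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bos.toByteArray())));
        System.out.println(copy.get());             // 42
    }
}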

Example 1: getConstantIntValue

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
public static Integer getConstantIntValue(ObjectInspector[] arguments, int i)
        throws UDFArgumentTypeException {
    Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
    if (constValue == null) {
        return null;
    }
    int v;
    if (constValue instanceof IntWritable) {
        v = ((IntWritable) constValue).get();
    } else if (constValue instanceof ShortWritable) {
        v = ((ShortWritable) constValue).get();
    } else if (constValue instanceof ByteWritable) {
        v = ((ByteWritable) constValue).get();
    } else {
        throw new UDFArgumentTypeException(i, "_FUNC_ only takes INT/SHORT/BYTE types as "
                + getArgOrder(i) + " argument, got " + constValue.getClass());
    }
    return v;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 20, Source: BackportUtils.java

Example 2: writePrimitive

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
private void writePrimitive(final Writable value) {
  if (value == null) {
    return;
  }
  if (value instanceof DoubleWritable) {
    recordConsumer.addDouble(((DoubleWritable) value).get());
  } else if (value instanceof BooleanWritable) {
    recordConsumer.addBoolean(((BooleanWritable) value).get());
  } else if (value instanceof FloatWritable) {
    recordConsumer.addFloat(((FloatWritable) value).get());
  } else if (value instanceof IntWritable) {
    recordConsumer.addInteger(((IntWritable) value).get());
  } else if (value instanceof LongWritable) {
    recordConsumer.addLong(((LongWritable) value).get());
  } else if (value instanceof ShortWritable) {
    recordConsumer.addInteger(((ShortWritable) value).get());
  } else if (value instanceof ByteWritable) {
    recordConsumer.addInteger(((ByteWritable) value).get());
  } else if (value instanceof BigDecimalWritable) {
    throw new UnsupportedOperationException("BigDecimal writing not implemented");
  } else if (value instanceof BinaryWritable) {
    recordConsumer.addBinary(((BinaryWritable) value).getBinary());
  } else {
    throw new IllegalArgumentException("Unknown value type: " + value + " " + value.getClass());
  }
}
 
Developer: apache, Project: parquet-mr, Lines: 27, Source: DataWritableWriter.java

Example 3: evaluate

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
public IntWritable evaluate(ByteWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() >> b.get());
  return intWritable;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 8, Source: UDFOPBitShiftRight.java

Example 4: evaluate

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
public IntWritable evaluate(ByteWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() << b.get());
  return intWritable;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 8, Source: UDFOPBitShiftLeft.java

Example 5: evaluate

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
public IntWritable evaluate(ByteWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() >>> b.get());
  return intWritable;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 8, Source: UDFOPBitShiftRightUnsigned.java
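Examples 3 through 5 all follow the same pattern but reference an intWritable field that is declared elsewhere in each UDF class. Below is a minimal, self-contained sketch of that pattern with the missing field filled in; the class name ShiftRightSketch is illustrative, not taken from the original project.

import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.io.IntWritable;

public class ShiftRightSketch {
    // Reused across calls so a new Writable is not allocated for every row.
    private final IntWritable intWritable = new IntWritable();

    public IntWritable evaluate(ByteWritable a, IntWritable b) {
        if (a == null || b == null) {
            return null;
        }
        intWritable.set(a.get() >> b.get());
        return intWritable;
    }

    public static void main(String[] args) {
        ShiftRightSketch udf = new ShiftRightSketch();
        // 8 >> 1 == 4
        System.out.println(udf.evaluate(new ByteWritable((byte) 8), new IntWritable(1)));
    }
}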

Example 6: write

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
public Result write(Writable writable, Generator generator) {
    if (writable instanceof ByteWritable) {
        generator.writeNumber(((ByteWritable) writable).get());
    }
    else if (writable instanceof DoubleWritable) {
        generator.writeNumber(((DoubleWritable) writable).get());
    }
    else if (writable instanceof ShortWritable) {
        generator.writeNumber(((ShortWritable) writable).get());
    }
    // HiveDecimal - Hive 0.11+
    else if (writable != null && HiveConstants.DECIMAL_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(writable.toString());
    }
    // pass the UNIX epoch
    else if (writable instanceof TimestampWritable) {
        long ts = ((TimestampWritable) writable).getTimestamp().getTime();
        Calendar cal = Calendar.getInstance();
        cal.setTimeInMillis(ts);
        generator.writeString(DatatypeConverter.printDateTime(cal));
    }
    // HiveDate - Hive 0.12+
    else if (writable != null && HiveConstants.DATE_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(DateWritableWriter.toES(writable));
    }
    // HiveVarcharWritable - Hive 0.12+
    else if (writable != null && HiveConstants.VARCHAR_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(writable.toString());
    }
    // HiveChar - Hive 0.13+
    else if (writable != null && HiveConstants.CHAR_WRITABLE.equals(writable.getClass().getName())) {
        generator.writeString(StringUtils.trim(writable.toString()));
    }
    else {
        return super.write(writable, generator);
    }

    return Result.SUCCESFUL();
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 41, Source: HiveWritableValueWriter.java

Example 7: toComparable

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
static Comparable<?> toComparable(PrimitiveCategory category, Object literal) {
  String stringLiteral;
  switch (category) {
  case STRING:
    return new Text((String) literal);
  case BOOLEAN:
    return new BooleanWritable((Boolean) literal);
  case BYTE:
    return new ByteWritable(((Long) literal).byteValue());
  case SHORT:
    return new ShortWritable(((Long) literal).shortValue());
  case INT:
    return new IntWritable(((Long) literal).intValue());
  case LONG:
    return new LongWritable((Long) literal);
  case FLOAT:
    return new FloatWritable(((Double) literal).floatValue());
  case DOUBLE:
    return new DoubleWritable((Double) literal);
  case TIMESTAMP:
    return new TimestampWritable((Timestamp) literal);
  case DATE:
    return (DateWritable) literal;
  case CHAR:
    stringLiteral = (String) literal;
    return new HiveCharWritable(new HiveChar(stringLiteral, stringLiteral.length()));
  case VARCHAR:
    stringLiteral = (String) literal;
    return new HiveVarcharWritable(new HiveVarchar(stringLiteral, stringLiteral.length()));
  case DECIMAL:
    return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
  default:
    throw new IllegalArgumentException("Unsupported category: " + category);
  }
}
 
Developer: HotelsDotCom, Project: corc, Lines: 36, Source: EvaluatorFactory.java
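One detail worth noting in the BYTE branch above: the integral literal arrives boxed as a Long, so it is narrowed with byteValue() rather than cast to Byte. A small sketch of why that matters (assuming the literal really is boxed as a Long, as the original cast implies):

import org.apache.hadoop.hive.serde2.io.ByteWritable;

public class ByteLiteralSketch {
    public static void main(String[] args) {
        Object literal = Long.valueOf(5);   // integral literal boxed as Long

        // A direct cast "(Byte) literal" would throw ClassCastException;
        // narrow through Long.byteValue() instead, as toComparable() does.
        ByteWritable writable = new ByteWritable(((Long) literal).byteValue());
        System.out.println(writable.get()); // 5
    }
}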

Example 8: getObjectFromWritable

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
private Object getObjectFromWritable(Writable w) {
	if (w instanceof IntWritable) {
		// int
		return ((IntWritable) w).get();
	} else if (w instanceof ShortWritable) {
		// short
		return ((ShortWritable) w).get();
	} else if (w instanceof ByteWritable) {
		// byte
		return ((ByteWritable) w).get();
	} else if (w instanceof BooleanWritable) {
		// boolean
		return ((BooleanWritable) w).get();
	} else if (w instanceof LongWritable) {
		// long
		return ((LongWritable) w).get();
	} else if (w instanceof FloatWritable) {
		// float
		return ((FloatWritable) w).get();
	} else if (w instanceof DoubleWritable) {
		// double
		return ((DoubleWritable) w).get();
	} else if (w instanceof NullWritable) {
		// null
		return null;
	} else {
		// treat as string
		return w.toString();
	}
}
 
Developer: vroyer, Project: hive-solr-search, Lines: 31, Source: SolrWriter.java

Example 9: get

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
public byte get(Object o) {
  // Accept int writables and convert them.
  if (o instanceof IntWritable) {
    return (byte) ((IntWritable) o).get();
  }
  return ((ByteWritable) o).get();
}
 
Developer: apache, Project: parquet-mr, Lines: 9, Source: ParquetByteInspector.java

Example 10: createPrimitive

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
private Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
    throws SerDeException {
  if (obj == null) {
    return null;
  }
  switch (inspector.getPrimitiveCategory()) {
  case VOID:
    return null;
  case BOOLEAN:
    return new BooleanWritable(((BooleanObjectInspector) inspector).get(obj) ? Boolean.TRUE : Boolean.FALSE);
  case BYTE:
    return new ByteWritable((byte) ((ByteObjectInspector) inspector).get(obj));
  case DOUBLE:
    return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
  case FLOAT:
    return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
  case INT:
    return new IntWritable(((IntObjectInspector) inspector).get(obj));
  case LONG:
    return new LongWritable(((LongObjectInspector) inspector).get(obj));
  case SHORT:
    return new ShortWritable((short) ((ShortObjectInspector) inspector).get(obj));
  case STRING:
    return new BinaryWritable(Binary.fromString(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj)));
  default:
    throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
  }
}
 
Developer: apache, Project: parquet-mr, Lines: 29, Source: ParquetHiveSerDe.java
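The BYTE case above extracts the primitive value through a ByteObjectInspector before wrapping it in a ByteWritable. A minimal sketch of that inspector round trip, assuming Hive's standard PrimitiveObjectInspectorFactory is available on the classpath; the class name ByteInspectorSketch is illustrative.

import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class ByteInspectorSketch {
    public static void main(String[] args) {
        // Inspector for the Writable-backed (ByteWritable) representation of TINYINT.
        ByteObjectInspector inspector = PrimitiveObjectInspectorFactory.writableByteObjectInspector;

        ByteWritable data = new ByteWritable((byte) 7);
        byte primitive = inspector.get(data);        // unwrap the primitive byte
        System.out.println(primitive);               // 7

        // createPrimitive() above does the inverse: wrap the primitive back into a Writable.
        ByteWritable wrapped = new ByteWritable(primitive);
        System.out.println(wrapped.get());           // 7
    }
}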

Example 11: TestCopyByte

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
/**
 * Tests that after copying a lazy byte object, calling materialize on the original and the
 * copy doesn't advance the tree reader twice
 * @throws Exception
 */
@Test
public void TestCopyByte() throws Exception {
  ReaderWriterProfiler.setProfilerOptions(null);
  OrcLazyByte lazyByte = new OrcLazyByte(new LazyByteTreeReader(0, 0) {
    int nextCalls = 0;

    @Override
    public Object next(Object previous) throws IOException {
      if (nextCalls == 0) {
        return new ByteWritable((byte) 1);
      }

      throw new IOException("next should only be called once");
    }

    @Override
    protected boolean seekToRow(long currentRow) throws IOException {
      return true;
    }
  });

  ByteObjectInspector byteOI = (ByteObjectInspector)
      OrcLazyObjectInspectorUtils.createLazyObjectInspector(TypeInfoFactory.byteTypeInfo);

  OrcLazyByte lazyByte2 = (OrcLazyByte) byteOI.copyObject(lazyByte);

  Assert.assertEquals(1, ((ByteWritable) lazyByte.materialize()).get());
  Assert.assertEquals(1, ((ByteWritable) lazyByte2.materialize()).get());
}
 
Developer: facebookarchive, Project: hive-dwrf, Lines: 35, Source: TestObjectInspector.java

Example 12: byteType

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
protected Class<? extends Writable> byteType() {
    return ByteWritable.class;
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 5, Source: HiveValueReader.java

Example 13: processByte

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
protected Object processByte(Byte value) {
    return new ByteWritable(value);
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 5, Source: HiveValueReader.java

Example 14: testByte

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Test
public void testByte() {
    // byte is not recognized by the schema
    assertEquals("127", hiveTypeToJson(new MyHiveType(new ByteWritable(Byte.MAX_VALUE), byteTypeInfo)));
}
 
Developer: xushjie1987, Project: es-hadoop-v2.2.0, Lines: 6, Source: HiveTypeToJsonTest.java

Example 15: toWritableObjectInternal

import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
protected Object toWritableObjectInternal(Object value) throws UnexpectedTypeException {
  return new ByteWritable((Byte) value);
}
 
Developer: HotelsDotCom, Project: corc, Lines: 5, Source: DefaultConverterFactory.java


Note: The org.apache.hadoop.hive.serde2.io.ByteWritable class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, and copyright of the source code remains with those authors. Please follow each project's license when distributing or using the code; do not reproduce this article without permission.