

Java HiveVarchar Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.common.type.HiveVarchar. If you have been wondering what the HiveVarchar class is for, how to use it, or what real-world usage looks like, the curated examples below should help.


The HiveVarchar class belongs to the org.apache.hadoop.hive.common.type package. Fifteen code examples of the class are shown below, ordered by popularity by default.
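
Before turning to the examples, here is a minimal, hypothetical sketch of the class's core behavior (not drawn from any of the projects below): a HiveVarchar wraps a String together with a maximum character length, and input longer than that maximum is truncated at construction time.

import org.apache.hadoop.hive.common.type.HiveVarchar;

public class HiveVarcharBasics {
  public static void main(String[] args) {
    // Construction enforces the maximum length: longer input is truncated.
    HiveVarchar hv = new HiveVarchar("hello world", 5);
    System.out.println(hv.getValue());           // prints "hello"
    System.out.println(hv.getCharacterLength()); // prints 5

    // MAX_VARCHAR_LENGTH (65535) is the largest length Hive allows for varchar.
    HiveVarchar full = new HiveVarchar("hello world", HiveVarchar.MAX_VARCHAR_LENGTH);
    System.out.println(full.getValue());         // prints "hello world"
  }
}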

Example 1: getTransformedWritable

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  // Pull the HiveVarchar out of the deferred UDF argument.
  HiveVarchar value = columnType.getPrimitiveJavaObject(object.get());

  if (value != null) {
    String transformedValue = transformer.transform(value.getValue());

    if (transformedValue != null) {
      // Reuse the output writable rather than allocating one per row.
      writable.set(transformedValue);
      return writable;
    }
  }

  return null;
}
 
Developer: myui, Project: hive-udf-backports, Lines: 17, Source file: BaseMaskUDF.java
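
Example 1 depends on fields (columnType, transformer, writable) defined elsewhere in BaseMaskUDF. A self-contained analog of the same null-propagating pattern, with a hypothetical Transformer interface standing in for Hive's masking transformers, might look like this:

import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.io.Text;

interface Transformer {
  // Returns the masked string, or null if the value should be masked out entirely.
  String transform(String input);
}

class VarcharMasker {
  private final Transformer transformer;
  private final Text writable = new Text(); // reused across rows, as in the UDF

  VarcharMasker(Transformer transformer) {
    this.transformer = transformer;
  }

  // Null in, null out; otherwise apply the transformer and wrap the result.
  Text mask(HiveVarchar value) {
    if (value == null) {
      return null;
    }
    String transformed = transformer.transform(value.getValue());
    if (transformed == null) {
      return null;
    }
    writable.set(transformed);
    return writable;
  }
}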

Example 2: convertClobType

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
private Object convertClobType(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  ClobRef cr = (ClobRef) val;
  String s = cr.isExternal() ? cr.toString() : cr.getData();

  if (hfsType == HCatFieldSchema.Type.STRING) {
    return s;
  } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
    VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
    HiveVarchar hvc = new HiveVarchar(s, vti.getLength());
    return hvc;
  } else if (hfsType == HCatFieldSchema.Type.CHAR) {
    CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
    HiveChar hc = new HiveChar(s, cti.getLength());
    return hc;
  }
  return null;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 19, Source file: SqoopHCatImportHelper.java

Example 3: testStringTypes

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testStringTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
        "char(14)", Types.CHAR, HCatFieldSchema.Type.CHAR, 14, 0,
        new HiveChar("string to test", 14), "string to test",
        KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
        "char(14)", Types.CHAR, HCatFieldSchema.Type.VARCHAR, 14, 0,
        new HiveVarchar("string to test", 14), "string to test",
        KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 24, Source file: HCatalogImportTest.java

Example 4: testTableCreation

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testTableCreation() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      new HiveVarchar("1", 20), "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      new HiveVarchar("2", 20), "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols,
    null, true, false);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 18, Source file: HCatalogImportTest.java

Example 5: testTableCreationWithPartition

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testTableCreationWithPartition() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col1");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("2");
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 21, Source file: HCatalogImportTest.java

Example 6: testTableCreationWithMultipleStaticPartKeys

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testTableCreationWithMultipleStaticPartKeys() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 21, Source file: HCatalogImportTest.java

Example 7: testTableCreationWithStorageStanza

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testTableCreationWithStorageStanza() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col1");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("2");
  addlArgsArray.add("--create-hcatalog-table");
  addlArgsArray.add("--hcatalog-storage-stanza");
  addlArgsArray.add(HCatalogTestUtils.STORED_AS_TEXT);
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 23, Source file: HCatalogImportTest.java

Example 8: testCreateTableWithPreExistingTable

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testCreateTableWithPreExistingTable() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  try {
    // Precreate table
    utils.createHCatTable(CreateMode.CREATE, TOTAL_RECORDS, table, cols);
    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols,
      null, true, false);
    fail("HCatalog job with --create-hcatalog-table and pre-existing"
      + " table should fail");
  } catch (Exception e) {
    LOG.debug("Caught expected exception while running "
      + " create-hcatalog-table with pre-existing table test", e);
  }
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 26, Source file: HCatalogImportTest.java

Example 9: testStringTypes

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
public void testStringTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
        "char(14)", Types.CHAR, HCatFieldSchema.Type.CHAR, 14, 0,
        new HiveChar("string to test", 14), "string to test",
        KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
        "char(14)", Types.CHAR, HCatFieldSchema.Type.VARCHAR, 14, 0,
        new HiveVarchar("string to test", 14), "string to test",
        KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 23, Source file: HCatalogExportTest.java

Example 10: testStringColumnsMetadata

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
@Test
public void testStringColumnsMetadata() throws Exception {
  String query = "select varchar_field, char_field, string_field from hive.readtest";

  Map<String, Integer> expectedResult = Maps.newHashMap();
  expectedResult.put("varchar_field", 50);
  expectedResult.put("char_field", 10);
  expectedResult.put("string_field", HiveVarchar.MAX_VARCHAR_LENGTH);

  verifyColumnsMetadata(client.createPreparedStatement(query).get()
      .getPreparedStatement().getColumnsList(), expectedResult);

  try {
    test("alter session set `%s` = true", ExecConstants.EARLY_LIMIT0_OPT_KEY);
    verifyColumnsMetadata(client.createPreparedStatement(String.format("select * from (%s) t limit 0", query)).get()
            .getPreparedStatement().getColumnsList(), expectedResult);
  } finally {
    test("alter session reset `%s`", ExecConstants.EARLY_LIMIT0_OPT_KEY);
  }
}
 
Developer: axbaretto, Project: drill, Lines: 21, Source file: TestHiveStorage.java

Example 11: getJavaObjectFromPrimitiveData

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
private static Object getJavaObjectFromPrimitiveData(Object data, ObjectInspector objInsp) {
    assert(objInsp.getCategory() == Category.PRIMITIVE);
    if (data == null) {
        return null;
    }
    if (data instanceof BytesWritable && objInsp instanceof WritableHiveDecimalObjectInspector) {
        // BytesWritable cannot be directly cast to HiveDecimalWritable
        WritableHiveDecimalObjectInspector oi = (WritableHiveDecimalObjectInspector) objInsp;
        data = oi.create(((BytesWritable) data).getBytes(), oi.scale());
    }
    Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
    if (obj instanceof HiveDecimal) {
        obj = ((HiveDecimal) obj).bigDecimalValue();
    } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
        obj = obj.toString();
    } else if (obj instanceof byte[]) {
        obj = Hex.encodeHexString((byte[]) obj);
    }
    return obj;
}
 
Developer: EXASOL, Project: hadoop-etl-udfs, Lines: 21, Source file: HdfsSerDeImportService.java

Example 12: getJavaObjectFromFieldData

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
private static Object getJavaObjectFromFieldData(Object data, ObjectInspector objInsp) {
    if (data == null) {
        return null;
    }
    if (objInsp.getCategory() == Category.PRIMITIVE) {
        Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
        if (obj instanceof HiveDecimal) {
            obj = ((HiveDecimal) obj).bigDecimalValue();
        } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
            obj = obj.toString();
        } else if (obj instanceof byte[]) {
            obj = Hex.encodeHexString((byte[]) obj);
        }
        return obj;
    } else if (objInsp.getCategory() == Category.LIST) {
        return getJsonArrayFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
    } else {
        return getJsonObjectFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
    }
}
 
Developer: EXASOL, Project: hadoop-etl-udfs, Lines: 21, Source file: HdfsSerDeImportService.java
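
Examples 11 and 12 both normalize Hive's wrapper types to plain Java values before handing them to the importer. A minimal, hypothetical illustration of that mapping (class and method names here are for demonstration only):

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;

public class WrapperNormalization {
  // Mirrors the instanceof chain shared by Examples 11 and 12.
  static Object normalize(Object obj) {
    if (obj instanceof HiveDecimal) {
      return ((HiveDecimal) obj).bigDecimalValue(); // -> java.math.BigDecimal
    } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
      return obj.toString(); // -> plain java.lang.String
    }
    return obj; // other primitives pass through unchanged
  }

  public static void main(String[] args) {
    System.out.println(normalize(new HiveVarchar("abc", 10)));                 // abc
    System.out.println(normalize(HiveDecimal.create(new BigDecimal("1.50")))); // a normalized BigDecimal
  }
}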

Example 13: writeVarchar

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
@Test
public void writeVarchar() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add("hello");
  values.add(new HiveVarchar("world", 1));
  values.add(null);

  write(TypeInfoFactory.getVarcharTypeInfo(1), values);

  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveVarchar) reader.next().get(0)).getValue(), is("h"));

    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveVarchar) reader.next().get(0)).getValue(), is("w"));

    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));

    assertThat(reader.hasNext(), is(false));
  }
}
 
Developer: HotelsDotCom, Project: corc, Lines: 23, Source file: OrcFileTest.java

Example 14: getPrimitiveJavaObject (a FreeMarker template: the <#if> directives select between the nullable and non-nullable holder variants at code-generation time)

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
@Override
public HiveVarchar getPrimitiveJavaObject(Object o) {
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableVarCharHolder h = (NullableVarCharHolder)o;
<#else>
  final VarCharHolder h = (VarCharHolder)o;
</#if>
  final String s = StringFunctionHelpers.toStringFromUTF8(h.start, h.end, h.buffer);
  return new HiveVarchar(s, HiveVarchar.MAX_VARCHAR_LENGTH);
}
 
Developer: skhalifa, Project: QDrill, Lines: 14, Source file: ObjectInspectors.java

Example 15: convertStringTypes

import org.apache.hadoop.hive.common.type.HiveVarchar; // import the required package/class
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      str = FieldFormatter.hiveStringReplaceDelims(str,
        hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      HiveChar hc = new HiveChar(val.toString(), cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 29, Source file: SqoopHCatImportHelper.java


Note: the org.apache.hadoop.hive.common.type.HiveVarchar class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects and remain the copyright of their original authors; for redistribution and use, please refer to each project's license. Do not repost without permission.