

Java TypeInfo.equals Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.equals. If you are wondering what TypeInfo.equals does, how to call it, or what real-world usages look like, the selected code examples below may help. You can also explore further usage examples of the containing class, org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.


The following shows 6 code examples of the TypeInfo.equals method, sorted by popularity by default.
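Before going through the examples, here is a minimal, self-contained sketch of the pattern they all share: a column's TypeInfo is compared against the singleton constants in TypeInfoFactory via equals in order to branch on the column type. This sketch is not taken from any of the projects below; the "bigint" type string and the class name TypeInfoEqualsDemo are made up for illustration, and it assumes hive-serde is on the classpath.

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

public class TypeInfoEqualsDemo {
    public static void main(String[] args) {
        // Parse a Hive type string into a TypeInfo; "bigint" is just an example.
        TypeInfo parsed = TypeInfoUtils.getTypeInfoFromTypeString("bigint");

        // TypeInfoFactory exposes singleton instances for the primitive types,
        // so equals() is the usual way to dispatch on a column's type.
        if (parsed.equals(TypeInfoFactory.longTypeInfo)) {
            System.out.println("column is a BIGINT");
        } else if (parsed.equals(TypeInfoFactory.stringTypeInfo)) {
            System.out.println("column is a STRING");
        } else {
            System.out.println("unhandled type: " + parsed.getTypeName());
        }
    }
}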

Example 1: getObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class required by the method
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
    if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    } else {
        throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
    }
}
 
Developer ID: shunfei, Project: indexr, Lines: 22, Source: ArrayWritableObjectInspector.java

Example 2: convertToIndexRSchema

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class required by the method
private static SegmentSchema convertToIndexRSchema(List<String> columnNames,
                                                   List<TypeInfo> columnTypes,
                                                   Set<String> indexColumns) throws IOException {
    List<ColumnSchema> schemas = new ArrayList<ColumnSchema>();
    for (int i = 0; i < columnNames.size(); i++) {
        String currentColumn = columnNames.get(i);
        TypeInfo currentType = columnTypes.get(i);
        SQLType convertedType = null;

        if (currentType.equals(TypeInfoFactory.intTypeInfo)) {
            convertedType = SQLType.INT;
        } else if (currentType.equals(TypeInfoFactory.longTypeInfo)) {
            convertedType = SQLType.BIGINT;
        } else if (currentType.equals(TypeInfoFactory.floatTypeInfo)) {
            convertedType = SQLType.FLOAT;
        } else if (currentType.equals(TypeInfoFactory.doubleTypeInfo)) {
            convertedType = SQLType.DOUBLE;
        } else if (currentType.equals(TypeInfoFactory.stringTypeInfo)) {
            convertedType = SQLType.VARCHAR;
        } else if (currentType.equals(TypeInfoFactory.dateTypeInfo)) {
            convertedType = SQLType.DATE;
        } else if (currentType.equals(TypeInfoFactory.timestampTypeInfo)) {
            convertedType = SQLType.DATETIME;
        } else {
            throw new IOException("can't recognize this type [" + currentType.getTypeName() + "]");
        }

        boolean isIndexed = indexColumns.contains(currentColumn.toLowerCase());
        schemas.add(new ColumnSchema(currentColumn, convertedType, isIndexed));
    }
    return new SegmentSchema(schemas);
}
 
Developer ID: shunfei, Project: indexr, Lines: 33, Source: IndexROutputFormat.java

Example 3: setReadColumns

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class required by the method
/**
 * Sets which fields are to be read from the ORC file
 */
static void setReadColumns(Configuration conf, StructTypeInfo actualStructTypeInfo) {
  StructTypeInfo readStructTypeInfo = getTypeInfo(conf);
  LOG.info("Read StructTypeInfo: {}", readStructTypeInfo);

  List<Integer> ids = new ArrayList<>();
  List<String> names = new ArrayList<>();

  List<String> readNames = readStructTypeInfo.getAllStructFieldNames();
  List<String> actualNames = actualStructTypeInfo.getAllStructFieldNames();

  for (int i = 0; i < actualNames.size(); i++) {
    String actualName = actualNames.get(i);
    if (readNames.contains(actualName)) {
      // make sure they are the same type
      TypeInfo actualTypeInfo = actualStructTypeInfo.getStructFieldTypeInfo(actualName);
      TypeInfo readTypeInfo = readStructTypeInfo.getStructFieldTypeInfo(actualName);
      if (!actualTypeInfo.equals(readTypeInfo)) {
        throw new IllegalStateException("readTypeInfo [" + readTypeInfo + "] does not match actualTypeInfo ["
            + actualTypeInfo + "]");
      }
      // mark the column as to-be-read
      ids.add(i);
      names.add(actualName);
    }
  }
  if (ids.size() == 0) {
    throw new IllegalStateException("None of the selected columns were found in the ORC file.");
  }
  LOG.info("Set column projection on columns: {} ({})", ids, names);
  ColumnProjectionUtils.appendReadColumns(conf, ids, names);
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 35, Source: CorcInputFormat.java

Example 4: getFieldObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class required by the method
/**
 * Given a Hive column type, returns the ObjectInspector that will be used to
 * get data from the field. Currently using the standard Writable object
 * inspectors.
 * TODO: Support all types
 */
private ObjectInspector getFieldObjectInspector(final TypeInfo typeInfo) {
  if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  } else if (typeInfo instanceof DecimalTypeInfo) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (DecimalTypeInfo) typeInfo);
  } else if (typeInfo instanceof VarcharTypeInfo) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (VarcharTypeInfo) typeInfo);
  } else if (typeInfo instanceof CharTypeInfo) {
    return PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(
        (CharTypeInfo) typeInfo);
  } else {
    throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
  }
}
 
Developer ID: cloudera, Project: RecordServiceClient, Lines: 33, Source: RecordServiceObjectInspector.java

Example 5: convertType

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class required by the method
private static Type convertType(final String name, final TypeInfo typeInfo, final Repetition repetition) {
  if (typeInfo.getCategory().equals(Category.PRIMITIVE)) {
    if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.BINARY, name);
    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo) ||
        typeInfo.equals(TypeInfoFactory.shortTypeInfo) ||
        typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.INT32, name);
    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.INT64, name);
    } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.DOUBLE, name);
    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.FLOAT, name);
    } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
      return new PrimitiveType(repetition, PrimitiveTypeName.BOOLEAN, name);
    } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
      // TODO : binaryTypeInfo is a byte array. Need to map it
      throw new UnsupportedOperationException("Binary type not implemented");
    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
      throw new UnsupportedOperationException("Timestamp type not implemented");
    } else if (typeInfo.equals(TypeInfoFactory.voidTypeInfo)) {
      throw new UnsupportedOperationException("Void type not implemented");
    } else if (typeInfo.equals(TypeInfoFactory.unknownTypeInfo)) {
      throw new UnsupportedOperationException("Unknown type not implemented");
    } else {
      throw new IllegalArgumentException("Unknown type: " + typeInfo);
    }
  } else if (typeInfo.getCategory().equals(Category.LIST)) {
    return convertArrayType(name, (ListTypeInfo) typeInfo);
  } else if (typeInfo.getCategory().equals(Category.STRUCT)) {
    return convertStructType(name, (StructTypeInfo) typeInfo);
  } else if (typeInfo.getCategory().equals(Category.MAP)) {
    return convertMapType(name, (MapTypeInfo) typeInfo);
  } else if (typeInfo.getCategory().equals(Category.UNION)) {
    throw new UnsupportedOperationException("Union type not implemented");
  } else {
    throw new IllegalArgumentException("Unknown type: " + typeInfo);
  }
}
 
Developer ID: apache, Project: parquet-mr, Lines: 41, Source: HiveSchemaConverter.java

Example 6: getObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; // import the package/class required by the method
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
  if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetStringInspector;
  } else if (typeInfo.getCategory().equals(Category.STRUCT)) {
    return new ArrayWritableObjectInspector((StructTypeInfo) typeInfo);
  } else if (typeInfo.getCategory().equals(Category.LIST)) {
    final TypeInfo subTypeInfo = ((ListTypeInfo) typeInfo).getListElementTypeInfo();
    return new ParquetHiveArrayInspector(getObjectInspector(subTypeInfo));
  } else if (typeInfo.getCategory().equals(Category.MAP)) {
    final TypeInfo keyTypeInfo = ((MapTypeInfo) typeInfo).getMapKeyTypeInfo();
    final TypeInfo valueTypeInfo = ((MapTypeInfo) typeInfo).getMapValueTypeInfo();
    if (keyTypeInfo.equals(TypeInfoFactory.stringTypeInfo) || keyTypeInfo.equals(TypeInfoFactory.byteTypeInfo)
            || keyTypeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
      return new DeepParquetHiveMapInspector(getObjectInspector(keyTypeInfo), getObjectInspector(valueTypeInfo));
    } else {
      return new StandardParquetHiveMapInspector(getObjectInspector(keyTypeInfo), getObjectInspector(valueTypeInfo));
    }
  } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
    throw new UnsupportedOperationException("timestamp not implemented yet");
  } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetByteInspector;
  } else if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
    return ParquetPrimitiveInspectorFactory.parquetShortInspector;
  } else {
    throw new IllegalArgumentException("Unknown field info: " + typeInfo);
  }
}
 
Developer ID: apache, Project: parquet-mr, Lines: 39, Source: ArrayWritableObjectInspector.java


Note: The org.apache.hadoop.hive.serde2.typeinfo.TypeInfo.equals method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the corresponding project's license; do not reproduce without permission.