

Java DataTypes.DateType Code Examples

This article collects typical usage examples of org.apache.spark.sql.types.DataTypes.DateType in Java. (Although often referred to as the "DataTypes.DateType method", DateType is actually a public static field of DataTypes that holds the singleton Spark SQL DATE type.) If you are wondering what DataTypes.DateType is for or how to use it in practice, the curated code examples below should help. You can also explore further usage examples of the enclosing class, org.apache.spark.sql.types.DataTypes.


Six code examples of DataTypes.DateType follow, ordered roughly by popularity.
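
Before turning to the examples, here is a minimal, self-contained sketch of where DataTypes.DateType usually shows up: as the type of a column in a hand-built schema. This snippet is illustrative only; the class and field names are invented, not drawn from the projects cited below.

import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

public class DateTypeSchemaSketch {
    public static StructType buildSchema() {
        // DateType columns hold day-precision values backed by java.sql.Date
        return new StructType(new StructField[] {
                new StructField("id", DataTypes.LongType, false, Metadata.empty()),
                new StructField("event_date", DataTypes.DateType, true, Metadata.empty())
        });
    }
}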

Example 1: getJdbcTypeString

import org.apache.spark.sql.types.DataTypes; // imports the classes this method depends on
public static String getJdbcTypeString(org.apache.spark.sql.types.DataType dataType, boolean isPrimaryKeyOrIndexKey, boolean isText) {
    int maxVarcharLength = isPrimaryKeyOrIndexKey ? 150 : 250;  // shorter VARCHAR for key columns keeps index entries small
    String sqlTypeForString = isText ? "TEXT" : String.format("VARCHAR(%s)", maxVarcharLength);
    if (dataType == DataTypes.TimestampType || dataType == DataTypes.DateType) {
        // both map to DATETIME, so DateType's day-only precision is not preserved
        return "DATETIME";
    } else if (dataType == DataTypes.StringType) {
        return sqlTypeForString;
    } else if (dataType == DataTypes.IntegerType) {
        return "INT";
    } else if (dataType == DataTypes.LongType) {
        return "BIGINT";
    } else if (dataType == DataTypes.FloatType) {
        return "FLOAT";
    } else if (dataType == DataTypes.DoubleType) {
        return "DOUBLE";
    } else if (dataType == DataTypes.BooleanType) {
        return "TINYINT";
    } else if (dataType == DataTypes.ByteType) {
        return "SMALLINT";
    } else if (dataType instanceof org.apache.spark.sql.types.DecimalType) {
        org.apache.spark.sql.types.DecimalType decimalType = (org.apache.spark.sql.types.DecimalType) dataType;
        return String.format("DECIMAL(%d,%d)", decimalType.precision(), decimalType.scale());
    } else {
        throw new RuntimeException(String.format("Unsupported property type for JDBC: %s", dataType));
    }
}
 
Developer: uber; Project: uberscriptquery; Lines: 27; Source: JdbcUtils.java
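
A quick usage illustration for the method above (hypothetical code, not part of uberscriptquery; it assumes it lives in the same class as getJdbcTypeString): turning a Spark schema into a MySQL-style CREATE TABLE statement.

public static String buildCreateTableSql(String tableName, org.apache.spark.sql.types.StructType schema, String primaryKey) {
    StringBuilder sql = new StringBuilder("CREATE TABLE " + tableName + " (");
    for (org.apache.spark.sql.types.StructField field : schema.fields()) {
        boolean isKey = field.name().equals(primaryKey);
        // reuse the mapping above; non-key string columns become VARCHAR(250)
        sql.append(field.name()).append(' ')
           .append(getJdbcTypeString(field.dataType(), isKey, false))
           .append(", ");
    }
    sql.append("PRIMARY KEY (").append(primaryKey).append("))");
    return sql.toString();
}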

Example 2: getDataTypeFromReturnType

import java.lang.reflect.Method;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes; // imports the classes this method depends on
private static DataType getDataTypeFromReturnType(Method method) {
    String typeName = method.getReturnType().getSimpleName();
    switch (typeName) {
    case "int":
    case "Integer":
        return DataTypes.IntegerType;
    case "long":
    case "Long":
        return DataTypes.LongType;
    case "float":
    case "Float":
        return DataTypes.FloatType;
    case "boolean":
    case "Boolean":
        return DataTypes.BooleanType;
    case "double":
    case "Double":
        return DataTypes.DoubleType;
    case "String":
        return DataTypes.StringType;
    case "Date":
    case "date":
        return DataTypes.DateType;
    case "Timestamp":
        return DataTypes.TimestampType;
    case "short":
    case "Short":
        return DataTypes.ShortType;
    case "Object":
        return DataTypes.BinaryType;
    default:
        log.debug("Using default for type [{}]", typeName);
        return DataTypes.BinaryType;
    }
}
 
Developer: jgperrin; Project: net.jgp.labs.spark.datasources; Lines: 36; Source: SparkBeanUtils.java
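
To show where this helper fits (an illustrative sketch, not jgperrin's actual code; it assumes access to getDataTypeFromReturnType in the same class), one can derive a StructField from every getter of a Java bean:

import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;

public static List<StructField> fieldsFromBean(Class<?> beanClass) {
    List<StructField> fields = new ArrayList<>();
    for (Method method : beanClass.getMethods()) {
        // treat every no-arg getX() except getClass() as a column
        if (method.getName().startsWith("get")
                && method.getParameterCount() == 0
                && !method.getName().equals("getClass")) {
            String name = method.getName().substring(3).toLowerCase();
            fields.add(new StructField(name, getDataTypeFromReturnType(method), true, Metadata.empty()));
        }
    }
    return fields;
}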

Example 3: convertSqlTypeToSparkSqlDataType

import java.sql.Types;
import org.apache.spark.sql.types.DataTypes; // imports the classes this method depends on
public static org.apache.spark.sql.types.DataType convertSqlTypeToSparkSqlDataType(int sqlType) {
    if (sqlType == Types.BOOLEAN) {
        return DataTypes.BooleanType;
    } else if (sqlType == Types.TINYINT) {
        return DataTypes.ByteType;
    } else if (sqlType == Types.SMALLINT) {
        return DataTypes.ShortType;
    } else if (sqlType == Types.INTEGER) {
        return DataTypes.IntegerType;
    } else if (sqlType == Types.BIGINT) {
        return DataTypes.LongType;
    } else if (sqlType == Types.DECIMAL) {
        return DataTypes.createDecimalType();
    } else if (sqlType == Types.FLOAT) {
        return DataTypes.FloatType;
    } else if (sqlType == Types.DOUBLE) {
        return DataTypes.DoubleType;
    } else if (sqlType == Types.DATE) {
        return DataTypes.DateType;
    } else if (sqlType == Types.TIME) {
        // java.sql.Time has no Spark SQL counterpart, so TIME is widened to a timestamp
        return DataTypes.TimestampType;
    } else if (sqlType == Types.TIMESTAMP) {
        return DataTypes.TimestampType;
    } else if (sqlType == Types.VARCHAR) {
        return DataTypes.StringType;
    } else {
        logger.warn(String.format("Using string for unsupported sql type %s", sqlType));
        return DataTypes.StringType;
    }
}
 
Developer: uber; Project: uberscriptquery; Lines: 31; Source: SparkUtils.java
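
A natural caller for this converter (again a sketch, written under the assumption that it sits next to convertSqlTypeToSparkSqlDataType, not uberscriptquery's actual code) walks JDBC ResultSetMetaData and produces a complete Spark schema:

import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

public static StructType schemaFromResultSetMetaData(ResultSetMetaData meta) throws SQLException {
    StructField[] fields = new StructField[meta.getColumnCount()];
    for (int i = 1; i <= meta.getColumnCount(); i++) {  // JDBC columns are 1-based
        fields[i - 1] = new StructField(
                meta.getColumnLabel(i),
                convertSqlTypeToSparkSqlDataType(meta.getColumnType(i)),
                meta.isNullable(i) != ResultSetMetaData.columnNoNulls,
                Metadata.empty());
    }
    return new StructType(fields);
}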

Example 4: testToRowValueDate

import java.sql.Date;
import org.joda.time.DateTime;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes; // imports the classes this test depends on
@Test
public void testToRowValueDate() {
  DataType field = DataTypes.DateType;

  DateTime dateObj = DateTime.parse("2017-01-01T00:00:00"); // Pass-thru the TZ
  Date sqlDate = new Date(dateObj.getMillis());

  assertEquals("Invalid Long", sqlDate, RowUtils.toRowValue(dateObj.getMillis(), field));
  assertEquals("Invalid String", sqlDate, RowUtils.toRowValue("2017-001", field)); // ISO Date format
  assertEquals("Invalid Date", sqlDate, RowUtils.toRowValue(dateObj.toDate(), field));
  assertEquals("Invalid DateTime", sqlDate, RowUtils.toRowValue(dateObj, field));

  thrown.expect(RuntimeException.class);
  thrown.expectMessage(JUnitMatchers.containsString("Invalid or unrecognized input format"));
  RowUtils.toRowValue(123, field);
}
 
Developer: cloudera-labs; Project: envelope; Lines: 17; Source: TestRowUtils.java
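
The test implies that, for a DateType field, RowUtils.toRowValue accepts epoch milliseconds, an ISO 8601 string, a java.util.Date, or a Joda DateTime, and normalizes each to java.sql.Date. Below is a minimal sketch of such a coercion, inferred from the assertions above; it is not envelope's actual implementation.

import java.sql.Date;
import org.joda.time.DateTime;

public static Date coerceToSqlDate(Object value) {
    if (value instanceof Long) {
        return new Date((Long) value);                                // epoch millis
    } else if (value instanceof String) {
        return new Date(DateTime.parse((String) value).getMillis());  // ISO 8601, incl. ordinal dates
    } else if (value instanceof java.util.Date) {
        return new Date(((java.util.Date) value).getTime());
    } else if (value instanceof DateTime) {
        return new Date(((DateTime) value).getMillis());
    }
    throw new RuntimeException("Invalid or unrecognized input format: " + value);
}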

Example 5: indexrSchemaToSparkSchema

import java.util.ArrayList;
import java.util.List;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes; // imports the classes this method depends on
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
public static List<StructField> indexrSchemaToSparkSchema(SegmentSchema schema) {
    List<StructField> fields = new ArrayList<>();
    for (ColumnSchema cs : schema.getColumns()) {
        DataType dataType;
        switch (cs.getSqlType()) {
            case INT:
                dataType = DataTypes.IntegerType;
                break;
            case BIGINT:
                dataType = DataTypes.LongType;
                break;
            case FLOAT:
                dataType = DataTypes.FloatType;
                break;
            case DOUBLE:
                dataType = DataTypes.DoubleType;
                break;
            case VARCHAR:
                dataType = DataTypes.StringType;
                break;
            case DATE:
                dataType = DataTypes.DateType;
                break;
            case DATETIME:
                dataType = DataTypes.TimestampType;
                break;
            default:
                throw new IllegalStateException("Unsupported type: " + cs.getSqlType());
        }
        fields.add(new StructField(cs.getName(), dataType, scala.Boolean.box(false), Metadata.empty()));  // every column is declared non-nullable
    }
    return fields;
}
 
Developer: shunfei; Project: indexr; Lines: 34; Source: IndexRUtil.java
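
The returned List<StructField> still has to be wrapped in a StructType before Spark can consume it; a minimal helper (illustrative, not part of indexr):

import java.util.List;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

public static StructType toStructType(List<StructField> fields) {
    return new StructType(fields.toArray(new StructField[0]));
}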

Example 6: parseDataType

import com.typesafe.config.Config;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes; // imports the classes this method depends on
private static DataType parseDataType(Config fieldsConfig) {
  String type = fieldsConfig.getString(FIELD_TYPE_CONFIG);
  switch (type) {
    case "string":
      return DataTypes.StringType;
    case "byte":
      return DataTypes.ByteType;
    case "short":
      return DataTypes.ShortType;
    case "int":
      return DataTypes.IntegerType;
    case "long":
      return DataTypes.LongType;
    case "float":
      return DataTypes.FloatType;
    case "double":
      return DataTypes.DoubleType;
    case "decimal":
      ConfigUtils.assertConfig(fieldsConfig, DECIMAL_SCALE_CONFIG);
      ConfigUtils.assertConfig(fieldsConfig, DECIMAL_PRECISION_CONFIG);
      // note: DataTypes.createDecimalType takes (precision, scale), in that order
      return DataTypes.createDecimalType(
              fieldsConfig.getInt(DECIMAL_PRECISION_CONFIG),
              fieldsConfig.getInt(DECIMAL_SCALE_CONFIG));
    case "boolean":
      return DataTypes.BooleanType;
    case "binary":
      return DataTypes.BinaryType;
    case "date":
      return DataTypes.DateType;
    case "timestamp":
      return DataTypes.TimestampType;
    case "array":
    case "map":
    case "struct":
      throw new RuntimeException("Schema check does not currently support complex types");
    default:
      throw new RuntimeException("Unknown type: " + type);
  }
}
 
Developer: cloudera-labs; Project: envelope; Lines: 40; Source: CheckSchemaDatasetRule.java
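
As a closing illustration (hypothetical: it assumes FIELD_TYPE_CONFIG resolves to the key "type" and the decimal keys to "scale" and "precision", which may not match envelope's real key names, and that it sits in the same class as parseDataType), a field config can be built inline with Typesafe Config and passed through the parser:

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public static void demoParseDataType() {
    // key names below are assumptions for this sketch, not envelope's documented config
    Config dateField = ConfigFactory.parseString("type = date");
    Config decimalField = ConfigFactory.parseString("type = decimal, scale = 2, precision = 10");
    System.out.println(parseDataType(dateField));     // DateType
    System.out.println(parseDataType(decimalField));  // DecimalType(10,2)
}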


Note: The org.apache.spark.sql.types.DataTypes.DateType examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code fragments come from open-source projects and their copyright remains with the original authors; consult each project's license before distributing or reusing the code. Do not reproduce this compilation without permission.