

Java DataTypes.createDecimalType Method Code Examples

This article collects typical usage examples of the Java method org.apache.spark.sql.types.DataTypes.createDecimalType. If you are wondering what DataTypes.createDecimalType does and how it is used in practice, the curated examples below should help. You can also explore further usage examples of org.apache.spark.sql.types.DataTypes itself.


Eight code examples of the DataTypes.createDecimalType method are shown below, ordered by popularity.
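As a quick orientation before the examples, here is a minimal sketch using only the public Spark SQL API: createDecimalType() with no arguments returns the default DecimalType(10, 0), while createDecimalType(precision, scale) specifies both explicitly (precision comes first).

import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;

public class DecimalTypeSketch {
    public static void main(String[] args) {
        // No-arg form: Spark's default decimal, precision 10 and scale 0
        DataType defaultDecimal = DataTypes.createDecimalType();

        // Explicit form: precision first, then scale
        DataType customDecimal = DataTypes.createDecimalType(38, 18);

        System.out.println(defaultDecimal); // DecimalType(10,0)
        System.out.println(customDecimal);  // DecimalType(38,18)
    }
}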

Example 1: convertSqlTypeToSparkSqlDataType

import java.sql.Types;
import org.apache.spark.sql.types.DataTypes; // import the package/class the method depends on
public static org.apache.spark.sql.types.DataType convertSqlTypeToSparkSqlDataType(int sqlType) {
    if (sqlType == Types.BOOLEAN) {
        return DataTypes.BooleanType;
    } else if (sqlType == Types.TINYINT) {
        return DataTypes.ByteType;
    } else if (sqlType == Types.SMALLINT) {
        return DataTypes.ShortType;
    } else if (sqlType == Types.INTEGER) {
        return DataTypes.IntegerType;
    } else if (sqlType == Types.BIGINT) {
        return DataTypes.LongType;
    } else if (sqlType == Types.DECIMAL) {
        // Default decimal type: precision 10, scale 0
        return DataTypes.createDecimalType();
    } else if (sqlType == Types.FLOAT) {
        return DataTypes.FloatType;
    } else if (sqlType == Types.DOUBLE) {
        return DataTypes.DoubleType;
    } else if (sqlType == Types.DATE) {
        return DataTypes.DateType;
    } else if (sqlType == Types.TIME) {
        // Spark SQL has no dedicated time-of-day type, so TIME maps to TimestampType
        return DataTypes.TimestampType;
    } else if (sqlType == Types.TIMESTAMP) {
        return DataTypes.TimestampType;
    } else if (sqlType == Types.VARCHAR) {
        return DataTypes.StringType;
    } else {
        logger.warn(String.format("Using string for unsupported sql type %s", sqlType));
        return DataTypes.StringType;
    }
}
 
Developer ID: uber, Project: uberscriptquery, Lines: 31, Source: SparkUtils.java
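A natural use of this helper is building a Spark schema from JDBC metadata. The sketch below is illustrative wiring, not part of the original project; the ResultSetMetaData calls are standard JDBC.

import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

// Hypothetical caller: derive a Spark StructType from a JDBC result set's metadata
public static StructType schemaFromJdbcMetadata(ResultSetMetaData metadata) throws SQLException {
    List<StructField> fields = new ArrayList<>();
    for (int i = 1; i <= metadata.getColumnCount(); i++) { // JDBC column indexes are 1-based
        fields.add(DataTypes.createStructField(
                metadata.getColumnLabel(i),
                convertSqlTypeToSparkSqlDataType(metadata.getColumnType(i)),
                metadata.isNullable(i) != ResultSetMetaData.columnNoNulls));
    }
    return DataTypes.createStructType(fields);
}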

Example 2: testAgeRangeFloat

import org.apache.spark.sql.types.DataTypes; // import the package/class the method depends on
@Test
public void testAgeRangeFloat() {
  StructType schema = new StructType(new StructField[] {
      new StructField("name", DataTypes.StringType, false, Metadata.empty()),
      new StructField("nickname", DataTypes.StringType, false, Metadata.empty()),
      new StructField("age", DataTypes.FloatType, false, Metadata.empty()),
      new StructField("candycrushscore", DataTypes.createDecimalType(), false, Metadata.empty())
  });

  Map<String, Object> configMap = new HashMap<>();
  configMap.put("fields", Lists.newArrayList("age"));
  configMap.put("fieldtype", "float");
  configMap.put("range", Lists.newArrayList(0.1,105.0));
  Config config = ConfigFactory.parseMap(configMap);

  RangeRowRule rule = new RangeRowRule();
  rule.configure("agerange", config);

  Row row1 = new RowWithSchema(schema, "Ian", "Ian", 34.0f, new BigDecimal("0.00"));
  assertTrue("Row should pass rule", rule.check(row1));

  Row row2 = new RowWithSchema(schema, "Webster1", "Websta1", 110.0f, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row2));

  Row row3 = new RowWithSchema(schema, "", "Ian1", 110.0f, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row3));

  Row row4 = new RowWithSchema(schema, "First Last", "Ian Last", 100.0f, new BigDecimal("450.10"));
  assertTrue("Row should pass rule", rule.check(row4));
}
 
Developer ID: cloudera-labs, Project: envelope, Lines: 31, Source: TestRangeRowRule.java

Example 3: parseDataType

import org.apache.spark.sql.types.DataTypes; // import the package/class the method depends on
private static DataType parseDataType(Config fieldsConfig) {
  String type = fieldsConfig.getString(FIELD_TYPE_CONFIG);
  switch (type) {
    case "string":
      return DataTypes.StringType;
    case "byte":
      return DataTypes.ByteType;
    case "short":
      return DataTypes.ShortType;
    case "int":
      return DataTypes.IntegerType;
    case "long":
      return DataTypes.LongType;
    case "float":
      return DataTypes.FloatType;
    case "double":
      return DataTypes.DoubleType;
    case "decimal":
      ConfigUtils.assertConfig(fieldsConfig, DECIMAL_SCALE_CONFIG);
      ConfigUtils.assertConfig(fieldsConfig, DECIMAL_PRECISION_CONFIG);
      return DataTypes.createDecimalType(
              fieldsConfig.getInt(DECIMAL_SCALE_CONFIG),
              fieldsConfig.getInt(DECIMAL_PRECISION_CONFIG));
    case "boolean":
      return DataTypes.BooleanType;
    case "binary":
      return DataTypes.BinaryType;
    case "date":
      return DataTypes.DateType;
    case "timestamp":
      return DataTypes.TimestampType;
    case "array":
    case "map":
    case "struct":
      throw new RuntimeException("Schema check does not currently support complex types");
    default:
      throw new RuntimeException("Unknown type: " + type);
  }
}
 
Developer ID: cloudera-labs, Project: envelope, Lines: 40, Source: CheckSchemaDatasetRule.java
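For context, this method receives a Typesafe Config describing a single field. The key names below are assumptions for illustration only; the real names come from the project's FIELD_TYPE_CONFIG, DECIMAL_PRECISION_CONFIG, and DECIMAL_SCALE_CONFIG constants, which are defined elsewhere in the project.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import java.util.HashMap;
import java.util.Map;

// Hypothetical field config; actual key names are the project's constants
Map<String, Object> fieldMap = new HashMap<>();
fieldMap.put("type", "decimal");   // assumed key for FIELD_TYPE_CONFIG
fieldMap.put("precision", 10);     // assumed key for DECIMAL_PRECISION_CONFIG
fieldMap.put("scale", 2);          // assumed key for DECIMAL_SCALE_CONFIG
Config fieldsConfig = ConfigFactory.parseMap(fieldMap);

// parseDataType(fieldsConfig) would then return DecimalType(10,2)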

Example 4: testAgeRangeInt

import org.apache.spark.sql.types.DataTypes; // import the package/class the method depends on
@Test
public void testAgeRangeInt() {
  StructType schema = new StructType(new StructField[] {
      new StructField("name", DataTypes.StringType, false, Metadata.empty()),
      new StructField("nickname", DataTypes.StringType, false, Metadata.empty()),
      new StructField("age", DataTypes.IntegerType, false, Metadata.empty()),
      new StructField("candycrushscore", DataTypes.createDecimalType(), false, Metadata.empty())
  });

  Map<String, Object> configMap = new HashMap<>();
  configMap.put("fields", Lists.newArrayList("age"));
  configMap.put("fieldtype", "int");
  configMap.put("range", Lists.newArrayList(0,105));
  Config config = ConfigFactory.parseMap(configMap);

  RangeRowRule rule = new RangeRowRule();
  rule.configure("agerange", config);

  Row row1 = new RowWithSchema(schema, "Ian", "Ian", 34, new BigDecimal("0.00"));
  assertTrue("Row should pass rule", rule.check(row1));

  Row row2 = new RowWithSchema(schema, "Webster1", "Websta1", 110, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row2));

  Row row3 = new RowWithSchema(schema, "", "Ian1", 106, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row3));

  Row row4 = new RowWithSchema(schema, "First Last", "Ian Last", 105, new BigDecimal("450.10"));
  assertTrue("Row should pass rule", rule.check(row4));
}
 
Developer ID: cloudera-labs, Project: envelope, Lines: 31, Source: TestRangeRowRule.java

Example 5: testAgeRangeLong

import org.apache.spark.sql.types.DataTypes; // import the package/class the method depends on
@Test
public void testAgeRangeLong() {
  StructType schema = new StructType(new StructField[] {
      new StructField("name", DataTypes.StringType, false, Metadata.empty()),
      new StructField("nickname", DataTypes.StringType, false, Metadata.empty()),
      new StructField("age", DataTypes.LongType, false, Metadata.empty()),
      new StructField("candycrushscore", DataTypes.createDecimalType(), false, Metadata.empty())
  });

  Map<String, Object> configMap = new HashMap<>();
  configMap.put("fields", Lists.newArrayList("age"));
  configMap.put("range", Lists.newArrayList(0l,105l));
  Config config = ConfigFactory.parseMap(configMap);

  RangeRowRule rule = new RangeRowRule();
  rule.configure("agerange", config);

  Row row1 = new RowWithSchema(schema, "Ian", "Ian", 34L, new BigDecimal("0.00"));
  assertTrue("Row should pass rule", rule.check(row1));

  Row row2 = new RowWithSchema(schema, "Webster1", "Websta1", 110L, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row2));

  Row row3 = new RowWithSchema(schema, "", "Ian1", 110L, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row3));

  Row row4 = new RowWithSchema(schema, "First Last", "Ian Last", 100L, new BigDecimal("450.10"));
  assertTrue("Row should pass rule", rule.check(row4));
}
 
Developer ID: cloudera-labs, Project: envelope, Lines: 30, Source: TestRangeRowRule.java

Example 6: testAgeRangeDouble

import org.apache.spark.sql.types.DataTypes; // import the package/class the method depends on
@Test
public void testAgeRangeDouble() {
  StructType schema = new StructType(new StructField[] {
      new StructField("name", DataTypes.StringType, false, Metadata.empty()),
      new StructField("nickname", DataTypes.StringType, false, Metadata.empty()),
      new StructField("age", DataTypes.DoubleType, false, Metadata.empty()),
      new StructField("candycrushscore", DataTypes.createDecimalType(), false, Metadata.empty())
  });

  Map<String, Object> configMap = new HashMap<>();
  configMap.put("fields", Lists.newArrayList("age"));
  configMap.put("fieldtype", "float");
  configMap.put("range", Lists.newArrayList(0.1,105.0));
  Config config = ConfigFactory.parseMap(configMap);

  RangeRowRule rule = new RangeRowRule();
  rule.configure("agerange", config);

  Row row1 = new RowWithSchema(schema, "Ian", "Ian", 34.0, new BigDecimal("0.00"));
  assertTrue("Row should pass rule", rule.check(row1));

  Row row2 = new RowWithSchema(schema, "Webster1", "Websta1", 110.0, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row2));

  Row row3 = new RowWithSchema(schema, "", "Ian1", 110.0, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row3));

  Row row4 = new RowWithSchema(schema, "First Last", "Ian Last", 100.0, new BigDecimal("450.10"));
  assertTrue("Row should pass rule", rule.check(row4));
}
 
Developer ID: cloudera-labs, Project: envelope, Lines: 31, Source: TestRangeRowRule.java

Example 7: testAgeRangeDecimal

import org.apache.spark.sql.types.DataTypes; // import the package/class the method depends on
@Test
public void testAgeRangeDecimal() {
  StructType schema = new StructType(new StructField[] {
      new StructField("name", DataTypes.StringType, false, Metadata.empty()),
      new StructField("nickname", DataTypes.StringType, false, Metadata.empty()),
      new StructField("age", DataTypes.DoubleType, false, Metadata.empty()),
      new StructField("candycrushscore", DataTypes.createDecimalType(), false, Metadata.empty())
  });

  Map<String, Object> configMap = new HashMap<>();
  configMap.put("fields", Lists.newArrayList("candycrushscore"));
  configMap.put("fieldtype", "decimal");
  configMap.put("range", Lists.newArrayList("-1.56","400.45"));
  Config config = ConfigFactory.parseMap(configMap);

  RangeRowRule rule = new RangeRowRule();
  rule.configure("agerange", config);

  Row row1 = new RowWithSchema(schema, "Ian", "Ian", 34.0, new BigDecimal("-1.00"));
  assertTrue("Row should pass rule", rule.check(row1));

  Row row2 = new RowWithSchema(schema, "Webster1", "Websta1", 110.0, new BigDecimal("-1.57"));
  assertFalse("Row should not pass rule", rule.check(row2));

  Row row3 = new RowWithSchema(schema, "", "Ian1", 110.0, new BigDecimal("450.10"));
  assertFalse("Row should not pass rule", rule.check(row3));

  Row row4 = new RowWithSchema(schema, "First Last", "Ian Last", 100.0, new BigDecimal("400.45"));
  assertTrue("Row should pass rule", rule.check(row4));
}
 
Developer ID: cloudera-labs, Project: envelope, Lines: 31, Source: TestRangeRowRule.java

Example 8: testToRowValueDecimal

import org.apache.spark.sql.types.DataTypes; // import the package/class the method depends on
@Test
public void testToRowValueDecimal() {
  DataType defaultField = DataTypes.createDecimalType(); // precision 10, scale 0

  BigDecimal defaultDecimal = new BigDecimal("10");

  assertEquals("Invalid double", defaultDecimal, RowUtils.toRowValue(10.157D, defaultField));
  assertEquals("Invalid string", defaultDecimal, RowUtils.toRowValue("10.157", defaultField));
  assertEquals("Invalid BigDecimal", defaultDecimal, RowUtils.toRowValue(new BigDecimal("10"), defaultField));

  assertEquals("Invalid long", defaultDecimal, RowUtils.toRowValue(10L, defaultField));
  assertEquals("Invalid BigInteger", defaultDecimal, RowUtils.toRowValue(new BigInteger("10"), defaultField));

  assertEquals("Invalid precision", 2,
      ((BigDecimal) RowUtils.toRowValue("10.157", defaultField)).precision());
  assertEquals("Invalid scale", 0,
      ((BigDecimal) RowUtils.toRowValue("10.157", defaultField)).scale());

  try {
    RowUtils.toRowValue(ByteBuffer.allocate(1), defaultField);
    fail("Expected a RuntimeException for invalid type");
  } catch (RuntimeException e) {
    assertThat(e.getMessage(), JUnitMatchers.containsString("Invalid or unrecognized input format"));
  }

  DataType customField = DataTypes.createDecimalType(3, 2);

  BigDecimal customDecimal = new BigDecimal("1.23");

  assertEquals("Invalid double", customDecimal, RowUtils.toRowValue(1.23D, customField));
  assertEquals("Invalid string", customDecimal, RowUtils.toRowValue("1.23", customField));
  assertEquals("Invalid BigDecimal", customDecimal, RowUtils.toRowValue(new BigDecimal("1.23"), customField));

  assertEquals("Invalid long", customDecimal, RowUtils.toRowValue(123L, customField));
  assertEquals("Invalid BigInteger", customDecimal, RowUtils.toRowValue(new BigInteger("123"), customField));

  assertEquals("Invalid precision", 3,
      ((BigDecimal) RowUtils.toRowValue("1.23", customField)).precision());
  assertEquals("Invalid scale", 2,
      ((BigDecimal) RowUtils.toRowValue("1.23", customField)).scale());
}
 
Developer ID: cloudera-labs, Project: envelope, Lines: 42, Source: TestRowUtils.java


Note: The org.apache.spark.sql.types.DataTypes.createDecimalType examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers, and copyright in the source code remains with the original authors; consult each project's license before distributing or using the code. Do not reproduce without permission.