本文整理汇总了Java中org.apache.spark.sql.types.DataTypes.createStructField方法的典型用法代码示例。如果您正苦于以下问题:Java DataTypes.createStructField方法的具体用法?Java DataTypes.createStructField怎么用?Java DataTypes.createStructField使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.spark.sql.types.DataTypes
的用法示例。
在下文中一共展示了DataTypes.createStructField方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: init
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
/**
 * Builds the Spark {@code StructField} for this output column.
 *
 * If the PMML output field declares no data type, the type is resolved from the
 * evaluator; when even that fails, the column falls back to string and values are
 * formatted at extraction time.
 */
@Override
public StructField init(Evaluator evaluator){
	OutputField outputField = getField();

	DataType resolved = outputField.getDataType();
	if(resolved == null){
		try {
			// Type is derivable from the model: values need no string formatting.
			resolved = OutputUtil.getDataType(outputField.getOutputField(), (ModelEvaluator<?>)evaluator);
			this.formatString = false;
		} catch(PMMLException pe){
			// Type cannot be determined up front: treat values as formatted strings.
			resolved = DataType.STRING;
			this.formatString = true;
		}
	}

	// Output columns are always produced, hence nullable = false.
	return DataTypes.createStructField(getColumnName(), SchemaUtil.translateDataType(resolved), false);
}
示例2: testSet
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSet() {
	// Schema with one string, one int and one float column.
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));
	Row original = new RowWithSchema(schema, "hello", 1, 2.0);

	// Overwrite two of the three columns by name, one call at a time.
	Row updated = RowUtils.set(original, "field2", 100);
	updated = RowUtils.set(updated, "field1", "world");

	// Width is unchanged; updated columns carry the new values, the rest are untouched.
	assertEquals(updated.length(), 3);
	assertEquals(RowUtils.get(updated, "field1"), "world");
	assertEquals(RowUtils.get(updated, "field2"), 100);
	assertEquals(RowUtils.get(updated, "field3"), 2.0);
}
示例3: testAppendWithSchema
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testAppendWithSchema() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));
	Row base = new RowWithSchema(schema, "hello", 1, 2.0);

	// Append two new columns, one per call.
	Row extended = RowUtils.append(base, "field4", DataTypes.BooleanType, true);
	extended = RowUtils.append(extended, "field5", DataTypes.StringType, "world");

	// Original values survive and the appended values are retrievable by name.
	assertEquals(extended.length(), 5);
	assertEquals(RowUtils.get(extended, "field1"), "hello");
	assertEquals(RowUtils.get(extended, "field2"), 1);
	assertEquals(RowUtils.get(extended, "field3"), 2.0);
	assertEquals(RowUtils.get(extended, "field4"), true);
	assertEquals(RowUtils.get(extended, "field5"), "world");
}
示例4: testAppendFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testAppendFields() {
	StructField f1 = DataTypes.createStructField("field1", DataTypes.StringType, true);
	StructField f2 = DataTypes.createStructField("field2", DataTypes.IntegerType, true);
	StructField f3 = DataTypes.createStructField("field3", DataTypes.FloatType, true);
	StructField f4 = DataTypes.createStructField("field4", DataTypes.BooleanType, true);
	StructField f5 = DataTypes.createStructField("field5", DataTypes.StringType, true);
	StructType base = DataTypes.createStructType(Lists.newArrayList(f1, f2, f3));

	// Appending keeps the existing fields in place and adds the new ones at the end.
	StructType extended = RowUtils.appendFields(base, Lists.newArrayList(f4, f5));
	assertEquals(extended.length(), 5);
	StructField[] fields = extended.fields();
	assertEquals(fields[0], f1);
	assertEquals(fields[1], f2);
	assertEquals(fields[2], f3);
	assertEquals(fields[3], f4);
	assertEquals(fields[4], f5);
}
示例5: testStructTypeFor
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testStructTypeFor() {
	// Field names and RowUtils type aliases, in matching order.
	List<String> names = Lists.newArrayList("field1", "field2", "field3", "field4", "field5", "field6");
	List<String> types = Lists.newArrayList("string", "float", "double", "int", "long", "boolean");
	StructType actual = RowUtils.structTypeFor(names, types);

	// Expected schema built directly through the Spark API; all columns nullable.
	StructType expected = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.FloatType, true),
			DataTypes.createStructField("field3", DataTypes.DoubleType, true),
			DataTypes.createStructField("field4", DataTypes.IntegerType, true),
			DataTypes.createStructField("field5", DataTypes.LongType, true),
			DataTypes.createStructField("field6", DataTypes.BooleanType, true)));

	assertEquals(actual, expected);
}
示例6: init
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
/**
 * Builds the Spark {@code StructField} for this target column. The PMML data type
 * is taken directly from the target field and translated to its Spark equivalent.
 */
@Override
public StructField init(Evaluator evaluator){
	// Target columns are always produced, hence nullable = false.
	DataType dataType = getField().getDataType();
	return DataTypes.createStructField(getColumnName(), SchemaUtil.translateDataType(dataType), false);
}
示例7: testSubsetSchemaSomeFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSubsetSchemaSomeFields() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));

	// Keep only the first and last columns; survivors retain their relative order.
	StructType subset = RowUtils.subsetSchema(schema, Lists.newArrayList("field1", "field3"));
	assertEquals(subset.fields().length, 2);
	assertEquals(subset.fields()[0].name(), "field1");
	assertEquals(subset.fields()[1].name(), "field3");
}
示例8: testCompareTimestamp
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testCompareTimestamp() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.LongType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));

	// "field2" is the timestamp; the other columns differ deliberately and must be ignored.
	Row earlier = new RowWithSchema(schema, "hello", 1L, 2.0);
	Row later = new RowWithSchema(schema, "hello", 10L, -2.0);
	Row sameTime = new RowWithSchema(schema, "world", 1L, -2000.0);

	assertEquals(RowUtils.compareTimestamp(earlier, later, "field2"), -1);
	assertEquals(RowUtils.compareTimestamp(later, earlier, "field2"), 1);
	assertEquals(RowUtils.compareTimestamp(earlier, sameTime, "field2"), 0);
}
示例9: testSubsetSchemaNoFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSubsetSchemaNoFields() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));

	// Subsetting by an empty field list yields an empty schema.
	StructType subset = RowUtils.subsetSchema(schema, Lists.<String>newArrayList());
	assertEquals(subset.fields().length, 0);
}
示例10: testSubtractSchemaSomeFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSubtractSchemaSomeFields() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));

	// Removing the first and last columns leaves only the middle one.
	StructType remainder = RowUtils.subtractSchema(schema, Lists.newArrayList("field1", "field3"));
	assertEquals(remainder.fields().length, 1);
	assertEquals(remainder.fields()[0].name(), "field2");
}
示例11: testSubtractSchemaAllFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSubtractSchemaAllFields() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));

	// Subtracting every column yields an empty schema.
	StructType remainder = RowUtils.subtractSchema(schema, Lists.newArrayList("field1", "field2", "field3"));
	assertEquals(remainder.fields().length, 0);
}
示例12: testSubtractSchemaNoFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSubtractSchemaNoFields() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));

	// Subtracting nothing returns the full schema with field order intact.
	StructType remainder = RowUtils.subtractSchema(schema, Lists.<String>newArrayList());
	StructField[] fields = remainder.fields();
	assertEquals(fields.length, 3);
	assertEquals(fields[0].name(), "field1");
	assertEquals(fields[1].name(), "field2");
	assertEquals(fields[2].name(), "field3");
}
示例13: testSubsetRowAllFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSubsetRowAllFields() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));
	Row full = new RowWithSchema(schema, "hello", 1, 2.0);

	// Subsetting by the row's own schema is an identity operation.
	assertEquals(full, RowUtils.subsetRow(full, schema));
}
示例14: testSubsetRowSomeFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSubsetRowSomeFields() {
	StructField f1 = DataTypes.createStructField("field1", DataTypes.StringType, true);
	StructField f2 = DataTypes.createStructField("field2", DataTypes.IntegerType, true);
	StructField f3 = DataTypes.createStructField("field3", DataTypes.FloatType, true);
	StructType fullSchema = DataTypes.createStructType(Lists.newArrayList(f1, f2, f3));
	Row full = new RowWithSchema(fullSchema, "hello", 1, 2.0);

	// Project onto the first and third columns only.
	StructType narrowSchema = DataTypes.createStructType(Lists.newArrayList(f1, f3));
	Row narrow = RowUtils.subsetRow(full, narrowSchema);

	// The dropped middle value is gone; the kept values stay in schema order.
	assertEquals(narrow.length(), 2);
	assertEquals(narrow.get(0), "hello");
	assertEquals(narrow.get(1), 2.0);
}
示例15: testSubsetRowNoFields
import org.apache.spark.sql.types.DataTypes; //导入方法依赖的package包/类
@Test
public void testSubsetRowNoFields() {
	StructType schema = DataTypes.createStructType(Lists.newArrayList(
			DataTypes.createStructField("field1", DataTypes.StringType, true),
			DataTypes.createStructField("field2", DataTypes.IntegerType, true),
			DataTypes.createStructField("field3", DataTypes.FloatType, true)));
	Row full = new RowWithSchema(schema, "hello", 1, 2.0);

	// Projecting onto an empty schema produces a zero-length row.
	StructType emptySchema = DataTypes.createStructType(Lists.<StructField>newArrayList());
	Row empty = RowUtils.subsetRow(full, emptySchema);
	assertEquals(empty.length(), 0);
}