

Scala GenericMutableRow Class Code Examples

This article collects typical usage examples of org.apache.spark.sql.catalyst.expressions.GenericMutableRow in Scala. If you are wondering what the GenericMutableRow class is for, or how it is used in real code, the selected examples below should help.


Four code examples of the GenericMutableRow class are shown below, ordered by popularity.
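As background, GenericMutableRow is Catalyst's mutable implementation of InternalRow: you allocate it with a fixed number of fields and then set each field by ordinal. Note that this class belongs to the Spark 1.x Catalyst API; later Spark releases removed it in favor of GenericInternalRow/SpecificInternalRow, so the minimal sketch below assumes a Spark 1.x dependency.

import org.apache.spark.sql.catalyst.expressions.GenericMutableRow

// Allocate a 2-field mutable row, set the fields by ordinal, then read them back.
val row = new GenericMutableRow(2)
row.setDouble(0, 1.0)
row.setDouble(1, 2.0)
assert(row.numFields == 2)
assert(row.getDouble(0) == 1.0 && row.getDouble(1) == 2.0)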

Example 1: PointZMUDT

// Package declaration and imported dependencies
package com.esri.udt

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
import org.apache.spark.sql.types._


class PointZMUDT extends UserDefinedType[PointZMType] {

  override def sqlType: DataType = StructType(Seq(
    StructField("x", DoubleType, false),
    StructField("y", DoubleType, false),
    StructField("z", DoubleType, false),
    StructField("m", DoubleType, false)
  ))

  override def serialize(obj: Any): InternalRow = {
    obj match {
      case PointZMType(x, y, z, m) => {
        val row = new GenericMutableRow(4)
        row.setDouble(0, x)
        row.setDouble(1, y)
        row.setDouble(2, z)
        row.setDouble(3, m)
        row
      }
    }
  }

  override def deserialize(datum: Any): PointZMType = {
    datum match {
      case row: InternalRow => PointZMType(row.getDouble(0), row.getDouble(1), row.getDouble(2), row.getDouble(3))
    }
  }

  override def userClass: Class[PointZMType] = classOf[PointZMType]

  override def pyUDT: String = "com.esri.udt.PointZMUDT"

  override def typeName: String = "pointZM"

  override def equals(o: Any): Boolean = {
    o match {
      case v: PointZMUDT => true
      case _ => false
    }
  }

  // see [SPARK-8647]; this yields the required constant hash code without hard-coding a magic number.
  override def hashCode(): Int = classOf[PointZMUDT].getName.hashCode()

  override def asNullable: PointZMUDT = this

} 
Developer: mraad, Project: spark-gdb, Lines of code: 55, Source: PointZMUDT.scala
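To make the UDT discoverable by Spark SQL, the user class is normally tagged with the @SQLUserDefinedType annotation. PointZMType itself is not shown in this article; the sketch below assumes it is a plain case class defined in the same project.

import org.apache.spark.sql.types.SQLUserDefinedType

// Hypothetical definition of the user class paired with PointZMUDT.
@SQLUserDefinedType(udt = classOf[PointZMUDT])
case class PointZMType(x: Double, y: Double, z: Double, m: Double)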

Example 2: PointMUDT

// Package declaration and imported dependencies
package com.esri.udt

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
import org.apache.spark.sql.types._


class PointMUDT extends UserDefinedType[PointMType] {

  override def sqlType: DataType = StructType(Seq(
    StructField("x", DoubleType, false),
    StructField("y", DoubleType, false),
    StructField("m", DoubleType, false)
  ))

  override def serialize(obj: Any): InternalRow = {
    obj match {
      case PointMType(x, y, m) => {
        val row = new GenericMutableRow(3)
        row.setDouble(0, x)
        row.setDouble(1, y)
        row.setDouble(2, m)
        row
      }
    }
  }

  override def deserialize(datum: Any): PointMType = {
    datum match {
      case row: InternalRow => PointMType(row.getDouble(0), row.getDouble(1), row.getDouble(2))
    }
  }

  override def userClass: Class[PointMType] = classOf[PointMType]

  override def pyUDT: String = "com.esri.udt.PointMUDT"

  override def typeName: String = "pointM"

  override def equals(o: Any): Boolean = {
    o match {
      case v: PointMUDT => true
      case _ => false
    }
  }

  // see [SPARK-8647]; this yields the required constant hash code without hard-coding a magic number.
  override def hashCode(): Int = classOf[PointMUDT].getName.hashCode()

  override def asNullable: PointMUDT = this

} 
Developer: mraad, Project: spark-gdb, Lines of code: 53, Source: PointMUDT.scala
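The equals/hashCode overrides shared by all four UDTs make every instance of the same UDT class interchangeable, which is what Catalyst's schema comparison expects (see SPARK-8647). A quick check of that behavior, using only the class defined above:

// Any two instances of the same UDT compare equal and hash identically,
// so schemas built from different instances are treated as the same data type.
assert(new PointMUDT() == new PointMUDT())
assert(new PointMUDT().hashCode == new PointMUDT().hashCode)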

Example 3: PointZUDT

// Package declaration and imported dependencies
package com.esri.udt

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
import org.apache.spark.sql.types._


class PointZUDT extends UserDefinedType[PointZType] {

  override def sqlType: DataType = StructType(Seq(
    StructField("x", DoubleType, false),
    StructField("y", DoubleType, false),
    StructField("z", DoubleType, false)
  ))

  override def serialize(obj: Any): InternalRow = {
    obj match {
      case PointZType(x, y, z) => {
        val row = new GenericMutableRow(3)
        row.setDouble(0, x)
        row.setDouble(1, y)
        row.setDouble(2, z)
        row
      }
    }
  }

  override def deserialize(datum: Any): PointZType = {
    datum match {
      case row: InternalRow => PointZType(row.getDouble(0), row.getDouble(1), row.getDouble(2))
    }
  }

  override def userClass: Class[PointZType] = classOf[PointZType]

  override def pyUDT: String = "com.esri.udt.PointZUDT"

  override def typeName: String = "pointZ"

  override def equals(o: Any): Boolean = {
    o match {
      case v: PointZUDT => true
      case _ => false
    }
  }

  // see [SPARK-8647]; this yields the required constant hash code without hard-coding a magic number.
  override def hashCode(): Int = classOf[PointZUDT].getName.hashCode()

  override def asNullable: PointZUDT = this

} 
Developer: mraad, Project: spark-gdb, Lines of code: 53, Source: PointZUDT.scala

Example 4: PointUDT

// Package declaration and imported dependencies
package com.esri.udt

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.GenericMutableRow
import org.apache.spark.sql.types._


class PointUDT extends UserDefinedType[PointType] {

  override def sqlType: DataType = StructType(Seq(
    StructField("x", DoubleType, false),
    StructField("y", DoubleType, false)
  ))

  override def serialize(obj: Any): InternalRow = {
    obj match {
      case PointType(x, y) => {
        val row = new GenericMutableRow(2)
        row.setDouble(0, x)
        row.setDouble(1, y)
        row
      }
    }
  }

  override def deserialize(datum: Any): PointType = {
    datum match {
      case row: InternalRow => PointType(row.getDouble(0), row.getDouble(1))
    }
  }

  override def userClass: Class[PointType] = classOf[PointType]

  override def pyUDT: String = "com.esri.udt.PointUDT"

  override def typeName: String = "point"

  override def equals(o: Any): Boolean = {
    o match {
      case v: PointUDT => true
      case _ => false
    }
  }

  // see [SPARK-8647]; this yields the required constant hash code without hard-coding a magic number.
  override def hashCode(): Int = classOf[PointUDT].getName.hashCode()

  override def asNullable: PointUDT = this

} 
Developer: mraad, Project: spark-gdb, Lines of code: 51, Source: PointUDT.scala
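Finally, a minimal round-trip sketch for the simplest of the four UDTs. PointType is assumed to be a case class with fields x and y (its definition is not included in this article), so structural equality holds after deserialization.

// Serialize a point into an InternalRow and deserialize it back.
val udt = new PointUDT
val original = PointType(1.0, 2.0)
val restored = udt.deserialize(udt.serialize(original))
assert(restored == original)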


Note: The org.apache.spark.sql.catalyst.expressions.GenericMutableRow examples in this article were compiled from open-source code hosted on platforms such as GitHub/MSDocs, with snippets selected from projects contributed by their respective developers. Copyright of the source code remains with the original authors; consult the corresponding project's license before redistributing or reusing it. Please do not republish this article without permission.