This article collects typical usage examples of the Scala class org.apache.spark.sql.types.Metadata. If you are unsure what the Metadata class is for, or how to use it in practice, the curated examples below may help.
Nine code examples using the Metadata class are shown below, ordered by popularity.
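For orientation before the examples: Metadata is an immutable key-value map attached to a StructField, and it is normally built through Spark's MetadataBuilder. A minimal sketch (the key names here are invented for illustration):

import org.apache.spark.sql.types.{Metadata, MetadataBuilder}

val metadata: Metadata = new MetadataBuilder()
  .putString("comment", "x/y in map units") // hypothetical key
  .putDouble("xyScale", 10000.0)            // hypothetical key
  .build()
println(metadata.getString("comment"))      // prints: x/y in map units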
Example 1: FieldPointType
// Set the package name and import the dependent classes
package com.esri.gdb
import java.nio.ByteBuffer
import com.esri.udt.{PointType, PointUDT}
import org.apache.spark.sql.types.Metadata
object FieldPointType extends Serializable {
def apply(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
xyScale: Double,
metadata: Metadata) = {
new FieldPointType(name, nullValueAllowed, xOrig, yOrig, xyScale, metadata)
}
}
class FieldPointType(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
xyScale: Double,
metadata: Metadata)
extends FieldBytes(name, new PointUDT(), nullValueAllowed, metadata) {
override def readValue(byteBuffer: ByteBuffer, oid: Int) = {
val blob = getByteBuffer(byteBuffer)
blob.getVarUInt() // geomType
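// coordinates are stored as unsigned varints, offset by 1 and quantized;
// the lines below undo the quantization to recover map units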
val vx = blob.getVarUInt()
val vy = blob.getVarUInt()
val x = (vx - 1.0) / xyScale + xOrig
val y = (vy - 1.0) / xyScale + yOrig
new PointType(x, y)
}
}
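The reader inverts the quantization the writer presumably applied ((x - xOrig) * xyScale + 1). A quick worked check with assumed values, not taken from the source:

val xOrig = -180.0    // assumed origin
val xyScale = 10000.0 // assumed scale
val vx = 1800001L     // assumed stored varint
val x = (vx - 1.0) / xyScale + xOrig // == 0.0 degrees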
Example 2: FieldPointZType
// Set the package name and import the dependent classes
package com.esri.gdb
import java.nio.ByteBuffer
import com.esri.udt.{PointZType, PointZUDT}
import org.apache.spark.sql.types.Metadata
object FieldPointZType extends Serializable {
def apply(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
zOrig: Double,
xyScale: Double,
zScale: Double,
metadata: Metadata
) = {
new FieldPointZType(name, nullValueAllowed, xOrig, yOrig, zOrig, xyScale, zScale, metadata)
}
}
class FieldPointZType(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
zOrig: Double,
xyScale: Double,
zScale: Double,
metadata: Metadata)
extends FieldBytes(name, new PointZUDT(), nullValueAllowed, metadata) {
override def readValue(byteBuffer: ByteBuffer, oid: Int) = {
val blob = getByteBuffer(byteBuffer)
val geomType = blob.getVarUInt
val vx = blob.getVarUInt
val vy = blob.getVarUInt
val vz = blob.getVarUInt
val x = (vx - 1.0) / xyScale + xOrig
val y = (vy - 1.0) / xyScale + yOrig
val z = (vz - 1.0) / zScale + zOrig
new PointZType(x, y, z)
}
}
Example 3: FieldPoly
// Set the package name and import the dependent classes
package com.esri.gdb
import java.nio.ByteBuffer
import com.esri.core.geometry.MultiPath
import org.apache.spark.sql.types.{DataType, Metadata}
@deprecated("not used", "0.4")
abstract class FieldPoly(name: String,
dataType: DataType,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
xyScale: Double,
metadata: Metadata)
extends FieldBytes(name, dataType, nullValueAllowed, metadata) {
protected var dx = 0L
protected var dy = 0L
def addPath(byteBuffer: ByteBuffer, numCoordinates: Int, path: MultiPath) = {
0 until numCoordinates foreach (n => {
dx += byteBuffer.getVarInt
dy += byteBuffer.getVarInt
val x = dx / xyScale + xOrig
val y = dy / xyScale + yOrig
n match {
case 0 => path.startPath(x, y)
case _ => path.lineTo(x, y)
}
})
path
}
}
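Unlike the point readers, FieldPoly accumulates signed deltas: each vertex is stored as an offset from the previous one, which keeps the varints small. A self-contained sketch of the same delta decoding, with made-up deltas and xyScale = 1.0, xOrig = 0.0:

val deltas = Seq(10L, 5L, -3L) // assumed encoded x-deltas
var dx = 0L
val xs = deltas.map { d => dx += d; dx / 1.0 + 0.0 }
// xs == Seq(10.0, 15.0, 12.0)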
Example 4: FieldPolylineType
// Set the package name and import the dependent classes
package com.esri.gdb
import com.esri.udt.{PolylineType, PolylineUDT}
import org.apache.spark.sql.types.Metadata
object FieldPolylineType extends Serializable {
def apply(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
xyScale: Double,
metadata: Metadata) = {
new FieldPolylineType(name, nullValueAllowed, xOrig, yOrig, xyScale, metadata)
}
}
class FieldPolylineType(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
xyScale: Double,
metadata: Metadata)
extends FieldPoly2Type[PolylineType](name, new PolylineUDT(), nullValueAllowed, xOrig, yOrig, xyScale, metadata) {
override def createPolyType(xmin: Double, ymin: Double, xmax: Double, ymax: Double, xyNum: Array[Int], xyArr: Array[Double]): PolylineType = {
PolylineType(xmin, ymin, xmax, ymax, xyNum, xyArr)
}
}
Example 5: FieldPolygonType
// Set the package name and import the dependent classes
package com.esri.gdb
import com.esri.udt.{PolygonType, PolygonUDT}
import org.apache.spark.sql.types.Metadata
object FieldPolygonType extends Serializable {
def apply(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
xyScale: Double,
metadata: Metadata) = {
new FieldPolygonType(name, nullValueAllowed, xOrig, yOrig, xyScale, metadata)
}
}
class FieldPolygonType(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
xyScale: Double,
metadata: Metadata)
extends FieldPoly2Type[PolygonType](name, new PolygonUDT(), nullValueAllowed, xOrig, yOrig, xyScale, metadata) {
override def createPolyType(xmin: Double, ymin: Double, xmax: Double, ymax: Double, xyNum: Array[Int], xyArr: Array[Double]): PolygonType = {
PolygonType(xmin, ymin, xmax, ymax, xyNum, xyArr)
}
}
Example 6: FieldBytes
// Set the package name and import the dependent classes
package com.esri.gdb
import java.nio.ByteBuffer
import org.apache.spark.sql.types.{DataType, Metadata}
abstract class FieldBytes(name: String,
dataType: DataType,
nullValueAllowed: Boolean,
metadata: Metadata = Metadata.empty
)
extends Field(name, dataType, nullValueAllowed, metadata) {
protected var m_bytes = new Array[Byte](1024)
def getByteBuffer(byteBuffer: ByteBuffer) = {
val numBytes = fillVarBytes(byteBuffer)
ByteBuffer.wrap(m_bytes, 0, numBytes)
}
def fillVarBytes(byteBuffer: ByteBuffer) = {
val numBytes = byteBuffer.getVarUInt.toInt
if (numBytes > m_bytes.length) {
m_bytes = new Array[Byte](numBytes)
}
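// copy the next numBytes bytes from the source buffer into the reusable array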
0 until numBytes foreach {
m_bytes(_) = byteBuffer.get
}
numBytes
}
}
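Note that getVarUInt and getVarInt are not defined in these excerpts; they are presumably implicit ByteBuffer extensions defined elsewhere in the com.esri.gdb package. For orientation, a 7-bit little-endian base-128 ("varint") decoder for unsigned values would look roughly like this sketch (the name and placement are assumptions):

import java.nio.ByteBuffer

def getVarUInt(byteBuffer: ByteBuffer): Long = {
  var result = 0L
  var shift = 0
  var b = byteBuffer.get
  // the low 7 bits carry data; the high bit flags a continuation byte
  while ((b & 0x80) != 0) {
    result |= (b & 0x7FL) << shift
    shift += 7
    b = byteBuffer.get
  }
  result | ((b & 0x7FL) << shift)
}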
Example 7: FieldDateTime
// Set the package name and import the dependent classes
package com.esri.gdb
import java.nio.ByteBuffer
import java.sql.Timestamp
import org.apache.spark.sql.types.{Metadata, TimestampType}
class FieldDateTime(name: String, nullValueAllowed: Boolean, metadata: Metadata)
extends Field(name, TimestampType, nullValueAllowed, metadata) {
override def readValue(byteBuffer: ByteBuffer, oid: Int) = {
val numDays = byteBuffer.getDouble
// the field stores fractional days since 1899-12-30; 25569 days separate that epoch from the Unix epoch (1970-01-01)
val unixDays = numDays - 25569
val millis = (unixDays * 1000 * 60 * 60 * 24).ceil.toLong
new Timestamp(millis)
}
}
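A quick sanity check of the epoch arithmetic (illustrative values, not from the source): 25569 is the number of days between 1899-12-30 and 1970-01-01, so a stored value of 25570.5 is one and a half days past the Unix epoch.

val numDays = 25570.5
val millis = ((numDays - 25569) * 1000 * 60 * 60 * 24).ceil.toLong
// millis == 129600000L, i.e. 1970-01-02T12:00:00Z
// (java.sql.Timestamp renders it in the JVM's default time zone)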
Example 8: FieldPointMType
// Set the package name and import the dependent classes
package com.esri.gdb
import java.nio.ByteBuffer
import com.esri.udt.{PointMType, PointMUDT}
import org.apache.spark.sql.types.Metadata
object FieldPointMType extends Serializable {
def apply(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
mOrig: Double,
xyScale: Double,
mScale: Double,
metadata: Metadata
) = {
new FieldPointMType(name, nullValueAllowed, xOrig, yOrig, mOrig, xyScale, mScale, metadata)
}
}
class FieldPointMType(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
mOrig: Double,
xyScale: Double,
mScale: Double,
metadata: Metadata)
extends FieldBytes(name, new PointMUDT(), nullValueAllowed, metadata) {
override def readValue(byteBuffer: ByteBuffer, oid: Int) = {
val blob = getByteBuffer(byteBuffer)
val geomType = blob.getVarUInt()
val vx = blob.getVarUInt
val vy = blob.getVarUInt
val vm = blob.getVarUInt
val x = (vx - 1.0) / xyScale + xOrig
val y = (vy - 1.0) / xyScale + yOrig
val m = (vm - 1.0) / mScale + mOrig
new PointMType(x, y, m)
}
}
Example 9: FieldPolylineMType
// Set the package name and import the dependent classes
package com.esri.gdb
import com.esri.udt.{PolylineMType, PolylineMUDT}
import org.apache.spark.sql.types.Metadata
object FieldPolylineMType extends Serializable {
def apply(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
mOrig: Double,
xyScale: Double,
mScale: Double,
metadata: Metadata) = {
new FieldPolylineMType(name, nullValueAllowed, xOrig, yOrig, mOrig, xyScale, mScale, metadata)
}
}
class FieldPolylineMType(name: String,
nullValueAllowed: Boolean,
xOrig: Double,
yOrig: Double,
mOrig: Double,
xyScale: Double,
mScale: Double,
metadata: Metadata)
extends FieldPoly3Type[PolylineMType](name, new PolylineMUDT(), nullValueAllowed, xOrig, yOrig, mOrig, xyScale, mScale, metadata) {
override def createPolyMType(xmin: Double, ymin: Double, xmax: Double, ymax: Double, xyNum: Array[Int], xyArr: Array[Double]) = {
PolylineMType(xmin, ymin, xmax, ymax, xyNum, xyArr)
}
}
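These Field classes are internals of a file geodatabase data source for Spark. As a hedged end-to-end sketch, such a source is typically consumed like this; the format string matches the package above, but the option names and table name are assumptions, not confirmed by these excerpts:

val df = spark.read                  // spark: an existing SparkSession
  .format("com.esri.gdb")
  .option("path", "data/Test.gdb")   // assumed option: path to the .gdb folder
  .option("name", "Points")          // assumed option: table to read
  .load()
df.printSchema()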