This article collects typical usage examples of the Scala class org.apache.spark.sql.catalyst.analysis.FunctionRegistry. If you are wondering what the FunctionRegistry class does and how to use it, the curated class code examples here may help.
Three code examples of the FunctionRegistry class are shown below, sorted by popularity by default.
Example 1: Functions
// Package declaration and the classes this example depends on
package org.apache.spark.sql.sparkline

import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.expressions.Expression
import org.sparkline.spark.udfs.{ByteSize, Contains, Contains_Interpreted, Contains2}

import scala.reflect.ClassTag

object Functions {

  // Ask FunctionRegistry for the (name, (info, builder)) tuple it derives for the
  // expression class T, then register it on the session's own function registry.
  private def registerExpression[T <: Expression](sqlContext: SQLContext,
      nm: String)(implicit cTag: ClassTag[T]): Unit = {
    val (fnName, (eInfo, fb)) = FunctionRegistry.expression[T](nm)
    sqlContext.functionRegistry.registerFunction(fnName, eInfo, fb)
  }

  // Register the bitmap UDFs so they can be called from SQL by name.
  def registerFunctions(implicit sqlContext: SQLContext): Unit = {
    registerExpression[ByteSize](sqlContext, "bitmap_size")
    registerExpression[Contains](sqlContext, "bitmap_contains_basic")
    registerExpression[Contains2](sqlContext, "bitmap_contains")
    registerExpression[Contains_Interpreted](sqlContext, "bitmap_contains_i")
    // registerExpression[Bitmap](sqlContext, "bitmap")
  }
}
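A minimal usage sketch for this example. It assumes the sparkline UDF classes are on the classpath and that an already-running SparkContext named sc exists; the table and column in the query are illustrative only, not part of the original example.

// `sc` is a hypothetical, already-running SparkContext.
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.sparkline.Functions

implicit val sqlContext: SQLContext = new SQLContext(sc)

// registerFunctions picks up the SQLContext through its implicit parameter.
Functions.registerFunctions

// Once registered, the expressions are callable from SQL by the names above.
sqlContext.sql("SELECT bitmap_size(bm) FROM bitmaps").show()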
Example 2: XDSessionCatalog
// Package declaration and the classes this example depends on
package org.apache.spark.sql.crossdata.catalyst.catalog

import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.catalyst.CatalystConf
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.catalog.{ExternalCatalog, FunctionResourceLoader, GlobalTempViewManager, SessionCatalog}
import org.apache.spark.sql.crossdata.catalyst.catalog.temporary.XDTemporaryCatalog

// A SessionCatalog that additionally exposes Crossdata's temporary catalog
// while delegating everything else to the standard Spark SessionCatalog.
class XDSessionCatalog(
    val temporaryCatalog: XDTemporaryCatalog,
    externalCatalog: ExternalCatalog,
    globalTempViewManager: GlobalTempViewManager,
    functionResourceLoader: FunctionResourceLoader,
    functionRegistry: FunctionRegistry,
    conf: CatalystConf,
    hadoopConf: Configuration)
  extends SessionCatalog(
    externalCatalog,
    globalTempViewManager,
    functionResourceLoader,
    functionRegistry,
    conf,
    hadoopConf)
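A hedged construction sketch: since XDSessionCatalog simply forwards to the Spark 2.1-era SessionCatalog constructor, it can be wired from standard catalyst pieces. The XDTemporaryCatalog value is a Crossdata-specific placeholder here, and the other arguments are illustrative choices rather than what Crossdata actually passes.

import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.catalyst.SimpleCatalystConf
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.catalog.{FunctionResource, FunctionResourceLoader, GlobalTempViewManager, InMemoryCatalog}

// Placeholder: a real XDTemporaryCatalog implementation comes from Crossdata.
val tempCatalog: XDTemporaryCatalog = ???

// A no-op resource loader is enough for a sketch.
val resourceLoader = new FunctionResourceLoader {
  override def loadResource(resource: FunctionResource): Unit = ()
}

val catalog = new XDSessionCatalog(
  temporaryCatalog = tempCatalog,
  externalCatalog = new InMemoryCatalog,
  globalTempViewManager = new GlobalTempViewManager("global_temp"),
  functionResourceLoader = resourceLoader,
  functionRegistry = FunctionRegistry.builtin,  // real code would use a per-session copy
  conf = SimpleCatalystConf(caseSensitiveAnalysis = true),
  hadoopConf = new Configuration())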
Example 3: Jython
// Package declaration and the classes this example depends on
package org.apache.spark.sql

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.jython.UserDefinedJythonFunction

/**
 * Created by mariu_000 on 2016-11-09.
 */
object Jython {

  implicit class JythonUDFRegistration(udfRegistration: UDFRegistration) extends Logging {

    // UDFRegistration keeps its FunctionRegistry private, so reach it via reflection.
    private def functionRegistry: FunctionRegistry = {
      val field = this.udfRegistration.getClass.getDeclaredField("functionRegistry")
      field.setAccessible(true)
      field.get(this.udfRegistration).asInstanceOf[FunctionRegistry]
    }

    // Register a Jython-backed UDF directly on the underlying registry.
    protected[sql] def registerJythonUDF(name: String, udf: UserDefinedJythonFunction): Unit = {
      log.debug(
        s"""
           | Registering new JythonUDF:
           | name: $name
           | dataType: ${udf.dataType}
         """.stripMargin)
      functionRegistry.registerFunction(name, udf.builder)
    }
  }
}
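Because registerJythonUDF is protected[sql], any call site must itself live in the org.apache.spark.sql package. A hedged sketch of such a call site follows; the object name JythonUsageSketch and the function name "my_jython_fn" are hypothetical, and the UserDefinedJythonFunction value is passed in rather than constructed here.

package org.apache.spark.sql

import org.apache.spark.sql.Jython.JythonUDFRegistration
import org.apache.spark.sql.jython.UserDefinedJythonFunction

object JythonUsageSketch {
  def register(spark: SparkSession, udf: UserDefinedJythonFunction): Unit = {
    // The implicit class adds registerJythonUDF to spark.udf (a UDFRegistration).
    spark.udf.registerJythonUDF("my_jython_fn", udf)
  }
}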