

Scala FunctionRegistry Class Code Examples

This article collects typical usage examples of the Scala class org.apache.spark.sql.catalyst.analysis.FunctionRegistry. If you are wondering what the FunctionRegistry class does, or how and where to use it, the curated examples below should help.


Three code examples of the FunctionRegistry class are shown below, sorted by popularity by default.
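As a quick orientation before the examples, the sketch below shows the two core FunctionRegistry operations that all three examples build on: registering a FunctionBuilder under a name, and looking that name up to obtain an Expression. This is a minimal sketch assuming a Spark 2.x catalyst dependency; the function name "shout" and the choice of Upper are purely illustrative.

import org.apache.spark.sql.catalyst.analysis.SimpleFunctionRegistry
import org.apache.spark.sql.catalyst.expressions.{Expression, Literal, Upper}

// Register a builder under a name, then resolve the name into an Expression tree.
val registry = new SimpleFunctionRegistry
registry.registerFunction("shout", (args: Seq[Expression]) => Upper(args.head))
val shoutExpr = registry.lookupFunction("shout", Seq(Literal("hello")))  // Upper(Literal("hello"))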

Example 1: Functions

// Package declaration and imported dependencies
package org.apache.spark.sql.sparkline

import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.expressions.Expression
import org.sparkline.spark.udfs.{ByteSize, Contains, Contains_Interpreted, Contains2}

import scala.reflect.ClassTag

object Functions {

  // Builds the (name, (ExpressionInfo, FunctionBuilder)) entry for the Expression
  // subclass T via FunctionRegistry.expression, then registers it on the SQLContext.
  private def registerExpression[T <: Expression](sqlContext: SQLContext,
                                                  nm: String)(implicit cTag: ClassTag[T]): Unit = {
    val (fnName, (eInfo, fb)) = FunctionRegistry.expression[T](nm)
    sqlContext.functionRegistry.registerFunction(fnName, eInfo, fb)
  }

  def registerFunctions(implicit sqlContext : SQLContext) : Unit = {
    registerExpression[ByteSize](sqlContext, "bitmap_size")
    registerExpression[Contains](sqlContext, "bitmap_contains_basic")
    registerExpression[Contains2](sqlContext, "bitmap_contains")
    registerExpression[Contains_Interpreted](sqlContext, "bitmap_contains_i")
    //registerExpression[Bitmap](sqlContext, "bitmap")
  }

} 
Author: SparklineData, Project: spark-functions, Lines: 27, Source: Functions.scala
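A hedged usage sketch for this example: it assumes a live SQLContext and the sparkline UDF classes on the classpath; the table name bitmaps and the column bm below are purely illustrative.

import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.sparkline.Functions

def installSparklineUdfs(implicit sqlContext: SQLContext): Unit = {
  // Registers bitmap_size, bitmap_contains, bitmap_contains_basic and bitmap_contains_i.
  Functions.registerFunctions
  // Once registered, the expressions are callable from SQL, e.g.:
  // sqlContext.sql("SELECT bitmap_size(bm) FROM bitmaps")
}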

Example 2: XDSessionCatalog

// Package declaration and imported dependencies
package org.apache.spark.sql.crossdata.catalyst.catalog

import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.catalyst.CatalystConf
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.catalog.{ExternalCatalog, FunctionResourceLoader, GlobalTempViewManager, SessionCatalog}
import org.apache.spark.sql.crossdata.catalyst.catalog.temporary.XDTemporaryCatalog

// SessionCatalog subclass that additionally exposes Crossdata's temporary catalog;
// all other behaviour is delegated to the parent SessionCatalog constructor.
class XDSessionCatalog(
                        val temporaryCatalog: XDTemporaryCatalog,
                        externalCatalog: ExternalCatalog,
                        globalTempViewManager: GlobalTempViewManager,
                        functionResourceLoader: FunctionResourceLoader,
                        functionRegistry: FunctionRegistry,
                        conf: CatalystConf,
                        hadoopConf: Configuration) extends SessionCatalog(
  externalCatalog,
  globalTempViewManager,
  functionResourceLoader,
  functionRegistry,
  conf,
  hadoopConf
) 
Author: nagyistge, Project: crossdata-spark2, Lines: 25, Source: XDSessionCatalog.scala
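A hedged construction sketch (assuming Spark 2.1.x catalyst, which matches the six-argument SessionCatalog constructor used above); xdTempCatalog stands in for a concrete XDTemporaryCatalog implementation from crossdata-spark2 and is not constructed here.

import org.apache.hadoop.conf.Configuration
import org.apache.spark.sql.catalyst.SimpleCatalystConf
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.catalog.{FunctionResource, FunctionResourceLoader, GlobalTempViewManager, InMemoryCatalog}

val conf = SimpleCatalystConf(caseSensitiveAnalysis = true)
val externalCatalog = new InMemoryCatalog
val globalTempViews = new GlobalTempViewManager("global_temp")
val noOpLoader = new FunctionResourceLoader {
  override def loadResource(resource: FunctionResource): Unit = ()  // no-op resource loader
}

// xdTempCatalog: XDTemporaryCatalog would come from crossdata-spark2:
// val catalog = new XDSessionCatalog(xdTempCatalog, externalCatalog, globalTempViews,
//   noOpLoader, FunctionRegistry.builtin, conf, new Configuration())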

Example 3: Jython

// Package declaration and imported dependencies
package org.apache.spark.sql

import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.jython.UserDefinedJythonFunction

/**
  * Created by mariu_000 on 2016-11-09.
  */
object Jython {

  // Enriches UDFRegistration with registerJythonUDF; the underlying FunctionRegistry
  // is obtained reflectively from UDFRegistration's private functionRegistry field.
  implicit class JythonUDFRegistration(udfRegistration: UDFRegistration) extends Logging {

    private def functionRegistry: FunctionRegistry = {
      val field = this.udfRegistration.getClass.getDeclaredField("functionRegistry")
      field.setAccessible(true)
      field.get(this.udfRegistration).asInstanceOf[FunctionRegistry]
    }

    protected[sql] def registerJythonUDF(name: String, udf: UserDefinedJythonFunction): Unit = {
      log.debug(
        s"""
           | Registering new JythonUDF:
           | name: $name
           | dataType: ${udf.dataType}
     """.stripMargin)

      functionRegistry.registerFunction(name, udf.builder)
    }
  }
} 
Author: mariusvniekerk, Project: spark-jython-udf, Lines: 37, Source: Jython.scala
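A hedged usage sketch: because registerJythonUDF is protected[sql], the calling code must itself live in the org.apache.spark.sql package, and the UserDefinedJythonFunction value must come from the spark-jython-udf project (its construction is not shown; the function name my_jython_fn is illustrative).

package org.apache.spark.sql

import org.apache.spark.sql.jython.UserDefinedJythonFunction

object JythonUdfSetup {
  def install(spark: SparkSession, udf: UserDefinedJythonFunction): Unit = {
    import Jython._                                   // brings the implicit JythonUDFRegistration into scope
    spark.udf.registerJythonUDF("my_jython_fn", udf)  // registers via the reflected FunctionRegistry
  }
}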


Note: The org.apache.spark.sql.catalyst.analysis.FunctionRegistry class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective developers; copyright remains with the original authors, and any distribution or use should follow the corresponding project's license. Please do not republish without permission.