

Scala Progressable Class Code Examples

This article collects typical usage examples of org.apache.hadoop.util.Progressable in Scala. If you are wondering what the Scala Progressable class is for, how to use it, or what real-world usage looks like, the selected class examples below may help.


The following presents 2 code examples of the Progressable class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps surface better Scala code examples.

Example 1: BaseOutputFormat

// Package declaration and imported dependencies
package kr.acon.lib.io

import java.io.DataOutputStream

import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.hadoop.mapred.FileOutputFormat
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapred.RecordWriter
import org.apache.hadoop.util.Progressable
import org.apache.hadoop.util.ReflectionUtils

import it.unimi.dsi.fastutil.longs.LongOpenHashBigSet

abstract class BaseOutputFormat extends FileOutputFormat[Long, LongOpenHashBigSet] {
  @inline def getRecordWriter(out: DataOutputStream): RecordWriter[Long, LongOpenHashBigSet]

  @inline override def getRecordWriter(ignored: FileSystem,
                               job: JobConf,
                               name: String,
                               progress: Progressable) = {
    val isCompressed = FileOutputFormat.getCompressOutput(job)
    if (!isCompressed) {
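      // Uncompressed output: create the task output file and hand the raw stream to the subclass.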
      val file = FileOutputFormat.getTaskOutputPath(job, name)
      val fs = file.getFileSystem(job)
      val fileOut = fs.create(file, progress)
      getRecordWriter(fileOut)
    } else {
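      // Compressed output: look up the configured codec (GzipCodec by default),
      // append its file extension, and wrap the stream with the codec.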
      val codecClass = FileOutputFormat.getOutputCompressorClass(job, classOf[GzipCodec])
      val codec = ReflectionUtils.newInstance(codecClass, job)
      val file = FileOutputFormat.getTaskOutputPath(job, name + codec.getDefaultExtension())
      val fs = file.getFileSystem(job)
      val fileOut = fs.create(file, progress)
      val fileOutWithCodec = new DataOutputStream(codec.createOutputStream(fileOut))
      getRecordWriter(fileOutWithCodec)
    }
  }
} 
Author: chan150, Project: TrillionG, Lines: 39, Source: BaseOutputFormat.scala
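
BaseOutputFormat keeps the compression plumbing in one place and leaves only the single-argument getRecordWriter(out: DataOutputStream) for subclasses to implement. The sketch below shows one possible concrete subclass that writes each (vertex id, adjacency set) pair as a plain text line; the class name and output layout are illustrative assumptions, not code from the TrillionG project.

package kr.acon.lib.io

import java.io.DataOutputStream

import org.apache.hadoop.mapred.RecordWriter
import org.apache.hadoop.mapred.Reporter

import it.unimi.dsi.fastutil.longs.LongOpenHashBigSet

// Hypothetical subclass: one text line per key, followed by its adjacency set.
class TextAdjacencyOutputFormat extends BaseOutputFormat {
  @inline override def getRecordWriter(out: DataOutputStream) =
    new RecordWriter[Long, LongOpenHashBigSet] {
      override def write(key: Long, value: LongOpenHashBigSet) {
        val sb = new StringBuilder
        sb.append(key)
        val iter = value.iterator()
        while (iter.hasNext())
          sb.append(' ').append(iter.nextLong())
        sb.append('\n')
        // `out` is already codec-wrapped by BaseOutputFormat when compression is enabled.
        out.write(sb.toString.getBytes("UTF-8"))
      }

      override def close(reporter: Reporter) {
        out.close()
      }
    }
}

A job would then select this format like any other mapred OutputFormat, for example via JobConf.setOutputFormat(classOf[TextAdjacencyOutputFormat]).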

Example 2: ByteArrayOutputFormat

// Package declaration and imported dependencies
package org.broadinstitute.hail.io.hadoop

import java.io.{DataOutputStream, IOException}
import org.apache.hadoop.fs._
import org.apache.hadoop.io._
import org.apache.hadoop.mapred._
import org.apache.hadoop.util.Progressable

class ByteArrayOutputFormat extends FileOutputFormat[NullWritable, BytesOnlyWritable] {

  class ByteArrayRecordWriter(out: DataOutputStream) extends RecordWriter[NullWritable, BytesOnlyWritable] {
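    // Ignores the key and streams each value's raw bytes straight to the output file.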

    def write(key: NullWritable, value: BytesOnlyWritable) {
      if (value != null)
        value.write(out)
    }

    def close(reporter: Reporter) {
      out.close()
    }
  }

  override def getRecordWriter(ignored: FileSystem, job: JobConf,
    name: String, progress: Progressable): RecordWriter[NullWritable, BytesOnlyWritable] = {
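    // No compression handling here: always create the plain task output file.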
    val file: Path = FileOutputFormat.getTaskOutputPath(job, name)
    val fs: FileSystem = file.getFileSystem(job)
    val fileOut: FSDataOutputStream = fs.create(file, progress)
    new ByteArrayRecordWriter(fileOut)
  }
} 
Author: Sun-shan, Project: Hail_V2, Lines: 31, Source: ByteArrayOutputFormat.scala
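
A RecordWriter for this format is normally created by the framework rather than by hand. With Spark's legacy mapred API that happens through PairRDDFunctions.saveAsHadoopFile, as in the sketch below; the SparkContext setup, the output path, and especially the BytesOnlyWritable(Array[Byte]) constructor are assumptions for illustration and should be checked against the Hail_V2 sources.

import org.apache.hadoop.io.NullWritable
import org.apache.spark.{SparkConf, SparkContext}

import org.broadinstitute.hail.io.hadoop.{ByteArrayOutputFormat, BytesOnlyWritable}

// Hypothetical driver: dump an RDD of raw byte arrays through ByteArrayOutputFormat.
object WriteBytesExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("write-bytes"))

    val records = sc.parallelize(Seq("foo\n", "bar\n").map(_.getBytes("UTF-8")))

    records
      .map(b => (NullWritable.get(), new BytesOnlyWritable(b))) // constructor assumed to take Array[Byte]
      .saveAsHadoopFile(
        "/tmp/bytes-out",                 // example output path
        classOf[NullWritable],
        classOf[BytesOnlyWritable],
        classOf[ByteArrayOutputFormat])

    sc.stop()
  }
}

Each resulting part file contains exactly what BytesOnlyWritable.write emits for the values, since ByteArrayRecordWriter ignores the keys and adds no framing of its own.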


Note: The org.apache.hadoop.util.Progressable class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their original authors, and copyright remains with those authors; consult each project's license before distributing or using the code. Do not reproduce this article without permission.