Scala DataOutputStream Class Code Examples

This article collects typical usage examples of java.io.DataOutputStream in Scala. If you have been wondering what exactly the DataOutputStream class does, how to use it, or what real-world code that uses it looks like, the curated examples below should help.


The sections below present 12 code examples of the DataOutputStream class, sorted by popularity.
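
Before the examples, a minimal refresher on what the class actually does: DataOutputStream writes Java primitives in big-endian binary form, and DataInputStream reads them back symmetrically. The snippet below is illustrative only and is not taken from any of the projects that follow.

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

object DataStreamRoundTrip extends App {
  val buffer = new ByteArrayOutputStream()
  val out = new DataOutputStream(buffer)
  out.writeInt(42)      // 4 bytes, big-endian
  out.writeLong(7L)     // 8 bytes
  out.writeUTF("hello") // 2-byte length prefix + modified UTF-8
  out.close()

  val in = new DataInputStream(new ByteArrayInputStream(buffer.toByteArray))
  println(in.readInt())  // 42
  println(in.readLong()) // 7
  println(in.readUTF())  // hello
}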

Example 1: CSR4OutputFormat

// Package declaration and imported dependencies
package kr.acon.lib.io

import java.io.DataOutputStream

import kr.acon.lib.io.recordwriter.CSR4RecordWriter
import kr.acon.lib.io.recordwriter.CSR6RecordWriter
import kr.acon.lib.io.recordwriter.CSR8RecordWriter

class CSR4OutputFormat
    extends BaseOutputFormat {
  @inline final override def getRecordWriter(out: DataOutputStream) = new CSR4RecordWriter(out)
}

class CSR6OutputFormat
    extends BaseOutputFormat {
  @inline final override def getRecordWriter(out: DataOutputStream) = new CSR6RecordWriter(out)
}

class CSR8OutputFormat
    extends BaseOutputFormat {
  @inline final override def getRecordWriter(out: DataOutputStream) = new CSR8RecordWriter(out)
} 
Author: chan150, Project: TrillionG, Lines: 23, Source: CSROutputFormat.scala

Example 2: ADJ4OutputFormat

// Package declaration and imported dependencies
package kr.acon.lib.io

import java.io.DataOutputStream

import kr.acon.lib.io.recordwriter.ADJ6RecordWriter
import kr.acon.lib.io.recordwriter.ADJ8RecordWriter
import kr.acon.lib.io.recordwriter.ADJ4RecordWriter

class ADJ4OutputFormat
    extends BaseOutputFormat {
  @inline final override def getRecordWriter(out: DataOutputStream) = new ADJ4RecordWriter(out)
}

class ADJ6OutputFormat
    extends BaseOutputFormat {
  @inline final override def getRecordWriter(out: DataOutputStream) = new ADJ6RecordWriter(out)
}

class ADJ8OutputFormat
    extends BaseOutputFormat {
  @inline final override def getRecordWriter(out: DataOutputStream) = new ADJ8RecordWriter(out)
} 
Author: chan150, Project: TrillionG, Lines: 23, Source: ADJOutputFormat.scala

Example 3: BaseOutputFormat

// Package declaration and imported dependencies
package kr.acon.lib.io

import java.io.DataOutputStream

import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.io.compress.GzipCodec
import org.apache.hadoop.mapred.FileOutputFormat
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapred.RecordWriter
import org.apache.hadoop.util.Progressable
import org.apache.hadoop.util.ReflectionUtils

import it.unimi.dsi.fastutil.longs.LongOpenHashBigSet

abstract class BaseOutputFormat extends FileOutputFormat[Long, LongOpenHashBigSet] {
  @inline def getRecordWriter(out: DataOutputStream): RecordWriter[Long, LongOpenHashBigSet]

  @inline override def getRecordWriter(ignored: FileSystem,
                               job: JobConf,
                               name: String,
                               progress: Progressable) = {
    val isCompressed = FileOutputFormat.getCompressOutput(job)
    if (!isCompressed) {
      val file = FileOutputFormat.getTaskOutputPath(job, name)
      val fs = file.getFileSystem(job)
      val fileOut = fs.create(file, progress)
      getRecordWriter(fileOut)
    } else {
      val codecClass = FileOutputFormat.getOutputCompressorClass(job, classOf[GzipCodec])
      val codec = ReflectionUtils.newInstance(codecClass, job)
      val file = FileOutputFormat.getTaskOutputPath(job, name + codec.getDefaultExtension())
      val fs = file.getFileSystem(job)
      val fileOut = fs.create(file, progress)
      val fileOutWithCodec = new DataOutputStream(codec.createOutputStream(fileOut))
      getRecordWriter(fileOutWithCodec)
    }
  }
} 
Author: chan150, Project: TrillionG, Lines: 39, Source: BaseOutputFormat.scala
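
The formats in Examples 1 and 2 plus this base class form a small hierarchy: BaseOutputFormat handles task-file creation and optional compression, wrapping the compressed stream back into a DataOutputStream, while each CSR*/ADJ* subclass only picks a RecordWriter. A hypothetical way to drive one of them from Spark's legacy mapred API (the driver code, RDD shape, and output path are assumptions, not taken from TrillionG):

import it.unimi.dsi.fastutil.longs.LongOpenHashBigSet
import kr.acon.lib.io.CSR4OutputFormat
import org.apache.spark.{SparkConf, SparkContext}

object SaveAdjacencySketch extends App {
  val sc = new SparkContext(new SparkConf().setAppName("csr-sketch").setMaster("local[*]"))

  // build a neighbour set per vertex
  def neighbours(ids: Long*): LongOpenHashBigSet = {
    val set = new LongOpenHashBigSet()
    ids.foreach(set.add)
    set
  }

  val graph = sc.parallelize(Seq(1L -> neighbours(2L, 3L), 2L -> neighbours(3L)))

  // saveAsHadoopFile hands each (Long, LongOpenHashBigSet) pair to the CSR4RecordWriter
  graph.saveAsHadoopFile("/tmp/graph-csr4",
    classOf[Long], classOf[LongOpenHashBigSet], classOf[CSR4OutputFormat])

  sc.stop()
}

Setting mapred.output.compress=true in the job configuration would route writes through the codec branch of getRecordWriter instead.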

Example 4: WritableSerializer

// Package declaration and imported dependencies
package org.hammerlab.hadoop.kryo

import java.io.{ DataInputStream, DataOutputStream }

import com.esotericsoftware.kryo.io.{ Input, Output }
import com.esotericsoftware.kryo.{ Kryo, Serializer }
import org.apache.hadoop.io.Writable

class WritableSerializer[T <: Writable](ctorArgs: Any*) extends Serializer[T] {
  override def read(kryo: Kryo, input: Input, clz: Class[T]): T = {
    val t = clz.newInstance()
    t.readFields(new DataInputStream(input))
    t
  }

  override def write(kryo: Kryo, output: Output, t: T): Unit = {
    t.write(new DataOutputStream(output))
  }
} 
Author: hammerlab, Project: spark-util, Lines: 20, Source: WritableSerializer.scala
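
A round-trip sketch for the serializer. The registration below is illustrative rather than spark-util's own setup; note that read calls clz.newInstance(), so the target Writable needs a public no-arg constructor (Hadoop's LongWritable qualifies).

import java.io.ByteArrayOutputStream

import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import org.apache.hadoop.io.LongWritable
import org.hammerlab.hadoop.kryo.WritableSerializer

object WritableSerializerRoundTrip extends App {
  val kryo = new Kryo()
  kryo.register(classOf[LongWritable], new WritableSerializer[LongWritable])

  val buffer = new ByteArrayOutputStream()
  val output = new Output(buffer)
  kryo.writeObject(output, new LongWritable(42L))
  output.close()

  val restored = kryo.readObject(new Input(buffer.toByteArray), classOf[LongWritable])
  println(restored.get()) // 42
}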

Example 5: WeatherRequested

// Package declaration and imported dependencies
package zeroweather.message

case class WeatherRequested(countryCode: String, city: String)

object WeatherRequested {
  //TODO: Replace with case class codecs once the new release of msgpack4s is published
  import java.io.{ DataInputStream, DataOutputStream }

  import org.velvia.msgpack.RawStringCodecs._
  import org.velvia.msgpack._

  implicit object WeatherRequestedMsgPackCodec extends Codec[WeatherRequested] {
    def pack(out: DataOutputStream, item: WeatherRequested) = {
      out.write(0x01 | Format.MP_FIXARRAY)
      StringCodec.pack(out, item.countryCode)
      StringCodec.pack(out, item.city)
    }

    val unpackFuncMap: FastByteMap[WeatherRequestedMsgPackCodec.UnpackFunc] = FastByteMap[UnpackFunc](
      (0x01 | Format.MP_FIXARRAY).toByte -> { in: DataInputStream =>
        val countryCode = StringCodec.unpack(in)
        val city = StringCodec.unpack(in)

        WeatherRequested(countryCode, city)
      }
    )
  }
} 
Author: sbilinski, Project: zeroweather, Lines: 29, Source: WeatherRequested.scala
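
A round-trip sketch, assuming msgpack4s exposes the package-level pack/unpack helpers shown in its README (helper names and signatures may differ between versions); the implicit codec is picked up from the companion object:

import org.velvia.msgpack._
import zeroweather.message.WeatherRequested

object WeatherRequestedRoundTrip extends App {
  val bytes: Array[Byte] = pack(WeatherRequested("SE", "Stockholm"))
  val restored = unpack[WeatherRequested](bytes)
  println(restored) // WeatherRequested(SE,Stockholm)
}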

Example 6: Weather

// Package declaration and imported dependencies
package zeroweather.message

case class Weather(timestamp: Long, countryCode: String, city: String, temperatureInCelsius: BigDecimal)

object Weather {
  //TODO: Replace with case class codecs once the new release of msgpack4s is published
  import java.io.{ DataInputStream, DataOutputStream }

  import org.velvia.msgpack.RawStringCodecs._
  import org.velvia.msgpack.SimpleCodecs._
  import org.velvia.msgpack._

  implicit object WeatherMsgPackCodec extends Codec[zeroweather.message.Weather] {
    def pack(out: DataOutputStream, item: Weather) = {
      out.write(0x01 | Format.MP_FIXARRAY)
      LongCodec.pack(out, item.timestamp)
      StringCodec.pack(out, item.countryCode)
      StringCodec.pack(out, item.city)
      StringCodec.pack(out, item.temperatureInCelsius.toString)
    }

    val unpackFuncMap: FastByteMap[WeatherMsgPackCodec.UnpackFunc] = FastByteMap[UnpackFunc](
      (0x01 | Format.MP_FIXARRAY).toByte -> { in: DataInputStream =>
        val timestamp = LongCodec.unpack(in)
        val countryCode = StringCodec.unpack(in)
        val city = StringCodec.unpack(in)
        val celsius = BigDecimal(StringCodec.unpack(in))

        Weather(timestamp, countryCode, city, celsius)
      }
    )
  }
} 
Author: sbilinski, Project: zeroweather, Lines: 34, Source: Weather.scala
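
Two design choices worth noting in this codec and the one in Example 5: temperatureInCelsius travels as a string rather than a native msgpack float, so the BigDecimal survives the round trip without binary floating-point precision loss; and the header byte (0x01 | Format.MP_FIXARRAY) acts as a dispatch tag for unpackFuncMap rather than a faithful element count, which works fine between these two endpoints but could confuse a strict msgpack decoder that expects the low nibble of a fixarray header to equal the number of array elements.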

Example 7: ByteArrayOutputFormat

// Package declaration and imported dependencies
package org.broadinstitute.hail.io.hadoop

import java.io.{DataOutputStream, IOException}
import org.apache.hadoop.fs._
import org.apache.hadoop.io._
import org.apache.hadoop.mapred._
import org.apache.hadoop.util.Progressable

class ByteArrayOutputFormat extends FileOutputFormat[NullWritable, BytesOnlyWritable] {

  class ByteArrayRecordWriter(out: DataOutputStream) extends RecordWriter[NullWritable, BytesOnlyWritable] {

    def write(key: NullWritable, value: BytesOnlyWritable) {
      if (value != null)
        value.write(out)
    }

    def close(reporter: Reporter) {
      out.close()
    }
  }

  override def getRecordWriter(ignored: FileSystem, job: JobConf,
    name: String, progress: Progressable): RecordWriter[NullWritable, BytesOnlyWritable] = {
    val file: Path = FileOutputFormat.getTaskOutputPath(job, name)
    val fs: FileSystem = file.getFileSystem(job)
    val fileOut: FSDataOutputStream = fs.create(file, progress)
    new ByteArrayRecordWriter(fileOut)
  }
} 
Author: Sun-shan, Project: Hail_V2, Lines: 31, Source: ByteArrayOutputFormat.scala
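
A hypothetical Spark export through this format. The BytesOnlyWritable constructor is assumed to wrap a raw byte array (check the Hail sources for the real signature); everything else follows from the classes above.

import org.apache.hadoop.io.NullWritable
import org.apache.spark.{SparkConf, SparkContext}
import org.broadinstitute.hail.io.hadoop.{ByteArrayOutputFormat, BytesOnlyWritable}

object RawBytesExport extends App {
  val sc = new SparkContext(new SparkConf().setAppName("raw-bytes").setMaster("local[*]"))

  sc.parallelize(Seq("alpha\n", "beta\n"))
    .map(s => (NullWritable.get(), new BytesOnlyWritable(s.getBytes("UTF-8")))) // constructor is an assumption
    .saveAsHadoopFile("/tmp/raw-bytes",
      classOf[NullWritable], classOf[BytesOnlyWritable], classOf[ByteArrayOutputFormat])

  sc.stop()
}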

Example 8: TransactionGenerator

// Package declaration and imported dependencies
package com.bwsw.sj.transaction.generator.server

import java.io.{DataInputStream, DataOutputStream}
import java.net.Socket
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicLong}

import com.bwsw.sj.common.utils.TransactionGeneratorLiterals

class TransactionGenerator(socket: Socket, doesServerWork: AtomicBoolean) extends Runnable {
  private val counter = new AtomicInteger(0)
  private val currentMillis = new AtomicLong(0)
  private val inputStream = new DataInputStream(socket.getInputStream)
  private val outputStream = new DataOutputStream(socket.getOutputStream)
  private val scale = TransactionGeneratorLiterals.scale

  override def run(): Unit = {
    try {
      while (doesServerWork.get()) {
        if (isClientAvailable) {
          val id = generateID()
          send(id)
        } else {
          close()
          return
        }
      }
    } catch {
      case ex: Exception =>
        close()
    }
  }

  private def isClientAvailable = {
    val clientRequestStatus = inputStream.read()

    clientRequestStatus != -1
  }

  private def generateID() = this.synchronized {
    val now = System.currentTimeMillis()
    if (now - currentMillis.get > 0) {
      currentMillis.set(now)
      counter.set(0)
    }
    now * scale + counter.getAndIncrement()
  }

  private def send(id: Long) {
    outputStream.writeLong(id)
  }

  private def close() = {
    inputStream.close()
    outputStream.close()
    socket.close()
  }
} 
Author: bwsw, Project: sj-platform, Lines: 58, Source: TransactionGenerator.scala
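
generateID packs a millisecond timestamp and a per-millisecond counter into one Long (now * scale + counter), so IDs stay unique and roughly time-ordered as long as fewer than scale IDs are requested within a single millisecond. The wire protocol is minimal: the client sends any byte as a request, the server answers with one Long. A matching client sketch (host and port are hypothetical):

import java.io.{DataInputStream, DataOutputStream}
import java.net.Socket

object TransactionClientSketch extends App {
  val socket = new Socket("localhost", 8888) // hypothetical endpoint
  val out = new DataOutputStream(socket.getOutputStream)
  val in = new DataInputStream(socket.getInputStream)

  out.write(1) // any byte except EOF makes isClientAvailable return true
  out.flush()
  println(s"transaction id: ${in.readLong()}")

  socket.close()
}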

Example 9: ImmutableTest

// Package declaration and imported dependencies
package com.github.sorhus.webalytics.cruft

import java.io.{DataOutputStream, File, FileOutputStream, RandomAccessFile}
import java.nio.channels.FileChannel.MapMode

import org.roaringbitmap.RoaringBitmap
import org.roaringbitmap.buffer.ImmutableRoaringBitmap

object ImmutableTest extends App {

  val outFile = new File(args(0))
  outFile.createNewFile()
  val fos = new FileOutputStream(outFile)
  val dos = new DataOutputStream(fos)
  val x = RoaringBitmap.bitmapOf(1,3,5)
  x.runOptimize()
  x.serialize(dos)
  dos.close()

  val inFile = new RandomAccessFile(args(0), "r")
  val memoryMapped = inFile.getChannel.map(MapMode.READ_ONLY, 0, inFile.length())
  val bb = memoryMapped.slice()
  val bitset = new ImmutableRoaringBitmap(bb)

  println(ImmutableRoaringBitmap.and(bitset,bitset).getCardinality)
  println(ImmutableRoaringBitmap.andCardinality(bitset,bitset))

  inFile.close()
} 
Author: sorhus, Project: webalytics, Lines: 30, Source: ImmutableTest.scala
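
The point of the exercise: RoaringBitmap.serialize writes the portable RoaringBitmap format, and ImmutableRoaringBitmap can operate on that format directly through a ByteBuffer, so the bitmap in this example is queried straight out of the memory-mapped file without ever being deserialized onto the heap.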

Example 10: AMQContentHeader

// Package declaration and imported dependencies
package chana.mq.amqp.model

import chana.mq.amqp.method.UnknownClassOrMethodId
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.io.DataInputStream
import java.io.DataOutputStream
import java.io.IOException

object AMQContentHeader {

  def readFrom(payload: Array[Byte]): BasicProperties = {
    val in = new DataInputStream(new ByteArrayInputStream(payload))
    readFrom(in)
  }

  @throws(classOf[IOException])
  def readFrom(in: DataInputStream): BasicProperties = {
    in.readShort() match {
      case 60      => BasicProperties.readFrom(in)
      case classId => throw new UnknownClassOrMethodId(classId)
    }
  }
}
abstract class AMQContentHeader extends Cloneable {

  // classId is abstract here; concrete headers supply it (e.g. 60 for Basic, matching readFrom above)
  def classId: Int
  def weight: Short = 0
  def bodySize: Long

  @throws(classOf[IOException])
  def writeTo(out: DataOutputStream, bodySize: Long) {
    out.writeShort(weight)
    out.writeLong(bodySize)
    writePropertiesTo(new ContentHeaderPropertyWriter(new ValueWriter(out)))
  }

  @throws(classOf[IOException])
  def writePropertiesTo(writer: ContentHeaderPropertyWriter)

  @throws(classOf[IOException])
  def toFrame(channelNumber: Int, bodySize: Long): Frame = {
    val out = new ByteArrayOutputStream()
    val os = new DataOutputStream(out)
    os.writeShort(classId)
    writeTo(os, bodySize)
    os.flush()
    Frame(Frame.HEADER, channelNumber, out.toByteArray)
  }

  @throws(classOf[CloneNotSupportedException])
  override def clone(): AnyRef = super.clone()
} 
Author: qingmang-team, Project: chanamq, Lines: 54, Source: AMQContentHeader.scala
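
The wire layout produced by writeTo/toFrame is easy to verify with plain JDK streams. A hypothetical check, assuming the Frame case class exposes its payload bytes (property flags and values, written by ContentHeaderPropertyWriter, are skipped here):

import java.io.{ByteArrayInputStream, DataInputStream}

// payload: the byte array that toFrame placed in the frame body
def inspectHeader(payload: Array[Byte]): Unit = {
  val in = new DataInputStream(new ByteArrayInputStream(payload))
  println(s"classId  = ${in.readShort()}") // 60 for Basic content headers
  println(s"weight   = ${in.readShort()}") // always 0 in this model
  println(s"bodySize = ${in.readLong()}")
  // property flags and property values follow
}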

Example 11: PushRegistry

// Package declaration and imported dependencies
package org.lyranthe.prometheus.client

import java.io.DataOutputStream
import java.net.{HttpURLConnection, URL}

import org.lyranthe.prometheus.client.registry._

import scala.util.control.NonFatal

class PushRegistry(host: String,
                   port: Int,
                   job: String,
                   additionalLabels: Seq[(String, String)])
    extends DefaultRegistry {
  final private val url = {
    val extra =
      if (additionalLabels.isEmpty) ""
      else
        "/" + additionalLabels
          .flatMap(labels => Vector(labels._1, labels._2))
          .mkString("/")
    new URL("http", host, port, s"/metrics/job/$job$extra")
  }

  def push(): Either[Throwable, Boolean] = {
    val conn = url.openConnection().asInstanceOf[HttpURLConnection]
    try {
      conn.setRequestMethod("PUT")
      conn.setRequestProperty("Content-Type", "text/plain; version=0.0.4")
      conn.setConnectTimeout(10 * 1000) // 10s
      conn.setReadTimeout(10 * 1000)    // 10s
      conn.setDoOutput(true)
      conn.connect()
      val output = new DataOutputStream(conn.getOutputStream)
      output.write(TextFormat.output(collect()))
      output.flush()
      val responseCode = conn.getResponseCode
      output.close()

      Right(responseCode == 202)
    } catch {
      case NonFatal(t) => Left(t)
    } finally {
      conn.disconnect()
    }
  }
}

object PushRegistry {
  def apply(host: String, port: Int = 9091)(
      job: String,
      instance: Option[String],
      additionalLabels: (String, String)*): PushRegistry =
    new PushRegistry(host,
                     port,
                     job,
                     instance.map("instance" -> _).toSeq ++ additionalLabels)
} 
Author: fiadliel, Project: prometheus_client_scala, Lines: 59, Source: PushRegistry.scala
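
Usage follows directly from the companion object's apply (the port defaults to the Pushgateway's conventional 9091); note that push treats only HTTP 202 as success and surfaces any connection problem as a Left. The host below is hypothetical:

import org.lyranthe.prometheus.client.PushRegistry

object PushSketch extends App {
  val registry = PushRegistry("pushgateway.example.com")("batch_job", Some("worker-1"))

  registry.push() match {
    case Right(accepted) => println(s"pushgateway accepted: $accepted")
    case Left(error)     => println(s"push failed: ${error.getMessage}")
  }
}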

Example 12: StreamDotProductHandler

// Package declaration and imported dependencies
package com.workday.warp.handlers

import java.io.{DataOutputStream, InputStream, OutputStream}

import com.amazonaws.services.lambda.runtime.{Context, RequestHandler, RequestStreamHandler}
import com.workday.warp.models.{immutable, mutable}
import org.json4s.{DefaultFormats, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.pmw.tinylog.Logger

import scala.io.Source


object StreamDotProductHandler extends RequestStreamHandler {

  override def handleRequest(input: InputStream, output: OutputStream, context: Context): Unit = {
    implicit val formats = DefaultFormats
    val json: JValue = parse(Source.fromInputStream(input).mkString)
    input.close()

    Logger.info(s"invoked with $json")

    val request: immutable.DotProductRequest = json.extract[immutable.DotProductRequest]
    val result: Double = request.vectorA dot request.vectorB
    val response: String = compact(render("result" -> result))

    val dataOutput: DataOutputStream = new DataOutputStream(output)
    dataOutput.writeChars(response)
    dataOutput.close()
    output.close()
  }
} 
Author: tomnis, Project: scala-gradle-aws-lambda-example, Lines: 34, Source: Handlers.scala
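
Because RequestStreamHandler needs nothing but streams, the handler can be smoke-tested without a Lambda runtime. In the sketch below the JSON field names mirror the extraction above, but the exact wire shape of immutable.DotProductRequest is an assumption; Context is never dereferenced by this handler, so null is tolerable in a local test. Note that writeChars emits UTF-16 code units (high byte first), so the response must be decoded as UTF-16BE rather than UTF-8.

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import com.workday.warp.handlers.StreamDotProductHandler

object LocalInvoke extends App {
  val request = """{"vectorA":[1.0,2.0,3.0],"vectorB":[4.0,5.0,6.0]}""" // shape assumed
  val output = new ByteArrayOutputStream()

  StreamDotProductHandler.handleRequest(
    new ByteArrayInputStream(request.getBytes("UTF-8")), output, null)

  println(new String(output.toByteArray, "UTF-16BE"))
}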


Note: the java.io.DataOutputStream class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from community-contributed open-source projects; copyright remains with the original authors, and any use or redistribution should follow the corresponding project's license. Please do not republish without permission.