This article collects typical usage examples of the java.io.OutputStreamWriter class in Scala: what the class does, how it is used, and what real-world code built on it looks like. The curated examples below should help answer those questions.
The following shows 15 code examples of the OutputStreamWriter class, ordered by popularity.
Example 1: HDFS
// Package declaration and imported dependencies
package org.mireynol.util
import java.io.{BufferedInputStream, OutputStreamWriter}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.mutable.ListBuffer
import scala.io.Source
object HDFS {
def log : Logger = LoggerFactory.getLogger( HDFS.getClass )
val hadoop : FileSystem = {
val conf = new Configuration( )
conf.set( "fs.defaultFS", "hdfs://localhost:9000" )
FileSystem.get( conf )
}
def readAndMap( path : String, mapper : ( String ) => Unit ) = {
if ( hadoop.exists( new Path( path ) ) ) {
val is = new BufferedInputStream( hadoop.open( new Path( path ) ) )
Source.fromInputStream( is ).getLines( ).foreach( mapper )
}
else {
// TODO - error logic here
}
}
def write( filename : String, content : Iterator[ String ] ) = {
  val path = new Path( filename )
  val out = new OutputStreamWriter( hadoop.create( path, false ) )
  // Close the writer even if writing fails, so the HDFS lease is released.
  try {
    content.foreach( str => out.write( str + "\n" ) )
    out.flush( )
  } finally {
    out.close( )
  }
}
def ls( path : String ) : List[ String ] = {
val files = hadoop.listFiles( new Path( path ), false )
val filenames = ListBuffer[ String ]( )
while ( files.hasNext ) filenames += files.next( ).getPath( ).toString( )
filenames.toList
}
def rm( path : String, recursive : Boolean ) : Unit = {
if ( hadoop.exists( new Path( path ) ) ) {
println( "deleting file : " + path )
hadoop.delete( new Path( path ), recursive )
}
else {
println( "File/Directory" + path + " does not exist" )
log.warn( "File/Directory" + path + " does not exist" )
}
}
def cat( path : String ) = Source.fromInputStream( hadoop.open( new Path( path ) ) ).getLines( ).foreach( println )
}
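A minimal usage sketch of the helper above (the paths are illustrative; note the example hard-codes fs.defaultFS to hdfs://localhost:9000):

object HDFSDemo {
  def main( args : Array[ String ] ) : Unit = {
    HDFS.write( "/tmp/demo.txt", Iterator( "line one", "line two" ) )
    HDFS.ls( "/tmp" ).foreach( println )
    HDFS.readAndMap( "/tmp/demo.txt", line => println( line.toUpperCase ) )
    HDFS.rm( "/tmp/demo.txt", recursive = false )
  }
}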
Example 2: GZipServletResponseWrapper
// Package declaration and imported dependencies
package co.informatica.mvc.filters
import java.io.{IOException, OutputStreamWriter, PrintWriter}
import javax.servlet.ServletOutputStream
import javax.servlet.http.{HttpServletResponse, HttpServletResponseWrapper}
class GZipServletResponseWrapper(val response: HttpServletResponse) extends HttpServletResponseWrapper(response) {
private var gzipOutputStream: GZipServletOutputStream = null
private var printWriter: PrintWriter = null
def close(): Unit = { //PrintWriter.close does not throw exceptions.
//Hence no try-catch block.
if (this.printWriter != null) this.printWriter.close()
if (this.gzipOutputStream != null) this.gzipOutputStream.close()
}
override def flushBuffer(): Unit = { //PrintWriter.flush() does not throw exception
if (this.printWriter != null) this.printWriter.flush()
var exception1: IOException = null
try {
  if (this.gzipOutputStream != null) this.gzipOutputStream.flush()
} catch {
  case e: IOException =>
    exception1 = e
}
var exception2: IOException = null
try {
  super.flushBuffer()
} catch {
  case e: IOException =>
    exception2 = e
}
if (exception1 != null) throw exception1
if (exception2 != null) throw exception2
}
override def getOutputStream: ServletOutputStream = {
if (this.printWriter != null) throw new IllegalStateException("PrintWriter obtained already - cannot get OutputStream")
if (this.gzipOutputStream == null) this.gzipOutputStream = new GZipServletOutputStream(getResponse.getOutputStream)
this.gzipOutputStream
}
override def getWriter: PrintWriter = {
if (this.printWriter == null && this.gzipOutputStream != null) throw new IllegalStateException("OutputStream obtained already - cannot get PrintWriter")
if (this.printWriter == null) {
this.gzipOutputStream = new GZipServletOutputStream(getResponse.getOutputStream)
this.printWriter = new PrintWriter(new OutputStreamWriter(this.gzipOutputStream, getResponse.getCharacterEncoding))
}
this.printWriter
}
override def setContentLength(len: Int): Unit = {
//ignore, since content length of zipped content
//does not match content length of unzipped content.
}
}
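The GZipServletOutputStream used above is not part of this example. A minimal sketch of what it presumably looks like, assuming the Servlet 3.1 API (which requires isReady and setWriteListener):

import java.io.OutputStream
import java.util.zip.GZIPOutputStream
import javax.servlet.{ServletOutputStream, WriteListener}

class GZipServletOutputStream(out: OutputStream) extends ServletOutputStream {
  private val gzip = new GZIPOutputStream(out)
  override def write(b: Int): Unit = gzip.write(b)
  override def flush(): Unit = gzip.flush()
  override def close(): Unit = gzip.close()
  // Blocking streams are always ready; async I/O is not supported in this sketch.
  override def isReady: Boolean = true
  override def setWriteListener(listener: WriteListener): Unit = ()
}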
Example 3: SessionDataFileHDFSWriter
// Package declaration and imported dependencies
package com.malaska.spark.training.streaming.dstream.sessionization
import java.io.BufferedWriter
import java.io.FileWriter
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.conf.Configuration
import java.io.OutputStreamWriter
import org.apache.hadoop.fs.Path
import java.util.Random
object SessionDataFileHDFSWriter {
val eol = System.getProperty("line.separator")
def main(args: Array[String]) {
if (args.length < 5) {
  println("Usage: SessionDataFileWriter {tempDir} {distDir} {numberOfFiles} {numberOfEventsPerFile} {waitBetweenFiles}")
  return
}
val conf = new Configuration
conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"))
conf.addResource(new Path("/etc/hadoop/conf/mapred-site.xml"))
conf.addResource(new Path("/etc/hadoop/conf/hdfs-site.xml"))
val fs = FileSystem.get(conf)
val rootTempDir = args(0)
val rootDistDir = args(1)
val files = args(2).toInt
val loops = args(3).toInt
val waitBetweenFiles = args(4).toInt
val r = new Random
for (f <- 1 to files) {
val rootName = "/weblog." + System.currentTimeMillis()
val tmpPath = new Path(rootTempDir + rootName + ".tmp")
val writer = new BufferedWriter(new OutputStreamWriter(fs.create(tmpPath)))
print(f + ": [")
val randomLoops = loops + r.nextInt(loops)
for (i <- 1 to randomLoops) {
writer.write(SessionDataGenerator.getNextEvent + eol)
if (i%100 == 0) {
print(".")
}
}
println("]")
writer.close()
val distPath = new Path(rootDistDir + rootName + ".dat")
fs.rename(tmpPath, distPath)
Thread.sleep(waitBetweenFiles)
}
println("Done")
}
}
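SessionDataGenerator comes from elsewhere in the training project and is not shown. A hypothetical stub that would make the example compile and produce plausible input:

object SessionDataGenerator {
  private val r = new java.util.Random
  // Hypothetical: emit one fake weblog event per call.
  def getNextEvent: String =
    System.currentTimeMillis() + " 192.168.0." + r.nextInt(255) + " GET /page/" + r.nextInt(100)
}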
Example 4: ZipImplicits
// Package declaration and imported dependencies
package anchorman.docx
import anchorman.media.MediaFile
import java.io.OutputStreamWriter
import java.util.zip.{ZipEntry, ZipOutputStream}
import scala.xml.{MinimizeMode, NodeSeq, XML}
object ZipImplicits {
implicit class ZipOutputStreamOps(out: ZipOutputStream) {
def writeXmlFile(path: String, nodes: NodeSeq): Unit = {
out.putNextEntry(new ZipEntry(path))
val writer = new OutputStreamWriter(out, "UTF-8") // match the UTF-8 declared in the XML prolog below
try XML.write(writer, nodes.head, "UTF-8", true, null, MinimizeMode.Always)
finally writer.flush()
}
def writeMediaFile(path: String, file: MediaFile): Unit = {
out.putNextEntry(new ZipEntry(path))
try out.write(file.content)
finally out.flush()
}
}
}
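A sketch of how these ops might be used when assembling a zip-based package such as a .docx (the entry name is illustrative):

import java.io.FileOutputStream
import java.util.zip.ZipOutputStream
import anchorman.docx.ZipImplicits._

val zip = new ZipOutputStream(new FileOutputStream("out.docx"))
try zip.writeXmlFile("word/document.xml", <document/>)
finally zip.close()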
Example 5: writeJson
// Package declaration and imported dependencies
package io.circe
import java.io.{ BufferedWriter, ByteArrayOutputStream, OutputStreamWriter, StringWriter, Writer }
import java.nio.ByteBuffer
package object jackson extends WithJacksonMapper with JacksonParser with JacksonCompat {
private[this] def writeJson(w: Writer, j: Json): Unit = {
val gen = jsonGenerator(w)
makeWriter(mapper).writeValue(gen, j)
w.flush()
}
final def jacksonPrint(json: Json): String = {
val sw = new StringWriter
writeJson(sw, json)
sw.toString
}
private[this] class EnhancedByteArrayOutputStream extends ByteArrayOutputStream {
def toByteBuffer: ByteBuffer = ByteBuffer.wrap(this.buf, 0, this.size)
}
final def jacksonPrintByteBuffer(json: Json): ByteBuffer = {
val bytes = new EnhancedByteArrayOutputStream
writeJson(new BufferedWriter(new OutputStreamWriter(bytes, "UTF-8")), json)
bytes.toByteBuffer
}
}
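Usage, assuming a circe Json value is at hand:

import io.circe.Json
import io.circe.jackson._

val json = Json.obj("greeting" -> Json.fromString("hello"))
println(jacksonPrint(json))            // compact JSON text
val buf = jacksonPrintByteBuffer(json) // the same JSON as a UTF-8 ByteBuffer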
Example 6: TextFile
// Package declaration and imported dependencies
package com.ox.bigdata.util.file
import java.io.{File, FileOutputStream, OutputStreamWriter}
import com.ox.bigdata.util.Using
class TextFile(filename: String) extends Using {
def write(data: List[List[String]]): Unit = {
val strData = data.map(x => x.mkString(",") + ",").mkString("\n") + "\n"
writeToFile(filename, strData)
}
def write(data: String): Unit = {
writeToFile(filename, data)
}
def delete(): Unit = {
new File(filename) delete()
}
private def writeToFile(fileName: String, data: String): Unit = {
using(new OutputStreamWriter(new FileOutputStream(fileName), "UTF-8")) {
fileWriter => fileWriter.write(data)
}
}
}
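The Using trait is imported from the project's util package and is not shown; it presumably implements the classic loan pattern, roughly:

trait Using {
  // Run f against a resource, then close the resource even if f throws.
  def using[A <: AutoCloseable, B](resource: A)(f: A => B): B =
    try f(resource) finally resource.close()
}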
Example 7: PrometheusService
// Package declaration and imported dependencies
package com.example.akka.http
import java.io.{ OutputStreamWriter, PipedInputStream, PipedOutputStream }
import scala.concurrent.{ ExecutionContext, Future }
import akka.http.scaladsl.model.{ HttpCharsets, HttpEntity, MediaType }
import akka.http.scaladsl.server.{ Directives, Route }
import akka.stream.scaladsl.StreamConverters
import io.prometheus.client.CollectorRegistry
import io.prometheus.client.exporter.common.TextFormat
object PrometheusService extends Directives {
lazy val prometheusTextType =
MediaType.customWithFixedCharset("text", "plain", HttpCharsets.`UTF-8`, params = Map("version" -> "0.0.4"))
def route(implicit executionContext: ExecutionContext): Route = {
path("metrics") {
complete {
val in = new PipedInputStream
val out = new OutputStreamWriter(new PipedOutputStream(in), HttpCharsets.`UTF-8`.value)
val byteSource = StreamConverters.fromInputStream(() => in)
Future {
try {
TextFormat.write004(out, CollectorRegistry.defaultRegistry.metricFamilySamples())
out.flush()
} finally {
out.close()
}
}
HttpEntity(prometheusTextType, byteSource)
}
}
}
}
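To expose the route, bind it with Akka HTTP as usual. A minimal sketch, assuming Akka HTTP 10.0/10.1 where bindAndHandle takes an implicit materializer:

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer

implicit val system = ActorSystem("metrics")
implicit val materializer = ActorMaterializer()
import system.dispatcher // the ExecutionContext required by PrometheusService.route

Http().bindAndHandle(PrometheusService.route, "0.0.0.0", 9095)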
Example 8: TextFile
// Package declaration and imported dependencies
package com.boost.bigdata.utils.file
import java.io.{File, FileOutputStream, OutputStreamWriter}
import com.boost.bigdata.utils.Using
class TextFile(filename: String) extends Using {
def write(data: List[List[String]]): Unit = {
val strData = data.map(x => x.mkString(",") + ",").mkString("\n") + "\n"
writeToFile(filename, strData)
}
def write(data: String): Unit = {
writeToFile(filename, data)
}
def delete(): Unit = {
new File(filename) delete()
}
private def writeToFile(fileName: String, data: String): Unit = {
using(new OutputStreamWriter(new FileOutputStream(fileName), "UTF-8")) {
fileWriter => fileWriter.write(data)
}
}
}
Example 9: SocketClient
// Package declaration and imported dependencies
package socket.benchmark
import java.io.{OutputStreamWriter, PrintWriter}
import java.net.{InetSocketAddress, Socket}
class SocketClient(val serverAddress: InetSocketAddress, msgCount: Int) {
val serverSocket = {
val socket = new Socket()
socket.setSoTimeout(1000)
socket.connect(serverAddress)
socket
}
def sendAndForgetBlocking(msg: String) = {
val (elapsed, _) = measure {
1 to msgCount foreach { i =>
writeBlockingMsg(s"$i$msg")
}
}
elapsed
}
def close() = serverSocket.close()
private def writeBlockingMsg(msg: String): Unit = {
val out = new PrintWriter(new OutputStreamWriter(serverSocket.getOutputStream, "utf-8"), true)
out.println(msg)
out.flush()
}
private def measure[T](callback: => T): (Long, T) = {
val start = System.currentTimeMillis
val res = callback
val elapsed = System.currentTimeMillis - start
(elapsed, res)
}
}
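A hypothetical benchmark driver (the address, port and payload are made up):

import java.net.InetSocketAddress

object SocketBenchmark {
  def main(args: Array[String]): Unit = {
    val client = new SocketClient(new InetSocketAddress("localhost", 9999), msgCount = 10000)
    try {
      val elapsed = client.sendAndForgetBlocking(" ping")
      println(s"sent 10000 messages in $elapsed ms")
    } finally client.close()
  }
}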
Example 10: RSAKeyGenerator
// Package declaration and imported dependencies
package allawala.chassis.util
import java.io.{File, FileOutputStream, OutputStreamWriter}
import java.nio.file.Paths
import java.security.{Key, KeyPair, KeyPairGenerator, Security}
import com.typesafe.scalalogging.StrictLogging
import org.bouncycastle.jce.provider.BouncyCastleProvider
import org.bouncycastle.util.io.pem.{PemObject, PemWriter}
object RSAKeyGenerator extends StrictLogging {
private val KeySize = 2048
private val homeDir = System.getProperty("user.home")
private class PemFile(val key: Key, val description: String) {
private val pemObject = new PemObject(description, key.getEncoded)
def write(file: File): Unit = {
val pemWriter = new PemWriter(new OutputStreamWriter(new FileOutputStream(file)))
try {
pemWriter.writeObject(this.pemObject)
}
finally {
pemWriter.close()
}
}
}
private def generateKeyPair(): KeyPair = {
val generator = KeyPairGenerator.getInstance("RSA", "BC")
generator.initialize(KeySize)
val keyPair = generator.generateKeyPair
keyPair
}
private def writePem(key: Key, description: String, filename: String): Unit = {
val path = Paths.get(homeDir, filename)
val file = path.toFile
val pemFile = new PemFile(key, description)
pemFile.write(file)
logger.debug(s"Writing $description to $path/$filename")
}
def generate(): Unit = {
Security.addProvider(new BouncyCastleProvider)
val keyPair = generateKeyPair()
val privateKey = keyPair.getPrivate
val publicKey = keyPair.getPublic
writePem(privateKey, "RSA PRIVATE KEY","id_rsa")
writePem(publicKey, "RSA PUBLIC KEY", "id_rsa.pub")
}
}
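Calling RSAKeyGenerator.generate() drops id_rsa and id_rsa.pub in PEM format into the user's home directory. Note that the "BC" provider lookup requires the Bouncy Castle provider jar (bcprov) on the classpath in addition to registering it via Security.addProvider.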
Example 11: parseRecodingTable
// Package declaration and imported dependencies
package uk.ac.ncl.openlab.intake24.sql.tools.food.localisation
import uk.ac.ncl.openlab.intake24.AssociatedFood
import java.io.OutputStreamWriter
import uk.ac.ncl.openlab.intake24.FoodHeader
import uk.ac.ncl.openlab.intake24.PortionSizeMethod
import java.io.FileWriter
import au.com.bytecode.opencsv.CSVWriter
import uk.ac.ncl.openlab.intake24.NewLocalFoodRecord
import java.io.File
import uk.ac.ncl.openlab.intake24.UserFoodHeader
trait RecodingTableParser {
def parseRecodingTable(path: String): RecodingTable
}
trait RecodingTableUtil {
def buildRecodedLocalFoodRecords(logPath: Option[String], englishLocaleName: String, localNutrientTableId: String,
indexableFoods: Seq[UserFoodHeader], recodingTable: RecodingTable, translatedAssociatedFoods: Map[String, Seq[AssociatedFood]]) = {
val logWriter = new CSVWriter(logPath.map(logPath => new FileWriter(new File(logPath))).getOrElse(new OutputStreamWriter(System.out)))
logWriter.writeNext(Array("Intake24 code", "English food description", "Coding decision", s"$englishLocaleName description"))
val records = indexableFoods.foldLeft(Map[String, NewLocalFoodRecord]()) {
(result, header) =>
val logHeaderCols = Array(header.code, header.localDescription)
val associatedFoods = translatedAssociatedFoods.getOrElse(header.code, Seq())
val portionSizeMethods = Seq() // Cannot be set here due to circular dependencies
recodingTable.existingFoodsCoding.get(header.code) match {
case Some(DoNotUse) => {
logWriter.writeNext(logHeaderCols ++ Array(s"Not using in $englishLocaleName locale"))
result + (header.code -> NewLocalFoodRecord(None, true, Map(), Seq(), Seq(), Seq()))
}
case Some(UseUKFoodTable(localDescription)) => {
logWriter.writeNext(logHeaderCols ++ Array("Inheriting UK food composition table code", localDescription))
result + (header.code -> NewLocalFoodRecord(Some(localDescription), false, Map(), portionSizeMethods, associatedFoods, Seq()))
}
case Some(UseLocalFoodTable(localDescription, localTableRecordId)) => {
logWriter.writeNext(logHeaderCols ++ Array(s"Using $localNutrientTableId food composition table code", localDescription, localTableRecordId))
result + (header.code -> NewLocalFoodRecord(Some(localDescription), false, Map(localNutrientTableId -> localTableRecordId), portionSizeMethods, associatedFoods, Seq()))
}
case None =>
logWriter.writeNext(logHeaderCols ++ Array(s"Not in $englishLocaleName recoding table!"))
result
}
}
logWriter.close()
records
}
}
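RecodingTable and the coding decisions are defined elsewhere in the intake24 codebase; from the pattern match above they presumably resemble:

sealed trait RecodingDecision
case object DoNotUse extends RecodingDecision
case class UseUKFoodTable(localDescription: String) extends RecodingDecision
case class UseLocalFoodTable(localDescription: String, localTableRecordId: String) extends RecodingDecision

case class RecodingTable(existingFoodsCoding: Map[String, RecodingDecision])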
Example 12: GenTestFolder
// Package declaration and imported dependencies
package com.cloudera.sa.cap1.largefileutil
import java.io.{BufferedWriter, OutputStreamWriter}
import java.util.Random
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
object GenTestFolder {
def main(args:Array[String]): Unit = {
val fs = FileSystem.get(new Configuration())
val r = new Random
val numOfFiles = args(0).toInt
val path = args(1)
var currentPath = path
for (i <- 0 until numOfFiles) {
val n = r.nextInt(100)
if (n % 12 == 0) {
currentPath = path
}
if (n < 30) {
//Make empty file
fs.create(new Path(currentPath + "/" + "zeroFile." + i + ".txt")).close()
println("new zero file:" + new Path(currentPath + "/" + "zeroFile." + i + ".txt"))
} else if (n < 80) {
//Make data file
val stream = fs.create(new Path(currentPath + "/" + "dataFile." + i + ".txt"))
val writer = new BufferedWriter(new OutputStreamWriter(stream))
writer.write("Foobar " + n + " " + i)
writer.close()
println("new full file:" + new Path(currentPath + "/" + "dataFile." + i + ".txt"))
} else {
//make new folder
fs.mkdirs(new Path(currentPath + "/" + "folder." + i))
currentPath = currentPath + "/" + "folder." + i
println("new Folder:" + new Path(currentPath + "/" + "folder." + i))
}
}
}
}
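The generator takes two arguments, the number of files to create and the root path; running it as GenTestFolder 100 /tmp/gen-test (with a Hadoop configuration on the classpath) produces a random mix of empty files, small data files and nested folders under /tmp/gen-test.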
Example 13: FileDestination
// Package declaration and imported dependencies
import java.io.{BufferedWriter, FileOutputStream, OutputStreamWriter}
import scala.concurrent.duration.{FiniteDuration, SECONDS}
import com.github.nscala_time.time.Imports._
import org.joda.time.format._
import akka.util.Timeout
package hyperion {
class FileDestination(id: String, fileName: String, template: String) extends Pipe with Tickable {
def selfId = id
val writer = new BufferedWriter(new OutputStreamWriter(
new FileOutputStream(fileName), "utf-8"))
var lastMessage = DateTime.now
implicit val timeout = Timeout(FiniteDuration(1, SECONDS))
val msgTemplate = if (template == "") new MessageTemplate("<$PRIO> $DATE $HOST $PROGRAM $PID : $MESSAGE \n") else new MessageTemplate(template)
var processed = 0
override def preStart() = {
super.preStart()
startTicking(FiniteDuration(0, SECONDS), FiniteDuration(1, SECONDS))
}
override def postStop() = {
stopTicking()
writer.close()
super.postStop()
}
def process = {
case msg: Message => {
writer.write(msgTemplate.format(msg))
processed += 1
lastMessage = DateTime.now
}
case Tick => {
// Flush only when no message has arrived for over a second.
if (DateTime.now.getMillis - lastMessage.getMillis > 1000L) {
writer.flush()
}
}
case StatsRequest => {
sender ! StatsResponse(Map[String, Int]( "processed" -> processed))
}
}
}
}
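The design point worth noting here: messages pass through a BufferedWriter and are flushed only from the Tick handler once more than a second has elapsed since the last message, so the destination avoids a flush per message under load while still bounding flush latency when traffic pauses.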
Example 14: HelloLambda
// Package declaration and imported dependencies
package com.virtuslab.lambda.hello
import java.io.{InputStream, OutputStream, OutputStreamWriter}
import com.amazonaws.services.lambda.runtime.{Context, RequestStreamHandler}
import com.virtuslab.lambda.hello.HelloLambda.formats
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization._
object HelloLambda {
implicit val formats = Serialization.formats(NoTypeHints)
}
class HelloLambda extends RequestStreamHandler {
override def handleRequest(input: InputStream, output: OutputStream, context: Context): Unit = {
context.getLogger.log(s"Log: ${context.getRemainingTimeInMillis}")
println(s"Test ${context.getAwsRequestId}")
try {
val writer = new OutputStreamWriter(output, "UTF-8")
writer.write(write(s"Fetched with ID: ${context.getAwsRequestId}"))
writer.flush()
writer.close()
} catch {
case e: Throwable => context.getLogger.log(s"exception? -> ${e}")
}
}
}
Example 15: Response
// Package declaration and imported dependencies
package com.virtuslab.lambda.http
import java.io.{InputStream, OutputStream, OutputStreamWriter}
import com.amazonaws.services.lambda.runtime.{Context, RequestStreamHandler}
import org.json4s.NoTypeHints
import org.json4s.jackson.Serialization
import org.json4s.jackson.Serialization._
import HttpLambda.formats
case class Response(body: Option[String] = None,
statusCode: Int = 200,
headers: Map[String, Any] = Map.empty[String, Any])
object HttpLambda {
implicit val formats = Serialization.formats(NoTypeHints)
}
class HttpLambda extends RequestStreamHandler {
override def handleRequest(input: InputStream, output: OutputStream, context: Context): Unit = {
context.getLogger.log(s"Log: ${context.getRemainingTimeInMillis}")
println(s"Test ${context.getAwsRequestId}")
try {
val response = write(Response(Some(s"Fetched with ID: ${context.getAwsRequestId}")))
context.getLogger.log(s"Generated response is: ${response}")
val writer = new OutputStreamWriter(output, "UTF-8")
writer.write(response)
writer.flush()
writer.close()
} catch {
case e: Throwable => context.getLogger.log(s"exception? -> ${e}")
}
}
}
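The Response case class (statusCode, headers, body) matches the shape that API Gateway expects from a Lambda proxy integration, which is presumably why this handler serializes a structured response instead of writing a bare string as in the previous example.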