

Scala FileUtils Class Code Examples

This article compiles typical usage examples of the org.apache.commons.io.FileUtils class in Scala. If you are wondering what the FileUtils class is for, how to use it, or what real-world code that uses it looks like, the curated class examples below may help.
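To compile and run these examples yourself, Commons IO needs to be on the classpath. A minimal sbt dependency looks like the line below (the version shown is only illustrative; use whichever release your project requires):

// build.sbt — the version number here is an example, not a requirement
libraryDependencies += "commons-io" % "commons-io" % "2.11.0"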


Below are 15 code examples of the FileUtils class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
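Before the individual examples, here is a minimal, self-contained sketch of the FileUtils operations that recur throughout them (writing, copying, reading, and deleting files and directories). The file and directory names are hypothetical and chosen only for illustration.

// A quick tour of the FileUtils calls that recur in the examples below
import java.io.File
import java.nio.charset.StandardCharsets

import org.apache.commons.io.FileUtils

object FileUtilsQuickTour {
  def main(args: Array[String]): Unit = {
    val workDir = new File("tmp-fileutils-demo") // hypothetical scratch directory
    val source = new File(workDir, "source.txt")
    val copy = new File(workDir, "copy.txt")

    // Write a string to a file, creating parent directories as needed
    FileUtils.writeStringToFile(source, "hello, FileUtils", StandardCharsets.UTF_8)

    // Copy the file and read the copy back as a string
    FileUtils.copyFile(source, copy)
    println(FileUtils.readFileToString(copy, StandardCharsets.UTF_8))

    // Remove the whole scratch directory when done; deleteQuietly never throws
    FileUtils.deleteQuietly(workDir)
  }
}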

Example 1: DsStoreRemover

// Package declaration and imported dependencies
package org.bruchez.olivier.dsstoreremover

import org.apache.commons.io.FileUtils
import scala.util._

object DsStoreRemover {
  def main(args: Array[String]): Unit =
    Try(Arguments(args)) match {
      case Failure(_) =>
        println("Usage: DsStoreRemover <directory to clean> <trash directory>")
        System.exit(-1)
      case Success(arguments) =>
        DsStoreRemover(arguments).clean()
    }
}

case class DsStoreRemover(arguments: Arguments) {
  def clean(): Unit = {
    val allFiles = Files.filesInDirectory(arguments.directoryToClean, recursive = true, includeDirectories = false)
    val trashDirectoryPrefixPath = arguments.trashDirectory.getCanonicalPath

    for {
      file <- allFiles
      if !file.getCanonicalPath.startsWith(trashDirectoryPrefixPath)
      if Files.isMacOsMetadataFile(file)
    } {
      val destinationFile = Files.nonExistingFile(arguments.trashDirectory, file.getName)

      if (arguments.readOnly) {
        println(s"File '${file.getCanonicalPath}' would be moved to '${destinationFile.getCanonicalPath}' (read only)")
      } else {
        println(s"Moving '${file.getCanonicalPath}' to '${destinationFile.getCanonicalPath}'")
        FileUtils.moveFile(file, destinationFile)
      }
    }
  }
} 
Developer: obruchez, Project: ds-store-remover, Lines: 38, Source: DsStoreRemover.scala

Example 2: SnapshotGenerationSpec

// Package declaration and imported dependencies
package com.commodityvectors.snapshotmatchers

import java.io.File

import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, Matchers, fixture}

import scala.util.Try

class SnapshotGenerationSpec extends fixture.WordSpec with Matchers with SnapshotMatcher with BeforeAndAfterEach {

  val snapshotFolder: String = "scalatest-snapshot-matcher-core/src/test/__snapshots__"
  val currentSpecPath: String = s"$snapshotFolder/com/commodityvectors/snapshotmatchers/SnapshotGenerationSpec"

  override def afterEach(): Unit = {
    Try(FileUtils.deleteDirectory(new File(snapshotFolder)))
  }

  "SnapshotMatcher" should {
    "generate snapshot file with expectation" in { implicit test =>
      val value: Int = 1
      value should matchSnapshot[Int]()
      FileUtils.readFileToString(
        new File(s"$currentSpecPath/snapshotmatcher-should-generate-snapshot-file-with-expectation.snap")
      ) shouldEqual "1"
    }

    "generate file with custom id" in { implicit test =>
      val value = 10
      value should matchSnapshot[Int]("customId")
      FileUtils.readFileToString(
        new File(s"$currentSpecPath/customId.snap")
      ) shouldEqual "10"
    }
  }
} 
Developer: commodityvectors, Project: scalatest-snapshot-matchers, Lines: 37, Source: SnapshotGenerationSpec.scala

Example 3: PlayJsonSnapshotMatcherSpec

// Package declaration and imported dependencies
package com.commodityvectors.snapshotmatchers.playJson

import java.io.File

import com.commodityvectors.snapshotmatchers.{SnapshotMatcher, SnapshotSerializer}
import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, Matchers, fixture}
import play.api.libs.json.{Format, JsValue, Json}

import scala.util.Try

class PlayJsonSnapshotMatcherSpec extends fixture.WordSpec with Matchers with SnapshotMatcher with PlayJsonSnapshotMatcher with BeforeAndAfterEach {
  case class Test(value: Int)
  implicit lazy val jsonFormat: Format[Test] = Json.format[Test]

  val snapshotFolder: String = "scalatest-snapshot-matcher-play-json/src/test/__snapshots__"
  val currentSpecPath: String = s"$snapshotFolder/com/commodityvectors/snapshotmatchers/playJson/PlayJsonSnapshotMatcherSpec"

  override def afterEach(): Unit = {
    Try(FileUtils.deleteDirectory(new File(snapshotFolder)))
  }

  "PlayJsonSnapshotMatcherSpec" should {
    "pretty print json" in { implicit test =>
      val instance = Test(1)
      SnapshotSerializer.serialize(Json.toJson(instance)) shouldEqual
        s"""{
           |  "value" : 1
           |}""".stripMargin
    }

    "generate json snapshot file" in { implicit test =>
      val instance = Test(1)
      Json.toJson(instance) should matchSnapshot[JsValue]("customId")
      FileUtils.readFileToString(
        new File(s"$currentSpecPath/customId.snap")
      ) shouldEqual
        s"""{
           |  "value" : 1
           |}""".stripMargin
    }

    "allow deserialization" in { implicit test =>
      val instance = Test(1)
      Json.toJson(instance) should matchSnapshot[JsValue]("anotherId")
      "anotherId" should deserializeAs(instance)
    }
  }
} 
Developer: commodityvectors, Project: scalatest-snapshot-matchers, Lines: 50, Source: PlayJsonSnapshotMatcherSpec.scala

Example 4: beforeAll

// Package declaration and imported dependencies
package akka.persistence

import java.io.File

import org.apache.commons.io.FileUtils

trait PluginCleanup extends PluginSpec {
  val storageLocations = List(
    "akka.persistence.journal.leveldb.dir",
    "akka.persistence.snapshot-store.local.dir").map(s ? new File(system.settings.config.getString(s)))

  override def beforeAll() {
    storageLocations.foreach(FileUtils.deleteDirectory)
    super.beforeAll()
  }

  override def afterAll() {
    super.afterAll()
    storageLocations.foreach(FileUtils.deleteDirectory)
  }
} 
Developer: love1314sea, Project: akka-2.3.16, Lines: 22, Source: PluginCleanup.scala

Example 5: FileBasedMailboxSpec

// Package declaration and imported dependencies
package akka.actor.mailbox.filebased

import language.postfixOps

import akka.actor.mailbox._
import scala.concurrent.duration._
import org.apache.commons.io.FileUtils
import akka.dispatch.Mailbox

object FileBasedMailboxSpec {
  val config = """
    File-dispatcher {
      mailbox-type = akka.actor.mailbox.filebased.FileBasedMailboxType
      throughput = 1
      file-based.directory-path = "file-based"
      file-based.circuit-breaker.max-failures = 5
      file-based.circuit-breaker.call-timeout = 5 seconds
    }
               """
}

@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class FileBasedMailboxSpec extends DurableMailboxSpec("File", FileBasedMailboxSpec.config) {

  val settings = new FileBasedMailboxSettings(system.settings, system.settings.config.getConfig("File-dispatcher"))

  "FileBasedMailboxSettings" must {
    "read the file-based section" in {
      settings.QueuePath should be("file-based")
      settings.CircuitBreakerMaxFailures should be(5)
      settings.CircuitBreakerCallTimeout should be(5 seconds)
    }
  }

  private[akka] def isDurableMailbox(m: Mailbox): Boolean = m.messageQueue.isInstanceOf[FileBasedMessageQueue]

  def clean(): Unit = FileUtils.deleteDirectory(new java.io.File(settings.QueuePath))

  override def atStartup() {
    clean()
    super.atStartup()
  }

  override def afterTermination() {
    clean()
    super.afterTermination()
  }
} 
Developer: love1314sea, Project: akka-2.3.16, Lines: 49, Source: FileBasedMailboxSpec.scala

Example 6: TilingServiceSpec

// Package declaration and imported dependencies
package transform.tiling

import java.io.File
import org.apache.commons.io.FileUtils
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import scala.concurrent.Await
import scala.concurrent.duration._

@RunWith(classOf[JUnitRunner])
class TilingServiceSpec extends Specification {

  val TEST_IMAGE = new File("test/resources/transform/tiling/Ptolemy_map_15th_century.jpg")

  val TMP_DIR = {
    val dir = new File("test/resources/transform/tiling/tmp")
    if (dir.exists)
      FileUtils.deleteDirectory(dir)
    dir
  }

  "The Tiling function" should {

    "create proper Zoomify tiles from the test image" in {
      Await.result(TilingService.createZoomify(TEST_IMAGE, TMP_DIR), 10 seconds)

      TMP_DIR.exists must equalTo(true)
      TMP_DIR.list.size must equalTo(2)
      new File(TMP_DIR, "ImageProperties.xml").exists must equalTo(true)

      val tileGroup0 = new File(TMP_DIR, "TileGroup0")
      tileGroup0.exists must equalTo(true)

      tileGroup0.list.size must equalTo(65)
      tileGroup0.list.filter(_.endsWith(".jpg")).size must equalTo(65)

      FileUtils.deleteDirectory(TMP_DIR)

      success
    }

  }

} 
Developer: pelagios, Project: recogito2, Lines: 49, Source: TilingServiceSpec.scala

Example 7: ActorSystemSpec

// Package declaration and imported dependencies
package pt.tecnico.dsi.ldap.akka

import java.io.File

import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

abstract class ActorSystemSpec extends TestKit(ActorSystem("akka-ldap", ConfigFactory.load()))
  with Matchers
  with ImplicitSender
  with WordSpecLike
  with BeforeAndAfterAll
  with LazyLogging {

  val settings = new Settings()

  val ldapActor = system.actorOf(Props(new LdapActor(settings)), "ldap")

  private var seqCounter = 0L
  def nextSeq(): Long = {
    val ret = seqCounter
    seqCounter += 1
    ret
  }

  val storageLocations = List(
    "akka.persistence.journal.leveldb.dir",
    "akka.persistence.journal.leveldb-shared.store.dir",
    "akka.persistence.snapshot-store.local.dir"
  ).map(s => new File(system.settings.config.getString(s)))

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    storageLocations.foreach(FileUtils.deleteDirectory)
  }

  override protected def afterAll(): Unit = {
    super.afterAll()
    storageLocations.foreach(FileUtils.deleteDirectory)
    shutdown(verifySystemShutdown = true)
  }
} 
Developer: ist-dsi, Project: akka-ldap, Lines: 47, Source: ActorSystemSpec.scala

Example 8: BackupMetadataTest

// Package declaration and imported dependencies
package com.unity.analytics.spark.utils.parquet

import java.io.{File, StringReader}
import java.sql.DriverManager

import org.apache.commons.io.FileUtils
import org.h2.tools.{RunScript, Server}

class BackupMetadataTest extends BaseTest {

  val dbDriver = "org.h2.Driver"
  val server = Server.createTcpServer("-tcpPort", "9999")
  val jdbcDB = "dummydb"
  val jdbcUrl = s"jdbc:h2:mem:$jdbcDB;DATABASE_TO_UPPER=FALSE;MODE=MYSQL"
  val jdbcConfig = Map(
    "url" -> jdbcUrl
  )
  val backendConnection = DriverManager.getConnection(jdbcUrl)

  val backupId = "dummyBackupId"

  override def beforeAll(): Unit = {
    super.beforeAll()
    println("INITIALIZE H2")
    // Initialize H2
    val reader = new StringReader(FileUtils.readFileToString(new File("src/test/scala/resources/backup_metadata.sql")).replace('`', '"'))
    RunScript.execute(backendConnection, reader)
  }

  test("Test reading and writing backup metadata") {
    val entries = Array(
      BackupEntry("hdfs://hello", "s3a://hello", 100, 10),
      BackupEntry("/source", "/destination", 200, 2)
    ).sortBy(_.destDir)
    BackupMetadata.write(backupId, entries, jdbcConfig)
    val metadata = BackupMetadata.read(backupId, jdbcConfig).get
    assert(metadata.backupId === backupId)
    assert(metadata.backupEntries.length === entries.length)
    val output = metadata.backupEntries.sortBy(_.destDir)
    entries.zip(output).foreach(e => {
      assert(e._1 === e._2)
    })
  }
} 
Developer: UnityTech, Project: parquet-s3-backup, Lines: 45, Source: BackupMetadataTest.scala

Example 9: get

// Package declaration and imported dependencies
package pl.mojepanstwo.sap.toakoma.services

import java.net.URL
import java.io.File
import org.apache.commons.io.FileUtils
import org.jsoup.nodes.Document
import com.gargoylesoftware.htmlunit._
import org.jsoup.Jsoup

trait Scraper {
  def get(url: String) : Document
  def dowloadFile(fileUrl:String, filePath:String) : String
}

class DefaultScraperService extends Scraper {

  val webClient = new WebClient

  def get(url: String) : Document = {
      webClient.setRefreshHandler(new RefreshHandler {
        override def handleRefresh(page: Page, url: URL, i: Int): Unit = webClient.getPage(url)
      })
      val apPage: Page = webClient.getPage(url)
      Jsoup.parse(apPage.getWebResponse.getContentAsString)
  }

  def dowloadFile(fileUrl:String, filePath:String) : String = {
    val url = new URL(fileUrl)
    val tmp = new File(filePath)
    FileUtils.copyURLToFile(url, tmp)
    tmp.getAbsolutePath()
  }

} 
Developer: PrawoPolskie, Project: toakoma, Lines: 35, Source: Scraper.scala

Example 10: ExamineAndTrainOptions

// Package declaration and imported dependencies
package com.databricks.apps.twitterClassifier

import java.io.File
import com.github.acrisci.commander.Program

abstract sealed case class ExamineAndTrainOptions(
  overWrite: Boolean = false,
  verbose: Boolean = false,
  tweetDirectory: File = new File(System.getProperty("user.home"), "/sparkTwitter/tweets/"),
  modelDirectory: File = new File(System.getProperty("user.home"), "/sparkTwitter/modelDirectory/"),
  numClusters: Int = 10,
  numIterations: Int = 100
)

object ExamineAndTrainOptions {
  val _program = new Program()
    .version("2.0.0")
    .option(flags="-v, --verbose", description="Generate output to show progress")
    .option(flags="-w, --overWrite", description="Overwrite model from a previous run")
    .usage("ExamineAndTrain [Options] <tweetDirectory> <modelDirectory> <numClusters> <numIterations>")

  def parse(args: Array[String]): ExamineAndTrainOptions = {
    val program: Program = _program.parse(args)
    if (program.args.length!=program.usage.split(" ").length-2) program.help

    val options = new ExamineAndTrainOptions(
      overWrite = program.overWrite,
      verbose = program.verbose,
      tweetDirectory = new File(program.args.head.replaceAll("^~", System.getProperty("user.home"))),
      modelDirectory = new File(program.args(1).replaceAll("^~", System.getProperty("user.home"))),
      numClusters = program.args(2).toInt,
      numIterations = program.args(3).toInt
    ){}
    import options._

    if (!tweetDirectory.exists) {
      System.err.println(s"${ tweetDirectory.getCanonicalPath } does not exist. Did you run Collect yet?")
      System.exit(-1)
    }
    if (modelDirectory.exists) {
      if (options.overWrite) {
        import org.apache.commons.io.FileUtils
        println("Replacing pre-existing model")
        FileUtils.deleteDirectory(modelDirectory)
      } else {
        System.err.println("Model already exists and --overWrite option was not specified")
        System.exit(-3)
      }
    }
    if (numClusters<1) {
      System.err.println(s"At least 1 clusters must be specified")
      System.exit(-3)
    }
    if (numIterations<1) {
      System.err.println(s"At least 1 iteration must be specified")
      System.exit(-4)
    }

    options
  }
} 
Developer: krish121, Project: Spark-reference-applications, Lines: 62, Source: ExamineAndTrainOptions.scala

Example 11: around

// Package declaration and imported dependencies
package se.digiplant.imagemagick.plugin

import org.specs2.mutable.Around
import org.specs2.specification.Scope
import org.specs2.execute.{AsResult, Result}
import play.api.test._
import play.api.test.Helpers._
import java.io.File
import org.apache.commons.io.FileUtils
import util.Random

trait ScalrContext extends Around with TempFile {

  implicit val app: FakeApplication = new FakeApplication(
    additionalConfiguration = Map(
      "res.default" -> "tmp/default",
      "res.imagemagickcache" -> "tmp/imagemagickcache",
      "imagemagick.cache" -> "imagemagickcache",
      "imagemagick.cachedir" -> "tmp/imagemagickcachedir"
    )
  )

  def around[T : AsResult](t: =>T) = Helpers.running(app) {
    val result = AsResult.effectively(t)

    tmp.delete()

    result
  }
}

trait TempFile extends Scope {
  lazy val tmp = new File("tmp")
  lazy val logo = new File("test/resources/digiplant.jpg")
  lazy val LargeLogo = new File("test/resources/digiplant_large.jpg")

  def testFile: File = {
    tmp.mkdir()
    val chars = ('a' to 'z') ++ ('A' to 'Z') ++ ('1' to '9')
    val rand = (1 to 20).map(x => chars(Random.nextInt(chars.length))).mkString
    val tmpFile = new File("tmp", rand + ".jpg")
    FileUtils.copyFile(logo, tmpFile)
    tmpFile
  }
  def largeTestFile: File = {
    tmp.mkdir()
    val chars = ('a' to 'z') ++ ('A' to 'Z') ++ ('1' to '9')
    val rand = (1 to 20).map(x => chars(Random.nextInt(chars.length))).mkString
    val tmpFile = new File("tmp", rand + ".jpg")
    FileUtils.copyFile(LargeLogo, tmpFile)
    tmpFile
  }
} 
Developer: digiPlant, Project: play-imagemagick, Lines: 54, Source: Spec.scala

Example 12: ScalingTestResultFiles

// Package declaration and imported dependencies
package mesosphere.mesos.scale

import java.io.File

import org.apache.commons.io.FileUtils
import play.api.libs.json._

object ScalingTestResultFiles {
  val resultDir: File = new File("./target")
  def jsonFile(name: String): File = new File(resultDir, s"scaleTest-$name.json")

  def writeJson[T](name: String, json: T)(implicit writes: Writes[T]): Unit = {
    FileUtils.write(jsonFile(name), Json.prettyPrint(Json.toJson(json)))
  }

  def readJson[T](name: String)(implicit reads: Reads[T]): T = {
    val fileString = FileUtils.readFileToString(jsonFile(name))
    val fileJson = Json.parse(fileString)
    Json.fromJson(fileJson).get
  }

  val relativeTimestampMs: String = "relativeTimestampMs"

  def addTimestamp(startTime: Long)(value: JsValue): JsObject = {
    value.transform(__.json.update((__ \ relativeTimestampMs).json.put(JsNumber(System.currentTimeMillis() - startTime)))).get
  }
} 
Developer: xiaozai512, Project: marathon, Lines: 28, Source: ScalingTestResultFiles.scala

Example 13: GenerateDataSet

// Package declaration and imported dependencies
import java.io._

import org.apache.commons.io.FileUtils

import scala.util.Random

class GenerateDataSet {
  var z: Int = 5

  def generateRandomFileTest(sizeOfDataSet: Int): Unit = {
    FileUtils.deleteQuietly(new File("test.txt"))
    val writer = new PrintWriter(new File("test.txt"))

    val rg = new Random()
    writer.write(((rg.nextGaussian() * z) + 20).toString + "\t" + ((rg.nextGaussian() * z) + 20).toString)
    for (x <- 20 to 120 by 30) {
      for (y <- 20 to 120 by 30) {
        for (i <- 1 to ((sizeOfDataSet - 1) / 16).toInt) {
          writer.write("\n" + ((rg.nextGaussian() * z) + x).toString + "\t" + ((rg.nextGaussian() * z) + y).toString)
        }
      }
    }

    writer.close()
  }
}
Developer: AndyFou, Project: kmeans_contributions, Lines: 27, Source: GenerateDataSet.scala

Example 14: ConfigModelsSpec

// Package declaration and imported dependencies
import java.nio.file.{Path, Paths}

import org.apache.commons.io.FileUtils

import org.specs2.mutable.Specification
import spray.json._
import com.pacbio.secondary.smrtlink.models.ConfigModels._
import com.pacbio.secondary.smrtlink.models.ConfigModelsJsonProtocol


class ConfigModelsSpec extends Specification{

  import ConfigModelsJsonProtocol._

  sequential

  val RESOURCE_DIR = "smrtlink-system-configs"

  def getTestResource(name: String): Path =
    Paths.get(getClass.getResource(s"$RESOURCE_DIR/$name").toURI)

  "Sanity serialization of SL System config 2.0" should {
    "Load test file successfully" in {
      val name = "smrtlink-system-config.json"
      val p = getTestResource(name)
      val sx = FileUtils.readFileToString(p.toFile, "UTF-8")
      val jx = sx.parseJson
      val config = jx.convertTo[RootSmrtflowConfig]
      config.comment must beSome
      config.smrtflow.server.port === 8077
    }

    "Load credentials file successfully" in {
      val name = "wso2-credentials.json"
      val p = getTestResource(name)
      val sx = FileUtils.readFileToString(p.toFile, "UTF-8")
      val jx = sx.parseJson
      val creds = jx.convertTo[Wso2Credentials]
      creds.wso2User === "jsnow"
      creds.wso2Password === "r+l=j"
    }
  }
} 
Developer: PacificBiosciences, Project: smrtflow, Lines: 44, Source: ConfigModelsSpec.scala

Example 15: SimpleDataTransferOptions

// Package declaration and imported dependencies
package com.pacbio.secondary.analysis.jobtypes

import java.nio.file.{Files, Paths}

import com.pacbio.secondary.analysis.jobs._
import com.pacbio.secondary.analysis.jobs.JobModels._
import com.pacbio.secondary.analysis.jobs.JobModels.JobConstants.GENERAL_PROJECT_ID
import com.pacbio.secondary.analysis.tools.timeUtils
import org.apache.commons.io.FileUtils
import org.joda.time.{DateTime => JodaDateTime}

// DataTransfer
case class SimpleDataTransferOptions(src: String,
                                     dest: String,
                                     override val projectId: Int = GENERAL_PROJECT_ID) extends BaseJobOptions {
  def toJob = new SimpleDataTransferJob(this)
}



class SimpleDataTransferJob(opts: SimpleDataTransferOptions)
  extends BaseCoreJob(opts: SimpleDataTransferOptions)
  with timeUtils{

  type Out = ResultSuccess
  val jobTypeId = JobTypeId("simple_data_transfer")

  def run(job: JobResourceBase, resultsWriter: JobResultWriter): Either[ResultFailed, ResultSuccess] = {
    val startedAt = JodaDateTime.now()

    val srcP = Paths.get(opts.src)
    val destP = Paths.get(opts.dest)

    if (Files.isDirectory(srcP)) {
      logger.info(s"copying directory $srcP to $destP")
      FileUtils.copyDirectory(srcP.toFile, destP.toFile)
    } else {
      logger.info(s"copying file from $srcP to $destP")
      FileUtils.copyFile(srcP.toFile, destP.toFile)
    }
    //
    val msg = s"completed transferring files from: ${srcP.toString} to ${destP.toString}"
    logger.info(msg)

    val runTimeSec = computeTimeDeltaFromNow(startedAt)
    Right(ResultSuccess(job.jobId, jobTypeId.toString, "Completed running", runTimeSec, AnalysisJobStates.SUCCESSFUL, host))
  }


} 
Developer: PacificBiosciences, Project: smrtflow, Lines: 51, Source: SimpleDataTransferJob.scala


Note: The org.apache.commons.io.FileUtils class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms; the code snippets were selected from open-source projects contributed by their respective developers. Copyright of the source code remains with the original authors; for distribution and use, please refer to each project's License. Do not reproduce without permission.