This article collects typical usage examples of the Scala class org.apache.spark.graphx.GraphLoader. If you have been wondering what exactly the GraphLoader class does, how to use it, or where to find usage examples, the curated class code examples here may help.
Three code examples of the GraphLoader class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Scala code examples.
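All three examples follow the same basic pattern: obtain a SparkContext, point GraphLoader.edgeListFile at a whitespace-separated edge-list file, and work with the resulting Graph[Int, Int]. As a quick orientation, here is a minimal, self-contained sketch of that pattern; the path data/edges.txt is a placeholder:

// Minimal GraphLoader sketch; data/edges.txt is a placeholder path.
import org.apache.spark.SparkContext
import org.apache.spark.graphx.{Graph, GraphLoader}

object GraphLoaderSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local[*]", "GraphLoaderSketch")
    // edgeListFile parses "srcId dstId" pairs; vertex and edge attributes default to 1.
    val graph: Graph[Int, Int] =
      GraphLoader.edgeListFile(sc, "data/edges.txt",
        canonicalOrientation = false, numEdgePartitions = 4)
    println(s"vertices = ${graph.numVertices}, edges = ${graph.numEdges}")
    sc.stop()
  }
}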
Example 1: RDFGraphPIClustering
// Set the package name and import the required classes
package net.sansa_stack.examples.spark.ml.clustering
import scala.collection.mutable
import org.apache.spark.sql.SparkSession
import org.apache.log4j.{ Level, Logger }
import org.apache.spark.graphx.GraphLoader
import net.sansa_stack.ml.spark.clustering.{ RDFGraphPICClustering => RDFGraphPICClusteringAlg }
object RDFGraphPIClustering {
  def main(args: Array[String]): Unit = {
    if (args.length < 3) {
      System.err.println(
        "Usage: RDFGraphPIClustering <input> <k> <numIterations>")
      System.exit(1)
    }
    val input = args(0) // e.g. "src/main/resources/BorderFlow_Sample1.txt"
    val k = args(1).toInt
    val numIterations = args(2).toInt
    val optionsList = args.drop(3).map { arg =>
      arg.dropWhile(_ == '-').split('=') match {
        case Array(opt, v) => (opt -> v)
        case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
      }
    }
    val options = mutable.Map(optionsList: _*)
    // No extra options are supported: reject anything left after parsing.
    options.foreach {
      case (opt, _) => throw new IllegalArgumentException("Invalid option: " + opt)
    }
    println("============================================")
    println("| Power Iteration Clustering example |")
    println("============================================")
    val sparkSession = SparkSession.builder
      .master("local[*]")
      .appName("Power Iteration Clustering example (" + input + ")")
      .getOrCreate()
    Logger.getRootLogger.setLevel(Level.ERROR)
    // Load the graph from a whitespace-separated edge list.
    val graph = GraphLoader.edgeListFile(sparkSession.sparkContext, input)
    // Run PIC, then group vertex IDs by the cluster they were assigned to.
    val model = RDFGraphPICClusteringAlg(sparkSession, graph, k, numIterations).run()
    val clusters = model.assignments.collect().groupBy(_.cluster).mapValues(_.map(_.id))
    val assignments = clusters.toList.sortBy { case (_, v) => v.length }
    val assignmentsStr = assignments
      .map {
        case (k, v) =>
          s"$k -> ${v.sorted.mkString("[", ",", "]")}"
      }.mkString(",")
    val sizesStr = assignments.map {
      _._2.size
    }.sorted.mkString("(", ",", ")")
    println(s"Cluster assignments: $assignmentsStr\ncluster sizes: $sizesStr")
    sparkSession.stop()
  }
}
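For reference, GraphLoader.edgeListFile expects one whitespace-separated source/destination vertex-ID pair per line and skips lines that begin with #. A tiny hypothetical input file for the example above could look like this:

# toy edge list (hypothetical)
1 2
2 3
3 1

The program is then invoked with the input path, the number of clusters k, and the iteration count, e.g. RDFGraphPIClustering edges.txt 2 10.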
Example 2: MeasureTest
// Set the package name and import the required classes
package ml.sparkling.graph.operators
import org.apache.log4j.Logger
import org.apache.spark.SparkContext
import org.apache.spark.graphx.{Graph, GraphLoader}
import org.scalatest._
abstract class MeasureTest(implicit sc: SparkContext) extends FlatSpec with BeforeAndAfterAll with GivenWhenThen with Matchers with BeforeAndAfterEachTestData {

  // Time a block of code, logging the label before and the elapsed milliseconds after.
  def time[T](str: String)(thunk: => T): (T, Long) = {
    logger.info(s"$str...")
    val t1 = System.currentTimeMillis
    val x = thunk
    val t2 = System.currentTimeMillis
    val diff = t2 - t1
    logger.info(s"$diff ms")
    (x, diff)
  }

  val logger = Logger.getLogger(this.getClass)

  // Load an edge-list file and name the RDDs so they are easy to identify in the Spark UI.
  def loadGraph(file: String): Graph[Int, Int] = {
    val out: Graph[Int, Int] = GraphLoader.edgeListFile(sc, file)
    out.vertices.setName(s"Graph vertices $file")
    out.edges.setName(s"Graph edges $file")
    out.triplets.setName(s"Graph triplets $file")
    out
  }

  override def beforeEach(testData: TestData): Unit = {
    logger.info(s"${Console.GREEN} Running test ${testData.name} ${Console.RESET}")
  }
}
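A hedged sketch of how a concrete suite could build on MeasureTest, combining loadGraph with the time helper; the suite name and the classpath resource /graph.el are hypothetical:

import org.apache.spark.SparkContext

class VertexCountMeasureTest(implicit sc: SparkContext) extends MeasureTest {
  "Vertex count" should "be computed and timed" in {
    Given("an edge-list graph on the test classpath")
    val graph = loadGraph(getClass.getResource("/graph.el").toString) // hypothetical resource
    When("counting vertices with timing")
    val (count, elapsedMs) = time("numVertices")(graph.numVertices)
    Then("the count and elapsed time are non-negative")
    count should be >= 0L
    elapsedMs should be >= 0L
  }
}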
Example 3: IterativeComputation
// Set the package name and import the required classes
package ml.sparkling.graph.api.operators
import org.apache.spark.graphx.GraphLoader
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfter, FlatSpec}
class IterativeComputation$Test extends FlatSpec with BeforeAndAfter {
  val master = "local[*]"

  def appName: String = "IterativeComputationTest"

  implicit val sc: SparkContext = {
    val conf = new SparkConf()
      .setMaster(master)
      .setAppName(appName)
    new SparkContext(conf)
  }

  after {
    if (!sc.isStopped) {
      sc.stop()
    }
  }

  def loadGraph(file: String) = {
    GraphLoader.edgeListFile(sc, file)
  }

  "Correct number of vertices" should "be returned" in {
    // Given("Graph")
    val graph = loadGraph(getClass.getResource("/graph").toString)
    // When("Taking size")
    val bucketSize: Long = IterativeComputation.wholeGraphBucket(graph)
    // Then("Bucket size equals the vertex count")
    assert(graph.numVertices == bucketSize)
  }
}
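Note that wholeGraphBucket resolves without an import because the test lives in the same ml.sparkling.graph.api.operators package as IterativeComputation; judging by the assertion above, it is expected to return a single bucket whose size equals the graph's total vertex count.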