
Scala GraphLoader Class Code Examples


This article collects typical usage examples of the org.apache.spark.graphx.GraphLoader class in Scala. If you are wondering what the GraphLoader class does and how to use it, the selected class code examples below should help.



The following presents 3 code examples of the GraphLoader class, ordered by popularity by default.
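
Before diving into the examples, here is a minimal sketch of the API they all build on: GraphLoader.edgeListFile reads a whitespace-separated edge list (one "srcId dstId" pair per line, with lines starting with # ignored) and returns a Graph[Int, Int]. The file name below is a placeholder.

import org.apache.spark.graphx.{Graph, GraphLoader}
import org.apache.spark.sql.SparkSession

object GraphLoaderSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder
      .master("local[*]")
      .appName("GraphLoader sketch")
      .getOrCreate()

    // "edges.txt" is a placeholder; the expected file format is, for example:
    //   # comment lines start with '#'
    //   1 2
    //   2 3
    //   3 1
    val graph: Graph[Int, Int] =
      GraphLoader.edgeListFile(spark.sparkContext, "edges.txt")

    println(s"vertices = ${graph.numVertices}, edges = ${graph.numEdges}")
    spark.stop()
  }
}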

Example 1: RDFGraphPIClustering

// Package declaration and imported dependencies
package net.sansa_stack.examples.spark.ml.clustering

import scala.collection.mutable
import org.apache.spark.sql.SparkSession
import org.apache.log4j.{ Level, Logger }
import org.apache.spark.graphx.GraphLoader
import net.sansa_stack.ml.spark.clustering.{ RDFGraphPICClustering => RDFGraphPICClusteringAlg }

object RDFGraphPIClustering {
  def main(args: Array[String]) = {
    if (args.length < 3) {
      System.err.println(
        "Usage: RDFGraphPIClustering <input> <k> <numIterations>")
      System.exit(1)
    }
    val input = args(0) //"src/main/resources/BorderFlow_Sample1.txt"
    val k = args(1).toInt
    val numIterations = args(2).toInt
    val optionsList = args.drop(3).map { arg =>
      arg.dropWhile(_ == '-').split('=') match {
        case Array(opt, v) => (opt -> v)
        case _             => throw new IllegalArgumentException("Invalid argument: " + arg)
      }
    }
    val options = mutable.Map(optionsList: _*)

    options.foreach {
      case (opt, _) => throw new IllegalArgumentException("Invalid option: " + opt)
    }
    println("============================================")
    println("| Power Iteration Clustering   example     |")
    println("============================================")

    val sparkSession = SparkSession.builder
      .master("local[*]")
      .appName(" Power Iteration Clustering example (" + input + ")")
      .getOrCreate()
    Logger.getRootLogger.setLevel(Level.ERROR)

    // Load the graph 
    val graph = GraphLoader.edgeListFile(sparkSession.sparkContext, input)

    val model = RDFGraphPICClusteringAlg(sparkSession, graph, k, numIterations).run()

    val clusters = model.assignments.collect().groupBy(_.cluster).mapValues(_.map(_.id))
    val assignments = clusters.toList.sortBy { case (k, v) => v.length }
    val assignmentsStr = assignments
      .map {
        case (k, v) =>
          s"$k -> ${v.sorted.mkString("[", ",", "]")}"
      }.mkString(",")
    val sizesStr = assignments.map {
      _._2.size
    }.sorted.mkString("(", ",", ")")
    println(s"Cluster assignments: $assignmentsStr\ncluster sizes: $sizesStr")

    sparkSession.stop
  }

} 
Developer: SANSA-Stack | Project: SANSA-Examples | Lines of code: 61 | Source file: RDFGraphPIClustering.scala
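
The argument handling in Example 1 deserves a brief note: arg.dropWhile(_ == '-').split('=') turns a --key=value flag into a (key, value) pair, and since this example defines no named options, the subsequent options.foreach rejects anything passed beyond the three positional arguments. Below is a self-contained sketch of the same idiom; the flag names are made up purely for illustration.

object OptionParsingSketch extends App {
  // Hypothetical flags, used only to illustrate the "--key=value" idiom above.
  val flags = Array("--threshold=0.5", "--mode=fast")

  val options: Map[String, String] = flags.map { arg =>
    arg.dropWhile(_ == '-').split('=') match {
      case Array(opt, v) => opt -> v
      case _             => throw new IllegalArgumentException("Invalid argument: " + arg)
    }
  }.toMap

  println(options) // Map(threshold -> 0.5, mode -> fast)
}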


Example 2: MeasureTest

// Package declaration and imported dependencies
package ml.sparkling.graph.operators

import org.apache.log4j.Logger
import org.apache.spark.SparkContext
import org.apache.spark.graphx.{Graph, GraphLoader}
import org.scalatest._


abstract class MeasureTest(implicit sc:SparkContext)  extends FlatSpec with BeforeAndAfterAll with GivenWhenThen with Matchers with BeforeAndAfterEach{
  def time[T](str: String)(thunk: => T): (T,Long) = {
    logger.info(s"$str...")
    val t1 = System.currentTimeMillis
    val x = thunk
    val t2 = System.currentTimeMillis
    val diff=t2 - t1
    logger.info(s"$diff ms")
    (x,diff)
  }

  val logger=Logger.getLogger(this.getClass)

  def loadGraph(file:String)={
    val out: Graph[Int, Int] =GraphLoader.edgeListFile(sc,file.toString)
    out.vertices.setName(s"Graph vertices ${file}")
    out.edges.setName(s"Graph edges ${file}")
    out.triplets.setName(s"Graph triplets ${file}")
    out
  }


  override def  beforeEach(testData: TestData) = {
    logger.info(s"${Console.GREEN} Running test ${testData.name} ${Console.RESET} ")
  }


} 
Developer: sparkling-graph | Project: sparkling-graph | Lines of code: 38 | Source file: MeasureTest.scala
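
A concrete spec built on this base class might look like the following sketch. The implicit SparkContext and the /graph test resource are assumptions made for illustration; loadGraph and time are the helpers defined in MeasureTest above.

package ml.sparkling.graph.operators // assumed: placed alongside MeasureTest

import org.apache.spark.SparkContext

class DegreeMeasureTest(implicit sc: SparkContext) extends MeasureTest {

  "Vertex degrees" should "be computable on a loaded graph" in {
    val graph = loadGraph(getClass.getResource("/graph").toString)
    // Every vertex loaded from an edge list participates in at least one edge,
    // so the degrees RDD covers all vertices.
    val (degreeCount, elapsedMs) = time("counting vertex degrees")(graph.degrees.count())
    assert(degreeCount == graph.numVertices)
  }
}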


Example 3: IterativeComputation

// Package declaration and imported dependencies
package ml.sparkling.graph.api.operators

import org.apache.spark.graphx.GraphLoader
import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfter, FlatSpec}


class IterativeComputation$Test extends FlatSpec with BeforeAndAfter{

  val master = "local[*]"
  def appName:String="IterativeComputationTest"

  implicit val sc:SparkContext= {
    val conf = new SparkConf()
      .setMaster(master)
      .setAppName(appName)
    new SparkContext(conf)
  }

  after {
    if(!sc.isStopped){
      sc.stop()
    }
  }

  def loadGraph(file:String)={
    GraphLoader.edgeListFile(sc,file.toString)
  }

  "Correct number of vertices " should "be returned" in{
    //Given("Graph")
    val graph=loadGraph(getClass.getResource("/graph").toString)
    //When("Taking size")
    val bucketSize: Long = IterativeComputation.wholeGraphBucket(graph)
    //Then("")
    assert(graph.numVertices==bucketSize)
  }

} 
Developer: sparkling-graph | Project: sparkling-graph | Lines of code: 40 | Source file: IterativeComputation$Test.scala
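
Example 3 uses the two-argument form of GraphLoader.edgeListFile; the loader also accepts optional parameters such as canonicalOrientation (orient every edge so that srcId < dstId, useful for algorithms that expect a canonical direction) and numEdgePartitions (how many partitions to split the edge list into). A minimal sketch, with a hypothetical file path:

import org.apache.spark.graphx.{Graph, GraphLoader}
import org.apache.spark.{SparkConf, SparkContext}

object EdgeListOptionsSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setMaster("local[*]").setAppName("EdgeListOptionsSketch"))

    val graph: Graph[Int, Int] = GraphLoader.edgeListFile(
      sc,
      "src/test/resources/graph",  // hypothetical path to an edge-list file
      canonicalOrientation = true, // orient edges so that srcId < dstId
      numEdgePartitions = 4)       // load the edge list into 4 partitions

    println(s"vertices = ${graph.numVertices}, edges = ${graph.numEdges}")
    sc.stop()
  }
}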



Note: The org.apache.spark.graphx.GraphLoader class examples in this article were compiled from open-source projects hosted on GitHub, MSDocs, and similar platforms. The code snippets were selected from projects contributed by their original authors, and copyright remains with those authors; consult each project's license before distributing or reusing the code. Please do not republish this article without permission.

