
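A small demo of building a property graph with Spark GraphX: ten integer vertices and eight directed edges are declared as local collections and turned into the vertex and edge RDDs that GraphX's Graph factory expects.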
package com.test

import org.apache.spark.graphx._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

import scala.collection.mutable.ListBuffer
object GraphxDHDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("yy").getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("ERROR")

    val input_1 = ListBuffer(
      1, 2, 3, 4, 5, 6, 7, 8, 9, 10
    )
    val input_2 = ListBuffer(
      List(3, 1),
      List(1, 4),
      List(2, 5),
      List(5, 6),
      List(7, 3),
      List(9, 7),
      List(10, 3),
      List(8, 7)
    )
    // build the vertex input: (vertexId, (name, attribute))
    val input1 = input_1.map(x => (x.toLong, (x.toString, 1)))
    // build the edge input, dropping duplicate edges
    val input2 = input_2.distinct.map(x => Edge(x(0).toLong, x(1).toLong, 1))
    // create the vertex RDD
    val users: RDD[(VertexId, (String, PartitionID))] = sc.makeRDD(input1)
    // create the RDD of relationships (edges) between the vertices
    val relationships: RDD[Edge[Int]] = sc.makeRDD(input2)
    // the original post is cut off after this point; assembling the graph and
    // printing each vertex with its total degree is one plausible ending
    val graph = Graph(users, relationships)
    graph.degrees.join(users).collect().foreach(println)

    spark.stop()
  }
}
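As a follow-up, GraphX's triplet view joins every edge with the attributes of its source and destination vertices, which is handy for eyeballing the structure just built. A minimal sketch, reusing the graph value from the demo above:

    // print one line per edge, e.g. "3 -> 1"
    graph.triplets
      .map(t => s"${t.srcAttr._1} -> ${t.dstAttr._1}")
      .collect()
      .foreach(println)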