Spark Pregel无法与Java配合使用。

3

我正在使用Java API与GraphX和Pregel进行工作。我尝试实现一个最大值算法(给定一个加权图,输出是最大权重)。但我的实现不起作用:

public class Main {

public static void main(String[] args){
    SparkConf conf = new SparkConf().setAppName("MaxValue").setMaster("spark://home:7077");

    JavaSparkContext sc = new JavaSparkContext(conf);

    JavaRDD<String> text_file = sc.textFile(args[0]);

    JavaRDD<String[]> text_file_arr = text_file.map(l -> l.split(" "));

    //cache
    text_file_arr.cache();

    //create the vertex RDD
    RDD<Tuple2<Object, Integer>> verteces = text_file_arr.map(
            t-> new Tuple2<>((Object) Long.parseLong(t[0]), Integer.parseInt(t[t.length-1]))
    ).rdd();

    //create edge RDD
    RDD<Edge<Boolean>> edges = text_file_arr
            .flatMap( l -> {
                List<Edge<Boolean>> edgeList = new ArrayList<>();
                long src = Long.parseLong(l[0]);
                for (int i = 1;i<l.length-1;++i){
                    edgeList.add(new Edge(src,Long.parseLong(l[i]),true));
                }
                return edgeList.iterator();
            })
            .rdd();
    //create the graph
    Graph<Integer,Boolean> graph = Graph.apply(
            verteces,
            edges,
            Integer.MIN_VALUE,
            StorageLevel.MEMORY_AND_DISK(),
            StorageLevel.MEMORY_AND_DISK(),
            ClassTag$.MODULE$.apply(Integer.class),
            ClassTag$.MODULE$.apply(Boolean.class)
    );

    graph.edges().toJavaRDD().collect().forEach(System.out::print);
    graph.vertices().toJavaRDD().collect().forEach(System.out::print);

    GraphOps<Integer,Boolean> graph_ops = new GraphOps<>(
            graph,
            ClassTag$.MODULE$.apply(Integer.class),
            ClassTag$.MODULE$.apply(Boolean.class)
    );
    //run pregel
    Graph<Integer,Boolean> graph_pregel = graph_ops.pregel(
            Integer.MIN_VALUE,
            3,
            EdgeDirection.Either(),
            new VProg(),
            new SendMsg(),
            new Merge(),
            ClassTag$.MODULE$.apply(Integer.class)
    );

    graph_pregel.vertices().toJavaRDD().saveAsTextFile("out");



    }
}

这些是VProg、SendMsg和Merge类。

class SendMsg extends AbstractFunction1<EdgeTriplet<Integer,Boolean>, Iterator<Tuple2<Object, Integer>>> implements Serializable {

    @Override
    public Iterator<Tuple2<Object, Integer>> apply(EdgeTriplet<Integer, Boolean> et) {
        System.out.println(et.srcId()+" ---> "+et.dstId()+" with: "+et.srcAttr()+" ---> "+et.dstId());

        if (et.srcAttr() > et.dstAttr()) {
            return JavaConverters.asScalaIteratorConverter(Arrays.asList(et.toTuple()._1()).iterator()).asScala();
        }else{
            return JavaConverters.asScalaIteratorConverter(new ArrayList<Tuple2<Object, Integer>>().iterator()).asScala();
        }
    }
}

class VProg extends AbstractFunction3<Object, Integer, Integer, Integer> implements Serializable{
    @Override
    public Integer apply(Object l, Integer treeNodeThis, Integer treeNodeIn) {
        if (treeNodeThis > treeNodeIn) {
            System.out.println(l + " : " + treeNodeThis);
            return treeNodeThis;
        } else {
            System.out.println(l + " : " + treeNodeIn);
            return treeNodeIn;
        }
    }
}

class Merge extends AbstractFunction2<Integer, Integer, Integer> implements Serializable{
    @Override
    public Integer apply(Integer n1, Integer n2) {
        return (n1>n2)? n1:n2;
    }
}

问题在于,当节点运行VProg后,会执行SendMsg但是数值没有更新。这意味着,VProg返回新值,但图仍然是输入的图形。我也尝试了其他算法,但出现了相同的问题。也许我编写的类VProg,SendMsg或Merge有误?
该图与7个节点连接,并且每个节点的值都为2^nodenumber。
我也尝试使用Pregel类,但还是有同样的问题... 我正在使用Spark 2.0.0和Java 8。
1个回答

1
经过多次尝试,我认为Spark-Pregel Java API存在bug。我用Scala实现了相同的算法,它可以正常工作。
object Main {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("ScalaMaxValue").setMaster("spark://home:7077")
    val sc = new SparkContext(conf)

    val text_file_arr: RDD[Array[String]] =  sc.textFile(args(0)).map(l => l.split(" "))

    val vertices: RDD[(VertexId, Int)] = text_file_arr.map(t => (t(0).toLong, t(t.length - 1).toInt))

    val edges: RDD[Edge[Boolean]] = text_file_arr.flatMap(l => {
      val edgeList = new ListBuffer[Edge[Boolean]] //: List[Edge[Boolean]] = List()
      val i = 0;
      val src = l(0).toLong
      for (i <- 0 to (l.length - 1)) {
        val edge = Edge(src, l(i).toLong, true)
        edgeList += edge
      }
      edgeList.toList
    });

    val graph = Graph(vertices,edges,Int.MinValue)

    val graph_pregel = Pregel(graph,Int.MinValue,Int.MaxValue)(vProg,sendMsg,merge)

    //graph_pregel.vertices.saveAsTextFile("out")

    println(graph_pregel.vertices.collect()(0))
  }

  def vProg(id:VertexId, act: Int, other: Int): Int = {
    if (other<act){
      act
    }else{
      other
    }
  }

  def sendMsg(et : EdgeTriplet[Int,Boolean]) : Iterator[(VertexId, Int)] = {
    if(et.srcAttr > et.dstAttr){
      Iterator((et.dstId,et.srcAttr))
    }else{
      Iterator.empty
    }
  }

  def merge(n1:Int, n2:Int): Int = {
    if (n1<n2) n2 else n1
  }
}

输入格式为:

#nodeID #neighborID_1 ...  #neighborID_N #value
. . .

网页内容由stack overflow 提供, 点击上面的
可以查看英文原文,
原文链接