Spark类转换异常:无法将FiniteDuration实例分配给RpcTimeout.duration字段

4

这是什么问题?是类路径上出现了错误版本的库吗?应该尝试哪些解决办法?

05:28:20.142 [main] ERROR org.apache.spark.deploy.yarn.ApplicationMaster - Uncaught exception: 
org.apache.spark.SparkException: Exception thrown in awaitResult
    at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:77) ~[spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:75) ~[spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:36) ~[bmss-producer-1.0.8-SNAPSHOT-dist.jar:1.0.8-SNAPSHOT]
    at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59) ~[spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59) ~[spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at scala.PartialFunction$OrElse.apply(PartialFunction.scala:167) ~[bmss-producer-1.0.8-SNAPSHOT-dist.jar:1.0.8-SNAPSHOT]
    at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:83) ~[spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:100) ~[spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:108) ~[spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.yarn.ApplicationMaster.runAMEndpoint(ApplicationMaster.scala:384) ~[spark-yarn_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.yarn.ApplicationMaster.waitForSparkDriver(ApplicationMaster.scala:584) ~[spark-yarn_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.yarn.ApplicationMaster.runExecutorLauncher(ApplicationMaster.scala:433) [spark-yarn_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.yarn.ApplicationMaster.run(ApplicationMaster.scala:256) [spark-yarn_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.yarn.ApplicationMaster$$anonfun$main$1.apply$mcV$sp(ApplicationMaster.scala:775) [spark-yarn_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:67) [spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:66) [spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_141]
    at javax.security.auth.Subject.doAs(Subject.java:422) [?:1.8.0_141]
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920) [avro-tools-1.7.6-cdh5.10.1.jar:1.7.6-cdh5.10.1]
    at org.apache.spark.deploy.SparkHadoopUtil.runAsSparkUser(SparkHadoopUtil.scala:66) [spark-core_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.yarn.ApplicationMaster$.main(ApplicationMaster.scala:773) [spark-yarn_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.yarn.ExecutorLauncher$.main(ApplicationMaster.scala:796) [spark-yarn_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.deploy.yarn.ExecutorLauncher.main(ApplicationMaster.scala) [spark-yarn_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
Caused by: java.lang.RuntimeException: java.lang.ClassCastException: cannot assign instance of scala.concurrent.duration.FiniteDuration to field org.apache.spark.rpc.RpcTimeout.duration of type scala.concurrent.duration.FiniteDuration in instance of org.apache.spark.rpc.RpcTimeout
    at java.io.ObjectStreamClass$FieldReflector.setObjFieldValues(ObjectStreamClass.java:2083)
    at java.io.ObjectStreamClass.setObjFieldValues(ObjectStreamClass.java:1261)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1996)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
    at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
    at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:108)
    at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$deserialize$1$$anonfun$apply$1.apply(NettyRpcEnv.scala:259)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
    at org.apache.spark.rpc.netty.NettyRpcEnv.deserialize(NettyRpcEnv.scala:308)
    at org.apache.spark.rpc.netty.NettyRpcEnv$$anonfun$deserialize$1.apply(NettyRpcEnv.scala:258)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
    at org.apache.spark.rpc.netty.NettyRpcEnv.deserialize(NettyRpcEnv.scala:257)
    at org.apache.spark.rpc.netty.NettyRpcHandler.internalReceive(NettyRpcEnv.scala:577)
    at org.apache.spark.rpc.netty.NettyRpcHandler.receive(NettyRpcEnv.scala:562)
    at org.apache.spark.network.server.TransportRequestHandler.processRpcRequest(TransportRequestHandler.java:159)
    at org.apache.spark.network.server.TransportRequestHandler.handle(TransportRequestHandler.java:107)
    at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:119)
    at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:51)
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
    at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:266)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
    at org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:85)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346)
    at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353)
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:652)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:575)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:489)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:451)
    at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:140)
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
    at java.lang.Thread.run(Thread.java:745)

    at org.apache.spark.network.client.TransportResponseHandler.handle(TransportResponseHandler.java:189) ~[spark-network-common_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:121) ~[spark-network-common_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:51) ~[spark-network-common_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:266) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:85) ~[spark-network-common_2.11-2.1.0.cloudera1.jar:2.1.0.cloudera1]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:346) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1294) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:367) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:353) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:911) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:652) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:575) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:489) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:451) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:140) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144) ~[netty-all-4.0.42.Final.jar:4.0.42.Final]
    at java.lang.Thread.run(Thread.java:748) ~[?:1.8.0_141]
2个回答

1
Scala 标准库的类(scala-library)被一并打包进了 Spark 应用程序的分发 jar 中,导致类路径上同时存在两个不同版本的同名类,反序列化时就会抛出这种 ClassCastException。解决方法是将所有与集群自带版本冲突的依赖项的作用域改为 "provided",使它们不被打进应用 jar。

你找到任何解决方案了吗? - Kal-ko

0
你是否使用 Shadow Jar(fat jar)打包了一些传递依赖 scala-library 的库?
只需要在打包时排除 scala-library 即可。
以我自己为例:我使用的 cosine-lsh-join-spark_2.11 依赖于 scala-library。如果不排除 scala-library,就会遇到和你完全相同的异常。
compile('com.soundcloud:cosine-lsh-join-spark_2.11:1.0.5') { exclude module: 'scala-library' }

网页内容由 Stack Overflow 提供,点击上面的原文链接可以查看英文原文。