Problem description
16/03/03 17:36:03 WARN TaskSetManager: Lost task 0.0 in stage 0.0 (TID 0, localhost): java.net.NoRouteToHostException: No route to host
        java.net.PlainSocketImpl.socketConnect(Native Method)
        java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:339)
        java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:200)
        java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:182)
        java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
        java.net.Socket.connect(Socket.java:579)
        sun.net.NetworkClient.doConnect(NetworkClient.java:175)
        sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
        sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
        sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
        sun.net.www.http.HttpClient.New(HttpClient.java:308)
        sun.net.www.http.HttpClient.New(HttpClient.java:326)
        sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:997)
        sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:933)
        sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:851)
        org.apache.spark.util.Utils$.fetchFile(Utils.scala:375)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:325)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:323)
        scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:226)
        scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
        scala.collection.mutable.HashMap.foreach(HashMap.scala:98)
        scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
        org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:323)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:158)
        java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        java.lang.Thread.run(Thread.java:745)
16/03/03 17:36:03 INFO TaskSetManager: Starting task 0.1 in stage 0.0 (TID 1, localhost, PROCESS_LOCAL, 1240 bytes)
16/03/03 17:36:06 INFO TaskSetManager: Lost task 0.1 in stage 0.0 (TID 1) on executor localhost: java.net.NoRouteToHostException (No route to host) [duplicate 1]
16/03/03 17:36:06 INFO TaskSetManager: Starting task 0.2 in stage 0.0 (TID 2, localhost, PROCESS_LOCAL, 1240 bytes)
16/03/03 17:36:09 INFO TaskSetManager: Lost task 0.2 in stage 0.0 (TID 2) on executor localhost: java.net.NoRouteToHostException (No route to host) [duplicate 2]
16/03/03 17:36:09 INFO TaskSetManager: Starting task 0.3 in stage 0.0 (TID 3, localhost, PROCESS_LOCAL, 1240 bytes)
16/03/03 17:36:12 INFO TaskSetManager: Lost task 0.3 in stage 0.0 (TID 3) on executor localhost: java.net.NoRouteToHostException (No route to host) [duplicate 3]
16/03/03 17:36:12 ERROR TaskSetManager: Task 0 in stage 0.0 failed 4 times; aborting job
16/03/03 17:36:12 INFO TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
16/03/03 17:36:12 INFO TaskSchedulerImpl: Cancelling stage 0
16/03/03 17:36:12 INFO DAGScheduler: Failed to run first at test.scala:15
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 0.0 failed 4 times, most recent failure: Lost task 0.3 in stage 0.0 (TID 3, localhost): java.net.NoRouteToHostException: No route to host
        java.net.PlainSocketImpl.socketConnect(Native Method)
        java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:339)
        java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:200)
        java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:182)
        java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
        java.net.Socket.connect(Socket.java:579)
        sun.net.NetworkClient.doConnect(NetworkClient.java:175)
        sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
        sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
        sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
        sun.net.www.http.HttpClient.New(HttpClient.java:308)
        sun.net.www.http.HttpClient.New(HttpClient.java:326)
        sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:997)
        sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:933)
        sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:851)
        org.apache.spark.util.Utils$.fetchFile(Utils.scala:375)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:325)
        org.apache.spark.executor.Executor$$anonfun$org$apache$spark$executor$Executor$$updateDependencies$6.apply(Executor.scala:323)
        scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
        scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:226)
        scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
        scala.collection.mutable.HashMap.foreach(HashMap.scala:98)
        scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
        org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$updateDependencies(Executor.scala:323)
        org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:158)
        java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        java.lang.Thread.run(Thread.java:745)
Driver stacktrace:
        at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
        at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
        at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
        at scala.Option.foreach(Option.scala:236)
        at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:688)
        at org.apache.spark.scheduler.DAGSchedulerEventProcessActor$$anonfun$receive$2.applyOrElse(DAGScheduler.scala:1391)
        at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
        at akka.actor.ActorCell.invoke(ActorCell.scala:456)
        at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
        at akka.dispatch.Mailbox.run(Mailbox.scala:219)
        at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
        at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
        at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
        at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
        at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
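Reading the stack trace, every failed task dies inside org.apache.spark.util.Utils$.fetchFile, called from Executor.updateDependencies: before running the task body, the executor tries to download the application's JARs/files over HTTP and cannot open the connection, so the task fails with "No route to host". The log line "Failed to run first at test.scala:15" only tells us that a first() action was submitted from a file named test.scala; the sketch below is a hypothetical reconstruction of such a driver program, with the object name, input path, and configuration assumed rather than taken from the log.

// Hypothetical minimal driver program of the kind implied by the log
// ("Failed to run first at test.scala:15"); only the first() action is
// confirmed by the log, everything else here is an assumption.
import org.apache.spark.{SparkConf, SparkContext}

object Test {
  def main(args: Array[String]): Unit = {
    // Assumed configuration; the master URL / app name are not shown in the log.
    val conf = new SparkConf().setAppName("test")
    val sc = new SparkContext(conf)

    // Any action would do. Before this task runs on an executor, the executor
    // calls updateDependencies(), which uses Utils.fetchFile to download the
    // application JAR from the driver's HTTP file server -- the connection
    // that fails above with java.net.NoRouteToHostException.
    val lines = sc.textFile("hdfs:///tmp/input.txt") // hypothetical input path
    println(lines.first())

    sc.stop()
  }
}

The point of the sketch is that the exception is thrown before any user code executes on the executor, so the problem lies in the network path between the executor and the driver's file server rather than in the job logic itself.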