Problem description
I've deployed only a single node for testing, but I can't read files from HDFS; this IO exception is thrown every time. Any help would be appreciated. nero01 is the hostname and 192.168.189.101 is the IP address.

scala> textFile.count
java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Message missing required fields: callId, status; Host Details : local host is: "nero01/192.168.189.101"; destination host is: "nero01":9000;
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:760)
    at org.apache.hadoop.ipc.Client.call(Client.java:1229)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:202)
    at sun.proxy.$Proxy16.getFileInfo(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:164)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:83)
    at sun.proxy.$Proxy16.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:628)
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1532)
    at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:803)
    at org.apache.hadoop.fs.FileSystem.globStatusInternal(FileSystem.java:1635)
    at org.apache.hadoop.fs.FileSystem.globStatus(FileSystem.java:1581)
    at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:174)
    at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:205)
    at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:201)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:205)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:203)
    at scala.Option.getOrElse(Option.scala:120)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:203)
    at org.apache.spark.rdd.MappedRDD.getPartitions(MappedRDD.scala:28)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:205)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:203)
    at scala.Option.getOrElse(Option.scala:120)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:203)
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1328)
    at org.apache.spark.rdd.RDD.count(RDD.scala:910)
    at $iwC$$iwC$$iwC$$iwC.<init>(<console>:15)
    at $iwC$$iwC$$iwC.<init>(<console>:20)
    at $iwC$$iwC.<init>(<console>:22)
    at $iwC.<init>(<console>:24)
    at <init>(<console>:26)
    at .<init>(<console>:30)
    at .<clinit>(<console>)
    at .<init>(<console>:7)
    at .<clinit>(<console>)
    at $print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:852)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1125)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:674)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:705)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:669)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:828)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:873)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:785)
    at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:628)
    at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:636)
    at org.apache.spark.repl.SparkILoop.loop(SparkILoop.scala:641)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:968)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
    at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply(SparkILoop.scala:916)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:916)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1011)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
    at java.lang.reflect.Method.invoke(Method.java:597)
    at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: com.google.protobuf.InvalidProtocolBufferException: Message missing required fields: callId, status
    at com.google.protobuf.UninitializedMessageException.asInvalidProtocolBufferException(UninitializedMessageException.java:81)
    at org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos$RpcResponseHeaderProto$Builder.buildParsed(RpcPayloadHeaderProtos.java:1094)
    at org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos$RpcResponseHeaderProto$Builder.access$1300(RpcPayloadHeaderProtos.java:1028)
    at org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos$RpcResponseHeaderProto.parseDelimitedFrom(RpcPayloadHeaderProtos.java:986)
    at org.apache.hadoop.ipc.Client$Connection.receiveResponse(Client.java:938)
    at org.apache.hadoop.ipc.Client$Connection.run(Client.java:836)

Has anyone run into this kind of problem? If you can work out the cause and fix it, I'll add bonus points.
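For reference, the failing call corresponds to something like the sketch below. This is a minimal, hedged reconstruction rather than the exact session: the definition of textFile and the HDFS path are placeholders, and the VersionInfo check is an added diagnostic. The Caused by frames above go through org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos, an IPC header class from the Hadoop 2.0.x-alpha line, and this "Message missing required fields: callId, status" error is commonly reported when the Hadoop client bundled into the Spark build and the Hadoop version serving the NameNode speak different RPC protocol versions.

    // Run inside spark-shell, where sc is predefined.
    // The path is a placeholder -- substitute a file that actually exists on HDFS.
    val textFile = sc.textFile("hdfs://nero01:9000/user/test/README.md")
    textFile.count()  // this is the call that throws the IOException shown above

    // Added diagnostic (an assumption, not from the original post): print the
    // Hadoop version the Spark client was built against, then compare it with
    // the version the NameNode host reports.
    import org.apache.hadoop.util.VersionInfo
    println(VersionInfo.getVersion)

If the two versions disagree (for example, a Spark assembly built against Hadoop 1.x or 2.0.x-alpha talking to a Hadoop 2.2+ NameNode), the commonly reported fix for this exact error is to rebuild or re-download Spark for the Hadoop version the cluster actually runs.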
Solutions
Solution 2:

Solution 3:
Did you ever solve this? I'm running into the same problem.

Solution 4:
Did you ever solve this? I'm running into the same problem.

Solution 5:
Did you ever solve this? I'm running into the same problem.