问题描述
- java连接hadoop hdfs文件系统报错 10C
- 报错信息:
java.io.IOException: Failed on local exception: com.google.protobuf.InvalidProtocolBufferException: Protocol message end-group tag did not match expected tag.; Host Details : local host is: "localhost.localdomain/127.0.0.1"; destination host is: "172.16.6.57":9000;
at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:763)
at org.apache.hadoop.ipc.Client.call(Client.java:1229)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:202)
at $Proxy9.create(Unknown Source)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:164)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:83)
at $Proxy9.create(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:193)
at org.apache.hadoop.hdfs.DFSOutputStream.(DFSOutputStream.java:1324)
at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:1343)
at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1255)
at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1212)
at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:276)
at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:265)
at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:82)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:886)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:781)
at com.zk.hdfs.FileCopyToHdfs.uploadToHdfs(FileCopyToHdfs.java:44)
at com.zk.hdfs.FileCopyToHdfs.main(FileCopyToHdfs.java:21)
Caused by: com.google.protobuf.InvalidProtocolBufferException: Protocol message end-group tag did not match expected tag.
at com.google.protobuf.InvalidProtocolBufferException.invalidEndTag(InvalidProtocolBufferException.java:73)
at com.google.protobuf.CodedInputStream.checkLastTagWas(CodedInputStream.java:124)
at com.google.protobuf.AbstractMessageLite$Builder.mergeFrom(AbstractMessageLite.java:213)
at com.google.protobuf.AbstractMessage$Builder.mergeFrom(AbstractMessage.java:746)
at com.google.protobuf.AbstractMessage$Builder.mergeFrom(AbstractMessage.java:238)
at com.google.protobuf.AbstractMessageLite$Builder.mergeDelimitedFrom(AbstractMessageLite.java:282)
at com.google.protobuf.AbstractMessage$Builder.mergeDelimitedFrom(AbstractMessage.java:760)
at com.google.protobuf.AbstractMessageLite$Builder.mergeDelimitedFrom(AbstractMessageLite.java:288)
at com.google.protobuf.AbstractMessage$Builder.mergeDelimitedFrom(AbstractMessage.java:752)
at org.apache.hadoop.ipc.protobuf.RpcPayloadHeaderProtos$RpcResponseHeaderProto.parseDelimitedFrom(RpcPayloadHeaderProtos.java:985)
at org.apache.hadoop.ipc.Client$Connection.receiveResponse(Client.java:938)
at org.apache.hadoop.ipc.Client$Connection.run(Client.java:836)

代码是在网上找的：

package com.zk.hdfs;
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;public class FileCopyToHdfs {
public static void main(String[] args) throws Exception { try { uploadToHdfs(); //deleteFromHdfs(); //getDirectoryFromHdfs();
// appendToHdfs();
// readFromHdfs();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
finally
{
System.out.println(""SUCCESS"");
}
}
/**上传文件到HDFS上去*/public static void uploadToHdfs() throws FileNotFoundExceptionIOException { String localSrc = ""e:/test.txt""; String dst = ""hdfs://172.16.6.57:9000/user/abc/zk/test1.txt""; InputStream in = new BufferedInputStream(new FileInputStream(localSrc)); Configuration conf = new Configuration(); FileSystem fs = FileSystem.get(URI.create(dst) conf); OutputStream out = fs.create(new Path(dst) new Progressable() { public void progress() { System.out.print("".""); } }); IOUtils.copyBytes(in out 4096 true); }
}
总是报连接问题,网上搜不到资料,大牛帮下忙啊
解决方案
访问Hadoop的HDFS文件系统的Java实现
hadoop 的HDFS文件系统
Hadoop HDFS文件系统通过java FileSystem 实现上传下载等
解决方案二:
链接地址 跟你的电脑IP一样吗?
解决方案三:
local host is: "localhost.localdomain/127.0.0.1"; destination host is: "172.16.6.57":9000; 这像是访问 IP/端口的问题。补充说明：InvalidProtocolBufferException（end-group tag 不匹配）通常表示客户端的 Hadoop/protobuf 版本与 NameNode 不一致，或者连到了错误的端口（例如连到 HTTP 端口而不是 RPC 端口 9000）。请确认客户端依赖的 Hadoop jar 版本与集群一致，并核对 core-site.xml 中 fs.defaultFS 配置的地址和端口。