Client Remote Access to a Highly Available (HA) HDFS
When the Hadoop NameNode is deployed as an HA cluster, the active NameNode can switch dynamically at runtime. A client accessing HDFS remotely can handle this in two ways:
Method 1: configure multiple HDFS addresses and, before each operation, probe for the one that is currently reachable.
The addresses are given as a comma-separated list, e.g. hdfs://192.168.2.102:9000,hdfs://192.168.2.101:9000.
private void hdfsInit(String hdfs) {
    HdfsPath.setHdfs(hdfs);
    String[] paths = HdfsPath.getHdfs().split(",");
    for (String path : paths) {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", path);
        conf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
        // Probe the address by listing the root directory; the first address
        // that answers is kept for all subsequent operations.
        try (FileSystem fs = FileSystem.newInstance(conf)) {
            fs.listStatus(new Path("/"));
            HdfsPath.setHdfs(path);
            return;
        } catch (IOException e) {
            logger.warn(path + " refused the connection; " + e);
        }
    }
    logger.error("Invalid hdfs configuration! " + hdfs);
}
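The method above relies on an HdfsPath helper and a logger field that are not shown here (plus the usual org.apache.hadoop.conf and org.apache.hadoop.fs imports). A minimal sketch of such a holder class, so the snippet compiles, together with an example call; the class name and its static field are assumptions taken from the snippet:

// Hypothetical holder for the currently usable HDFS address, assumed by hdfsInit().
public class HdfsPath {
    private static volatile String hdfs;

    public static void setHdfs(String value) {
        hdfs = value;
    }

    public static String getHdfs() {
        return hdfs;
    }
}

// Example call: probe both NameNodes and remember whichever one answers first.
// hdfsInit("hdfs://192.168.2.102:9000,hdfs://192.168.2.101:9000");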
Method 2: write all of the NameNode-related parameters into the Configuration object. Code:
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class Demo {

    private static void uploadToHdfs() throws Exception {
        // Local source file
        String localSrc = "d:/PowerDesigner15_Evaluation.rar";
        // Destination path in HDFS
        String dest = "/user/PowerDesigner15_Evaluation.rar";

        InputStream in = new BufferedInputStream(new FileInputStream(localSrc));

        // Build the configuration with all HA nameservice parameters
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://ns1");
        conf.set("dfs.nameservices", "ns1");
        conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
        conf.set("dfs.namenode.rpc-address.ns1.nn1", "192.168.2.101:9000");
        conf.set("dfs.namenode.rpc-address.ns1.nn2", "192.168.2.102:9000");
        conf.set("dfs.client.failover.proxy.provider.ns1",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");

        // File system handle, connecting through the logical nameservice as user "hadoop"
        FileSystem fs = FileSystem.get(new URI("hdfs://ns1"), conf, "hadoop");

        // Output stream in HDFS
        OutputStream out = fs.create(new Path(dest));

        // Pipe the input stream into the output stream; the final "true" closes both streams
        IOUtils.copyBytes(in, out, 4096, true);
    }

    public static void main(String[] args) throws Exception {
        uploadToHdfs();
    }
}
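Reading works the same way: the client only ever addresses the logical nameservice hdfs://ns1, and the ConfiguredFailoverProxyProvider resolves the active NameNode. A minimal download sketch under the same assumptions (same nameservice, NameNode addresses, and "hadoop" user as above; the local output path is made up for illustration):

import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class DownloadDemo {

    // Same HA-aware Configuration as in the upload example above.
    private static Configuration haConf() {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://ns1");
        conf.set("dfs.nameservices", "ns1");
        conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
        conf.set("dfs.namenode.rpc-address.ns1.nn1", "192.168.2.101:9000");
        conf.set("dfs.namenode.rpc-address.ns1.nn2", "192.168.2.102:9000");
        conf.set("dfs.client.failover.proxy.provider.ns1",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        return conf;
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = haConf();
        // Connect through the logical nameservice; the client picks the active NameNode itself.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://ns1"), conf, "hadoop");
             InputStream in = fs.open(new Path("/user/PowerDesigner15_Evaluation.rar"));
             OutputStream out = new FileOutputStream("d:/PowerDesigner15_Evaluation_copy.rar")) {
            // Copy the HDFS file back to the local disk; streams are closed by try-with-resources.
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }
}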