HDFS file traversal methods
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
class HelloWord {
    public static void main(String[] args) {
        Hdfs();   //method 1: listFiles (recursive)
        Hdfs2();  //method 2: listStatus (single level)
    }

    //Method 1: recursive listing with listFiles
    public static void Hdfs() {
        try {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://10.8.6.126:8020");
            FileSystem fs = null;
            //fs = FileSystem.get(new URI("hdfs://10.8.6.126:8020"), conf); //either way works for setting the HDFS address
            fs = FileSystem.get(conf);
            //listFiles with recursive=true walks the whole subtree under the given path
            RemoteIterator<LocatedFileStatus> lt = fs.listFiles(new Path("hdfs://10.8.6.126:8020/ada/lyy/App"), true);
            while (lt.hasNext()) {
                LocatedFileStatus file = lt.next();
                if (file.isFile()) {
                    Path path = file.getPath();
                    System.out.println("file: [" + path.toString() + "]");
                    System.out.println("file name: [" + path.getName() + "]"); //file name only, no path information
                } else {
                    Path path = file.getPath();
                    System.out.println("directory: [" + path.toString() + "]");
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    //Method 2: single-level listing with listStatus
    public static void Hdfs2() {
        try {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", "hdfs://10.8.6.126:8020");
            FileSystem fs = null;
            //fs = FileSystem.get(new URI("hdfs://10.8.6.126:8020"), conf); //either way works for setting the HDFS address
            fs = FileSystem.get(conf);
            Path path = new Path("/shell");
            //listStatus returns a FileStatus[] with one entry per file or directory directly under the given path
            FileStatus[] fileStatuses = fs.listStatus(path);
            for (FileStatus fileStatus : fileStatuses) {
                //check whether the current entry is a directory
                boolean isDir = fileStatus.isDirectory();
                //get the absolute path of the current entry
                String fullPath = fileStatus.getPath().toString();
                System.out.println("isDir:" + isDir + ",Path:" + fullPath);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
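Note: listFiles(path, true) in method 1 recurses on its own, while listStatus in method 2 only lists the immediate children of one directory. Below is a minimal sketch of a recursive variant built on listStatus; the helper name listRecursive is made up for illustration, and it assumes an already-opened FileSystem like the ones above:

//Recursive listing built on listStatus: descend into each subdirectory by hand
public static void listRecursive(FileSystem fs, Path dir) throws IOException {
    for (FileStatus status : fs.listStatus(dir)) {
        if (status.isDirectory()) {
            System.out.println("directory: [" + status.getPath() + "]");
            listRecursive(fs, status.getPath()); //recurse into the subdirectory
        } else {
            System.out.println("file: [" + status.getPath() + "]");
        }
    }
}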
Reading both local and HDFS directories:
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.IOException;

public class TestHdfs {
    public static void main(String[] args) {
        //load the configuration
        Configuration conf = new Configuration();
        //String path = "hdfs://10.8.6.126:8020/ada/lyy/data/NaiveBayesModel.model";
        String path = "file:///home/liyanyan/cluster/NaiveBayesModel.model";
        String classfile = "";
        //list the model's metadata directory
        FileStatus[] listFile = null;
        FileSystem fs = null;
        try {
            if (path.startsWith("hdfs:")) {
                //use the "hdfs://host:port" prefix of the path as fs.defaultFS
                conf.set("fs.defaultFS", path.substring(0, path.indexOf('/', path.indexOf(':') + 3)));
                fs = FileSystem.get(conf);
            } else if (path.startsWith("file:")) {
                //local filesystem
                fs = FileSystem.getLocal(conf);
            }
            //list the metadata directory, skipping entries whose names start with . or _
            listFile = fs.listStatus(new Path(path + "/metadata"), new RegxRejectPathFilter("^[._]+\\w+.*$"));
            if (listFile.length == 1) {
                classfile = listFile[0].getPath().toString();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        //rewrite the "file:" prefix to "file://" so the printed path reads file:///...
        System.out.println("classfile = " + classfile.replaceFirst("file:", "file://"));
    }

    //PathFilter that rejects any path whose file name matches the given regex
    private static class RegxRejectPathFilter implements PathFilter {
        private final String regex;

        public RegxRejectPathFilter(String regex) {
            this.regex = regex;
        }

        @Override
        public boolean accept(Path path) {
            boolean flag = path.getName().matches(regex);
            return !flag;
        }
    }
}
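As an alternative to branching on the "hdfs:"/"file:" prefix as above, FileSystem.get(URI, Configuration) picks the local or HDFS filesystem from the scheme of the path itself. A minimal sketch under that assumption (pickFileSystem is a hypothetical helper, using the same imports as TestHdfs):

//Sketch: resolve the filesystem from the path's own scheme instead of checking the prefix by hand
private static FileSystem pickFileSystem(String path, Configuration conf) throws IOException {
    //returns the local filesystem for "file://..." paths and HDFS for "hdfs://host:port/..." paths
    return FileSystem.get(new Path(path).toUri(), conf);
}

With this helper, main could simply call fs = pickFileSystem(path, conf) regardless of where the model lives.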