Hadoop: reading a file from HDFS and printing it

package a;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

public class a2 extends FSDataInputStream {

    public a2(InputStream in) {
        super(in);
    }
    // Open the file on HDFS and return its first line, or null if the file is
    // empty or cannot be read. try-with-resources ensures the stream and the
    // FileSystem handle are always closed.
    public static String readline(Configuration conf, String remoteFilePath) {
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream in = fs.open(new Path(remoteFilePath));
             BufferedReader d = new BufferedReader(new InputStreamReader(in))) {
            return d.readLine();
        } catch (IOException e) {
            e.printStackTrace();
            return null;
        }
    }
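    // A possible extension (illustrative sketch, not part of the original
    // listing): print every line of the remote file instead of returning
    // only the first one. The method name readAll is chosen here for clarity.
    public static void readAll(Configuration conf, String remoteFilePath) {
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream in = fs.open(new Path(remoteFilePath));
             BufferedReader d = new BufferedReader(new InputStreamReader(in))) {
            String line;
            while ((line = d.readLine()) != null) {
                System.out.println(line);   // write each line to standard output
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }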

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // fs.defaultFS is the current name of the property (fs.default.name is deprecated).
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        String remoteFilePath = "/test";

        System.out.println("Connecting to " + remoteFilePath);
        System.out.println(a2.readline(conf, remoteFilePath));
        System.out.println("\nOutput finished");
    }

}
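To try this out (assuming a pseudo-distributed HDFS running on localhost:9000, as configured in main), the file /test must already exist in HDFS; it can be uploaded beforehand with hdfs dfs -put. Compile and run the class with the Hadoop client jars on the classpath, for example via the output of the hadoop classpath command.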
