HDFS java常用API

1、HDFS java常用API 及hadoop-eclipse-plugin-1.1.2.jar插件的使用

 
HDFS的java访问接口——FileSystem
    create:创建文件;
    open:读取文件;
    delete:删除文件或者目录;
 
    mkdirs:创建目录;
    listStatus:列出目录的内容;
    getFileStatus:显示文件系统的目录和文件的元数据信息;
  
import java.io.FileInputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;


public class BasicOperation {

	/** Base URI of the target HDFS NameNode; every path below is built on it. */
	private static final String PATH = "hdfs://192.168.255.132:9000/";

	/**
	 * Demonstrates the basic HDFS client operations: listing a directory,
	 * reading a file, and (commented out) uploading, creating and deleting paths.
	 *
	 * @param args unused
	 * @throws Throwable on connection or I/O failure
	 * @author nwpulisz
	 */
	public static void main(String[] args) throws Throwable {
		URI uri = new URI(PATH);
		FileSystem fileSystem = getFileSystem(uri);
		try {
			list(fileSystem, PATH);
			readFileFromServer(fileSystem, PATH + "hello");
			//putFiletoServer(fileSystem, PATH+"up.txt");
			//mkdirOnServer(fileSystem,PATH+"Yang" );
			//rmdirOnServer(fileSystem, PATH+"Li");
		} finally {
			fileSystem.close(); // release the connection to the NameNode
		}
	}

	/**
	 * Obtains a {@link FileSystem} handle for the given URI.
	 * FileSystem cannot be constructed with {@code new}; it must be acquired
	 * through the static factory method.
	 */
	private static FileSystem getFileSystem(URI uri) throws Throwable {
		return FileSystem.get(uri, new Configuration());
	}

	/**
	 * Streams the content of a remote HDFS file to the console.
	 * Uses the 4-arg copyBytes overload with close=false: the Configuration
	 * overload closes BOTH streams, which would close System.out and silence
	 * all later console output. Only the HDFS input stream is closed here.
	 */
	private static void readFileFromServer(FileSystem fileSystem, String path) throws Throwable {
		FSDataInputStream openStream = fileSystem.open(new Path(path));
		try {
			IOUtils.copyBytes(openStream, System.out, 4096, false);
		} finally {
			IOUtils.closeStream(openStream); // close only the HDFS side
		}
	}

	/**
	 * Uploads the local file "up.txt" (resolved against the working directory)
	 * to the given HDFS path. The Configuration overload of copyBytes closes
	 * both streams on completion, so no explicit closeStream is needed.
	 */
	private static void putFiletoServer(FileSystem fileSystem, String path) throws Throwable {
		FSDataOutputStream openStream = fileSystem.create(new Path(path));
		FileInputStream fileInputStream = new FileInputStream("up.txt");
		IOUtils.copyBytes(fileInputStream, openStream, new Configuration()); // closes both streams
	}

	/** Creates a directory (including any missing parents) on HDFS. */
	private static void mkdirOnServer(FileSystem fileSystem, String path) throws Throwable {
		fileSystem.mkdirs(new Path(path));
	}

	/** Deletes a file, or recursively deletes a directory, on HDFS. */
	private static void rmdirOnServer(FileSystem fileSystem, String path) throws Throwable {
		fileSystem.delete(new Path(path), true); // true = recursive delete
	}

	/**
	 * Lists the entries directly under the given path, printing each one
	 * tagged as a directory or a file.
	 */
	private static void list(FileSystem fileSystem, String path) throws Throwable {
		FileStatus[] listStatus = fileSystem.listStatus(new Path(path));
		for (FileStatus fileStatus : listStatus) {
			// isDir() is the API available in Hadoop 1.x; output strings kept as-is
			String isDir = fileStatus.isDir() ? "目录" : "文件";
			String name = fileStatus.getPath().getName();
			System.out.println(isDir + "\t" + name);
		}
	}

}

  

 

2、借助hadoop-eclipse-plugin-1.1.2.jar插件,实现对HDFS文件的操作
    基本配置如下:
        
连接到DFS如下:
          
可以直接对HDFS进行操作。
 
    
 





posted @ 2016-03-28 16:57  nwpulisz  阅读(777)  评论(0编辑  收藏  举报