HDFS Operations
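
All of the examples below use the Hadoop FileSystem Java API. new Configuration() picks up core-site.xml and hdfs-site.xml from the classpath, so fs.defaultFS must point at the NameNode there; otherwise FileSystem.get(configuration) falls back to the local file system. If the configuration files are not on the classpath, the NameNode URI can be passed explicitly instead. A minimal sketch, assuming a hypothetical NameNode address of hdfs://localhost:9000:

import java.net.URI;

// "hdfs://localhost:9000" is a placeholder; substitute your NameNode address.
FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9000"), new Configuration());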

1. Create an HDFS directory

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Create an HDFS directory
public class MakeDir {
    public static void main(String[] args) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);
        Path path = new Path("/user/hadoop/data/");
        // mkdirs() creates the directory and any missing parents;
        // fs.create(path) would create an empty *file* instead.
        fs.mkdirs(path);
        fs.close();
    }
}
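
mkdirs() behaves like mkdir -p on the shell: it creates any missing parent directories and returns true on success.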

2. Delete an HDFS directory

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Delete an HDFS directory
public class DeleteDir {
    public static void main(String[] args) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);
        Path path = new Path("/user/hadoop/data");
        // true = recursive, required for a non-empty directory;
        // the one-argument delete(Path) is deprecated.
        fs.delete(path, true);
        fs.close();
    }
}
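
Unlike hadoop fs -rm, a delete() through the Java API bypasses the trash: the data is removed immediately.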

3. Write a file to HDFS

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Write a file to HDFS
public class WriteFile {
    public static void main(String[] args) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);
        Path path = new Path("/user/hadoop/data/write.txt");
        FSDataOutputStream outputStream = fs.create(path);
        // Write raw UTF-8 bytes; writeUTF() would prepend a two-byte
        // length header that the reader in the next example does not expect.
        outputStream.write("hello".getBytes("UTF-8"));
        outputStream.close();
        fs.close();
    }
}
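
fs.create() overwrites an existing file by default, and it is closing the output stream, not fs.close(), that completes the file and makes its contents visible to readers.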

4. Read a file from HDFS

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Read a file from HDFS
public class ReadFile {
    public static void main(String[] args) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);
        Path path = new Path("/user/hadoop/data/write.txt");
        if (fs.exists(path)) {
            FSDataInputStream is = fs.open(path);
            FileStatus fileStatus = fs.getFileStatus(path);
            // Size the buffer from the file length (assumes the file is
            // small enough to fit in memory and under 2 GB).
            byte[] buffer = new byte[(int) fileStatus.getLen()];
            is.readFully(0, buffer);
            is.close();
            fs.close();
            // buffer.toString() would print the array's identity hash,
            // not its contents; decode the bytes instead.
            System.out.println(new String(buffer, "UTF-8"));
        }
    }
}
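
Reading a whole file into a single byte array only works when the file comfortably fits in memory. For files of arbitrary size, Hadoop's IOUtils helper can stream the contents instead; a minimal sketch, reusing fs and path from the example above:

import org.apache.hadoop.io.IOUtils;

FSDataInputStream in = fs.open(path);
// Copy to stdout in 4 KB chunks; the final 'false' leaves the streams
// open so they can be closed explicitly.
IOUtils.copyBytes(in, System.out, 4096, false);
in.close();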

5. Upload a local file to HDFS

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Upload a local file to HDFS
public class CopyFromLocalFile {
    public static void main(String[] args) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);
        Path srcPath = new Path("/home/hadoop/word.txt");
        Path dstPath = new Path("/user/hadoop/data/");
        fs.copyFromLocalFile(srcPath, dstPath);
        fs.close();
    }
}
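
Because the destination is a directory, the source keeps its name and the upload lands at /user/hadoop/data/word.txt. A variant, copyFromLocalFile(boolean delSrc, Path src, Path dst), deletes the local copy after the upload.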

6. Delete a file

 

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Delete a single file from HDFS
public class DeleteFile {
    public static void main(String[] args) throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fs = FileSystem.get(configuration);
        Path path = new Path("/user/hadoop/data/word.txt");
        // false = non-recursive, sufficient for a single file
        fs.delete(path, false);
        fs.close();
    }
}
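
delete() returns false rather than throwing when the path does not exist, so the return value is worth checking.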

 

7. List all subdirectories and files under a given directory

 

// List all subdirectories and files under a given directory
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GetAllChildFile {
    static Configuration configuration = new Configuration();

    public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(configuration);
        Path path = new Path("/user/hadoop");
        getFile(path, fs);
    }

    // Recursively walk the tree, printing the path of every file.
    public static void getFile(Path path, FileSystem fs) throws IOException {
        FileStatus[] fileStatus = fs.listStatus(path);
        for (FileStatus status : fileStatus) {
            if (status.isDir()) {
                getFile(status.getPath(), fs);
            } else {
                System.out.println(status.getPath().toString());
            }
        }
    }
}
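
On Hadoop 2.x and later, the manual recursion can be replaced with listFiles(path, true), which walks the tree for you and returns a remote iterator over the files (directories themselves are not returned); a minimal sketch, reusing fs from the example above:

import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.RemoteIterator;

// 'true' requests a recursive listing.
RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/user/hadoop"), true);
while (files.hasNext()) {
    System.out.println(files.next().getPath().toString());
}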

 

8. Names of all DataNodes in the HDFS cluster

 

// Print the names of all DataNodes in the HDFS cluster
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class DataNodeInfo {
    public static void main(String[] args) throws IOException {
        getHDFSNode();
    }

    public static void getHDFSNode() throws IOException {
        Configuration configuration = new Configuration();
        FileSystem fileSystem = FileSystem.get(configuration);
        // The cast only succeeds when fs.defaultFS points at an hdfs:// URI,
        // i.e. the FileSystem really is HDFS.
        DistributedFileSystem distributedFileSystem = (DistributedFileSystem) fileSystem;
        DatanodeInfo[] datanodeInfo = distributedFileSystem.getDataNodeStats();
        for (int i = 0; i < datanodeInfo.length; i++) {
            System.out.println("DataNode_" + i + "_Node: " + datanodeInfo[i].getHostName());
        }
    }
}
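
getDataNodeStats() is backed by the same datanode report that dfsadmin -report uses; on a secured cluster it may require HDFS superuser privileges.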

 

 
