HDFS Scripts

Environment setup

vim .bash_profile

export CLASSPATH=.:/home/hadoop/work/hadoop2.6jar/*
source .bash_profile

(Note: the JVM expands a classpath entry ending in * to all JAR files in that directory; a dir/*.jar pattern is not expanded.)

1. Upload a file to HDFS

vim CopyFile.java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFile {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);

        Path src = new Path("/home/hadoop/lijieran.txt");
        Path dst = new Path("hdfs://h201:9000/user/hadoop/");

        hdfs.copyFromLocalFile(src, dst);
        //hdfs.copyFromLocalFile(true, true, src, dst); // variant: delete the source and overwrite the destination
        System.out.println("Upload to " + conf.get("fs.default.name"));

        FileStatus files[] = hdfs.listStatus(dst);

        for (FileStatus file : files) {
            System.out.println(file.getPath());
        }
    }
}

/usr/jdk1.7.0_25/bin/javac CopyFile.java

/usr/jdk1.7.0_25/bin/java CopyFile

***** If you get the error: Wrong FS: hdfs://h101:9000/user/hadoop, expected: file:/// *****

copy core-site.xml and hdfs-site.xml into the current directory so that Configuration picks up the cluster settings (otherwise FileSystem.get(conf) falls back to the local filesystem).
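Alternatively, the configuration files can be loaded explicitly in code with Configuration.addResource. A minimal sketch; the class name LoadConf and the config path are illustrative, adjust them to your installation:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class LoadConf {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Load the cluster configuration explicitly instead of relying on the classpath.
        // These paths are assumptions; point them at your own etc/hadoop directory.
        conf.addResource(new Path("/home/hadoop/hadoop-2.6.0/etc/hadoop/core-site.xml"));
        conf.addResource(new Path("/home/hadoop/hadoop-2.6.0/etc/hadoop/hdfs-site.xml"));

        FileSystem hdfs = FileSystem.get(conf);
        System.out.println("Connected to " + conf.get("fs.default.name"));
    }
}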

2. Download an HDFS file to the local filesystem

vim ToFile.java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ToFile {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);

        Path src = new Path("hdfs://h201:9000/user/hadoop/lijieran.txt");
        Path dst = new Path("/home/hadoop/work");

        hdfs.copyToLocalFile(src, dst);

        System.out.println("Download to " + conf.get("fs.default.name"));

        FileStatus files[] = hdfs.listStatus(src);

        for (FileStatus file : files) {
            System.out.println(file.getPath());
        }
    }
}

3. Write data to a file in HDFS

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateFile {

    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);

        byte[] buff = "hello hadoop world!\n".getBytes(); // convert the string to bytes
        Path dfs = new Path("hdfs://h101:9000/user/hadoop/hellow.txt"); // target file to create
        FSDataOutputStream outputStream = hdfs.create(dfs); // open an output stream on the new file
        outputStream.write(buff, 0, buff.length); // write the whole buffer: (bytes, offset, length)
        outputStream.close(); // flush and close, otherwise the data may not be persisted
    }
}

4. Create an HDFS directory

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CreateDir {
    public static void main(String[] args) throws Exception{

        Configuration conf=new Configuration();
        FileSystem hdfs=FileSystem.get(conf);

        Path dfs=new Path("hdfs://h101:9000/user/hadoop/TestDir");
        hdfs.mkdirs(dfs);
    }
}

To create an empty file instead (the shell equivalent is hadoop fs -touchz), see the sketch below.
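A minimal sketch under the same setup; the class name TouchFile and the target path are illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class TouchFile {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);

        Path f = new Path("hdfs://h101:9000/user/hadoop/empty.txt"); // illustrative path
        hdfs.create(f).close(); // create a zero-length file and close it immediately
    }
}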

5. Rename an HDFS file

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Rename {
    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);

        Path frpath = new Path("hdfs://h101:9000/user/hadoop/b1"); // old name
        Path topath = new Path("hdfs://h101:9000/user/hadoop/bb111"); // new name

        boolean isRename = hdfs.rename(frpath, topath); // rename(source, destination) returns a boolean

        String result = isRename ? "succeeded" : "failed";

        System.out.println("Rename " + result);
    }
}

6. Delete an HDFS file

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteFile {

    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);

        Path delef = new Path("hdfs://h101:9000/user/hadoop/bb111");

        boolean isDeleted = hdfs.delete(delef, false);

        // Recursive delete, required for non-empty directories:
        //boolean isDeleted = hdfs.delete(delef, true);
        System.out.println("Delete?" + isDeleted);
    }
}

7. Check whether a file exists

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CheckFile {
    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();

        FileSystem hdfs = FileSystem.get(conf);

        Path findf = new Path("hdfs://h101:9000/user/hadoop/hellow.txt");

        boolean isExists = hdfs.exists(findf);

        System.out.println("Exist?" + isExists);
    }
}

8. Get the last modification time of an HDFS file

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GetLTime {

    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);

        Path fpath = new Path("hdfs://h101:9000/user/hadoop/hellow.txt");

        FileStatus fileStatus = hdfs.getFileStatus(fpath);

        long modiTime = fileStatus.getModificationTime();

        System.out.println("Modification time of hellow.txt: " + modiTime);
    }
}

***** The returned value is milliseconds since the epoch, in Coordinated Universal Time (UTC). *****
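The raw millisecond value can be made human-readable with the standard JDK date classes. A minimal sketch; the timestamp literal is just an illustrative value:

import java.text.SimpleDateFormat;
import java.util.Date;

public class FormatTime {
    public static void main(String[] args) {
        long modiTime = 1520168340000L; // illustrative value, as returned by getModificationTime()
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        System.out.println(fmt.format(new Date(modiTime))); // prints in the JVM's local time zone
    }
}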

9. Read an HDFS file through a URL

import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

public class URLcat {
    static {
        // Register the hdfs:// URL scheme with the JVM.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    public static void main(String[] args) throws Exception {
        InputStream in = null;
        try {
            in = new URL(args[0]).openStream(); // args[0] = hdfs://h201:9000/user/hadoop/lijieran.txt
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}

/usr/jdk1.7.0_25/bin/java URLcat hdfs://h201:9000/user/hadoop/lijieran.txt
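Since URL.setURLStreamHandlerFactory may be called at most once per JVM, a common alternative is to read the file through the FileSystem API instead. A minimal sketch; the class name FsCat is illustrative:

import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class FsCat {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem hdfs = FileSystem.get(conf);

        InputStream in = null;
        try {
            in = hdfs.open(new Path(args[0])); // e.g. hdfs://h201:9000/user/hadoop/lijieran.txt
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}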

 
