// 课堂测试2 (Classroom Test 2)
package com.immoc.hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.*;
import java.net.URI;
import java.util.Scanner;
/**
* Hadoop HDFS Java API 操作
*/
public class HDFS_Classtest2 {

    /** URI of the HDFS NameNode every test in this class connects to. */
    public static final String HDFS_PATH = "hdfs://192.168.16.132:8020";

    // Opened in setUp(), closed in tearDown().
    FileSystem fileSystem = null;
    Configuration configuration = null;

    /**
     * Creates the working directory that the other operations write into.
     */
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/信1605-1班/20163694"));
    }

    /**
     * [1] Create: creates the text file and writes one greeting line into it.
     *
     * <p>Fix: JUnit 4 requires {@code @Test} methods to be {@code void}, so the
     * former {@code int} return was removed. The stream is closed in a
     * {@code finally} block, and bytes are written as UTF-8 rather than the
     * platform-default charset.
     */
    @Test
    public void create() throws Exception {
        FSDataOutputStream output = fileSystem.create(new Path("/信1605-1班/20163694/李婷婷.txt"));
        try {
            output.write("hello hadoop 20163694李婷婷\n".getBytes("UTF-8"));
            System.out.println("新建:/信1605-1班/20163694/李婷婷.txt成功");
            output.flush();
        } finally {
            output.close();
        }
    }

    /**
     * [2] Open: reads the text file and prints its content to standard out.
     */
    @Test
    public void cat() throws Exception {
        FSDataInputStream in = fileSystem.open(new Path("/信1605-1班/20163694/李婷婷.txt"));
        try {
            System.out.println("\n文件内容为:");
            IOUtils.copyBytes(in, System.out, 1024);
            System.out.println("");
        } finally {
            in.close();
        }
    }

    /**
     * [3] Edit: appends user-supplied text to the end of the file.
     *
     * <p>Fixes: the old version was annotated {@code @Test} although JUnit
     * cannot run a test method that takes parameters, and it re-created the
     * file from a hard-coded base string — i.e. it overwrote instead of
     * appending. This version snapshots the current content first and
     * rewrites it with {@code str} appended, so earlier edits survive.
     *
     * @param str text to append to the end of the file
     * @return 0 on success (kept for compatibility with the old signature)
     * @throws Exception on any HDFS I/O failure
     */
    public int add(String str) throws Exception {
        Path file = new Path("/信1605-1班/20163694/李婷婷.txt");

        // Snapshot the existing bytes; a plain create() would truncate them.
        ByteArrayOutputStream existing = new ByteArrayOutputStream();
        FSDataInputStream in = fileSystem.open(file);
        try {
            IOUtils.copyBytes(in, existing, 1024);
        } finally {
            in.close();
        }

        FSDataOutputStream output = fileSystem.create(file, true);
        try {
            output.write(existing.toByteArray());
            output.write(str.getBytes("UTF-8"));
            output.flush();
        } finally {
            output.close();
        }
        return 0;
    }

    /**
     * Renames (moves) a file on a second, local HDFS instance.
     *
     * <p>Fixes: the old declaration {@code public int mv()} returned a
     * {@code boolean} — a compile error — and JUnit requires a void return
     * anyway, so the result is logged instead. The FileSystem handle is now
     * closed in a {@code finally} block.
     */
    @Test
    public void mv() throws Exception {
        Configuration conf = new Configuration();
        // NOTE(review): "fs.default.name" is deprecated in favor of
        // "fs.defaultFS"; kept as-is because the cluster version is unknown.
        conf.set("fs.default.name","hdfs://localhost:9000");
        String remoteFilePath = "hdfs:///user/hadoop/text.txt"; // source HDFS path
        String remoteToFilePath = "hdfs:///user/hadoop/new.txt"; // destination HDFS path
        FileSystem fs = FileSystem.get(conf);
        try {
            boolean result = fs.rename(new Path(remoteFilePath), new Path(remoteToFilePath));
            System.out.println("rename succeeded: " + result);
        } finally {
            fs.close();
        }
    }

    /**
     * Connects to the HDFS cluster before each test.
     */
    @Before
    public void setUp() throws Exception {
        System.out.println("连接到HDFS文件操作系统");
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "jasmine");
    }

    /**
     * Releases the connection after each test.
     *
     * <p>Fix: the FileSystem handle is now closed before the reference is
     * dropped, instead of leaking the underlying connection.
     */
    @After
    public void tearDown() throws Exception {
        if (fileSystem != null) {
            fileSystem.close();
        }
        fileSystem = null;
        configuration = null;
        System.out.println("操作结束,已清除缓存\n\n\n\n\n");
    }
}
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.*;
import java.net.URI;
import java.util.Scanner;
/**
* Hadoop HDFS Java API 操作
*/
// NOTE(review): this is an accidental second paste of HDFS_Classtest2 — the
// original file declares the same class twice, which cannot compile. This
// copy (and the stray import lines preceding it) should be deleted outright;
// it is only corrected here to match the canonical copy.
public class HDFS_Classtest2 {

    /** URI of the HDFS NameNode every test in this class connects to. */
    public static final String HDFS_PATH = "hdfs://192.168.16.132:8020";

    // Opened in setUp(), closed in tearDown().
    FileSystem fileSystem = null;
    Configuration configuration = null;

    /**
     * Creates the working directory that the other operations write into.
     */
    public void mkdir() throws Exception {
        fileSystem.mkdirs(new Path("/信1605-1班/20163694"));
    }

    /**
     * [1] Create: creates the text file and writes one greeting line into it.
     *
     * <p>Fix: JUnit 4 requires {@code @Test} methods to be {@code void}, so the
     * former {@code int} return was removed. The stream is closed in a
     * {@code finally} block, and bytes are written as UTF-8 rather than the
     * platform-default charset.
     */
    @Test
    public void create() throws Exception {
        FSDataOutputStream output = fileSystem.create(new Path("/信1605-1班/20163694/李婷婷.txt"));
        try {
            output.write("hello hadoop 20163694李婷婷\n".getBytes("UTF-8"));
            System.out.println("新建:/信1605-1班/20163694/李婷婷.txt成功");
            output.flush();
        } finally {
            output.close();
        }
    }

    /**
     * [2] Open: reads the text file and prints its content to standard out.
     */
    @Test
    public void cat() throws Exception {
        FSDataInputStream in = fileSystem.open(new Path("/信1605-1班/20163694/李婷婷.txt"));
        try {
            System.out.println("\n文件内容为:");
            IOUtils.copyBytes(in, System.out, 1024);
            System.out.println("");
        } finally {
            in.close();
        }
    }

    /**
     * [3] Edit: appends user-supplied text to the end of the file.
     *
     * <p>Fixes: the old version was annotated {@code @Test} although JUnit
     * cannot run a test method that takes parameters, and it re-created the
     * file from a hard-coded base string — i.e. it overwrote instead of
     * appending. This version snapshots the current content first and
     * rewrites it with {@code str} appended, so earlier edits survive.
     *
     * @param str text to append to the end of the file
     * @return 0 on success (kept for compatibility with the old signature)
     * @throws Exception on any HDFS I/O failure
     */
    public int add(String str) throws Exception {
        Path file = new Path("/信1605-1班/20163694/李婷婷.txt");

        // Snapshot the existing bytes; a plain create() would truncate them.
        ByteArrayOutputStream existing = new ByteArrayOutputStream();
        FSDataInputStream in = fileSystem.open(file);
        try {
            IOUtils.copyBytes(in, existing, 1024);
        } finally {
            in.close();
        }

        FSDataOutputStream output = fileSystem.create(file, true);
        try {
            output.write(existing.toByteArray());
            output.write(str.getBytes("UTF-8"));
            output.flush();
        } finally {
            output.close();
        }
        return 0;
    }

    /**
     * Renames (moves) a file on a second, local HDFS instance.
     *
     * <p>Fixes: the old declaration {@code public int mv()} returned a
     * {@code boolean} — a compile error — and JUnit requires a void return
     * anyway, so the result is logged instead. The FileSystem handle is now
     * closed in a {@code finally} block.
     */
    @Test
    public void mv() throws Exception {
        Configuration conf = new Configuration();
        // NOTE(review): "fs.default.name" is deprecated in favor of
        // "fs.defaultFS"; kept as-is because the cluster version is unknown.
        conf.set("fs.default.name","hdfs://localhost:9000");
        String remoteFilePath = "hdfs:///user/hadoop/text.txt"; // source HDFS path
        String remoteToFilePath = "hdfs:///user/hadoop/new.txt"; // destination HDFS path
        FileSystem fs = FileSystem.get(conf);
        try {
            boolean result = fs.rename(new Path(remoteFilePath), new Path(remoteToFilePath));
            System.out.println("rename succeeded: " + result);
        } finally {
            fs.close();
        }
    }

    /**
     * Connects to the HDFS cluster before each test.
     */
    @Before
    public void setUp() throws Exception {
        System.out.println("连接到HDFS文件操作系统");
        configuration = new Configuration();
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "jasmine");
    }

    /**
     * Releases the connection after each test.
     *
     * <p>Fix: the FileSystem handle is now closed before the reference is
     * dropped, instead of leaking the underlying connection.
     */
    @After
    public void tearDown() throws Exception {
        if (fileSystem != null) {
            fileSystem.close();
        }
        fileSystem = null;
        configuration = null;
        System.out.println("操作结束,已清除缓存\n\n\n\n\n");
    }
}