
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Writes a UTF string to a file in HDFS.
public class MainClass {
    public static void main(String[] args) throws IOException {
        // Point the client at the local HDFS NameNode.
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://localhost:9000");

        // Relative path, resolved against the current user's HDFS home directory.
        Path inFile = new Path("lzq/hdfstest1.txt");

        FileSystem hdfs = FileSystem.get(conf);

        // Create the file (overwriting any existing one) and write the string.
        FSDataOutputStream outputStream = hdfs.create(inFile);
        outputStream.writeUTF("信1605-1班20163594李志强HDFS课堂测试");
        outputStream.flush();
        outputStream.close();
    }
}
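After the write finishes, the result can also be checked programmatically with FileSystem.exists and getFileStatus. A minimal sketch, assuming the same NameNode address and relative path as above (the class name CheckWrite is hypothetical):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CheckWrite {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://localhost:9000");
        FileSystem hdfs = FileSystem.get(conf);

        Path file = new Path("lzq/hdfstest1.txt");
        if (hdfs.exists(file)) {
            // getFileStatus reports size, replication, modification time, etc.
            FileStatus status = hdfs.getFileStatus(file);
            System.out.println(file + " exists, " + status.getLen() + " bytes");
        } else {
            System.out.println(file + " was not created");
        }
    }
}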
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Reads the file written by MainClass, copies its contents to a second file,
// then reads the copy back and prints it.
public class Read {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.default.name", "hdfs://localhost:9000");

        // Absolute paths; the relative path "lzq/hdfstest1.txt" above resolves to
        // /user/hadoop/lzq/hdfstest1.txt when the client runs as the "hadoop" user.
        Path inFile = new Path("/user/hadoop/lzq/hdfstest1.txt");
        Path newFile = new Path("/user/hadoop/lzq/hdfstest2.txt");

        FileSystem hdfs = FileSystem.get(conf);

        // Read the UTF string from the source file.
        FSDataInputStream inputStream = hdfs.open(inFile);
        String str = inputStream.readUTF();
        inputStream.close();

        // Write it to the new file.
        FSDataOutputStream outputStream = hdfs.create(newFile);
        outputStream.writeUTF(str);
        outputStream.flush();
        outputStream.close();

        // Read the copy back and print it to verify.
        inputStream = hdfs.open(newFile);
        System.out.println(inputStream.readUTF());
        inputStream.close();
    }
}
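The fs.default.name key used above still works but is deprecated on Hadoop 2.x and later, where fs.defaultFS is the preferred name. A minimal sketch of the same copy using the newer key, FileSystem.get(URI, conf), and try-with-resources so the streams are closed even if an exception is thrown (the class name ReadModern is hypothetical):

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadModern {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");

        // The target URI can also be passed directly to FileSystem.get.
        FileSystem hdfs = FileSystem.get(URI.create("hdfs://localhost:9000"), conf);

        Path src = new Path("/user/hadoop/lzq/hdfstest1.txt");
        Path dst = new Path("/user/hadoop/lzq/hdfstest2.txt");

        // Read the source, copy it to the destination, then print the copy.
        String str;
        try (FSDataInputStream in = hdfs.open(src)) {
            str = in.readUTF();
        }
        try (FSDataOutputStream out = hdfs.create(dst)) {
            out.writeUTF(str);
        }
        try (FSDataInputStream in = hdfs.open(dst)) {
            System.out.println(in.readUTF());
        }
    }
}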

posted @ 2018-09-30 20:40 冰柠檬❤