A Small Test of HDFS Read and Write Programs
WriteHdfs:
package aa;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteHdfs {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(conf);
            Path f = new Path("ttt.txt");
            // prints false here, since the file has not been created yet
            System.out.println(fs.isFile(f));
            System.out.println(f.toString());
            // create (or overwrite) the file and write "test"
            FSDataOutputStream os = fs.create(f, true);
            os.writeChars("test");
            os.flush();
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
ReadHdfs:
package aa;

import java.io.InputStream;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class ReadHdfs {
    public static void main(String[] args) throws Exception {
        String uri = "ttt.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(uri), conf);
        InputStream in = null;
        try {
            in = fs.open(new Path(uri));
            // copy the file contents to stdout in 4 KB chunks, without closing the streams
            IOUtils.copyBytes(in, System.out, 4096, false);
        } finally {
            IOUtils.closeStream(in);
        }
    }
}
Output from running the write program first, then the read program:
log4j:WARN No appenders could be found for logger (org.apache.hadoop.metrics2.lib.MutableMetricsFactory).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
t e s t
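The gaps in "t e s t" are not part of the data: writeChars writes each character as a two-byte UTF-16 code unit, high byte first, so for ASCII text a 0x00 byte precedes every letter, and copyBytes sends those raw bytes straight to System.out. Writing plain UTF-8 bytes avoids this. Below is a minimal variant of the write program, not part of the original test; the class name WriteHdfsUtf8 is only for illustration. With it, the read program prints simply "test".

package aa;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Illustrative variant of WriteHdfs: writes "test" as UTF-8 bytes,
// so the raw file contents read back as plain ASCII text.
public class WriteHdfsUtf8 {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(conf);
            Path f = new Path("ttt.txt");
            FSDataOutputStream os = fs.create(f, true);
            os.write("test".getBytes(StandardCharsets.UTF_8));
            os.flush();
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}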