Appending Content to HDFS
Configuration: enable append support in hdfs-site.xml
<property>
    <name>dfs.support.append</name>
    <value>true</value>
</property>
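You can check from client code whether the flag is visible before attempting an append. The sketch below is only an illustration, not part of the original post; it assumes hdfs-site.xml is on the client classpath and simply reads the dfs.support.append key from the loaded Configuration (many newer Hadoop 2.x releases already enable append by default).

import org.apache.hadoop.conf.Configuration;

// Minimal sketch (assumption: cluster config files are on the classpath):
// read the client-side value of the append flag shown above.
public class CheckAppendSupport {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        boolean appendEnabled = conf.getBoolean("dfs.support.append", false);
        System.out.println("dfs.support.append = " + appendEnabled);
    }
}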
Appending a local file to an HDFS file
package com.wyp;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.*;
import java.net.URI;

/**
 * blog: http://www.iteblog.com/
 * Date: 14-1-2
 * Time: 6:09 PM
 */
public class AppendContent {
    public static void main(String[] args) {
        String hdfs_path = "hdfs://mycluster/home/wyp/wyp.txt"; // target file on HDFS
        Configuration conf = new Configuration();
        conf.setBoolean("dfs.support.append", true);

        String inpath = "/home/wyp/append.txt"; // local file whose contents will be appended
        FileSystem fs = null;
        try {
            fs = FileSystem.get(URI.create(hdfs_path), conf);
            // Open the local file and append its bytes to the HDFS file;
            // the last argument of copyBytes closes both streams when done.
            InputStream in = new BufferedInputStream(new FileInputStream(inpath));
            OutputStream out = fs.append(new Path(hdfs_path));
            IOUtils.copyBytes(in, out, 4096, true);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
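After the append completes, it is easy to read the file back and confirm the new bytes are there. The following sketch is only for illustration; the path reuses the hdfs_path value assumed in the example above.

import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

// Minimal sketch: print the HDFS file to stdout to verify the append.
public class ReadBack {
    public static void main(String[] args) throws Exception {
        String hdfs_path = "hdfs://mycluster/home/wyp/wyp.txt"; // same path as above
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(hdfs_path), conf);
        try (InputStream in = fs.open(new Path(hdfs_path))) {
            // close=false: the try-with-resources block closes the input stream,
            // and System.out should stay open.
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}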
Appending a line of content
/**
 * Append content to the end of a file on HDFS. Note: for this to work,
 * hdfs-site.xml must contain
 * <property><name>dfs.support.append</name><value>true</value></property>
 */
private static void appendToHdfs() throws FileNotFoundException, IOException {
    String dst = "hdfs://192.168.1.11:9000/usr/yujing/test.txt";
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(URI.create(dst), conf);
    FSDataOutputStream out = fs.append(new Path(dst));

    // Write the message bytes once, then close the stream and the file system.
    byte[] message = "zhangzk add by hdfs java api".getBytes();
    out.write(message, 0, message.length);

    out.close();
    fs.close();
}
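A slightly more general variant is sketched below: it appends an arbitrary line of text followed by a newline. The method name appendLine and the UTF-8 encoding are illustrative assumptions, not part of the original example; try-with-resources is used so both the output stream and the FileSystem are closed even if the write fails.

import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class AppendLineExample {
    // Hypothetical helper (assumption, not from the original post): append one
    // line of text, terminated by a newline, to an existing HDFS file.
    static void appendLine(String uri, String line) throws IOException {
        Configuration conf = new Configuration();
        conf.setBoolean("dfs.support.append", true);
        try (FileSystem fs = FileSystem.get(URI.create(uri), conf);
             FSDataOutputStream out = fs.append(new Path(uri))) {
            out.write((line + "\n").getBytes(StandardCharsets.UTF_8));
        }
    }

    public static void main(String[] args) throws IOException {
        appendLine("hdfs://192.168.1.11:9000/usr/yujing/test.txt",
                   "zhangzk add by hdfs java api");
    }
}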