Hadoop LZO and Hive RCFile: installation and configuration in detail
Below are my installation and test notes.
1. Install gcc: yum install lib* glibc* gcc*. These packages may be needed if the LZO build fails, and they are required on both the namenode and the datanodes. Running yum install lib* glibc* gcc* will automatically work out which packages need to be updated.
2. Install ant. Download it from http://mirror.bjtu.edu.cn/apache/ant/binaries/,
unpack it (under /usr/local in this setup) and set the environment variables:
tar -jxvf apache-ant-1.8.2-bin.tar.bz2
Add the ant environment variables: vi /etc/profile
export ANT_HOME=/usr/local/apache-ant-1.8.2
export PATH=$PATH:$ANT_HOME/bin
Run source /etc/profile to make the new settings take effect.
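A quick check that ant is now on the PATH (the version string simply matches the tarball installed above):
ant -version
This should report Apache Ant version 1.8.2.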
3. Install the LZO library. Download http://www.oberhumer.com/opensource/lzo/download/lzo-2.04.tar.gz
tar -zxf lzo-2.04.tar.gz
cd lzo-2.04
./configure --enable-shared
make && make install
1) Copy the LZO library files from /usr/local/lib to /usr/lib (on 32-bit platforms) or /usr/lib64 (on 64-bit platforms), or
2) Create an lzo.conf file under /etc/ld.so.conf.d/ containing the path to the LZO library files, then run /sbin/ldconfig -v to make the configuration take effect. A sketch of both options follows below.
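For example, on a 64-bit machine the two options might look like this (a sketch; the paths assume the default make install prefix of /usr/local):
cp /usr/local/lib/liblzo2.* /usr/lib64/
or, going the ld.so.conf route instead:
echo "/usr/local/lib" > /etc/ld.so.conf.d/lzo.conf
/sbin/ldconfig -v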
4. Install the LZO encoder/decoder packages (mind the difference between 32-bit and 64-bit; a 64-bit variant is sketched below):
http://pkgs.repoforge.org/lzo/
lzo-devel-2.04-1.el5.rf.i386.rpm
lzo-2.04-1.el5.rf.i386.rpm
These are needed because lzo-devel is otherwise a missing dependency, and lzo-devel itself depends on lzo-2.04-1.el5.rf.
rpm -ivh lzo-2.04-1.el5.rf.i386.rpm
rpm -ivh lzo-devel-2.04-1.el5.rf.i386.rpm
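On a 64-bit server you would install the corresponding x86_64 packages instead (the exact file names are my assumption; check the repoforge listing):
rpm -ivh lzo-2.04-1.el5.rf.x86_64.rpm
rpm -ivh lzo-devel-2.04-1.el5.rf.x86_64.rpm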
5. Build the hadoop-lzo jar.
Download the latest source from https://github.com/kevinweil/hadoop-lzo/downloads (currently hadoop-lzo-master.zip).
unzip hadoop-lzo-master.zip
cd hadoop-lzo-master
On a 32-bit server: export CFLAGS=-m32; export CXXFLAGS=-m32
On a 64-bit server: export CFLAGS=-m64; export CXXFLAGS=-m64
ant compile-native tar
When the build finishes, hadoop-lzo*.jar can be found in the build directory.
After a successful build, the codec jar and the native libraries still need to be copied into $HADOOP_HOME/lib:
cp build/hadoop-lzo-0.4.17-SNAPSHOT.jar /usr/local/hadoop-1.1.2/lib/
tar -cBf - -C build/native . | tar -xBvf - -C /usr/local/hadoop-1.1.2/lib/native
chown -R hadoop:hadoop /usr/local/hadoop-1.1.2/lib/
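A quick sanity check that everything landed where Hadoop expects it (the platform subdirectory name, e.g. Linux-amd64-64, depends on your architecture and is only an example here):
ls /usr/local/hadoop-1.1.2/lib/hadoop-lzo-*.jar
ls /usr/local/hadoop-1.1.2/lib/native/Linux-amd64-64/lib/
The second listing should show the libgplcompression.* native libraries produced by the build.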
6. Install lzop. Download http://www.lzop.org/download/lzop-1.03.tar.gz
Extract: tar -zxf lzop-1.03.tar.gz
cd lzop-1.03
Build and install: ./configure && make && make install
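As a quick smoke test, compress a sample file with lzop and push it to HDFS (the file name is only an example):
lzop -v /tmp/sample.log
hadoop fs -put /tmp/sample.log.lzo /tmp/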
7. Configure the XML files (there must be no stray spaces or blank lines inside the property blocks)
mapred-site.xml
<property>
<name>mapred.compress.map.output</name>
<value>true</value>
</property>
<property>
<name>mapred.map.output.compression.codec</name>
<value>com.hadoop.compression.lzo.LzoCodec</value>
</property>
core-site.xml
<property>
<name>io.compression.codecs</name>
<value>org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec,com.hadoop.compression.lzo.LzoCodec,com.hadoop.compression.lzo.LzopCodec,org.apache.hadoop.io.compress.BZip2Codec</value>
</property>
<property>
<name>io.compression.codec.lzo.class</name>
<value>com.hadoop.compression.lzo.LzoCodec</value>
</property>
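Once the cluster has been restarted with the new configuration, a simple way to confirm the LZO codecs are picked up is to let Hadoop decompress the file uploaded in step 6 (the path is the example from that step):
hadoop fs -text /tmp/sample.log.lzo
If the codec configuration is wrong, this prints raw compressed bytes or fails instead of showing the original text.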
8. In $HADOOP_HOME/bin/hadoop, add JAVA_LIBRARY_PATH=/usr/local/hadoop/build/native/Linux-i386XX/lib (the killer step; this really does work).
9. Generate an LZO index file locally on a single machine:
hadoop jar /usr/local/hadoop-1.1.2/lib/hadoop-lzo-*.jar com.hadoop.compression.lzo.LzoIndexer /tmp/result_appdown_2013-04-13_sort_k2.lzo
Generate LZO index files with a MapReduce job on the cluster:
hadoop jar /opt/hadoop/lib/hadoop-lzo-*.jar com.hadoop.compression.lzo.DistributedLzoIndexer /pub/cc/20110616/18.log.lzo
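Either indexer writes the index next to the original file, so the result can be verified with a listing (the path matches the single-machine example above):
hadoop fs -ls /tmp/result_appdown_2013-04-13_sort_k2.lzo.index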
10. Using LZO-compressed files with an RCFile table
create external table f_test_external_lzo_textfile (
day_key string,
uip string,
appkey string
) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS
INPUTFORMAT 'com.hadoop.mapred.DeprecatedLzoTextInputFormat'
OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION '/tmp';
create external table f_test_external_lzo_rcfile (
day_key string,
uip string,
appkey string
) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS RCFILE;
LZO files cannot be loaded into the RCFile table directly; the data has to be inserted from the textfile table:
insert overwrite table f_test_external_lzo_rcfile select * from f_test_external_lzo_textfile;
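A rough end-to-end check from the shell, assuming the LZO file is sitting under /tmp so the external textfile table can see it (the count query is my own verification step, not part of the original notes):
hive -e "insert overwrite table f_test_external_lzo_rcfile select * from f_test_external_lzo_textfile;"
hive -e "select count(*) from f_test_external_lzo_rcfile;"
If the RCFile table returns the same row count as the textfile table, the conversion worked.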