|NO.Z.00051|——————————|BigDataEnd|——|Hadoop&Flume.V14|——|Flume.v14|Advanced Features|Interceptors|High Availability|Selectors.v06|
1. High-availability example: implementing Agent failover
### --- Environment setup: deploy Flume on linux121 and linux122 and update the environment variables
~~~ # Run on linux123
[root@linux123 ~]# cd /opt/yanqi/servers/
[root@linux123 servers]# scp -r flume-1.9.0/ linux121:$PWD
[root@linux123 servers]# scp -r flume-1.9.0/ linux122:$PWD
# Run on both linux121 and linux122
[root@linux121 ~]# vim /etc/profile
##FLUME_HOME
export FLUME_HOME=/opt/yanqi/servers/flume-1.9.0
export PATH=$PATH:$FLUME_HOME/bin
[root@linux121 ~]# source /etc/profile
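After sourcing the profile, it is worth confirming that both nodes can find the Flume binaries; a quick check, assuming the PATH change above took effect:
# Run on linux121 and on linux122
[root@linux121 ~]# flume-ng version     # should report Flume 1.9.0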
2. Agent configuration files
### --- linux123:flume-taildir-avro.conf
[root@linux123 ~]# vim $FLUME_HOME/conf/flume-taildir-avro.conf
# agent name
a1.sources = r1
a1.channels = c1
a1.sinks = k1 k2
# source
a1.sources.r1.type = TAILDIR
a1.sources.r1.positionFile = /root/flume_log/taildir_position.json
a1.sources.r1.filegroups = f1
a1.sources.r1.filegroups.f1 = /tmp/root/.*log
a1.sources.r1.fileHeader = true
# interceptor
a1.sources.r1.interceptors = i1 i2
a1.sources.r1.interceptors.i1.type = static
a1.sources.r1.interceptors.i1.key = Type
a1.sources.r1.interceptors.i1.value = LOGIN
# add a timestamp to the event header
a1.sources.r1.interceptors.i2.type = timestamp
# channel
a1.channels.c1.type = memory
a1.channels.c1.capacity = 10000
a1.channels.c1.transactionCapacity = 500
# sink group
a1.sinkgroups = g1
a1.sinkgroups.g1.sinks = k1 k2
# set sink1
a1.sinks.k1.type = avro
a1.sinks.k1.hostname = linux121
a1.sinks.k1.port = 9999
# set sink2
a1.sinks.k2.type = avro
a1.sinks.k2.hostname = linux122
a1.sinks.k2.port = 9999
# set failover
a1.sinkgroups.g1.processor.type = failover
a1.sinkgroups.g1.processor.priority.k1 = 100
a1.sinkgroups.g1.processor.priority.k2 = 60
a1.sinkgroups.g1.processor.maxpenalty = 10000
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1
a1.sinks.k2.channel = c1
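In this sink group, k1 (priority 100) is the preferred sink and k2 (priority 60) only takes over when k1 fails; maxpenalty caps the backoff for a failed sink at 10 seconds. Before starting a1, it can also help to confirm the inputs the TAILDIR source depends on; a minimal check, assuming Hive writes its log under /tmp/root as configured above:
# On linux123: pre-create the directory for the position file and confirm the log pattern matches something
[root@linux123 ~]# mkdir -p /root/flume_log
[root@linux123 ~]# ls /tmp/root/*.log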
### --- linux121:flume-avro-hdfs.conf
[root@linux121 ~]# vim $FLUME_HOME/conf/flume-avro-hdfs.conf
# set Agent name
a2.sources = r1
a2.channels = c1
a2.sinks = k1
# Source
a2.sources.r1.type = avro
a2.sources.r1.bind = linux121
a2.sources.r1.port = 9999
# interceptor
a2.sources.r1.interceptors = i1
a2.sources.r1.interceptors.i1.type = static
a2.sources.r1.interceptors.i1.key = Collector
a2.sources.r1.interceptors.i1.value = linux121
# set channel
a2.channels.c1.type = memory
a2.channels.c1.capacity = 10000
a2.channels.c1.transactionCapacity = 500
# HDFS Sink
a2.sinks.k1.type=hdfs
a2.sinks.k1.hdfs.path=hdfs://linux121:9000/flume/failover/
a2.sinks.k1.hdfs.fileType=DataStream
a2.sinks.k1.hdfs.writeFormat=TEXT
a2.sinks.k1.hdfs.rollInterval=60
a2.sinks.k1.hdfs.filePrefix=%Y-%m-%d
a2.sinks.k1.hdfs.minBlockReplicas=1
a2.sinks.k1.hdfs.rollSize=0
a2.sinks.k1.hdfs.rollCount=0
a2.sinks.k1.hdfs.idleTimeout=0
a2.sources.r1.channels = c1
a2.sinks.k1.channel=c1
### --- linux122:flume-avro-hdfs.conf
[root@linux122 ~]# vim $FLUME_HOME/conf/flume-avro-hdfs.conf
# set Agent name
a3.sources = r1
a3.channels = c1
a3.sinks = k1
# Source
a3.sources.r1.type = avro
a3.sources.r1.bind = linux122
a3.sources.r1.port = 9999
# interceptor
a3.sources.r1.interceptors = i1
a3.sources.r1.interceptors.i1.type = static
a3.sources.r1.interceptors.i1.key = Collector
a3.sources.r1.interceptors.i1.value = linux122
# set channel
a3.channels.c1.type = memory
a3.channels.c1.capacity = 10000
a3.channels.c1.transactionCapacity = 500
# HDFS Sink
a3.sinks.k1.type=hdfs
a3.sinks.k1.hdfs.path=hdfs://linux121:9000/flume/failover/
a3.sinks.k1.hdfs.fileType=DataStream
a3.sinks.k1.hdfs.writeFormat=TEXT
a3.sinks.k1.hdfs.rollInterval=60
a3.sinks.k1.hdfs.filePrefix=%Y-%m-%d
a3.sinks.k1.hdfs.minBlockReplicas=1
a3.sinks.k1.hdfs.rollSize=0
a3.sinks.k1.hdfs.rollCount=0
a3.sinks.k1.hdfs.idleTimeout=0
a3.sources.r1.channels = c1
a3.sinks.k1.channel=c1
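Both collectors write to the same path on the linux121 NameNode, and the %Y-%m-%d file prefix is resolved from the event timestamp, which is why the timestamp interceptor was added on a1. The HDFS sink creates the target directory on demand, but a quick reachability check before starting the agents does no harm:
# Run on any node with an HDFS client configured
[root@linux123 ~]# hdfs dfs -mkdir -p /flume/failover
[root@linux123 ~]# hdfs dfs -ls /flume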
3. Start the corresponding agents on linux121, linux122, and linux123 (start the downstream agents first)
### --- linux121
[root@linux121 ~]# flume-ng agent --name a2 --conf-file $FLUME_HOME/conf/flume-avro-hdfs.conf
~~ connection info
INFO ipc.NettyServer: [id: 0x009e5a15, /192.168.1.123:35300 => /192.168.1.121:9999] OPEN
INFO ipc.NettyServer: [id: 0x009e5a15, /192.168.1.123:35300 => /192.168.1.121:9999] BOUND: /192.168.1.121:9999
INFO ipc.NettyServer: [id: 0x009e5a15, /192.168.1.123:35300 => /192.168.1.121:9999] CONNECTED: /192.168.1.123:35300
### --- linux122
[root@linux122 ~]# flume-ng agent --name a3 --conf-file $FLUME_HOME/conf/flume-avro-hdfs.conf
~~ connection info
INFO ipc.NettyServer: [id: 0x75841b6c, /192.168.1.123:44550 => /192.168.1.122:9999] OPEN
INFO ipc.NettyServer: [id: 0x75841b6c, /192.168.1.123:44550 => /192.168.1.122:9999] BOUND: /192.168.1.122:9999
INFO ipc.NettyServer: [id: 0x75841b6c, /192.168.1.123:44550 => /192.168.1.122:9999] CONNECTED: /192.168.1.123:44550
### --- linux123
[root@linux123 ~]# flume-ng agent --name a1 --conf-file $FLUME_HOME/conf/flume-taildir-avro.conf
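Once all three agents are up, each node should show one Flume agent JVM, which jps lists as Application (as seen again in step 5):
# Run on linux121, linux122 and linux123
[root@linux123 ~]# jps -m | grep Application
# If the connection logs above are not printed to the console, append -Dflume.root.logger=INFO,console to the flume-ng commands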
4. Write data to hive.log, then check the HDFS directory
### --- Trigger an error entry (the SQL typo is intentional so Hive writes an error to hive.log)
[root@linux123 ~]# hive -e "show databasess"
# Output on linux123
INFO taildir.ReliableTaildirEventReader: Opening file: /tmp/root/hive.log, inode: 135825763, pos: 111033
### --- Output on linux121
INFO hdfs.HDFSDataStream: Serializer = TEXT, UseRawLocalFileSystem = false
INFO hdfs.BucketWriter: Creating hdfs://linux121:9000/flume/failover//2021-08-28.1630139472808.tmp
# Resulting file on HDFS
[root@linux123 ~]# hdfs dfs -ls /flume/failover/2021-08-28.1630139472808
-rw-r--r-- 3 root supergroup 6442 2021-08-28 16:32 /flume/failover/2021-08-28.1630139472808
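To confirm that the Hive error actually reached HDFS, the rolled file can be read back directly:
[root@linux123 ~]# hdfs dfs -cat /flume/failover/2021-08-28.1630139472808 | head -n 5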
5. Kill one Agent and check whether the other takes over
[root@linux121 ~]# jps
18336 Application
[root@linux121 ~]# kill -9 18336
# Error output on linux123
Caused by: java.net.ConnectException: Connection refused: linux121/192.168.1.121:9999
# Output on linux122: the standby collector takes over
INFO hdfs.BucketWriter: Creating hdfs://linux121:9000/flume/failover//2021-08-28.1630139891245.tmp
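To finish the failover test, the collector on linux121 can be restarted; since k1 has the higher priority, the failover processor should switch back to it once the sink recovers, after at most the 10-second maxpenalty backoff. A sketch of the check, using the same commands as above:
# Restart the collector on linux121
[root@linux121 ~]# flume-ng agent --name a2 --conf-file $FLUME_HOME/conf/flume-avro-hdfs.conf
# Trigger another error on linux123 and watch which collector creates the next HDFS file
[root@linux123 ~]# hive -e "show databasess"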