Elasticsearch lifecycle policy configuration and index template setup

  • Elasticsearch lifecycle (ILM) policy: indices stay hot until rollover, move to warm after 7 days, and to cold after 30 days
PUT _ilm/policy/deeplang-logger-policy
{
  "policy" : {
      "phases" : {
        "warm" : {
          "min_age" : "7d",
          "actions" : {
            "allocate" : {
              "number_of_replicas" : 1,
              "include" : { },
              "exclude" : { },
              "require" : { }
            },
            "set_priority" : {
              "priority" : 50
            }
          }
        },
        "cold" : {
          "min_age" : "30d",
          "actions" : {
            "set_priority" : {
              "priority" : 0
            }
          }
        },
        "hot" : {
          "min_age" : "0ms",
          "actions" : {
            "set_priority" : {
              "priority" : 100
            },
            "rollover" : {
              "max_primary_shard_size" : "10gb",
              "max_age" : "30d",
              "max_docs" : 10000000
            }
          }
        }
      }
    }
}
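A quick sanity check (not part of the original steps; it only reuses the policy name defined above) is to read the policy back from Kibana Dev Tools:

# Verify the policy was stored as expected
GET _ilm/policy/deeplang-logger-policy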
  • Template: binds every index matching deeplang-nginxaccess* to the ILM policy and the rollover alias
# Index template
PUT _template/deeplang-nginxaccess-template
{
  "index_patterns": ["deeplang-nginxaccess*"],
  "settings": {
    "number_of_replicas": 1,
    "number_of_shards": 1,
    "refresh_interval": "10s",
    "index.lifecycle.name":"deeplang-logger-policy",
    "index.lifecycle.rollover_alias":"deeplang-nginxaccess"
  }
}
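The request above uses the legacy _template API, which still works on 7.x but has been superseded by composable templates. If you prefer the newer API, a roughly equivalent sketch (same settings; the template name here is chosen only for illustration) would be:

PUT _index_template/deeplang-nginxaccess-template
{
  "index_patterns": ["deeplang-nginxaccess*"],
  "template": {
    "settings": {
      "number_of_replicas": 1,
      "number_of_shards": 1,
      "refresh_interval": "10s",
      "index.lifecycle.name": "deeplang-logger-policy",
      "index.lifecycle.rollover_alias": "deeplang-nginxaccess"
    }
  }
}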
  • Index creation: bootstrap the first index in the series and mark it as the write index for the rollover alias
PUT deeplang-nginxaccess-000001
{
  "aliases": {
    "deeplang-nginxaccess": {"is_write_index":true}
  }
}
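With the bootstrap index and write alias in place, the ILM state of the series can be checked at any time; both calls below are standard APIs using only the names defined above:

# Show which lifecycle phase each index is in and surface any ILM errors
GET deeplang-nginxaccess-*/_ilm/explain

# Optionally force a rollover to confirm the alias wiring (this creates the next -00000N index)
POST deeplang-nginxaccess/_rollover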
  • Install Filebeat and ship the logs to Elasticsearch
# Install
wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-7.17.8-linux-x86_64.tar.gz
tar -xf filebeat-7.17.8-linux-x86_64.tar.gz
mv filebeat-7.17.8-linux-x86_64 filebeat
cd filebeat

# vim filebeat.yml
filebeat.inputs:
- type: log
  id: deeplang-nginxaccess-id
  enabled: true
  paths:
    - /lingowhale/project/volume-production-nginx/production-nginx/access.log
  fields:
    source: nginxaccess
filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: true
# ======================= Elasticsearch template setting =======================
setup.ilm.enabled: false
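# Note: when ILM is disabled and the output index is customized, Filebeat may also
# insist on explicit template settings at startup. If it does, something like the
# following is typically needed (names mirror the alias above and are an assumption):
#setup.template.name: "deeplang-nginxaccess"
#setup.template.pattern: "deeplang-nginxaccess*"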

# =================================== Kibana ===================================
setup.kibana:
  host: "31.9.2.2:5601"

# ---------------------------- Elasticsearch Output ----------------------------
output.elasticsearch:
  # Array of hosts to connect to.
  hosts: ["172.16.1.1:9200"]
  indices:
    - index: "deeplang-nginxaccess"
      when.equals:
        fields:
          source: "nginxaccess"
# ================================= Processors =================================
processors:
  - add_host_metadata:
      when.not.contains.tags: forwarded
  - add_cloud_metadata: ~
  - add_docker_metadata: ~
  - add_kubernetes_metadata: ~



# Start filebeat
nohup ./filebeat -e -c filebeat.yml > filebeat.log &
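Once Filebeat is running, a quick way to confirm that documents are arriving is to check the indices behind the alias from Kibana Dev Tools (standard cat/count APIs, names taken from the sections above):

# List the indices behind the rollover alias with their document counts
GET _cat/indices/deeplang-nginxaccess*?v

# Count documents through the write alias
GET deeplang-nginxaccess/_count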

# Cron job to keep filebeat running
crontab -l
* * * * * /bin/bash /opt/scripts/monitorlog.sh > /dev/null &

# Script contents
[root@filebeat]# cat /opt/scripts/monitorlog.sh
#!/bin/sh
# Restart filebeat if it is not running
process_num=$(ps -ef | grep filebeat | grep -v 'grep' | wc -l)
if [ "${process_num}" -eq 0 ]; then
  cd /data/filebeat && nohup ./filebeat -e -c filebeat.yml > filebeat.log &
else
  echo "filebeat is already running"
fi

  • Reference (official documentation)
https://www.elastic.co/guide/en/elasticsearch/reference/7.5/getting-started-index-lifecycle-management.html
posted @ 2023-07-20 13:19  lixinliang