Fixing duplicate log entries produced by Logstash

If many threads are writing at the same time, the same message can be indexed more than once when it is shipped through Logstash. The pipeline below makes writes idempotent: the fingerprint filter derives a deterministic document_id from the message content (plus a hex-encoded timestamp prefix), so a repeated event overwrites the existing Elasticsearch document instead of creating a new one.

input {
    # Receive JSON-encoded log lines from the application over TCP.
    tcp {
        port => 5000
        codec => json {
            charset => "UTF-8"
        }
    }
}
## Add your filters / logstash plugins configuration here
filter {
  # Split the raw line into a timestamp and the remaining message text.
  dissect {
    mapping => {
      "message" => "%{ts} %{msg}"
    }
  }
  # Use the parsed timestamp as the event's @timestamp.
  date {
    match => [ "ts", "ISO8601" ]
  }
  # HMAC-MD5 of the raw message: identical messages always produce the same fingerprint.
  fingerprint {
    source => "message"
    target => "[@metadata][fingerprint]"
    method => "MD5"
    key => "test"
  }
  ruby {
    # Hex-encoded epoch seconds, used as a prefix of the document_id below.
    # The field reference must be written as [@metadata][prefix]; with the original
    # '@metadata[prefix]' spelling the %{[@metadata][prefix]} sprintf in the output
    # would never resolve.
    code => "event.set('[@metadata][prefix]', event.get('@timestamp').to_i.to_s(16))"
  }
}
output {
    elasticsearch {
        hosts => "elasticsearch:9200"
        user => "elastic"
        password => "1234qwert"
        # Deterministic _id: a repeated event overwrites the same document instead of duplicating it.
        document_id => "%{[@metadata][prefix]}%{[@metadata][fingerprint]}"
    }
    stdout { codec => rubydebug { metadata => true }}
}
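
To check that the deduplication works, a quick test is to send the same log line twice and confirm that Elasticsearch holds only one matching document. The following is a minimal sketch, assuming Logstash's TCP input is reachable on localhost:5000 and Elasticsearch on localhost:9200 with the credentials from the config above; the hostnames, the logstash-* index pattern, and the sample message are assumptions you may need to adjust.

# dedup_check.py - ship the same event twice, then count matching documents.
import base64
import json
import socket
import time
import urllib.request

LOG_LINE = {"message": "2021-01-27T16:45:00.000Z duplicate-test hello"}

def send_event(payload: dict) -> None:
    # The tcp input with the json codec expects one JSON object per line.
    with socket.create_connection(("localhost", 5000)) as sock:
        sock.sendall((json.dumps(payload) + "\n").encode("utf-8"))

# Send the identical event twice; both copies resolve to the same document_id.
send_event(LOG_LINE)
send_event(LOG_LINE)

time.sleep(5)  # give Logstash time to flush to Elasticsearch

# Count documents matching the test message; with the fingerprint-based _id this prints 1, not 2.
query = json.dumps({"query": {"match_phrase": {"message": "duplicate-test"}}}).encode("utf-8")
req = urllib.request.Request(
    "http://localhost:9200/logstash-*/_count",
    data=query,
    headers={
        "Content-Type": "application/json",
        "Authorization": "Basic " + base64.b64encode(b"elastic:1234qwert").decode("ascii"),
    },
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read())["count"])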

 

posted @ 2021-01-27 16:45  fat_girl_spring