ElastAlert Kibana plugin for Kibana 5.6.8 - lyonwang/TechNotes GitHub Wiki
References
elastalert-kibana-plugin 5.6.8
Docker 建置測試環境
Pull ELK Docker images
docker pull elasticsearch:5.6.8
docker pull logstash:5.6.8
docker pull kibana:5.6.8
docker pull bitsensor/elastalert
Rebuild logstash docker image
main.conf
# Input: accept events from Filebeat shippers over the Beats protocol.
input {
beats {
# The port to listen on for filebeat connections.
port => 5044
}
}
# Filter pipeline: each event is parsed according to its "category" field
# (set by the shipping Filebeat). Events with an unknown category pass
# through unmodified.
filter {
  if [category] == "autotesting" {
    # Auto-test logs ship a JSON document in "message"; expand it to top-level fields.
    json {
      source => "message"
      remove_field => "message"
    }
    date {
      # FIX: pattern names are strings in Logstash config — "ISO8601" must be quoted.
      match => ["startTime", "yyyy/MM/dd HH:mm:ss SSS", "ISO8601"]
      timezone => "Etc/UTC"
    }
  } else if [category] == "iis" {
    # IIS W3C log files start each header line with '#'; drop those.
    if [message] =~ "^#" {
      drop {}
    }
    # Split the space-delimited W3C fields into named fields.
    grok {
      match => ["message", "%{TIMESTAMP_ISO8601:log_timestamp} %{NOTSPACE:sitename} %{IP:serverip} %{WORD:method} %{URIPATH:uristem} %{NOTSPACE:uriquery} %{INT:port:int} %{NOTSPACE:username} %{IPORHOST:clientip} %{NOTSPACE:clienthttpversion} %{NOTSPACE:useragent} %{NOTSPACE:cookie} %{NOTSPACE:referer} %{NOTSPACE:cshost} %{NUMBER:response} %{INT:subresponse:int} %{INT:win32response:int} %{INT:sentbytes:int} %{INT:recvbytes:int} %{INT:timetaken:int} %{NOTSPACE:requestid} %{NOTSPACE:requeststarttime} %{NOTSPACE:xforwardedfor} %{NOTSPACE:reqaccept} %{NOTSPACE:reqacceptencoding} %{NOTSPACE:reqcontentlength} %{NOTSPACE:reqcontenttype} %{NOTSPACE:rescontenttype}"]
      remove_field => "message"
    }
    date {
      # FIX: Joda-Time "YYYY" means week-year; calendar year is lowercase "yyyy".
      match => [ "log_timestamp", "yyyy-MM-dd HH:mm:ss" ]
      timezone => "Etc/UTC"
      remove_field => "log_timestamp"
    }
    # Parse the raw User-Agent string into browser/OS/device fields.
    useragent {
      source => "useragent"
      remove_field => "useragent"
    }
    # X-Forwarded-For is "clientip,proxyip"; keep the first (real device) IP.
    grok {
      match => ["xforwardedfor", "%{NOTSPACE:deviceip},%{NOTSPACE:otherip}"]
    }
    geoip {
      source => "deviceip"
      remove_field => [ "deviceip", "otherip" ]
    }
    mutate {
      # FIX: field names in mutate option arrays must be quoted strings.
      lowercase => ["uristem"]
      remove_field => [ "build", "beat", "input_type", "offset", "source", "type" ]
      remove_tag => "beats_input_codec_plain_applied"
    }
    # Tag the event with the brand its URI path belongs to.
    if "letouzeus" in [uristem] {
      mutate {
        add_tag => "letou"
      }
    } else if "vwinzeus" in [uristem] {
      mutate {
        add_tag => "vwin"
      }
    } else if "633zeus" in [uristem] {
      mutate {
        add_tag => "633"
      }
    } else if "o8zeus" in [uristem] {
      mutate {
        add_tag => "o8"
      }
    } else if "mrcatzeus" in [uristem] {
      mutate {
        add_tag => "mrcat"
      }
    }
  } else if [category] == "gb-iis" {
    # Same W3C parsing as the "iis" branch, minus the brand tagging.
    if [message] =~ "^#" {
      drop {}
    }
    grok {
      match => ["message", "%{TIMESTAMP_ISO8601:log_timestamp} %{NOTSPACE:sitename} %{IP:serverip} %{WORD:method} %{URIPATH:uristem} %{NOTSPACE:uriquery} %{INT:port:int} %{NOTSPACE:username} %{IPORHOST:clientip} %{NOTSPACE:clienthttpversion} %{NOTSPACE:useragent} %{NOTSPACE:cookie} %{NOTSPACE:referer} %{NOTSPACE:cshost} %{NUMBER:response} %{INT:subresponse:int} %{INT:win32response:int} %{INT:sentbytes:int} %{INT:recvbytes:int} %{INT:timetaken:int} %{NOTSPACE:requestid} %{NOTSPACE:requeststarttime} %{NOTSPACE:xforwardedfor} %{NOTSPACE:reqaccept} %{NOTSPACE:reqacceptencoding} %{NOTSPACE:reqcontentlength} %{NOTSPACE:reqcontenttype} %{NOTSPACE:rescontenttype}"]
      remove_field => "message"
    }
    date {
      # FIX: "yyyy" (calendar year), not "YYYY" (week-year).
      match => [ "log_timestamp", "yyyy-MM-dd HH:mm:ss" ]
      timezone => "Etc/UTC"
      remove_field => "log_timestamp"
    }
    useragent {
      source => "useragent"
      remove_field => "useragent"
    }
    grok {
      match => ["xforwardedfor", "%{NOTSPACE:deviceip},%{NOTSPACE:otherip}"]
    }
    geoip {
      source => "deviceip"
      remove_field => [ "deviceip", "otherip" ]
    }
    mutate {
      lowercase => ["uristem"]
      remove_field => [ "build", "beat", "input_type", "offset", "source", "type" ]
      remove_tag => "beats_input_codec_plain_applied"
    }
  } else if [category] == "business" {
    # Format: "<timestamp> | <level> | <json payload>".
    grok {
      # FIX: the pattern was broken across two lines mid-token
      # ("%{GREEDYDATA:log_message" / "}"), which is a config syntax error;
      # rejoined to match the identical "application" branch.
      match => ["message", "%{TIMESTAMP_ISO8601:log_timestamp} \| %{WORD:log_level} \| %{GREEDYDATA:log_message}"]
      remove_field => "message"
    }
    date {
      match => ["log_timestamp", "ISO8601"]
      timezone => "Etc/UTC"
      remove_field => "log_timestamp"
    }
    json {
      source => "log_message"
      remove_field => "log_message"
    }
    # Hoist every key of the nested "Message" object to the event top level.
    ruby {
      code => "
      event.get('Message').each {|k, v|
      event.set(k, v)
      }
      event.remove('Message')"
    }
    mutate {
      # FIX: "best" was a typo for the Filebeat metadata field "beat"
      # (see the identical remove_field lists in the other branches).
      remove_field => [ "build", "beat", "input_type", "offset", "source", "type" ]
      remove_tag => "beats_input_codec_plain_applied"
    }
  } else if [category] == "application" {
    grok {
      match => ["message", "%{TIMESTAMP_ISO8601:log_timestamp} \| %{WORD:log_level} \| %{GREEDYDATA:log_message}"]
      remove_field => "message"
    }
    date {
      match => [ "log_timestamp", "ISO8601"]
      timezone => "Etc/UTC"
      remove_field => "log_timestamp"
    }
    json {
      source => "log_message"
      remove_field => "log_message"
    }
    mutate {
      remove_field => [ "build", "beat", "input_type", "offset", "source", "type" ]
      remove_tag => "beats_input_codec_plain_applied"
    }
  } else if [category] == "eventstorage" {
    # Events arrive pre-structured; only normalize the timestamp and strip metadata.
    date {
      match => [ "Timestamp", "ISO8601"]
      timezone => "Etc/UTC"
      remove_field => "Timestamp"
    }
    mutate {
      remove_field => [ "tags", "build", "beat", "input_type", "offset", "source", "type" ]
      remove_tag => "beats_input_codec_plain_applied"
    }
  }
}
# Output: route each event to a per-category daily index on the same
# Elasticsearch host. The ten original if/else-if branches differed only in
# the index prefix, which always equaled the event's "category" field, so a
# single sprintf'd output is behaviorally identical. Events whose category
# is not in the list are dropped, exactly as the original chain did.
output {
  if [category] in ["application", "iis", "business", "gb-iis", "qa-business", "qa-messaging", "messaging", "autotesting", "nginx-access", "nginx-error"] {
    elasticsearch {
      hosts => ["172.19.3.79:9200"]
      # e.g. category "iis" on 2018-03-01 -> index "iis-2018.03.01"
      index => "%{category}-%{+yyyy.MM.dd}"
    }
  }
}
Dockerfile
# Build a Logstash 5.6.8 image bundled with our pipeline configuration.
FROM logstash:5.6.8
# Bake the pipeline definition into the image.
COPY ./main.conf /etc/logstash/conf.d/main.conf
# Arguments passed to the image's logstash entrypoint: load our pipeline file.
CMD ["-f", "/etc/logstash/conf.d/main.conf"]
Docker build command
sudo docker build --force-rm --rm my-logstash:5.6.8 .
Rebuild Kibana with the ElastAlert plugin installation and settings
Dockerfile
# Build a Kibana 5.6.8 image with the ElastAlert plugin pre-installed.
FROM kibana:5.6.8
# Install the plugin build matching Kibana 5.6.8 from the bitsensor artifact server.
RUN /usr/share/kibana/bin/kibana-plugin install 'https://git.bitsensor.io/front-end/elastalert-kibana-plugin/builds/artifacts/5.6.8/raw/artifact/elastalert-kibana-plugin-latest.zip?job=build'
# Point the plugin at the host running the ElastAlert server (port defaults to 3030).
# NOTE(review): /etc/kibana/kibana.yml is the Docker Hub library image's config
# path — verify if switching to the docker.elastic.co image, which uses
# /usr/share/kibana/config/kibana.yml.
RUN echo 'elastalert.serverHost: 172.19.3.79' >> /etc/kibana/kibana.yml
Docker build command
sudo docker build --force-rm --rm my-kibana:5.6.8 .
Start all
ElastAlert
clone source
git clone https://github.com/bitsensor/elastalert.git; cd elastalert
Edit the Elasticsearch host setting in the cloned repository's
config/elastalert.yaml
Run Container
# Run the ElastAlert server detached, exposing its REST API on port 3030.
# Bind-mounts (relative to the cloned repo): the ElastAlert core config, the
# server config, and the rules / rule-template directories, so rules can be
# edited on the host without rebuilding the image.
sudo docker run -d -p 3030:3030 \
-v `pwd`/config/elastalert.yaml:/opt/elastalert/config.yaml \
-v `pwd`/config/config.json:/opt/elastalert-server/config/config.json \
-v `pwd`/rules:/opt/elastalert/rules \
-v `pwd`/rule_templates:/opt/elastalert/rule_templates \
--name elastalert bitsensor/elastalert:latest
Docker Compose for ELK
docker-compose.yml
# Compose stack for the ELK trio; the custom my-* images are built above.
version: '3.1'
services:
elasticsearch:
image: elasticsearch:5.6.8
ports:
# 9200 = HTTP REST API, 9300 = internal transport protocol.
- 9200:9200
- 9300:9300
kibana:
# Custom image with the ElastAlert plugin baked in.
image: my-kibana:5.6.8
ports:
- 5601:5601
# NOTE(review): depends_on only orders container start-up; it does not wait
# for Elasticsearch to be ready to accept connections.
depends_on:
- elasticsearch
logstash:
# Custom image with the main.conf pipeline baked in.
image: my-logstash:5.6.8
ports:
# Beats input port (matches the beats { port => 5044 } input).
- 5044:5044
depends_on:
- elasticsearch
Run Containers
sudo docker-compose up -d