logstash.searchlogger.conf
input {
  redis {
    host => "10.78.35.102"
    data_type => "list"
    key => "filebeat"
    codec => json {
      charset => "CP1252"
    }
  }
}

filter {

  #mutate {
  #  add_field => { "TEST_SPECIAL_CHARACTERS" => "Testing æøåÆØÅ" }
  #}

  # SEARCHLOGGER
  # ###################
  if [type] == "searchlogger" {

    # IMPORTANT:
    # When using the redis input plugin (together with filebeat), the actual log data (in our case a JSON object)
    # is passed to Logstash wrapped in another JSON object, so the inner "message" field is parsed here:
    json {
      source => "message"
      remove_field => ["message"]
    }
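
    # Rough sketch of the wrapping described above (illustrative only, not a real payload):
    #   { "type": "searchlogger", "message": "<the original searchlogger log entry as an escaped JSON string>", ... }
    # The json stage parses that escaped string so its fields can be used by the filters below.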

    # In most of my grok stages I set tag_on_failure => [] to clear the default
    # "_grokparsefailure" tag, which we don't need in production.
    # (However, custom failure and success tags are very helpful when debugging Logstash configs.)
    #grok {
    #  match => {
    #    "message" => ".+?; ((([a-zA-Z]+(\.|;|:))+) )+?\[%{WORD:collection}\].+?path=%{DATA:endpoint} params=\{%{DATA:params}\}.+?\{%{WORD:action}=\[%{DATA:docId}"
    #  }
    #  tag_on_failure => []
    #}
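
    # For debugging, a grok stage can instead tag success/failure explicitly
    # (the tag names below are just examples):
    #   tag_on_failure => ["searchlogger_grok_failure"]
    #   add_tag        => ["searchlogger_grok_success"]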

    #if ( [logdata][message][Session][ClientIp] and [logdata][message][Session][ClientIp] !~ "10." ) {
    if ( [logdata][message][Session][ClientIp] =~ /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/ ) {
      geoip {
        database => "/data/geoip/GeoLiteCity.dat"
        source => "[logdata][message][Session][ClientIp]"
        #target => "geoip_client"
      }
    }
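
    # With the legacy GeoLiteCity.dat database the geoip filter adds a [geoip] object
    # containing fields such as country_name, city_name, latitude and longitude
    # (sketch of a typical result, values made up):
    #   "geoip" => { "country_name" => "Denmark", "city_name" => "Copenhagen", ... }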

    mutate {
      convert => { "[logdata][message][Result][Hits]" => "integer" }
    }
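
    # e.g. a Hits value of "42" (string) becomes 42 (integer), so Elasticsearch can
    # aggregate on it numerically.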

    kv {
      field_split => "&?"
      source => "[logdata][message][RequestUri]"
      include_keys => [ "searchDirection", "pageNumber" ]
      target => "[logdata][message][Query][Params]"
    }
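
    # Sketch of the intended behaviour (the URI below is made up for illustration):
    #   RequestUri "/search?pageNumber=2&searchDirection=next&q=foo"
    #   -> [logdata][message][Query][Params] = { "pageNumber" => "2", "searchDirection" => "next" }
    # Only the keys listed in include_keys are kept; "&" and "?" both act as separators.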

    date {
      match => ["[logtime]", "YYYY-MM-dd HH:mm:ss.SSSS"]
      target => "@timestamp"
      #add_field => { "debug" => "timestampMatched"}
    }
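
    # Example of a logtime value this pattern is meant to match: "2016-06-10 13:44:05.1234",
    # which then replaces the default @timestamp set at ingest time.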
  }
}

output {
  #stdout { codec => dots }
  #stdout { codec => rubydebug }

  if [type] == "searchlogger" {
    #if "_grokparsefailure" not in [tags] {
    elasticsearch {
      hosts => ["http://elasticsearch:9200"]
      index => "logstash-%{+YYYY.MM.dd}"
    }
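
    # The index pattern above writes to daily indices, e.g. an event from 2016-06-10
    # ends up in "logstash-2016.06.10".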
    #}
  }
}