|
Größe: 4731
Kommentar:
|
Größe: 5249
Kommentar:
|
| Gelöschter Text ist auf diese Art markiert. | Hinzugefügter Text ist auf diese Art markiert. |
| Zeile 1: | Zeile 1: |
| = Logstash = | = Config = |
| Zeile 175: | Zeile 175: |
= Pattern = == jobdb == {{{ DATESTAMP_LSF %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR} DATESTAMP_LSF_SHORT %{DAY} %{MONTH} %{MONTHDAY} %{TIME} DATESTAMP_MYSQL %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME} ZERODATE 0000-00-00 00:00:00 DATESTAMP_MYSQL_OR_ZERODATE (?:%{DATESTAMP_MYSQL}|%{ZERODATE}) }}} == smb == {{{ SAMBADATE %{YEAR}/%{MONTHNUM}/%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} }}} == spm == {{{ SPMDATE %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND} SENDER %{WORD}::%{DATA} }}} |
Config
Apache
# Replay a copied Apache access log from the start on every run (testing setup).
input {
  file {
    path           => "/root/apfelreich_access.log"  # alternative: "/data/apache2/other_vhosts_access_all.log"
    tags           => "apfelreich"
    start_position => "beginning"  # testing only: read from the top of the file
    sincedb_path   => "/dev/null"  # testing only: forget read offsets between runs
  }
  # earlier test input:
  # file { path => "/data/teufelsmuehle/access_log" }
  # stdin {}
}
# Parse Apache combined-log events, extract referrer key/value pairs,
# set @timestamp from the request time, and attach GeoIP coordinates.
filter{
  grok {
    # Example line:
    # www.apfelreich.net:80 127.0.0.1 - - [31/May/2015:06:27:28 +0200] "GET /index.html HTTP/1.1" 200 454 "-" "monit/5.4"
    match => [ "message", "%{COMBINEDAPACHELOG}" ]
  }
  # Turn '?' and '&' in the referrer into spaces so kv{} can split key=value pairs.
  mutate {
    gsub => [
      # remove ? from referrer
      "referrer", "\?", " "]
  }
  mutate {
    gsub => [
      # remove & from referrer
      "referrer", "\&", " "]
  }
  # key-value-match in referrer
  kv{ source => "referrer"}
  # Use the request timestamp (not ingestion time) as the event's @timestamp.
  date { locale => "en" match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] }
  mutate {
    convert => { "bytes" => "integer" }
  }
  geoip{
    source => "clientip"
  }
  mutate {
    # FIX: 'tags' is not a mutate option; 'add_tag' is the common option that tags events.
    add_tag => [ "geoip" ]
    # 'coords' will be kept, 'tmplat' is temporary.
    # Both of these new fields are strings.
    # FIX: nested fields need the [geoip][longitude] reference syntax;
    # the dotted form %{geoip.longitude} is not resolved as a nested field.
    add_field => [ "coords", "%{[geoip][longitude]}",
                   "tmplat", "%{[geoip][latitude]}" ]
  }
  mutate {
    # Merge 'tmplat' into 'coords' -> ["lon", "lat"] array
    merge => [ "coords", "tmplat" ]
  }
  mutate {
    # Convert our new array of strings back to float
    convert => [ "coords", "float" ]
    # FIX: 'remove' was dropped from mutate; 'remove_field' is the supported option.
    # Delete our temporary latitude field
    remove_field => [ "tmplat" ]
  }
}
# Ship parsed Apache events to Elasticsearch on host "labor05".
output{
# Debug sink: uncomment to dump every event to the console.
#stdout { codec => rubydebug }
# NOTE(review): 'host' is the pre-2.x elasticsearch-output option; newer
# releases use 'hosts' => ["labor05"] — confirm the Logstash version in use.
elasticsearch { host => "labor05" }
}
SMB
# Tail the Samba per-machine logs; the testing knobs re-read the files on every run.
input {
  file {
    type           => "samba"
    path           => "/var/log/samba/log.10*"
    codec          => plain { charset => "ISO-8859-1" }  # necessary for ancient windows encoding
    start_position => "beginning"  # remove for production
    sincedb_path   => "/dev/null"  # remove for production
  }
  # stdin { }  # manual-input alternative while debugging
}
# Stitch wrapped Samba log lines back together, then split each entry into
# timestamp, log level, process/function info and the remaining message text.
filter {
multiline {
pattern => "^\s" # line beginning with whitespace...
what => "previous" # ... belong to the previous line
}
mutate {
gsub => [
# remove linefeed
"message", "\n", ""]
}
# SAMBADATE is a custom pattern from patterns_dir (see the "smb" pattern file);
# the grok captures the raw timestamp into [@metadata][timestamp] so it never
# gets indexed, only consumed by the date filter below.
grok{
patterns_dir => "/etc/logstash/patterns"
match => { "message" => "\[%{SAMBADATE:[@metadata][timestamp]}\, %{INT:loglevel}\] %{PROG:process}:%{POSINT:pid}\(%{PROG:function}\) %{GREEDYDATA:rest}" }
}
date {
locale => "en" # enforcing locale as date relies on the systems setting
timezone => "UTC" # as machine clock is in UTC
# NOTE(review): SAMBADATE uses slashes ("YYYY/MM/dd ..."), so only the second
# format looks applicable here — confirm against real log samples.
match => [ "[@metadata][timestamp]", "YYYY-MM-dd HH:mm:ss,SSS", "YYYY/MM/dd HH:mm:ss.SSSSSS" ] # updating directly the metadata
}
# For lines whose tail mentions "service", pull the service name into its own field.
if "service" in [rest]{
grok {
match => { "rest" => "%{DATA} service %{GREEDYDATA:service}" }
}
}
}
# Forward successfully parsed Samba events to Elasticsearch.
output {
# stdout { codec => rubydebug { } }
if "_grokparsefailure" not in [tags] { # don't consider lines without log message
# NOTE(review): 'node_name' belongs to the old node/transport protocol of the
# elasticsearch output; current releases use 'hosts' instead — confirm version.
elasticsearch { node_name => "samba" workers => 2 }
#host => "elkstack01" protocol => "http" user =>"es_admin" password => "password" } # name of this nodes & where to send data
}
}
SPM
# Read all SPM log files; the testing knobs re-read the files on every run.
input {
  file {
    type           => "spm"
    path           => "/var/log/spm/*.log"
    start_position => "beginning"  # remove for production
    sincedb_path   => "/dev/null"  # remove for production
    # codec => plain { charset => "ISO-8859-1" }  # necessary for ancient windows encoding
  }
  # stdin { }  # manual-input alternative while debugging
}
# Join wrapped SPM log lines, strip linefeeds, then split each entry into
# log level, timestamp, message text and sender.
filter {
multiline {
pattern => "^\s" # line beginning with whitespace...
what => "previous" # ... belong to the previous line
}
# Fold the "update resources" banner line into the event that follows it.
multiline {
pattern => ".*---------------------- update resources ----------------------.*"
what => "next"
}
mutate {
gsub => [
# remove linefeed
"message", "\n", ""]
}
# SPMDATE and SENDER are custom patterns from patterns_dir (see the "spm"
# pattern file); the timestamp lands in [@metadata][timestamp] so it is
# consumed by the date filter below without being indexed.
grok{
patterns_dir => "/etc/logstash/patterns"
match => { "message" => "\[%{WORD:loglevel}%{SPACE}\] %{SPMDATE:[@metadata][timestamp]} \: %{DATA:logmessage} \[%{SENDER:sender}\]" }
}
date {
locale => "en" # enforcing locale as date relies on the systems setting
timezone => "UTC" # as machine clock is in UTC
# NOTE(review): SPMDATE is dash-separated without an explicit fraction part;
# verify the listed formats against real log samples.
match => [ "[@metadata][timestamp]", "YYYY-MM-dd HH:mm:ss,SSS", "YYYY/MM/dd HH:mm:ss.SSSSSS" ] # updating directly the metadata
}
}
# Forward successfully parsed SPM events to Elasticsearch.
output {
# stdout { codec => rubydebug { } }
if "_grokparsefailure" not in [tags] { # don't consider lines without log message
# NOTE(review): 'node_name' belongs to the old node/transport protocol of the
# elasticsearch output; current releases use 'hosts' instead — confirm version.
elasticsearch { node_name => "spm" }
# host => ["elkstack01", "elkstack02"] protocol => "http" user =>"es_admin" password => "password"} # name of this nodes & where to send data
}
}
Pattern
jobdb
# Custom grok date patterns for the job-database logs.
# LSF-style full timestamp, e.g. "Sun May 31 06:27:28 2015".
DATESTAMP_LSF %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
# Same as above but without the trailing year.
DATESTAMP_LSF_SHORT %{DAY} %{MONTH} %{MONTHDAY} %{TIME}
# MySQL DATETIME, e.g. "2015-05-31 06:27:28".
DATESTAMP_MYSQL %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME}
# MySQL's all-zero placeholder datetime.
ZERODATE 0000-00-00 00:00:00
# Either a real MySQL datetime or the zero placeholder.
DATESTAMP_MYSQL_OR_ZERODATE (?:%{DATESTAMP_MYSQL}|%{ZERODATE})
smb
# Samba log timestamp, slash-separated date, e.g. "2015/05/31 06:27:28".
SAMBADATE %{YEAR}/%{MONTHNUM}/%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}
spm
# SPM log timestamp, dash-separated date, e.g. "2015-05-31 06:27:28".
SPMDATE %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}
# Sender token of the form "<word>::<anything>".
SENDER %{WORD}::%{DATA}