Config

if (conditionals test)

input {
  stdin {
    tags => ["blubb"]
  }
}

filter {
  if "bla" in [tags] {
    mutate { add_tag => "field in field" }
  }
  if "blubb" in [tags] {
    mutate { add_tag => "field at field" }
  }
  if "ERROR" in [message] {
    mutate { add_tag => "ERROR" }
  }
}

output {
  stdout {
    codec => rubydebug
  }
}
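
Note on `in`: for a string field it is a substring test, for an array field an element test — so "bla" does not match the array ["blubb"] even though it is a substring of "blubb", which is presumably what the bla/blubb pair here probes. A minimal sketch of the difference (hypothetical tag names):

filter {
  # [tags] is an array: element test, "bla" does NOT match ["blubb"]
  if "bla" in [tags] {
    mutate { add_tag => "array_element" }
  }
  # [message] is a string: substring test, "RRO" matches "ERROR"
  if "RRO" in [message] {
    mutate { add_tag => "substring_match" }
  }
}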

Beats

input {
  beats {
    port => 5044
  }
}

output {
  elasticsearch {
    hosts => "labor05:9200"
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
}
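
`document_type` is obsolete on current stacks, since Elasticsearch 6 allows only a single mapping type per index (cf. the "# obsolete:" notes in the Dynatrace config below). A variant of this output for newer Logstash would simply drop the option:

output {
  elasticsearch {
    hosts => ["labor05:9200"]
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    # document_type removed: ES 6+ supports only one mapping type
  }
}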

Dynatrace

input {
  beats {
    port => 5044
    ssl => true
    ssl_certificate_authorities => ["/etc/logstash/ssl/e.pem"]
    ssl_certificate => "/etc/logstash/ssl/e.pem"
    ssl_key => "/etc/logstash/ssl/e_pkcs8_pem.key"
    ssl_verify_mode => "force_peer"
    tls_min_version => "1.2"
  }
}


filter {
if "metricbeat" not in [tags] and "heartbeat" not in [tags] {

  mutate {
    gsub => [
     # replace all tabs with spaces
      "message", "\t", "    "
    ]
  }

  #mutate {
  #  remove_field => [ "[host]" ]
  #}

  if "dynatrace" in [tags] {
    grok {
      patterns_dir => ["/etc/logstash/dynatrace_patterns"]
      match => { "message" => "%{DATESTAMP_DT:datestamp_dt} UTC %{WORD:loglevel} \[%{DATA:logger}\]%{SPACE}%{GREEDYDATA:logmessage}" }
    remove_field => ["message"]
    }
  date {
    #  2016-08-22 04:18:05
    match => [ "datestamp_dt", "YYYY-MM-dd HH:mm:ss" ]
    timezone => "UTC"
    target => "@timestamp"
  }

  }

  else if "postgres" in [tags] {
    grok {
       match => { "message" => "%{TIMESTAMP_ISO8601:date} %{WORD} \[%{NUMBER}\] %{WORD:loglevel}\:%{SPACE}%{GREEDYDATA:logmessage}" }
       remove_field => ["message"]
    }
  date {
    #  2017-05-02 11:11:42.940
    match => [ "date", "YYYY-MM-dd HH:mm:ss.SSS" ]
    target => "@timestamp"
  }

  }

  else if "apache_access" in [tags] {
    grok {
    match => { "message" => "%{COMMONAPACHELOG}" }
    remove_field => ["message"]                                                                                                                                                                                                                                                
    }
    date {
      match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss Z" ]
    }

  }


  # tag every repetition of the same logmessage within an hour as "throttled";
  # only the first occurrence passes untagged
  throttle {
    before_count => -1
    after_count => 1
    period => 3600
    max_age => 7200
    key => "%{logmessage}"
    add_tag => "throttled"
  }
if [loglevel] == "SEVERE"
and "throttled" not in [tags]
and "Failed to start DB Agent" not in [logmessage]
and "Failed to update DB Agent state" not in [logmessage]
and "Java detection failed" not in [logmessage] {
  mutate { add_tag => "mail" }
}

if "LOG" not in [loglevel]
and "throttled" not in [tags]
and "postgres" in [tags]
{
  mutate { add_tag => "mail" }
}
#and "has already been removed" not in [logmessage]
#    or "The provided email configuration was not valid: At least one receiver address required for sending email with subject" in [logmessage]
if "throttled" not in [tags] and "canceling autovacuum task" not in [logmessage] and "automatic analyze of table" not in [logmessage] {
  if "Consider increasing the configuration parameter"  in [logmessage]
    or "One of the time table entries for job" in [logmessage]
    or "Can not create Memory Snapshot because the license is exhausted for Agent Instance" in [logmessage]
    or "Sending email caused an exception" in [logmessage]
    or "no pg_hba.conf entry for host" in [logmessage]
    or "unsupported frontend protocol" in [logmessage]
    or "org.apache.commons.mail.EmailException" in [logmessage]
    or "Failed to process report for schedule" in [logmessage]
    or "DelegatingDataSource" in [logger]
  {

    mutate { add_tag => "ticket_wartung" }

  }

#"matches no system profile - does not consume a license" in [logmessage]
  if "UNHEALTHY" in [logmessage]
  {
#    if "CODO" not in [logmessage] and "FERNV" not in [logmessage] and  "IBIS" not in [logmessage]
#    {

    mutate { add_tag => "ticket_betrieb" }
#    }
  }
}

#      or "The provided email configuration was not valid" in [logmessage]
#and "CODO" not in [logmessage] and "FERNV" not in [logmessage] and  "IBIS" not in [logmessage]
if [loglevel] == "WARNING" and "throttled" not in [tags] and "postgres" not in [tags] {
  if "No space left on device" in [logmessage]
      or "There may be an issue with the time synchronization with agent" in [logmessage]
      or "Global Agent Time significantly behind current system time" in [logmessage]
      or "Unable to update agent drift" in [logmessage]
      or "Potentially skipped data between" in [logmessage]
      or "Can not create Memory Snapshot" in [logmessage]
      or "Cannot create report, server is low on memory" in [logmessage]
      or "affected by old virtual agent time" in [logmessage]
      or "Check for outdated" in [logmessage]
      or "NullPointerException" in [logmessage]
      or "Unable to initialize the stored session manager" in [logmessage]
      or "TransactionConfigReference is null" in [logmessage]
      or "dump creation command failed" in [logmessage]
      or "Could not create XML Data Export due to an internal error" in [logmessage]
      or "Sending the email to the following server failed" in [logmessage]
      or "matches no system profile - does not consume a license" in [logmessage]
  {
    mutate { add_tag => "mail" }
  }
}
# and "CODO" not in [logmessage] and "FERNV" not in [logmessage]  and  "IBIS" not in [logmessage]
if [loglevel] == "WARNING" and "throttled" not in [tags] and "postgres" not in [tags]{
  if  [logger] == "QueueWatchdog" or
        [logger] == "WatchDog" or
        [logger] ==  "Measure" or
        [logger] == "Scheduler" or
        [logger] == "FileTools" or
        [logger] == "AggressiveThroughputAdaptingHealthAction"


        {
          mutate {
            add_tag => "mail"
            }
          }
      }
   }

if "heartbeat" in [tags] {
   throttle {
    before_count => 2
    after_count => 2
    period => 3600
    max_age => 7200
    key => "%{[monitor][status]}"
    add_tag => "throttled"
  }

  if "throttled" not in [tags] {
    if [monitor][status] == "down" {
      mutate {
            add_tag => "mail"
           }
       }
  }
} #heartbeat in tags

#if "metricbeat" in [tags] {
#    mutate {
#        add_field => { "disk_percent" => "%{[system][filesystem][used][pct]}"}
#        convert => { "disk_percent" => "float" }
#    }
#   if [disk_percent] > 0.9 {
#      mutate {
#        add_tag => "high_disk_fill"
#      }
#    }
#
#   throttle {
#    before_count => -1
#     after_count => 1
#     period => 3600
#     max_age => 7200
#     key => "%{limit}%{[beat][hostname]}"
#     add_tag => "throttled"
#    }

#    if "throttled" not in [tags] {
#      if [limit] == "over" {
#        mutate {
#          add_tag => "mail"
#        }
#      }
#    }

# }

} # filter


output {
    #if "_grokparsefailure" in [tags] {
    #    stdout { codec => "rubydebug" }
    #}
    if "_grokparsefailure" not in [tags] {
      if "metricbeat" not in [tags] and "heartbeat" not in [tags] and "postgres" not in [tags] {
        elasticsearch {
            hosts =>  ["eagleeye-a3001:9200"]
            manage_template => false
            index => "%{[@metadata][beat]}-%{+YYYY.MM}"
            user => "logstash_writer"
            password => "ieM9echuu4cohRah4eew"
            # obsolete: document_type => "%{[@metadata][type]}"
        }
        }
      else if "postgres" in [tags] {
        elasticsearch {
            hosts =>  ["eagleeye-a3001:9200"]
            manage_template => false
            index => "postgres-%{+YYYY.MM}"
            user => "logstash_writer"
            password => "ieM9echuu4cohRah4eew"

            # obsolete: document_type => "%{[@metadata][type]}"
        }
        }
      else {
         elasticsearch {
            hosts =>  ["eagleeye-a3001:9200"]
            index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM}"
            user => "logstash_writer"
            password => "ieM9echuu4cohRah4eew"

        }
      }
        if "mail" in [tags] {
          if "heartbeat" in [tags]{
            email {
                to => "robert@"
                from => "host@localhost"
                body => "%{[url][domain]} is down from %{[observer][hostname]}\n%{[error][message]}"
                subject => "%{[url][domain]} is down from %{[observer][hostname]}. This message is suppressed for 1h."
                address => "gateway.dhl.com"
              }
             } # heartbeat in tags
          else if "postgres" in [tags]{
            email {
                to => "robert"
                from => "e"
                body => "%{date} %{logmessage}"
                subject =>  "[logstash] %{loglevel} message in PostgreSQL Server Log on %{[agent][hostname]}"
                address => "gateway.dhl.com"
              }
             } # postgres in tags
           else if "metricbeat" in [tags] {
             email {
                to => "robert"
                from => ""
                body => "%{date} Value %{[system][filesystem][used][pct]} too high for %{system.filesystem.mount_point}"
                subject =>  "Threshold exceeded on %{[agent][hostname]}"
                address => "gateway.dhl.com"
             }
           }
           else {
            email {
                to => "rm"
                from => "e"
                body => "%{datestamp_dt} %{logmessage}"
                subject => "[logstash] %{loglevel} message in Dynatrace Server Log on %{[agent][hostname]}"
                address => "gateway.dhl.com"
            } # email
           } # else not heartbeat
          } # mail in tags

          if "ticket_wartung" in [tags] {
            email {
                to => "e"
                from => "eagleeye@deutschepost.de"
                body => "Schema:"
                subject => "%{[agent][hostname]}: %{loglevel} %{logmessage}"
                address => "gom"
              }
             } # ticket_wartung in tags
          if "ticket_betrieb" in [tags] {
            email {
                to => "ro"
                from => "e"
                body => "Schema: P '%{[agent][hostname]}: %{loglevel} %{logmessage}' "
                subject => "%{[agent][hostname]}: %{loglevel} %{logmessage}"
                address => ".com"
              }
             } # ticket_betrieb in tags
    }  # grokparsefailure
} # output

input {
  file {
       path => "/root/dt/Server/WUM96184/dtserver/6592/logs/Server.0.0.log"
       sincedb_path => "/dev/null"
       start_position => "beginning"
   }
}




filter {

  mutate {
    gsub => [
      # replace all tabs with four spaces
      "message", "\t", "    "
    ]
  }


 multiline {
    pattern => "^[^0-9]"
    what => "previous"
  }

  grok {
    patterns_dir => ["/usr/local/logstash-1.5.4/etc/dynatrace_patterns"]
    match => { "message" => "%{DATESTAMP_DT:datestamp_dt} %{WORD:loglevel} \[%{DATA:logger}\]%{GREEDYDATA:logmessage}" }
  }


  date {
    # 2016-08-22 04:18:05
    match => [ "datestamp_dt", "YYYY-MM-dd HH:mm:ss" ]
    target => "@timestamp"
  }

#kv {}

#mutate {
#   convert => { "Revenue" => "float" }
#}


}


output {
#    if "_grokparsefailure" in [tags] {                                                                                                                                                                                                                                                                                        
#        stdout { codec => "rubydebug" }                                                                                                                                                                                                                                                                                       
#    }                                                                                                                                                                                                                                                                                                                         
    if "_grokparsefailure" not in [tags] {
           elasticsearch {  }
    }
}
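
The standalone multiline filter used here only exists in old releases (the patterns_dir above points into a logstash-1.5.4 tree); current Logstash moves this into a multiline codec on the input instead. A sketch under that assumption, with the same continuation rule:

input {
  file {
    path => "/root/dt/Server/WUM96184/dtserver/6592/logs/Server.0.0.log"
    sincedb_path => "/dev/null"
    start_position => "beginning"
    codec => multiline {
      # lines not starting with a digit belong to the previous event
      pattern => "^[^0-9]"
      what => "previous"
    }
  }
}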

Apache

input {
#  file { "path" => "/data/teufelsmuehle/access_log" # "/data/apache2/other_vhosts_access_all.log"
  file { "path" => "/root/apfelreich_access.log" # "/data/apache2/other_vhosts_access_all.log"
         "tags" => "apfelreich"
         "start_position" => "beginning"
         "sincedb_path" => "/dev/null"
  }
#  stdin {}
}

filter {
  grok {
    match => [ "message", "%{COMBINEDAPACHELOG}" ]
    # www.apfelreich.net:80 127.0.0.1 - - [31/May/2015:06:27:28 +0200] "GET /index.html HTTP/1.1" 200 454 "-" "monit/5.4"
  }
  mutate {
    gsub => [
      # remove ? from referrer
      "referrer", "\?", " " ]
  }

  mutate {
    gsub => [
      # remove & from referrer
      "referrer", "\&", " " ]
  }
  # key-value match in referrer
  kv { source => "referrer" }
  date { locale => "en" match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ] }
  mutate {
    convert => { "bytes" => "integer" }
  }

##### new ###
geoip {
  source => "clientip"
  target => "geoip"
  # database => "/etc/logstash/GeoLiteCity.dat"
  add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
  add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
}
mutate {
  convert => [ "[geoip][coordinates]", "float" ]
}
##### new ###

#### old ###
# geoip {
#   source => "clientip"
#  }

#  mutate {
#   # unsupported in Logstash 5: tags => [ "geoip" ]
#   # 'coords' will be kept, 'tmplat' is temporary.
#   # Both of these new fields are strings.
#    add_field => [ "coords", "%{geoip.longitude}",
#                   "tmplat", "%{geoip.latitude}" ]
#          }
#    mutate {
#      # unsupported in Logstash 5: tags => [ "geoip" ]
#      # Merge 'tmplat' into 'coords'
#      merge => [ "coords", "tmplat" ]
#       }
#     mutate {
#     # unsupported in Logstash 5: tags => [ "geoip" ]
#     # Convert our new array of strings back to float
#      convert => [ "coords", "float" ]
#     # Delete our temporary latitude field
#      remove_field => [ "tmplat" ]
#       }
#### old ###
}
output {
  # stdout { codec => rubydebug }
  elasticsearch { host => "labor05" }
}
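
`host` is the pre-2.0 option name; since Logstash 2.x the elasticsearch output expects `hosts` as an array (optionally with port). Minimal equivalent:

output {
  elasticsearch { hosts => ["labor05:9200"] }
}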

Jobdb

input {
  file { path => "/root/jobdb/bhist_1.txt"
         sincedb_path => "/dev/null"
         start_position => "beginning"
         type => "jobdb" }
}

filter {

  if [message] == "" {
    drop { }
  }
#  if [message] == "------------------------------------------------------------------------------" {
#    drop { }
#  }

  multiline {
    pattern => "^\s"
    what => "previous"
  }

  multiline {
    pattern => "^\w"
    what => "previous"
  }

  mutate {
    # one gsub list: strip linefeeds, the wide padding and the separator line
    gsub => [
      "message", "\n", "",
      "message", "                     ", "",
      "message", "------------------------------------------------------------------------------", ""
    ]
  }

  grok {
     # Job <671106>, Job Name <VW324-PF-ND_MB41-URhDHGV_140_0-D36_270-29-15-0L47_1_1>,User <user>, Project <PROJECT>,
     match => { "message" => "Job <%{NUMBER:jobid}>, Job Name <%{DATA:job_name}>,User <%{WORD}>, Project <%{DATA:project}>,%{GREEDYDATA}%{DATESTAMP_LSF:submitdate}%{GREEDYDATA:afterdate}Submitted from host <%{DATA:submithost}>, to Queue <%{DATA:queue}>%{GREEDYDATA:rest}" }
#                            "Job <%{NUMBER:jobid}>, Job Name <%{DATA:job_name}>,User <%{WORD}>, Project <%{DATA:project}>,%{GREEDYDATA}%{DATESTAMP_LSF:creationdate}%{GREEDYDATA:afterdate}-a %{WORD:appl}%{GREEDYDATA:afterappl}%{DATESTAMP_LSF_SHORT:submitdate}: Submitted from host <%{DATA:submithost}>, to Queue <%{DATA:queue}>%{GREEDYDATA:rest}"
  }

  #kv { trimkey => '<>' field_split => ";, "}

}

output {
  stdout { codec => "rubydebug" }
}

Jobdb-Mysql

input {
  file { path => "/root/jobdb/all_since_2015-06-26.txt"
         sincedb_path => "/dev/null"
         start_position => "beginning"
         type => "jobdb_mysql" }
}

filter {

  # drop the tab-separated header line of the export
  if [message] == "jobid\tsubmitepoch\tdispatchepoch\tendepoch\trequeueepoch\tjob_name\tuser\tproject\tfirstqueue\tlastqueue\tsubmithost\tncpus\tappl\tappl_res\tmode\tresources\tmem\thostlist\tcpulimit\trunlimit\tstatus\texit_reason\treturn_value\tappl_err_string\tnumber_of_susps\tsignal\tpendtime\tpsusptime\truntime\tususptime\tunkwntime\ttotaltime\tjobdep\tpost_job_process\terr_num\terr_time\tnet_pend\tcomment\tssusptime\thangup\tprec\tfemzip\tjob_cmd_ofl\tanalysis\tcpcpus\tuserlib\tuserlib_name\tbatch\tpost\tpam_parts_gt0\tpam_inpchkerr\tpam_penkin_ne0\tpam_errterm\tfpm\tfixthick\tdscal_min\tmpilist\tmax_appl_disk_use\n" {
    drop { }
  }
  grok {
     # Job <671106>, Job Name <VW324-PF-ND_MB41-URhDHGV_140_0-D36_270-29-15-0L47_1_1>,User <u0zhb27>, Project <5ZA1606-36505>,
     patterns_dir => "/etc/logstash/patterns"
     match => { "message" => "%{NUMBER:jobid}\t%{DATESTAMP_MYSQL:[@metadata][timestamp]}\t%{DATESTAMP_MYSQL_OR_ZERODATE:dispatchdate}\t%{DATESTAMP_MYSQL_OR_ZERODATE:enddate}\t%{DATESTAMP_MYSQL_OR_ZERODATE:requeuedate}\t%{DATA:jobname}\t%{DATA:user}\t%{DATA:project}\t%{DATA:firstqueue}\t%{DATA:lastqueue}\t%{DATA:submithost}\t%{DATA:ncpus}\t%{DATA:appl}\t%{DATA:appl_res}\t%{DATA:mode}\t%{DATA:ressources}\t%{DATA:mem}\t%{DATA:hostlist}\t%{DATA:cpulimit}\t%{DATA:runlimit}\t%{DATA:status}\t%{DATA:exit_reason}\t%{DATA:return_value}\t%{DATA:appl_err_string}\t%{DATA:number_of_susps}\t%{DATA:signal}\t%{DATA:pendtime}\t%{DATA:psusptime}\t%{DATA:runtime}\t%{DATA:ususptime}\t%{DATA:unkwntime}\t%{DATA:totaltime}\t%{DATA:jobdep}\t%{DATA:post_job_process}\t%{DATA:err_num}\t%{DATA:err_time}\t%{DATA:net_pend}\t%{DATA:comment}\t%{DATA:ssusptime}\t%{DATA:hangup}\t%{DATA:prec}\t%{DATA:femzip}\t%{DATA:job_cmd_ofl}\t%{DATA:analysis}\t%{DATA:cpcpus}\t%{DATA:userlib}\t%{DATA:userlib_name}\t%{DATA:batch}\t%{DATA:post}\t%{DATA:pam_parts_gt0}\t%{DATA:pam_inpchkerr}\t%{DATA:pam_penkin_ne0}\t%{DATA:pam_errterm}\t%{DATA:fpm}\t%{DATA:fixthick}\t%{DATA:dscal_min}\t%{DATA:mpilist}\t%{GREEDYDATA:max_appl_disk_use}"}}

# %{GREEDYDATA:rest}" 


    date {
      locale => "en" # enforce the locale; date parsing depends on the system setting
      timezone => "UTC" # the machine clock runs in UTC
      # 2015-06-26 00:00:09
      match => [ "[@metadata][timestamp]", "YYYY-MM-dd HH:mm:ss" ] # parses the timestamp kept in @metadata
    }

     if [max_appl_disk_use] == "NULL" { 
        mutate{   replace => { "max_appl_disk_use" => "0" }}
     }


     mutate {
       convert => { "ncpus" => "integer" } 
       convert => { "mem" => "integer" } 
       convert => { "cpulimit" => "integer" } 
       convert => { "runlimit" => "float" }
       convert => { "number_of_susps" => "integer" } 
       convert => { "pendtime" => "integer" }
       convert => { "psusptime" => "integer" }
       convert => { "runtime" => "integer" }
       convert => { "ususptime" => "integer" }
       convert => { "unkwntime" => "integer" }
       convert => { "totaltime" => "integer" }
       convert => { "err_time" => "integer" }
       convert => { "net_pend" => "integer" }
       convert => { "ssusptime" => "integer" }
       convert => { "cpcpus" => "integer" }
       convert => { "max_appl_disk_use" => "integer" }

     }


     ruby { code => "event['core_h'] = event['ncpus'].to_f * event['runtime'].to_f / 3600.0" }
  #kv { trimkey => '<>' field_split => ";, "}

}

output {
#  stdout { codec => "rubydebug" }
  if "_grokparsefailure" not in [tags] {
    elasticsearch { node_name => "jobdb"  document_id => "%{jobid}"}
  }
}
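
The ruby filter above uses the legacy event['field'] accessors; Logstash 5+ requires the get/set Event API instead. The core-hours calculation (ncpus * runtime seconds / 3600) would then read:

filter {
  ruby {
    # core hours = number of cpus * runtime in seconds / 3600
    code => "event.set('core_h', event.get('ncpus').to_f * event.get('runtime').to_f / 3600.0)"
  }
}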

SMB

input {
#  stdin { }
  file {
    path => "/var/log/samba/log.10*"
    sincedb_path => "/dev/null" # remove for production
    start_position => "beginning" # remove for production
    type => "samba"
    codec => plain { charset => "ISO-8859-1" } # necessary for ancient windows encoding
  }

}

filter {
  multiline {
    pattern => "^\s" # line beginning with whitespace...
    what => "previous" # ... belong to the previous line
  }
  mutate {
    gsub => [
      # remove linefeed
      "message", "\n", ""]

  }
  
  grok {
    patterns_dir => "/etc/logstash/patterns"
    match => { "message" => "\[%{SAMBADATE:[@metadata][timestamp]}\,  %{INT:loglevel}\] %{PROG:process}:%{POSINT:pid}\(%{PROG:function}\)  %{GREEDYDATA:rest}" }
  }

  date {
    locale => "en" # enforce the locale; date parsing depends on the system setting
    timezone => "UTC" # the machine clock runs in UTC
    match => [ "[@metadata][timestamp]", "YYYY-MM-dd HH:mm:ss,SSS", "YYYY/MM/dd HH:mm:ss.SSSSSS" ] # parses the timestamp kept in @metadata
  }
  if "service" in [rest]{
    grok {
      match => { "rest" => "%{DATA} service %{GREEDYDATA:service}" }
    }
  }
}

output {
#  stdout { codec => rubydebug { } }
  if "_grokparsefailure" not in [tags] { # don't consider lines without log message
    elasticsearch { node_name => "samba" workers => 2 } 
#host => "elkstack01" protocol => "http" user =>"es_admin" password => "password" } # name of this nodes & where to send data
  }
}

SPM

input {
  #stdin { }
  file {
    path => "/var/log/spm/*.log"
    sincedb_path => "/dev/null" # remove for production
    start_position => "beginning" # remove for production
    type => "spm"
  #  #codec => plain { charset => "ISO-8859-1" } # necessary for ancient windows encoding
  }

}

filter {
  multiline {
    pattern => "^\s" # line beginning with whitespace...
    what => "previous" # ... belong to the previous line
  }
  
  multiline {
    pattern => ".*---------------------- update resources ----------------------.*"
    what => "next"
  }

  mutate {
    gsub => [
      # remove linefeed
      "message", "\n", ""]

  }
  
  grok {
    patterns_dir => "/etc/logstash/patterns"
    match => { "message" => "\[%{WORD:loglevel}%{SPACE}\] %{SPMDATE:[@metadata][timestamp]} \: %{DATA:logmessage} \[%{SENDER:sender}\]" }
  }

  date {
    locale => "en" # enforce the locale; date parsing depends on the system setting
    timezone => "UTC" # the machine clock runs in UTC
    match => [ "[@metadata][timestamp]", "YYYY-MM-dd HH:mm:ss,SSS", "YYYY/MM/dd HH:mm:ss.SSSSSS" ] # parses the timestamp kept in @metadata
  }
}

output {
#  stdout { codec => rubydebug { } }
  if "_grokparsefailure" not in [tags] { # don't consider lines without log message
    elasticsearch { node_name => "spm" } 
# host => ["elkstack01", "elkstack02"] protocol => "http" user =>"es_admin" password => "password"} # name of this nodes & where to send data
  }
}

Pattern

jobdb

DATESTAMP_LSF %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
DATESTAMP_LSF_SHORT %{DAY} %{MONTH} %{MONTHDAY} %{TIME}
DATESTAMP_MYSQL %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{TIME}
ZERODATE 0000-00-00 00:00:00
DATESTAMP_MYSQL_OR_ZERODATE (?:%{DATESTAMP_MYSQL}|%{ZERODATE})

smb

SAMBADATE %{YEAR}/%{MONTHNUM}/%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}

spm

SPMDATE %{YEAR}-%{MONTHNUM}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND}
SENDER %{WORD}::%{DATA}

Dynatrace

DATESTAMP_DT (%{YEAR}-%{MONTHNUM2}-%{MONTHDAY} %{HOUR}:%{MINUTE}:%{SECOND})
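
Each block above is the content of a plain-text pattern file (one NAME-regexp definition per line) in a directory referenced via patterns_dir. DATESTAMP_DT, for example, backs the Dynatrace grok and matches timestamps like 2016-08-22 04:18:05 (the sample noted in the Dynatrace date filter); a trimmed-down grok using it:

filter {
  grok {
    patterns_dir => ["/etc/logstash/dynatrace_patterns"]
    match => { "message" => "%{DATESTAMP_DT:datestamp_dt} %{GREEDYDATA:rest}" }
  }
}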
