ELK Component Basic Syntax

Source: Internet
Author: User
Tags: syslog, logstash

Pipeline: shipper -> broker -> indexer -> Elasticsearch

1. input

input { stdin {} }
output { stdout { codec => rubydebug } }

file {
  codec => multiline {
    pattern => "^\s"
    what => "previous"
  }
  path => ["xx", "xx"]
  exclude => "1.log"
  add_field => ["log_ip", "xx"]
  tags => "tag1"                  # tag marking new events
  delimiter => "\n"               # delimiter that starts a new event
  discover_interval => 15         # how often (seconds) to scan the directory for new files
  stat_interval => 1              # how often (seconds) to check whether a watched file was modified
  start_position => "beginning"   # where to start reading a file; the default is "end"
  sincedb_path => "e:/software/logstash-1.5.4/logstash-1.5.4/test.txt"   # where the read position is recorded
  sincedb_write_interval => 15    # how often (seconds) the read position is written out
}

2. filter

filter {
  multiline {
    # merge rule: every line that does not start with a digit gets merged
    pattern => "^[^\d]"           # NOTE: the closing "]" was missing in the original garbled text
    # where to merge it — onto the previous line
    what => "previous"
  }
}

filter {
  multiline {
    type => "type"                 # event type, self-explanatory
    pattern => "pattern, a regexp" # a regex (a bit like grep); lines matching it are handled by "what"
    negate => boolean
    what => "previous" or "next"   # how a matching line is handled: merged onto the previous log line or the next one
  }
}

filter {
  grep {
    match => ["@message", "php fatal error"]
    drop => false
    add_tag => [fatal_error]
  }
  grep {
    tags => [fatal_error]
    match => ["@message", ".
*(xbox\.com|xbox\.mib\.com\.cn|supports\.game\.mib\.com\.cn)"]
    drop => false
    add_tag => [xboxerror]
  }
}

# filter on logs whose content contains "5.3.3" or "down"
# (events NOT matching the condition are cancelled)
filter {
  if [message] !~ "5.3.3|down" {
    ruby {
      code => "event.cancel"
    }
  }
}

# use the built-in grok patterns to expose more fields
filter {
  grok {
    match => { "message" => "%{COMBINEDAPACHELOG}" }
  }
}

# merge every line that does not begin with "[" onto the previous line
filter {
  multiline {
    pattern => "^[^\[]"
    negate => true
    what => "previous"
  }
}

filter {
  if [path] =~ "error" {
    mutate { replace => { "type" => "apache_error" } }
    grok {
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
  }
  date {
    match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
  }
}

filter {
  if [path] =~ "access" {
    mutate { replace => { type => "apache_access" } }
    grok {
      match => { "message" => "%{COMBINEDAPACHELOG}" }
    }
    date {
      match => ["timestamp", "dd/MMM/yyyy:HH:mm:ss Z"]
    }
  } else if [path] =~ "error" {
    mutate { replace => { type => "apache_error" } }
  } else {
    mutate { replace => { type => "random_logs" } }
  }
}

3. output

E-mail output:

output {
  email {
    match => ["@message", "aaaaa"]
    to => "[email protected]"
    from => "[email protected]"
    options => [
      "smtpIporHost", "smtp.mibnet.com",
      "port", "",                          # NOTE: the port value was lost in the source text — fill in (usually 25)
      "userName", "[email protected]",
      "starttls", "true",
      "password", "opmonitor",
      "authenticationType", "login"
    ]
    subject => "123"
    body => '123'
    via => smtp
  }
}

output {
  if [type] == "syslog" {
    elasticsearch {
      hosts => "172.16.0.102:9200"
      index => "syslog-%{+YYYY.MM.dd}"
    }
  }
  if [type] == "nginx" {
    elasticsearch {
      hosts => "172.16.0.102:9200"
      index => "nglog-%{+YYYY.
MM.dd}"
    }
  }
  # send logs whose content contains "paramiko" or "simplejson" by e-mail
  if [message] =~ /paramiko|simplejson/ {
    email {
      to => "[email protected]"
      from => "[email protected]"
      contenttype => "text/plain; charset=utf-8"
      address => "smtp.163.com"
      username => "[email protected]"
      password => "12344"
      subject => "server %{host} log exception"
      body => "%{@timestamp} %{type}: %{message}"
    }
  }
}

output {
  stdout { codec => rubydebug }
  redis {
    host => '192.168.1.104'
    data_type => 'list'
    key => 'logstash:redis'
  }
}

output {
  elasticsearch { host => localhost }
  stdout { codec => rubydebug }
}

Replacement:

mutate {
  type => "phplog"
  gsub => ["@message", "'", "\""]
}

Debugging:

# /usr/local/logstash-1.5.2/bin/logstash -e 'input { stdin { } } output { stdout {} }'
# logstash -e 'input{stdin{}}output{stdout{codec=>rubydebug}}'

# logstash agent -f logstash-simple.conf --verbose    # turn on debug mode




This article is from the "people, to have their own ideas" blog, please be sure to keep this source http://szgb2016.blog.51cto.com/340201/1865408

ELK Component Basic Syntax

Contact Us

The content source of this page is from Internet, which doesn't represent Alibaba Cloud's opinion; products and services mentioned on that page don't have any relationship with Alibaba Cloud. If the content of the page makes you feel confusing, please write us an email, we will handle the problem within 5 days after receiving your email.

If you find any instances of plagiarism from the community, please send an email to: info-contact@alibabacloud.com and provide relevant evidence. A staff member will contact you within 5 working days.

A Free Trial That Lets You Build Big!

Start building with 50+ products and up to 12 months usage for Elastic Compute Service

  • Sales Support

    1 on 1 presale consultation

  • After-Sales Support

    24/7 Technical Support 6 Free Tickets per Quarter Faster Response

  • Alibaba Cloud offers highly flexible support services tailored to meet your exact needs.