Nginx Logstash "unable to load invalid configuration"


I am trying to configure Logstash for the following inputs: nginx access logs, nginx error logs, and a log that follows a custom pattern. Below is my logstash.conf:

input {
  beats {
    port => 5044

    codec => multiline {
      # Grok pattern names are valid! :)
      pattern => "^%{TIMESTAMP_ISO8601} "
      negate => true
      what => previous
    }
  }
}

filter {

if [type] == "nginx-access" {
        grok {
        match => [ "message" , "%{COMBINEDAPACHELOG}+%{GREEDYDATA:extra_fields}"]
        overwrite => [ "message" ]
        }

        mutate {
                convert => ["response", "integer"]
                convert => ["bytes", "integer"]
                convert => ["responsetime", "float"]
        }

        geoip {
                source => "clientip"
                target => "geoip"
                add_tag => [ "nginx-geoip" ]
        }

        date {
                match => [ "timestamp" , "dd/MMM/YYYY:HH:mm:ss Z" ]
                remove_field => [ "timestamp" ]
        }

        useragent { 
                source => "agent"
        }

}  else if [type] == "nginx-error" { 
        grok {
        match => [ "message" , "(?<timestamp>%{YEAR}[./-]%{MONTHNUM}[./-]%{MONTHDAY}[- ]%{TIME}) \[%{LOGLEVEL:severity}\] %{POSINT:pid}#%{NUMBER}: %{GREEDYDATA:errormessage}(?:, client: (?<client>%{IP}|%{HOSTNAME}))(?:, server: %{IPORHOST:server})(?:, request: %{QS:request})?(?:, upstream: \"%{URI:upstream}\")?(?:, host: %{QS:host})?(?:, referrer: \"%{URI:referrer}\")"] 
        overwrite => [ "message" ]
        }

geoip {
                source => "client"
                target => "geoip"
                add_tag => [ "nginx-geoip" ]
        }

        date {
                match => [ "timestamp" , "YYYY/MM/dd HH:mm:ss" ]
                remove_field => [ "timestamp" ]
        }

} else {

  mutate {
    gsub => ["message", "\n", " "]
  }
  grok {
    match => [ "message", "%{TIMESTAMP_ISO8601:timestamp} \[%{NOTSPACE:uid}\] \[%{NOTSPACE:thread}\] %{LOGLEVEL:loglevel} %{DATA:class}\-%{GREEDYDATA:message}" ]
    overwrite => [ "message" ]
  }
  date {
    match => [ "timestamp" , "yyyy-MM-dd HH:mm:ss" ]
    target => "@timestamp"
  }
  if "_grokparsefailure" in [tags] {
            drop { }
  }
}
}

output {

stdout { codec => rubydebug }
if [type] == "nginx-access" {
  elasticsearch { hosts => localhost }
  index => "nginx-access-%{+YYYY.MM.dd}"
} else if [type] == "nginx-error" {
  elasticsearch { hosts => localhost }
  index => "nginx-error-%{+YYYY.MM.dd}"
} else {
  elasticsearch { hosts => localhost }
}

}
I am not sure what the error is. Can someone help me figure it out?


I also ran the file through hexdump and there are no junk characters, so the file itself seems fine.
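
A quick way to surface the exact line Logstash is objecting to is its configuration-test mode. Assuming the stock logstash binary, the invocation is roughly:

  bin/logstash --config.test_and_exit -f logstash.conf

This parses the file and reports the line and column of any syntax error without starting the pipeline.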

I think you forgot to put quotes around localhost:

 elasticsearch { 
    hosts => ["localhost"]
    index => "%{tempIndex}-%{+xxxx.ww}"
    document_type => "%{[@metadata][type]}"
 }

The correct answer is to set index inside the elasticsearch block, like this:

output {

  stdout { codec => rubydebug }

  if [type] == "nginx-access" {
    elasticsearch {
      hosts => ["localhost"]
      index => "nginx-access-%{+YYYY.MM.dd}"
    }
  } else if [type] == "nginx-error" {
    elasticsearch {
      hosts => ["localhost"]
      index => "nginx-error-%{+YYYY.MM.dd}"
    }
  } else {
    elasticsearch {
      hosts => ["localhost"]
    }
  }
}
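
As a side note, a minimal alternative sketch (assuming every event carries a type field, e.g. set by Filebeat) is to collapse the three elasticsearch blocks into one and build the index name from that field:

output {
  stdout { codec => rubydebug }
  elasticsearch {
    hosts => ["localhost"]
    # index name comes from the event's type field, e.g. nginx-access-2017.06.21
    index => "%{type}-%{+YYYY.MM.dd}"
  }
}

Events without a type field would land in an index literally named "%{type}-...", so this only works if every input sets one.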