标签:json, elasticsearch, logstash

Json 日志存储:无法将事件索引到Elasticsearch

Json 日志存储:无法将事件索引到 Elasticsearch。我当前在查看 logstash 6.5.4 容器的 docker 日志时反复遇到同一条错误(完整错误信息见下文)。

我当前在查看logstash 6.5.4容器的docker日志时遇到重复错误

[2019-02-18T17:12:17,098][WARN ][logstash.outputs.elasticsearch] Could not index event to Elasticsearch. {:status=>400, :action=>["index", {:_id=>nil, :_index=>"logstash-2019.02.16", :_type=>"doc", :routing=>nil}, #&lt;LogStash::Event:0x...&gt;], :response=>{"index"=>{"_index"=>"logstash-2019.02.16", "_type"=>"doc", "_id"=>nil, "status"=>400, "error"=>{"type"=>"mapper_parsing_exception", "reason"=>"Failed to parse mapping: No field type matched on [float], possible values are [object, string, long, double, boolean, date, binary]", "caused_by"=>{"type"=>"illegal_argument_exception", "reason"=>"No field type matched on [float], possible values are [object, string, long, double, boolean, date, binary]"}}}}}

这是我的json模板:

    {
      "template": "logstash-*",
      "order": 1,
      "settings": {
        "number_of_shards": 2,
        "number_of_replicas": 1
      },
      "mappings": {
        "_default_": {
          "properties": {
            "time":      { "type": "date", "format": "basic_time_no_millis" },
            "before":    { "type": "date", "format": "strict_date_time" },
            "after":     { "type": "date", "format": "strict_date_time" },
            "logsource": { "type": "ip" }
          }
        }
      }
    }
这是我的日志存储配置

# Consume events from two Redis brokers: each input pops entries off the
# "logstash" list key; connection details come from environment variables.
input {
  redis {
    host => "${REDIS_0_HOST}"
    port => "${REDIS_0_PORT}"
    data_type => "list"
    key => "logstash"
  }
}
# Second broker, same list key — events may arrive from either Redis instance.
input {
  redis {
    host => "${REDIS_1_HOST}"
    port => "${REDIS_1_PORT}"
    data_type => "list"
    key => "logstash"
  }
}

# Pipeline filter stage: drops known noise, unwraps "message repeated N times"
# wrappers, extracts structured fields (json / grok / kv), then normalizes
# field types and adds time/weekday convenience fields.
filter {

  # if we were successful parsing a message from the raw log, let's dive deeper into the message and assign more fields 
  if [message] {

    # catch gelatin lib output on startup in containers and drop them
    if "20500017" in [message] { drop { } }
    if "2050001c" in [message] { drop { } }

    # remove trailing whitespace from message field
    mutate {
      strip => ["message"]
    } 

    # handle message repeated X times messages 
    # (overwrite keeps only the inner message; empty tag_on_failure suppresses
    # the default _grokparsefailure tag since non-matching is the normal case)
    grok {
      match => ["message", "message repeated %{NUMBER:repeat_count} times: \[ %{GREEDYDATA:message}\]"]
      overwrite => [ "message" ]
      tag_on_failure => [ ]
    }

    # handle message fields that already have structured json content
    if [program] == "austin-perf" { 
      json {
        source => "message"
        remove_field => ["message"]
      }
    } else { 
      # First matching pattern wins; OBLOG_* are custom patterns from the
      # mounted patterns directory.
      grok {
        break_on_match => true
        patterns_dir => ["/usr/share/logstash/config/patterns"]
        match => [ 
          "message", "%{OBLOG_REVIVE_DATE}",
          "message", "%{OBLOG_REVIVE}",
          "message", "%{OBLOG_DATE}",
          "message", "%{OBLOG}",
          "message", "%{WORD}, \[%{TIMESTAMP_ISO8601} #%{NUMBER}\]  ?%{WORD:level} -- : %{GREEDYDATA:kvpairs}", # ruby app logs
          "message", "%{USERNAME:level}: ?%{PATH:file} %{NUMBER:line_num} %{GREEDYDATA:kvpairs}",
          "message", "%{USERNAME:level}: ?%{GREEDYDATA:kvpairs}",
          "message", "%{URIPATH:file}:%{POSINT:line_num}" #ruby app exceptions
        ]
      }

      # NOTE(review): `in` on a string is a substring test, so this checks for a
      # literal backslash-dot sequence; if the intent was "kvpairs contains no
      # dot", the string should be "." instead — confirm.
      if "\." not in [kvpairs] {
        # Only whitelisted keys are extracted into top-level event fields.
        kv {
          source => "kvpairs"
          include_keys => [
            "pulse_git_events",
            "pulse_trending_count",
            "pulse_news_count",
            "kafka_records",
            "repeat_count",
            "used_memory",
            "new_kafka_articles",
            "wcs_training_time",
            "rokerbot_event",
            "health_check",
            "rokerbot_bot_utterance",
            "rokerbot_user_utterance",
            "Date_Conn_Time",
            "Date_Query_Time",
            "Date_Parse_Time",
            "News_Conn_Time",
            "News_Query_Time",
            "NEWS_FAIL_TIME",
            "writing_image",
            "timed_app",
            "ran_for",
            "app_name",
            "klocker_app_name",
            "memory_used",
            "cpu_usage",
            "rss_mem",
            "vms_mem",
            "shared_mem",
            "uss_mem",
            "pss_mem",
            "text_mem",
            "data_mem",
            "total_gpu_mem",
            "used_gpu_mem",
            "free_gpu_mem"
          ] 
        }
      }

      # Drop any field whose NAME matches a URI — presumably to discard junk
      # keys produced by kv on URL-bearing messages; confirm intent.
      prune {
        blacklist_names => ["%{URI}"]
      }
    }

    # Build a stable file:line identifier when both pieces were extracted.
    if [file] and [line_num] { 
      mutate {
        add_field => {
          "test_unique" => "%{file}:%{line_num}"
        }
      }
    }
  }

  # Coerce kv-extracted string values to numeric types so Elasticsearch maps
  # them as numbers.
  # NOTE(review): cpu_usage is converted to float — this matches the field in
  # the 400 mapper_parsing error above; the failure comes from whichever
  # template ES actually applies to logstash-* indices, not from this convert.
  mutate {
    convert => {
      "pulse_git_events" => "integer"
      "pulse_trending_count" => "integer"
      "pulse_news_count" => "integer"
      "kafka_records" => "integer"
      "repeat_count" => "integer"
      "used_memory" => "integer"
      "new_kafka_articles" => "integer"
      "wcs_training_time" => "integer"
      "ran_for" => "integer"
      "Date_Conn_Time" => "integer"
      "Date_Query_Time" => "integer"
      "Date_Parse_Time" => "integer"
      "News_Conn_Time" => "integer"
      "News_Query_Time" => "integer"
      "NEWS_FAIL_TIME" => "integer"
      "memory_used" => "integer"
      "cpu_usage" => "float"
      "rss_mem" => "integer"
      "vms_mem" => "integer"
      "shared_mem" => "integer"
      "uss_mem" => "integer"
      "pss_mem" => "integer"
      "text_mem" => "integer"
      "data_mem" => "integer"
      "total_gpu_mem" => "integer"
      "used_gpu_mem" => "integer"
      "free_gpu_mem" => "integer"
    }

    # NOTE(review): the mutate docs give lowercase an array value
    # (lowercase => ["level"]); the bare-string form appears to coerce — confirm.
    lowercase => "level" 
    # NOTE(review): "_type" is document metadata rather than an event field, so
    # removing it here is likely a no-op — confirm.
    remove_field => [ "timestamp", "kvpairs", "type", "_type" ]

    # "time" matches the template's basic_time_no_millis date format (HHmmssZ).
    add_field => {
      "time" => "%{+HHmmssZ}"
      "weekday" => "%{+EEE}"
    }
  }
}

# Ship events to the two-node Elasticsearch cluster, one index per day.
# NOTE(review): no template/manage_template options are set here, so the JSON
# template above must be installed out of band — a pre-existing template in ES
# will otherwise take precedence (the root cause reported in this thread).
output {
  elasticsearch {
    hosts => ["${ES_DATA_0}","${ES_DATA_1}"]
    index => "logstash-%{+YYYY.MM.dd}"
  }
}
在当前配置下,cpu_usage 字段的浮点值似乎是问题的根源;但 Logstash 的 mutate 过滤器不支持把值转换为 double 类型。这个容器是从我认为的 5.1.x 版本升级上来的较新的 Logstash。

您可能需要在模板里为 float 类型补充对应的映射,例如在动态模板(dynamic template)中添加 match_mapping_type。

.

ES正在查看的是一个旧的现有模板,而不是我的模板。删除它解决了问题

ES 实际使用的是一个旧的已有模板,而不是我的模板;删除旧模板后问题解决。我觉得你可以把这条直接发成正式答案,而不只是评论。另外,你能详细说明一下这些模板存放在哪里吗?