JSON:如何由纬度和经度合成地理坐标点(geo_point)

JSON:如何由纬度和经度合成地理坐标点(geo_point),json,logstash,logstash-configuration,Json,Logstash,Logstash Configuration,我无法通过结合纬度和经度在Logstash中合成地理点。我遵循了其他人的建议和指示,但这些示例似乎是基于旧版本的ELK。由于ELK 2.2,geo_point发生了重大变化,我不确定我是否以正确的方式执行了所有步骤。下面我解释一下我的设置 我使用的 ELK 版本是: curl -XGET 'localhost:9200' { "name" : "Artie", "cluster_name" : "elasticsearch", "version" : { "number" : "2.2.1", "b

我无法通过结合纬度和经度在Logstash中合成地理点。我遵循了其他人的建议和指示,但这些示例似乎是基于旧版本的ELK。由于ELK 2.2,geo_point发生了重大变化,我不确定我是否以正确的方式执行了所有步骤。下面我解释一下我的设置

我使用的 ELK 版本是:

curl -XGET 'localhost:9200' 
{
"name" : "Artie",
"cluster_name" : "elasticsearch",
"version" : {
"number" : "2.2.1",
"build_hash" : "d045fc29d1932bce18b2e65ab8b297fbf6cd41a1",
"build_timestamp" : "2016-03-09T09:38:54Z",
"build_snapshot" : false,
"lucene_version" : "5.4.1"
},
"tagline" : "You Know, for Search"
}
我在 Docker 容器上运行 Elasticsearch 和 Kibana,但这应该无关紧要

我的logstash.conf是这样的:

cat logstash.conf
input {
    http_poller {
            urls => {
                   myresource => "myhost/data.json"
            }
            request_timeout => 1
            interval => 1
            # Parse every line captured from data.json as a new event. 
            codec => "line" 
    }
}
filter {
    if [message] !~ /\"hex\":/ { 
        # drop messages without "hex"
        drop {} 
    }
    # Capture "hex":72d5a1
    grok {
            match => { "message" => "\"hex\":\"(?<hex>[^\"]+)\"," }
    }
    mutate {
            convert => { "hex"       => "string"  }
    }
    # Capture "lat":50.047613
    if [message] =~ /\"lat\":/ {
        grok {
                match => { "message" => "\"lat\":(?<latitude>[^,]+),"}
        }
        mutate {
                convert => { "latitude"  => "float"   }
        }
    }
    # Capture "lon":1.702955    
    if [message] =~ /\"lon\":/ {
        grok {
                match => { "message" => "\"lon\":(?<longitude>[^,]+)," }
        }
        mutate {
            convert => { "longitude" => "float"   }
        }
    }
    # convert latitude and longitude into location.
    mutate {
        rename => {
                "longitude" => "[location][lon]"
                "latitude" => "[location][lat]"
        }
    }
    mutate {
        remove_field => [ "message" ]
    }
}
output { 
    elasticsearch { 
        hosts => [ "elasticsearchhost:9200" ]
        index => "logstash-%{+YYYY.MM.dd}" 
    }  
}
# curl -XGET localhost:9200/_template/logstash?pretty
{
 "logstash" : {
  "order" : 0,
  "template" : "logstash-*",
  "settings" : {
      "index" : {
        "refresh_interval" : "5s"
      }
    },
    "mappings" : {
      "_default_" : {
        "dynamic_templates" : [ {
          "message_field" : {
            "mapping" : {
              "fielddata" : {
                "format" : "disabled"
              },
              "index" : "analyzed",
              "omit_norms" : true,
              "type" : "string"
            },
            "match_mapping_type" : "string",
            "match" : "message"
          }
        }, {
          "string_fields" : {
            "mapping" : {
              "fielddata" : {
                "format" : "disabled"
              },
              "index" : "analyzed",
              "omit_norms" : true,
              "type" : "string",
              "fields" : {
                "raw" : {
                  "ignore_above" : 256,
                  "index" : "not_analyzed",
                  "type" : "string",
                  "doc_values" : true
                }
              }
            },
            "match_mapping_type" : "string",
            "match" : "*"
          }
        }, {
          "float_fields" : {
            "mapping" : {
              "type" : "float",
              "doc_values" : true
            },
            "match_mapping_type" : "float",
            "match" : "*"
          }
        }, {
          "double_fields" : {
            "mapping" : {
              "type" : "double",
              "doc_values" : true
            },
            "match_mapping_type" : "double",
            "match" : "*"
          }
        }, {
          "byte_fields" : {
            "mapping" : {
              "type" : "byte",
              "doc_values" : true
            },
            "match_mapping_type" : "byte",
            "match" : "*"
          }
        }, {
          "short_fields" : {
            "mapping" : {
              "type" : "short",
              "doc_values" : true
            },
            "match_mapping_type" : "short",
            "match" : "*"
          }
        }, {
          "integer_fields" : {
            "mapping" : {
              "type" : "integer",
              "doc_values" : true
            },
            "match_mapping_type" : "integer",
            "match" : "*"
          }
        }, {
          "long_fields" : {
            "mapping" : {
              "type" : "long",
              "doc_values" : true
            },
            "match_mapping_type" : "long",
            "match" : "*"
          }
        }, {
          "date_fields" : {
            "mapping" : {
              "type" : "date",
              "doc_values" : true
            },
            "match_mapping_type" : "date",
            "match" : "*"
          }
        }, {
          "geo_point_fields" : {
            "mapping" : {
              "type" : "geo_point",
              "doc_values" : true
            },
            "match_mapping_type" : "geo_point",
            "match" : "*"
          }
        } ],
        "_all" : {
          "omit_norms" : true,
          "enabled" : true
        },
        "properties" : {
          "@timestamp" : {
            "type" : "date",
            "doc_values" : true
          },
          "geoip" : {
            "dynamic" : true,
            "type" : "object",
            "properties" : {
              "ip" : {
                "type" : "ip",
                "doc_values" : true
              },
              "latitude" : {
                "type" : "float",
                "doc_values" : true
              },
              "location" : {
                "type" : "geo_point",
                "doc_values" : true
              },
              "longitude" : {
                "type" : "float",
                "doc_values" : true
              }
            }
          },
         "@version" : {
            "index" : "not_analyzed",
            "type" : "string",
            "doc_values" : true
          }
        }
      }
    },
    "aliases" : { }
  }
}
从我在文档中读到的内容来看,下面这种表示法
"location" : { "lon" : 4.8246, "lat" : 52.329208 }
看起来是正确的。但问题是,在 Kibana 中我无法将 "location" 字段选为 geo_point 类型的字段

根据 ELK 文档,我需要确保 "location" 字段映射为 geo_point 类型,并且需要启用 doc_values 才能正常工作。我不确定是否还需要做些什么,因为当我查看模板时,"location" 字段似乎默认已经映射好了:
"location" : { "type" : "geo_point", "doc_values" : true }

以下是我的模板的外观:

cat logstash.conf
input {
    http_poller {
            urls => {
                   myresource => "myhost/data.json"
            }
            request_timeout => 1
            interval => 1
            # Parse every line captured from data.json as a new event. 
            codec => "line" 
    }
}
filter {
    if [message] !~ /\"hex\":/ { 
        # drop messages without "hex"
        drop {} 
    }
    # Capture "hex":72d5a1
    grok {
            match => { "message" => "\"hex\":\"(?<hex>[^\"]+)\"," }
    }
    mutate {
            convert => { "hex"       => "string"  }
    }
    # Capture "lat":50.047613
    if [message] =~ /\"lat\":/ {
        grok {
                match => { "message" => "\"lat\":(?<latitude>[^,]+),"}
        }
        mutate {
                convert => { "latitude"  => "float"   }
        }
    }
    # Capture "lon":1.702955    
    if [message] =~ /\"lon\":/ {
        grok {
                match => { "message" => "\"lon\":(?<longitude>[^,]+)," }
        }
        mutate {
            convert => { "longitude" => "float"   }
        }
    }
    # convert latitude and longitude into location.
    mutate {
        rename => {
                "longitude" => "[location][lon]"
                "latitude" => "[location][lat]"
        }
    }
    mutate {
        remove_field => [ "message" ]
    }
}
output { 
    elasticsearch { 
        hosts => [ "elasticsearchhost:9200" ]
        index => "logstash-%{+YYYY.MM.dd}" 
    }  
}
# curl -XGET localhost:9200/_template/logstash?pretty
{
 "logstash" : {
  "order" : 0,
  "template" : "logstash-*",
  "settings" : {
      "index" : {
        "refresh_interval" : "5s"
      }
    },
    "mappings" : {
      "_default_" : {
        "dynamic_templates" : [ {
          "message_field" : {
            "mapping" : {
              "fielddata" : {
                "format" : "disabled"
              },
              "index" : "analyzed",
              "omit_norms" : true,
              "type" : "string"
            },
            "match_mapping_type" : "string",
            "match" : "message"
          }
        }, {
          "string_fields" : {
            "mapping" : {
              "fielddata" : {
                "format" : "disabled"
              },
              "index" : "analyzed",
              "omit_norms" : true,
              "type" : "string",
              "fields" : {
                "raw" : {
                  "ignore_above" : 256,
                  "index" : "not_analyzed",
                  "type" : "string",
                  "doc_values" : true
                }
              }
            },
            "match_mapping_type" : "string",
            "match" : "*"
          }
        }, {
          "float_fields" : {
            "mapping" : {
              "type" : "float",
              "doc_values" : true
            },
            "match_mapping_type" : "float",
            "match" : "*"
          }
        }, {
          "double_fields" : {
            "mapping" : {
              "type" : "double",
              "doc_values" : true
            },
            "match_mapping_type" : "double",
            "match" : "*"
          }
        }, {
          "byte_fields" : {
            "mapping" : {
              "type" : "byte",
              "doc_values" : true
            },
            "match_mapping_type" : "byte",
            "match" : "*"
          }
        }, {
          "short_fields" : {
            "mapping" : {
              "type" : "short",
              "doc_values" : true
            },
            "match_mapping_type" : "short",
            "match" : "*"
          }
        }, {
          "integer_fields" : {
            "mapping" : {
              "type" : "integer",
              "doc_values" : true
            },
            "match_mapping_type" : "integer",
            "match" : "*"
          }
        }, {
          "long_fields" : {
            "mapping" : {
              "type" : "long",
              "doc_values" : true
            },
            "match_mapping_type" : "long",
            "match" : "*"
          }
        }, {
          "date_fields" : {
            "mapping" : {
              "type" : "date",
              "doc_values" : true
            },
            "match_mapping_type" : "date",
            "match" : "*"
          }
        }, {
          "geo_point_fields" : {
            "mapping" : {
              "type" : "geo_point",
              "doc_values" : true
            },
            "match_mapping_type" : "geo_point",
            "match" : "*"
          }
        } ],
        "_all" : {
          "omit_norms" : true,
          "enabled" : true
        },
        "properties" : {
          "@timestamp" : {
            "type" : "date",
            "doc_values" : true
          },
          "geoip" : {
            "dynamic" : true,
            "type" : "object",
            "properties" : {
              "ip" : {
                "type" : "ip",
                "doc_values" : true
              },
              "latitude" : {
                "type" : "float",
                "doc_values" : true
              },
              "location" : {
                "type" : "geo_point",
                "doc_values" : true
              },
              "longitude" : {
                "type" : "float",
                "doc_values" : true
              }
            }
          },
         "@version" : {
            "index" : "not_analyzed",
            "type" : "string",
            "doc_values" : true
          }
        }
      }
    },
    "aliases" : { }
  }
}
我没有向该模板添加任何内容。这就是重新安装Logstash和Elastic并使用Logstash.conf文件启动Logstash后的情况

我的问题是:我需要采取什么步骤来解决我的问题

非常感谢

模板中的 "location" 字段实际上是 "[geoip][location]",但您的数据在 "[location]" 中。因此,模板的映射规则不会应用到您的字段上。请将数据移动到 [geoip][location],或者修改模板

此外,在一个regexp中解析消息比先在条件中运行regexp,然后在grok模式中再次运行它更有效。

模板中的 "location" 字段实际上是 "[geoip][location]",但您的数据在 "[location]" 中。因此,模板的映射规则不会应用到您的字段上。请将数据移动到 [geoip][location],或者修改模板


此外,在一个regexp中解析消息比先在条件中运行regexp,然后在grok模式中再次运行它更有效。

已更新答案,使其更加明确。