Akka + Kamon + StatsD + Grafana 环境搭建
我们正在进行一项关于在汽车行业中测试 Akka 的研究项目,因此需要测量性能。我选择了 Kamon + StatsD + Grafana,因为它看起来很容易上手。我现在的问题是:StatsD 收到的数据没有进入 Grafana。我还尝试手动生成一个直方图作为测试数据,但没有成功:(标签:akka, graphite, grafana, statsd, kamon)
Kamon.metrics.histogram("test-histogram").record(100)
当我在 Grafana 中创建新的数据源时,不管我在 application.conf 中如何配置,唯一有效的 URL 是(原文此处缺失,疑为 http://localhost)。我还尝试将 Docker 容器的 IP 用作 statsd.hostname,但没有成功。
你知道我遗漏了什么吗?
我的设置如下:
application.conf:
# application.conf — Akka cluster node with Kamon 0.6.x metrics shipped to StatsD.
include "version"
akka {
loglevel = DEBUG
# Kamon modules registered as Akka extensions.
# NOTE(review): kamon.modules.*.auto-start = yes (further below) also starts
# these modules — starting them via both mechanisms is redundant; confirm
# against the Kamon 0.6.x documentation which one this Kamon version expects.
extensions = ["kamon.statsd.StatsD", "kamon.system.SystemMetrics", "kamon.logreporter.LogReporter"]
actor {
# Cluster-aware actor-ref provider (Akka 2.4 style).
provider = "akka.cluster.ClusterActorRefProvider"
}
remote {
log-remote-lifecycle-events = off
netty.tcp {
//hostname = "132.199.59.153"
//hostname = "169.254.137.212"
hostname = "127.0.0.1"
//hostname = "172.16.32.193"
port = 2552
}
}
cluster {
# Single-node cluster: this node seeds itself on loopback.
seed-nodes = [
//"akka.tcp://ActorSystem@132.199.59.153:2552",
//"akka.tcp://ActorSystem@169.254.137.211:2552",
"akka.tcp://ActorSystem@127.0.0.1:2552",
]
}
}
kamon {
metric {
# Time interval for collecting all metrics and send the snapshots to all subscribed actors.
tick-interval = 1 second
# Specify if entities that do not match any include/exclude filter should be tracked.
track-unmatched-entities = yes
# Track everything: all actors, dispatchers and traces are included.
filters {
akka-actor {
includes = [ "**" ]
excludes = [ ]
}
akka-dispatcher {
includes = [ "**" ]
excludes = [ ]
}
trace {
includes = [ "**" ]
excludes = [ ]
}
}
}
statsd {
# NOTE(review): 172.17.0.2 is typically a Docker bridge-network address,
# reachable only from the Docker host itself and only while the container
# keeps that IP. If the StatsD container publishes UDP port 8125 to the
# host, "127.0.0.1" is usually the value that works from an app running on
# the host. Confirm with `docker ps` / the container's port mapping — this
# is the most likely reason no data reaches StatsD/Grafana.
hostname = "172.17.0.2"
port = 8125
# NOTE(review): flush-interval must be >= metric.tick-interval — TODO confirm
# against the kamon-statsd reference.conf.
flush-interval = 10 seconds
max-packet-size = 1024 bytes
includes {
akka-actor = [ "**" ]
akka-dispatcher = [ "**" ]
trace = [ "**" ]
}
simple-metric-key-generator {
# Application prefix for all metrics pushed to StatsD. The default namespacing scheme for metrics follows
# this pattern:
# application.host.entity.entity-name.metric-name
application = "introduce-kamon-io"
}
}
# Auto-start the Kamon reporter/instrumentation modules (see NOTE on
# akka.extensions above about double-starting).
modules {
kamon-statsd.auto-start = yes
kamon-akka.auto-start = yes
kamon-log-reporter.auto-start = yes
kamon-system-metrics.auto-start = yes
}
}
# Disable legacy metrics in akka-cluster.
//akka.cluster.metrics.enabled=off
# Enable metrics extension in akka-cluster-metrics.
//akka.extensions=["akka.cluster.metrics.ClusterMetricsExtension"]
# Sigar native library extract location during tests.
# Note: use per-jvm-instance folder when running multiple jvm on one host.
//akka.cluster.metrics.native-library-extract-folder=${user.dir}/target/native
// build.sbt — Akka 2.4 cluster app instrumented with Kamon 0.6 via AspectJ
// load-time weaving (sbt-aspectj plugin, sbt 0.13-style settings).
import com.typesafe.sbt.SbtAspectj._

name := "VideoProxy"
version := "1.0"
scalaVersion := "2.12.1"

// FIX: the Typesafe repository is no longer served over plain HTTP (and modern
// sbt refuses insecure resolvers) — use HTTPS so dependency resolution works.
resolvers += "Typesafe repository" at "https://repo.typesafe.com/typesafe/releases/"

val akkaVersion = "2.4.16"
val kamonVersion = "0.6.6"

libraryDependencies ++= Seq(
  "org.bytedeco" % "javacv-platform" % "1.3.1",
  "com.typesafe.akka" %% "akka-slf4j" % akkaVersion,
  "com.typesafe.akka" %% "akka-remote" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster-sharding" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster-tools" % akkaVersion,
  "com.typesafe.akka" %% "akka-stream" % akkaVersion,
  "com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion,
  "org.aspectj" % "aspectjweaver" % "1.8.1",
  "io.kamon" %% "kamon-core" % kamonVersion,
  "io.kamon" %% "kamon-statsd" % kamonVersion,
  // excludes avoid pulling the duplicate cross-versioned kamon-akka artifacts
  "io.kamon" %% "kamon-akka-2.4" % kamonVersion exclude("io.kamon", "kamon-akka_akka-2.4_2.12"),
  "io.kamon" %% "kamon-akka-remote-2.4" % kamonVersion exclude("io.kamon", "kamon-akka-remote_akka-2.4_2.12"),
  "io.kamon" %% "kamon-log-reporter" % kamonVersion,
  "io.kamon" %% "kamon-system-metrics" % kamonVersion
)

aspectjSettings
// Pass the AspectJ weaver's -javaagent option to the forked JVM so Kamon's
// bytecode instrumentation is woven at load time (`<++=` is sbt 0.13 syntax).
javaOptions <++= AspectjKeys.weaverOptions in Aspectj
// when you call "sbt run" aspectj weaving kicks in — it only applies to a
// forked JVM, hence forking must be enabled.
fork in run := true
build.sbt:
# application.conf — Akka cluster node with Kamon 0.6.x metrics shipped to StatsD.
include "version"
akka {
loglevel = DEBUG
# Kamon modules registered as Akka extensions.
# NOTE(review): kamon.modules.*.auto-start = yes (further below) also starts
# these modules — starting them via both mechanisms is redundant; confirm
# against the Kamon 0.6.x documentation which one this Kamon version expects.
extensions = ["kamon.statsd.StatsD", "kamon.system.SystemMetrics", "kamon.logreporter.LogReporter"]
actor {
# Cluster-aware actor-ref provider (Akka 2.4 style).
provider = "akka.cluster.ClusterActorRefProvider"
}
remote {
log-remote-lifecycle-events = off
netty.tcp {
//hostname = "132.199.59.153"
//hostname = "169.254.137.212"
hostname = "127.0.0.1"
//hostname = "172.16.32.193"
port = 2552
}
}
cluster {
# Single-node cluster: this node seeds itself on loopback.
seed-nodes = [
//"akka.tcp://ActorSystem@132.199.59.153:2552",
//"akka.tcp://ActorSystem@169.254.137.211:2552",
"akka.tcp://ActorSystem@127.0.0.1:2552",
]
}
}
kamon {
metric {
# Time interval for collecting all metrics and send the snapshots to all subscribed actors.
tick-interval = 1 second
# Specify if entities that do not match any include/exclude filter should be tracked.
track-unmatched-entities = yes
# Track everything: all actors, dispatchers and traces are included.
filters {
akka-actor {
includes = [ "**" ]
excludes = [ ]
}
akka-dispatcher {
includes = [ "**" ]
excludes = [ ]
}
trace {
includes = [ "**" ]
excludes = [ ]
}
}
}
statsd {
# NOTE(review): 172.17.0.2 is typically a Docker bridge-network address,
# reachable only from the Docker host itself and only while the container
# keeps that IP. If the StatsD container publishes UDP port 8125 to the
# host, "127.0.0.1" is usually the value that works from an app running on
# the host. Confirm with `docker ps` / the container's port mapping — this
# is the most likely reason no data reaches StatsD/Grafana.
hostname = "172.17.0.2"
port = 8125
# NOTE(review): flush-interval must be >= metric.tick-interval — TODO confirm
# against the kamon-statsd reference.conf.
flush-interval = 10 seconds
max-packet-size = 1024 bytes
includes {
akka-actor = [ "**" ]
akka-dispatcher = [ "**" ]
trace = [ "**" ]
}
simple-metric-key-generator {
# Application prefix for all metrics pushed to StatsD. The default namespacing scheme for metrics follows
# this pattern:
# application.host.entity.entity-name.metric-name
application = "introduce-kamon-io"
}
}
# Auto-start the Kamon reporter/instrumentation modules (see NOTE on
# akka.extensions above about double-starting).
modules {
kamon-statsd.auto-start = yes
kamon-akka.auto-start = yes
kamon-log-reporter.auto-start = yes
kamon-system-metrics.auto-start = yes
}
}
# Disable legacy metrics in akka-cluster.
//akka.cluster.metrics.enabled=off
# Enable metrics extension in akka-cluster-metrics.
//akka.extensions=["akka.cluster.metrics.ClusterMetricsExtension"]
# Sigar native library extract location during tests.
# Note: use per-jvm-instance folder when running multiple jvm on one host.
//akka.cluster.metrics.native-library-extract-folder=${user.dir}/target/native
// build.sbt — Akka 2.4 cluster app instrumented with Kamon 0.6 via AspectJ
// load-time weaving (sbt-aspectj plugin, sbt 0.13-style settings).
import com.typesafe.sbt.SbtAspectj._

name := "VideoProxy"
version := "1.0"
scalaVersion := "2.12.1"

// FIX: the Typesafe repository is no longer served over plain HTTP (and modern
// sbt refuses insecure resolvers) — use HTTPS so dependency resolution works.
resolvers += "Typesafe repository" at "https://repo.typesafe.com/typesafe/releases/"

val akkaVersion = "2.4.16"
val kamonVersion = "0.6.6"

libraryDependencies ++= Seq(
  "org.bytedeco" % "javacv-platform" % "1.3.1",
  "com.typesafe.akka" %% "akka-slf4j" % akkaVersion,
  "com.typesafe.akka" %% "akka-remote" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster-sharding" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster-tools" % akkaVersion,
  "com.typesafe.akka" %% "akka-stream" % akkaVersion,
  "com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion,
  "org.aspectj" % "aspectjweaver" % "1.8.1",
  "io.kamon" %% "kamon-core" % kamonVersion,
  "io.kamon" %% "kamon-statsd" % kamonVersion,
  // excludes avoid pulling the duplicate cross-versioned kamon-akka artifacts
  "io.kamon" %% "kamon-akka-2.4" % kamonVersion exclude("io.kamon", "kamon-akka_akka-2.4_2.12"),
  "io.kamon" %% "kamon-akka-remote-2.4" % kamonVersion exclude("io.kamon", "kamon-akka-remote_akka-2.4_2.12"),
  "io.kamon" %% "kamon-log-reporter" % kamonVersion,
  "io.kamon" %% "kamon-system-metrics" % kamonVersion
)

aspectjSettings
// Pass the AspectJ weaver's -javaagent option to the forked JVM so Kamon's
// bytecode instrumentation is woven at load time (`<++=` is sbt 0.13 syntax).
javaOptions <++= AspectjKeys.weaverOptions in Aspectj
// when you call "sbt run" aspectj weaving kicks in — it only applies to a
// forked JVM, hence forking must be enabled.
fork in run := true
(以下 build.sbt 代码在机器翻译中被破坏,现按原文还原:)

import com.typesafe.sbt.SbtAspectj._
name := "VideoProxy"
version := "1.0"
scalaVersion := "2.12.1"
resolvers += "Typesafe repository" at "http://repo.typesafe.com/typesafe/releases/"
val akkaVersion = "2.4.16"
val kamonVersion = "0.6.6"
libraryDependencies ++= Seq(
  "org.bytedeco" % "javacv-platform" % "1.3.1",
  "com.typesafe.akka" %% "akka-slf4j" % akkaVersion,
  "com.typesafe.akka" %% "akka-remote" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster-sharding" % akkaVersion,
  "com.typesafe.akka" %% "akka-cluster-tools" % akkaVersion,
  "com.typesafe.akka" %% "akka-stream" % akkaVersion,
  "com.typesafe.akka" %% "akka-stream-testkit" % akkaVersion,
  "org.aspectj" % "aspectjweaver" % "1.8.1",
  "io.kamon" %% "kamon-core" % kamonVersion,
  "io.kamon" %% "kamon-statsd" % kamonVersion,
  "io.kamon" %% "kamon-akka-2.4" % kamonVersion exclude("io.kamon", "kamon-akka_akka-2.4_2.12"),
  "io.kamon" %% "kamon-akka-remote-2.4" % kamonVersion exclude("io.kamon", "kamon-akka-remote_akka-2.4_2.12"),
  "io.kamon" %% "kamon-log-reporter" % kamonVersion,
  "io.kamon" %% "kamon-system-metrics" % kamonVersion
)
aspectjSettings
javaOptions <++= AspectjKeys.weaverOptions in Aspectj