Skip to content
This repository was archived by the owner on Mar 29, 2020. It is now read-only.

Commit c2656e5

Browse files
authored
Merge pull request #31 from Falmarri/master
Update to kamon 2.0.0-M4
2 parents 575eaa7 + c7a594e commit c2656e5

File tree

13 files changed

+409
-311
lines changed

13 files changed

+409
-311
lines changed

build.sbt

Lines changed: 7 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -17,36 +17,30 @@ import com.typesafe.sbt.SbtScalariform.ScalariformKeys
1717
import scalariform.formatter.preferences._
1818

1919

20-
val kamonCore = "io.kamon" %% "kamon-core" % "1.1.6"
21-
val kamonTestKit = "io.kamon" %% "kamon-testkit" % "1.1.6"
20+
val kamonCore = "io.kamon" %% "kamon-core" % "2.0.0-RC1"
21+
val kamonTestKit = "io.kamon" %% "kamon-testkit" % "2.0.0-RC1"
2222
val asyncHttpClient = "com.squareup.okhttp3" % "okhttp" % "3.10.0"
2323
val asyncHttpClientMock = "com.squareup.okhttp3" % "mockwebserver" % "3.10.0"
24+
val scalatest = "org.scalatest" %% "scalatest" % "3.0.8"
2425

2526
lazy val root = (project in file("."))
2627
.settings(name := "kamon-datadog")
2728
.settings(
2829
libraryDependencies ++=
29-
compileScope(kamonCore, asyncHttpClient, scalaCompact.value, playJsonVersion.value) ++
30+
compileScope(kamonCore, asyncHttpClient, playJsonVersion.value) ++
3031
testScope(scalatest, slf4jApi, slf4jnop, kamonCore, kamonTestKit, asyncHttpClientMock),
32+
crossScalaVersions := Seq("2.11.12", "2.12.7", "2.13.0"),
3133
ScalariformKeys.preferences := formatSettings(ScalariformKeys.preferences.value))
3234

3335

3436
def playJsonVersion = Def.setting {
3537
scalaBinaryVersion.value match {
3638
case "2.10" => "com.typesafe.play" %% "play-json" % "2.4.11"
37-
case "2.12" | "2.11" => "com.typesafe.play" %% "play-json" % "2.6.9"
39+
case "2.12" | "2.11" | "2.13" => "com.typesafe.play" %% "play-json" % "2.7.4"
3840
}
3941
}
4042

41-
42-
def scalaCompact = Def.setting {
43-
scalaBinaryVersion.value match {
44-
case "2.10" | "2.11" => "org.scala-lang.modules" %% "scala-java8-compat" % "0.5.0"
45-
case "2.12" => "org.scala-lang.modules" %% "scala-java8-compat" % "0.8.0"
46-
}
47-
}
48-
49-
/* Changing Kamon configuration in real-time seems to turn tests unstable */
43+
/* Changing Kamon configuration in real-time seems to turn tests unstable */
5044
parallelExecution in Test := false
5145

5246
def formatSettings(prefs: IFormattingPreferences) = prefs

project/plugins.sbt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,2 @@
1-
lazy val root = project in file(".") dependsOn(RootProject(uri("git://github.com/kamon-io/kamon-sbt-umbrella.git#kamon-1.x")))
1+
lazy val root = project in file(".") dependsOn(RootProject(uri("git://github.com/kamon-io/kamon-sbt-umbrella.git#kamon-2.x")))
22
addSbtPlugin("org.scalariform" % "sbt-scalariform" % "1.8.2")

src/main/resources/reference.conf

Lines changed: 59 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -16,48 +16,52 @@ kamon {
1616

1717
# Max packet size for UDP metrics data sent to Datadog.
1818
max-packet-size = 1024 bytes
19-
2019
measurement-formatter = "default"
21-
2220
packetbuffer = "default"
23-
2421
}
2522

2623
#
27-
# Settings relevant to the DatadogAPIReporter
24+
# Settings relevant to the DatadogSpanReporter
2825
#
29-
http {
30-
31-
api-url = "https://app.datadoghq.com/api/v1/series"
26+
trace {
3227

28+
# Default to agent URL (https://docs.datadoghq.com/api/?lang=python#tracing)
29+
api-url = "http://localhost:8126/v0.4/traces"
3330

34-
# Datadog API key to use to send metrics to datadog directly over HTTPS.
35-
# If this is not set, metrics are sent as statsd packets over UDP to dogstatsd.
36-
api-key = ""
37-
38-
using-agent = false
31+
# FQCN of the "kamon.datadog.KamonDataDogTranslator" implementation that will convert Kamon Spans into Datadog
32+
# Spans, or "default" to use the built-in translator.
33+
translator = "default"
3934

35+
# HTTP client timeout settings:
36+
# - connect-timeout: how long to wait for an HTTP connection to establish before failing the request.
37+
# - read-timeout: how long to wait for a read IO operation to complete before failing the request.
38+
# - write-timeout: how long to wait for a write IO operation to complete before failing the request.
39+
#
4040
connect-timeout = 5 seconds
4141
read-timeout = 5 seconds
42-
request-timeout = 5 seconds
42+
write-timeout = 5 seconds
4343
}
4444

45-
4645
#
47-
# Settings relevant to the DatadogSpanReporter
46+
# Settings relevant to the DatadogAPIReporter
4847
#
49-
trace.http {
48+
api {
5049

51-
# Default to agent URL (https://docs.datadoghq.com/api/?lang=python#tracing)
52-
api-url = "http://localhost:8126/v0.3/traces"
53-
54-
api-key = ${kamon.datadog.http.api-key}
50+
# API endpoint to which metrics time series data will be posted.
51+
api-url = "https://app.datadoghq.com/api/v1/series"
5552

56-
using-agent = true
53+
# Datadog API key to use to send metrics to Datadog directly over HTTPS. The API key will be combined with the
54+
# API URL to get the complete endpoint used for posting time series to Datadog.
55+
api-key = ""
5756

58-
connect-timeout = ${kamon.datadog.http.connect-timeout}
59-
read-timeout = ${kamon.datadog.http.read-timeout}
60-
request-timeout = ${kamon.datadog.http.request-timeout}
57+
# HTTP client timeout settings:
58+
# - connect-timeout: how long to wait for an HTTP connection to establish before failing the request.
59+
# - read-timeout: how long to wait for a read IO operation to complete before failing the request.
60+
# - write-timeout: how long to wait for a write IO operation to complete before failing the request.
61+
#
62+
connect-timeout = 5 seconds
63+
read-timeout = 5 seconds
64+
write-timeout = 5 seconds
6165
}
6266

6367

@@ -71,22 +75,41 @@ kamon {
7175
# Value "b" is equivalent to omitting the setting
7276
information-unit = "b"
7377

74-
additional-tags {
75-
service = "yes"
76-
host = "yes"
77-
instance = "yes"
78-
blacklisted-tags = []
78+
environment-tags {
79+
include-service = "yes"
80+
include-host = "yes"
81+
include-instance = "yes"
82+
exclude = []
83+
84+
filter {
85+
includes = ["**"]
86+
excludes = []
87+
}
7988
}
89+
}
8090

81-
filter-config-key = "datadog-tag-filter"
91+
modules {
92+
datadog-agent {
93+
enabled = true
94+
name = "DatadogAgent"
95+
description = "Datadog agent reporter"
96+
factory = "kamon.datadog.DatadogAgentReporterFactory"
97+
}
8298

83-
}
99+
datadog-trace-agent {
100+
enabled = true
101+
name = "DatadogSpanReporter"
102+
description = "Datadog Span reporter"
103+
factory = "kamon.datadog.DatadogSpanReporterFactory"
104+
}
84105

85-
util.filters {
86-
datadog-tag-filter {
87-
includes = ["**"]
88-
excludes = []
106+
datadog-api {
107+
enabled = false
108+
name = "DatadogHttp"
109+
description = "Datadog HTTP reporter"
110+
factory = "kamon.datadog.DatadogAPIReporterFactory"
89111
}
90112
}
113+
}
114+
91115

92-
}

src/main/scala/kamon/datadog/DatadogAPIReporter.scala

Lines changed: 70 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -24,27 +24,33 @@ import java.util.Locale
2424

2525
import com.typesafe.config.Config
2626
import kamon.metric.MeasurementUnit.Dimension.{ Information, Time }
27-
import kamon.metric.{ MeasurementUnit, MetricDistribution, MetricValue, PeriodSnapshot }
28-
import kamon.util.{ EnvironmentTagBuilder, Matcher }
29-
import kamon.{ Kamon, MetricReporter }
27+
import kamon.metric.{ MeasurementUnit, MetricSnapshot, PeriodSnapshot }
28+
import kamon.tag.{ Tag, TagSet }
29+
import kamon.util.{ EnvironmentTags, Filter }
30+
import kamon.{ module, Kamon }
31+
import kamon.datadog.DatadogAPIReporter.Configuration
32+
import kamon.module.{ MetricReporter, ModuleFactory }
3033
import org.slf4j.LoggerFactory
3134

3235
import scala.util.{ Failure, Success }
3336

34-
class DatadogAPIReporter extends MetricReporter {
37+
class DatadogAPIReporterFactory extends ModuleFactory {
38+
override def create(settings: ModuleFactory.Settings): DatadogAPIReporter = {
39+
val config = DatadogAPIReporter.readConfiguration(settings.config)
40+
new DatadogAPIReporter(config, new HttpClient(config.httpConfig, usingAgent = false))
41+
}
42+
}
43+
44+
class DatadogAPIReporter(@volatile private var configuration: Configuration, @volatile private var httpClient: HttpClient) extends MetricReporter {
3545
import DatadogAPIReporter._
3646

3747
private val logger = LoggerFactory.getLogger(classOf[DatadogAPIReporter])
3848
private val symbols = DecimalFormatSymbols.getInstance(Locale.US)
3949
symbols.setDecimalSeparator('.') // Just in case there is some weird locale config we are not aware of.
4050

4151
private val valueFormat = new DecimalFormat("#0.#########", symbols)
42-
private var configuration = readConfiguration(Kamon.config())
43-
private var httpClient: HttpClient = new HttpClient(configuration.httpConfig)
4452

45-
override def start(): Unit = {
46-
logger.info("Started the Datadog API reporter.")
47-
}
53+
logger.info("Started the Datadog API reporter.")
4854

4955
override def stop(): Unit = {
5056
logger.info("Stopped the Datadog API reporter.")
@@ -53,15 +59,15 @@ class DatadogAPIReporter extends MetricReporter {
5359
override def reconfigure(config: Config): Unit = {
5460
val newConfiguration = readConfiguration(config)
5561
configuration = newConfiguration
56-
httpClient = new HttpClient(configuration.httpConfig)
62+
httpClient = new HttpClient(configuration.httpConfig, usingAgent = false)
5763
}
5864

5965
override def reportPeriodSnapshot(snapshot: PeriodSnapshot): Unit = {
6066
httpClient.doPost("application/json; charset=utf-8", buildRequestBody(snapshot)) match {
6167
case Failure(e) =>
6268
logger.error(e.getMessage)
6369
case Success(response) =>
64-
logger.info(response)
70+
logger.trace(response)
6571
}
6672
}
6773

@@ -72,20 +78,23 @@ class DatadogAPIReporter extends MetricReporter {
7278
val interval = Math.round(Duration.between(snapshot.from, snapshot.to).toMillis() / 1000D)
7379
val seriesBuilder = new StringBuilder()
7480

75-
def addDistribution(metric: MetricDistribution): Unit = {
76-
import metric._
77-
78-
val average = if (distribution.count > 0L) (distribution.sum / distribution.count) else 0L
79-
addMetric(name + ".avg", valueFormat.format(scale(average, unit)), gauge, metric.tags)
80-
addMetric(name + ".count", valueFormat.format(distribution.count), count, metric.tags)
81-
addMetric(name + ".median", valueFormat.format(scale(distribution.percentile(50D).value, unit)), gauge, metric.tags)
82-
addMetric(name + ".95percentile", valueFormat.format(scale(distribution.percentile(95D).value, unit)), gauge, metric.tags)
83-
addMetric(name + ".max", valueFormat.format(scale(distribution.max, unit)), gauge, metric.tags)
84-
addMetric(name + ".min", valueFormat.format(scale(distribution.min, unit)), gauge, metric.tags)
81+
def addDistribution(metric: MetricSnapshot.Distributions): Unit = {
82+
val unit = metric.settings.unit
83+
metric.instruments.foreach { d =>
84+
val dist = d.value
85+
86+
val average = if (dist.count > 0L) (dist.sum / dist.count) else 0L
87+
addMetric(metric.name + ".avg", valueFormat.format(scale(average, unit)), gauge, d.tags)
88+
addMetric(metric.name + ".count", valueFormat.format(dist.count), count, d.tags)
89+
addMetric(metric.name + ".median", valueFormat.format(scale(dist.percentile(50D).value, unit)), gauge, d.tags)
90+
addMetric(metric.name + ".95percentile", valueFormat.format(scale(dist.percentile(95D).value, unit)), gauge, d.tags)
91+
addMetric(metric.name + ".max", valueFormat.format(scale(dist.max, unit)), gauge, d.tags)
92+
addMetric(metric.name + ".min", valueFormat.format(scale(dist.min, unit)), gauge, d.tags)
93+
}
8594
}
8695

87-
def addMetric(metricName: String, value: String, metricType: String, tags: Map[String, String]): Unit = {
88-
val customTags = (configuration.extraTags ++ tags.filterKeys(configuration.tagFilter.accept)).map { case (k, v) quote"$k:$v" }.toSeq
96+
def addMetric(metricName: String, value: String, metricType: String, tags: TagSet): Unit = {
97+
val customTags = (configuration.extraTags ++ tags.iterator(_.toString).map(p => p.key -> p.value).filter(t => configuration.tagFilter.accept(t._1))).map { case (k, v) quote"$k:$v" }
8998
val allTagsString = customTags.mkString("[", ",", "]")
9099

91100
if (seriesBuilder.length() > 0) seriesBuilder.append(",")
@@ -94,13 +103,28 @@ class DatadogAPIReporter extends MetricReporter {
94103
.append(s"""{"metric":"$metricName","interval":$interval,"points":[[$timestamp,$value]],"type":"$metricType","host":"$host","tags":$allTagsString}""")
95104
}
96105

97-
def add(metric: MetricValue, metricType: String): Unit =
98-
addMetric(metric.name, valueFormat.format(scale(metric.value, metric.unit)), metricType, metric.tags)
99-
100-
snapshot.metrics.counters.foreach(add(_, count))
101-
snapshot.metrics.gauges.foreach(add(_, gauge))
106+
snapshot.counters.foreach { snap =>
107+
snap.instruments.foreach { instrument =>
108+
addMetric(
109+
snap.name,
110+
valueFormat.format(scale(instrument.value, snap.settings.unit)),
111+
count,
112+
instrument.tags
113+
)
114+
}
115+
}
116+
snapshot.gauges.foreach { snap =>
117+
snap.instruments.foreach { instrument =>
118+
addMetric(
119+
snap.name,
120+
valueFormat.format(scale(instrument.value, snap.settings.unit)),
121+
gauge,
122+
instrument.tags
123+
)
124+
}
125+
}
102126

103-
(snapshot.metrics.histograms ++ snapshot.metrics.rangeSamplers).foreach(addDistribution)
127+
(snapshot.histograms ++ snapshot.rangeSamplers).foreach(addDistribution)
104128

105129
seriesBuilder
106130
.insert(0, "{\"series\":[")
@@ -110,36 +134,36 @@ class DatadogAPIReporter extends MetricReporter {
110134

111135
}
112136

113-
private def scale(value: Long, unit: MeasurementUnit): Double = unit.dimension match {
137+
private def scale(value: Double, unit: MeasurementUnit): Double = unit.dimension match {
114138
case Time if unit.magnitude != configuration.timeUnit.magnitude =>
115-
MeasurementUnit.scale(value, unit, configuration.timeUnit)
139+
MeasurementUnit.convert(value, unit, configuration.timeUnit)
116140

117141
case Information if unit.magnitude != configuration.informationUnit.magnitude =>
118-
MeasurementUnit.scale(value, unit, configuration.informationUnit)
119-
120-
case _ => value.toDouble
121-
}
142+
MeasurementUnit.convert(value, unit, configuration.informationUnit)
122143

123-
private def readConfiguration(config: Config): Configuration = {
124-
val datadogConfig = config.getConfig("kamon.datadog")
125-
Configuration(
126-
datadogConfig.getConfig("http"),
127-
timeUnit = readTimeUnit(datadogConfig.getString("time-unit")),
128-
informationUnit = readInformationUnit(datadogConfig.getString("information-unit")),
129-
// Remove the "host" tag since it gets added to the datadog payload separately
130-
EnvironmentTagBuilder.create(datadogConfig.getConfig("additional-tags")) - "host",
131-
Kamon.filter(datadogConfig.getString("filter-config-key"))
132-
)
144+
case _ => value
133145
}
134146
}
135147

136148
private object DatadogAPIReporter {
137149
val count = "count"
138150
val gauge = "gauge"
139151

140-
case class Configuration(httpConfig: Config, timeUnit: MeasurementUnit, informationUnit: MeasurementUnit, extraTags: Map[String, String], tagFilter: Matcher)
152+
case class Configuration(httpConfig: Config, timeUnit: MeasurementUnit, informationUnit: MeasurementUnit, extraTags: Seq[(String, String)], tagFilter: Filter)
141153

142154
implicit class QuoteInterp(val sc: StringContext) extends AnyVal {
143155
def quote(args: Any*): String = "\"" + sc.s(args: _*) + "\""
144156
}
157+
158+
def readConfiguration(config: Config): Configuration = {
159+
val datadogConfig = config.getConfig("kamon.datadog")
160+
Configuration(
161+
datadogConfig.getConfig("api"),
162+
timeUnit = readTimeUnit(datadogConfig.getString("time-unit")),
163+
informationUnit = readInformationUnit(datadogConfig.getString("information-unit")),
164+
// Remove the "host" tag since it gets added to the datadog payload separately
165+
EnvironmentTags.from(Kamon.environment, datadogConfig.getConfig("environment-tags")).without("host").all().map(p => p.key -> Tag.unwrapValue(p).toString),
166+
Kamon.filter("kamon.datadog.environment-tags.filter")
167+
)
168+
}
145169
}

0 commit comments

Comments
 (0)