
Commit 5964d1b (parent 32d409f)

keep original key/value pair during de-duplication
Suppose the same key/value pair gets logged multiple times:

    WithValues("sameKey", 1).WithValues("newKey", 2).WithValues("sameKey", 1)

Previously, the de-duplication code would have emitted it near the end:

    "hello world" newKey=2 sameKey=1

Now, it gets emitted at the place where it first appeared:

    "hello world" sameKey=1 newKey=2

This makes log entries more consistent when some repeat the key/value pair
and others don't. Performance is about the same as before.

    goos: linux
    goarch: amd64
    pkg: k8s.io/klog/examples/benchmarks
    cpu: Intel(R) Core(TM) i9-7980XE CPU @ 2.60GHz
               │ after1 │     after2      │
               │ sec/op │ sec/op  vs base │
    ...
    geomean      3.442µ   3.445µ  +0.09%

    pkg: k8s.io/klog/examples/output_test
    geomean      5.646µ   5.631µ  -0.26%

    pkg: k8s.io/klog/v2
    geomean      260.6n   260.5n  -0.07%

    pkg: k8s.io/klog/v2/textlogger
    TextloggerOutput/odd_WithValues-36                       6.956µ ± 4%   7.263µ ± 4%  +4.42% (p=0.041 n=6)
    ...
    TextloggerOutput/log_with_multiple_names_and_values-36   3.353µ ± 4%   3.172µ ± 2%  -5.40% (p=0.002 n=6)
    ...
    TextloggerOutput/duplicates-36                           17.06µ ± 4%   17.16µ ± 9%       ~ (p=0.937 n=6)
    ...
    TextloggerOutput/mixed_duplicates-36                     19.59µ ± 7%   20.35µ ± 5%       ~ (p=0.065 n=6)
    ...
    TextloggerOutput/print_duplicate_keys_in_arguments-36    2.388µ ± 6%   2.218µ ± 7%  -7.12% (p=0.026 n=6)
    ...
    geomean                                                  2.860µ        2.849µ       -0.38%
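For a hands-on check of the new behavior, a minimal sketch, assuming the k8s.io/klog/v2/textlogger backend (the package benchmarked above) and its Output config option; the WithValues chain and message are the ones from the description:

package main

import (
	"os"

	"k8s.io/klog/v2/textlogger"
)

func main() {
	// textlogger is the klog backend whose de-duplication changed here.
	logger := textlogger.NewLogger(textlogger.NewConfig(textlogger.Output(os.Stdout)))

	// "sameKey" appears twice with the same value, "newKey" in between.
	logger = logger.WithValues("sameKey", 1).WithValues("newKey", 2).WithValues("sameKey", 1)

	// Before this commit the entry ended with: newKey=2 sameKey=1
	// Now it keeps the original position:      sameKey=1 newKey=2
	logger.Info("hello world")
}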

File tree

4 files changed (+26, -17 lines)

go.mod

Lines changed: 1 addition & 1 deletion

@@ -1,5 +1,5 @@
 module k8s.io/klog/v2
 
-go 1.18
+go 1.21
 
 require github.com/go-logr/logr v1.4.1
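The jump from Go 1.18 straight to 1.21 lines up with the de-duplication change below, which starts using the standard-library slices package introduced in Go 1.21.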

internal/serialize/keyvalues.go

Lines changed: 19 additions & 10 deletions

@@ -20,6 +20,7 @@ import (
 	"bytes"
 	"encoding/json"
 	"fmt"
+	"slices"
 	"strconv"
 
 	"github.com/go-logr/logr"
@@ -115,16 +116,24 @@ func (f Formatter) formatKVs(b *bytes.Buffer, kvs ...[]interface{}) {
 		e.end = b.Len()
 		i := findObsoleteEntry(existing, e.key)
 		if i >= 0 {
-			// The old entry gets obsoleted. This ensures
-			// that if the more recent one has a different value,
-			// that value remains. If the value is the same,
-			// then we could also keep the old entry. That
-			// would lead to a more natural order of key/value
-			// pairs in the output (shared key/value in multiple
-			// log entries always at the beginning) but at the
-			// cost of another memory comparison.
-			obsolete = append(obsolete, existing[i].interval)
-			existing[i].interval = e.interval
+			data := b.Bytes()
+			if bytes.Compare(data[existing[i].start:existing[i].end], data[e.start:e.end]) == 0 {
+				// The new entry gets obsoleted because it's identical.
+				// This has the advantage that key/value pairs from
+				// a WithValues call always come first, even if the same
+				// pair gets added again later. This makes different log
+				// entries more consistent.
+				//
+				// The new entry has a higher start index and thus can be appended.
+				obsolete = append(obsolete, e.interval)
+			} else {
+				// The old entry gets obsoleted because its value is different.
+				//
+				// Sort order is not guaranteed, we have to insert at the right place.
+				index, _ := slices.BinarySearchFunc(obsolete, existing[i].interval, func(a, b interval) int { return a.start - b.start })
+				obsolete = slices.Insert(obsolete, index, existing[i].interval)
+				existing[i].interval = e.interval
+			}
 		} else {
 			// Instead of appending at the end and doing a
 			// linear search in findEntry, we could keep
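The else branch keeps obsolete ordered by start offset so that later processing can rely on the sort order. A self-contained sketch of that insertion pattern, assuming interval holds start/end byte offsets into the serialization buffer as in the hunk above (slices is standard library as of Go 1.21, hence the go.mod bump):

package main

import (
	"fmt"
	"slices"
)

// interval mirrors the start/end byte offsets assumed above.
type interval struct {
	start, end int
}

// insertSorted places iv into obsolete while keeping the slice sorted
// by start, the same BinarySearchFunc+Insert pattern as the else branch.
func insertSorted(obsolete []interval, iv interval) []interval {
	index, _ := slices.BinarySearchFunc(obsolete, iv,
		func(a, b interval) int { return a.start - b.start })
	return slices.Insert(obsolete, index, iv)
}

func main() {
	var obsolete []interval
	for _, iv := range []interval{{20, 30}, {0, 10}, {40, 50}} {
		obsolete = insertSorted(obsolete, iv)
	}
	fmt.Println(obsolete) // [{0 10} {20 30} {40 50}]
}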

test/output.go

Lines changed: 2 additions & 2 deletions

@@ -446,9 +446,9 @@ I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="
 			text: "duplicates",
 
 			expectedOutput: `I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="0102030405060708" a=1
-I output.go:<LINE>] "duplicates" a=1 b=2 trace="101112131415161718191a1b1c1d1e1f" span="1112131415161718"
+I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" a=1 b=2 span="1112131415161718"
 I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="0102030405060708" a=1
-I output.go:<LINE>] "duplicates" a=1 c=3 trace="101112131415161718191a1b1c1d1e1f" span="2122232425262728" d=4
+I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" a=1 c=3 span="2122232425262728" d=4
 `,
 		},
 	}

test/zapr.go

Lines changed: 4 additions & 4 deletions

@@ -275,9 +275,9 @@ I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="
 `,
 
 	`I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="0102030405060708" a=1
-I output.go:<LINE>] "duplicates" a=1 b=2 trace="101112131415161718191a1b1c1d1e1f" span="1112131415161718"
+I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" a=1 b=2 span="1112131415161718"
 I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="0102030405060708" a=1
-I output.go:<LINE>] "duplicates" a=1 c=3 trace="101112131415161718191a1b1c1d1e1f" span="2122232425262728" d=4
+I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" a=1 c=3 span="2122232425262728" d=4
 `: `{"caller":"test/output.go:<LINE>","msg":"duplicates","trace":"101112131415161718191a1b1c1d1e1f","span":"0102030405060708","a":1,"v":0}
 {"caller":"test/output.go:<LINE>","msg":"duplicates","trace":"101112131415161718191a1b1c1d1e1f","span":"0102030405060708","a":1,"b":2,"trace":"101112131415161718191a1b1c1d1e1f","span":"1112131415161718","v":0}
 {"caller":"test/output.go:<LINE>","msg":"duplicates","trace":"101112131415161718191a1b1c1d1e1f","span":"0102030405060708","a":1,"v":0}
@@ -383,9 +383,9 @@ I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="
 `,
 
 	`I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="0102030405060708" a=1
-I output.go:<LINE>] "duplicates" a=1 b=2 trace="101112131415161718191a1b1c1d1e1f" span="1112131415161718"
+I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" a=1 b=2 span="1112131415161718"
 I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" span="0102030405060708" a=1
-I output.go:<LINE>] "duplicates" a=1 c=3 trace="101112131415161718191a1b1c1d1e1f" span="2122232425262728" d=4
+I output.go:<LINE>] "duplicates" trace="101112131415161718191a1b1c1d1e1f" a=1 c=3 span="2122232425262728" d=4
 `: `{"caller":"test/output.go:<LINE>","msg":"duplicates","v":0,"trace":"101112131415161718191a1b1c1d1e1f","span":"0102030405060708","a":1}
 {"caller":"test/output.go:<LINE>","msg":"duplicates","v":0,"trace":"101112131415161718191a1b1c1d1e1f","span":"0102030405060708","a":1,"b":2,"trace":"101112131415161718191a1b1c1d1e1f","span":"1112131415161718"}
 {"caller":"test/output.go:<LINE>","msg":"duplicates","v":0,"trace":"101112131415161718191a1b1c1d1e1f","span":"0102030405060708","a":1}
