Commit ba090cd

chore!: adopt log/slog, drop go-kit/log

The bulk of this change set was automated by the following script, which is being used to aid in converting the various exporters/projects to use slog: https://gist.github.com/tjhop/49f96fb7ebbe55b12deee0b0312d8434

Other changes include:
- updates the statsd_exporter codebase to v0.28.0 to use statsd_exporter MetricMapper code that uses log/slog.
- updates the prometheus codebase to commit prometheus/prometheus@911c3ef, which corresponds to the 3.0 beta 1 release. This is required to use new prometheus code that uses log/slog.
- refactors/fixes around the tsdb handling in the backfill command for current function signatures/usage.
- updates deprecated golangci-lint configs.
- enables the sloglint linter.

Signed-off-by: TJ Hoplock <[email protected]>

1 parent f91ffb8 · commit ba090cd
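For context, a minimal, self-contained sketch of the call-site conversion this commit (and the linked gist) performs is shown below. It assumes only packages that appear in the diffs that follow (log/slog via github.com/prometheus/common/promslog); the old go-kit/log forms are quoted in comments rather than compiled, and the values passed are placeholders.

package main

import (
	"errors"

	"github.com/prometheus/common/promslog"
)

func main() {
	// Before: logger := log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr))
	// After:  promslog returns a ready-to-use *slog.Logger.
	logger := promslog.New(&promslog.Config{})

	// Before: level.Info(logger).Log("msg", "Starting graphite_exporter", "version_info", v)
	// After:  the message becomes the first argument; the explicit "msg" key disappears.
	logger.Info("Starting graphite_exporter", "version_info", "example")

	// Before: level.Error(logger).Log("msg", "Error loading metric mapping config", "err", err)
	err := errors.New("example error")
	logger.Error("Error loading metric mapping config", "err", err)

	// Before: log.NewNopLogger() (go-kit)
	// After:  promslog.NewNopLogger(), used in the tests below and for
	//         tsdb.NewBlockWriter in the backfill command.
	nop := promslog.NewNopLogger()
	nop.Debug("discarded")
}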

File tree

9 files changed: +375 -778 lines

.golangci.yml (+3 -2)

@@ -1,5 +1,5 @@
-run:
-  skip-dirs:
+issues:
+  exclude-dirs:
   - e2e
 
 # Run only staticcheck and goimports for now. Additional linters will be enabled one-by-one.
@@ -8,6 +8,7 @@ linters:
   - errorlint
   - goimports
   - staticcheck
+  - sloglint
   disable-all: true
 
 linters-settings:

cmd/getool/backfill.go (+7 -8)

@@ -28,9 +28,8 @@ import (
 	"time"
 
 	"github.com/alecthomas/units"
-	"github.com/go-kit/log"
-	"github.com/go-kit/log/level"
-	"github.com/prometheus/prometheus/pkg/labels"
+	"github.com/prometheus/common/promslog"
+	"github.com/prometheus/prometheus/model/labels"
 	"github.com/prometheus/prometheus/tsdb"
 	tsdb_errors "github.com/prometheus/prometheus/tsdb/errors"
 	"github.com/prometheus/statsd_exporter/pkg/mapper"
@@ -43,7 +42,7 @@ var invalidMetricChars = regexp.MustCompile("[^a-zA-Z0-9_:]")
 func createBlocks(input reader.DBReader, mint, maxt, blockDuration int64, maxSamplesInAppender int, outputDir string, metricMapper *mapper.MetricMapper, strictMatch, humanReadable bool) (returnErr error) {
 	mint = blockDuration * (mint / blockDuration)
 
-	db, err := tsdb.OpenDBReadOnly(outputDir, nil)
+	db, err := tsdb.OpenDBReadOnly(outputDir, "", nil)
 	if err != nil {
 		return err
 	}
@@ -67,7 +66,7 @@ func createBlocks(input reader.DBReader, mint, maxt, blockDuration int64, maxSam
 		// also need to append samples throughout the whole block range. To allow that, we
 		// pretend that the block is twice as large here, but only really add sample in the
 		// original interval later.
-		w, err := tsdb.NewBlockWriter(log.NewNopLogger(), outputDir, 2*blockDuration)
+		w, err := tsdb.NewBlockWriter(promslog.NewNopLogger(), outputDir, 2*blockDuration)
 		if err != nil {
 			return fmt.Errorf("block writer: %w", err)
 		}
@@ -104,7 +103,7 @@ func createBlocks(input reader.DBReader, mint, maxt, blockDuration int64, maxSam
 				return err
 			}
 			for _, point := range points {
-				if _, err := app.Add(l, point.Timestamp, point.Value); err != nil {
+				if _, err := app.Append(0, l, point.Timestamp, point.Value); err != nil {
 					return fmt.Errorf("add sample: %w", err)
 				}
 
@@ -207,8 +206,8 @@ func backfill(maxSamplesInAppender int, inputDir, outputDir, mappingConfig strin
 	if mappingConfig != "" {
 		err := metricMapper.InitFromFile(mappingConfig)
 		if err != nil {
-			logger := log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr))
-			level.Error(logger).Log("msg", "Error loading metric mapping config", "err", err)
+			logger := promslog.New(&promslog.Config{})
+			logger.Error("Error loading metric mapping config", "err", err)
 			return err
 		}
 	}

cmd/getool/backfill_test.go (+7 -6)

@@ -27,9 +27,10 @@ import (
 	"time"
 
 	"github.com/go-graphite/go-whisper"
-	"github.com/prometheus/prometheus/pkg/labels"
+	"github.com/prometheus/prometheus/model/labels"
 	"github.com/prometheus/prometheus/storage"
 	"github.com/prometheus/prometheus/tsdb"
+	"github.com/prometheus/prometheus/tsdb/chunkenc"
 	"github.com/stretchr/testify/require"
 )
 
@@ -128,9 +129,9 @@ mappings:
 
 	require.NoError(t, os.MkdirAll(filepath.Join(tmpData, "data", "wal"), 0o777))
 
-	db, err := tsdb.OpenDBReadOnly(filepath.Join(tmpData, "data"), nil)
+	db, err := tsdb.OpenDBReadOnly(filepath.Join(tmpData, "data"), "", nil)
 	require.NoError(t, err)
-	q, err := db.Querier(context.TODO(), math.MinInt64, math.MaxInt64)
+	q, err := db.Querier(math.MinInt64, math.MaxInt64)
 	require.NoError(t, err)
 
 	s := queryAllSeries(t, q)
@@ -160,13 +161,13 @@ type backfillSample struct {
 }
 
 func queryAllSeries(t *testing.T, q storage.Querier) []backfillSample {
-	ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
+	ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
 	samples := []backfillSample{}
 	for ss.Next() {
 		series := ss.At()
-		it := series.Iterator()
+		it := series.Iterator(nil)
 		require.NoError(t, it.Err())
-		for it.Next() {
+		for it.Next() != chunkenc.ValNone {
 			ts, v := it.At()
 			samples = append(samples, backfillSample{Timestamp: ts, Value: v, Labels: series.Labels()})
 		}

cmd/graphite_exporter/main.go (+21 -22)

@@ -17,19 +17,18 @@ import (
 	"bufio"
 	"bytes"
 	"fmt"
+	"log/slog"
 	"net"
 	"net/http"
 	_ "net/http/pprof"
 	"os"
 
 	"github.com/alecthomas/kingpin/v2"
-	"github.com/go-kit/log"
-	"github.com/go-kit/log/level"
 	"github.com/prometheus/client_golang/prometheus"
 	clientVersion "github.com/prometheus/client_golang/prometheus/collectors/version"
 	"github.com/prometheus/client_golang/prometheus/promhttp"
-	"github.com/prometheus/common/promlog"
-	"github.com/prometheus/common/promlog/flag"
+	"github.com/prometheus/common/promslog"
+	"github.com/prometheus/common/promslog/flag"
 	"github.com/prometheus/common/version"
 	"github.com/prometheus/exporter-toolkit/web"
 	"github.com/prometheus/exporter-toolkit/web/kingpinflag"
@@ -57,33 +56,33 @@ func init() {
 	prometheus.MustRegister(clientVersion.NewCollector("graphite_exporter"))
 }
 
-func dumpFSM(mapper *mapper.MetricMapper, dumpFilename string, logger log.Logger) error {
+func dumpFSM(mapper *mapper.MetricMapper, dumpFilename string, logger *slog.Logger) error {
 	if mapper.FSM == nil {
 		return fmt.Errorf("no FSM available to be dumped, possibly because the mapping contains regex patterns")
 	}
 	f, err := os.Create(dumpFilename)
 	if err != nil {
 		return err
 	}
-	level.Info(logger).Log("msg", "Start dumping FSM", "to", dumpFilename)
+	logger.Info("Start dumping FSM", "to", dumpFilename)
 	w := bufio.NewWriter(f)
 	mapper.FSM.DumpFSM(w)
 	w.Flush()
 	f.Close()
-	level.Info(logger).Log("msg", "Finish dumping FSM")
+	logger.Info("Finish dumping FSM")
 	return nil
 }
 
 func main() {
-	promlogConfig := &promlog.Config{}
-	flag.AddFlags(kingpin.CommandLine, promlogConfig)
+	promslogConfig := &promslog.Config{}
+	flag.AddFlags(kingpin.CommandLine, promslogConfig)
 	kingpin.Version(version.Print("graphite_exporter"))
 	kingpin.HelpFlag.Short('h')
 	kingpin.Parse()
-	logger := promlog.New(promlogConfig)
+	logger := promslog.New(promslogConfig)
 
-	level.Info(logger).Log("msg", "Starting graphite_exporter", "version_info", version.Info())
-	level.Info(logger).Log("build_context", version.BuildContext())
+	logger.Info("Starting graphite_exporter", "version_info", version.Info())
+	logger.Info(version.BuildContext())
 
 	http.Handle(*metricsPath, promhttp.Handler())
 	c := collector.NewGraphiteCollector(logger, *strictMatch, *sampleExpiry)
@@ -93,27 +92,27 @@ func main() {
 	if *mappingConfig != "" {
 		err := metricMapper.InitFromFile(*mappingConfig)
 		if err != nil {
-			level.Error(logger).Log("msg", "Error loading metric mapping config", "err", err)
+			logger.Error("Error loading metric mapping config", "err", err)
 			os.Exit(1)
 		}
 	}
 
 	cache, err := getCache(*cacheSize, *cacheType, prometheus.DefaultRegisterer)
 	if err != nil {
-		level.Error(logger).Log("msg", "error initializing mapper cache", "err", err)
+		logger.Error("error initializing mapper cache", "err", err)
 		os.Exit(1)
 	}
 	metricMapper.UseCache(cache)
 
 	if *checkConfig {
-		level.Info(logger).Log("msg", "Configuration check successful, exiting")
+		logger.Info("Configuration check successful, exiting")
 		return
 	}
 
 	if *dumpFSMPath != "" {
 		err := dumpFSM(metricMapper, *dumpFSMPath, logger)
 		if err != nil {
-			level.Error(logger).Log("msg", "Error dumping FSM", "err", err)
+			logger.Error("Error dumping FSM", "err", err)
 			os.Exit(1)
 		}
 	}
@@ -122,14 +121,14 @@ func main() {
 
 	tcpSock, err := net.Listen("tcp", *graphiteAddress)
 	if err != nil {
-		level.Error(logger).Log("msg", "Error binding to TCP socket", "err", err)
+		logger.Error("Error binding to TCP socket", "err", err)
 		os.Exit(1)
 	}
 	go func() {
 		for {
 			conn, err := tcpSock.Accept()
 			if err != nil {
-				level.Error(logger).Log("msg", "Error accepting TCP connection", "err", err)
+				logger.Error("Error accepting TCP connection", "err", err)
 				continue
 			}
 			go func() {
@@ -141,12 +140,12 @@ func main() {
 
 	udpAddress, err := net.ResolveUDPAddr("udp", *graphiteAddress)
 	if err != nil {
-		level.Error(logger).Log("msg", "Error resolving UDP address", "err", err)
+		logger.Error("Error resolving UDP address", "err", err)
 		os.Exit(1)
 	}
 	udpSock, err := net.ListenUDP("udp", udpAddress)
 	if err != nil {
-		level.Error(logger).Log("msg", "Error listening to UDP address", "err", err)
+		logger.Error("Error listening to UDP address", "err", err)
 		os.Exit(1)
 	}
 	go func() {
@@ -155,7 +154,7 @@ func main() {
 			buf := make([]byte, 65536)
 			chars, srcAddress, err := udpSock.ReadFromUDP(buf)
 			if err != nil {
-				level.Error(logger).Log("msg", "Error reading UDP packet", "from", srcAddress, "err", err)
+				logger.Error("Error reading UDP packet", "from", srcAddress, "err", err)
 				continue
 			}
 			go c.ProcessReader(bytes.NewReader(buf[0:chars]))
@@ -179,7 +178,7 @@ func main() {
 
 	server := &http.Server{}
 	if err := web.ListenAndServe(server, toolkitFlags, logger); err != nil {
-		level.Error(logger).Log("err", err)
+		logger.Error("error running HTTP server", "err", err)
 		os.Exit(1)
 	}
 }
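As a usage note, the new logger wiring in main() can be read as the stand-alone sketch below. It uses only the packages already imported in this diff; the concrete flag names registered by promslog/flag (e.g. --log.level, --log.format) are an assumption carried over from the old promlog package, not something this commit states.

package main

import (
	"github.com/alecthomas/kingpin/v2"
	"github.com/prometheus/common/promslog"
	"github.com/prometheus/common/promslog/flag"
)

func main() {
	// Register the logging flags on kingpin's default command line
	// (assumed to be --log.level and --log.format, as with promlog).
	promslogConfig := &promslog.Config{}
	flag.AddFlags(kingpin.CommandLine, promslogConfig)
	kingpin.Parse()

	// Build the *slog.Logger that the exporter passes to the collector
	// and to the exporter-toolkit web server.
	logger := promslog.New(promslogConfig)
	logger.Info("logger configured")
}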

collector/collector.go (+9 -10)

@@ -17,6 +17,7 @@ import (
 	"bufio"
 	"fmt"
 	"io"
+	"log/slog"
 	"math"
 	_ "net/http/pprof"
 	"regexp"
@@ -25,8 +26,6 @@ import (
 	"sync"
 	"time"
 
-	"github.com/go-kit/log"
-	"github.com/go-kit/log/level"
 	"github.com/prometheus/client_golang/prometheus"
 	"github.com/prometheus/statsd_exporter/pkg/mapper"
 )
@@ -40,14 +39,14 @@ type graphiteCollector struct {
 	sampleCh           chan *graphiteSample
 	lineCh             chan string
 	strictMatch        bool
-	logger             log.Logger
+	logger             *slog.Logger
 	tagParseFailures   prometheus.Counter
 	lastProcessed      prometheus.Gauge
 	sampleExpiryMetric prometheus.Gauge
 	sampleExpiry       time.Duration
 }
 
-func NewGraphiteCollector(logger log.Logger, strictMatch bool, sampleExpiry time.Duration) *graphiteCollector {
+func NewGraphiteCollector(logger *slog.Logger, strictMatch bool, sampleExpiry time.Duration) *graphiteCollector {
 	c := &graphiteCollector{
 		sampleCh: make(chan *graphiteSample),
 		lineCh:   make(chan string),
@@ -128,19 +127,19 @@ func (c *graphiteCollector) parseMetricNameAndTags(name string) (string, prometh
 
 func (c *graphiteCollector) processLine(line string) {
 	line = strings.TrimSpace(line)
-	level.Debug(c.logger).Log("msg", "Incoming line", "line", line)
+	c.logger.Debug("Incoming line", "line", line)
 
 	parts := strings.Split(line, " ")
 	if len(parts) != 3 {
-		level.Info(c.logger).Log("msg", "Invalid part count", "parts", len(parts), "line", line)
+		c.logger.Info("Invalid part count", "parts", len(parts), "line", line)
 		return
 	}
 
 	originalName := parts[0]
 
 	parsedName, labels, err := c.parseMetricNameAndTags(originalName)
 	if err != nil {
-		level.Debug(c.logger).Log("msg", "Invalid tags", "line", line, "err", err.Error())
+		c.logger.Debug("Invalid tags", "line", line, "err", err.Error())
 	}
 
 	mapping, mappingLabels, mappingPresent := c.mapper.GetMapping(parsedName, mapper.MetricTypeGauge)
@@ -163,12 +162,12 @@ func (c *graphiteCollector) processLine(line string) {
 
 	value, err := strconv.ParseFloat(parts[1], 64)
 	if err != nil {
-		level.Info(c.logger).Log("msg", "Invalid value", "line", line)
+		c.logger.Info("Invalid value", "line", line)
 		return
 	}
 	timestamp, err := strconv.ParseFloat(parts[2], 64)
 	if err != nil {
-		level.Info(c.logger).Log("msg", "Invalid timestamp", "line", line)
+		c.logger.Info("Invalid timestamp", "line", line)
 		return
 	}
 	sample := graphiteSample{
@@ -180,7 +179,7 @@ func (c *graphiteCollector) processLine(line string) {
 		Help:      fmt.Sprintf("Graphite metric %s", name),
 		Timestamp: time.Unix(int64(timestamp), int64(math.Mod(timestamp, 1.0)*1e9)),
 	}
-	level.Debug(c.logger).Log("msg", "Processing sample", "sample", sample)
+	c.logger.Debug("Processing sample", "sample", sample)
 	c.lastProcessed.Set(float64(time.Now().UnixNano()) / 1e9)
 	c.sampleCh <- &sample
 }

collector/collector_benchmark_test.go (+2 -2)

@@ -19,13 +19,13 @@ import (
 	"testing"
 	"time"
 
-	"github.com/go-kit/log"
 	"github.com/prometheus/client_golang/prometheus"
+	"github.com/prometheus/common/promslog"
 	"github.com/prometheus/statsd_exporter/pkg/mapper"
 )
 
 var (
-	logger = log.NewNopLogger()
+	logger = promslog.NewNopLogger()
 	c      = NewGraphiteCollector(logger, false, 5*time.Minute)
 
 	now = time.Now()

collector/collector_test.go (+3 -3)

@@ -18,14 +18,14 @@ import (
 	"testing"
 	"time"
 
-	"github.com/go-kit/log"
 	"github.com/prometheus/client_golang/prometheus"
+	"github.com/prometheus/common/promslog"
 	"github.com/prometheus/statsd_exporter/pkg/mapper"
 	"github.com/stretchr/testify/assert"
 )
 
 func TestParseNameAndTags(t *testing.T) {
-	logger := log.NewNopLogger()
+	logger := promslog.NewNopLogger()
 	c := NewGraphiteCollector(logger, false, 5*time.Minute)
 	type testCase struct {
 		line string
@@ -204,7 +204,7 @@ func TestProcessLine(t *testing.T) {
 		},
 	}
 
-	c := NewGraphiteCollector(log.NewNopLogger(), false, 5*time.Minute)
+	c := NewGraphiteCollector(promslog.NewNopLogger(), false, 5*time.Minute)
 
 	for _, testCase := range testCases {
 		if testCase.mappingPresent {
