Merge pull request #277 from tjhop/chore/adopt-slog
chore!: adopt log/slog, drop go-kit/log
SuperQ authored Oct 29, 2024
2 parents f91ffb8 + ba090cd commit 22beb8a
Showing 9 changed files with 375 additions and 778 deletions.
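
The changes below follow one mechanical pattern throughout the repository: go-kit/log loggers built with promlog and wrapped in level.Info/level.Error calls are replaced by *slog.Logger values from prometheus/common's promslog package. A minimal sketch of the before/after shape, using a hypothetical error value for illustration (the old go-kit form is shown only in comments):

    package main

    import (
        "errors"

        "github.com/prometheus/common/promslog"
    )

    func main() {
        err := errors.New("example failure") // hypothetical error, for illustration only

        // Before (go-kit/log):
        //   logger := promlog.New(&promlog.Config{})
        //   level.Error(logger).Log("msg", "Error loading metric mapping config", "err", err)

        // After (log/slog via promslog): promslog.New returns a *slog.Logger,
        // so leveled methods take a message plus key/value pairs directly.
        logger := promslog.New(&promslog.Config{})
        logger.Error("Error loading metric mapping config", "err", err)
    }
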
5 changes: 3 additions & 2 deletions .golangci.yml
@@ -1,5 +1,5 @@
run:
skip-dirs:
issues:
exclude-dirs:
- e2e

# Run only staticcheck and goimports for now. Additional linters will be enabled one-by-one.
@@ -8,6 +8,7 @@ linters:
- errorlint
- goimports
- staticcheck
- sloglint
disable-all: true

linters-settings:
15 changes: 7 additions & 8 deletions cmd/getool/backfill.go
@@ -28,9 +28,8 @@ import (
"time"

"github.com/alecthomas/units"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/common/promslog"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/tsdb"
tsdb_errors "github.com/prometheus/prometheus/tsdb/errors"
"github.com/prometheus/statsd_exporter/pkg/mapper"
@@ -43,7 +42,7 @@ var invalidMetricChars = regexp.MustCompile("[^a-zA-Z0-9_:]")
func createBlocks(input reader.DBReader, mint, maxt, blockDuration int64, maxSamplesInAppender int, outputDir string, metricMapper *mapper.MetricMapper, strictMatch, humanReadable bool) (returnErr error) {
mint = blockDuration * (mint / blockDuration)

db, err := tsdb.OpenDBReadOnly(outputDir, nil)
db, err := tsdb.OpenDBReadOnly(outputDir, "", nil)
if err != nil {
return err
}
@@ -67,7 +66,7 @@ func createBlocks(input reader.DBReader, mint, maxt, blockDuration int64, maxSam
// also need to append samples throughout the whole block range. To allow that, we
// pretend that the block is twice as large here, but only really add sample in the
// original interval later.
w, err := tsdb.NewBlockWriter(log.NewNopLogger(), outputDir, 2*blockDuration)
w, err := tsdb.NewBlockWriter(promslog.NewNopLogger(), outputDir, 2*blockDuration)
if err != nil {
return fmt.Errorf("block writer: %w", err)
}
@@ -104,7 +103,7 @@ func createBlocks(input reader.DBReader, mint, maxt, blockDuration int64, maxSam
return err
}
for _, point := range points {
if _, err := app.Add(l, point.Timestamp, point.Value); err != nil {
if _, err := app.Append(0, l, point.Timestamp, point.Value); err != nil {
return fmt.Errorf("add sample: %w", err)
}

@@ -207,8 +206,8 @@ func backfill(maxSamplesInAppender int, inputDir, outputDir, mappingConfig strin
if mappingConfig != "" {
err := metricMapper.InitFromFile(mappingConfig)
if err != nil {
logger := log.NewLogfmtLogger(log.NewSyncWriter(os.Stderr))
level.Error(logger).Log("msg", "Error loading metric mapping config", "err", err)
logger := promslog.New(&promslog.Config{})
logger.Error("Error loading metric mapping config", "err", err)
return err
}
}
13 changes: 7 additions & 6 deletions cmd/getool/backfill_test.go
@@ -27,9 +27,10 @@ import (
"time"

"github.com/go-graphite/go-whisper"
"github.com/prometheus/prometheus/pkg/labels"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb"
"github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/stretchr/testify/require"
)

@@ -128,9 +129,9 @@ mappings:

require.NoError(t, os.MkdirAll(filepath.Join(tmpData, "data", "wal"), 0o777))

db, err := tsdb.OpenDBReadOnly(filepath.Join(tmpData, "data"), nil)
db, err := tsdb.OpenDBReadOnly(filepath.Join(tmpData, "data"), "", nil)
require.NoError(t, err)
q, err := db.Querier(context.TODO(), math.MinInt64, math.MaxInt64)
q, err := db.Querier(math.MinInt64, math.MaxInt64)
require.NoError(t, err)

s := queryAllSeries(t, q)
@@ -160,13 +161,13 @@ type backfillSample struct {
}

func queryAllSeries(t *testing.T, q storage.Querier) []backfillSample {
ss := q.Select(false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
ss := q.Select(context.Background(), false, nil, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
samples := []backfillSample{}
for ss.Next() {
series := ss.At()
it := series.Iterator()
it := series.Iterator(nil)
require.NoError(t, it.Err())
for it.Next() {
for it.Next() != chunkenc.ValNone {
ts, v := it.At()
samples = append(samples, backfillSample{Timestamp: ts, Value: v, Labels: series.Labels()})
}
43 changes: 21 additions & 22 deletions cmd/graphite_exporter/main.go
@@ -17,19 +17,18 @@ import (
"bufio"
"bytes"
"fmt"
"log/slog"
"net"
"net/http"
_ "net/http/pprof"
"os"

"github.com/alecthomas/kingpin/v2"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
clientVersion "github.com/prometheus/client_golang/prometheus/collectors/version"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/prometheus/common/promlog"
"github.com/prometheus/common/promlog/flag"
"github.com/prometheus/common/promslog"
"github.com/prometheus/common/promslog/flag"
"github.com/prometheus/common/version"
"github.com/prometheus/exporter-toolkit/web"
"github.com/prometheus/exporter-toolkit/web/kingpinflag"
@@ -57,33 +56,33 @@ func init() {
prometheus.MustRegister(clientVersion.NewCollector("graphite_exporter"))
}

func dumpFSM(mapper *mapper.MetricMapper, dumpFilename string, logger log.Logger) error {
func dumpFSM(mapper *mapper.MetricMapper, dumpFilename string, logger *slog.Logger) error {
if mapper.FSM == nil {
return fmt.Errorf("no FSM available to be dumped, possibly because the mapping contains regex patterns")
}
f, err := os.Create(dumpFilename)
if err != nil {
return err
}
level.Info(logger).Log("msg", "Start dumping FSM", "to", dumpFilename)
logger.Info("Start dumping FSM", "to", dumpFilename)
w := bufio.NewWriter(f)
mapper.FSM.DumpFSM(w)
w.Flush()
f.Close()
level.Info(logger).Log("msg", "Finish dumping FSM")
logger.Info("Finish dumping FSM")
return nil
}

func main() {
promlogConfig := &promlog.Config{}
flag.AddFlags(kingpin.CommandLine, promlogConfig)
promslogConfig := &promslog.Config{}
flag.AddFlags(kingpin.CommandLine, promslogConfig)
kingpin.Version(version.Print("graphite_exporter"))
kingpin.HelpFlag.Short('h')
kingpin.Parse()
logger := promlog.New(promlogConfig)
logger := promslog.New(promslogConfig)

level.Info(logger).Log("msg", "Starting graphite_exporter", "version_info", version.Info())
level.Info(logger).Log("build_context", version.BuildContext())
logger.Info("Starting graphite_exporter", "version_info", version.Info())
logger.Info(version.BuildContext())

http.Handle(*metricsPath, promhttp.Handler())
c := collector.NewGraphiteCollector(logger, *strictMatch, *sampleExpiry)
@@ -93,27 +92,27 @@ func main() {
if *mappingConfig != "" {
err := metricMapper.InitFromFile(*mappingConfig)
if err != nil {
level.Error(logger).Log("msg", "Error loading metric mapping config", "err", err)
logger.Error("Error loading metric mapping config", "err", err)
os.Exit(1)
}
}

cache, err := getCache(*cacheSize, *cacheType, prometheus.DefaultRegisterer)
if err != nil {
level.Error(logger).Log("msg", "error initializing mapper cache", "err", err)
logger.Error("error initializing mapper cache", "err", err)
os.Exit(1)
}
metricMapper.UseCache(cache)

if *checkConfig {
level.Info(logger).Log("msg", "Configuration check successful, exiting")
logger.Info("Configuration check successful, exiting")
return
}

if *dumpFSMPath != "" {
err := dumpFSM(metricMapper, *dumpFSMPath, logger)
if err != nil {
level.Error(logger).Log("msg", "Error dumping FSM", "err", err)
logger.Error("Error dumping FSM", "err", err)
os.Exit(1)
}
}
@@ -122,14 +121,14 @@

tcpSock, err := net.Listen("tcp", *graphiteAddress)
if err != nil {
level.Error(logger).Log("msg", "Error binding to TCP socket", "err", err)
logger.Error("Error binding to TCP socket", "err", err)
os.Exit(1)
}
go func() {
for {
conn, err := tcpSock.Accept()
if err != nil {
level.Error(logger).Log("msg", "Error accepting TCP connection", "err", err)
logger.Error("Error accepting TCP connection", "err", err)
continue
}
go func() {
@@ -141,12 +140,12 @@

udpAddress, err := net.ResolveUDPAddr("udp", *graphiteAddress)
if err != nil {
level.Error(logger).Log("msg", "Error resolving UDP address", "err", err)
logger.Error("Error resolving UDP address", "err", err)
os.Exit(1)
}
udpSock, err := net.ListenUDP("udp", udpAddress)
if err != nil {
level.Error(logger).Log("msg", "Error listening to UDP address", "err", err)
logger.Error("Error listening to UDP address", "err", err)
os.Exit(1)
}
go func() {
@@ -155,7 +154,7 @@
buf := make([]byte, 65536)
chars, srcAddress, err := udpSock.ReadFromUDP(buf)
if err != nil {
level.Error(logger).Log("msg", "Error reading UDP packet", "from", srcAddress, "err", err)
logger.Error("Error reading UDP packet", "from", srcAddress, "err", err)
continue
}
go c.ProcessReader(bytes.NewReader(buf[0:chars]))
@@ -179,7 +178,7 @@

server := &http.Server{}
if err := web.ListenAndServe(server, toolkitFlags, logger); err != nil {
level.Error(logger).Log("err", err)
logger.Error("error running HTTP server", "err", err)
os.Exit(1)
}
}
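
As shown in main.go above, the promlog flag helpers are swapped for their promslog counterparts. Assuming promslog/flag registers the same --log.level and --log.format kingpin flags that the old promlog/flag helper did, the wiring reduces to a sketch like this (an illustration under that assumption, not code taken from this commit):

    package main

    import (
        "github.com/alecthomas/kingpin/v2"
        "github.com/prometheus/common/promslog"
        promslogflag "github.com/prometheus/common/promslog/flag"
    )

    func main() {
        cfg := &promslog.Config{}
        // Assumed to register --log.level and --log.format on the kingpin application.
        promslogflag.AddFlags(kingpin.CommandLine, cfg)
        kingpin.Parse()

        // The resulting *slog.Logger is what main.go hands to the collector,
        // the FSM dumper, and exporter-toolkit's web.ListenAndServe.
        logger := promslog.New(cfg)
        logger.Info("logger configured")
    }
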
19 changes: 9 additions & 10 deletions collector/collector.go
@@ -17,6 +17,7 @@ import (
"bufio"
"fmt"
"io"
"log/slog"
"math"
_ "net/http/pprof"
"regexp"
@@ -25,8 +26,6 @@ import (
"sync"
"time"

"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/statsd_exporter/pkg/mapper"
)
@@ -40,14 +39,14 @@ type graphiteCollector struct {
sampleCh chan *graphiteSample
lineCh chan string
strictMatch bool
logger log.Logger
logger *slog.Logger
tagParseFailures prometheus.Counter
lastProcessed prometheus.Gauge
sampleExpiryMetric prometheus.Gauge
sampleExpiry time.Duration
}

func NewGraphiteCollector(logger log.Logger, strictMatch bool, sampleExpiry time.Duration) *graphiteCollector {
func NewGraphiteCollector(logger *slog.Logger, strictMatch bool, sampleExpiry time.Duration) *graphiteCollector {
c := &graphiteCollector{
sampleCh: make(chan *graphiteSample),
lineCh: make(chan string),
@@ -128,19 +127,19 @@ func (c *graphiteCollector) parseMetricNameAndTags(name string) (string, prometh

func (c *graphiteCollector) processLine(line string) {
line = strings.TrimSpace(line)
level.Debug(c.logger).Log("msg", "Incoming line", "line", line)
c.logger.Debug("Incoming line", "line", line)

parts := strings.Split(line, " ")
if len(parts) != 3 {
level.Info(c.logger).Log("msg", "Invalid part count", "parts", len(parts), "line", line)
c.logger.Info("Invalid part count", "parts", len(parts), "line", line)
return
}

originalName := parts[0]

parsedName, labels, err := c.parseMetricNameAndTags(originalName)
if err != nil {
level.Debug(c.logger).Log("msg", "Invalid tags", "line", line, "err", err.Error())
c.logger.Debug("Invalid tags", "line", line, "err", err.Error())
}

mapping, mappingLabels, mappingPresent := c.mapper.GetMapping(parsedName, mapper.MetricTypeGauge)
@@ -163,12 +162,12 @@

value, err := strconv.ParseFloat(parts[1], 64)
if err != nil {
level.Info(c.logger).Log("msg", "Invalid value", "line", line)
c.logger.Info("Invalid value", "line", line)
return
}
timestamp, err := strconv.ParseFloat(parts[2], 64)
if err != nil {
level.Info(c.logger).Log("msg", "Invalid timestamp", "line", line)
c.logger.Info("Invalid timestamp", "line", line)
return
}
sample := graphiteSample{
@@ -180,7 +179,7 @@
Help: fmt.Sprintf("Graphite metric %s", name),
Timestamp: time.Unix(int64(timestamp), int64(math.Mod(timestamp, 1.0)*1e9)),
}
level.Debug(c.logger).Log("msg", "Processing sample", "sample", sample)
c.logger.Debug("Processing sample", "sample", sample)
c.lastProcessed.Set(float64(time.Now().UnixNano()) / 1e9)
c.sampleCh <- &sample
}
4 changes: 2 additions & 2 deletions collector/collector_benchmark_test.go
@@ -19,13 +19,13 @@ import (
"testing"
"time"

"github.com/go-kit/log"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/promslog"
"github.com/prometheus/statsd_exporter/pkg/mapper"
)

var (
logger = log.NewNopLogger()
logger = promslog.NewNopLogger()
c = NewGraphiteCollector(logger, false, 5*time.Minute)

now = time.Now()
6 changes: 3 additions & 3 deletions collector/collector_test.go
@@ -18,14 +18,14 @@ import (
"testing"
"time"

"github.com/go-kit/log"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/common/promslog"
"github.com/prometheus/statsd_exporter/pkg/mapper"
"github.com/stretchr/testify/assert"
)

func TestParseNameAndTags(t *testing.T) {
logger := log.NewNopLogger()
logger := promslog.NewNopLogger()
c := NewGraphiteCollector(logger, false, 5*time.Minute)
type testCase struct {
line string
@@ -204,7 +204,7 @@ func TestProcessLine(t *testing.T) {
},
}

c := NewGraphiteCollector(log.NewNopLogger(), false, 5*time.Minute)
c := NewGraphiteCollector(promslog.NewNopLogger(), false, 5*time.Minute)

for _, testCase := range testCases {
if testCase.mappingPresent {