Commit
Merge branch 'main' into gh-2918-slog-add-clone
Showing 18 changed files with 770 additions and 457 deletions.
@@ -0,0 +1,86 @@
// Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2024 Datadog, Inc.

package tracing

import (
	"context"

	"gopkg.in/DataDog/dd-trace-go.v1/datastreams"
	"gopkg.in/DataDog/dd-trace-go.v1/datastreams/options"
	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer"
)

// SetConsumeDSMCheckpoint sets a Data Streams Monitoring consume checkpoint on msg.
func (tr *Tracer) SetConsumeDSMCheckpoint(msg Message) {
	if !tr.dataStreamsEnabled || msg == nil {
		return
	}
	edges := []string{"direction:in", "topic:" + msg.GetTopic(), "type:kafka"}
	if tr.kafkaCfg.ConsumerGroupID != "" {
		edges = append(edges, "group:"+tr.kafkaCfg.ConsumerGroupID)
	}
	carrier := NewMessageCarrier(msg)
	ctx, ok := tracer.SetDataStreamsCheckpointWithParams(
		datastreams.ExtractFromBase64Carrier(context.Background(), carrier),
		options.CheckpointParams{PayloadSize: getConsumerMsgSize(msg)},
		edges...,
	)
	if !ok {
		return
	}
	datastreams.InjectToBase64Carrier(ctx, carrier)
	if tr.kafkaCfg.ConsumerGroupID != "" {
		// only track Kafka lag if a consumer group is set.
		// since there is no ack mechanism, we consider that messages read are committed right away.
		tracer.TrackKafkaCommitOffset(tr.kafkaCfg.ConsumerGroupID, msg.GetTopic(), int32(msg.GetPartition()), msg.GetOffset())
	}
}

// SetProduceDSMCheckpoint sets a Data Streams Monitoring produce checkpoint on msg before it is written by writer.
func (tr *Tracer) SetProduceDSMCheckpoint(msg Message, writer Writer) {
	if !tr.dataStreamsEnabled || msg == nil {
		return
	}

	var topic string
	if writer.GetTopic() != "" {
		topic = writer.GetTopic()
	} else {
		topic = msg.GetTopic()
	}

	edges := []string{"direction:out", "topic:" + topic, "type:kafka"}
	carrier := MessageCarrier{msg}
	ctx, ok := tracer.SetDataStreamsCheckpointWithParams(
		datastreams.ExtractFromBase64Carrier(context.Background(), carrier),
		options.CheckpointParams{PayloadSize: getProducerMsgSize(msg)},
		edges...,
	)
	if !ok {
		return
	}

	// Headers will be dropped if the current protocol does not support them
	datastreams.InjectToBase64Carrier(ctx, carrier)
}

func getProducerMsgSize(msg Message) (size int64) {
	for _, header := range msg.GetHeaders() {
		size += int64(len(header.GetKey()) + len(header.GetValue()))
	}
	if msg.GetValue() != nil {
		size += int64(len(msg.GetValue()))
	}
	if msg.GetKey() != nil {
		size += int64(len(msg.GetKey()))
	}
	return size
}

func getConsumerMsgSize(msg Message) (size int64) {
	for _, header := range msg.GetHeaders() {
		size += int64(len(header.GetKey()) + len(header.GetValue()))
	}
	return size + int64(len(msg.GetValue())+len(msg.GetKey()))
}
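As a rough illustration of what the PayloadSize passed to the checkpoint ends up counting, the standalone sketch below reproduces the producer-side accounting with local stand-in types. kafkaHeader, kafkaMsg, and producerMsgSize are hypothetical names for this example only and are not part of the package above.

package main

import "fmt"

// kafkaHeader and kafkaMsg are hypothetical stand-ins for the Header and
// Message accessors used by getProducerMsgSize above.
type kafkaHeader struct {
	Key   string
	Value []byte
}

type kafkaMsg struct {
	Key, Value []byte
	Headers    []kafkaHeader
}

// producerMsgSize mirrors getProducerMsgSize: header keys and values plus the
// message key and value, all counted in bytes.
func producerMsgSize(m kafkaMsg) (size int64) {
	for _, h := range m.Headers {
		size += int64(len(h.Key) + len(h.Value))
	}
	size += int64(len(m.Key) + len(m.Value))
	return size
}

func main() {
	m := kafkaMsg{
		Key:   []byte("user-42"),            // 7 bytes
		Value: []byte(`{"event":"signup"}`), // 18 bytes
		Headers: []kafkaHeader{
			{Key: "traceparent", Value: []byte("00-abc-def-01")}, // 11 + 13 bytes
		},
	}
	fmt.Println(producerMsgSize(m)) // 7 + 18 + 11 + 13 = 49
}

Running this prints 49, the value that would be reported as the checkpoint's payload size for such a message.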
contrib/segmentio/kafka.go.v0/internal/tracing/message_carrier.go (52 additions, 0 deletions)
@@ -0,0 +1,52 @@
// Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2024 Datadog, Inc.

package tracing

import (
	"gopkg.in/DataDog/dd-trace-go.v1/ddtrace/tracer"
)

// A MessageCarrier implements TextMapReader/TextMapWriter for extracting/injecting traces on a kafka.Message
type MessageCarrier struct {
	msg Message
}

var _ interface {
	tracer.TextMapReader
	tracer.TextMapWriter
} = (*MessageCarrier)(nil)

// ForeachKey conforms to the TextMapReader interface.
func (c MessageCarrier) ForeachKey(handler func(key, val string) error) error {
	for _, h := range c.msg.GetHeaders() {
		err := handler(h.GetKey(), string(h.GetValue()))
		if err != nil {
			return err
		}
	}
	return nil
}

// Set implements TextMapWriter
func (c MessageCarrier) Set(key, val string) {
	headers := c.msg.GetHeaders()
	// ensure uniqueness of keys
	for i := 0; i < len(headers); i++ {
		if headers[i].GetKey() == key {
			headers = append(headers[:i], headers[i+1:]...)
			i--
		}
	}
	headers = append(headers, KafkaHeader{
		Key:   key,
		Value: []byte(val),
	})
	c.msg.SetHeaders(headers)
}

// NewMessageCarrier returns a MessageCarrier wrapping the given message.
func NewMessageCarrier(msg Message) MessageCarrier {
	return MessageCarrier{msg: msg}
}
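To make the de-duplication in Set concrete, here is a minimal standalone sketch of the same remove-then-append loop over a plain header slice. The header type and setHeader function are hypothetical stand-ins for this example; the real code operates on the package's Header interface and KafkaHeader type.

package main

import "fmt"

// header is a hypothetical stand-in for the package's KafkaHeader.
type header struct {
	Key   string
	Value []byte
}

// setHeader mirrors MessageCarrier.Set: drop every existing header with the
// same key, then append the new key/value pair at the end.
func setHeader(headers []header, key, val string) []header {
	for i := 0; i < len(headers); i++ {
		if headers[i].Key == key {
			headers = append(headers[:i], headers[i+1:]...)
			i-- // re-check the element that just shifted into position i
		}
	}
	return append(headers, header{Key: key, Value: []byte(val)})
}

func main() {
	hs := []header{
		{Key: "x-datadog-trace-id", Value: []byte("123")},
		{Key: "content-type", Value: []byte("application/json")},
		{Key: "x-datadog-trace-id", Value: []byte("456")}, // stale duplicate
	}
	hs = setHeader(hs, "x-datadog-trace-id", "789")
	for _, h := range hs {
		fmt.Printf("%s=%s\n", h.Key, h.Value)
	}
	// Prints:
	//   content-type=application/json
	//   x-datadog-trace-id=789
}

After the call, only one x-datadog-trace-id header remains and it carries the latest value, which matches how Set keeps propagation headers unique on the message.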