package kgo

import (
	"context"
	"unicode/utf8"

	"github.com/twmb/franz-go/pkg/kgo"
	semconv "go.opentelemetry.io/otel/semconv/v1.18.0"
	"go.unistack.org/micro/v3/metadata"
	"go.unistack.org/micro/v3/tracer"
)

type hookTracer struct {
	clientID string
	group    string
	tracer   tracer.Tracer
}

var (
	_ kgo.HookProduceRecordBuffered   = (*hookTracer)(nil)
	_ kgo.HookProduceRecordUnbuffered = (*hookTracer)(nil)
	_ kgo.HookFetchRecordBuffered     = (*hookTracer)(nil)
	_ kgo.HookFetchRecordUnbuffered   = (*hookTracer)(nil)
)
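
// The helper below is an illustrative sketch and not part of the original file:
// it shows one way the hookTracer could be attached to a franz-go client via
// kgo.WithHooks so that the hooks asserted above are invoked automatically.
// The function name, client id, and option values are assumptions, not the
// broker's actual wiring.
func newTracedClient(tr tracer.Tracer, group string, brokers ...string) (*kgo.Client, error) {
	hook := &hookTracer{
		clientID: "example-client", // illustrative client id
		group:    group,
		tracer:   tr,
	}
	return kgo.NewClient(
		kgo.SeedBrokers(brokers...), // assumed seed broker addresses
		kgo.ConsumerGroup(group),
		kgo.WithHooks(hook), // registers the produce/fetch hooks above
	)
}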

// OnProduceRecordBuffered starts a new span for the "publish" operation on a
// buffered record.
//
// It sets the span options, injects the span context into the record, and
// updates the record's context so the span can be ended in the
// OnProduceRecordUnbuffered hook.
func (m *hookTracer) OnProduceRecordBuffered(r *kgo.Record) {
	// Set up the span options.
	attrs := []interface{}{
		semconv.MessagingSystemKey.String("kafka"),
		semconv.MessagingDestinationKindTopic,
		semconv.MessagingDestinationName(r.Topic),
		semconv.MessagingOperationPublish,
	}
	attrs = maybeKeyAttr(attrs, r)
	if m.clientID != "" {
		attrs = append(attrs, semconv.MessagingKafkaClientIDKey.String(m.clientID))
	}
	opts := []tracer.SpanOption{
		tracer.WithSpanLabels(attrs...),
		tracer.WithSpanKind(tracer.SpanKindProducer),
	}

	if r.Context == nil {
		r.Context = context.Background()
	}

	md, ok := metadata.FromOutgoingContext(r.Context)
	if !ok {
		md = metadata.New(len(r.Headers))
	}
	for _, h := range r.Headers {
		md.Set(h.Key, string(h.Value))
	}

	// Start the "publish" span.
	ctx, _ := m.tracer.Start(metadata.NewOutgoingContext(r.Context, md), r.Topic+" publish", opts...)
	// Inject the span context into the record.
	// t.propagators.Inject(ctx, NewRecordCarrier(r))
	// Update the record context.
	r.Context = ctx
}

// OnProduceRecordUnbuffered continues and ends the "publish" span for an
// unbuffered record.
//
// It sets the attributes whose values were not yet known when the record was
// buffered and records any error that occurred during the publish operation.
func (m *hookTracer) OnProduceRecordUnbuffered(r *kgo.Record, err error) {
	if span, ok := tracer.SpanFromContext(r.Context); ok && span != nil {
		span.AddLabels(
			semconv.MessagingKafkaDestinationPartition(int(r.Partition)),
		)
		if err != nil {
			span.SetStatus(tracer.SpanStatusError, err.Error())
		}
		span.Finish()
	}
}
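
// The helper below is an illustrative sketch and not part of the original file:
// it shows that the two produce hooks above run automatically around
// Client.Produce, so a caller only needs to give the record a context for the
// "publish" span to be started and finished. The function name is an assumption.
func produceTraced(ctx context.Context, cl *kgo.Client, topic string, value []byte) {
	rec := &kgo.Record{Topic: topic, Value: value, Context: ctx}
	// OnProduceRecordBuffered starts the span when the record is buffered;
	// OnProduceRecordUnbuffered finishes it just before the promise runs.
	cl.Produce(ctx, rec, func(_ *kgo.Record, err error) {
		_ = err // handle the produce error as needed
	})
}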

// OnFetchRecordBuffered starts a new span for the "receive" operation on a
// buffered record.
//
// It sets the span options, extracts the span context from the record, and
// updates the record's context so the span can be ended in the
// OnFetchRecordUnbuffered hook and used in downstream consumer processing.
func (m *hookTracer) OnFetchRecordBuffered(r *kgo.Record) {
	// Set up the span options.
	attrs := []interface{}{
		semconv.MessagingSystemKey.String("kafka"),
		semconv.MessagingSourceKindTopic,
		semconv.MessagingSourceName(r.Topic),
		semconv.MessagingOperationReceive,
		semconv.MessagingKafkaSourcePartition(int(r.Partition)),
	}
	attrs = maybeKeyAttr(attrs, r)
	if m.clientID != "" {
		attrs = append(attrs, semconv.MessagingKafkaClientIDKey.String(m.clientID))
	}
	if m.group != "" {
		attrs = append(attrs, semconv.MessagingKafkaConsumerGroupKey.String(m.group))
	}
	opts := []tracer.SpanOption{
		tracer.WithSpanLabels(attrs...),
		tracer.WithSpanKind(tracer.SpanKindConsumer),
	}

	if r.Context == nil {
		r.Context = context.Background()
	}
	md, ok := metadata.FromIncomingContext(r.Context)
	if !ok {
		md = metadata.New(len(r.Headers))
	}
	for _, h := range r.Headers {
		md.Set(h.Key, string(h.Value))
	}

	// Extract the span context from the record.
	// ctx := t.propagators.Extract(r.Context, NewRecordCarrier(r))
	// Start the "receive" span.
	newCtx, _ := m.tracer.Start(metadata.NewIncomingContext(r.Context, md), r.Topic+" receive", opts...)
	// Update the record context.
	r.Context = newCtx
}

// OnFetchRecordUnbuffered continues and ends the "receive" span for an
// unbuffered record.
func (m *hookTracer) OnFetchRecordUnbuffered(r *kgo.Record, _ bool) {
	if span, ok := tracer.SpanFromContext(r.Context); ok && span != nil {
		span.Finish()
	}
}

// WithProcessSpan starts a new span for the "process" operation on a consumer
// record.
//
// It sets up the span options. The user's application code is responsible for
// ending the span.
//
// It should only ever be called for a record received from a consumer polling
// loop, not for a record created for producing, and should be called at the
// start of each processing iteration for that record.
func (m *hookTracer) WithProcessSpan(r *kgo.Record) (context.Context, tracer.Span) {
	// Set up the span options.
	attrs := []interface{}{
		semconv.MessagingSystemKey.String("kafka"),
		semconv.MessagingSourceKindTopic,
		semconv.MessagingSourceName(r.Topic),
		semconv.MessagingOperationProcess,
		semconv.MessagingKafkaSourcePartition(int(r.Partition)),
		semconv.MessagingKafkaMessageOffset(int(r.Offset)),
	}
	attrs = maybeKeyAttr(attrs, r)
	if m.clientID != "" {
		attrs = append(attrs, semconv.MessagingKafkaClientIDKey.String(m.clientID))
	}
	if m.group != "" {
		attrs = append(attrs, semconv.MessagingKafkaConsumerGroupKey.String(m.group))
	}
	opts := []tracer.SpanOption{
		tracer.WithSpanLabels(attrs...),
		tracer.WithSpanKind(tracer.SpanKindConsumer),
	}

	if r.Context == nil {
		r.Context = context.Background()
	}
	md, ok := metadata.FromIncomingContext(r.Context)
	if !ok {
		md = metadata.New(len(r.Headers))
	}
	for _, h := range r.Headers {
		md.Set(h.Key, string(h.Value))
	}

	// Start a new span using the record context carrying the collected metadata
	// and the options above.
	return m.tracer.Start(metadata.NewIncomingContext(r.Context, md), r.Topic+" process", opts...)
}
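
// The helper below is an illustrative sketch and not part of the original file:
// it shows how an application might call WithProcessSpan at the start of each
// processing iteration inside a poll loop, ending the span itself as the doc
// comment above requires. The method and handler names are assumptions.
func (m *hookTracer) pollAndProcess(ctx context.Context, cl *kgo.Client, handle func(context.Context, *kgo.Record) error) {
	for {
		fetches := cl.PollFetches(ctx)
		if fetches.IsClientClosed() {
			return
		}
		fetches.EachRecord(func(r *kgo.Record) {
			// Start the "process" span for this record; the caller ends it.
			spanCtx, span := m.WithProcessSpan(r)
			if err := handle(spanCtx, r); err != nil {
				span.SetStatus(tracer.SpanStatusError, err.Error())
			}
			span.Finish()
		})
	}
}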

func maybeKeyAttr(attrs []interface{}, r *kgo.Record) []interface{} {
	if r.Key == nil || !utf8.Valid(r.Key) {
		return attrs
	}
	return append(attrs, semconv.MessagingKafkaMessageKeyKey.String(string(r.Key)))
}