Commit 71b2806 (parent: ea0f10a)

Allow a dedicated key for headers content

Signed-off-by: Thomas Tych <[email protected]>
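The new headers_key option complements the existing add_headers flag: with add_headers true alone, Kafka message headers are merged directly into each event record, while setting headers_key nests them under that dedicated record key instead. A minimal sketch of a source configuration using the option (the broker address, topic name, and the kafka_headers key are illustrative placeholders):

  <source>
    @type kafka
    brokers localhost:9092
    topics app-events
    format text
    add_headers true
    headers_key kafka_headers
  </source>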

6 files changed: +168 -16 lines

README.md (+4)

@@ -72,6 +72,8 @@ Consume events by single consumer.
   topics <listening topics(separate with comma',')>
   format <input text type (text|json|ltsv|msgpack)> :default => json
   message_key <key (Optional, for text format only, default is message)>
+  add_headers <If true, add kafka's message headers to record>
+  headers_key <key dedicated to store headers content>
   add_prefix <tag prefix (Optional)>
   add_suffix <tag suffix (Optional)>
@@ -122,6 +124,7 @@ Consume events by kafka consumer group features..
   message_key <key (Optional, for text format only, default is message)>
   kafka_message_key <key (Optional, If specified, set kafka's message key to this key)>
   add_headers <If true, add kafka's message headers to record>
+  headers_key <key dedicated to store headers content>
   add_prefix <tag prefix (Optional)>
   add_suffix <tag suffix (Optional)>
   retry_emit_limit <Wait retry_emit_limit x 1s when BuffereQueueLimitError happens. The default is nil and it means waiting until BufferQueueLimitError is resolved>
@@ -159,6 +162,7 @@ With the introduction of the rdkafka-ruby based input plugin we hope to support
   message_key <key (Optional, for text format only, default is message)>
   kafka_message_key <key (Optional, If specified, set kafka's message key to this key)>
   add_headers <If true, add kafka's message headers to record>
+  headers_key <key dedicated to store headers content>
   add_prefix <tag prefix (Optional)>
   add_suffix <tag suffix (Optional)>
   retry_emit_limit <Wait retry_emit_limit x 1s when BuffereQueueLimitError happens. The default is nil and it means waiting until BufferQueueLimitError is resolved>

lib/fluent/plugin/in_kafka.rb (+20 -1)

@@ -51,6 +51,11 @@ class Fluent::KafkaInput < Fluent::Input
   config_param :kafka_message_key, :string, :default => nil,
                :desc => "Set kafka's message key to this field"
 
+  config_param :add_headers, :bool, :default => false,
+               :desc => "Add kafka's message headers to event record"
+  config_param :headers_key, :string, :default => nil,
+               :desc => "Record key to store kafka's message headers"
+
   # Kafka#fetch_messages options
   config_param :max_bytes, :integer, :default => nil,
                :desc => "Maximum number of bytes to fetch."
@@ -235,6 +240,8 @@ def start
                                  @record_time_key,
                                  @tag_source,
                                  @record_tag_key,
+                                 @add_headers,
+                                 @headers_key,
                                  opt)
     }
     @topic_watchers.each {|tw|
@@ -259,7 +266,7 @@ def run
   end
 
   class TopicWatcher < Coolio::TimerWatcher
-    def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, offset_manager, router, kafka_message_key, time_source, record_time_key, tag_source, record_tag_key, options={})
+    def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, offset_manager, router, kafka_message_key, time_source, record_time_key, tag_source, record_tag_key, add_headers, headers_key, options={})
       @topic_entry = topic_entry
       @kafka = kafka
       @callback = method(:consume)
@@ -274,6 +281,8 @@ def initialize(topic_entry, kafka, interval, parser, add_prefix, add_suffix, off
       @record_time_key = record_time_key
       @tag_source = tag_source
       @record_tag_key = record_tag_key
+      @add_headers = add_headers
+      @headers_key = headers_key
 
       @next_offset = @topic_entry.offset
       if @topic_entry.offset == -1 && offset_manager
@@ -332,6 +341,16 @@ def consume
           if @kafka_message_key
             record[@kafka_message_key] = msg.key
           end
+          if @add_headers
+            if @headers_key
+              headers_record = record[@headers_key] = {}
+            else
+              headers_record = record
+            end
+            msg.headers.each_pair { |k, v|
+              headers_record[k] = v
+            }
+          end
           es.add(record_time, record)
         rescue => e
           $log.warn "parser error in #{@topic_entry.topic}/#{@topic_entry.partition}", :error => e.to_s, :value => msg.value, :offset => msg.offset
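
For illustration, here is a minimal standalone Ruby sketch of the merge logic added to consume above; the record and headers hashes are hypothetical stand-ins for the parsed event and the Kafka message headers:

  # Stand-ins for the parsed event record and the Kafka message headers.
  record  = { 'message' => 'Hello, fluent-plugin-kafka!' }
  headers = { 'header1' => 'content1' }

  headers_key = 'kafka_headers' # set to nil to merge headers into the record itself

  # Same branching as the diff: write into a dedicated sub-hash when
  # headers_key is configured, into the record itself otherwise.
  headers_record = headers_key ? (record[headers_key] = {}) : record
  headers.each_pair { |k, v| headers_record[k] = v }

  p record
  # With headers_key:    {"message"=>"Hello, fluent-plugin-kafka!", "kafka_headers"=>{"header1"=>"content1"}}
  # Without headers_key: {"message"=>"Hello, fluent-plugin-kafka!", "header1"=>"content1"}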

lib/fluent/plugin/in_kafka_group.rb (+16 -4)

@@ -20,6 +20,8 @@ class Fluent::KafkaGroupInput < Fluent::Input
                :desc => "For 'text' format only."
   config_param :add_headers, :bool, :default => false,
                :desc => "Add kafka's message headers to event record"
+  config_param :headers_key, :string, :default => nil,
+               :desc => "Record key to store kafka's message headers"
   config_param :add_prefix, :string, :default => nil,
                :desc => "Tag prefix (Optional)"
   config_param :add_suffix, :string, :default => nil,
@@ -259,7 +261,7 @@ def reconnect_consumer
   end
 
   def process_batch_with_record_tag(batch)
-    es = {}
+    es = {}
     batch.messages.each { |msg|
       begin
         record = @parser_proc.call(msg)
@@ -285,8 +287,13 @@ def process_batch_with_record_tag(batch)
           record[@kafka_message_key] = msg.key
         end
         if @add_headers
+          if @headers_key
+            headers_record = record[@headers_key] = {}
+          else
+            headers_record = record
+          end
           msg.headers.each_pair { |k, v|
-            record[k] = v
+            headers_record[k] = v
           }
         end
         es[tag].add(record_time, record)
@@ -332,8 +339,13 @@ def process_batch(batch)
          record[@kafka_message_key] = msg.key
        end
        if @add_headers
+         if @headers_key
+           headers_record = record[@headers_key] = {}
+         else
+           headers_record = record
+         end
          msg.headers.each_pair { |k, v|
-           record[k] = v
+           headers_record[k] = v
          }
        end
        es.add(record_time, record)
@@ -355,7 +367,7 @@ def run
         if @tag_source == :record
           process_batch_with_record_tag(batch)
         else
-          process_batch(batch)
+          process_batch(batch)
         end
       }
     rescue ForShutdown
lib/fluent/plugin/in_rdkafka_group.rb (+8 -1)

@@ -18,6 +18,8 @@ class Fluent::Plugin::RdKafkaGroupInput < Fluent::Plugin::Input
                :desc => "For 'text' format only."
   config_param :add_headers, :bool, :default => false,
                :desc => "Add kafka's message headers to event record"
+  config_param :headers_key, :string, :default => nil,
+               :desc => "Record key to store kafka's message headers"
   config_param :add_prefix, :string, :default => nil,
                :desc => "Tag prefix (Optional)"
   config_param :add_suffix, :string, :default => nil,
@@ -254,8 +256,13 @@ def run
           record[@kafka_message_key] = msg.key
         end
         if @add_headers
+          if @headers_key
+            headers_record = record[@headers_key] = {}
+          else
+            headers_record = record
+          end
           msg.headers.each_pair { |k, v|
-            record[k] = v
+            headers_record[k] = v
           }
         end
         es.add(record_time, record)

test/plugin/test_in_kafka.rb (+59 -1)

@@ -21,7 +21,6 @@ def create_driver(conf = CONFIG)
     Fluent::Test::Driver::Input.new(Fluent::KafkaInput).configure(conf)
   end
 
-
   def test_configure
     d = create_driver
     assert_equal TOPIC_NAME, d.instance.topics
@@ -63,4 +62,63 @@ def test_consume
       assert_equal expected, d.events[0][2]
     end
   end
+
+  class ConsumeWithHeadersTest < self
+    CONFIG_TEMPLATE = %[
+      @type kafka
+      brokers localhost:9092
+      format text
+      @label @kafka
+      topics %<topic>s
+      %<conf_adds>s
+    ].freeze
+
+    def topic_random
+      "kafka-input-#{SecureRandom.uuid}"
+    end
+
+    def kafka_test_context(conf_adds: '', topic: topic_random, conf_template: CONFIG_TEMPLATE)
+      kafka = Kafka.new(['localhost:9092'], client_id: 'kafka')
+      producer = kafka.producer(required_acks: 1)
+
+      config = format(conf_template, topic: topic, conf_adds: conf_adds)
+      driver = create_driver(config)
+
+      yield topic, producer, driver
+    ensure
+      kafka.delete_topic(topic)
+      kafka.close
+    end
+
+    def test_with_headers_content_merged_into_record
+      conf_adds = 'add_headers true'
+      kafka_test_context(conf_adds: conf_adds) do |topic, producer, driver|
+        driver.run(expect_records: 1, timeout: 5) do
+          producer.produce('Hello, fluent-plugin-kafka!', topic: topic, headers: { header1: 'content1' })
+          producer.deliver_messages
+        end
+
+        expected = { 'message' => 'Hello, fluent-plugin-kafka!',
+                     'header1' => 'content1' }
+        assert_equal expected, driver.events[0][2]
+      end
+    end
+
+    def test_with_headers_content_merged_under_dedicated_key
+      conf_adds = %(
+        add_headers true
+        headers_key kafka_headers
+      )
+      kafka_test_context(conf_adds: conf_adds) do |topic, producer, driver|
+        driver.run(expect_records: 1, timeout: 5) do
+          producer.produce('Hello, fluent-plugin-kafka!', topic: topic, headers: { header1: 'content1' })
+          producer.deliver_messages
+        end
+
+        expected = { 'message' => 'Hello, fluent-plugin-kafka!',
+                     'kafka_headers' => { 'header1' => 'content1' } }
+        assert_equal expected, driver.events[0][2]
+      end
+    end
+  end
 end

test/plugin/test_in_kafka_group.rb (+61 -9)

@@ -23,7 +23,6 @@ def create_driver(conf = CONFIG)
     Fluent::Test::Driver::Input.new(Fluent::KafkaGroupInput).configure(conf)
   end
 
-
  def test_configure
    d = create_driver
    assert_equal [TOPIC_NAME], d.instance.topics
@@ -48,14 +47,6 @@ def teardown
     end
 
     def test_consume
-      conf = %[
-        @type kafka
-        brokers localhost:9092
-        format text
-        @label @kafka
-        refresh_topic_interval 0
-        topics #{TOPIC_NAME}
-      ]
       d = create_driver
 
       d.run(expect_records: 1, timeout: 10) do
@@ -66,4 +57,65 @@ def test_consume
       assert_equal expected, d.events[0][2]
     end
   end
+
+  class ConsumeWithHeadersTest < self
+    CONFIG_TEMPLATE = %(
+      @type kafka
+      brokers localhost:9092
+      consumer_group fluentd
+      format text
+      refresh_topic_interval 0
+      @label @kafka
+      topics %<topic>s
+      %<conf_adds>s
+    ).freeze
+
+    def topic_random
+      "kafka-input-#{SecureRandom.uuid}"
+    end
+
+    def kafka_test_context(conf_adds: '', topic: topic_random, conf_template: CONFIG_TEMPLATE)
+      kafka = Kafka.new(['localhost:9092'], client_id: 'kafka')
+      producer = kafka.producer(required_acks: 1)
+
+      config = format(conf_template, topic: topic, conf_adds: conf_adds)
+      driver = create_driver(config)
+
+      yield topic, producer, driver
+    ensure
+      kafka.delete_topic(topic)
+      kafka.close
+    end
+
+    def test_with_headers_content_merged_into_record
+      conf_adds = 'add_headers true'
+      kafka_test_context(conf_adds: conf_adds) do |topic, producer, driver|
+        driver.run(expect_records: 1, timeout: 5) do
+          producer.produce('Hello, fluent-plugin-kafka!', topic: topic, headers: { header1: 'content1' })
+          producer.deliver_messages
+        end
+
+        expected = { 'message' => 'Hello, fluent-plugin-kafka!',
+                     'header1' => 'content1' }
+        assert_equal expected, driver.events[0][2]
+      end
+    end
+
+    def test_with_headers_content_merged_under_dedicated_key
+      conf_adds = %(
+        add_headers true
+        headers_key kafka_headers
+      )
+      kafka_test_context(conf_adds: conf_adds) do |topic, producer, driver|
+        driver.run(expect_records: 1, timeout: 5) do
+          producer.produce('Hello, fluent-plugin-kafka!', topic: topic, headers: { header1: 'content1' })
+          producer.deliver_messages
+        end
+
+        expected = { 'message' => 'Hello, fluent-plugin-kafka!',
+                     'kafka_headers' => { 'header1' => 'content1' } }
+        assert_equal expected, driver.events[0][2]
+      end
+    end
+  end
 end
