
Commit c989079 (2 parents: 8112d8f + 2d1c313)

Merge pull request #4147 from daipom/buffer-fix-error-compressing-alreay-compressed-es

Buffer: Fix that `compress` setting causes unexpected error when receiving already compressed MessagePack
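Before this change, `CompressedMessagePackEventStream#to_compressed_msgpack_stream` accepted only `time_int:`, so a caller forwarding a `packer:` keyword (which chunk writing evidently does when `compress gzip` is set; see issue #4146 referenced in the test diff below) raised ArgumentError. A minimal standalone sketch of that failure mode follows; the class and caller are illustrative stand-ins, not the actual Fluentd call site.

# Illustrative stand-in for CompressedMessagePackEventStream before the fix.
class CompressedStream
  def to_compressed_msgpack_stream(time_int: false) # old signature: no packer:
    'already-compressed bytes'
  end
end

# A caller that forwards packer: uniformly to every stream type, the way it
# already could for to_msgpack_stream, fails against the old signature:
CompressedStream.new.to_compressed_msgpack_stream(time_int: false, packer: Object.new)
# => ArgumentError (unknown keyword: :packer)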

2 files changed: +33 −19 lines

lib/fluent/event.rb (+1 −1)
@@ -294,7 +294,7 @@ def to_msgpack_stream(time_int: false, packer: nil)
       super
     end
 
-    def to_compressed_msgpack_stream(time_int: false)
+    def to_compressed_msgpack_stream(time_int: false, packer: nil)
       # time_int is always ignored because @data is always packed binary in this class
       @compressed_data
     end
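The added `packer:` keyword exists purely for interface compatibility: as the comment in the diff notes, the method ignores it (and `time_int:`) and returns the stored compressed payload unchanged. A rough usage sketch, assuming a Fluentd build that includes this fix:

require 'fluent/event'
require 'msgpack'

events = [[Fluent::EventTime.now, { 'message' => 'hello' }]]
plain = Fluent::ArrayEventStream.new(events)
already_compressed =
  Fluent::CompressedMessagePackEventStream.new(plain.to_compressed_msgpack_stream)

packer = MessagePack::Packer.new
# The hunk above shows to_msgpack_stream already took packer:; with this fix
# the compressed variant accepts the same keywords (and simply ignores them,
# returning the already-compressed payload as is).
already_compressed.to_compressed_msgpack_stream(time_int: false, packer: packer)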

test/plugin/test_output_as_buffered_compress.rb (+32 −18)
@@ -35,6 +35,16 @@ def format(tag, time, record)
       @format ? @format.call(tag, time, record) : [tag, time, record].to_json
     end
   end
+
+  def self.dummy_event_stream
+    Fluent::ArrayEventStream.new(
+      [
+        [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
+        [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
+        [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
+      ]
+    )
+  end
 end
 
 class BufferedOutputCompressTest < Test::Unit::TestCase
@@ -60,16 +70,6 @@ def waiting(seconds)
     end
   end
 
-  def dummy_event_stream
-    Fluent::ArrayEventStream.new(
-      [
-        [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
-        [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
-        [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
-      ]
-    )
-  end
-
   TMP_DIR = File.expand_path('../../tmp/test_output_as_buffered_compress', __FILE__)
 
   setup do
@@ -89,20 +89,34 @@ def dummy_event_stream
   end
 
   data(
-    handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
-    handle_stream_with_standard_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
-    handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
-    handle_stream_with_standard_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+    :buffer_config,
+    [
+      config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+      config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+      config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+      config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR,'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+    ],
   )
-  test 'call a standard format when output plugin adds data to chunk' do |buffer_config|
+  data(
+    :input_es,
+    [
+      FluentPluginOutputAsBufferedCompressTest.dummy_event_stream,
+      # If already compressed data is incoming, it must be written as is (i.e. without decompressed).
+      # https://github.com/fluent/fluentd/issues/4146
+      Fluent::CompressedMessagePackEventStream.new(FluentPluginOutputAsBufferedCompressTest.dummy_event_stream.to_compressed_msgpack_stream),
+    ],
+  )
+  test 'call a standard format when output plugin adds data to chunk' do |data|
+    buffer_config = data[:buffer_config]
+    es = data[:input_es].dup # Note: the data matrix is shared in all patterns, so we need `dup` here.
+
     @i = create_output(:async)
     @i.configure(config_element('ROOT','', {}, [buffer_config]))
     @i.start
     @i.after_start
 
     io = StringIO.new
-    es = dummy_event_stream
-    expected = es.map { |e| e }
+    expected = es.dup.map { |t, r| [t, r] }
     compressed_data = ''
 
     assert_equal :gzip, @i.buffer.compress
@@ -138,7 +152,7 @@ def dummy_event_stream
     @i.after_start
 
     io = StringIO.new
-    es = dummy_event_stream
+    es = FluentPluginOutputAsBufferedCompressTest.dummy_event_stream
     expected = es.map { |e| "#{e[1]}\n" }.join # e[1] is record
     compressed_data = ''
 