@@ -35,6 +35,16 @@ def format(tag, time, record)
       @format ? @format.call(tag, time, record) : [tag, time, record].to_json
     end
   end
+
+  def self.dummy_event_stream
+    Fluent::ArrayEventStream.new(
+      [
+        [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
+        [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
+        [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
+      ]
+    )
+  end
 end

 class BufferedOutputCompressTest < Test::Unit::TestCase
@@ -60,16 +70,6 @@ def waiting(seconds)
     end
   end

-  def dummy_event_stream
-    Fluent::ArrayEventStream.new(
-      [
-        [event_time('2016-04-13 18:33:00'), { 'name' => 'moris', 'age' => 36, 'message' => 'data1' }],
-        [event_time('2016-04-13 18:33:13'), { 'name' => 'moris', 'age' => 36, 'message' => 'data2' }],
-        [event_time('2016-04-13 18:33:32'), { 'name' => 'moris', 'age' => 36, 'message' => 'data3' }],
-      ]
-    )
-  end
-
   TMP_DIR = File.expand_path('../../tmp/test_output_as_buffered_compress', __FILE__)

   setup do
@@ -89,20 +89,34 @@ def dummy_event_stream
   end

   data(
-    handle_simple_stream: config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
-    handle_stream_with_standard_format: config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
-    handle_simple_stream_and_file_chunk: config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR, 'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
-    handle_stream_with_standard_format_and_file_chunk: config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR, 'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+    :buffer_config,
+    [
+      config_element('buffer', '', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+      config_element('buffer', 'tag', { 'flush_interval' => 1, 'compress' => 'gzip' }),
+      config_element('buffer', '', { '@type' => 'file', 'path' => File.join(TMP_DIR, 'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+      config_element('buffer', 'tag', { '@type' => 'file', 'path' => File.join(TMP_DIR, 'test.*.log'), 'flush_interval' => 1, 'compress' => 'gzip' }),
+    ],
   )
-  test 'call a standard format when output plugin adds data to chunk' do |buffer_config|
+  data(
+    :input_es,
+    [
+      FluentPluginOutputAsBufferedCompressTest.dummy_event_stream,
+      # If already-compressed data comes in, it must be written as is (i.e. without being decompressed).
+      # https://github.com/fluent/fluentd/issues/4146
+      Fluent::CompressedMessagePackEventStream.new(FluentPluginOutputAsBufferedCompressTest.dummy_event_stream.to_compressed_msgpack_stream),
+    ],
+  )
+  test 'call a standard format when output plugin adds data to chunk' do |data|
+    buffer_config = data[:buffer_config]
+    es = data[:input_es].dup # Note: the data matrix is shared across all patterns, so `dup` is needed here.
+
     @i = create_output(:async)
     @i.configure(config_element('ROOT', '', {}, [buffer_config]))
     @i.start
     @i.after_start

     io = StringIO.new
-    es = dummy_event_stream
-    expected = es.map { |e| e }
+    expected = es.dup.map { |t, r| [t, r] }
     compressed_data = ''

     assert_equal :gzip, @i.buffer.compress
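
Side note on the pattern above: the patch replaces test-unit's labeled data sets with its multi-variable form, where each data(:name, values) call adds one axis, the test body runs once per combination of axes, and the block parameter becomes a Hash keyed by the axis names (hence data[:buffer_config] and data[:input_es]). A minimal sketch of that mechanism, separate from the patch and using hypothetical axis values:

require 'test/unit'

class DataMatrixSketchTest < Test::Unit::TestCase
  # Two axes of two values each => the test body runs 2 x 2 = 4 times.
  data(:buffer_config, ['memory', 'file'])          # hypothetical values
  data(:input_es,      [:plain_es, :compressed_es]) # hypothetical values

  test 'receives one axis combination per run as a Hash' do |data|
    # data is e.g. { buffer_config: 'memory', input_es: :plain_es }
    assert_not_nil data[:buffer_config]
    assert_not_nil data[:input_es]
  end
end

Because the values in each axis are built once and reused for every combination, the real test duplicates the shared event stream with dup before consuming it.
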
@@ -138,7 +152,7 @@ def dummy_event_stream
     @i.after_start

     io = StringIO.new
-    es = dummy_event_stream
+    es = FluentPluginOutputAsBufferedCompressTest.dummy_event_stream
     expected = es.map { |e| "#{e[1]}\n" }.join # e[1] is record
     compressed_data = ''

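
The second :input_es entry exists because of https://github.com/fluent/fluentd/issues/4146: when the incoming event stream is already compressed, the output must append its compressed payload as is rather than decompressing it first. A rough sketch of how the two stream shapes relate, assuming the Fluent::ArrayEventStream, Fluent::EventTime, and Fluent::CompressedMessagePackEventStream APIs that the patch itself uses:

require 'fluent/event'
require 'fluent/time'

time  = Fluent::EventTime.now
plain = Fluent::ArrayEventStream.new(
  [
    [time, { 'name' => 'moris', 'message' => 'data1' }],
    [time, { 'name' => 'moris', 'message' => 'data2' }],
  ]
)

# Wrap the compressed msgpack payload, just as the test's second data entry does.
compressed = Fluent::CompressedMessagePackEventStream.new(plain.to_compressed_msgpack_stream)

# Iterating the compressed stream is expected to decompress it on the fly, so both
# streams should enumerate the same [time, record] pairs; the fix referenced above
# is about the chunk write path keeping the compressed payload untouched.
raise 'streams differ' unless plain.map { |t, r| [t, r] } == compressed.map { |t, r| [t, r] }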